gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2015 Terracotta, Inc., a Software AG company.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terracotta.offheapstore.paging;
import org.junit.Assert;
import org.junit.Test;
import org.terracotta.offheapstore.WriteLockedOffHeapClockCache;
import org.terracotta.offheapstore.buffersource.HeapBufferSource;
import org.terracotta.offheapstore.buffersource.OffHeapBufferSource;
import org.terracotta.offheapstore.storage.IntegerStorageEngine;
import org.terracotta.offheapstore.storage.OffHeapBufferHalfStorageEngine;
import org.terracotta.offheapstore.storage.SplitStorageEngine;
import org.terracotta.offheapstore.storage.portability.ByteArrayPortability;
import org.terracotta.offheapstore.util.PointerSizeParameterizedTest;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import static org.hamcrest.collection.IsArrayWithSize.arrayWithSize;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
/**
 * Unit tests for {@code OffHeapStorageArea}: read-buffer views (single vs. page-straddling),
 * read-only enforcement, data round-trips, page recovery under steal, and variable page sizing.
 */
public class OffHeapStorageAreaTest extends PointerSizeParameterizedTest {

  /** A 64-byte read inside a 1024-byte page must come back as exactly one buffer. */
  @Test
  public void testNonStraddlingReadBuffersReturnsSingleBuffer() {
    PageSource pageSource = new UnlimitedPageSource(new HeapBufferSource());
    OffHeapStorageArea osa = new OffHeapStorageArea(getPointerSize(), null, pageSource, 1024, false, false);
    assertThat(osa.readBuffers(osa.allocate(64), 64), arrayWithSize(1));
  }

  /** Buffers handed out by readBuffers must be read-only views of the storage. */
  @Test
  public void testNonStraddlingReadBuffersReturnsReadOnlyBuffer() {
    PageSource pageSource = new UnlimitedPageSource(new HeapBufferSource());
    OffHeapStorageArea osa = new OffHeapStorageArea(getPointerSize(), null, pageSource, 1024, false, false);
    assertThat(osa.readBuffers(osa.allocate(64), 64)[0].isReadOnly(), is(true));
  }

  /**
   * A 256-byte region on 128-byte pages straddles page boundaries; the test expects
   * three buffers (presumably the allocation is not page-aligned, splitting the span
   * across three pages — TODO confirm against OffHeapStorageArea's allocation layout).
   */
  @Test
  public void testStraddlingReadBuffersReturnsMultipleBuffers() {
    PageSource pageSource = new UnlimitedPageSource(new HeapBufferSource());
    OffHeapStorageArea osa = new OffHeapStorageArea(getPointerSize(), null, pageSource, 128, false, false);
    assertThat(osa.readBuffers(osa.allocate(256), 256), arrayWithSize(3));
  }

  /** Every buffer of a straddling read must be read-only, not just the first. */
  @Test
  public void testStraddlingReadBuffersReturnsReadOnlyBuffers() {
    PageSource pageSource = new UnlimitedPageSource(new HeapBufferSource());
    OffHeapStorageArea osa = new OffHeapStorageArea(getPointerSize(), null, pageSource, 128, false, false);
    for (ByteBuffer buffer : osa.readBuffers(osa.allocate(256), 256)) {
      assertThat(buffer.isReadOnly(), is(true));
    }
  }

  /**
   * Writes a 1024-byte ramp (0,1,2,...) then reads back spans of length i+o for
   * power-of-two i and offsets o in {-1,0,+1}, checking both content and total length.
   * The o=-1/o=+1 cases deliberately probe off-by-one behavior at page boundaries.
   */
  @Test
  public void testReadBuffersReturnsCorrectData() {
    PageSource pageSource = new UnlimitedPageSource(new HeapBufferSource());
    OffHeapStorageArea osa = new OffHeapStorageArea(getPointerSize(), null, pageSource, 128, false, false);
    long base = osa.allocate(1024);
    for (int i = 0; i < 1024; i++) {
      osa.writeByte(base + i, (byte) i);
    }
    for (int i = 1; i < 512; i <<= 1) {
      for (int o = -1; o <= 1; o++) {
        ByteBuffer[] buffers = osa.readBuffers(base, i + o);
        int j = 0;
        for (ByteBuffer buffer : buffers) {
          while (buffer.hasRemaining()) {
            assertThat(buffer.get(), is((byte) j));
            j++;
          }
        }
        // All buffers together must cover exactly i + o bytes.
        assertThat(j, is(i + o));
      }
    }
  }

  /** Single-buffer variant: readBuffer must also return a read-only view. */
  @Test
  public void testNonStraddlingReadBufferReturnsReadOnlyBuffer() {
    PageSource pageSource = new UnlimitedPageSource(new HeapBufferSource());
    OffHeapStorageArea osa = new OffHeapStorageArea(getPointerSize(), null, pageSource, 1024, false, false);
    assertThat(osa.readBuffer(osa.allocate(64), 64).isReadOnly(), is(true));
  }

  /** Even when the region straddles pages, readBuffer's merged view stays read-only. */
  @Test
  public void testStraddlingReadBufferReturnsReadOnlyBuffer() {
    PageSource pageSource = new UnlimitedPageSource(new HeapBufferSource());
    OffHeapStorageArea osa = new OffHeapStorageArea(getPointerSize(), null, pageSource, 128, false, false);
    assertThat(osa.readBuffer(osa.allocate(256), 256).isReadOnly(), is(true));
  }

  /** Same ramp round-trip as testReadBuffersReturnsCorrectData, via the single-buffer API. */
  @Test
  public void testReadBufferReturnsCorrectData() {
    PageSource pageSource = new UnlimitedPageSource(new HeapBufferSource());
    OffHeapStorageArea osa = new OffHeapStorageArea(getPointerSize(), null, pageSource, 128, false, false);
    long base = osa.allocate(1024);
    for (int i = 0; i < 1024; i++) {
      osa.writeByte(base + i, (byte) i);
    }
    for (int i = 1; i < 512; i <<= 1) {
      for (int o = -1; o <= 1; o++) {
        ByteBuffer buffer = osa.readBuffer(base, i + o);
        int j = 0;
        while (buffer.hasRemaining()) {
          assertThat(buffer.get(), is((byte) j));
          j++;
        }
        assertThat(j, is(i + o));
      }
    }
  }

  /**
   * Fills a cache until its storage engine has taken two pages, then forcibly releases
   * pages one at a time and checks the cache sheds exactly the entries that lived on
   * each released page. Relies on each release() evicting one page's worth of mappings.
   */
  @Test
  public void testRecoveryOfPages() {
    GettablePageSource source = new GettablePageSource();
    Map<Integer, byte[]> test = new WriteLockedOffHeapClockCache<>(new UnlimitedPageSource(new OffHeapBufferSource()), new SplitStorageEngine<>(new IntegerStorageEngine(), new OffHeapBufferHalfStorageEngine<>(source, 1024, ByteArrayPortability.INSTANCE)));
    int put = 0;
    while (source.allocated.size() < 2) {
      test.put(put++, new byte[128]);
    }
    Assert.assertEquals(put, test.size());
    source.release();
    // Exactly one entry (the last one put) is expected to be lost with the page.
    Assert.assertEquals(put - 1, test.size());
    Assert.assertNull(test.get(put - 1));
    for (int i = 0; i < put - 1; i++) {
      Assert.assertNotNull(test.get(i));
    }
    source.release();
    Assert.assertTrue(test.isEmpty());
    for (int i = 0; i < put; i++) {
      Assert.assertNull(test.get(i));
    }
  }

  /**
   * Exercises the variable-page-size constructor (pages grow from 1 byte up to 1024):
   * writes 2048 ints, reads them all back by saved pointer, then frees everything.
   */
  @Test
  public void testVariablePageSize() {
    PageSource source = new UnlimitedPageSource(new HeapBufferSource());
    OffHeapStorageArea storage = new OffHeapStorageArea(getPointerSize(), null, source, 1, 1024, false, false);
    Map<Integer, Long> locations = new HashMap<>();
    for (int i = 0; i < 2048; i++) {
      long pointer = storage.allocate(Integer.SIZE / Byte.SIZE);
      storage.writeInt(pointer, i);
      locations.put(i, pointer);
    }
    System.err.println(storage);
    for (int i = 0; i < 2048; i++) {
      int pointer = locations.get(i).intValue();
      Assert.assertEquals(i, storage.readInt(pointer));
    }
    for (Long pointer : locations.values()) {
      storage.free(pointer);
    }
  }

  // @Test
  // public void testVariablePageSizeAddressLogic() {
  // PageSource source = new UnlimitedPageSource(new HeapBufferSource());
  //
  // OffHeapStorageArea storage = new OffHeapStorageArea(null, source, 1, 1024, false, false);
  //
  // for (int i = 0, address = 0; i < 100; i++) {
  // int size = storage.pageSizeFor(i);
  // int base = storage.addressForPage(i);
  // Assert.assertTrue(size <= 1024);
  // Assert.assertEquals(address, base);
  // address += size;
  //// System.err.println("Page : " + i);
  //// System.err.println("Size : " + size);
  //// System.err.println("Base : " + base);
  //// System.err.println();
  // }
  //
  // for (int i = 0; i < 10240; i++) {
  // int page = storage.pageIndexFor(i);
  // int pageAddress = storage.pageAddressFor(i);
  // Assert.assertEquals(i, pageAddress + storage.addressForPage(page));
  //// System.err.println("Address : " + i);
  //// System.err.println("Page Index : " + page);
  //// System.err.println("Page Address : " + pageAddress);
  //// System.err.println();
  // }
  // }

  /**
   * PageSource wrapper that records every allocated page so tests can forcibly
   * release a random page (simulating theft/recovery) via {@link #release()}.
   */
  static class GettablePageSource implements PageSource {
    final Random rndm = new Random();
    final PageSource delegate = new UnlimitedPageSource(new OffHeapBufferSource());
    // Pages currently outstanding (allocated but not yet freed).
    final List<Page> allocated = new LinkedList<>();

    @Override
    public Page allocate(int size, boolean thief, boolean victim, OffHeapStorageArea owner) {
      Page p = delegate.allocate(size, thief, victim, owner);
      allocated.add(p);
      return p;
    }

    @Override
    public void free(Page page) {
      delegate.free(page);
      allocated.remove(page);
    }

    /** Picks a random outstanding page, tells its owner to release it, then frees it. */
    public void release() {
      Page p = allocated.get(rndm.nextInt(allocated.size()));
      p.binding().release(new LinkedList<>(Collections.singleton(p)));
      free(p);
    }
  }
}
| |
/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.auraframework.impl.root.component;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.auraframework.Aura;
import org.auraframework.def.AttributeDef;
import org.auraframework.def.AttributeDefRef;
import org.auraframework.def.BaseComponentDef;
import org.auraframework.def.ComponentDefRefArray;
import org.auraframework.def.ControllerDef;
import org.auraframework.def.DefDescriptor;
import org.auraframework.def.InterfaceDef;
import org.auraframework.def.ModelDef;
import org.auraframework.def.RendererDef;
import org.auraframework.def.RootDefinition;
import org.auraframework.def.TypeDef;
import org.auraframework.expression.PropertyReference;
import org.auraframework.impl.java.model.JavaModel;
import org.auraframework.impl.root.AttributeDefImpl;
import org.auraframework.impl.root.AttributeSetImpl;
import org.auraframework.impl.system.DefDescriptorImpl;
import org.auraframework.instance.Action;
import org.auraframework.instance.AttributeSet;
import org.auraframework.instance.AuraValueProviderType;
import org.auraframework.instance.BaseComponent;
import org.auraframework.instance.Component;
import org.auraframework.instance.Instance;
import org.auraframework.instance.InstanceStack;
import org.auraframework.instance.Model;
import org.auraframework.instance.ValueProvider;
import org.auraframework.service.LoggingService;
import org.auraframework.system.AuraContext;
import org.auraframework.system.MasterDefRegistry;
import org.auraframework.throwable.AuraRuntimeException;
import org.auraframework.throwable.quickfix.DefinitionNotFoundException;
import org.auraframework.throwable.quickfix.InvalidDefinitionException;
import org.auraframework.throwable.quickfix.MissingRequiredAttributeException;
import org.auraframework.throwable.quickfix.QuickFixException;
import org.auraframework.util.AuraTextUtil;
import org.auraframework.util.json.Json;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
/**
 * Shared base implementation for Aura component/application instances. Handles the
 * constructor dance (instance-stack push/pop, attribute-set wiring, value-provider
 * registration), model creation, attribute validation, and JSON serialization.
 * NOTE(review): constructor ordering is significant — pushInstance happens in the
 * private base constructor and the matching popInstance in each public constructor.
 */
public abstract class BaseComponentImpl<D extends BaseComponentDef, I extends BaseComponent<D, I>> implements
        BaseComponent<D, I> {

    /**
     * Top level component instance with attributes passed in. Builds out the tree recursively, but only after the
     * attribute values are all set.
     *
     * @param descriptor the definition descriptor to instantiate
     * @param attributes raw attribute name/value pairs to apply before finishInit
     * @throws QuickFixException
     */
    public BaseComponentImpl(DefDescriptor<D> descriptor, Map<String, Object> attributes) throws QuickFixException {
        this(descriptor, null, (Map<String, Object>) null, null, null);
        LoggingService loggingService = Aura.getLoggingService();
        loggingService.startTimer(LoggingService.TIMER_COMPONENT_CREATION);
        try {
            this.attributeSet.set(attributes);
            finishInit();
        } finally {
            loggingService.stopTimer(LoggingService.TIMER_COMPONENT_CREATION);
        }
        // Matches the pushInstance done in the private base constructor.
        Aura.getContextService().getCurrentContext().getInstanceStack().popInstance(this);
    }

    // Variant that starts from an already-resolved definition instead of a descriptor.
    @SuppressWarnings("unchecked")
    public <T extends D> BaseComponentImpl(T def, Map<String, Object> attributes) throws QuickFixException {
        this((DefDescriptor<D>) def.getDescriptor(), null, (Map<String, Object>) null, null, def);
        LoggingService loggingService = Aura.getLoggingService();
        loggingService.startTimer(LoggingService.TIMER_COMPONENT_CREATION);
        try {
            this.attributeSet.set(attributes);
            finishInit();
        } finally {
            loggingService.stopTimer(LoggingService.TIMER_COMPONENT_CREATION);
        }
        Aura.getContextService().getCurrentContext().getInstanceStack().popInstance(this);
    }

    /**
     * Constructor used to create instances for all ComponentDefRefs, which come from both the children and the facets
     * (attributes). Builds out the tree recursively, but only after all the attribute values, including facets are set.
     *
     * @throws QuickFixException
     */
    public BaseComponentImpl(DefDescriptor<D> descriptor, Collection<AttributeDefRef> attributeDefRefs,
            BaseComponent<?, ?> attributeValueProvider, String localId) throws QuickFixException {
        this(descriptor, attributeValueProvider, null, null, null);
        LoggingService loggingService = Aura.getLoggingService();
        loggingService.startTimer(LoggingService.TIMER_COMPONENT_CREATION);
        try {
            this.attributeSet.set(attributeDefRefs);
            finishInit();
        } finally {
            loggingService.stopTimer(LoggingService.TIMER_COMPONENT_CREATION);
        }
        Aura.getContextService().getCurrentContext().getInstanceStack().popInstance(this);
        this.localId = localId;
    }

    /**
     * For creating supers
     *
     * @throws QuickFixException
     */
    protected BaseComponentImpl(DefDescriptor<D> descriptor, I extender, BaseComponent<?, ?> attributeValueProvider,
            I concreteComponent) throws QuickFixException {
        this(descriptor, attributeValueProvider, null, extender, null);
        LoggingService loggingService = Aura.getLoggingService();
        loggingService.startTimer(LoggingService.TIMER_COMPONENT_CREATION);
        try {
            this.concreteComponent = concreteComponent;
            // Supers inherit facets/attributes from the extending component.
            attributeSet.set(extender.getDescriptor().getDef().getFacets(), extender.getAttributes());
            finishInit();
        } finally {
            loggingService.stopTimer(LoggingService.TIMER_COMPONENT_CREATION);
        }
        Aura.getContextService().getCurrentContext().getInstanceStack().popInstance(this);
    }

    /**
     * The base constructor that the other 2 use to initialize the object, but not the attributes. Sets all defaults for
     * attributes. Does not build out the tree recursively.
     *
     * @param descriptor The descriptor for this component's definition
     * @param def TODO
     * @throws QuickFixException
     */
    private BaseComponentImpl(DefDescriptor<D> descriptor, BaseComponent<?, ?> attributeValueProvider,
            Map<String, Object> valueProviders, I extender, D def) throws QuickFixException {
        AuraContext context = Aura.getContextService().getCurrentContext();
        DefDescriptor<? extends RootDefinition> desc = null;
        InstanceStack instanceStack = context.getInstanceStack();
        Instance<?> accessParent = instanceStack.getAccess();
        this.descriptor = descriptor;
        this.originalDescriptor = descriptor;
        this.path = instanceStack.getPath();
        // Pushed here; the public constructors pop after finishInit.
        instanceStack.pushInstance(this, descriptor);
        if (def == null) {
            try {
                def = descriptor.getDef();
                // Abstract defs without a provider can never be instantiated directly.
                if (extender == null && def.isAbstract() && def.getProviderDescriptor() == null) {
                    throw new InvalidDefinitionException(String.format("%s cannot be instantiated directly.",
                            descriptor), def.getLocation());
                }
                if (extender == null && (def.isAbstract() || def.getLocalProviderDef() != null)) {
                    this.intfDescriptor = def.getDescriptor();
                }
                desc = descriptor;
            } catch (DefinitionNotFoundException e) {
                // Only swallow the miss if it was for *this* descriptor; re-throw nested misses.
                if (!e.getDescriptor().equals(descriptor)) {
                    throw e;
                }
                // Fall back: the qualified name may refer to an interface (provider pattern).
                DefDescriptor<InterfaceDef> intfDescriptor = DefDescriptorImpl.getInstance(descriptor.getQualifiedName(), InterfaceDef.class);
                InterfaceDef intfDef = intfDescriptor.getDef();
                if (intfDef != null) {
                    this.intfDescriptor = intfDescriptor;
                    desc = intfDescriptor;
                } else {
                    // def not found
                    throw new DefinitionNotFoundException(descriptor);
                }
            }
        } else {
            desc = descriptor;
        }
        MasterDefRegistry defRegistry = Aura.getDefinitionService().getDefRegistry();
        if (accessParent != null) {
            // Ensure that the access 'Parent' is allowed to create an instance of this component
            defRegistry.assertAccess(accessParent.getDescriptor(), desc.getDef());
        }
        LoggingService loggingService = Aura.getLoggingService();
        loggingService.startTimer(LoggingService.TIMER_COMPONENT_CREATION);
        try {
            this.globalId = getNextGlobalId();
            this.attributeSet = new AttributeSetImpl(desc, attributeValueProvider, this);
            if (valueProviders != null) {
                this.valueProviders.putAll(valueProviders);
            }
            // "v" provider: the component's own attribute set.
            this.valueProviders.put(AuraValueProviderType.VIEW.getPrefix(), attributeSet);
            // def can be null if a definition not found exception was thrown for that definition. Odd.
            if (def != null) {
                ControllerDef cd = def.getDeclaredControllerDef();
                if (cd != null) {
                    // Ensure that this def is allowed to create an instance of the controller
                    defRegistry.assertAccess(descriptor, cd);
                    this.valueProviders.put(AuraValueProviderType.CONTROLLER.getPrefix(), cd);
                }
            }
            loggingService.incrementNum(LoggingService.CMP_COUNT);
        } finally {
            loggingService.stopTimer(LoggingService.TIMER_COMPONENT_CREATION);
        }
    }

    /**
     * Second-phase initialization: provider injection, model creation, super chain,
     * required-attribute validation, and registration with the context.
     */
    protected void finishInit() throws QuickFixException {
        AuraContext context = Aura.getContextService().getCurrentContext();
        injectComponent();
        createModel();
        // "$" marks the super-component segment on the creation path.
        context.getInstanceStack().setAttributeName("$");
        createSuper();
        context.getInstanceStack().clearAttributeName("$");
        validateAttributes();
        BaseComponentDef def = getComponentDef();
        DefDescriptor<RendererDef> rendererDesc = def.getRendererDescriptor();
        if ((rendererDesc != null && rendererDesc.getDef().isLocal())) {
            // A server-side renderer means the client cannot render this alone.
            hasLocalDependencies = true;
        }
        context.registerComponent(this);
    }

    /**
     * Walks the super-component chain looking for a non-null attribute value.
     *
     * @return the first non-null value found, or null if no super provides one
     */
    private Object findValue(String name) throws QuickFixException {
        BaseComponent<?, ?> zuper = this.getSuper();
        while (zuper != null) {
            Object val = zuper.getAttributes().getValue(name);
            if (val != null) {
                return val;
            }
            zuper = zuper.getSuper();
        }
        return null;
    }

    /**
     * Fails if any required attribute is still missing after checking the super chain.
     *
     * @throws MissingRequiredAttributeException for the first unresolved required attribute
     */
    private void validateAttributes() throws QuickFixException {
        Set<AttributeDef> missingAttributes = attributeSet.getMissingAttributes();
        if (missingAttributes != null && !missingAttributes.isEmpty()) {
            for (AttributeDef attr : missingAttributes) {
                if (this.findValue(attr.getName()) == null) {
                    DefDescriptor<? extends RootDefinition> desc = attributeSet.getRootDefDescriptor();
                    if (attributeSet.getValueProvider() != null) {
                        desc = attributeSet.getValueProvider().getDescriptor();
                    }
                    throw new MissingRequiredAttributeException(desc, attr.getName(), attr.getLocation());
                }
            }
        }
    }

    // Subclasses build the super-component instance (if any).
    protected abstract void createSuper() throws DefinitionNotFoundException, QuickFixException;

    // Subclasses run provider injection (may swap this.descriptor for a concrete one).
    protected abstract void injectComponent() throws QuickFixException;

    public D getComponentDef() throws QuickFixException {
        return descriptor.getDef();
    }

    @Override
    public DefDescriptor<D> getDescriptor() {
        return descriptor;
    }

    @Override
    public String getGlobalId() {
        return globalId;
    }

    @Override
    public String getLocalId() {
        return localId;
    }

    /**
     * @return All of the Attributes for this Component
     */
    @Override
    public AttributeSet getAttributes() {
        return attributeSet;
    }

    /**
     * this is only to serialize the general shape and ids, to ensure that we generate the same stuff in the client
     */
    @Override
    public void serialize(Json json) throws IOException {
        AuraContext context = Aura.getContextService().getCurrentContext();
        BaseComponent<?, ?> oldComponent = context.setCurrentComponent(this);
        try {
            BaseComponentDef def = getComponentDef();
            json.writeMapBegin();
            //
            // Be very careful here. descriptor != def.getDescriptor().
            // This is 'case normalizing', as the client is actually case
            // sensitive for descriptors (ugh!).
            //
            json.writeMapEntry("componentDef", def.getDescriptor());
            if (!descriptor.equals(originalDescriptor)) {
                // Provider swapped the descriptor; client needs the original too.
                json.writeMapEntry("original", originalDescriptor);
            }
            json.writeMapEntry("creationPath", getPath());
            if ((attributeSet.getValueProvider() == null || hasProvidedAttributes) && !attributeSet.isEmpty()) {
                json.writeMapEntry("attributes", attributeSet);
            }
            if (def.getRendererDescriptor() != null) {
                RendererDef rendererDef = def.getRendererDescriptor().getDef();
                if (rendererDef.isLocal()) {
                    StringWriter sw = new StringWriter();
                    rendererDef.render(this, sw);
                    // Not writing directly to json.appendable because then it wouldn't get escaped.
                    // ideally Json would have a FilterWriter that escapes that we could use here.
                    json.writeMapEntry("rendering", sw.toString());
                }
            }
            if (model != null && model.getDescriptor().getDef().hasMembers()) {
                json.writeMapEntry("model", model);
            }
            // KRIS: COMPONENT CLASSES
            // We need to return the definitions here since the client doesn't have them.
            // Do we know if it doesn't have them? This can be a getComponent action call, but is it just
            // wanting the instance and not the def?
            // I know it does not have it in some instances, but now that I think about it, it probably
            // also has the def sometimes too so this will be redundant.
            //json.writeMapEntry("componentClass", def.getComponentClass());
            json.writeMapEnd();
        } catch (QuickFixException e) {
            throw new AuraRuntimeException(e);
        } finally {
            context.setCurrentComponent(oldComponent);
        }
    }

    /**
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        return String.format("%s {%s}", descriptor.toString(), getGlobalId());
    }

    /**
     * instantiates the model
     *
     * @throws QuickFixException
     */
    private void createModel() throws QuickFixException {
        AuraContext context = Aura.getContextService().getCurrentContext();
        context.pushCallingDescriptor(descriptor);
        BaseComponent<?, ?> oldComponent = context.setCurrentComponent(this);
        try {
            ModelDef modelDef = getComponentDef().getModelDef();
            if (modelDef != null) {
                Aura.getDefinitionService().getDefRegistry().assertAccess(descriptor, modelDef);
                model = modelDef.newInstance();
                if (modelDef.hasMembers()) {
                    // A server-side model with members means client rendering alone isn't enough.
                    hasLocalDependencies = true;
                    valueProviders.put(AuraValueProviderType.MODEL.getPrefix(), model);
                }
            }
        } finally {
            context.setCurrentComponent(oldComponent);
            context.popCallingDescriptor();
        }
    }

    /**
     * @return the next id to use, the ordering must match exactly what is generated client side
     */
    private static String getNextGlobalId() {
        AuraContext context = Aura.getContextService().getCurrentContext();
        String num = Aura.getContextService().getCurrentContext().getNum();
        Action action = context.getCurrentAction();
        int id;
        String suffix;
        if (action != null) {
            // Inside an action: ids are scoped per-action so they match the client.
            id = action.getInstanceStack().getNextId();
            suffix = action.getId();
        } else {
            id = context.getNextId();
            suffix = num;
        }
        String globalId = String.valueOf(id);
        if (suffix != null) {
            globalId = String.format("%s:%s", globalId, suffix);
        }
        return globalId;
    }

    @Override
    public boolean hasLocalDependencies() {
        return hasLocalDependencies;
    }

    /**
     * Resolves an expression root (v, m, c, ...) against this component's value
     * providers, falling back to the context's global providers.
     */
    @Override
    public Object getValue(PropertyReference expr) throws QuickFixException {
        AuraContext context = Aura.getContextService().getCurrentContext();
        BaseComponent<?, ?> oldComponent = context.setCurrentComponent(this);
        try {
            String prefix = expr.getRoot();
            if ("c".equals(prefix)) {
                // No-op; presumably left as a debugger breakpoint target — TODO confirm.
                prefix.toString();
            }
            PropertyReference stem = expr.getStem();
            Object root = valueProviders.get(prefix);
            if (root == null) {
                root = context.getGlobalProviders().get(prefix);
            }
            if (root != null) {
                if (stem != null) {
                    if (root instanceof ValueProvider) {
                        return ((ValueProvider) root).getValue(stem);
                    } else {
                        return JavaModel.getValue(root, stem, null);
                        // no throw error at runtime even though expression
                        // reference nothing
                        // return null;
                    }
                } else {
                    // they asked for just the root.
                    // TODO: this should only work for foreach, shouldn't be
                    // able to {!m}
                    return root;
                }
            }
            return null;
        } finally {
            context.setCurrentComponent(oldComponent);
        }
    }

    /**
     * Records a child component's globalId under its localId ("aura:id") for lookup.
     */
    @Override
    public void index(Component component) {
        String id = component.getLocalId();
        if (!AuraTextUtil.isNullEmptyOrWhitespace(id)) {
            List<String> globalIds = index.get(id);
            if (globalIds == null) {
                globalIds = Lists.newArrayList();
                index.put(id, globalIds);
            }
            globalIds.add(component.getGlobalId());
        }
    }

    @Override
    public I getSuper() {
        return superComponent;
    }

    @Override
    public Model getModel() {
        return model;
    }

    @Override
    public String getPath() {
        return path;
    }

    // Lazily-resolved descriptors for the two facet types reinitializeModel must walk.
    static private DefDescriptor<TypeDef> componentArrType;
    static private DefDescriptor<TypeDef> componentDefRefArrayType;

    /**
     * Re-creates this component's model and recursively reinitializes models of the
     * super chain and of every component held in Aura.Component[] /
     * Aura.ComponentDefRef[] typed attributes.
     */
    @Override
    public void reinitializeModel() throws QuickFixException {
        //
        // This is a visitor pattern, implemented here with a hardwire.
        //
        BaseComponentDef def = descriptor.getDef();
        if (componentArrType == null) {
            componentArrType = Aura.getDefinitionService().getDefDescriptor("aura://Aura.Component[]", TypeDef.class);
        }
        if(componentDefRefArrayType == null) {
            componentDefRefArrayType = Aura.getDefinitionService().getDefDescriptor("aura://Aura.ComponentDefRef[]", TypeDef.class);
        }
        createModel();
        I zuper = getSuper();
        if (zuper != null) {
            zuper.reinitializeModel();
        }
        //
        // Walk all attributes, pushing the reinitialize model in to those as well.
        //
        for (Map.Entry<DefDescriptor<AttributeDef>, AttributeDef> foo : def.getAttributeDefs().entrySet()) {
            AttributeDef attr = foo.getValue();
            DefDescriptor<?> typeDesc;
            if (attr instanceof AttributeDefImpl) {
                // Fast path: the impl exposes the type descriptor directly.
                AttributeDefImpl attri = (AttributeDefImpl) attr;
                typeDesc = attri.getTypeDesc();
            } else {
                // bad.
                typeDesc = attr.getTypeDef().getDescriptor();
            }
            if (componentArrType.equals(typeDesc)) {
                Object val = getAttributes().getValue(foo.getKey().getName());
                if (val instanceof List) {
                    @SuppressWarnings("unchecked")
                    List<BaseComponent<?, ?>> facet = (List<BaseComponent<?, ?>>) val;
                    for (BaseComponent<?, ?> c : facet) {
                        c.reinitializeModel();
                    }
                }
            }
            else if (componentDefRefArrayType.equals(typeDesc)) {
                ComponentDefRefArray val = getAttributes().getValue(foo.getKey().getName(), ComponentDefRefArray.class);
                if (val != null) {
                    //@SuppressWarnings("unchecked")
                    //List<BaseComponent<?, ?>> facet = (List<BaseComponent<?, ?>>) val;
                    for (Object c : val.getList()) {
                        if(c instanceof BaseComponent) {
                            ((BaseComponent<?, ?>)c).reinitializeModel();
                        }
                    }
                }
            }
        }
    }

    // Descriptor as originally requested, before any provider substitution.
    protected final DefDescriptor<D> originalDescriptor;
    // Current (possibly provider-substituted) descriptor.
    protected DefDescriptor<D> descriptor;
    // Interface descriptor when instantiating via an interface/provider.
    protected DefDescriptor<? extends RootDefinition> intfDescriptor;
    private final String globalId;
    private final String path;
    protected String localId;
    protected final AttributeSet attributeSet;
    private Model model;
    protected I superComponent;
    protected I concreteComponent;
    protected boolean remoteProvider = false;
    // localId -> globalIds of indexed children.
    private final Map<String, List<String>> index = Maps.newLinkedHashMap();
    // FIXME - the values should be ValueProviders, but first we need to wrap non-m/v/c providers.
    protected final Map<String, Object> valueProviders = new LinkedHashMap<>();
    protected boolean hasLocalDependencies = false;
    protected boolean hasProvidedAttributes;
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.conscrypt;
import java.io.IOException;
import java.io.InputStream;
import java.io.PushbackInputStream;
import java.nio.charset.StandardCharsets;
import java.security.cert.CRL;
import java.security.cert.CRLException;
import java.security.cert.CertPath;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactorySpi;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
public class OpenSSLX509CertificateFactory extends CertificateFactorySpi {
private static final byte[] PKCS7_MARKER = "-----BEGIN PKCS7".getBytes();
private static final int PUSHBACK_SIZE = 64;
    /**
     * Internal checked exception used by {@code Parser} to signal any failure while
     * decoding certificate/CRL input; translated to CertificateException/CRLException
     * at the SPI boundary.
     */
    static class ParsingException extends Exception {
        private static final long serialVersionUID = 8390802697728301325L;

        public ParsingException(String message) {
            super(message);
        }

        public ParsingException(Exception cause) {
            super(cause);
        }

        public ParsingException(String message, Exception cause) {
            super(message, cause);
        }
    }
/**
* The code for X509 Certificates and CRL is pretty much the same. We use
* this abstract class to share the code between them. This makes it ugly,
* but it's already written in this language anyway.
*/
private static abstract class Parser<T> {
public T generateItem(InputStream inStream) throws ParsingException {
if (inStream == null) {
throw new ParsingException("inStream == null");
}
final boolean markable = inStream.markSupported();
if (markable) {
inStream.mark(PKCS7_MARKER.length);
}
final PushbackInputStream pbis = new PushbackInputStream(inStream, PUSHBACK_SIZE);
try {
final byte[] buffer = new byte[PKCS7_MARKER.length];
final int len = pbis.read(buffer);
if (len < 0) {
/* No need to reset here. The stream was empty or EOF. */
throw new ParsingException("inStream is empty");
}
pbis.unread(buffer, 0, len);
if (buffer[0] == '-') {
if (len == PKCS7_MARKER.length && Arrays.equals(PKCS7_MARKER, buffer)) {
List<? extends T> items = fromPkcs7PemInputStream(pbis);
if (items.size() == 0) {
return null;
}
items.get(0);
} else {
return fromX509PemInputStream(pbis);
}
}
/* PKCS#7 bags have a byte 0x06 at position 4 in the stream. */
if (buffer[4] == 0x06) {
List<? extends T> certs = fromPkcs7DerInputStream(pbis);
if (certs.size() == 0) {
return null;
}
return certs.get(0);
} else {
return fromX509DerInputStream(pbis);
}
} catch (Exception e) {
if (markable) {
try {
inStream.reset();
} catch (IOException ignored) {
}
}
throw new ParsingException(e);
}
}
public Collection<? extends T> generateItems(InputStream inStream)
throws ParsingException {
if (inStream == null) {
throw new ParsingException("inStream == null");
}
try {
if (inStream.available() == 0) {
return Collections.emptyList();
}
} catch (IOException e) {
throw new ParsingException("Problem reading input stream", e);
}
final boolean markable = inStream.markSupported();
if (markable) {
inStream.mark(PUSHBACK_SIZE);
}
/* Attempt to see if this is a PKCS#7 bag. */
final PushbackInputStream pbis = new PushbackInputStream(inStream, PUSHBACK_SIZE);
try {
final byte[] buffer = new byte[PKCS7_MARKER.length];
final int len = pbis.read(buffer);
if (len < 0) {
/* No need to reset here. The stream was empty or EOF. */
throw new ParsingException("inStream is empty");
}
pbis.unread(buffer, 0, len);
if (len == PKCS7_MARKER.length && Arrays.equals(PKCS7_MARKER, buffer)) {
return fromPkcs7PemInputStream(pbis);
}
/* PKCS#7 bags have a byte 0x06 at position 4 in the stream. */
if (buffer[4] == 0x06) {
return fromPkcs7DerInputStream(pbis);
}
} catch (Exception e) {
if (markable) {
try {
inStream.reset();
} catch (IOException ignored) {
}
}
throw new ParsingException(e);
}
/*
* It wasn't, so just try to keep grabbing certificates until we
* can't anymore.
*/
final List<T> coll = new ArrayList<T>();
T c = null;
do {
/*
* If this stream supports marking, try to mark here in case
* there is an error during certificate generation.
*/
if (markable) {
inStream.mark(PUSHBACK_SIZE);
}
try {
c = generateItem(pbis);
coll.add(c);
} catch (ParsingException e) {
/*
* If this stream supports marking, attempt to reset it to
* the mark before the failure.
*/
if (markable) {
try {
inStream.reset();
} catch (IOException ignored) {
}
}
c = null;
}
} while (c != null);
return coll;
}
protected abstract T fromX509PemInputStream(InputStream pbis) throws ParsingException;
protected abstract T fromX509DerInputStream(InputStream pbis) throws ParsingException;
protected abstract List<? extends T> fromPkcs7PemInputStream(InputStream is)
throws ParsingException;
protected abstract List<? extends T> fromPkcs7DerInputStream(InputStream is)
throws ParsingException;
}
    // Parser specialization for X.509 certificates; each hook delegates to the
    // corresponding native-backed factory method on OpenSSLX509Certificate.
    private Parser<OpenSSLX509Certificate> certificateParser =
            new Parser<OpenSSLX509Certificate>() {
                @Override
                public OpenSSLX509Certificate fromX509PemInputStream(InputStream is)
                        throws ParsingException {
                    return OpenSSLX509Certificate.fromX509PemInputStream(is);
                }

                @Override
                public OpenSSLX509Certificate fromX509DerInputStream(InputStream is)
                        throws ParsingException {
                    return OpenSSLX509Certificate.fromX509DerInputStream(is);
                }

                @Override
                public List<? extends OpenSSLX509Certificate>
                        fromPkcs7PemInputStream(InputStream is) throws ParsingException {
                    return OpenSSLX509Certificate.fromPkcs7PemInputStream(is);
                }

                @Override
                public List<? extends OpenSSLX509Certificate>
                        fromPkcs7DerInputStream(InputStream is) throws ParsingException {
                    return OpenSSLX509Certificate.fromPkcs7DerInputStream(is);
                }
            };
    /**
     * CRL parser: each callback delegates to the corresponding
     * {@link OpenSSLX509CRL} static factory method.
     */
    private Parser<OpenSSLX509CRL> crlParser =
            new Parser<OpenSSLX509CRL>() {
                @Override
                public OpenSSLX509CRL fromX509PemInputStream(InputStream is)
                        throws ParsingException {
                    return OpenSSLX509CRL.fromX509PemInputStream(is);
                }
                @Override
                public OpenSSLX509CRL fromX509DerInputStream(InputStream is)
                        throws ParsingException {
                    return OpenSSLX509CRL.fromX509DerInputStream(is);
                }
                @Override
                public List<? extends OpenSSLX509CRL> fromPkcs7PemInputStream(InputStream is)
                        throws ParsingException {
                    return OpenSSLX509CRL.fromPkcs7PemInputStream(is);
                }
                @Override
                public List<? extends OpenSSLX509CRL> fromPkcs7DerInputStream(InputStream is)
                        throws ParsingException {
                    return OpenSSLX509CRL.fromPkcs7DerInputStream(is);
                }
            };
@Override
public Certificate engineGenerateCertificate(InputStream inStream) throws CertificateException {
try {
return certificateParser.generateItem(inStream);
} catch (ParsingException e) {
throw new CertificateException(e);
}
}
@Override
public Collection<? extends Certificate> engineGenerateCertificates(
InputStream inStream) throws CertificateException {
try {
return certificateParser.generateItems(inStream);
} catch (ParsingException e) {
throw new CertificateException(e);
}
}
@Override
public CRL engineGenerateCRL(InputStream inStream) throws CRLException {
try {
return crlParser.generateItem(inStream);
} catch (ParsingException e) {
throw new CRLException(e);
}
}
@Override
public Collection<? extends CRL> engineGenerateCRLs(InputStream inStream) throws CRLException {
if (inStream == null) {
return Collections.emptyList();
}
try {
return crlParser.generateItems(inStream);
} catch (ParsingException e) {
throw new CRLException(e);
}
}
    /** Returns the cert-path encodings supported by {@link OpenSSLX509CertPath}. */
    @Override
    public Iterator<String> engineGetCertPathEncodings() {
        return OpenSSLX509CertPath.getEncodingsIterator();
    }
    /** Generates a cert path from {@code inStream} using the default encoding. */
    @Override
    public CertPath engineGenerateCertPath(InputStream inStream) throws CertificateException {
        return OpenSSLX509CertPath.fromEncoding(inStream);
    }
    /** Generates a cert path from {@code inStream} using the named encoding. */
    @Override
    public CertPath engineGenerateCertPath(InputStream inStream, String encoding)
            throws CertificateException {
        return OpenSSLX509CertPath.fromEncoding(inStream, encoding);
    }
@Override
public CertPath engineGenerateCertPath(List<? extends Certificate> certificates)
throws CertificateException {
final List<X509Certificate> filtered = new ArrayList<X509Certificate>(certificates.size());
for (int i = 0; i < certificates.size(); i++) {
final Certificate c = certificates.get(i);
if (!(c instanceof X509Certificate)) {
throw new CertificateException("Certificate not X.509 type at index " + i);
}
filtered.add((X509Certificate) c);
}
return new OpenSSLX509CertPath(filtered);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.commons.net.util;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Arrays;
import org.junit.Ignore;
import org.junit.Test;
/** Unit tests for the commons-net {@code Base64} codec. */
public class Base64Test {
    @Test
    public void testBase64() {
        final Base64 b64 = new Base64();
        assertFalse(b64.isUrlSafe());
    }
    @Test
    public void testBase64Boolean() {
        final Base64 b64 = new Base64(true);
        assertTrue(b64.isUrlSafe());
        // assertArrayEquals gives an element-wise failure message, unlike
        // assertTrue(Arrays.equals(...)).
        assertArrayEquals(new byte[]{'\r','\n'}, b64.getLineSeparator());
    }
    @Test
    public void testBase64Int() {
        Base64 b64;
        b64 = new Base64(8);
        assertFalse(b64.isUrlSafe());
        assertEquals(8, b64.getLineLength());
        // line length is expected to round down to a multiple of 4
        b64 = new Base64(11);
        assertEquals(8, b64.getLineLength());
    }
    @Test
    public void testBase64IntByteArray() {
        final Base64 b64 = new Base64(8, new byte[]{});
        assertFalse(b64.isUrlSafe());
        assertArrayEquals(new byte[]{}, b64.getLineSeparator());
    }
    @Test
    public void testBase64IntByteArrayBoolean() {
        Base64 b64;
        b64 = new Base64(8, new byte[]{}, false);
        assertFalse(b64.isUrlSafe());
        b64 = new Base64(8, new byte[]{}, true);
        assertTrue(b64.isUrlSafe());
    }
    @Test @Ignore
    public void testDecodeBase64ByteArray() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testDecodeBase64String() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testDecodeByteArray() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testDecodeInteger() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testDecodeObject() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testDecodeString() {
        fail("Not yet implemented");
    }
    @Test
    public void testEncodeBase64ByteArray() {
        // null input is expected to be passed through as null
        final byte[] binaryData = null;
        assertArrayEquals(binaryData, Base64.encodeBase64(binaryData));
    }
    @Test @Ignore
    public void testEncodeBase64ByteArrayBoolean() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testEncodeBase64ByteArrayBooleanBoolean() {
        fail("Not yet implemented");
    }
    @Test
    public void testEncodeBase64ByteArrayBooleanBooleanInt() {
        final byte[] binaryData = new byte[]{'1','2','3'};
        byte[] encoded;
        encoded = Base64.encodeBase64(binaryData, false, false);
        assertNotNull(encoded);
        assertEquals(4, encoded.length);
        // maxResultSize smaller than the encoded output must be rejected
        try {
            Base64.encodeBase64(binaryData, false, false, 3);
            fail("Expected IllegalArgumentException");
        } catch (final IllegalArgumentException expected) {
            // expected
        }
        encoded = Base64.encodeBase64(binaryData, false, false, 4); // NET-483
        assertNotNull(encoded);
        assertEquals(4, encoded.length);
        encoded = Base64.encodeBase64(binaryData, true, false);
        assertNotNull(encoded);
        assertEquals(6, encoded.length); // always adds trailer
        try {
            Base64.encodeBase64(binaryData, true, false, 5);
            fail("Expected IllegalArgumentException");
        } catch (final IllegalArgumentException expected) {
            // expected
        }
        encoded = Base64.encodeBase64(binaryData, true, false, 6);
        assertNotNull(encoded);
        assertEquals(6, encoded.length);
    }
    @Test @Ignore
    public void testEncodeBase64Chunked() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testEncodeBase64StringByteArray() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testEncodeBase64StringByteArrayBoolean() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testEncodeBase64StringUnChunked() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testEncodeBase64URLSafe() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testEncodeBase64URLSafeString() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testEncodeByteArray() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testEncodeInteger() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testEncodeObject() {
        fail("Not yet implemented");
    }
    @Test @Ignore
    public void testEncodeToString() {
        fail("Not yet implemented");
    }
    @Test
    public void testIsArrayByteBase64() {
        assertTrue(Base64.isArrayByteBase64(new byte[]{'b',' '}));
        assertFalse(Base64.isArrayByteBase64(new byte[]{'?'}));
    }
    @Test
    public void testIsBase64() {
        assertTrue(Base64.isBase64((byte)'b'));
        assertFalse(Base64.isBase64((byte)' '));
    }
    @Test @Ignore
    public void testToIntegerBytes() {
        fail("Not yet implemented");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.property;
import static com.google.common.collect.Sets.newHashSet;
import static java.util.Collections.singleton;
import static org.apache.jackrabbit.JcrConstants.JCR_MIXINTYPES;
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
import static org.apache.jackrabbit.oak.api.CommitFailedException.CONSTRAINT;
import static org.apache.jackrabbit.oak.api.Type.NAME;
import static org.apache.jackrabbit.oak.api.Type.NAMES;
import static org.apache.jackrabbit.oak.commons.PathUtils.concat;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.DECLARING_NODE_TYPES;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_CONTENT_NODE_NAME;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.PROPERTY_NAMES;
import static org.apache.jackrabbit.oak.plugins.index.property.PropertyIndex.encode;
import java.util.Collections;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.jcr.PropertyType;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
import org.apache.jackrabbit.oak.plugins.index.IndexEditor;
import org.apache.jackrabbit.oak.plugins.index.IndexUpdateCallback;
import org.apache.jackrabbit.oak.plugins.index.PathFilter;
import org.apache.jackrabbit.oak.plugins.index.property.strategy.IndexStoreStrategy;
import org.apache.jackrabbit.oak.plugins.nodetype.TypePredicate;
import org.apache.jackrabbit.oak.spi.commit.Editor;
import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
import org.apache.jackrabbit.oak.spi.query.PropertyValues;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import com.google.common.base.Predicate;
/**
* Index editor for keeping a property index up to date.
*
* @see PropertyIndex
* @see PropertyIndexLookup
*/
class PropertyIndexEditor implements IndexEditor {
/** Parent editor, or {@code null} if this is the root editor. */
private final PropertyIndexEditor parent;
/** Name of this node, or {@code null} for the root node. */
private final String name;
/** Path of this editor, built lazily in {@link #getPath()}. */
private String path;
/** Index definition node builder */
private final NodeBuilder definition;
/** Root node state */
private final NodeState root;
private final Set<String> propertyNames;
/** Type predicate, or {@code null} if there are no type restrictions */
private final Predicate<NodeState> typePredicate;
/**
* This field is only set for unique indexes. Otherwise it is null.
* Keys to check for uniqueness, or {@code null} for no uniqueness checks.
*/
private final Set<String> keysToCheckForUniqueness;
/**
* Flag to indicate whether the type of this node may have changed.
*/
private boolean typeChanged;
/**
* Matching property value keys from the before state. Lazily initialized.
*/
private Set<String> beforeKeys;
/**
* Matching property value keys from the after state. Lazily initialized.
*/
private Set<String> afterKeys;
private final IndexUpdateCallback updateCallback;
private final PathFilter pathFilter;
private final PathFilter.Result pathFilterResult;
private final MountInfoProvider mountInfoProvider;
public PropertyIndexEditor(NodeBuilder definition, NodeState root,
IndexUpdateCallback updateCallback, MountInfoProvider mountInfoProvider) {
this.parent = null;
this.name = null;
this.path = "/";
this.definition = definition;
this.root = root;
pathFilter = PathFilter.from(definition);
pathFilterResult = getPathFilterResult();
//initPropertyNames(definition);
// get property names
PropertyState names = definition.getProperty(PROPERTY_NAMES);
if (names.count() == 1) {
// OAK-1273: optimize for the common case
this.propertyNames = singleton(names.getValue(NAME, 0));
} else {
this.propertyNames = newHashSet(names.getValue(NAMES));
}
// get declaring types, and all their subtypes
// TODO: should we reindex when type definitions change?
if (definition.hasProperty(DECLARING_NODE_TYPES)) {
this.typePredicate = new TypePredicate(
root, definition.getNames(DECLARING_NODE_TYPES));
} else {
this.typePredicate = null;
}
// keep track of modified keys for uniqueness checks
if (definition.getBoolean(IndexConstants.UNIQUE_PROPERTY_NAME)) {
this.keysToCheckForUniqueness = newHashSet();
} else {
this.keysToCheckForUniqueness = null;
}
this.updateCallback = updateCallback;
this.mountInfoProvider = mountInfoProvider;
}
PropertyIndexEditor(PropertyIndexEditor parent, String name, PathFilter.Result pathFilterResult) {
this.parent = parent;
this.name = name;
this.path = null;
this.definition = parent.definition;
this.root = parent.root;
this.propertyNames = parent.getPropertyNames();
this.typePredicate = parent.typePredicate;
this.keysToCheckForUniqueness = parent.keysToCheckForUniqueness;
this.updateCallback = parent.updateCallback;
this.pathFilter = parent.pathFilter;
this.pathFilterResult = pathFilterResult;
this.mountInfoProvider = parent.mountInfoProvider;
}
/**
* commodity method for allowing extensions
*
* @return the propertyNames
*/
Set<String> getPropertyNames() {
return propertyNames;
}
/**
* Returns the path of this node, building it lazily when first requested.
*/
private String getPath() {
if (path == null) {
path = concat(parent.getPath(), name);
}
return path;
}
/**
* Adds the encoded values of the given property to the given set.
* If the given set is uninitialized, i.e. {@code null}, then a new
* set is created for any values to be added. The set, possibly newly
* initialized, is returned.
*
* @param keys set of encoded values, or {@code null}
* @param property property whose values are to be added to the set
* @return set of encoded values, possibly initialized
*/
private static Set<String> addValueKeys(
Set<String> keys, PropertyState property) {
if (property.getType().tag() != PropertyType.BINARY
&& property.count() > 0) {
if (keys == null) {
keys = newHashSet();
}
keys.addAll(encode(PropertyValues.create(property)));
}
return keys;
}
private static Set<String> getMatchingKeys(
NodeState state, Iterable<String> propertyNames) {
Set<String> keys = null;
for (String propertyName : propertyNames) {
PropertyState property = state.getProperty(propertyName);
if (property != null) {
keys = addValueKeys(keys, property);
}
}
return keys;
}
Set<IndexStoreStrategy> getStrategies(boolean unique) {
return Multiplexers.getStrategies(unique, mountInfoProvider,
definition, INDEX_CONTENT_NODE_NAME);
}
@Override
public void enter(NodeState before, NodeState after) {
// disables property name checks
typeChanged = typePredicate == null;
beforeKeys = null;
afterKeys = null;
}
@Override
public void leave(NodeState before, NodeState after)
throws CommitFailedException {
if (pathFilterResult == PathFilter.Result.INCLUDE) {
applyTypeRestrictions(before, after);
updateIndex(before, after);
}
checkUniquenessConstraints();
}
private void applyTypeRestrictions(NodeState before, NodeState after) {
// apply the type restrictions
if (typePredicate != null) {
if (typeChanged) {
// possible type change, so ignore diff results and
// just load all matching values from both states
beforeKeys = getMatchingKeys(before, getPropertyNames());
afterKeys = getMatchingKeys(after, getPropertyNames());
}
if (beforeKeys != null && !typePredicate.apply(before)) {
// the before state doesn't match the type, so clear its values
beforeKeys = null;
}
if (afterKeys != null && !typePredicate.apply(after)) {
// the after state doesn't match the type, so clear its values
afterKeys = null;
}
}
}
private void updateIndex(NodeState before, NodeState after) throws CommitFailedException {
// if any changes were detected, update the index accordingly
if (beforeKeys != null || afterKeys != null) {
// first make sure that both the before and after sets are non-null
if (beforeKeys == null
|| (typePredicate != null && !typePredicate.apply(before))) {
beforeKeys = newHashSet();
} else if (afterKeys == null) {
afterKeys = newHashSet();
} else {
// both before and after matches found, remove duplicates
Set<String> sharedKeys = newHashSet(beforeKeys);
sharedKeys.retainAll(afterKeys);
beforeKeys.removeAll(sharedKeys);
afterKeys.removeAll(sharedKeys);
}
if (!beforeKeys.isEmpty() || !afterKeys.isEmpty()) {
updateCallback.indexUpdate();
String properties = definition.getString(PROPERTY_NAMES);
boolean uniqueIndex = keysToCheckForUniqueness != null;
for (IndexStoreStrategy strategy : getStrategies(uniqueIndex)) {
NodeBuilder index = definition.child(strategy
.getIndexNodeName());
if (uniqueIndex) {
keysToCheckForUniqueness.addAll(getExistingKeys(
afterKeys, index, strategy));
}
strategy.update(index, getPath(), properties, definition,
beforeKeys, afterKeys);
}
}
}
checkUniquenessConstraints();
}
private void checkUniquenessConstraints() throws CommitFailedException {
if (parent == null) {
// make sure that the index node exist, even with no content
definition.child(INDEX_CONTENT_NODE_NAME);
boolean uniqueIndex = keysToCheckForUniqueness != null;
// check uniqueness constraints when leaving the root
if (uniqueIndex &&
!keysToCheckForUniqueness.isEmpty()) {
NodeState indexMeta = definition.getNodeState();
String failed = getFirstDuplicate(
keysToCheckForUniqueness, indexMeta);
if (failed != null) {
String msg = String.format(
"Uniqueness constraint violated at path [%s] for one of the "
+ "property in %s having value %s",
getPath(), propertyNames, failed);
throw new CommitFailedException(CONSTRAINT, 30, msg);
}
}
}
}
/**
* From a set of keys, get those that already exist in the index.
*
* @param keys the keys
* @param index the index
* @param s the index store strategy
* @return the set of keys that already exist in this unique index
*/
private Set<String> getExistingKeys(Set<String> keys, NodeBuilder index, IndexStoreStrategy s) {
Set<String> existing = null;
for (String key : keys) {
if (s.exists(index, key)) {
if (existing == null) {
existing = newHashSet();
}
existing.add(key);
}
}
if (existing == null) {
existing = Collections.emptySet();
}
return existing;
}
/**
* From a set of keys, get the first that has multiple entries, if any.
*
* @param keys the keys
* @param indexMeta the index configuration
* @return the first duplicate, or null if none was found
*/
private String getFirstDuplicate(Set<String> keys, NodeState indexMeta) {
for (String key : keys) {
long count = 0;
for (IndexStoreStrategy s : getStrategies(true)) {
count += s.count(root, indexMeta, singleton(key), 2);
if (count > 1) {
return key;
}
}
}
return null;
}
private static boolean isTypeProperty(String name) {
return JCR_PRIMARYTYPE.equals(name) || JCR_MIXINTYPES.equals(name);
}
@Override
public void propertyAdded(PropertyState after) {
String name = after.getName();
typeChanged = typeChanged || isTypeProperty(name);
if (getPropertyNames().contains(name)) {
afterKeys = addValueKeys(afterKeys, after);
}
}
@Override
public void propertyChanged(PropertyState before, PropertyState after) {
String name = after.getName();
typeChanged = typeChanged || isTypeProperty(name);
if (getPropertyNames().contains(name)) {
beforeKeys = addValueKeys(beforeKeys, before);
afterKeys = addValueKeys(afterKeys, after);
}
}
@Override
public void propertyDeleted(PropertyState before) {
String name = before.getName();
typeChanged = typeChanged || isTypeProperty(name);
if (getPropertyNames().contains(name)) {
beforeKeys = addValueKeys(beforeKeys, before);
}
}
/**
* Retrieve a new index editor associated with the child node to process
*
* @param parent the index editor related to the parent node
* @param name the name of the child node
* @return an instance of the PropertyIndexEditor
*/
PropertyIndexEditor getChildIndexEditor(@Nonnull PropertyIndexEditor parent, @Nonnull String name, PathFilter.Result filterResult) {
return new PropertyIndexEditor(parent, name, filterResult);
}
@Override
public Editor childNodeAdded(String name, NodeState after) {
PathFilter.Result filterResult = getPathFilterResult(name);
if (filterResult == PathFilter.Result.EXCLUDE) {
return null;
}
return getChildIndexEditor(this, name, filterResult);
}
@Override
public Editor childNodeChanged(
String name, NodeState before, NodeState after) {
PathFilter.Result filterResult = getPathFilterResult(name);
if (filterResult == PathFilter.Result.EXCLUDE) {
return null;
}
return getChildIndexEditor(this, name, filterResult);
}
@Override
public Editor childNodeDeleted(String name, NodeState before) {
PathFilter.Result filterResult = getPathFilterResult(name);
if (filterResult == PathFilter.Result.EXCLUDE) {
return null;
}
return getChildIndexEditor(this, name, filterResult);
}
private PathFilter.Result getPathFilterResult() {
return pathFilter.filter(getPath());
}
private PathFilter.Result getPathFilterResult(String childNodeName) {
return pathFilter.filter(concat(getPath(), childNodeName));
}
}
| |
/*
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
 * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.pig;
import static org.apache.commons.lang.StringUtils.isEmpty;
import java.io.IOException;
import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.phoenix.mapreduce.PhoenixInputFormat;
import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil.SchemaType;
import org.apache.phoenix.pig.util.PhoenixPigSchemaUtil;
import org.apache.phoenix.pig.util.QuerySchemaParserFunction;
import org.apache.phoenix.pig.util.TableSchemaParserFunction;
import org.apache.phoenix.pig.util.TypeUtil;
import org.apache.phoenix.pig.writable.PhoenixPigDBWritable;
import org.apache.pig.Expression;
import org.apache.pig.LoadFunc;
import org.apache.pig.LoadMetadata;
import org.apache.pig.PigException;
import org.apache.pig.ResourceSchema;
import org.apache.pig.ResourceStatistics;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.util.ObjectSerializer;
import org.apache.pig.impl.util.UDFContext;
import com.google.common.base.Preconditions;
/**
* LoadFunc to load data from HBase using Phoenix .
*
* Example usage:
* a) TABLE
* i) A = load 'hbase://table/HIRES' using
* org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
*
* The above loads the data from a table 'HIRES'
*
* ii) A = load 'hbase://table/HIRES/id,name' using
* org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
*
* Here, only id, name are returned from the table HIRES as part of LOAD.
*
* b) QUERY
* i) B = load 'hbase://query/SELECT fname, lname FROM HIRES' using
* org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
*
* The above loads fname and lname columns from 'HIRES' table.
*
*/
public final class PhoenixHBaseLoader extends LoadFunc implements LoadMetadata {
    private static final Log LOG = LogFactory.getLog(PhoenixHBaseLoader.class);
    private static final String PHOENIX_TABLE_NAME_SCHEME = "hbase://table/";
    private static final String PHOENIX_QUERY_SCHEME = "hbase://query/";
    private static final String RESOURCE_SCHEMA_SIGNATURE = "phoenix.pig.schema";
    private Configuration config;
    private String tableName;
    private String selectQuery;
    private String zkQuorum;
    private PhoenixInputFormat<PhoenixPigDBWritable> inputFormat;
    private RecordReader<NullWritable,PhoenixPigDBWritable> reader;
    private String contextSignature;
    private ResourceSchema schema;
    /**
     * @param zkQuorum the ZooKeeper quorum used to connect to Phoenix; must be non-empty
     */
    public PhoenixHBaseLoader(String zkQuorum) {
        super();
        Preconditions.checkNotNull(zkQuorum);
        Preconditions.checkState(zkQuorum.length() > 0, "Zookeeper quorum cannot be empty!");
        this.zkQuorum = zkQuorum;
    }
    @Override
    public void setLocation(String location, Job job) throws IOException {
        final Configuration configuration = job.getConfiguration();
        //explicitly turning off combining splits.
        configuration.setBoolean("pig.noSplitCombination", true);
        //to have phoenix working on a secured cluster
        TableMapReduceUtil.initCredentials(job);
        this.initializePhoenixPigConfiguration(location, configuration);
    }
    /**
     * Initialize PhoenixPigConfiguration if it is null. Called by {@link #setLocation} and {@link #getSchema}
     * @param location the load location ("hbase://table/..." or "hbase://query/...")
     * @param configuration the job configuration to populate
     * @throws PigException if the location matches neither scheme or cannot be parsed
     */
    private void initializePhoenixPigConfiguration(final String location, final Configuration configuration) throws PigException {
        if(this.config != null) {
            return;
        }
        this.config = configuration;
        this.config.set(HConstants.ZOOKEEPER_QUORUM,this.zkQuorum);
        PhoenixConfigurationUtil.setInputClass(this.config, PhoenixPigDBWritable.class);
        Pair<String,String> pair = null;
        try {
            if (location.startsWith(PHOENIX_TABLE_NAME_SCHEME)) {
                String tableSchema = location.substring(PHOENIX_TABLE_NAME_SCHEME.length());
                final TableSchemaParserFunction parseFunction = new TableSchemaParserFunction();
                pair = parseFunction.apply(tableSchema);
                PhoenixConfigurationUtil.setSchemaType(this.config, SchemaType.TABLE);
            } else if (location.startsWith(PHOENIX_QUERY_SCHEME)) {
                this.selectQuery = location.substring(PHOENIX_QUERY_SCHEME.length());
                final QuerySchemaParserFunction queryParseFunction = new QuerySchemaParserFunction(this.config);
                pair = queryParseFunction.apply(this.selectQuery);
                PhoenixConfigurationUtil.setInputQuery(this.config, this.selectQuery);
                PhoenixConfigurationUtil.setSchemaType(this.config, SchemaType.QUERY);
            } else {
                // the location matched neither scheme: report the usage message
                // instead of the NullPointerException pair.getFirst() would throw
                printUsage(location);
            }
            this.tableName = pair.getFirst();
            final String selectedColumns = pair.getSecond();
            if(isEmpty(this.tableName) && isEmpty(this.selectQuery)) {
                printUsage(location);
            }
            PhoenixConfigurationUtil.setInputTableName(this.config, this.tableName);
            if(!isEmpty(selectedColumns)) {
                PhoenixConfigurationUtil.setSelectColumnNames(this.config, selectedColumns);
            }
        } catch(IllegalArgumentException iae) {
            printUsage(location);
        }
    }
    @Override
    public String relativeToAbsolutePath(String location, Path curDir) throws IOException {
        // locations are opaque URIs, not filesystem paths
        return location;
    }
    @Override
    public InputFormat getInputFormat() throws IOException {
        if(inputFormat == null) {
            inputFormat = new PhoenixInputFormat<PhoenixPigDBWritable>();
            PhoenixConfigurationUtil.setInputClass(this.config,PhoenixPigDBWritable.class);
        }
        return inputFormat;
    }
    @SuppressWarnings("unchecked")
    @Override
    public void prepareToRead(RecordReader reader, PigSplit split) throws IOException {
        this.reader = reader;
        // the schema was serialized into the UDF context by getSchema()
        final String resourceSchemaAsStr = getValueFromUDFContext(this.contextSignature,RESOURCE_SCHEMA_SIGNATURE);
        if (resourceSchemaAsStr == null) {
            throw new IOException("Could not find schema in UDF context");
        }
        schema = (ResourceSchema)ObjectSerializer.deserialize(resourceSchemaAsStr);
    }
    /*
     * @see org.apache.pig.LoadFunc#setUDFContextSignature(java.lang.String)
     */
    @Override
    public void setUDFContextSignature(String signature) {
        this.contextSignature = signature;
    }
    @Override
    public Tuple getNext() throws IOException {
        try {
            if(!reader.nextKeyValue()) {
                return null;
            }
            final PhoenixPigDBWritable record = reader.getCurrentValue();
            if(record == null) {
                return null;
            }
            final Tuple tuple = TypeUtil.transformToTuple(record,schema.getFields());
            return tuple;
        } catch (InterruptedException e) {
            int errCode = 6018;
            final String errMsg = "Error while reading input";
            throw new ExecException(errMsg, errCode,PigException.REMOTE_ENVIRONMENT, e);
        }
    }
    /** Logs and throws the canonical "bad load location" error message. */
    private void printUsage(final String location) throws PigException {
        String locationErrMsg = String.format("The input location in load statement should be of the form " +
                "%s<table name> or %s<query>. Got [%s] ",PHOENIX_TABLE_NAME_SCHEME,PHOENIX_QUERY_SCHEME,location);
        LOG.error(locationErrMsg);
        throw new PigException(locationErrMsg);
    }
    @Override
    public ResourceSchema getSchema(String location, Job job) throws IOException {
        if(schema != null) {
            return schema;
        }
        final Configuration configuration = job.getConfiguration();
        this.initializePhoenixPigConfiguration(location, configuration);
        this.schema = PhoenixPigSchemaUtil.getResourceSchema(this.config);
        if(LOG.isDebugEnabled()) {
            LOG.debug(String.format("Resource Schema generated for location [%s] is [%s]", location, schema.toString()));
        }
        // persist the schema so prepareToRead() can recover it on the backend
        this.storeInUDFContext(this.contextSignature, RESOURCE_SCHEMA_SIGNATURE, ObjectSerializer.serialize(schema));
        return schema;
    }
    @Override
    public ResourceStatistics getStatistics(String location, Job job) throws IOException {
        // not implemented
        return null;
    }
    @Override
    public String[] getPartitionKeys(String location, Job job) throws IOException {
        // not implemented
        return null;
    }
    @Override
    public void setPartitionFilter(Expression partitionFilter) throws IOException {
        // not implemented
    }
    private void storeInUDFContext(final String signature,final String key,final String value) {
        final UDFContext udfContext = UDFContext.getUDFContext();
        final Properties props = udfContext.getUDFProperties(this.getClass(), new String[]{signature});
        props.put(key, value);
    }
    private String getValueFromUDFContext(final String signature,final String key) {
        final UDFContext udfContext = UDFContext.getUDFContext();
        final Properties props = udfContext.getUDFProperties(this.getClass(), new String[]{signature});
        return props.getProperty(key);
    }
}
| |
/*
* Copyright (C) 2008, 2014 XStream Committers.
* All rights reserved.
*
* The software in this package is published under the terms of the BSD
* style license a copy of which has been included with this distribution in
* the LICENSE.txt file.
*
* Created on 18. November 2008 by Joerg Schaible
*/
package com.thoughtworks.xstream.persistence;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.converters.ConverterLookup;
import com.thoughtworks.xstream.io.StreamException;
import com.thoughtworks.xstream.mapper.Mapper;
/**
* Abstract base class for file based persistence strategies.
*
* @author Guilherme Silveira
* @author Joerg Schaible
* @since 1.3.1
*/
public abstract class AbstractFilePersistenceStrategy<K, V> implements PersistenceStrategy<K, V> {
    private final FilenameFilter filter;
    private final File baseDirectory;
    private final String encoding;
    private final transient XStream xstream;

    /**
     * Creates the strategy.
     *
     * @param baseDirectory the directory holding one XML file per entry
     * @param xstream the XStream instance used to (de)marshal values
     * @param encoding the file encoding, or null for the platform default
     */
    public AbstractFilePersistenceStrategy(final File baseDirectory, final XStream xstream, final String encoding) {
        this.baseDirectory = baseDirectory;
        this.xstream = xstream;
        this.encoding = encoding;
        filter = new ValidFilenameFilter();
    }

    protected ConverterLookup getConverterLookup() {
        return xstream.getConverterLookup();
    }

    protected Mapper getMapper() {
        return xstream.getMapper();
    }

    /** Accepts only files with an .xml extension; subclasses may tighten this. */
    protected boolean isValid(final File dir, final String name) {
        return name.endsWith(".xml");
    }

    /**
     * Given a filename, the unescape method returns the key which originated it.
     *
     * @param name the filename
     * @return the original key
     */
    protected abstract K extractKey(String name);

    /**
     * Given a key, the escape method returns the filename which shall be used.
     *
     * @param key the key
     * @return the desired and escaped filename
     */
    protected abstract String getName(Object key);

    protected class ValidFilenameFilter implements FilenameFilter {
        @Override
        public boolean accept(final File dir, final String name) {
            return new File(dir, name).isFile() && isValid(dir, name);
        }
    }

    protected class XmlMapEntriesIterator implements Iterator<Map.Entry<K, V>> {
        // Snapshot of the directory at iterator creation time. listFiles may
        // return null when the directory does not exist or cannot be read;
        // fall back to an empty listing instead of failing later with an NPE.
        private final File[] files = listFilesOrEmpty();
        private int position = -1;
        private File current = null;

        @Override
        public boolean hasNext() {
            return position + 1 < files.length;
        }

        @Override
        public void remove() {
            if (current == null) {
                throw new IllegalStateException();
            }
            // removes without loading
            current.delete();
        }

        @Override
        public Map.Entry<K, V> next() {
            if (!hasNext()) {
                // Honor the Iterator contract: previously this fell through to
                // an ArrayIndexOutOfBoundsException on an exhausted iterator.
                throw new NoSuchElementException();
            }
            return new Map.Entry<K, V>() {
                private final File file = current = files[++position];
                private final K key = extractKey(file.getName());

                @Override
                public K getKey() {
                    return key;
                }

                @Override
                public V getValue() {
                    // Loaded lazily, so a pure key scan never parses the files.
                    return readFile(file);
                }

                @Override
                public V setValue(final V value) {
                    return put(key, value);
                }

                @Override
                public int hashCode() {
                    // Follows the Map.Entry hashCode contract.
                    final V value = getValue();
                    return (key == null ? 0 : key.hashCode()) ^ (value == null ? 0 : value.hashCode());
                }

                @Override
                public boolean equals(final Object obj) {
                    if (!(obj instanceof Map.Entry<?, ?>)) {
                        return false;
                    }
                    @SuppressWarnings("unchecked")
                    final Map.Entry<K, V> e2 = (Map.Entry<K, V>)obj;
                    final K key2 = e2.getKey();
                    if (key == null ? key2 == null : key.equals(key2)) {
                        final V value = getValue();
                        final V value2 = e2.getValue();
                        return value == null ? value2 == null : value.equals(value2);
                    }
                    return false;
                }
            };
        }
    }

    /** Lists the valid entry files, treating a missing/unreadable directory as empty. */
    private File[] listFilesOrEmpty() {
        final File[] files = baseDirectory.listFiles(filter);
        return files != null ? files : new File[0];
    }

    /** Marshals the value into the given file using the configured encoding. */
    private void writeFile(final File file, final Object value) {
        try {
            final FileOutputStream out = new FileOutputStream(file);
            final Writer writer = encoding != null
                ? new OutputStreamWriter(out, encoding)
                : new OutputStreamWriter(out);
            try {
                xstream.toXML(value, writer);
            } finally {
                writer.close();
            }
        } catch (final IOException e) {
            throw new StreamException(e);
        }
    }

    private File getFile(final String filename) {
        return new File(baseDirectory, filename);
    }

    /** Unmarshals a value from the given file, or returns null if the file is gone. */
    private V readFile(final File file) {
        try {
            final FileInputStream in = new FileInputStream(file);
            final Reader reader = encoding != null ? new InputStreamReader(in, encoding) : new InputStreamReader(in);
            try {
                @SuppressWarnings("unchecked")
                final V value = (V)xstream.fromXML(reader);
                return value;
            } finally {
                reader.close();
            }
        } catch (final FileNotFoundException e) {
            // not found... file.exists might generate a sync problem
            return null;
        } catch (final IOException e) {
            throw new StreamException(e);
        }
    }

    @Override
    public V put(final K key, final V value) {
        // Read the previous value first so it can be returned per the Map contract.
        final V oldValue = get(key);
        final String filename = getName(key);
        writeFile(new File(baseDirectory, filename), value);
        return oldValue;
    }

    @Override
    public Iterator<Map.Entry<K, V>> iterator() {
        return new XmlMapEntriesIterator();
    }

    @Override
    public int size() {
        // list may return null when the directory is missing or unreadable;
        // report an empty store instead of throwing a NullPointerException.
        final String[] names = baseDirectory.list(filter);
        return names == null ? 0 : names.length;
    }

    public boolean containsKey(final K key) {
        // faster lookup: probe the file directly instead of iterating
        final File file = getFile(getName(key));
        return file.isFile();
    }

    @Override
    public V get(final Object key) {
        return readFile(getFile(getName(key)));
    }

    @Override
    public V remove(final Object key) {
        // faster lookup: resolve the file directly instead of iterating
        final File file = getFile(getName(key));
        V value = null;
        if (file.isFile()) {
            value = readFile(file);
            file.delete();
        }
        return value;
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.License.OperationMode;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.XPackSettings;
import java.util.Arrays;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static org.elasticsearch.license.License.OperationMode.BASIC;
import static org.elasticsearch.license.License.OperationMode.GOLD;
import static org.elasticsearch.license.License.OperationMode.MISSING;
import static org.elasticsearch.license.License.OperationMode.PLATINUM;
import static org.elasticsearch.license.License.OperationMode.STANDARD;
import static org.elasticsearch.license.License.OperationMode.TRIAL;
import static org.hamcrest.Matchers.is;
/**
* Unit tests for the {@link XPackLicenseState}
*/
public class XPackLicenseStateTests extends ESTestCase {

    /** Creates a license state with the given license type and active state, and checks the given method returns expected. */
    void assertAllowed(OperationMode mode, boolean active, Predicate<XPackLicenseState> predicate, boolean expected) {
        XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
        licenseState.update(mode, active, null);
        assertEquals(expected, predicate.test(licenseState));
    }

    /**
     * Checks the ack message going from the {@code from} license type to {@code to} license type.
     * TODO: check the actual messages, not just the number of them! This was copied from previous license tests...
     * NOTE: name is intentionally left misspelled ("Messsages") to avoid breaking
     * package-visible callers; rename together with all call sites.
     */
    void assertAckMesssages(String feature, OperationMode from, OperationMode to, int expectedMessages) {
        String[] gotMessages = XPackLicenseState.ACKNOWLEDGMENT_MESSAGES.get(feature).apply(from, to);
        assertEquals(expectedMessages, gotMessages.length);
    }

    static <T> T randomFrom(T[] values, Predicate<T> filter) {
        return randomFrom(Arrays.stream(values).filter(filter).collect(Collectors.toList()));
    }

    static OperationMode randomMode() {
        return randomFrom(OperationMode.values());
    }

    public static OperationMode randomTrialStandardGoldOrPlatinumMode() {
        return randomFrom(TRIAL, STANDARD, GOLD, PLATINUM);
    }

    public static OperationMode randomTrialOrPlatinumMode() {
        return randomFrom(TRIAL, PLATINUM);
    }

    public static OperationMode randomTrialGoldOrPlatinumMode() {
        return randomFrom(TRIAL, GOLD, PLATINUM);
    }

    public static OperationMode randomTrialBasicStandardGoldOrPlatinumMode() {
        return randomFrom(TRIAL, BASIC, STANDARD, GOLD, PLATINUM);
    }

    public static OperationMode randomBasicStandardOrGold() {
        return randomFrom(BASIC, STANDARD, GOLD);
    }

    public void testSecurityDefaults() {
        XPackLicenseState licenseState =
                new XPackLicenseState(Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build());
        assertThat(licenseState.isAuthAllowed(), is(true));
        assertThat(licenseState.isIpFilteringAllowed(), is(true));
        assertThat(licenseState.isAuditingAllowed(), is(true));
        assertThat(licenseState.isStatsAndHealthAllowed(), is(true));
        assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(true));
        assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.ALL));
        assertThat(licenseState.isCustomRoleProvidersAllowed(), is(true));

        licenseState = new XPackLicenseState(Settings.EMPTY);
        assertSecurityNotAllowed(licenseState);
    }

    public void testTransportSslDoesNotAutomaticallyEnableSecurityOnTrialLicense() {
        final XPackLicenseState licenseState;
        licenseState =
                new XPackLicenseState(Settings.builder().put(XPackSettings.TRANSPORT_SSL_ENABLED.getKey(), true).build());
        assertSecurityNotAllowed(licenseState);
    }

    public void testSecurityBasicWithoutExplicitSecurityEnabled() {
        XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
        licenseState.update(BASIC, true, null);

        assertThat(licenseState.isAuthAllowed(), is(false));
        assertThat(licenseState.isIpFilteringAllowed(), is(false));
        assertThat(licenseState.isAuditingAllowed(), is(false));
        assertThat(licenseState.isStatsAndHealthAllowed(), is(true));
        assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(false));
        assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.NONE));
        assertThat(licenseState.isCustomRoleProvidersAllowed(), is(false));
        assertThat(licenseState.isTokenServiceAllowed(), is(false));
        assertThat(licenseState.isApiKeyServiceAllowed(), is(false));

        assertThat(licenseState.isSecurityAvailable(), is(true));
        assertThat(licenseState.isSecurityDisabledByLicenseDefaults(), is(true));
    }

    public void testSecurityBasicWithExplicitSecurityEnabled() {
        final Settings settings = Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build();
        XPackLicenseState licenseState = new XPackLicenseState(settings);
        licenseState.update(BASIC, true, null);

        assertThat(licenseState.isAuthAllowed(), is(true));
        assertThat(licenseState.isIpFilteringAllowed(), is(false));
        assertThat(licenseState.isAuditingAllowed(), is(false));
        assertThat(licenseState.isStatsAndHealthAllowed(), is(true));
        assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(false));
        assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.NATIVE));
        assertThat(licenseState.isCustomRoleProvidersAllowed(), is(false));
        assertThat(licenseState.isTokenServiceAllowed(), is(false));
        assertThat(licenseState.isApiKeyServiceAllowed(), is(true));

        assertThat(licenseState.isSecurityAvailable(), is(true));
        assertThat(licenseState.isSecurityDisabledByLicenseDefaults(), is(false));
    }

    public void testSecurityDefaultBasicExpired() {
        XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
        licenseState.update(BASIC, false, null);

        assertThat(licenseState.isAuthAllowed(), is(false));
        assertThat(licenseState.isIpFilteringAllowed(), is(false));
        assertThat(licenseState.isAuditingAllowed(), is(false));
        assertThat(licenseState.isStatsAndHealthAllowed(), is(false));
        assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(false));
        assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.NONE));
        assertThat(licenseState.isCustomRoleProvidersAllowed(), is(false));
        assertThat(licenseState.isTokenServiceAllowed(), is(false));
        assertThat(licenseState.isApiKeyServiceAllowed(), is(false));
    }

    public void testSecurityEnabledBasicExpired() {
        XPackLicenseState licenseState = new XPackLicenseState(
                Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build());
        licenseState.update(BASIC, false, null);

        assertThat(licenseState.isAuthAllowed(), is(true));
        assertThat(licenseState.isIpFilteringAllowed(), is(false));
        assertThat(licenseState.isAuditingAllowed(), is(false));
        assertThat(licenseState.isStatsAndHealthAllowed(), is(false));
        assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(false));
        assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.NATIVE));
        assertThat(licenseState.isCustomRoleProvidersAllowed(), is(false));
        assertThat(licenseState.isTokenServiceAllowed(), is(false));
        assertThat(licenseState.isApiKeyServiceAllowed(), is(true));
    }

    public void testSecurityStandard() {
        XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
                Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
        licenseState.update(STANDARD, true, null);

        assertThat(licenseState.isAuthAllowed(), is(true));
        assertThat(licenseState.isIpFilteringAllowed(), is(false));
        assertThat(licenseState.isAuditingAllowed(), is(false));
        assertThat(licenseState.isStatsAndHealthAllowed(), is(true));
        assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(false));
        assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.NATIVE));
        assertThat(licenseState.isCustomRoleProvidersAllowed(), is(false));
    }

    public void testSecurityStandardExpired() {
        XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
                Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
        licenseState.update(STANDARD, false, null);

        assertThat(licenseState.isAuthAllowed(), is(true));
        assertThat(licenseState.isIpFilteringAllowed(), is(false));
        assertThat(licenseState.isAuditingAllowed(), is(false));
        assertThat(licenseState.isStatsAndHealthAllowed(), is(false));
        assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(false));
        assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.NATIVE));
        assertThat(licenseState.isCustomRoleProvidersAllowed(), is(false));
    }

    public void testSecurityGold() {
        XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
                Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
        licenseState.update(GOLD, true, null);

        assertThat(licenseState.isAuthAllowed(), is(true));
        assertThat(licenseState.isIpFilteringAllowed(), is(true));
        assertThat(licenseState.isAuditingAllowed(), is(true));
        assertThat(licenseState.isStatsAndHealthAllowed(), is(true));
        assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(false));
        assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.DEFAULT));
        assertThat(licenseState.isCustomRoleProvidersAllowed(), is(false));
        assertThat(licenseState.isTokenServiceAllowed(), is(true));
        assertThat(licenseState.isApiKeyServiceAllowed(), is(true));
    }

    public void testSecurityGoldExpired() {
        XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
                Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
        licenseState.update(GOLD, false, null);

        assertThat(licenseState.isAuthAllowed(), is(true));
        assertThat(licenseState.isIpFilteringAllowed(), is(true));
        assertThat(licenseState.isAuditingAllowed(), is(true));
        assertThat(licenseState.isStatsAndHealthAllowed(), is(false));
        assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(false));
        assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.DEFAULT));
        assertThat(licenseState.isCustomRoleProvidersAllowed(), is(false));
        assertThat(licenseState.isTokenServiceAllowed(), is(true));
        assertThat(licenseState.isApiKeyServiceAllowed(), is(true));
    }

    public void testSecurityPlatinum() {
        XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
                Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
        licenseState.update(PLATINUM, true, null);

        assertThat(licenseState.isAuthAllowed(), is(true));
        assertThat(licenseState.isIpFilteringAllowed(), is(true));
        assertThat(licenseState.isAuditingAllowed(), is(true));
        assertThat(licenseState.isStatsAndHealthAllowed(), is(true));
        assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(true));
        assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.ALL));
        assertThat(licenseState.isCustomRoleProvidersAllowed(), is(true));
        assertThat(licenseState.isTokenServiceAllowed(), is(true));
        assertThat(licenseState.isApiKeyServiceAllowed(), is(true));
    }

    public void testSecurityPlatinumExpired() {
        XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
                Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
        licenseState.update(PLATINUM, false, null);

        assertThat(licenseState.isAuthAllowed(), is(true));
        assertThat(licenseState.isIpFilteringAllowed(), is(true));
        assertThat(licenseState.isAuditingAllowed(), is(true));
        assertThat(licenseState.isStatsAndHealthAllowed(), is(false));
        assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(true));
        assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.ALL));
        assertThat(licenseState.isCustomRoleProvidersAllowed(), is(false));
        assertThat(licenseState.isTokenServiceAllowed(), is(true));
        assertThat(licenseState.isApiKeyServiceAllowed(), is(true));
    }

    public void testNewTrialDefaultsSecurityOff() {
        XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
        licenseState.update(TRIAL, true, VersionUtils.randomCompatibleVersion(random(), Version.CURRENT));

        assertThat(licenseState.isSecurityDisabledByLicenseDefaults(), is(true));
        assertSecurityNotAllowed(licenseState);
    }

    private void assertSecurityNotAllowed(XPackLicenseState licenseState) {
        assertThat(licenseState.isAuthAllowed(), is(false));
        assertThat(licenseState.isIpFilteringAllowed(), is(false));
        assertThat(licenseState.isAuditingAllowed(), is(false));
        assertThat(licenseState.isStatsAndHealthAllowed(), is(true));
        assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(false));
        assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.NONE));
        assertThat(licenseState.isCustomRoleProvidersAllowed(), is(false));
    }

    public void testSecurityAckBasicToNotGoldOrStandard() {
        OperationMode toMode = randomFrom(OperationMode.values(), mode -> mode != GOLD && mode != STANDARD);
        assertAckMesssages(XPackField.SECURITY, BASIC, toMode, 0);
    }

    public void testSecurityAckAnyToTrialOrPlatinum() {
        assertAckMesssages(XPackField.SECURITY, randomMode(), randomTrialOrPlatinumMode(), 0);
    }

    public void testSecurityAckTrialGoldOrPlatinumToBasic() {
        assertAckMesssages(XPackField.SECURITY, randomTrialGoldOrPlatinumMode(), BASIC, 7);
    }

    public void testSecurityAckStandardToBasic() {
        assertAckMesssages(XPackField.SECURITY, STANDARD, BASIC, 1);
    }

    public void testSecurityAckAnyToStandard() {
        OperationMode from = randomFrom(BASIC, GOLD, PLATINUM, TRIAL);
        assertAckMesssages(XPackField.SECURITY, from, STANDARD, 5);
    }

    public void testSecurityAckBasicStandardTrialOrPlatinumToGold() {
        OperationMode from = randomFrom(BASIC, PLATINUM, TRIAL, STANDARD);
        assertAckMesssages(XPackField.SECURITY, from, GOLD, 3);
    }

    public void testMonitoringAckBasicToAny() {
        assertAckMesssages(XPackField.MONITORING, BASIC, randomMode(), 0);
    }

    public void testMonitoringAckAnyToTrialGoldOrPlatinum() {
        assertAckMesssages(XPackField.MONITORING, randomMode(), randomTrialStandardGoldOrPlatinumMode(), 0);
    }

    public void testMonitoringAckNotBasicToBasic() {
        OperationMode from = randomFrom(STANDARD, GOLD, PLATINUM, TRIAL);
        assertAckMesssages(XPackField.MONITORING, from, BASIC, 2);
    }

    public void testMonitoringAllowed() {
        assertAllowed(randomMode(), true, XPackLicenseState::isMonitoringAllowed, true);
        assertAllowed(randomMode(), false, XPackLicenseState::isMonitoringAllowed, false);
    }

    public void testMonitoringUpdateRetention() {
        assertAllowed(STANDARD, true, XPackLicenseState::isUpdateRetentionAllowed, true);
        assertAllowed(GOLD, true, XPackLicenseState::isUpdateRetentionAllowed, true);
        assertAllowed(PLATINUM, true, XPackLicenseState::isUpdateRetentionAllowed, true);
        assertAllowed(TRIAL, true, XPackLicenseState::isUpdateRetentionAllowed, true);
        assertAllowed(BASIC, true, XPackLicenseState::isUpdateRetentionAllowed, false);
        assertAllowed(MISSING, false, XPackLicenseState::isUpdateRetentionAllowed, false);
    }

    public void testWatcherPlatinumGoldTrialStandard() throws Exception {
        assertAllowed(TRIAL, true, XPackLicenseState::isWatcherAllowed, true);
        assertAllowed(GOLD, true, XPackLicenseState::isWatcherAllowed, true);
        assertAllowed(PLATINUM, true, XPackLicenseState::isWatcherAllowed, true);
        assertAllowed(STANDARD, true, XPackLicenseState::isWatcherAllowed, true);
    }

    public void testWatcherBasicLicense() throws Exception {
        assertAllowed(BASIC, true, XPackLicenseState::isWatcherAllowed, false);
    }

    public void testWatcherInactive() {
        assertAllowed(BASIC, false, XPackLicenseState::isWatcherAllowed, false);
    }

    public void testWatcherInactivePlatinumGoldTrial() throws Exception {
        assertAllowed(TRIAL, false, XPackLicenseState::isWatcherAllowed, false);
        assertAllowed(GOLD, false, XPackLicenseState::isWatcherAllowed, false);
        assertAllowed(PLATINUM, false, XPackLicenseState::isWatcherAllowed, false);
        assertAllowed(STANDARD, false, XPackLicenseState::isWatcherAllowed, false);
    }

    public void testGraphPlatinumTrial() throws Exception {
        assertAllowed(TRIAL, true, XPackLicenseState::isGraphAllowed, true);
        assertAllowed(PLATINUM, true, XPackLicenseState::isGraphAllowed, true);
    }

    public void testGraphBasic() throws Exception {
        assertAllowed(BASIC, true, XPackLicenseState::isGraphAllowed, false);
    }

    public void testGraphStandard() throws Exception {
        assertAllowed(STANDARD, true, XPackLicenseState::isGraphAllowed, false);
    }

    public void testGraphInactiveBasic() {
        assertAllowed(BASIC, false, XPackLicenseState::isGraphAllowed, false);
    }

    public void testGraphInactivePlatinumTrial() throws Exception {
        // Fixed copy-paste error: this test previously asserted
        // isMachineLearningAllowed (duplicating the ML test below) and never
        // exercised the Graph feature for an inactive license.
        assertAllowed(TRIAL, false, XPackLicenseState::isGraphAllowed, false);
        assertAllowed(PLATINUM, false, XPackLicenseState::isGraphAllowed, false);
    }

    public void testMachineLearningPlatinumTrial() throws Exception {
        assertAllowed(TRIAL, true, XPackLicenseState::isMachineLearningAllowed, true);
        assertAllowed(PLATINUM, true, XPackLicenseState::isMachineLearningAllowed, true);
    }

    public void testMachineLearningBasic() throws Exception {
        assertAllowed(BASIC, true, XPackLicenseState::isMachineLearningAllowed, false);
    }

    public void testMachineLearningStandard() throws Exception {
        assertAllowed(STANDARD, true, XPackLicenseState::isMachineLearningAllowed, false);
    }

    public void testMachineLearningInactiveBasic() {
        assertAllowed(BASIC, false, XPackLicenseState::isMachineLearningAllowed, false);
    }

    public void testMachineLearningInactivePlatinumTrial() throws Exception {
        assertAllowed(TRIAL, false, XPackLicenseState::isMachineLearningAllowed, false);
        assertAllowed(PLATINUM, false, XPackLicenseState::isMachineLearningAllowed, false);
    }

    public void testLogstashPlatinumGoldTrialStandard() throws Exception {
        assertAllowed(TRIAL, true, XPackLicenseState::isLogstashAllowed, true);
        assertAllowed(GOLD, true, XPackLicenseState::isLogstashAllowed, true);
        assertAllowed(PLATINUM, true, XPackLicenseState::isLogstashAllowed, true);
        assertAllowed(STANDARD, true, XPackLicenseState::isLogstashAllowed, true);
    }

    public void testLogstashBasicLicense() throws Exception {
        assertAllowed(BASIC, true, XPackLicenseState::isLogstashAllowed, false);
    }

    public void testLogstashInactive() {
        assertAllowed(BASIC, false, XPackLicenseState::isLogstashAllowed, false);
        assertAllowed(TRIAL, false, XPackLicenseState::isLogstashAllowed, false);
        assertAllowed(GOLD, false, XPackLicenseState::isLogstashAllowed, false);
        assertAllowed(PLATINUM, false, XPackLicenseState::isLogstashAllowed, false);
        assertAllowed(STANDARD, false, XPackLicenseState::isLogstashAllowed, false);
    }

    public void testSqlDefaults() {
        XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
        assertThat(licenseState.isSqlAllowed(), is(true));
        assertThat(licenseState.isJdbcAllowed(), is(true));
    }

    public void testSqlBasic() {
        XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
        licenseState.update(BASIC, true, null);

        assertThat(licenseState.isSqlAllowed(), is(true));
        assertThat(licenseState.isJdbcAllowed(), is(false));
    }

    public void testSqlBasicExpired() {
        XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
        licenseState.update(BASIC, false, null);

        assertThat(licenseState.isSqlAllowed(), is(false));
        assertThat(licenseState.isJdbcAllowed(), is(false));
    }

    public void testSqlStandard() {
        XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
        licenseState.update(STANDARD, true, null);

        assertThat(licenseState.isSqlAllowed(), is(true));
        assertThat(licenseState.isJdbcAllowed(), is(false));
    }

    public void testSqlStandardExpired() {
        XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
        licenseState.update(STANDARD, false, null);

        assertThat(licenseState.isSqlAllowed(), is(false));
        assertThat(licenseState.isJdbcAllowed(), is(false));
    }

    public void testSqlGold() {
        XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
        licenseState.update(GOLD, true, null);

        assertThat(licenseState.isSqlAllowed(), is(true));
        assertThat(licenseState.isJdbcAllowed(), is(false));
    }

    public void testSqlGoldExpired() {
        XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
        licenseState.update(GOLD, false, null);

        assertThat(licenseState.isSqlAllowed(), is(false));
        assertThat(licenseState.isJdbcAllowed(), is(false));
    }

    public void testSqlPlatinum() {
        XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
        licenseState.update(PLATINUM, true, null);

        assertThat(licenseState.isSqlAllowed(), is(true));
        assertThat(licenseState.isJdbcAllowed(), is(true));
    }

    public void testSqlPlatinumExpired() {
        XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
        licenseState.update(PLATINUM, false, null);

        assertThat(licenseState.isSqlAllowed(), is(false));
        assertThat(licenseState.isJdbcAllowed(), is(false));
    }

    public void testSqlAckAnyToTrialOrPlatinum() {
        assertAckMesssages(XPackField.SQL, randomMode(), randomTrialOrPlatinumMode(), 0);
    }

    public void testSqlAckTrialOrPlatinumToNotTrialOrPlatinum() {
        assertAckMesssages(XPackField.SQL, randomTrialOrPlatinumMode(), randomBasicStandardOrGold(), 1);
    }

    public void testTransformBasic() throws Exception {
        assertAllowed(BASIC, true, XPackLicenseState::isTransformAllowed, true);
    }

    public void testTransformStandard() throws Exception {
        assertAllowed(STANDARD, true, XPackLicenseState::isTransformAllowed, true);
    }

    public void testTransformInactiveBasic() {
        assertAllowed(BASIC, false, XPackLicenseState::isTransformAllowed, false);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cocoon.generation;
import java.io.IOException;
import java.io.Serializable;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Date;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Stack;
import java.util.TimeZone;
import org.apache.avalon.framework.parameters.Parameters;
import org.apache.excalibur.source.Source;
import org.apache.excalibur.source.SourceException;
import org.apache.excalibur.source.SourceValidity;
import org.apache.excalibur.source.TraversableSource;
import org.apache.regexp.RE;
import org.apache.regexp.RESyntaxException;
import org.apache.cocoon.ProcessingException;
import org.apache.cocoon.ResourceNotFoundException;
import org.apache.cocoon.caching.CacheableProcessingComponent;
import org.apache.cocoon.components.source.impl.MultiSourceValidity;
import org.apache.cocoon.components.source.util.SourceUtil;
import org.apache.cocoon.environment.SourceResolver;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;
/**
* Generates an XML source hierarchy listing from a Traversable Source.
*
* <p>
* The root node of the generated document will normally be a
* <code>collection</code> node and a collection node can contain zero or more
* <code>resource</code> or collection nodes. A resource node has no children.
* Each node will contain the following attributes:
* <blockquote>
* <dl>
* <dt> name
* <dd> the name of the source
* <dt> lastModified
* <dd> the time the source was last modified, measured as the number of
* milliseconds since the epoch (as in java.io.File.lastModified)
* <dt> size
* <dd> the source size, in bytes (as in java.io.File.length)
* <dt> date (optional)
* <dd> the time the source was last modified in human-readable form
* </dl>
* </blockquote>
* <p>
* <b>Configuration options:</b>
* <dl>
* <dt> <i>depth</i> (optional)
* <dd> Sets how deep TraversableGenerator should delve into the
* source hierarchy. If set to 1 (the default), only the starting
* collection's immediate contents will be returned.
* <dt> <i>sort</i> (optional)
* <dd> Sort order in which the nodes are returned. Possible values are
* name, size, time, collection. collection is the same as name,
* except that the collection entries are listed first. System order is
* default.
* <dt> <i>reverse</i> (optional)
* <dd> Reverse the order of the sort
* <dt> <i>dateFormat</i> (optional)
* <dd> Sets the format for the date attribute of each node, as
* described in java.text.SimpleDateFormat. If unset, the default
* format for the current locale will be used.
* <dt> <i>timeZone</i> (optional)
* <dd> Sets the time zone offset ID for the date attribute, as
* described in java.util.TimeZone. If unset, the default
* system time zone will be used.
* <dt> <i>refreshDelay</i> (optional)
* <dd> Sets the delay (in seconds) between checks on the source hierarchy
* for changed content. Defaults to 1 second.
* </dl>
* </p>
*
* @cocoon.sitemap.component.documentation
* Generates an XML source hierarchy listing from a Traversable Source.
* @cocoon.sitemap.component.documentation.caching Yes
*
* @version $Id$
*/
public class TraversableGenerator extends ServiceableGenerator
implements CacheableProcessingComponent {
/** The URI of the namespace of this generator. */
protected static final String URI = "http://apache.org/cocoon/collection/1.0";
/** The namespace prefix for this namespace. */
protected static final String PREFIX = "collection";
/* Node and attribute names */
protected static final String COL_NODE_NAME = "collection";
protected static final String RESOURCE_NODE_NAME = "resource";
protected static final String RES_NAME_ATTR_NAME = "name";
protected static final String URI_ATTR_NAME = "uri";
protected static final String LASTMOD_ATTR_NAME = "lastModified";
protected static final String DATE_ATTR_NAME = "date";
protected static final String SIZE_ATTR_NAME = "size";
/** The validity that is being built */
protected MultiSourceValidity validity;
/**
* Convenience object, so we don't need to create an AttributesImpl for every element.
*/
protected AttributesImpl attributes;
/**
* The cache key needs to be generated for the configuration of this
* generator, so storing the parameters for generateKey().
* Using the member variables after setup() would not work I guess. I don't
* know a way from the regular expressions back to the pattern or at least
* a useful string.
*/
protected List cacheKeyParList;
/**
* The depth parameter determines how deep the TraversableGenerator should delve.
*/
protected int depth;
/**
* The dateFormatter determines into which date format the lastModified
* time should be converted.
* FIXME: SimpleDateFormat is not supported by all locales!
*/
protected SimpleDateFormat dateFormatter;
/** The delay between checks on updates to the source hierarchy. */
protected long refreshDelay;
    /**
     * The sort parameter determines by which attribute the content of one
     * collection should be sorted. Possible values are "name", "size",
     * "lastmodified" and "collection", where "collection" is the same as
     * "name", except that collection entries are listed first.
     */
protected String sort;
/** The reverse parameter reverses the sort order. <code>false</code> is default. */
protected boolean reverse;
/** The regular expression for the root pattern. */
protected RE rootRE;
/** The regular expression for the include pattern. */
protected RE includeRE;
/** The regular expression for the exclude pattern. */
protected RE excludeRE;
/**
* This is only set to true for the requested source specified by the
* <code>src</code> attribute on the generator's configuration.
*/
protected boolean isRequestedSource;
/**
* Set the request parameters. Must be called before the generate method.
*
* @param resolver the SourceResolver object
* @param objectModel a <code>Map</code> containing model object
* @param src the Traversable Source to be XMLized specified as
* <code>src</code> attribute on <map:generate/>
* @param par configuration parameters
*/
public void setup(SourceResolver resolver, Map objectModel, String src, Parameters par)
throws ProcessingException, SAXException, IOException {
if (src == null) {
throw new ProcessingException("No src attribute pointing to a traversable source to be XMLized specified.");
}
super.setup(resolver, objectModel, src, par);
this.cacheKeyParList = new ArrayList();
this.cacheKeyParList.add(src);
this.depth = par.getParameterAsInteger("depth", 1);
this.cacheKeyParList.add(String.valueOf(this.depth));
String dateFormatString = par.getParameter("dateFormat", null);
this.cacheKeyParList.add(dateFormatString);
if (dateFormatString != null) {
String locale = par.getParameter("locale", null);
if (locale != null) {
this.dateFormatter = new SimpleDateFormat(dateFormatString, new Locale(locale, ""));
} else {
this.dateFormatter = new SimpleDateFormat(dateFormatString);
}
} else {
this.dateFormatter = new SimpleDateFormat();
}
String timeZone = par.getParameter("timeZone", null);
if (timeZone != null) {
this.dateFormatter.setTimeZone(TimeZone.getTimeZone(timeZone));
}
this.sort = par.getParameter("sort", "name");
this.cacheKeyParList.add(this.sort);
this.reverse = par.getParameterAsBoolean("reverse", false);
this.cacheKeyParList.add(String.valueOf(this.reverse));
this.refreshDelay = par.getParameterAsLong("refreshDelay", 1L) * 1000L;
this.cacheKeyParList.add(String.valueOf(this.refreshDelay));
if (this.getLogger().isDebugEnabled()) {
this.getLogger().debug("depth: " + this.depth);
this.getLogger().debug("dateFormat: " + this.dateFormatter.toPattern());
this.getLogger().debug("timeZone: " + timeZone);
this.getLogger().debug("sort: " + this.sort);
this.getLogger().debug("reverse: " + this.reverse);
this.getLogger().debug("refreshDelay: " + this.refreshDelay);
}
String rePattern = null;
try {
rePattern = par.getParameter("root", null);
if (this.getLogger().isDebugEnabled()) {
this.getLogger().debug("root pattern: " + rePattern);
}
this.cacheKeyParList.add(rePattern);
this.rootRE = (rePattern == null) ? null : new RE(rePattern);
rePattern = par.getParameter("include", null);
if (this.getLogger().isDebugEnabled()) {
this.getLogger().debug("include pattern: " + rePattern);
}
this.cacheKeyParList.add(rePattern);
this.includeRE = (rePattern == null) ? null : new RE(rePattern);
rePattern = par.getParameter("exclude", null);
if (this.getLogger().isDebugEnabled()) {
this.getLogger().debug("exclude pattern: " + rePattern);
}
this.cacheKeyParList.add(rePattern);
this.excludeRE = (rePattern == null) ? null : new RE(rePattern);
} catch (RESyntaxException rese) {
throw new ProcessingException("Syntax error in regexp pattern '"
+ rePattern + "'", rese);
}
this.isRequestedSource = false;
this.attributes = new AttributesImpl();
}
/* (non-Javadoc)
* @see org.apache.cocoon.caching.CacheableProcessingComponent#getKey()
*/
public Serializable getKey() {
StringBuffer buffer = new StringBuffer();
int len = this.cacheKeyParList.size();
for (int i = 0; i < len; i++) {
buffer.append(this.cacheKeyParList.get(i));
buffer.append(':');
}
return buffer.toString();
}
/**
* Gets the source validity, using a deferred validity object. The validity
* is initially empty since the resources that define it are not known
* before generation has occured. So the returned object is kept by the
* generator and filled with each of the resources that is traversed.
*
* @see org.apache.cocoon.components.source.impl.MultiSourceValidity
*/
public SourceValidity getValidity() {
if (this.validity == null) {
this.validity = new MultiSourceValidity(this.resolver, this.refreshDelay);
}
return this.validity;
}
/**
* Generate XML data.
*
* @throws SAXException if an error occurs while outputting the document
* @throws ProcessingException if something went wrong while traversing
* the source hierarchy
*/
    /**
     * Generate XML data.
     *
     * Resolves the configured source, checks that it is traversable and
     * exists, then emits one SAX document describing the hierarchy from the
     * configured root down to the requested source.
     *
     * @throws SAXException if an error occurs while outputting the document
     * @throws ProcessingException if something went wrong while traversing
     *         the source hierarchy
     */
    public void generate() throws SAXException, ProcessingException {
        Source src = null;
        Stack ancestors = null;
        try {
            src = this.resolver.resolveURI(this.source);
            if (!(src instanceof TraversableSource)) {
                throw new SourceException(this.source + " is not a traversable source");
            }
            final TraversableSource inputSource = (TraversableSource) src;
            if (!inputSource.exists()) {
                throw new ResourceNotFoundException(this.source + " does not exist.");
            }
            this.contentHandler.startDocument();
            this.contentHandler.startPrefixMapping(PREFIX, URI);
            // Walk from the root-pattern match down to the requested source.
            ancestors = getAncestors(inputSource);
            addAncestorPath(inputSource, ancestors);
            this.contentHandler.endPrefixMapping(PREFIX);
            this.contentHandler.endDocument();
            // Close the validity only on success; an aborted run leaves it
            // open (and therefore not yet usable for cache validation).
            if (this.validity != null) {
                this.validity.close();
            }
        } catch (SourceException se) {
            throw SourceUtil.handle(se);
        } catch (IOException ioe) {
            throw new ResourceNotFoundException("Could not read collection "
                                                + this.source, ioe);
        } finally {
            // Always release the resolved source and every ancestor source
            // still on the stack, even when generation failed.
            if (src != null) {
                this.resolver.release(src);
            }
            if (ancestors != null) {
                Enumeration enumeration = ancestors.elements();
                while (enumeration.hasMoreElements()) {
                    resolver.release((Source) enumeration.nextElement());
                }
            }
        }
    }
/**
* Creates a stack containing the ancestors of a traversable source up to
* specific parent matching the root pattern.
*
* @param source the traversable source whose ancestors shall be retrieved
* @return a Stack containing the ancestors.
*/
protected Stack getAncestors(TraversableSource source) throws IOException {
TraversableSource parent = source;
Stack ancestors = new Stack();
while ((parent != null) && !isRoot(parent)) {
parent = (TraversableSource) parent.getParent();
if (parent != null) {
ancestors.push(parent);
} else {
// no ancestor matched the root pattern
ancestors.clear();
}
}
return ancestors;
}
/**
* Adds recursively the path from the source matched by the root pattern
* down to the requested source.
*
* @param source the requested source.
* @param ancestors the stack of the ancestors.
* @throws SAXException
* @throws ProcessingException
*/
protected void addAncestorPath(TraversableSource source, Stack ancestors)
throws SAXException, ProcessingException {
if (ancestors.empty()) {
this.isRequestedSource = true;
addPath(source, depth);
} else {
startNode(COL_NODE_NAME, (TraversableSource) ancestors.pop());
addAncestorPath(source, ancestors);
endNode(COL_NODE_NAME);
}
}
/**
* Adds a single node to the generated document. If the path is a
* collection and depth is greater than zero, then recursive calls
* are made to add nodes for the collection's children.
*
* @param source the resource/collection to process
* @param depth how deep to scan the collection hierarchy
*
* @throws SAXException if an error occurs while constructing nodes
* @throws ProcessingException if a problem occurs with the source
*/
protected void addPath(TraversableSource source, int depth)
throws SAXException, ProcessingException {
if (source.isCollection()) {
startNode(COL_NODE_NAME, source);
addContent(source);
if (depth > 0) {
Collection contents = null;
try {
contents = source.getChildren();
if (sort.equals("name")) {
Arrays.sort(contents.toArray(), new Comparator() {
public int compare(Object o1, Object o2) {
if (reverse) {
return ((TraversableSource) o2).getName().compareTo(((TraversableSource) o1).getName());
}
return ((TraversableSource) o1).getName().compareTo(((TraversableSource) o2).getName());
}
});
} else if (sort.equals("size")) {
Arrays.sort(contents.toArray(), new Comparator() {
public int compare(Object o1, Object o2) {
if (reverse) {
return new Long(((TraversableSource) o2).getContentLength()).compareTo(new Long(((TraversableSource) o1).getContentLength()));
}
return new Long(((TraversableSource) o1).getContentLength()).compareTo(new Long(((TraversableSource) o2).getContentLength()));
}
});
} else if (sort.equals("lastmodified")) {
Arrays.sort(contents.toArray(), new Comparator() {
public int compare(Object o1, Object o2) {
if (reverse) {
return new Long(((TraversableSource) o2).getLastModified()).compareTo(new Long(((TraversableSource) o1).getLastModified()));
}
return new Long(((TraversableSource) o1).getLastModified()).compareTo(new Long(((TraversableSource) o2).getLastModified()));
}
});
} else if (sort.equals("collection")) {
Arrays.sort(contents.toArray(), new Comparator() {
public int compare(Object o1, Object o2) {
TraversableSource ts1 = (TraversableSource) o1;
TraversableSource ts2 = (TraversableSource) o2;
if (reverse) {
if (ts2.isCollection() && !ts1.isCollection())
return -1;
if (!ts2.isCollection() && ts1.isCollection())
return 1;
return ts2.getName().compareTo(ts1.getName());
}
if (ts2.isCollection() && !ts1.isCollection())
return 1;
if (!ts2.isCollection() && ts1.isCollection())
return -1;
return ts1.getName().compareTo(ts2.getName());
}
});
}
for (int i = 0; i < contents.size(); i++) {
if (isIncluded((TraversableSource) contents.toArray()[i]) && !isExcluded((TraversableSource) contents.toArray()[i])) {
addPath((TraversableSource) contents.toArray()[i], depth - 1);
}
}
} catch (SourceException e) {
throw new ProcessingException("Error adding paths", e);
} finally {
if (contents != null) {
Iterator iter = contents.iterator();
while (iter.hasNext()) {
resolver.release((Source) iter.next());
}
}
}
}
endNode(COL_NODE_NAME);
} else {
if (isIncluded(source) && !isExcluded(source)) {
startNode(RESOURCE_NODE_NAME, source);
addContent(source);
endNode(RESOURCE_NODE_NAME);
}
}
}
/**
* Allow subclasses a chance to generate additional elements within collection and resource
* elements.
*
* @param source the source to generate additional data for.
*/
    protected void addContent(TraversableSource source) throws SAXException, ProcessingException {
        // Intentionally empty: subclasses override this hook to emit extra
        // elements inside collection and resource nodes.
    }
/**
* Begins a named node and calls setNodeAttributes to set its attributes.
*
* @param nodeName the name of the new node
* @param source the source a node with its attributes is added for
*
* @throws SAXException if an error occurs while creating the node
*/
protected void startNode(String nodeName, TraversableSource source)
throws SAXException, ProcessingException {
if (this.validity != null) {
this.validity.addSource(source);
}
setNodeAttributes(source);
super.contentHandler.startElement(URI, nodeName, PREFIX + ':' + nodeName, attributes);
}
/**
* Sets the attributes for a given source. For example attributes for the
* name, the size and the last modification date of the source are added.
*
* @param source the source attributes are added for
*/
protected void setNodeAttributes(TraversableSource source)
throws SAXException, ProcessingException {
long lastModified = source.getLastModified();
attributes.clear();
attributes.addAttribute("", RES_NAME_ATTR_NAME,RES_NAME_ATTR_NAME,
"CDATA", source.getName());
attributes.addAttribute("", URI_ATTR_NAME,URI_ATTR_NAME,
"CDATA", source.getURI());
attributes.addAttribute("", LASTMOD_ATTR_NAME, LASTMOD_ATTR_NAME,
"CDATA", Long.toString(source.getLastModified()));
attributes.addAttribute("", DATE_ATTR_NAME, DATE_ATTR_NAME,
"CDATA", dateFormatter.format(new Date(lastModified)));
attributes.addAttribute("", SIZE_ATTR_NAME, SIZE_ATTR_NAME,
"CDATA", Long.toString(source.getContentLength()));
if (this.isRequestedSource) {
attributes.addAttribute("", "sort", "sort", "CDATA", this.sort);
attributes.addAttribute("", "reverse", "reverse", "CDATA",
String.valueOf(this.reverse));
attributes.addAttribute("", "requested", "requested", "CDATA", "true");
this.isRequestedSource = false;
}
}
/**
* Ends the named node.
*
* @param nodeName the name of the new node
*
* @throws SAXException if an error occurs while closing the node
*/
protected void endNode(String nodeName) throws SAXException {
super.contentHandler.endElement(URI, nodeName, PREFIX + ':' + nodeName);
}
/**
* Determines if a given source is the defined root.
*
* @param source the source to check
*
* @return true if the source is the root or the root pattern is not set,
* false otherwise.
*/
protected boolean isRoot(TraversableSource source) {
return this.rootRE == null || this.rootRE.match(source.getName());
}
/**
* Determines if a given source shall be visible.
*
* @param source the source to check
*
* @return true if the source shall be visible or the include Pattern is not set,
* false otherwise.
*/
protected boolean isIncluded(TraversableSource source) {
return this.includeRE == null || this.includeRE.match(source.getName());
}
/**
* Determines if a given source shall be excluded from viewing.
*
* @param source the source to check
*
* @return false if the given source shall not be excluded or the exclude Pattern is not set,
* true otherwise.
*/
protected boolean isExcluded(TraversableSource source) {
return this.excludeRE != null && this.excludeRE.match(source.getName());
}
/**
* Recycle resources
*/
public void recycle() {
this.cacheKeyParList = null;
this.attributes = null;
this.dateFormatter = null;
this.rootRE = null;
this.includeRE = null;
this.excludeRE = null;
this.validity = null;
super.recycle();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.parquet.metadata;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.KeyDeserializer;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import static org.apache.drill.exec.store.parquet.metadata.MetadataBase.ColumnMetadata;
import static org.apache.drill.exec.store.parquet.metadata.MetadataBase.ColumnTypeMetadata;
import static org.apache.drill.exec.store.parquet.metadata.MetadataBase.ParquetFileMetadata;
import static org.apache.drill.exec.store.parquet.metadata.MetadataBase.ParquetTableMetadataBase;
import static org.apache.drill.exec.store.parquet.metadata.MetadataBase.RowGroupMetadata;
import static org.apache.drill.exec.store.parquet.metadata.MetadataVersion.Constants.V4;
public class Metadata_V4 {
  /**
   * Top-level V4 Parquet table metadata: a {@link MetadataSummary} (schema,
   * directories, version info) plus a {@link FileMetadata} (per-file row
   * group details). This structure is JSON-(de)serialized, so the field and
   * accessor layout is part of the persisted metadata-cache format.
   */
  public static class ParquetTableMetadata_v4 extends ParquetTableMetadataBase {
    MetadataSummary metadataSummary = new MetadataSummary();
    FileMetadata fileMetadata = new FileMetadata();

    public ParquetTableMetadata_v4() {}

    public ParquetTableMetadata_v4(MetadataSummary metadataSummary) {
      this.metadataSummary = metadataSummary;
    }

    public ParquetTableMetadata_v4(MetadataSummary metadataSummary, FileMetadata fileMetadata) {
      this.metadataSummary = metadataSummary;
      this.fileMetadata = fileMetadata;
    }

    public ParquetTableMetadata_v4(String metadataVersion, ParquetTableMetadataBase parquetTableMetadata,
                                   List<ParquetFileMetadata_v4> files, List<Path> directories, String drillVersion, long totalRowCount, boolean allColumnsInteresting) {
      this.metadataSummary.metadataVersion = metadataVersion;
      this.fileMetadata.files = files;
      this.metadataSummary.directories = directories;
      // Shares (does not copy) the column type map of the source metadata;
      // the cast requires the source to already be a V4 instance.
      this.metadataSummary.columnTypeInfo = ((ParquetTableMetadata_v4) parquetTableMetadata).metadataSummary.columnTypeInfo;
      this.metadataSummary.drillVersion = drillVersion;
      this.metadataSummary.totalRowCount = totalRowCount;
      this.metadataSummary.allColumnsInteresting = allColumnsInteresting;
    }

    public ColumnTypeMetadata_v4 getColumnTypeInfo(String[] name) {
      return metadataSummary.getColumnTypeInfo(name);
    }

    @Override
    public List<Path> getDirectories() {
      return metadataSummary.getDirectories();
    }

    @Override
    public List<? extends ParquetFileMetadata> getFiles() {
      return fileMetadata.getFiles();
    }

    @Override
    public String getMetadataVersion() {
      return metadataSummary.getMetadataVersion();
    }

    /**
     * If directories list and file metadata list contain relative paths, update it to absolute ones
     *
     * @param baseDir base parent directory
     */
    public void updateRelativePaths(String baseDir) {
      // update directories paths to absolute ones
      this.metadataSummary.directories = MetadataPathUtils.convertToAbsolutePaths(metadataSummary.directories, baseDir);
      // update files paths to absolute ones (unchecked cast: the helper is
      // expected to return the same element type it was given)
      this.fileMetadata.files = (List<ParquetFileMetadata_v4>) MetadataPathUtils.convertToFilesWithAbsolutePaths(fileMetadata.files, baseDir);
    }

    @Override
    public void assignFiles(List<? extends ParquetFileMetadata> newFiles) {
      this.fileMetadata.assignFiles(newFiles);
    }

    @Override
    public boolean hasColumnMetadata() {
      return true;
    }

    @Override
    public PrimitiveType.PrimitiveTypeName getPrimitiveType(String[] columnName) {
      return getColumnTypeInfo(columnName).primitiveType;
    }

    @Override
    public OriginalType getOriginalType(String[] columnName) {
      return getColumnTypeInfo(columnName).originalType;
    }

    @Override
    public Integer getRepetitionLevel(String[] columnName) {
      return getColumnTypeInfo(columnName).repetitionLevel;
    }

    @Override
    public Integer getDefinitionLevel(String[] columnName) {
      return getColumnTypeInfo(columnName).definitionLevel;
    }

    @Override
    public Integer getScale(String[] columnName) {
      return getColumnTypeInfo(columnName).scale;
    }

    @Override
    public Integer getPrecision(String[] columnName) {
      return getColumnTypeInfo(columnName).precision;
    }

    @Override
    public boolean isRowGroupPrunable() {
      return true;
    }

    // NOTE(review): this is a shallow copy — the clone shares the same
    // MetadataSummary and FileMetadata instances; confirm callers do not
    // expect an independent deep copy.
    @Override
    public ParquetTableMetadataBase clone() {
      return new ParquetTableMetadata_v4(metadataSummary, fileMetadata);
    }

    @Override
    public String getDrillVersion() {
      return metadataSummary.drillVersion;
    }

    public MetadataSummary getSummary() {
      return metadataSummary;
    }

    public long getTotalRowCount() {
      return metadataSummary.getTotalRowCount();
    }

    public long getTotalNullCount(String[] columnName) {
      return getColumnTypeInfo(columnName).totalNullCount;
    }

    public boolean isAllColumnsInteresting() {
      return metadataSummary.isAllColumnsInteresting();
    }

    public ConcurrentHashMap<ColumnTypeMetadata_v4.Key, ColumnTypeMetadata_v4> getColumnTypeInfoMap() {
      return metadataSummary.columnTypeInfo;
    }

    @Override
    public List<? extends MetadataBase.ColumnTypeMetadata> getColumnTypeInfoList() {
      // Snapshot of the concurrent map's values at call time.
      return new ArrayList<>(metadataSummary.columnTypeInfo.values());
    }

    public void setTotalRowCount(long totalRowCount) {
      metadataSummary.setTotalRowCount(totalRowCount);
    }
  }
/**
* Struct which contains the metadata for a single parquet file
*/
  /**
   * Struct which contains the metadata for a single parquet file: its path,
   * its length in bytes and the metadata of each of its row groups.
   * JSON-serialized via the public @JsonProperty fields; the getters are
   * @JsonIgnore'd so the fields are the single serialized representation.
   */
  public static class ParquetFileMetadata_v4 extends ParquetFileMetadata {
    @JsonProperty
    public Path path;
    @JsonProperty
    public Long length;
    @JsonProperty
    public List<RowGroupMetadata_v4> rowGroups;

    public ParquetFileMetadata_v4() {
    }

    public ParquetFileMetadata_v4(Path path, Long length, List<RowGroupMetadata_v4> rowGroups) {
      this.path = path;
      this.length = length;
      this.rowGroups = rowGroups;
    }

    @Override
    public String toString() {
      return String.format("path: %s rowGroups: %s", path, rowGroups);
    }

    @JsonIgnore
    @Override
    public Path getPath() {
      return path;
    }

    @JsonIgnore
    @Override
    public Long getLength() {
      return length;
    }

    @JsonIgnore
    @Override
    public List<? extends RowGroupMetadata> getRowGroups() {
      return rowGroups;
    }
  }
/**
* A struct that contains the metadata for a parquet row group
*/
  /**
   * A struct that contains the metadata for a parquet row group: byte offset
   * and length within the file, row count, host affinity for scheduling, and
   * per-column metadata. JSON-serialized via the public @JsonProperty fields.
   */
  public static class RowGroupMetadata_v4 extends RowGroupMetadata {
    @JsonProperty
    public Long start;
    @JsonProperty
    public Long length;
    @JsonProperty
    public Long rowCount;
    @JsonProperty
    public Map<String, Float> hostAffinity;
    @JsonProperty
    public List<ColumnMetadata_v4> columns;

    public RowGroupMetadata_v4() {
    }

    public RowGroupMetadata_v4(Long start, Long length, Long rowCount, Map<String, Float> hostAffinity,
                               List<ColumnMetadata_v4> columns) {
      this.start = start;
      this.length = length;
      this.rowCount = rowCount;
      this.hostAffinity = hostAffinity;
      this.columns = columns;
    }

    @Override
    public Long getStart() {
      return start;
    }

    @Override
    public Long getLength() {
      return length;
    }

    @Override
    public Long getRowCount() {
      return rowCount;
    }

    @Override
    public Map<String, Float> getHostAffinity() {
      return hostAffinity;
    }

    @Override
    public List<? extends ColumnMetadata> getColumns() {
      return columns;
    }
  }
public static class ColumnTypeMetadata_v4 extends ColumnTypeMetadata {
@JsonProperty
public String[] name;
@JsonProperty
public PrimitiveType.PrimitiveTypeName primitiveType;
@JsonProperty
public OriginalType originalType;
@JsonProperty
public List<OriginalType> parentTypes;
@JsonProperty
public int precision;
@JsonProperty
public int scale;
@JsonProperty
public int repetitionLevel;
@JsonProperty
public int definitionLevel;
@JsonProperty
public long totalNullCount = 0;
@JsonProperty
public boolean isInteresting = false;
// Key to find by name only
@JsonIgnore
private Key key;
public ColumnTypeMetadata_v4() {
}
private ColumnTypeMetadata_v4(Builder builder) {
this.name = builder.name;
this.primitiveType = builder.primitiveType;
this.originalType = builder.originalType;
this.precision = builder.precision;
this.scale = builder.scale;
this.repetitionLevel = builder.repetitionLevel;
this.definitionLevel = builder.definitionLevel;
this.key = new Key(name);
this.totalNullCount = builder.totalNullCount;
this.isInteresting = builder.isInteresting;
this.parentTypes = Collections.unmodifiableList(builder.parentTypes);
}
@JsonIgnore
private Key key() {
return this.key;
}
public static class Key {
private SchemaPath name;
private int hashCode = 0;
public Key(String[] name) {
this.name = SchemaPath.getCompoundPath(name);
}
public Key(SchemaPath name) {
this.name = new SchemaPath(name);
}
@Override
public int hashCode() {
if (hashCode == 0) {
hashCode = name.hashCode();
}
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final Key other = (Key) obj;
return this.name.equals(other.name);
}
@Override
public String toString() {
return name.toString();
}
public static class DeSerializer extends KeyDeserializer {
public DeSerializer() {
}
@Override
public Object deserializeKey(String key, com.fasterxml.jackson.databind.DeserializationContext ctxt) {
// key string should contain '`' char if the field was serialized as SchemaPath object
if (key.contains("`")) {
return new Key(SchemaPath.parseFromString(key));
}
return new Key(key.split("\\."));
}
}
}
@JsonIgnore
@Override
public PrimitiveType.PrimitiveTypeName getPrimitiveType() {
return primitiveType;
}
@JsonIgnore
@Override
public String[] getName() {
return name;
}
public static class Builder {
private String[] name;
private PrimitiveType.PrimitiveTypeName primitiveType;
private OriginalType originalType;
private List<OriginalType> parentTypes;
private int precision;
private int scale;
private int repetitionLevel;
private int definitionLevel;
private long totalNullCount;
private boolean isInteresting;
public Builder name(String[] name) {
this.name = name;
return this;
}
public Builder primitiveType(PrimitiveType.PrimitiveTypeName primitiveType) {
this.primitiveType = primitiveType;
return this;
}
public Builder originalType(OriginalType originalType) {
this.originalType = originalType;
return this;
}
public Builder parentTypes(List<OriginalType> parentTypes) {
this.parentTypes = parentTypes;
return this;
}
public Builder precision(int precision) {
this.precision = precision;
return this;
}
public Builder scale(int scale) {
this.scale = scale;
return this;
}
public Builder repetitionLevel(int repetitionLevel) {
this.repetitionLevel = repetitionLevel;
return this;
}
public Builder definitionLevel(int definitionLevel) {
this.definitionLevel = definitionLevel;
return this;
}
public Builder totalNullCount(long totalNullCount) {
this.totalNullCount = totalNullCount;
return this;
}
public Builder interesting(boolean isInteresting) {
this.isInteresting = isInteresting;
return this;
}
public ColumnTypeMetadata_v4 build() {
return new ColumnTypeMetadata_v4(this);
}
}
}
  /**
   * A struct that contains the metadata for a column in a parquet file.
   * Note: Since the structure of column metadata hasn't changed from v3, ColumnMetadata_v4 extends ColumnMetadata_v3
   */
  public static class ColumnMetadata_v4 extends Metadata_V3.ColumnMetadata_v3 {
    public ColumnMetadata_v4() {
    }

    // Delegates entirely to the v3 representation: name, primitive type,
    // min/max statistics and null count are stored by the superclass.
    public ColumnMetadata_v4(String[] name, PrimitiveType.PrimitiveTypeName primitiveType, Object minValue, Object maxValue, Long nulls) {
      super(name, primitiveType, minValue, maxValue, nulls);
    }
  }
  /**
   * Table-wide summary of the V4 metadata: metadata format version, merged
   * column type info, directory list, Drill version and total row count.
   * Serialized with the V4 type name so the version can be detected on read.
   */
  @JsonTypeName(V4)
  public static class MetadataSummary {
    @JsonProperty(value = "metadata_version")
    private String metadataVersion;
    /*
     ColumnTypeInfo is schema information from all the files and row groups, merged into
     one. To get this info, we pass the ParquetTableMetadata object all the way down to the
     RowGroup and the column type is built there as it is read from the footer.
     */
    @JsonProperty
    ConcurrentHashMap<ColumnTypeMetadata_v4.Key, ColumnTypeMetadata_v4> columnTypeInfo = new ConcurrentHashMap<>();
    @JsonProperty
    List<Path> directories;
    @JsonProperty
    String drillVersion;
    @JsonProperty
    long totalRowCount = 0;
    @JsonProperty
    boolean allColumnsInteresting = false;

    public MetadataSummary() {
    }

    public MetadataSummary(String metadataVersion, String drillVersion, boolean allColumnsInteresting) {
      this(metadataVersion, drillVersion, new ArrayList<>(), allColumnsInteresting);
    }

    public MetadataSummary(String metadataVersion, String drillVersion, List<Path> directories, boolean allColumnsInteresting) {
      this.metadataVersion = metadataVersion;
      this.drillVersion = drillVersion;
      this.directories = directories;
      this.allColumnsInteresting = allColumnsInteresting;
    }

    @JsonIgnore
    public ColumnTypeMetadata_v4 getColumnTypeInfo(String[] name) {
      return columnTypeInfo.get(new ColumnTypeMetadata_v4.Key(name));
    }

    @JsonIgnore
    public ColumnTypeMetadata_v4 getColumnTypeInfo(ColumnTypeMetadata_v4.Key key) {
      return columnTypeInfo.get(key);
    }

    @JsonIgnore
    public List<Path> getDirectories() {
      return directories;
    }

    @JsonIgnore
    public String getMetadataVersion() {
      return metadataVersion;
    }

    @JsonIgnore
    public boolean isAllColumnsInteresting() {
      return allColumnsInteresting;
    }

    @JsonIgnore
    public void setAllColumnsInteresting(boolean allColumnsInteresting) {
      this.allColumnsInteresting = allColumnsInteresting;
    }

    @JsonIgnore
    public void setTotalRowCount(Long totalRowCount) {
      this.totalRowCount = totalRowCount;
    }

    @JsonIgnore
    public Long getTotalRowCount() {
      return this.totalRowCount;
    }
  }
/*
* A struct that holds list of file metadata in a directory
*/
  /*
   * A struct that holds list of file metadata in a directory
   */
  public static class FileMetadata {
    @JsonProperty
    List<ParquetFileMetadata_v4> files;

    public FileMetadata() {
    }

    @JsonIgnore
    public List<ParquetFileMetadata_v4> getFiles() {
      return files;
    }

    @JsonIgnore
    public void assignFiles(List<? extends ParquetFileMetadata> newFiles) {
      // Unchecked downcast: callers are expected to supply V4 file metadata;
      // a foreign element type would only surface later as a CCE on access.
      this.files = (List<ParquetFileMetadata_v4>) newFiles;
    }
  }
/*
* A struct that holds file metadata and row count and null count of a single file
*/
public static class ParquetFileAndRowCountMetadata {
ParquetFileMetadata_v4 fileMetadata;
Map<ColumnTypeMetadata_v4.Key, Long> totalNullCountMap;
long fileRowCount;
public ParquetFileAndRowCountMetadata() {
}
public ParquetFileAndRowCountMetadata(ParquetFileMetadata_v4 fileMetadata, Map<ColumnTypeMetadata_v4.Key, Long> totalNullCountMap, long fileRowCount) {
this.fileMetadata = fileMetadata;
this.totalNullCountMap = totalNullCountMap;
this.fileRowCount = fileRowCount;
}
public ParquetFileMetadata_v4 getFileMetadata() {
return this.fileMetadata;
}
public long getFileRowCount() {
return this.fileRowCount;
}
public Map<ColumnTypeMetadata_v4.Key, Long> getTotalNullCountMap() {
return totalNullCountMap;
}
}
}
| |
/* Spelling of numbers in modern English,
spoken in USA, Great Britain, Australia,
parts of Canada, former English colonies
and throughout the world as "lingua franca"
@(#) $Id: EngSpeller.java 820 2011-11-07 21:59:07Z gfis $
2016-01-18: set m3="NO_LIARDS" for US counting: million, billion ...
2011-10-26: spellClock
2009-11-24: spellGreeting
2006-01-06: super()
2005-07-23: with word parsing
2005-06-01, Georg Fischer
*/
/*
* Copyright 2006 Dr. Georg Fischer <punctum at punctum dot kom>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teherba.numword;
import org.teherba.numword.BaseSpeller;
/**
* Spells numbers in English
* @author Dr. Georg Fischer
*/
public class EngSpeller extends BaseSpeller {
public final static String CVSID = "@(#) $Id: EngSpeller.java 820 2011-11-07 21:59:07Z gfis $";
/**
* Constructor
*/
    /**
     * Constructor: registers the language codes and the English word tables
     * (units, tens, teens) and morphemes used by the spelling engine.
     * The morphem key strings ("h1", "t1", ...) are part of the engine's
     * contract with BaseSpeller and must not be changed.
     */
    public EngSpeller() {
        super();
        setIso639("eng,en");
        setDescription("English");
        setMaxLog(false);
        setSeparator(true);
        // Units 0-9.
        wordN = new String[]
        { "zero"
        , "one"
        , "two"
        , "three"
        , "four"
        , "five"
        , "six"
        , "seven"
        , "eight"
        , "nine"
        };
        // Tens 10-90; index 0 unused.
        wordN0 = new String[]
        { ""
        , "ten"
        , "twenty"
        , "thirty"
        , "forty"
        , "fifty"
        , "sixty"
        , "seventy"
        , "eighty"
        , "ninety"
        };
        // Teens 10-19.
        word1N = new String[]
        { "ten"
        , "eleven"
        , "twelve"
        , "thirteen"
        , "fourteen"
        , "fifteen"
        , "sixteen"
        , "seventeen"
        , "eighteen"
        , "nineteen"
        };
        setMorphem("h1", "hundred");
        setMorphem("t1", "thousand");
        setMorphem("m1", "lion");
        setMorphem("m2", "lions");
        setMorphem("m3", "NO_LIARDS"); // US counting: millions, billions ... (thanks to Nino Svonja <nino@lumanetix.com>)
        setMorphem("p0", " ");
        setMorphem("p1", "-");
        setMorphem("p2", "s");
        setMorphem("p3", "and");
        enumerateMorphems();
    } // Constructor
/**
* Appends the wording for a triple of digits,
* plus the remaining power of 1000
* @param number the remaining part of the whole number
*/
    /**
     * Appends the wording for a triple of digits,
     * plus the remaining power of 1000.
     * Relies on state inherited from BaseSpeller: digitN00/digitN0/digitN
     * (presumably the hundreds/tens/ones digits of the current triple),
     * nullOnly, zeroTuple, singleTuple and logTuple — TODO confirm exact
     * semantics against BaseSpeller.
     * @param number the remaining part of the whole number
     */
    public void spellTuple(String number) {
        // hundreds
        switch (digitN00) {
            case 0:
                break;
            default:
                spellN(digitN00);
                putMorphem("h1");
                // British-style "and" between hundreds and the rest,
                // e.g. "one hundred and five".
                if (digitN0 != 0 || digitN != 0) {
                    putMorphem("p3");
                }
                break;
        } // switch 100
        // tens and ones
        switch (digitN0) {
            case 0:
                if (nullOnly) {
                    spellN(0); // lonely 0
                }
                else
                if (digitN > 0) {
                    spellN(digitN);
                }
                break;
            case 1:
                // 10-19 have irregular forms ("eleven", "twelve", ...).
                spell1N(digitN);
                break;
            default:
                spellN0(digitN0);
                if (digitN >= 1) {
                    append(getMorphem("p1")); // "-"
                    append(wordN[digitN]);
                }
                break;
        } // switch digitN0
        if (! zeroTuple) { // append thousand, million ... */
            switch (logTuple) {
                case 0: // no thousands
                    break;
                case 1:
                    putMorphem("t1");
                    break;
                default:
                    spellN000(logTuple);
                    append(getMorphem("m1")); // lion
                    if (! singleTuple) {
                        append(getMorphem("p2")); // two million"s"
                    }
                    break;
            } // switch logTuple
        } // thousands ...
    } // spellTuple
/**
 * Returns the month's name.
 * @param month month's number, &gt;= 1 and &lt;= 12
 * (0 yields the generic word "Month")
 * @return word denoting the month, or the number itself if out of range
 */
public String spellMonth(int month) {
    final String[] names =
            { "Month"
            , "January"
            , "February"
            , "March"
            , "April"
            , "May"
            , "June"
            , "July"
            , "August"
            , "September"
            , "October"
            , "November"
            , "December"
            };
    return (month >= 0 && month < names.length)
            ? names[month]
            : Integer.toString(month);
} // spellMonth
/**
 * Returns the season's name.
 * @param season number of the quarter in the year:
 * 1 -&gt; Spring, 2 -&gt; Summer, 3 -&gt; Autumn, 4 -&gt; Winter
 * (0 yields the generic word "Season")
 * @return word denoting the season, or the number itself if out of range
 */
public String spellSeason(int season) {
    final String[] names =
            { "Season"
            , "Spring"
            , "Summer"
            , "Autumn"
            , "Winter"
            };
    return (season >= 0 && season < names.length)
            ? names[season]
            : Integer.toString(season);
} // spellSeason
/**
 * Returns the week day's name.
 * @param weekDay number of day in week, &gt;= 0 and &lt;= 7,
 * 1 -&gt; Monday, 7 -&gt; Sunday (0 yields the generic word "Weekday")
 * @return word denoting the day in the week, or the number itself if out of range
 */
public String spellWeekDay(int weekDay) {
    final String[] names =
            { "Weekday"
            , "Monday"
            , "Tuesday"
            , "Wednesday"
            , "Thursday"
            , "Friday"
            , "Saturday"
            , "Sunday"
            };
    return (weekDay >= 0 && weekDay < names.length)
            ? names[weekDay]
            : Integer.toString(weekDay);
} // spellWeekDay
//================================================================
/** Returns a denotation of the day's time, possibly in several variants
 * @param hour hour 0..24
 * @param minute minute 0..59
 * @param variant the code behind the commandline option "-h":
 * empty or 0 (official), 1,2,3 for a language specific variant.
 * @return phrase corresponding to the denotation of the time, for example
 * <ul>
 * <li>English, variant "1": 18:15 =&gt; "quarter past six"</li>
 * </ul>
 */
public String spellClock(int hour, int minute, String variant) {
    // fallback result: digital "hh:mm"
    String result = String.valueOf(hour + 100).substring(1) + ':'
            + String.valueOf(minute + 100).substring(1);
    // hour on the 12-hour dial; advance past the half hour ("quarter to seven")
    int hour12 = hour + (minute > 30 ? 1 : 0);
    hour12 = (hour12 == 0 ? 12 : (hour12 >= 13 ? hour12 - 12 : hour12));
    String spellHour = spellCardinal(String.valueOf(hour12));
    if (variant.length() == 0 || variant.equals("0")) { // official wording
        // fixed: the old code appended "o'clock " with a trailing blank,
        // which left a dangling blank on full hours and a double blank
        // before the minutes
        result = spellCardinal(String.valueOf(hour)).replace("zero", "twelve")
                + " o'clock";
        if (minute > 0) {
            result += " " + spellCardinal(String.valueOf(minute));
        }
    } else if (variant.equals("1")) { // colloquial wording
        if (minute % 15 == 0) { // full, quarter past, half past, quarter to
            switch (minute / 15) {
                default:
                case 0:
                    if (hour == 24) {
                        result = "midnight";
                    } else {
                        result = spellHour + " o'clock";
                    }
                    break;
                case 1:
                    result = "quarter past " + spellHour;
                    break;
                case 2:
                    result = "half past " + spellHour;
                    break;
                case 3:
                    result = "quarter to " + spellHour;
                    break;
            } // switch 0..3
        } else { // minutes relative to the nearer full hour
            result = (minute < 30 ? spellCardinal(String.valueOf( minute)) + " past "
                    : spellCardinal(String.valueOf(60 - minute)) + " to ")
                    + spellHour;
        }
    } // variant "1"
    return result;
} // spellClock(3)
//================================================================
/** Get a word for one of the 4 cardinal directions,
 * and for the particle for 32th fractions.
 * @param cardDir a cardinal direction, 0 = North, 1 = East, 2 = South, 3 = West,
 * 4 = the connecting particle "to"; any other value yields ""
 * @return the English compass word
 */
protected String getCompassWord(int cardDir) {
    final String[] words = { "north", "east", "south", "west", "to" };
    return (cardDir >= 0 && cardDir < words.length) ? words[cardDir] : "";
} // getCompassWord
//================================================================
/** Returns a greeting corresponding to the parameter time.
 * @param timeOfDay hour of the day, 0..24; mapped to one of 5 six-hour slots
 * @return greeting corresponding to the time of the day ("Hello" if out of range)
 */
public String spellGreeting(int timeOfDay) {
    switch (timeOfDay / 6) {
        case 0:
            return "Good bye";
        case 1:
            return "Good morning";
        case 2:
            return "Hello";
        case 3:
            return "Good evening";
        case 4:
            return "Good night";
        default:
            return "Hello";
    } // switch slot
} // spellGreeting(int)
//================================================================
/** Returns a planet's name.
 * @param planet number of the planet (3 = earth, 0 = sun, -1 = moon)
 * @return planet's name; other numbers fall back to the superclass wording
 */
public String spellPlanet(int planet) {
    String result = super.spellPlanet(planet);
    if (planet == -1) {
        result = "Moon";
    } else if (planet == 0) {
        result = "Sun";
    } else if (planet == 1) {
        result = "Mercury";
    } else if (planet == 3) {
        result = "Earth";
    }
    return result;
} // spellPlanet(int)
//================================================================
} // EngSpeller
| |
package com.suscipio_solutions.consecro_mud.Abilities.Fighter;
import java.util.Vector;
import com.suscipio_solutions.consecro_mud.Abilities.interfaces.Ability;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CMMsg;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CharStats;
import com.suscipio_solutions.consecro_mud.Common.interfaces.PhyStats;
import com.suscipio_solutions.consecro_mud.MOBS.interfaces.MOB;
import com.suscipio_solutions.consecro_mud.Races.interfaces.Race;
import com.suscipio_solutions.consecro_mud.core.CMClass;
import com.suscipio_solutions.consecro_mud.core.CMLib;
import com.suscipio_solutions.consecro_mud.core.interfaces.Environmental;
import com.suscipio_solutions.consecro_mud.core.interfaces.Physical;
@SuppressWarnings("rawtypes")
public class Fighter_Pin extends FighterSkill
{
    @Override public String ID() { return "Fighter_Pin"; }
    private final static String localizedName = CMLib.lang().L("Pin");
    @Override public String name() { return localizedName; }

    /** Affects-list text: the invoker shows "(Pinning)", the victim "(Pinned)". */
    @Override
    public String displayText()
    {
        if(affected==invoker)
            return "(Pinning)";
        return "(Pinned)";
    }

    private static final String[] triggerStrings =I(new String[] {"PIN"});
    @Override public int abstractQuality(){return Ability.QUALITY_MALICIOUS;}
    @Override public String[] triggerStrings(){return triggerStrings;}
    @Override protected int canAffectCode(){return 0;}
    @Override protected int canTargetCode(){return Ability.CAN_MOBS;}
    @Override public int classificationCode(){return Ability.ACODE_SKILL|Ability.DOMAIN_GRAPPLING;}
    @Override public long flags(){return Ability.FLAG_BINDING;}
    @Override public int usageType(){return USAGE_MOVEMENT;}

    // The other party of the pin: on the invoker's copy this is the victim,
    // on the victim's copy it is the invoker. Used to break the pin when the
    // partner dies.
    protected MOB pairedWith=null;

    /**
     * Vetoes most actions by a pinned MOB and dissolves the pin when the
     * paired MOB dies.
     * @param myHost the host of this effect
     * @param msg the message being previewed
     * @return false to veto the message, true to let it proceed
     */
    @Override
    public boolean okMessage(final Environmental myHost, final CMMsg msg)
    {
        if(!(affected instanceof MOB))
            return true;
        final MOB mob=(MOB)affected;
        // if the MOB we are paired with dies, release the pin immediately
        if((msg.sourceMinor() == CMMsg.TYP_DEATH)&&(pairedWith != null)&&(msg.amISource(pairedWith)))
        {
            unInvoke();
            return super.okMessage(myHost, msg);
        }
        // when this spell is on a MOBs Affected list,
        // it should consistantly prevent the mob
        // from trying to do ANYTHING except sleep
        if((msg.amISource(mob))&&(!msg.sourceMajor(CMMsg.MASK_ALWAYS)))
        {
            // block anything requiring eyes, hands, mouth, or movement
            if((msg.sourceMajor(CMMsg.MASK_EYES))
            ||(msg.sourceMajor(CMMsg.MASK_HANDS))
            ||(msg.sourceMajor(CMMsg.MASK_MOUTH))
            ||(msg.sourceMajor(CMMsg.MASK_MOVE)))
            {
                if(msg.sourceMessage()!=null)
                    mob.tell(L("You are pinned!"));
                return false;
            }
        }
        return super.okMessage(myHost,msg);
    }

    /**
     * Marks the affected MOB as unable to move and as sitting while pinned.
     * @param affected the physical object carrying this effect
     * @param affectableStats the stats object to adjust
     */
    @Override
    public void affectPhyStats(Physical affected, PhyStats affectableStats)
    {
        super.affectPhyStats(affected,affectableStats);
        // when this spell is on a MOBs Affected list,
        // it should consistantly put the mob into
        // a sleeping state, so that nothing they do
        // can get them out of it.
        affectableStats.setSensesMask(affectableStats.sensesMask()|PhyStats.CAN_NOT_MOVE);
        affectableStats.setDisposition(affectableStats.disposition()|PhyStats.IS_SITTING);
    }

    /**
     * Rates how useful casting this skill would be right now: indifferent
     * when out of reach or when the target is more than 200 weight units
     * heavier than the caster.
     * @param mob the prospective caster
     * @param target the prospective target
     * @return one of the Ability.QUALITY_* constants
     */
    @Override
    public int castingQuality(MOB mob, Physical target)
    {
        if((mob!=null)&&(target!=null))
        {
            if(mob.isInCombat()&&(mob.rangeToTarget()>0))
                return Ability.QUALITY_INDIFFERENT;
            if((target instanceof MOB)&&(mob.baseWeight()<(((MOB)target).baseWeight()-200)))
                return Ability.QUALITY_INDIFFERENT;
        }
        return super.castingQuality(mob,target);
    }

    /**
     * Removes the pin effect, announcing the release to the room (or to the
     * MOB alone if it has no location) and standing the MOB back up.
     */
    @Override
    public void unInvoke()
    {
        // undo the affects of this spell
        if(!(affected instanceof MOB))
            return;
        final MOB mob=(MOB)affected;
        super.unInvoke();
        if(canBeUninvoked())
        {
            if((!mob.amDead())&&(CMLib.flags().isInTheGame(mob,false)))
            {
                if(mob==invoker)
                {
                    if(mob.location()!=null)
                        mob.location().show(mob,null,CMMsg.MSG_OK_ACTION,L("<S-NAME> release(s) <S-HIS-HER> pin."));
                    else
                        mob.tell(L("You release your pin."));
                }
                else
                {
                    if(mob.location()!=null)
                        mob.location().show(mob,null,CMMsg.MSG_OK_ACTION,L("<S-NAME> <S-IS-ARE> released from the pin"));
                    else
                        mob.tell(L("You are released from the pin."));
                }
                CMLib.commands().postStand(mob,true);
            }
        }
    }

    /**
     * Attempts to pin the target: checks range and relative weight, rolls to
     * hit, and on success applies the effect to BOTH the target and the
     * invoker, cross-linking the two copies via {@link #pairedWith}.
     * @param mob the invoker
     * @param commands remaining command-line words
     * @param givenTarget an explicit target, or null to parse from commands
     * @param auto true when invoked automatically (always succeeds the roll)
     * @param asLevel level override, or 0
     * @return true if the pin took hold
     */
    @Override
    public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
    {
        final MOB target=this.getTarget(mob,commands,givenTarget);
        if(target==null) return false;
        if(mob.isInCombat()&&(mob.rangeToTarget()>0))
        {
            mob.tell(L("You are too far away from your target to pin them!"));
            return false;
        }
        if((!auto)&&(mob.baseWeight()<(target.baseWeight()-200)))
        {
            mob.tell(L("@x1 is too big to pin!",target.name(mob)));
            return false;
        }
        // the invoke method for spells receives as
        // parameters the invoker, and the REMAINING
        // command line parameters, divided into words,
        // and added as String objects to a vector.
        if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
            return false;
        // level difference penalizes the proficiency roll by 10 points per level
        int levelDiff=target.phyStats().level()-(mob.phyStats().level()+(2*super.getXLEVELLevel(mob)));
        if(levelDiff>0)
            levelDiff=levelDiff*10;
        else
            levelDiff=0;
        // now see if it worked
        final boolean hit=(auto)||CMLib.combat().rollToHit(mob,target);
        // strength difference between target and caster also shifts the roll
        boolean success=proficiencyCheck(mob,(-levelDiff)+(-(((target.charStats().getStat(CharStats.STAT_STRENGTH)-mob.charStats().getStat(CharStats.STAT_STRENGTH))*5))),auto)&&(hit);
        success=success&&(target.charStats().getBodyPart(Race.BODY_LEG)>0);
        if(success)
        {
            // it worked, so build a copy of this ability,
            // and add it to the affects list of the
            // affected MOB. Then tell everyone else
            // what happened.
            invoker=mob;
            final CMMsg msg=CMClass.getMsg(mob,target,this,CMMsg.MSK_MALICIOUS_MOVE|CMMsg.TYP_JUSTICE|(auto?CMMsg.MASK_ALWAYS:0),auto?L("<T-NAME> get(s) pinned!"):L("^F^<FIGHT^><S-NAME> pin(s) <T-NAMESELF> to the floor!^</FIGHT^>^?"));
            CMLib.color().fixSourceFightColor(msg);
            if(mob.location().okMessage(mob,msg))
            {
                mob.location().send(mob,msg);
                if(msg.value()<=0)
                {
                    // NOTE(review): the second assignment overwrites the first, so
                    // `success` only reflects the self-affect — confirm whether both
                    // results should be combined (e.g. with &&) instead
                    success=maliciousAffect(mob,target,asLevel,5,-1)!=null;
                    success=maliciousAffect(mob,mob,asLevel,5,-1)!=null;
                    // cross-link the two effect copies; if only one side got the
                    // effect, remove the orphaned copy
                    Fighter_Pin targetPin = (Fighter_Pin)target.fetchEffect(ID());
                    Fighter_Pin sourcePin = (Fighter_Pin)mob.fetchEffect(ID());
                    if((targetPin != null) && (sourcePin == null))
                    { targetPin.unInvoke(); targetPin = null;}
                    if((sourcePin != null) && (targetPin == null))
                    { sourcePin.unInvoke(); sourcePin = null;}
                    if(sourcePin != null)
                        sourcePin.pairedWith = target;
                    if(targetPin != null)
                        targetPin.pairedWith = mob;
                }
            }
        }
        else
            return maliciousFizzle(mob,target,L("<S-NAME> attempt(s) to pin <T-NAMESELF>, but fail(s)."));
        // return whether it worked
        return success;
    }
}
| |
/*
* Copyright (c) 2001, 2003, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 4453053
* @summary If a server shuts down correctly during handshaking, the client doesn't see it.
* @author Brad Wetmore
*/
import java.io.*;
import java.net.*;
import java.security.*;
import javax.net.ssl.*;
/**
 * Regression test for bug 4453053: if a server shuts down correctly during
 * handshaking, the client must see it. The server requires client
 * authentication while the client is configured with an unknown keystore,
 * so the handshake is expected to fail on both sides in a well-defined way.
 */
public class NullCerts {

    /*
     * =============================================================
     * Set the various variables needed for the tests, then
     * specify what tests to run on each side.
     */

    /*
     * Should we run the client or server in a separate thread?
     * Both sides can throw exceptions, but do you have a preference
     * as to which side should be the main thread.
     */
    private static boolean separateServerThread = true;

    /*
     * Where do we find the keystores?
     */
    // private final static String pathToStores = "./etc";
    private final static String pathToStores = "../../../../../../../etc";
    private final static String keyStoreFile = "keystore";
    private final static String trustStoreFile = "truststore";
    private final static String passwd = "passphrase";
    private final static char[] cpasswd = "passphrase".toCharArray();

    /*
     * Is the server ready to serve?
     */
    volatile static boolean serverReady = false;

    /*
     * Turn on SSL debugging?
     */
    private final static boolean DEBUG = false;

    /*
     * If the client or server is doing some kind of object creation
     * that the other side depends on, and that thread prematurely
     * exits, you may experience a hang. The test harness will
     * terminate all hung threads after its timeout has expired,
     * currently 3 minutes by default, but you might try to be
     * smart about it....
     */

    /*
     * Define the server side of the test.
     *
     * Requires client authentication, so the handshake with the
     * credential-less client is expected to fail with a null cert
     * chain exception.
     */
    private void doServerSide() throws Exception {
        SSLServerSocketFactory sslssf =
            (SSLServerSocketFactory) SSLServerSocketFactory.getDefault();
        SSLServerSocket sslServerSocket =
            (SSLServerSocket) sslssf.createServerSocket(serverPort, 3);
        sslServerSocket.setNeedClientAuth(true);
        serverPort = sslServerSocket.getLocalPort();

        /*
         * Signal Client, we're ready for his connect.
         */
        serverReady = true;

        SSLSocket sslSocket = (SSLSocket) sslServerSocket.accept();
        InputStream sslIS = sslSocket.getInputStream();
        OutputStream sslOS = sslSocket.getOutputStream();

        try {
            sslIS.read();
            sslOS.write(85);
            sslOS.flush();
        } catch (SSLHandshakeException e) {
            System.out.println(
                "Should see a null cert chain exception for server: "
                + e.toString());
        }

        sslSocket.close();
        System.out.println("Server done and exiting!");
    }

    /*
     * Define the client side of the test.
     *
     * Uses a keystore unknown to the server's trust store, so the
     * handshake is expected to fail with bad_certificate (or an
     * IOException if the server already shut down).
     */
    private void doClientSide() throws Exception {
        /*
         * Wait for server to get started.
         */
        while (!serverReady) {
            Thread.sleep(50);
        }
        System.out.println("Starting test");

        KeyStore ks = KeyStore.getInstance("JKS");
        KeyStore uks = KeyStore.getInstance("JKS");
        SSLContext ctx = SSLContext.getInstance("TLS");
        KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
        TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");

        // key material the server does NOT trust, trust material it does
        uks.load(new FileInputStream(unknownFilename), cpasswd);
        kmf.init(uks, cpasswd);
        ks.load(new FileInputStream(trustFilename), cpasswd);
        tmf.init(ks);
        ctx.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);

        SSLSocketFactory sslsf = ctx.getSocketFactory();
        SSLSocket sslSocket = (SSLSocket)
            sslsf.createSocket("localhost", serverPort);
        InputStream sslIS = sslSocket.getInputStream();
        OutputStream sslOS = sslSocket.getOutputStream();

        try {
            // NOTE(review): OutputStream.write(int) sends only the low byte,
            // so 280 is transmitted as 0x18 — presumably any byte will do here
            sslOS.write(280);
            sslOS.flush();
            sslIS.read();
            sslSocket.close();
        } catch (IOException e) {
            String str =
                "\nYou will either see a bad_certificate SSLException\n" +
                "or an IOException if the server shutdown while the\n" +
                "client was still sending the remainder of its \n" +
                "handshake data.";
            System.out.println(str + e.toString());
        }
    }

    /*
     * =============================================================
     * The remainder is just support stuff
     */

    // use any free port by default
    volatile int serverPort = 0;

    private volatile Exception serverException = null;
    private volatile Exception clientException = null;

    private final static String keyFilename =
        System.getProperty("test.src", ".") + "/" + pathToStores +
        "/" + keyStoreFile;
    private final static String trustFilename =
        System.getProperty("test.src", ".") + "/" + pathToStores +
        "/" + trustStoreFile;
    private final static String unknownFilename =
        System.getProperty("test.src", ".") + "/" + pathToStores +
        "/" + "unknown_keystore";

    // Used for running test standalone
    public static void main(String[] args) throws Exception {
        // (removed an unused local "testRoot" that duplicated the
        // System.getProperty("test.src", ".") lookup)
        System.setProperty("javax.net.ssl.keyStore", keyFilename);
        System.setProperty("javax.net.ssl.keyStorePassword", passwd);
        System.setProperty("javax.net.ssl.trustStore", trustFilename);
        System.setProperty("javax.net.ssl.trustStorePassword", passwd);

        if (DEBUG)
            System.setProperty("javax.net.debug", "all");

        /*
         * Start the tests.
         */
        new NullCerts();
    }

    private Thread clientThread = null;
    private Thread serverThread = null;

    /*
     * Primary constructor, used to drive remainder of the test.
     *
     * Fork off the other side, then do your work.
     */
    NullCerts() throws Exception {
        if (separateServerThread) {
            startServer(true);
            startClient(false);
        } else {
            startClient(true);
            startServer(false);
        }

        /*
         * Wait for other side to close down.
         */
        if (separateServerThread) {
            serverThread.join();
        } else {
            clientThread.join();
        }

        /*
         * When we get here, the test is pretty much over.
         *
         * If the main thread excepted, that propagates back
         * immediately. If the other thread threw an exception, we
         * should report back.
         */
        if (serverException != null) {
            System.err.print("Server Exception:");
            throw serverException;
        }
        if (clientException != null) {
            System.err.print("Client Exception:");
            throw clientException;
        }
    }

    private void startServer(boolean newThread) throws Exception {
        if (newThread) {
            serverThread = new Thread() {
                public void run() {
                    try {
                        doServerSide();
                    } catch (Exception e) {
                        /*
                         * Our server thread just died.
                         *
                         * Release the client, if not active already...
                         */
                        System.err.println("Server died...");
                        serverReady = true;
                        serverException = e;
                    }
                }
            };
            serverThread.start();
        } else {
            doServerSide();
        }
    }

    private void startClient(boolean newThread) throws Exception {
        if (newThread) {
            clientThread = new Thread() {
                public void run() {
                    try {
                        doClientSide();
                    } catch (Exception e) {
                        /*
                         * Our client thread just died.
                         */
                        System.err.println("Client died...");
                        clientException = e;
                    }
                }
            };
            clientThread.start();
        } else {
            doClientSide();
        }
    }
}
| |
package foodtruck.linxup;
import javax.annotation.Nullable;
import com.google.common.base.MoreObjects;
import com.javadocmd.simplelatlng.LatLng;
import org.joda.time.DateTime;
import foodtruck.model.Location;
/**
* @author aviolette
* @since 7/24/16
*/
@SuppressWarnings("WeakerAccess")
public class Position {
  // Immutable snapshot of one vehicle-tracker report (Linxup feed).
  private final DateTime date;
  private final String vehicleLabel;
  private final LatLng latLng;
  private final int direction;
  private final int speedMph;
  private final boolean speeding;
  private final int estimatedSpeedLimit;
  private final @Nullable BehaviorCode behaviorCode;
  private final String deviceNumber;
  private final String driverId;
  private final String simDeviceNumber;
  private final String deviceTypeDescription;
  private final String fuelLevel;
  private final String batteryCharge;

  private Position(Builder builder) {
    this.date = builder.date;
    this.vehicleLabel = builder.vehicleLabel;
    this.latLng = builder.latLng;
    this.direction = builder.direction;
    this.speedMph = builder.speedMph;
    this.speeding = builder.speeding;
    this.estimatedSpeedLimit = builder.estimatedSpeedLimit;
    this.behaviorCode = builder.behaviorCode;
    this.deviceNumber = builder.deviceNumber;
    this.driverId = builder.driverId;
    this.simDeviceNumber = builder.simDeviceNumber;
    this.deviceTypeDescription = builder.deviceTypeDescription;
    this.fuelLevel = builder.fuelLevel;
    this.batteryCharge = builder.batteryCharge;
  }

  /** Returns a builder with all fields unset. */
  public static Builder builder() {
    return new Builder();
  }

  /** Returns a builder pre-populated from {@code position}. */
  public static Builder builder(Position position) {
    return new Builder(position);
  }

  public DateTime getDate() {
    return date;
  }

  public String getVehicleLabel() {
    return vehicleLabel;
  }

  public LatLng getLatLng() {
    return latLng;
  }

  public int getDirection() {
    return direction;
  }

  public int getSpeedMph() {
    return speedMph;
  }

  public boolean isSpeeding() {
    return speeding;
  }

  public int getEstimatedSpeedLimit() {
    return estimatedSpeedLimit;
  }

  @Nullable
  public BehaviorCode getBehaviorCode() {
    return behaviorCode;
  }

  /** A vehicle reporting zero speed is considered parked. */
  public boolean isParked() {
    return speedMph == 0;
  }

  /** Converts this position's coordinates into the app's {@link Location} model. */
  public Location toLocation() {
    return Location.builder()
        .lat(getLatLng().getLatitude())
        .lng(getLatLng().getLongitude())
        .build();
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(this)
        .add("Date", date)
        .add("Vehicle Label", vehicleLabel)
        .add("Lat / Lng", latLng)
        .add("Direction", direction)
        .add("Speed (in mph)", speedMph)
        .add("Speeding", speeding)
        .add("Estimated speed limit", estimatedSpeedLimit)
        .add("Device ID", deviceNumber)
        .add("Device Type", deviceTypeDescription)
        .toString();
  }

  public String getDeviceNumber() {
    return deviceNumber;
  }

  public String getFuelLevel() {
    return fuelLevel;
  }

  public String getBatteryCharge() {
    return batteryCharge;
  }

  /** Builder for {@link Position}; package-private setters mirror each field. */
  public static class Builder {
    private DateTime date;
    private String vehicleLabel;
    private LatLng latLng;
    private int direction;
    private int speedMph;
    private boolean speeding;
    private int estimatedSpeedLimit;
    private @Nullable BehaviorCode behaviorCode;
    private String deviceNumber;
    private String driverId;
    private String simDeviceNumber;
    private String deviceTypeDescription;
    private String fuelLevel;
    private String batteryCharge;

    public Builder() {
    }

    public Builder(Position position) {
      this.date = position.date;
      this.vehicleLabel = position.vehicleLabel;
      this.latLng = position.latLng;
      this.direction = position.direction;
      this.speedMph = position.speedMph;
      this.speeding = position.speeding;
      this.estimatedSpeedLimit = position.estimatedSpeedLimit;
      // fixed: behaviorCode was previously dropped by this copy constructor,
      // so builder(position).build() silently lost the behavior code
      this.behaviorCode = position.behaviorCode;
      this.deviceNumber = position.deviceNumber;
      this.driverId = position.driverId;
      this.simDeviceNumber = position.simDeviceNumber;
      this.deviceTypeDescription = position.deviceTypeDescription;
      this.fuelLevel = position.fuelLevel;
      this.batteryCharge = position.batteryCharge;
    }

    public Builder date(DateTime date) {
      this.date = date;
      return this;
    }

    Builder fuelLevel(String fuelLevel) {
      this.fuelLevel = fuelLevel;
      return this;
    }

    Builder batteryCharge(String batteryCharge) {
      this.batteryCharge = batteryCharge;
      return this;
    }

    Builder deviceNumber(String deviceNumber) {
      this.deviceNumber = deviceNumber;
      return this;
    }

    Builder driverId(String driverId) {
      this.driverId = driverId;
      return this;
    }

    Builder simDeviceNumber(String deviceNumber) {
      this.simDeviceNumber = deviceNumber;
      return this;
    }

    Builder deviceTypeDescription(String deviceTypeDescription) {
      this.deviceTypeDescription = deviceTypeDescription;
      return this;
    }

    Builder speeding(boolean speeding) {
      this.speeding = speeding;
      return this;
    }

    Builder vehicleLabel(String vehicleLabel) {
      this.vehicleLabel = vehicleLabel;
      return this;
    }

    Builder latLng(LatLng position) {
      this.latLng = position;
      return this;
    }

    Builder direction(int direction) {
      this.direction = direction;
      return this;
    }

    Builder speedMph(int speedMph) {
      this.speedMph = speedMph;
      return this;
    }

    Builder estimatedSpeedLimit(int estimatedSpeedLimit) {
      this.estimatedSpeedLimit = estimatedSpeedLimit;
      return this;
    }

    Builder behaviorCode(BehaviorCode behaviorCode) {
      this.behaviorCode = behaviorCode;
      return this;
    }

    public Position build() {
      return new Position(this);
    }
  }
}
| |
// Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
package org.pantsbuild.tools.jar;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.List;
import java.util.jar.Attributes.Name;
import java.util.jar.Manifest;
import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import javax.annotation.Nullable;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.MoreObjects;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.io.Closer;
import org.kohsuke.args4j.Argument;
import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.Option;
import org.kohsuke.args4j.OptionDef;
import org.kohsuke.args4j.spi.Setter;
import org.pantsbuild.args4j.ArgfileOptionHandler;
import org.pantsbuild.args4j.CollectionOptionHandler;
import org.pantsbuild.args4j.InvalidCmdLineArgumentException;
import org.pantsbuild.args4j.Parser;
import org.pantsbuild.tools.jar.JarBuilder.DuplicateAction;
import org.pantsbuild.tools.jar.JarBuilder.DuplicateEntryException;
import org.pantsbuild.tools.jar.JarBuilder.DuplicateHandler;
import org.pantsbuild.tools.jar.JarBuilder.DuplicatePolicy;
import org.pantsbuild.tools.jar.JarBuilder.Entry;
import org.pantsbuild.tools.jar.JarBuilder.Listener;
import org.pantsbuild.tools.jar.JarBuilder.Source;
public final class Main {
public static class Options {
public static class DuplicatePolicyParser extends CollectionOptionHandler<DuplicatePolicy> {
private static final Splitter REGEX_ACTION_SPLITTER =
Splitter.on('=').trimResults().omitEmptyStrings();
public DuplicatePolicyParser(
CmdLineParser parser,
OptionDef option,
Setter<? super DuplicatePolicy> setter) {
super(parser, option, setter, "DUPLICATE_POLICY", new ItemParser<DuplicatePolicy>() {
@Override public DuplicatePolicy parse(String item) {
List<String> components = ImmutableList.copyOf(REGEX_ACTION_SPLITTER.split(item));
Preconditions.checkArgument(components.size() == 2,
"Failed to parse jar path regex/action pair %s", item);
String regex = components.get(0);
DuplicateAction action = DuplicateAction.valueOf(components.get(1));
return DuplicatePolicy.pathMatches(regex, action);
}
});
}
}
static class FileSource {
private static final Splitter JAR_PATH_SPLITTER = Splitter.on('/');
private final File source;
@Nullable private final String destination;
FileSource(File source, @Nullable String destination) {
if (!source.exists() || !source.canRead()) {
throw new IllegalArgumentException(
String.format("The source %s is not a readable path", source));
}
if (!source.isDirectory() && destination == null) {
throw new IllegalArgumentException(
String.format("The source file %s must have a jar destination specified.", source));
}
if (destination != null) {
Preconditions.checkArgument(!Strings.isNullOrEmpty(destination.trim()),
"The destination path cannot be blank");
Preconditions.checkArgument(
!destination.startsWith("/"),
"The destination path cannot be absolute, given: %s", destination);
Preconditions.checkArgument(
!ImmutableSet.copyOf(JAR_PATH_SPLITTER.split(destination)).contains(".."),
"The destination path cannot be relative, given: %s", destination);
}
this.source = source;
this.destination = destination;
}
void addTo(JarBuilder jarBuilder) {
if (source.isDirectory()) {
jarBuilder.addDirectory(source, Optional.fromNullable(destination));
} else {
jarBuilder.addFile(source, destination);
}
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("source", source)
.add("destination", destination)
.toString();
}
}
public static class FileSourceOptionHandler extends CollectionOptionHandler<FileSource> {
private static final Splitter DESTINATION_SPLITTER =
Splitter.on('=').trimResults().omitEmptyStrings();
public FileSourceOptionHandler(
CmdLineParser parser,
OptionDef option,
Setter<? super FileSource> setter) {
super(parser, option, setter, "FILE_SOURCE", new ItemParser<FileSource>() {
@Override public FileSource parse(String item) {
List<String> components = ImmutableList.copyOf(DESTINATION_SPLITTER.split(item));
Preconditions.checkArgument(1 <= components.size() && components.size() <= 2,
"Failed to parse entry %s", item);
File source = new File(components.get(0));
@Nullable String destination = components.size() == 2 ? components.get(1) : null;
return new FileSource(source, destination);
}
});
}
}
@Option(name = "-h", aliases = {"-help"}, help = true,
usage = "Display this help screen.")
private boolean help;
@Option(name = "-main",
usage = "The name of the fully qualified main class. "
+ "If a -manifest is specified its contents will be used but this -main will override "
+ "any entry already present.")
private String mainClass;
public static class ClassPathOptionHandler extends ArgfileOptionHandler<String> {
public ClassPathOptionHandler(
CmdLineParser parser,
OptionDef option,
Setter<? super String> setter) {
super(new CollectionOptionHandler<String>(
parser,
option,
setter,
"CLASS_PATH_ENTRY",
CollectionOptionHandler.ItemParser.IDENTITY));
}
}
@Option(name = "-classpath",
usage = "A list of comma-separated classpath entries. "
+ "If a -manifest is specified its contents will be used but this -classpath will "
+ "override any entry already present.",
handler = ClassPathOptionHandler.class)
private List<String> classPath = null;
private File manifest;
@Option(name = "-manifest",
usage = "A path to a manifest file to use. If -main or -classpath is specified those "
+ "values will overwrite the corresponding entry in this manifest.")
void setManifest(File manifest) {
if (manifest == null) {
throw new InvalidCmdLineArgumentException("-manifest", manifest, "Cannot be null.");
}
if (!manifest.exists()) {
throw new InvalidCmdLineArgumentException("-manifest", manifest, "Must exist.");
}
if (!manifest.isFile()) {
throw new InvalidCmdLineArgumentException("-manifest", manifest, "Must be a file.");
}
if (!manifest.canRead()) {
throw new InvalidCmdLineArgumentException("-manifest", manifest, "Must be readable.");
}
this.manifest = manifest;
}
@Option(name = "-update", usage = "Update the jar if it already exists, otherwise create it.")
private boolean update;
@Option(name = "-compress", usage = "Compress jar entries.")
private boolean compress;
public static class FilesOptionHandler extends ArgfileOptionHandler<FileSource> {
public FilesOptionHandler(
CmdLineParser parser,
OptionDef option,
Setter<? super FileSource> setter) {
super(new FileSourceOptionHandler(parser, option, setter));
}
}
@Option(name = "-files",
usage = "A mapping from filesystem paths to jar paths. The mapping is specified in the "
+ "form [fs path1](=[jar path1]),[fs path2](=[jar path2]). For example: "
+ "/etc/hosts=hosts,/var/log=logs would create a jar with a hosts file entry and the "
+ "contents of the /var/log tree added as individual entries under the logs/ directory "
+ "in the jar. For directories, the mapping can be skipped in which case the directory "
+ "tree is added as-is to the resulting jar.",
handler = FilesOptionHandler.class)
private List<FileSource> files = Lists.newArrayList();
public static class JarsOptionHandler extends ArgfileOptionHandler<File> {
public JarsOptionHandler(
CmdLineParser parser,
OptionDef option,
Setter<? super File> setter) {
super(new CollectionOptionHandler<File>(
parser,
option,
setter,
"JAR",
new CollectionOptionHandler.ItemParser<File>() {
@Override public File parse(String item) {
return new File(item);
}
}));
}
}
@Option(name = "-jars",
usage = "A list of comma-separated jar files whose entries to add to the output jar.",
handler = JarsOptionHandler.class)
private List<File> jars = Lists.newArrayList();
/**
 * Parses the comma-separated {@code -skip} option value into compiled
 * {@link Pattern}s.
 */
public static class PatternOptionHandler extends CollectionOptionHandler<Pattern> {
  public PatternOptionHandler(
      CmdLineParser parser,
      OptionDef option,
      Setter<? super Pattern> setter) {
    super(parser, option, setter, "PATTERN", new ItemParser<Pattern>() {
      @Override public Pattern parse(String item) {
        try {
          return Pattern.compile(item);
        } catch (PatternSyntaxException e) {
          // Re-wrap so a bad regex surfaces as an invalid-argument failure
          // rather than an unchecked parser crash.
          throw new IllegalArgumentException(e);
        }
      }
    });
  }
}

@Option(name = "-skip", usage = "A list of regular expressions identifying entries to skip.",
    handler = PatternOptionHandler.class)
private List<Pattern> skip = Lists.newArrayList();
// Pipe-separated names of the duplicate actions, embedded verbatim in the
// usage strings below.
private static final String ACTIONS = "SKIP|REPLACE|CONCAT|CONCAT_TEXT|THROW";

@Option(name = "-default_action",
    usage = "The default duplicate action to apply if no policies match. Can be any of "
        + ACTIONS)
private DuplicateAction defaultAction = DuplicateAction.SKIP;

@Option(name = "-policies",
    usage = "A list of duplicate policies to apply. Policies are specified as "
        + "[regex]=[action], and the action can be any one of " + ACTIONS + ". For example: "
        + "^META-INF/services/=CONCAT_TEXT would concatenate duplicate service files into one "
        + "large service file.",
    handler = DuplicatePolicyParser.class)
private List<DuplicatePolicy> policies = Lists.newArrayList();

// The single positional argument: the path of the jar to write.
@Argument(metaVar = "TARGET_JAR", usage = "The target jar file path to write.", required = true)
private File targetJar;
}
/** Tool-wide logger; console handler is installed in {@code main}. */
private static final Logger LOG = Logger.getLogger(Main.class.getName());
/**
 * A {@link Listener} that reports duplicate-entry resolutions and written
 * entries through the class logger. Resolution events (skip/replace/concat)
 * are logged at FINE; each written entry is logged at FINER underneath a
 * FINE header naming its source whenever the source changes.
 */
private static class LoggingListener implements Listener {
  private final File target;
  private Source lastSource = null;

  LoggingListener(File target) {
    this.target = target;
  }

  @Override
  public void onSkip(Optional<? extends Entry> original, Iterable<? extends Entry> skipped) {
    if (!LOG.isLoggable(Level.FINE)) {
      return;
    }
    if (original.isPresent()) {
      LOG.fine(String.format("Retaining %s and skipping %s", identify(original.get()),
          identify(skipped)));
    } else {
      LOG.fine(String.format("Skipping %s", identify(skipped)));
    }
  }

  @Override
  public void onReplace(Iterable<? extends Entry> originals, Entry replacement) {
    if (!LOG.isLoggable(Level.FINE)) {
      return;
    }
    LOG.fine(String.format("Using %s to replace %s", identify(replacement),
        identify(originals)));
  }

  @Override
  public void onConcat(String entryName, Iterable<? extends Entry> entries) {
    if (!LOG.isLoggable(Level.FINE)) {
      return;
    }
    LOG.fine(String.format("Concatenating %s!%s from %s", target.getPath(), entryName,
        identify(entries)));
  }

  @Override
  public void onWrite(Entry entry) {
    Source entrySource = entry.getSource();
    if (!entrySource.equals(lastSource)) {
      // New source: emit a header line before its entries.
      lastSource = entrySource;
      LOG.fine(entrySource.name());
    }
    LOG.log(Level.FINER, "\t{0}", entry.getName());
  }

  private static String identify(Entry entry) {
    return entry.getSource().identify(entry.getName());
  }

  // Comma-joins the identities of all given entries.
  private static String identify(Iterable<? extends Entry> entries) {
    StringBuilder joined = new StringBuilder();
    String separator = "";
    for (Entry entry : entries) {
      joined.append(separator).append(identify(entry));
      separator = ",";
    }
    return joined.toString();
  }
}
// Parsed command-line options; set once at construction.
private final Options options;

private Main(Options options) {
  this.options = options;
}
/**
 * Thrown to terminate the program with a specific process exit code and a
 * formatted, user-facing error message.
 */
static class ExitException extends Exception {
  // Exception is Serializable; declare an explicit version id.
  private static final long serialVersionUID = 1L;

  /** The process exit code to terminate with. */
  private final int code;

  /**
   * @param code The process exit code.
   * @param message A {@link String#format}-style message template.
   * @param args Arguments for the message template.
   */
  ExitException(int code, String message, Object... args) {
    super(String.format(message, args));
    this.code = code;
  }
}
/**
 * Validates option combinations, prepares the target path and writes the jar.
 *
 * @throws ExitException if the options are inconsistent, an existing target
 *     file cannot be deleted, or jar writing fails.
 */
private void run() throws ExitException {
  if (options.mainClass != null && options.manifest != null) {
    throw new ExitException(1, "Can specify main or manifest but not both.");
  }
  // Unless updating in place, any pre-existing target must be removed so the
  // jar is written from a clean slate.
  if (!options.update && options.targetJar.exists() && !options.targetJar.delete()) {
    throw new ExitException(1, "Failed to delete file at requested target path %s",
        options.targetJar);
  }
  final Closer closer = Closer.create();
  try {
    doRun(closer, options.targetJar);
  } finally {
    // Close failures are logged, not thrown, so they cannot mask an
    // ExitException propagating out of doRun.
    try {
      closer.close();
    } catch (IOException e) {
      LOG.warning("Failed to close one or more resources: " + e);
    }
  }
}
/**
 * Assembles and writes the target jar: applies the custom manifest (if any),
 * registers file and jar inputs, then writes with the duplicate policies.
 *
 * @param closer Receives the JarBuilder for cleanup by the caller.
 * @param targetJar The jar file to write.
 * @throws ExitException on manifest, duplicate-entry, or I/O failures.
 */
private void doRun(Closer closer, final File targetJar) throws ExitException {
  JarBuilder jarBuilder =
      closer.register(new JarBuilder(targetJar, new LoggingListener(targetJar)));

  try {
    // Null means no manifest-related options were given; keep the default.
    @Nullable Manifest mf = getManifest();
    if (mf != null) {
      jarBuilder.useCustomManifest(mf);
    }
  } catch (IOException e) {
    throw new ExitException(1, "Failed to configure custom manifest: %s", e);
  }

  for (Options.FileSource fileSource : options.files) {
    fileSource.addTo(jarBuilder);
  }
  for (File jar : options.jars) {
    jarBuilder.addJar(jar);
  }

  DuplicateHandler duplicateHandler =
      new DuplicateHandler(options.defaultAction, options.policies);
  try {
    jarBuilder.write(options.compress, duplicateHandler, options.skip);
  } catch (DuplicateEntryException e) {
    throw new ExitException(1, "Refusing to write duplicate entry: %s", e);
  } catch (IOException e) {
    throw new ExitException(1, "Unexpected problem writing target jar %s: %s", targetJar, e);
  }
}
// Splits a raw class-path value on the platform path separator
// (':' on POSIX, ';' on Windows), dropping empty segments.
private static final Splitter CLASS_PATH_SPLITTER =
    Splitter.on(File.pathSeparatorChar).omitEmptyStrings();

// Expands each raw class-path entry into its constituent paths.
private static final Function<String, Iterable<String>> ENTRY_TO_PATHS =
    new Function<String, Iterable<String>>() {
      @Override public Iterable<String> apply(String entry) {
        return CLASS_PATH_SPLITTER.split(entry);
      }
    };

// The jar manifest Class-Path attribute is space-separated.
private static final Joiner CLASS_PATH_JOINER = Joiner.on(' ');
/**
 * Builds the manifest to embed in the target jar, starting from the
 * {@code -manifest} file (or an empty default) and layering on the
 * {@code -main} and class-path options.
 *
 * @return The configured manifest, or {@code null} when none of the
 *     manifest-related options were supplied.
 * @throws IOException if the manifest file cannot be read.
 */
@Nullable
private Manifest getManifest() throws IOException {
  boolean noManifestOptions =
      options.manifest == null && options.mainClass == null && options.classPath == null;
  if (noManifestOptions) {
    return null;
  }

  Manifest mf = loadManifest();
  if (options.mainClass != null) {
    mf.getMainAttributes().put(Name.MAIN_CLASS, options.mainClass);
  }
  if (options.classPath != null) {
    Iterable<String> classPathEntries =
        FluentIterable.from(options.classPath).transformAndConcat(ENTRY_TO_PATHS);
    mf.getMainAttributes().put(Name.CLASS_PATH, CLASS_PATH_JOINER.join(classPathEntries));
  }
  return mf;
}
/**
 * Loads the manifest from the {@code -manifest} file if one was given,
 * otherwise starts from an empty manifest; default entries are ensured in
 * both cases.
 *
 * @return A manifest with the mandatory default entries populated.
 * @throws IOException if the manifest file cannot be read or parsed.
 */
private Manifest loadManifest() throws IOException {
  Manifest mf = new Manifest();
  if (options.manifest != null) {
    Closer closer = Closer.create();
    try {
      FileInputStream input = closer.register(new FileInputStream(options.manifest));
      mf.read(input);
    } catch (IOException e) {
      // Closer.rethrow records this as the primary failure so that a
      // secondary failure in close() cannot mask it.
      throw closer.rethrow(
          new IOException("Failed to load manifest from " + options.manifest, e));
    } finally {
      closer.close();
    }
  }
  return JarBuilder.ensureDefaultManifestEntries(mf);
}
/**
 * Creates or updates a jar with specified files, directories and jar files.
 *
 * @param args The command line arguments.
 */
public static void main(String[] args) {
  // Route java.util.logging output to the console; the handler passes
  // WARNING and above.
  ConsoleHandler handler = new ConsoleHandler();
  handler.setFormatter(new SimpleFormatter());
  handler.setLevel(Level.WARNING);
  Logger.getLogger("").addHandler(handler);

  Options options = new Options();
  Parser.Result result = Parser.parse(options, args);
  if (result.isFailure()) {
    result.printUsage(System.err);
    exit(1);
  } else if (options.help) {
    result.printUsage(System.out);
    exit(0);
  }

  Main main = new Main(options);
  try {
    main.run();
  } catch (ExitException e) {
    // Intentional termination with a user-facing message and exit code.
    System.err.println(e.getMessage());
    exit(e.code);
  }
  exit(0);
}
/** Terminates the JVM with the given process exit code. */
private static void exit(int code) {
  // We're a main - it's fine to exit.
  // SUPPRESS CHECKSTYLE RegexpSinglelineJava
  System.exit(code);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR_VALUE;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.webapp.AppsBlock;
import org.apache.hadoop.yarn.server.webapp.dao.AppInfo;
import org.apache.hadoop.yarn.webapp.View;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet.TABLE;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet.TBODY;
import com.google.inject.Inject;
/**
 * Renders the applications table for the ResourceManager web UI. Unlike the
 * generic {@link AppsBlock}, application reports are fetched live from the RM
 * (see {@link #getApplicationReport}) and augmented with RM-only details such
 * as resource usage percentages and blacklisted node counts.
 */
public class RMAppsBlock extends AppsBlock {

  private final ResourceManager rm;

  @Inject
  RMAppsBlock(ResourceManager rm, View.ViewContext ctx) {
    // The ApplicationBaseProtocol is unused (null); reports come from the
    // RM's ClientRMService via getApplicationReport below.
    super(null, ctx);
    this.rm = rm;
  }

  @Override
  protected void renderData(Block html) {
    // Table header. Each "."-prefixed selector becomes a CSS class on the
    // corresponding th cell.
    TBODY<TABLE<Hamlet>> tbody =
        html.table("#apps").thead().tr().th(".id", "ID").th(".user", "User")
          .th(".name", "Name").th(".type", "Application Type")
          .th(".queue", "Queue").th(".priority", "Application Priority")
          .th(".starttime", "StartTime")
          // Fixed: use a "." class selector like every sibling column; the
          // bare "launchtime" selector was inconsistent.
          .th(".launchtime", "LaunchTime")
          .th(".finishtime", "FinishTime").th(".state", "State")
          .th(".finalstatus", "FinalStatus")
          .th(".runningcontainer", "Running Containers")
          .th(".allocatedCpu", "Allocated CPU VCores")
          .th(".allocatedMemory", "Allocated Memory MB")
          .th(".reservedCpu", "Reserved CPU VCores")
          .th(".reservedMemory", "Reserved Memory MB")
          .th(".queuePercentage", "% of Queue")
          .th(".clusterPercentage", "% of Cluster")
          .th(".progress", "Progress")
          .th(".ui", "Tracking UI")
          .th(".blacklisted", "Blacklisted Nodes").__()
        .__().tbody();

    // Row data is emitted as a JavaScript array literal consumed client-side,
    // not as HTML rows.
    StringBuilder appsTableData = new StringBuilder("[\n");
    for (ApplicationReport appReport : appReports) {
      // TODO: remove the following condition. It is still here because
      // the history side implementation of ApplicationBaseProtocol
      // hasn't filtering capability (YARN-1819).
      if (!reqAppStates.isEmpty()
          && !reqAppStates.contains(appReport.getYarnApplicationState())) {
        continue;
      }

      AppInfo app = new AppInfo(appReport);
      ApplicationAttemptId appAttemptId = ApplicationAttemptId.fromString(
          app.getCurrentAppAttemptId());

      // Resource usage percentages are only present when the report carries a
      // usage section; otherwise render "N/A".
      String queuePercent = "N/A";
      String clusterPercent = "N/A";
      if (appReport.getApplicationResourceUsageReport() != null) {
        queuePercent = String.format("%.1f",
            appReport.getApplicationResourceUsageReport()
                .getQueueUsagePercentage());
        clusterPercent = String.format("%.1f",
            appReport.getApplicationResourceUsageReport().getClusterUsagePercentage());
      }

      // Blacklisted node count comes from the live RM attempt state, which
      // may be gone for completed/unknown apps.
      String blacklistedNodesCount = "N/A";
      RMApp rmApp = rm.getRMContext().getRMApps()
          .get(appAttemptId.getApplicationId());
      if (rmApp != null) {
        RMAppAttempt appAttempt = rmApp.getRMAppAttempt(appAttemptId);
        Set<String> nodes =
            null == appAttempt ? null : appAttempt.getBlacklistedNodes();
        if (nodes != null) {
          blacklistedNodesCount = String.valueOf(nodes.size());
        }
      }

      String percent = StringUtils.format("%.1f", app.getProgress());
      // Each row: user-controlled strings are HTML- then JS-escaped before
      // being embedded in the script literal.
      appsTableData
        .append("[\"<a href='")
        .append(url("app", app.getAppId()))
        .append("'>")
        .append(app.getAppId())
        .append("</a>\",\"")
        .append(
          StringEscapeUtils.escapeEcmaScript(
            StringEscapeUtils.escapeHtml4(app.getUser())))
        .append("\",\"")
        .append(
          StringEscapeUtils.escapeEcmaScript(
            StringEscapeUtils.escapeHtml4(app.getName())))
        .append("\",\"")
        .append(
          StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(app
            .getType())))
        .append("\",\"")
        .append(
          StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(app
            .getQueue()))).append("\",\"").append(String
            .valueOf(app.getPriority()))
        .append("\",\"").append(app.getStartedTime())
        .append("\",\"").append(app.getLaunchTime())
        .append("\",\"").append(app.getFinishedTime())
        .append("\",\"")
        .append(app.getAppState() == null ? UNAVAILABLE : app.getAppState())
        .append("\",\"")
        .append(app.getFinalAppStatus())
        .append("\",\"")
        .append(app.getRunningContainers() == -1 ? "N/A" : String
            .valueOf(app.getRunningContainers()))
        .append("\",\"")
        .append(app.getAllocatedCpuVcores() == -1 ? "N/A" : String
            .valueOf(app.getAllocatedCpuVcores()))
        .append("\",\"")
        .append(app.getAllocatedMemoryMB() == -1 ? "N/A" :
            String.valueOf(app.getAllocatedMemoryMB()))
        .append("\",\"")
        .append(app.getReservedCpuVcores() == -1 ? "N/A" : String
            .valueOf(app.getReservedCpuVcores()))
        .append("\",\"")
        .append(app.getReservedMemoryMB() == -1 ? "N/A" :
            String.valueOf(app.getReservedMemoryMB()))
        .append("\",\"")
        .append(queuePercent)
        .append("\",\"")
        .append(clusterPercent)
        .append("\",\"")
        // Progress bar
        .append("<br title='").append(percent).append("'> <div class='")
        .append(C_PROGRESSBAR).append("' title='").append(join(percent, '%'))
        .append("'> ").append("<div class='").append(C_PROGRESSBAR_VALUE)
        .append("' style='").append(join("width:", percent, '%'))
        .append("'> </div> </div>").append("\",\"<a ");

      // Apps that are NEW or have no usable tracking URL get an inert link.
      String trackingURL =
          app.getTrackingUrl() == null
              || app.getTrackingUrl().equals(UNAVAILABLE)
              || app.getAppState() == YarnApplicationState.NEW ? null : app
              .getTrackingUrl();

      String trackingUI =
          app.getTrackingUrl() == null
              || app.getTrackingUrl().equals(UNAVAILABLE)
              || app.getAppState() == YarnApplicationState.NEW ? "Unassigned"
              : app.getAppState() == YarnApplicationState.FINISHED
                  || app.getAppState() == YarnApplicationState.FAILED
                  || app.getAppState() == YarnApplicationState.KILLED ? "History"
                  : "ApplicationMaster";
      appsTableData.append(trackingURL == null ? "#" : "href='" + trackingURL)
        .append("'>").append(trackingUI).append("</a>\",").append("\"")
        .append(blacklistedNodesCount).append("\"],\n");
    }
    // Strip the trailing comma left after the last row ("...],\n").
    if (appsTableData.charAt(appsTableData.length() - 2) == ',') {
      appsTableData.delete(appsTableData.length() - 2,
          appsTableData.length() - 1);
    }
    appsTableData.append("]");
    html.script().$type("text/javascript")
        .__("var appsTableData=" + appsTableData).__();

    tbody.__().__();
  }

  /**
   * Fetches application reports live from the RM's client service.
   *
   * @param request The (possibly filtered) applications request.
   * @return The matching application reports.
   */
  @Override
  protected List<ApplicationReport> getApplicationReport(
      final GetApplicationsRequest request) throws YarnException, IOException {
    return rm.getClientRMService().getApplications(request)
        .getApplicationList();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.sources;
import org.apache.flink.annotation.Internal;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.CompositeType;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.types.AtomicDataType;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.FieldsDataType;
import org.apache.flink.table.types.logical.LegacyTypeInformationType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.LogicalTypeFamily;
import org.apache.flink.table.types.utils.DataTypeDefaultVisitor;
import org.apache.flink.table.types.utils.TypeConversions;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.hasFamily;
/**
 * Logic to validate {@link TableSource} types.
 */
@Internal
public class TableSourceValidation {

	/**
	 * Validates a TableSource.
	 *
	 * <ul>
	 * <li>checks that all fields of the schema can be resolved</li>
	 * <li>checks that resolved fields have the correct type</li>
	 * <li>checks that the time attributes are correctly configured.</li>
	 * </ul>
	 *
	 * @param tableSource The {@link TableSource} for which the time attributes are checked.
	 */
	public static void validateTableSource(TableSource<?> tableSource){
		TableSchema schema = tableSource.getTableSchema();

		List<RowtimeAttributeDescriptor> rowtimeAttributes = getRowtimeAttributes(tableSource);
		Optional<String> proctimeAttribute = getProctimeAttribute(tableSource);

		validateSingleRowtimeAttribute(rowtimeAttributes);
		validateRowtimeAttributesExistInSchema(rowtimeAttributes, schema);
		validateProctimeAttributesExistInSchema(proctimeAttribute, schema);
		validateLogicalToPhysicalMapping(tableSource, schema, rowtimeAttributes, proctimeAttribute);
		validateTimestampExtractorArguments(rowtimeAttributes, tableSource);
		validateNotOverlapping(rowtimeAttributes, proctimeAttribute);
	}

	/**
	 * Checks if the given {@link TableSource} defines a rowtime attribute.
	 *
	 * @param tableSource The table source to check.
	 * @return true if the given table source defines a rowtime attribute
	 */
	public static boolean hasRowtimeAttribute(TableSource<?> tableSource) {
		return !getRowtimeAttributes(tableSource).isEmpty();
	}

	/** Rejects table sources declaring more than one rowtime attribute. */
	private static void validateSingleRowtimeAttribute(List<RowtimeAttributeDescriptor> rowtimeAttributes) {
		if (rowtimeAttributes.size() > 1) {
			throw new ValidationException("Currently, only a single rowtime attribute is supported. " +
				"Please remove all but one RowtimeAttributeDescriptor.");
		}
	}

	/** Checks every declared rowtime attribute names an existing schema field. */
	private static void validateRowtimeAttributesExistInSchema(
			List<RowtimeAttributeDescriptor> rowtimeAttributes,
			TableSchema tableSchema) {
		rowtimeAttributes.forEach(r -> {
				if (!tableSchema.getFieldDataType(r.getAttributeName()).isPresent()) {
					throw new ValidationException(String.format(
						"Found a rowtime attribute for field '%s' but it does not exist in the Table. TableSchema: %s",
						r.getAttributeName(),
						tableSchema));
				}
			}
		);
	}

	/** Checks the declared proctime attribute (if any) names an existing schema field. */
	private static void validateProctimeAttributesExistInSchema(
			Optional<String> proctimeAttribute,
			TableSchema tableSchema) {
		proctimeAttribute.ifPresent(r -> {
				if (!tableSchema.getFieldDataType(r).isPresent()) {
					throw new ValidationException(String.format(
						"Found a proctime attribute for field '%s' but it does not exist in the Table. TableSchema: %s",
						r,
						tableSchema));
				}
			}
		);
	}

	/** Rejects a field being declared both a rowtime and the proctime attribute. */
	private static void validateNotOverlapping(
			List<RowtimeAttributeDescriptor> rowtimeAttributes,
			Optional<String> proctimeAttribute) {
		proctimeAttribute.ifPresent(proctime -> {
				if (rowtimeAttributes.stream()
					.anyMatch(rowtimeAttribute -> rowtimeAttribute.getAttributeName().equals(proctime))) {
					throw new ValidationException(String.format(
						"Field '%s' must not be processing time and rowtime attribute at the same time.",
						proctime));
				}
			}
		);
	}

	/**
	 * Validates that every logical schema field either is a correctly typed time attribute or
	 * can be resolved to a physical field of the same type; also ensures at most one field maps
	 * to an atomic (non-composite) produced type.
	 */
	private static void validateLogicalToPhysicalMapping(
			TableSource<?> tableSource,
			TableSchema schema,
			List<RowtimeAttributeDescriptor> rowtimeAttributes,
			Optional<String> proctimeAttribute) {
		// validate that schema fields can be resolved to a return type field of correct type
		int mappedFieldCnt = 0;
		for (int i = 0; i < schema.getFieldCount(); i++) {
			DataType fieldType = schema.getFieldDataType(i).get();
			LogicalType logicalFieldType = fieldType.getLogicalType();
			String fieldName = schema.getFieldName(i).get();

			if (proctimeAttribute.map(p -> p.equals(fieldName)).orElse(false)) {
				if (!(hasFamily(logicalFieldType, LogicalTypeFamily.TIMESTAMP))) {
					throw new ValidationException(String.format("Processing time field '%s' has invalid type %s. " +
						"Processing time attributes must be of type SQL_TIMESTAMP.", fieldName, logicalFieldType));
				}
			} else if (rowtimeAttributes.stream().anyMatch(p -> p.getAttributeName().equals(fieldName))) {
				if (!(hasFamily(logicalFieldType, LogicalTypeFamily.TIMESTAMP))) {
					throw new ValidationException(String.format("Rowtime time field '%s' has invalid type %s. " +
						"Rowtime time attributes must be of type SQL_TIMESTAMP.", fieldName, logicalFieldType));
				}
			} else {
				validateLogicalTypeEqualsPhysical(fieldName, fieldType, tableSource);
				mappedFieldCnt += 1;
			}
		}

		// ensure that only one field is mapped to an atomic type
		DataType producedDataType = tableSource.getProducedDataType();
		if (!isCompositeType(producedDataType) && mappedFieldCnt > 1) {
			throw new ValidationException(
				String.format(
					"More than one table field matched to atomic input type %s.",
					producedDataType));
		}
	}

	/**
	 * A produced type is composite if it is a {@link FieldsDataType} or a legacy type wrapping a
	 * {@link CompositeType} (backwards compatibility).
	 */
	private static boolean isCompositeType(DataType producedDataType) {
		LogicalType logicalType = producedDataType.getLogicalType();
		return producedDataType instanceof FieldsDataType ||
			(logicalType instanceof LegacyTypeInformationType &&
				((LegacyTypeInformationType) logicalType).getTypeInformation() instanceof CompositeType);
	}

	/**
	 * Checks that the physical field resolved for {@code fieldName} has exactly the type declared
	 * in the logical schema.
	 */
	private static void validateLogicalTypeEqualsPhysical(
			String fieldName,
			DataType logicalType,
			TableSource<?> tableSource) {
		ResolvedField resolvedField = resolveField(fieldName, tableSource);
		if (!resolvedField.getType().equals(logicalType)) {
			// Fixed: the format arguments were scrambled (the physical type was printed in place
			// of the field name, the physical type was passed twice, and the resolved physical
			// field name was never reported); also fixed the "'%s;" quoting typo.
			throw new ValidationException(String.format(
				"Type %s of table field '%s' does not " +
					"match with type %s of the field '%s' of the TableSource return type.",
				logicalType,
				fieldName,
				resolvedField.getType(),
				resolvedField.getName()));
		}
	}

	/** Validates the timestamp extractor's argument fields against their physical types. */
	private static void validateTimestampExtractorArguments(
			List<RowtimeAttributeDescriptor> descriptors,
			TableSource<?> tableSource) {
		if (descriptors.size() == 1) {
			RowtimeAttributeDescriptor descriptor = descriptors.get(0);
			// look up extractor input fields in return type
			String[] extractorInputFields = descriptor.getTimestampExtractor().getArgumentFields();
			TypeInformation[] physicalTypes = Arrays.stream(extractorInputFields)
				.map(fieldName -> resolveField(fieldName, tableSource))
				.map(resolvedField -> TypeConversions.fromDataTypeToLegacyInfo(resolvedField.getType()))
				.toArray(TypeInformation[]::new);
			// validate timestamp extractor
			descriptor.getTimestampExtractor().validateArgumentFields(physicalTypes);
		}
	}

	/** The name and type of a physical field resolved from the produced type. */
	private static class ResolvedField {
		private final String name;
		private final DataType type;

		private ResolvedField(String name, DataType type) {
			this.type = type;
			this.name = name;
		}

		public DataType getType() {
			return type;
		}

		public String getName() {
			return name;
		}
	}

	/**
	 * Identifies for a field name of the logical schema, the corresponding physical field in the
	 * return type of a {@link TableSource}.
	 *
	 * @param fieldName The logical field to look up.
	 * @param tableSource The table source in which to look for the field.
	 * @return The name, index, and type information of the physical field.
	 */
	private static ResolvedField resolveField(String fieldName, TableSource<?> tableSource) {
		DataType producedDataType = tableSource.getProducedDataType();

		if (tableSource instanceof DefinedFieldMapping) {
			Map<String, String> fieldMapping = ((DefinedFieldMapping) tableSource).getFieldMapping();
			if (fieldMapping != null) {
				String resolvedFieldName = fieldMapping.get(fieldName);
				if (resolvedFieldName == null) {
					throw new ValidationException(String.format(
						"Field '%s' could not be resolved by the field mapping.",
						fieldName));
				}
				return new ResolvedField(
					resolvedFieldName,
					lookupFieldType(
						producedDataType,
						resolvedFieldName,
						String.format(
							"Table field '%s' was resolved to TableSource return type field " +
								"'%s', but field '%s' was not found in the return " +
								"type %s of the TableSource. " +
								"Please verify the field mapping of the TableSource.",
							fieldName,
							resolvedFieldName,
							resolvedFieldName,
							producedDataType)));
			}
		}

		return new ResolvedField(
			fieldName,
			lookupFieldType(
				producedDataType,
				fieldName,
				String.format(
					"Table field '%s' was not found in the return type %s of the TableSource.",
					fieldName,
					producedDataType)));
	}

	/** Look up a field by name in a {@link DataType}. */
	private static DataType lookupFieldType(DataType inputType, String fieldName, String failMsg) {
		return inputType.accept(new TypeExtractor(fieldName)).orElseThrow(() -> new ValidationException(failMsg));
	}

	/** Resolves a named field's type inside a (possibly legacy-composite) data type. */
	private static class TypeExtractor extends DataTypeDefaultVisitor<Optional<DataType>> {
		private final String fieldName;

		TypeExtractor(String fieldName) {
			this.fieldName = fieldName;
		}

		@Override
		public Optional<DataType> visit(AtomicDataType atomicDataType) {
			// This is check for backwards compatibility. We should also support legacy type with composite type info
			LogicalType logicalType = atomicDataType.getLogicalType();
			if (logicalType instanceof LegacyTypeInformationType) {
				LegacyTypeInformationType<?> legacyTypeInformationType = (LegacyTypeInformationType<?>) logicalType;
				TypeInformation<?> typeInformation = legacyTypeInformationType.getTypeInformation();
				if (typeInformation instanceof CompositeType<?>) {
					CompositeType<?> compositeType = (CompositeType<?>) typeInformation;
					return Optional.of(TypeConversions.fromLegacyInfoToDataType(compositeType.getTypeAt(fieldName)));
				}
			}
			return Optional.of(atomicDataType);
		}

		@Override
		public Optional<DataType> visit(FieldsDataType fieldsDataType) {
			return Optional.ofNullable(fieldsDataType.getFieldDataTypes().get(fieldName));
		}

		@Override
		protected Optional<DataType> defaultMethod(DataType dataType) {
			return Optional.of(dataType);
		}
	}

	/** Returns a list with all rowtime attribute descriptors of the {@link TableSource}. */
	private static List<RowtimeAttributeDescriptor> getRowtimeAttributes(TableSource<?> tableSource) {
		if (tableSource instanceof DefinedRowtimeAttributes) {
			return ((DefinedRowtimeAttributes) tableSource).getRowtimeAttributeDescriptors();
		}
		return Collections.emptyList();
	}

	/** Returns the proctime attribute of the {@link TableSource} if it is defined. */
	private static Optional<String> getProctimeAttribute(TableSource<?> tableSource) {
		if (tableSource instanceof DefinedProctimeAttribute) {
			return Optional.ofNullable(((DefinedProctimeAttribute) tableSource).getProctimeAttribute());
		}
		return Optional.empty();
	}

	private TableSourceValidation() {
	}
}
| |
/*L
* Copyright Duke Comprehensive Cancer Center
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/catrip/LICENSE.txt for details.
*/
package gov.nih.nci.caintegrator.domain.study.bean;
import gov.nih.nci.caintegrator.domain.study.bean.*;
import java.util.*;
/**
* <!-- LICENSE_TEXT_START -->
* <!-- LICENSE_TEXT_END -->
*/
/**
* The treatment arm and other specifics regarding the participation of the Subject to a particular
* Study.
*
*/
public class StudyParticipant
implements java.io.Serializable
{
// Serialization version for this data bean.
private static final long serialVersionUID = 1234567890L;

/**
 * The subject's designation as a male or female based on a biological construct premised upon
 * biological characteristics enabling sexual reproduction. Values include: Female, Male, Unknown.
 */
private java.lang.String administrativeGenderCode;

/** @return the administrative gender code (Female, Male, Unknown). */
public java.lang.String getAdministrativeGenderCode(){
  return administrativeGenderCode;
}

/** @param administrativeGenderCode the administrative gender code to set. */
public void setAdministrativeGenderCode( java.lang.String administrativeGenderCode){
  this.administrativeGenderCode = administrativeGenderCode;
}

/**
 * Phenotype status of the subject relative to the disease of interest. Possible values:
 * CONTROL, CASE, CASE_EARLY, CASE_ADVANCED, UNKNOWN.
 */
private java.lang.String caseControlStatus;

/** @return the case/control status of the subject. */
public java.lang.String getCaseControlStatus(){
  return caseControlStatus;
}

/** @param caseControlStatus the case/control status to set. */
public void setCaseControlStatus( java.lang.String caseControlStatus){
  this.caseControlStatus = caseControlStatus;
}

/**
 * The day count at which the patient is removed from the protocol, i.e., is no longer being
 * followed and will not be retreated.
 */
private java.lang.Integer daysOffStudy;

/** @return the number of days until the patient was removed from the protocol. */
public java.lang.Integer getDaysOffStudy(){
  return daysOffStudy;
}

/** @param daysOffStudy the days-off-study count to set. */
public void setDaysOffStudy( java.lang.Integer daysOffStudy){
  this.daysOffStudy = daysOffStudy;
}

/**
 * Days on study, from entry to death or last follow-up.
 */
private java.lang.Integer daysOnStudy;

/** @return the number of days on study. */
public java.lang.Integer getDaysOnStudy(){
  return daysOnStudy;
}

/** @param daysOnStudy the days-on-study count to set. */
public void setDaysOnStudy( java.lang.Integer daysOnStudy){
  this.daysOnStudy = daysOnStudy;
}
/**
 * The patient's self-declared ethnic origination, independent of racial origination, based on
 * OMB approved categories. Values include: Hispanic Or Latino, Unknown, Not reported,
 * Not Hispanic Or Latino.
 */
private java.lang.String ethnicGroupCode;

/** @return the OMB ethnic group code. */
public java.lang.String getEthnicGroupCode(){
  return ethnicGroupCode;
}

/** @param ethnicGroupCode the OMB ethnic group code to set. */
public void setEthnicGroupCode( java.lang.String ethnicGroupCode){
  this.ethnicGroupCode = ethnicGroupCode;
}

/**
 * Indicator of whether at least one first-degree relative is affected by the disease of
 * interest.
 */
private java.lang.String familyHistory;

/** @return the family history indicator. */
public java.lang.String getFamilyHistory(){
  return familyHistory;
}

/** @param familyHistory the family history indicator to set. */
public void setFamilyHistory( java.lang.String familyHistory){
  this.familyHistory = familyHistory;
}

/**
 * Unique identifier for the instance of StudyParticipant.
 */
private java.lang.String id;

/** @return the unique identifier of this instance. */
public java.lang.String getId(){
  return id;
}

/** @param id the unique identifier to set. */
public void setId( java.lang.String id){
  this.id = id;
}

/**
 * The name of the institution where the patient is enrolled.
 */
private java.lang.String institutionName;

/** @return the enrolling institution's name. */
public java.lang.String getInstitutionName(){
  return institutionName;
}

/** @param institutionName the enrolling institution's name to set. */
public void setInstitutionName( java.lang.String institutionName){
  this.institutionName = institutionName;
}

/**
 * Indicates whether a patient is off study.
 */
private java.lang.Boolean isOffStudy;

/** @return whether the patient is off study. */
public java.lang.Boolean getIsOffStudy(){
  return isOffStudy;
}

/** @param isOffStudy the off-study flag to set. */
public void setIsOffStudy( java.lang.Boolean isOffStudy){
  this.isOffStudy = isOffStudy;
}
/**
 * The patient's self-declared racial origination, independent of ethnic origination, using OMB
 * approved categories. Values include: Not Reported, American Indian or Alaska Native, Native
 * Hawaiian or other Pacific Islander, Unknown, Asian, White, Black or African American.
 */
private java.lang.String raceCode;

/** @return the OMB race code. */
public java.lang.String getRaceCode(){
  return raceCode;
}

/** @param raceCode the OMB race code to set. */
public void setRaceCode( java.lang.String raceCode){
  this.raceCode = raceCode;
}

/**
 * The unique number assigned to identify a patient on a study.
 */
private java.lang.String studySubjectIdentifier;

/** @return the study subject identifier. */
public java.lang.String getStudySubjectIdentifier(){
  return studySubjectIdentifier;
}

/** @param studySubjectIdentifier the study subject identifier to set. */
public void setStudySubjectIdentifier( java.lang.String studySubjectIdentifier){
  this.studySubjectIdentifier = studySubjectIdentifier;
}

/**
 * Indicates whether a patient is surviving.
 */
private java.lang.String survivalStatus;

/** @return the survival status. */
public java.lang.String getSurvivalStatus(){
  return survivalStatus;
}

/** @param survivalStatus the survival status to set. */
public void setSurvivalStatus( java.lang.String survivalStatus){
  this.survivalStatus = survivalStatus;
}
/**
 * Equality is based solely on the {@link #getId() id}: two instances are equal
 * when both are {@code StudyParticipant}s carrying the same non-null id.
 * <p>
 * The identity short-circuit restores reflexivity for transient instances
 * whose id is still {@code null}: the previous id-only comparison made
 * {@code x.equals(x)} return {@code false} in that case, violating the
 * {@link Object#equals(Object)} contract (and breaking HashSet/HashMap use).
 * Remains consistent with {@link #hashCode()}, which also uses only the id.
 * @param obj the object to compare against
 * @return true if {@code obj} is this instance, or a StudyParticipant with an equal non-null id
 */
public boolean equals(Object obj){
if (this == obj) {
// reflexivity guard: required even when id is null
return true;
}
boolean eq = false;
if(obj instanceof StudyParticipant) {
StudyParticipant c =(StudyParticipant)obj;
String thisId = getId();
if(thisId != null && thisId.equals(c.getId())) {
eq = true;
}
}
return eq;
}
/**
 * Hash code derived from the {@link #getId() id}, consistent with {@link #equals(Object)}.
 * @return the id's hash code, or 0 when the id is null
 */
public int hashCode(){
String currentId = getId();
return (currentId == null) ? 0 : currentId.hashCode();
}
/**
 * Groups of subjects based on self-described ethnic groupings and phenotypic ascertainment scheme.
 *
 */
private gov.nih.nci.caintegrator.domain.study.bean.Population population;
/**
 * Groups of subjects based on self-described ethnic groupings and phenotypic ascertainment scheme.
 *
 * @return the population this participant belongs to, or null if not set
 */
public gov.nih.nci.caintegrator.domain.study.bean.Population getPopulation(){
return population;
}
/**
 * Sets the population this participant belongs to.
 * @param population the population
 */
public void setPopulation(gov.nih.nci.caintegrator.domain.study.bean.Population population){
this.population = population;
}
/**
 * A standard of basic quantity or increment represented by numeric value
 */
private gov.nih.nci.caintegrator.domain.common.bean.NumericMeasurement ageAtDeath;
/**
 * A standard of basic quantity or increment represented by numeric value
 * @return the age-at-death measurement, or null if not recorded
 */
public gov.nih.nci.caintegrator.domain.common.bean.NumericMeasurement getAgeAtDeath(){
return ageAtDeath;
}
/**
 * Sets the age-at-death measurement.
 * @param ageAtDeath the measurement
 */
public void setAgeAtDeath(gov.nih.nci.caintegrator.domain.common.bean.NumericMeasurement ageAtDeath){
this.ageAtDeath = ageAtDeath;
}
/**
 * A type of research activity that tests how well new medical treatments or other interventions work
 * in subjects. Such plans test new methods of screening, prevention, diagnosis or treatment of a disease.
 * The specific plans are fully defined in the protocol and may be carried out in a clinic or other medical
 * facility.
 *
 */
private gov.nih.nci.caintegrator.domain.study.bean.Study study;
/**
 * A type of research activity that tests how well new medical treatments or other interventions work
 * in subjects. Such plans test new methods of screening, prevention, diagnosis or treatment of a disease.
 * The specific plans are fully defined in the protocol and may be carried out in a clinic or other medical
 * facility.
 *
 * @return the study the participant is enrolled in, or null if not set
 */
public gov.nih.nci.caintegrator.domain.study.bean.Study getStudy(){
return study;
}
/**
 * Sets the study the participant is enrolled in.
 * @param study the study
 */
public void setStudy(gov.nih.nci.caintegrator.domain.study.bean.Study study){
this.study = study;
}
/**
 * The secondary unique identifier assigned to identify a patient on a study.
 */
private Set <gov.nih.nci.caintegrator.domain.study.bean.SecondaryParticipantIdentifier> secondaryParticipantIdentifierCollection = new HashSet<gov.nih.nci.caintegrator.domain.study.bean.SecondaryParticipantIdentifier>();
/**
 * The secondary unique identifier assigned to identify a patient on a study.
 * @return the secondary identifiers; never null, empty when none have been assigned
 */
public Set <gov.nih.nci.caintegrator.domain.study.bean.SecondaryParticipantIdentifier> getSecondaryParticipantIdentifierCollection(){
return secondaryParticipantIdentifierCollection;
}
/**
 * Replaces the collection of secondary identifiers.
 * @param secondaryParticipantIdentifierCollection the new collection
 */
public void setSecondaryParticipantIdentifierCollection(Set<gov.nih.nci.caintegrator.domain.study.bean.SecondaryParticipantIdentifier> secondaryParticipantIdentifierCollection){
this.secondaryParticipantIdentifierCollection = secondaryParticipantIdentifierCollection;
}
/**
 * A standard of basic quantity or increment represented by numeric value
 */
private gov.nih.nci.caintegrator.domain.common.bean.NumericMeasurement ageAtEnrollment;
/**
 * A standard of basic quantity or increment represented by numeric value
 * @return the age-at-enrollment measurement, or null if not recorded
 */
public gov.nih.nci.caintegrator.domain.common.bean.NumericMeasurement getAgeAtEnrollment(){
return ageAtEnrollment;
}
/**
 * Sets the age-at-enrollment measurement.
 * @param ageAtEnrollment the measurement
 */
public void setAgeAtEnrollment(gov.nih.nci.caintegrator.domain.common.bean.NumericMeasurement ageAtEnrollment){
this.ageAtEnrollment = ageAtEnrollment;
}
/**
 * Indicates analysis tasks in the trial such as procedure or SubstanceAdministration.
 *
 */
private Set <gov.nih.nci.caintegrator.domain.study.bean.Activity> activityCollection = new HashSet<gov.nih.nci.caintegrator.domain.study.bean.Activity>();
/**
 * Indicates analysis tasks in the trial such as procedure or SubstanceAdministration.
 *
 * @return the activities; never null, empty when none have been recorded
 */
public Set <gov.nih.nci.caintegrator.domain.study.bean.Activity> getActivityCollection(){
return activityCollection;
}
/**
 * Replaces the collection of activities.
 * @param activityCollection the new collection
 */
public void setActivityCollection(Set<gov.nih.nci.caintegrator.domain.study.bean.Activity> activityCollection){
this.activityCollection = activityCollection;
}
/**
 * A standard of basic quantity or increment represented by numeric value
 */
private gov.nih.nci.caintegrator.domain.common.bean.NumericMeasurement ageAtDiagnosis;
/**
 * A standard of basic quantity or increment represented by numeric value
 * @return the age-at-diagnosis measurement, or null if not recorded
 */
public gov.nih.nci.caintegrator.domain.common.bean.NumericMeasurement getAgeAtDiagnosis(){
return ageAtDiagnosis;
}
/**
 * Sets the age-at-diagnosis measurement.
 * @param ageAtDiagnosis the measurement
 */
public void setAgeAtDiagnosis(gov.nih.nci.caintegrator.domain.common.bean.NumericMeasurement ageAtDiagnosis){
this.ageAtDiagnosis = ageAtDiagnosis;
}
/**
 * Representation of the analysis groups such as "CEPH Population" or "Non-Tumor Samples"
 *
 */
private Set <gov.nih.nci.caintegrator.domain.analysis.snp.bean.SNPAnalysisGroup> analysisGroupCollection = new HashSet<gov.nih.nci.caintegrator.domain.analysis.snp.bean.SNPAnalysisGroup>();
/**
 * Representation of the analysis groups such as "CEPH Population" or "Non-Tumor Samples"
 *
 * @return the SNP analysis groups; never null, empty when none have been assigned
 */
public Set <gov.nih.nci.caintegrator.domain.analysis.snp.bean.SNPAnalysisGroup> getAnalysisGroupCollection(){
return analysisGroupCollection;
}
/**
 * Replaces the collection of SNP analysis groups.
 * @param analysisGroupCollection the new collection
 */
public void setAnalysisGroupCollection(Set<gov.nih.nci.caintegrator.domain.analysis.snp.bean.SNPAnalysisGroup> analysisGroupCollection){
this.analysisGroupCollection = analysisGroupCollection;
}
/**
 * A part of a thing, or of several things, removed to demonstrate or to determine the character of the
 * whole, e.g. a substance, or portion of material obtained for use in testing, examination, or study;
 * particularly, a preparation of tissue or bodily fluid taken for observation, examination or diagnosis.
 * NOTE: Can be a sample of a collection or biopsy. (arc relationship)
 *
 */
private Set <gov.nih.nci.caintegrator.domain.study.bean.Specimen> specimenCollection = new HashSet<gov.nih.nci.caintegrator.domain.study.bean.Specimen>();
/**
 * A part of a thing, or of several things, removed to demonstrate or to determine the character of the
 * whole, e.g. a substance, or portion of material obtained for use in testing, examination, or study;
 * particularly, a preparation of tissue or bodily fluid taken for observation, examination or diagnosis.
 * NOTE: Can be a sample of a collection or biopsy. (arc relationship)
 *
 * @return the specimens; never null, empty when none have been collected
 */
public Set <gov.nih.nci.caintegrator.domain.study.bean.Specimen> getSpecimenCollection(){
return specimenCollection;
}
/**
 * Replaces the collection of specimens.
 * @param specimenCollection the new collection
 */
public void setSpecimenCollection(Set<gov.nih.nci.caintegrator.domain.study.bean.Specimen> specimenCollection){
this.specimenCollection = specimenCollection;
}
/**
 * Results of a clinical analysis
 */
private Set <gov.nih.nci.caintegrator.domain.finding.clinical.bean.ClinicalFinding> clinicalFindingCollection = new HashSet<gov.nih.nci.caintegrator.domain.finding.clinical.bean.ClinicalFinding>();
/**
 * Results of a clinical analysis
 * @return the clinical findings; never null, empty when none have been recorded
 */
public Set <gov.nih.nci.caintegrator.domain.finding.clinical.bean.ClinicalFinding> getClinicalFindingCollection(){
return clinicalFindingCollection;
}
/**
 * Replaces the collection of clinical findings.
 * @param clinicalFindingCollection the new collection
 */
public void setClinicalFindingCollection(Set<gov.nih.nci.caintegrator.domain.finding.clinical.bean.ClinicalFinding> clinicalFindingCollection){
this.clinicalFindingCollection = clinicalFindingCollection;
}
/**
 * Renders the scalar state of this participant for logging/debugging.
 * Collection-valued associations are intentionally omitted to keep the
 * output short and to avoid triggering lazy loading.
 */
public String toString() {
StringBuilder text = new StringBuilder("StudyParticipant{");
text.append("administrativeGenderCode='").append(administrativeGenderCode).append('\'');
text.append(", caseControlStatus='").append(caseControlStatus).append('\'');
text.append(", daysOffStudy=").append(daysOffStudy);
text.append(", daysOnStudy=").append(daysOnStudy);
text.append(", ethnicGroupCode='").append(ethnicGroupCode).append('\'');
text.append(", familyHistory='").append(familyHistory).append('\'');
text.append(", id='").append(id).append('\'');
text.append(", institutionName='").append(institutionName).append('\'');
text.append(", isOffStudy=").append(isOffStudy);
text.append(", raceCode='").append(raceCode).append('\'');
text.append(", studySubjectIdentifier='").append(studySubjectIdentifier).append('\'');
text.append(", survivalStatus='").append(survivalStatus).append('\'');
text.append(", population=").append(population);
text.append('}');
return text.toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mahout.math;
import com.google.common.base.Preconditions;
import org.apache.mahout.math.function.Functions;
/**
 * Cholesky decomposition shamelessly ported from JAMA.
 * <p/>
 * A Cholesky decomposition of a semi-positive definite matrix A is a lower triangular matrix L such
 * that L L^* = A. If A is full rank, L is unique. If A is real, then it must be symmetric and R
 * will also be real.
 */
public class CholeskyDecomposition {
  private final PivotedMatrix L;

  // BUG FIX: must start out true and only be cleared when a degenerate
  // (rank-deficient) column is encountered during decomposition.  Previously
  // this field relied on the boolean default of false, so isPositiveDefinite()
  // reported false even for strictly positive definite inputs.
  private boolean isPositiveDefinite = true;

  /**
   * Decomposes the matrix using pivoting.
   * @param a square, symmetric, positive semi-definite matrix
   */
  public CholeskyDecomposition(Matrix a) {
    this(a, true);
  }

  /**
   * Decomposes the matrix.
   * @param a square, symmetric, positive semi-definite matrix
   * @param pivot whether to pivot rows/columns for numerical robustness
   * @throws IllegalArgumentException if {@code a} is not square or (with pivoting)
   *     has a decidedly negative diagonal element
   */
  public CholeskyDecomposition(Matrix a, boolean pivot) {
    int rows = a.rowSize();
    L = new PivotedMatrix(new DenseMatrix(rows, rows));
    // must be square
    Preconditions.checkArgument(rows == a.columnSize());
    if (pivot) {
      decomposeWithPivoting(a);
    } else {
      decompose(a);
    }
  }

  // pivoted column-wise submatrix cholesky with simple (largest-diagonal) pivoting
  private void decomposeWithPivoting(Matrix a) {
    int n = a.rowSize();
    L.assign(a);
    // running magnitude of the largest diagonal element seen, used to scale epsilon
    double uberMax = L.viewDiagonal().aggregate(Functions.MAX, Functions.ABS);
    for (int k = 0; k < n; k++) {
      // choose the largest remaining diagonal element as the pivot
      double max = 0;
      int pivot = k;
      for (int j = k; j < n; j++) {
        if (L.get(j, j) > max) {
          max = L.get(j, j);
          pivot = j;
          if (uberMax < Math.abs(max)) {
            uberMax = Math.abs(max);
          }
        }
      }
      L.swap(k, pivot);
      double akk = L.get(k, k);
      double epsilon = 1.0e-10 * Math.max(uberMax, L.viewColumn(k).aggregate(Functions.MAX, Functions.ABS));
      if (akk < -epsilon) {
        // can't have decidedly negative element on diagonal
        throw new IllegalArgumentException("Matrix is not positive semi-definite");
      } else if (akk <= epsilon) {
        // degenerate column case: zero the column and remember the rank deficiency
        L.viewColumn(k).assign(0);
        isPositiveDefinite = false;
        // no need to subtract from remaining sub-matrix
      } else {
        // normalize column by diagonal element
        akk = Math.sqrt(Math.max(0, akk));
        L.viewColumn(k).viewPart(k, n - k).assign(Functions.div(akk));
        L.viewColumn(k).viewPart(0, k).assign(0);
        // subtract off scaled version of this column to the right
        for (int j = k + 1; j < n; j++) {
          Vector columnJ = L.viewColumn(j).viewPart(k, n - k);
          Vector columnK = L.viewColumn(k).viewPart(k, n - k);
          columnJ.assign(columnK, Functions.minusMult(columnK.get(j - k)));
        }
      }
    }
  }

  // column-wise submatrix cholesky without pivoting
  private void decompose(Matrix a) {
    int n = a.rowSize();
    L.assign(a);
    for (int k = 0; k < n; k++) {
      double akk = L.get(k, k);
      // set upper part of column to 0.
      L.viewColumn(k).viewPart(0, k).assign(0);
      double epsilon = 1.0e-10 * L.viewColumn(k).aggregate(Functions.MAX, Functions.ABS);
      if (akk <= epsilon) {
        // degenerate column case: zero the rest of the column and remember the
        // rank deficiency.  (The original comment claimed the diagonal was set
        // to 1, which did not match the code.)
        L.viewColumn(k).viewPart(k, n - k).assign(0);
        isPositiveDefinite = false;
        // no need to subtract from remaining sub-matrix
      } else {
        // normalize column by diagonal element
        akk = Math.sqrt(Math.max(0, akk));
        L.set(k, k, akk);
        L.viewColumn(k).viewPart(k + 1, n - k - 1).assign(Functions.div(akk));
        // now subtract scaled version of column
        for (int j = k + 1; j < n; j++) {
          Vector columnJ = L.viewColumn(j).viewPart(j, n - j);
          Vector columnK = L.viewColumn(k).viewPart(j, n - j);
          columnJ.assign(columnK, Functions.minusMult(L.get(j, k)));
        }
      }
    }
  }

  /**
   * @return true when no degenerate (near-zero) column was met during decomposition
   */
  public boolean isPositiveDefinite() {
    return isPositiveDefinite;
  }

  /**
   * @return the lower triangular factor, without undoing the pivoting
   */
  public Matrix getL() {
    return L.getBase();
  }

  /**
   * @return the pivoted (permuted) lower triangular factor
   */
  public PivotedMatrix getPermutedL() {
    return L;
  }

  /**
   * @return Returns the permutation of rows and columns that was applied to L
   */
  public int[] getPivot() {
    return L.getRowPivot();
  }

  /**
   * @return the inverse of the permutation returned by {@link #getPivot()}
   */
  public int[] getInversePivot() {
    return L.getInverseRowPivot();
  }

  /**
   * Compute inv(L) * z efficiently.
   *
   * @param z the right-hand side matrix
   * @return inv(L) * z
   */
  public Matrix solveLeft(Matrix z) {
    int n = L.columnSize();
    int nx = z.columnSize();
    Matrix X = new DenseMatrix(n, z.columnSize());
    X.assign(z);
    // Solve L*Y = Z using back-substitution
    // note that k and i have to go in a funny order because L is pivoted
    for (int internalK = 0; internalK < n; internalK++) {
      int k = L.rowUnpivot(internalK);
      for (int j = 0; j < nx; j++) {
        for (int internalI = 0; internalI < internalK; internalI++) {
          int i = L.rowUnpivot(internalI);
          X.set(k, j, X.get(k, j) - X.get(i, j) * L.get(k, i));
        }
        if (L.get(k, k) != 0) {
          X.set(k, j, X.get(k, j) / L.get(k, k));
        } else {
          // zero pivot comes from a degenerate column; project onto 0
          X.set(k, j, 0);
        }
      }
    }
    return X;
  }

  /**
   * Compute z * inv(L') efficiently
   * @param z the left-hand side matrix
   * @return z * inv(L')
   */
  public Matrix solveRight(Matrix z) {
    int n = z.columnSize();
    int nx = z.rowSize();
    Matrix x = new DenseMatrix(z.rowSize(), z.columnSize());
    x.assign(z);
    // Solve Y*L' = Z using back-substitution
    for (int internalK = 0; internalK < n; internalK++) {
      int k = L.rowUnpivot(internalK);
      for (int j = 0; j < nx; j++) {
        for (int internalI = 0; internalI < k; internalI++) {
          int i = L.rowUnpivot(internalI);
          x.set(j, k, x.get(j, k) - x.get(j, i) * L.get(k, i));
          if (Double.isInfinite(x.get(j, k)) || Double.isNaN(x.get(j, k))) {
            throw new IllegalStateException(String.format("Invalid value found at %d,%d (should not be possible)", j, k));
          }
        }
        if (L.get(k, k) != 0) {
          x.set(j, k, x.get(j, k) / L.get(k, k));
        } else {
          // zero pivot comes from a degenerate column; project onto 0
          x.set(j, k, 0);
        }
        if (Double.isInfinite(x.get(j, k)) || Double.isNaN(x.get(j, k))) {
          throw new IllegalStateException(String.format("Invalid value found at %d,%d (should not be possible)", j, k));
        }
      }
    }
    return x;
  }
}
| |
/**
* Copyright 2011-2017 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.windgate.jdbc;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
/**
 * Keep a connection of H2 'in memory' Database.
 * Wired into the JUnit lifecycle as a {@link TestWatcher} rule: the database
 * lives from {@link #starting(Description)} until {@link #finished(Description)}.
 */
public class H2Resource extends TestWatcher {

    private final String name;

    private Class<?> context;

    private Connection connection;

    /**
     * Creates a new instance.
     * The target URL will be {@code "jdbc:h2:mem:<name>"}.
     * @param name simple name of database
     */
    public H2Resource(String name) {
        this.name = name;
    }

    @Override
    protected void starting(Description description) {
        org.h2.Driver.load();
        this.context = description.getTestClass();
        this.connection = open();
        boolean green = false;
        try {
            leakcheck();
            before();
            green = true;
        } catch (Exception e) {
            throw new AssertionError(e);
        } finally {
            // if setup failed, release the connection immediately so the
            // in-memory database does not leak into the next test
            if (!green) {
                finished(description);
            }
        }
    }

    // fails fast when a previous test leaked its in-memory database: creating
    // this marker table twice in the same database raises an SQLException
    private void leakcheck() {
        try {
            execute0("CREATE TABLE H2_TEST_DUPCHECK (SID IDENTITY PRIMARY KEY)");
        } catch (SQLException e) {
            throw new AssertionError(e);
        }
    }

    /**
     * runs before executes each test.
     * @throws Exception if failed
     */
    protected void before() throws Exception {
        return;
    }

    /**
     * Creates a new connection.
     * @return the created connection
     */
    public Connection open() {
        try {
            return DriverManager.getConnection(getJdbcUrl());
        } catch (SQLException e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Returns the target URL.
     * @return target URL
     */
    public String getJdbcUrl() {
        return "jdbc:h2:mem:" + name;
    }

    /**
     * Returns query result columns list.
     * @param sql target SQL
     * @return result rows list that contains columns array
     */
    public List<List<Object>> query(String sql) {
        try {
            return query0(sql);
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Returns the columns of the single row produced by the query,
     * asserting that exactly one row was returned.
     * @param sql target SQL
     * @return the columns of the only result row
     */
    public List<Object> single(String sql) {
        try {
            List<List<Object>> query = query0(sql);
            assertThat(sql, query.size(), is(1));
            return query.get(0);
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Count rows in the table.
     * @param table target table
     * @return number of row in the table, or -1 if failed
     */
    public int count(String table) {
        try {
            List<List<Object>> r = query0(MessageFormat.format("SELECT COUNT(*) FROM {0}", table));
            if (r.size() != 1) {
                return -1;
            }
            return ((Number) r.get(0).get(0)).intValue();
        } catch (Exception e) {
            e.printStackTrace();
            return -1;
        }
    }

    private List<List<Object>> query0(String sql) throws SQLException {
        // ResultSet is now closed explicitly via try-with-resources rather than
        // relying on Statement.close() to cascade
        try (Statement s = connection.createStatement();
                ResultSet rs = s.executeQuery(sql)) {
            ResultSetMetaData meta = rs.getMetaData();
            int size = meta.getColumnCount();
            List<List<Object>> results = new ArrayList<>();
            while (rs.next()) {
                Object[] columns = new Object[size];
                for (int i = 0; i < size; i++) {
                    columns[i] = rs.getObject(i + 1);
                }
                results.add(Arrays.asList(columns));
            }
            return results;
        }
    }

    /**
     * Executes DML.
     * @param sql DML
     */
    public void execute(String sql) {
        try {
            execute0(sql);
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    private void execute0(String sql) throws SQLException {
        try (PreparedStatement ps = connection.prepareStatement(sql)) {
            ps.execute();
            connection.commit();
        }
    }

    /**
     * Executes DML in target file.
     * @param sqlFile resource file
     */
    public void executeFile(String sqlFile) {
        String content = load(sqlFile);
        execute(content);
    }

    // loads a classpath resource (resolved relative to the test class) as UTF-8 text
    private String load(String resource) {
        try (InputStream source = context.getResourceAsStream(resource)) {
            assertThat(resource, source, is(not(nullValue())));
            StringBuilder buf = new StringBuilder();
            try (Reader reader = new InputStreamReader(source, "UTF-8")) {
                char[] cbuf = new char[1024];
                while (true) {
                    int read = reader.read(cbuf);
                    if (read < 0) {
                        break;
                    }
                    buf.append(cbuf, 0, read);
                }
            }
            return buf.toString();
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    @Override
    public void finished(Description description) {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException e) {
                throw new AssertionError(e);
            } finally {
                // guard against double-close: starting() invokes this method on
                // setup failure, and JUnit invokes it again afterwards
                connection = null;
            }
        }
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.document;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.flush.FlushResponse;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Strings;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import static org.elasticsearch.action.DocWriteRequest.OpType;
import static org.elasticsearch.client.Requests.clearIndicesCacheRequest;
import static org.elasticsearch.client.Requests.getRequest;
import static org.elasticsearch.client.Requests.indexRequest;
import static org.elasticsearch.client.Requests.refreshRequest;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
/**
 * Integration test for document action like index, bulk, and get. It has a very long history: it was in the second commit of Elasticsearch.
 */
public class DocumentActionsIT extends ESIntegTestCase {
// creates the test index with a stored keyword mapping on "name";
// subclasses can override getConcreteIndexName() to retarget the test
protected void createIndex() {
ElasticsearchAssertions.assertAcked(prepareCreate(getConcreteIndexName()).setMapping("name", "type=keyword,store=true"));
}
// name of the index all assertions run against
protected String getConcreteIndexName() {
return "test";
}
// exercises the single-document lifecycle: index, refresh, get (source and
// stored fields), delete, re-index, flush and count
public void testIndexActions() throws Exception {
createIndex();
NumShards numShards = getNumShards(getConcreteIndexName());
logger.info("Running Cluster Health");
ensureGreen();
logger.info("Indexing [type1/1]");
IndexResponse indexResponse = client().prepareIndex()
.setIndex("test")
.setId("1")
.setSource(source("1", "test"))
.setRefreshPolicy(RefreshPolicy.IMMEDIATE)
.get();
assertThat(indexResponse.getIndex(), equalTo(getConcreteIndexName()));
assertThat(indexResponse.getId(), equalTo("1"));
logger.info("Refreshing");
RefreshResponse refreshResponse = refresh();
assertThat(refreshResponse.getSuccessfulShards(), equalTo(numShards.totalNumShards));
logger.info("--> index exists?");
assertThat(indexExists(getConcreteIndexName()), equalTo(true));
logger.info("--> index exists?, fake index");
assertThat(indexExists("test1234565"), equalTo(false));
logger.info("Clearing cache");
ClearIndicesCacheResponse clearIndicesCacheResponse = client().admin()
.indices()
.clearCache(clearIndicesCacheRequest("test").fieldDataCache(true).queryCache(true))
.actionGet();
assertNoFailures(clearIndicesCacheResponse);
assertThat(clearIndicesCacheResponse.getSuccessfulShards(), equalTo(numShards.totalNumShards));
logger.info("Force Merging");
waitForRelocation(ClusterHealthStatus.GREEN);
ForceMergeResponse mergeResponse = forceMerge();
assertThat(mergeResponse.getSuccessfulShards(), equalTo(numShards.totalNumShards));
GetResponse getResult;
logger.info("Get [type1/1]");
// repeated gets: both the prepareGet builder and the getRequest API
// must return the same source on every cycle
for (int i = 0; i < 5; i++) {
getResult = client().prepareGet("test", "1").execute().actionGet();
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test"))));
assertThat("cycle(map) #" + i, (String) getResult.getSourceAsMap().get("name"), equalTo("test"));
getResult = client().get(getRequest("test").id("1")).actionGet();
assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test"))));
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
}
logger.info("Get [type1/1] with script");
// fetching only the stored field "name" must not return the _source bytes
for (int i = 0; i < 5; i++) {
getResult = client().prepareGet("test", "1").setStoredFields("name").execute().actionGet();
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
assertThat(getResult.isExists(), equalTo(true));
assertThat(getResult.getSourceAsBytes(), nullValue());
assertThat(getResult.getField("name").getValues().get(0).toString(), equalTo("test"));
}
logger.info("Get [type1/2] (should be empty)");
for (int i = 0; i < 5; i++) {
getResult = client().get(getRequest("test").id("2")).actionGet();
assertThat(getResult.isExists(), equalTo(false));
}
logger.info("Delete [type1/1]");
DeleteResponse deleteResponse = client().prepareDelete("test", "1").execute().actionGet();
assertThat(deleteResponse.getIndex(), equalTo(getConcreteIndexName()));
assertThat(deleteResponse.getId(), equalTo("1"));
logger.info("Refreshing");
client().admin().indices().refresh(refreshRequest("test")).actionGet();
logger.info("Get [type1/1] (should be empty)");
// the deleted document must stay invisible after the refresh
for (int i = 0; i < 5; i++) {
getResult = client().get(getRequest("test").id("1")).actionGet();
assertThat(getResult.isExists(), equalTo(false));
}
logger.info("Index [type1/1]");
client().index(indexRequest("test").id("1").source(source("1", "test"))).actionGet();
logger.info("Index [type1/2]");
client().index(indexRequest("test").id("2").source(source("2", "test2"))).actionGet();
logger.info("Flushing");
FlushResponse flushResult = client().admin().indices().prepareFlush("test").execute().actionGet();
assertThat(flushResult.getSuccessfulShards(), equalTo(numShards.totalNumShards));
assertThat(flushResult.getFailedShards(), equalTo(0));
logger.info("Refreshing");
client().admin().indices().refresh(refreshRequest("test")).actionGet();
logger.info("Get [type1/1] and [type1/2]");
for (int i = 0; i < 5; i++) {
getResult = client().get(getRequest("test").id("1")).actionGet();
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test"))));
getResult = client().get(getRequest("test").id("2")).actionGet();
String ste1 = getResult.getSourceAsString();
String ste2 = Strings.toString(source("2", "test2"));
assertThat("cycle #" + i, ste1, equalTo(ste2));
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
}
logger.info("Count");
// check count
for (int i = 0; i < 5; i++) {
// test successful
SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).execute().actionGet();
assertNoFailures(countResponse);
assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L));
assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries));
assertThat(countResponse.getFailedShards(), equalTo(0));
// count with no query is a match all one
countResponse = client().prepareSearch("test").setSize(0).execute().actionGet();
assertThat(
"Failures " + countResponse.getShardFailures(),
countResponse.getShardFailures() == null ? 0 : countResponse.getShardFailures().length,
equalTo(0)
);
assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L));
assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries));
assertThat(countResponse.getFailedShards(), equalTo(0));
}
}
// exercises a mixed bulk request (index, create, auto-id, delete, and one
// deliberately malformed item) and verifies per-item results and visibility
public void testBulk() throws Exception {
createIndex();
NumShards numShards = getNumShards(getConcreteIndexName());
logger.info("-> running Cluster Health");
ensureGreen();
BulkResponse bulkResponse = client().prepareBulk()
.add(client().prepareIndex().setIndex("test").setId("1").setSource(source("1", "test")))
.add(client().prepareIndex().setIndex("test").setId("2").setSource(source("2", "test")).setCreate(true))
.add(client().prepareIndex().setIndex("test").setSource(source("3", "test")))
.add(client().prepareIndex().setIndex("test").setCreate(true).setSource(source("4", "test")))
.add(client().prepareDelete().setIndex("test").setId("1"))
.add(client().prepareIndex().setIndex("test").setSource("{ xxx }", XContentType.JSON)) // failure
.execute()
.actionGet();
// only the malformed item (index 5) may fail; each other item is checked below
assertThat(bulkResponse.hasFailures(), equalTo(true));
assertThat(bulkResponse.getItems().length, equalTo(6));
assertThat(bulkResponse.getItems()[0].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[0].getOpType(), equalTo(OpType.INDEX));
assertThat(bulkResponse.getItems()[0].getIndex(), equalTo(getConcreteIndexName()));
assertThat(bulkResponse.getItems()[0].getId(), equalTo("1"));
assertThat(bulkResponse.getItems()[1].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[1].getOpType(), equalTo(OpType.CREATE));
assertThat(bulkResponse.getItems()[1].getIndex(), equalTo(getConcreteIndexName()));
assertThat(bulkResponse.getItems()[1].getId(), equalTo("2"));
assertThat(bulkResponse.getItems()[2].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[2].getOpType(), equalTo(OpType.INDEX));
assertThat(bulkResponse.getItems()[2].getIndex(), equalTo(getConcreteIndexName()));
String generatedId3 = bulkResponse.getItems()[2].getId();
assertThat(bulkResponse.getItems()[3].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[3].getOpType(), equalTo(OpType.CREATE));
assertThat(bulkResponse.getItems()[3].getIndex(), equalTo(getConcreteIndexName()));
String generatedId4 = bulkResponse.getItems()[3].getId();
assertThat(bulkResponse.getItems()[4].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[4].getOpType(), equalTo(OpType.DELETE));
assertThat(bulkResponse.getItems()[4].getIndex(), equalTo(getConcreteIndexName()));
assertThat(bulkResponse.getItems()[4].getId(), equalTo("1"));
assertThat(bulkResponse.getItems()[5].isFailed(), equalTo(true));
assertThat(bulkResponse.getItems()[5].getOpType(), equalTo(OpType.INDEX));
assertThat(bulkResponse.getItems()[5].getIndex(), equalTo(getConcreteIndexName()));
waitForRelocation(ClusterHealthStatus.GREEN);
RefreshResponse refreshResponse = client().admin().indices().prepareRefresh("test").execute().actionGet();
assertNoFailures(refreshResponse);
assertThat(refreshResponse.getSuccessfulShards(), equalTo(numShards.totalNumShards));
// doc 1 was indexed then deleted in the same bulk; 2 and the two
// auto-generated ids must be retrievable with their original sources
for (int i = 0; i < 5; i++) {
GetResponse getResult = client().get(getRequest("test").id("1")).actionGet();
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
assertThat("cycle #" + i, getResult.isExists(), equalTo(false));
getResult = client().get(getRequest("test").id("2")).actionGet();
assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("2", "test"))));
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
getResult = client().get(getRequest("test").id(generatedId3)).actionGet();
assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("3", "test"))));
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
getResult = client().get(getRequest("test").id(generatedId4)).actionGet();
assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("4", "test"))));
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
}
}
// builds the canonical test document {"id": <id>, "name": <nameValue>}
private XContentBuilder source(String id, String nameValue) throws IOException {
return XContentFactory.jsonBuilder().startObject().field("id", id).field("name", nameValue).endObject();
}
}
| |
package com.linkedin.databus.core.monitoring.mbean;
/*
*
* Copyright 2013 LinkedIn Corp. All rights reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import java.io.IOException;
import java.io.OutputStream;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.concurrent.locks.Lock;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import org.apache.avro.io.JsonEncoder;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.log4j.Logger;
import com.linkedin.databus.core.DbusEvent;
import com.linkedin.databus.core.DbusEventInternalReadable;
import com.linkedin.databus.core.DbusEventInternalReadable.EventScanStatus;
import com.linkedin.databus.core.monitoring.events.DbusEventsTotalStatsEvent;
/**
 * Monitoring MBean that tracks aggregate statistics about Databus events
 * (counts, sizes, SCN window ranges, timestamps, lag, and scan errors) for a
 * single "dimension" (e.g. one buffer or connection). The mutable state lives
 * in the Avro-generated {@code _event} record inherited from
 * {@link AbstractMonitoringMBean}; accesses are guarded by the read/write
 * locks the superclass provides when constructed thread-safe.
 */
public class DbusEventsTotalStats extends AbstractMonitoringMBean<DbusEventsTotalStatsEvent>
                                  implements DbusEventsTotalStatsMBean
{
  public static final String MODULE = DbusEventsTotalStats.class.getName();
  // Distinct peer ids seen so far; its size is mirrored into _event.numPeers.
  private final HashSet<Object> _peers;
  // Sanitized name identifying what these stats describe; also stored in _event.
  protected final String _dimension;
  private final Logger _log;

  /**
   * @param ownerId    id of the owning component, recorded in the stats event
   * @param dimension  stats dimension name; sanitized before storing
   * @param enabled    whether stats collection starts enabled
   * @param threadSafe whether superclass locks guard access
   * @param initData   initial stats payload passed to the superclass
   */
  public DbusEventsTotalStats(int ownerId, String dimension,
                              boolean enabled, boolean threadSafe,
                              DbusEventsTotalStatsEvent initData)
  {
    super(enabled, threadSafe, initData);
    _dimension = AbstractMonitoringMBean.sanitizeString(dimension);
    _event.ownerId = ownerId;
    _event.dimension = _dimension;
    _peers = new HashSet<Object>(1000);
    _event.timestampCreated= System.currentTimeMillis();
    // Logger name includes the (unsanitized) dimension so each instance logs separately.
    _log = Logger.getLogger(MODULE + "." + dimension);
    reset();
  }

  /** Resets the buffer-window metrics (SCN range and free-space gauges). */
  private void resetBufferStats()
  {
    _event.minWinScn = Long.MAX_VALUE;
    _event.maxWinScn = 0;
    _event.sinceWinScn = Long.MAX_VALUE;
    _event.numFreeBytes = 0;
    _event.timestampMinScnEvent = DEFAULT_MIN_LONG_VALUE;
    _event.timestampMaxScnEvent = DEFAULT_MAX_LONG_VALUE;
  }

  /** Creates a copy of this bean (same owner/dimension) with a snapshot of the current stats. */
  public DbusEventsTotalStats clone(boolean threadSafe)
  {
    return new DbusEventsTotalStats(_event.ownerId, _dimension, _enabled.get(), threadSafe,
                                    getStatistics(null));
  }

  // NOTE(review): reads _event without taking the read lock, unlike the other getters —
  // confirm callers tolerate a possibly stale/torn read here.
  public long getTimestampMaxScnEvent() {
    return _event.timestampMaxScnEvent;
  }

  /** Number of distinct peers registered via {@link #registerPeer(String)}. */
  @Override
  public int getNumPeers()
  {
    Lock readLock = acquireReadLock();
    int result = 0;
    try
    {
      result = _event.numPeers;
    }
    finally
    {
      releaseLock(readLock);
    }
    return result;
  }

  /** Total count of data (non-system) events registered. */
  @Override
  public long getNumDataEvents()
  {
    Lock readLock = acquireReadLock();
    long result = 0;
    try
    {
      result = _event.numDataEvents;
    }
    finally
    {
      releaseLock(readLock);
    }
    return result;
  }

  /** Total count of data events that passed the filter. */
  @Override
  public long getNumDataEventsFiltered()
  {
    Lock readLock = acquireReadLock();
    long result = 0;
    try
    {
      result = _event.numDataEventsFiltered;
    }
    finally
    {
      releaseLock(readLock);
    }
    return result;
  }

  /** Milliseconds elapsed since the last {@code reset()}. */
  @Override
  public long getTimeSinceLastResetMs()
  {
    Lock readLock = acquireReadLock();
    long result = 0;
    try
    {
      result = System.currentTimeMillis() - _event.timestampLastResetMs;
    }
    finally
    {
      releaseLock(readLock);
    }
    return result;
  }

  /** Wall-clock timestamp (ms) of the last {@code reset()}. */
  @Override
  public long getTimestampLastResetMs()
  {
    Lock readLock = acquireReadLock();
    long result = 0;
    try
    {
      result = _event.timestampLastResetMs;
    }
    finally
    {
      releaseLock(readLock);
    }
    return result;
  }

  /** Highest window SCN observed among registered events. */
  @Override
  public long getMaxSeenWinScn()
  {
    Lock readLock = acquireReadLock();
    try
    {
      return _event.maxSeenWinScn;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  /** Highest window SCN observed among filtered events. */
  @Override
  public long getMaxFilteredWinScn()
  {
    Lock readLock = acquireReadLock();
    try
    {
      return _event.maxFilteredWinScn;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  /** Lowest window SCN observed among registered events. */
  @Override
  public long getMinSeenWinScn()
  {
    Lock readLock = acquireReadLock();
    try
    {
      return _event.minSeenWinScn;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  /** Average (not total) size in bytes of a data event; 0 if none registered. */
  @Override
  public long getSizeDataEvents()
  {
    Lock readLock = acquireReadLock();
    long result = 0;
    try
    {
      long num = _event.numDataEvents;
      result = (0 == num) ? 0 : _event.sizeDataEvents / num;
    }
    finally
    {
      releaseLock(readLock);
    }
    return result;
  }

  /** Average payload size in bytes of a data event; 0 if none registered. */
  @Override
  public long getSizeDataEventsPayload()
  {
    Lock readLock = acquireReadLock();
    long result = 0;
    try
    {
      long num = _event.numDataEvents;
      result = (0 == num) ? 0 : _event.sizeDataEventsPayload / num;
    }
    finally
    {
      releaseLock(readLock);
    }
    return result;
  }

  /** Average size in bytes of a filtered data event; 0 if none. */
  @Override
  public long getSizeDataEventsFiltered()
  {
    Lock readLock = acquireReadLock();
    try
    {
      long num = _event.numDataEventsFiltered;
      return (0 == num) ? 0 : _event.sizeDataEventsFiltered / num;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  // NOTE(review): unlike the sibling "size" getters this returns the TOTAL filtered
  // payload bytes, not the per-event average — confirm whether that is intended.
  @Override
  public long getSizeDataEventsPayloadFiltered()
  {
    Lock readLock = acquireReadLock();
    long result = 0;
    try
    {
      result = _event.sizeDataEventsPayloadFiltered;
    }
    finally
    {
      releaseLock(readLock);
    }
    return result;
  }

  /** Total count of system (control) events registered. */
  @Override
  public long getNumSysEvents()
  {
    Lock readLock = acquireReadLock();
    try
    {
      return _event.numSysEvents;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  /** Average size in bytes of a system event; 0 if none registered. */
  @Override
  public long getSizeSysEvents()
  {
    Lock readLock = acquireReadLock();
    try
    {
      long num = _event.numSysEvents;
      return (0 == num) ? 0 : _event.sizeSysEvents / num;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  /**
   * Records one data event: updates counts, sizes, latency accumulator, max/min
   * window SCNs and the time lag relative to the event's own timestamp.
   * No-op when stats collection is disabled.
   */
  public void registerDataEvent(DbusEventInternalReadable e)
  {
    if (! _enabled.get()) return;
    Lock writeLock = acquireWriteLock();
    try
    {
      //ms
      long eventTsInMs = e.timestampInNanos()/(1000*1000);
      long now = System.currentTimeMillis();
      _event.timestampMaxScnEvent = Math.max(_event.timestampMaxScnEvent,eventTsInMs);
      _event.timestampAccessed = now;
      // Accumulate per-event latency (now - event timestamp); clamped at 0 to guard
      // against event timestamps in the future (clock skew).
      _event.latencyEvent += (_event.timestampAccessed > eventTsInMs) ? _event.timestampAccessed - eventTsInMs : 0;
      _event.numDataEvents++;
      _event.sizeDataEvents += e.size();
      _event.sizeDataEventsPayload += e.payloadLength();
      if (e.sequence() > _event.maxSeenWinScn)
      {
        // We have a new max event
        _event.maxSeenWinScn = e.sequence();
        _event.timeLag = (now > eventTsInMs) ? now - eventTsInMs : 0;
      }
      _event.minSeenWinScn = minValue(_event.minSeenWinScn,e.sequence());
    }
    finally
    {
      releaseLock(writeLock);
    }
  }

  /** Records one filtered data event (counts, sizes, max filtered SCN). No-op when disabled. */
  public void registerDataEventFiltered(DbusEventInternalReadable e)
  {
    if (! _enabled.get()) return;
    Lock writeLock = acquireWriteLock();
    try
    {
      _event.numDataEventsFiltered++;
      _event.sizeDataEventsFiltered += e.size();
      _event.sizeDataEventsPayloadFiltered += e.payloadLength();
      _event.maxFilteredWinScn = Math.max(_event.maxFilteredWinScn,e.sequence());
    }
    finally
    {
      releaseLock(writeLock);
    }
  }

  /**
   * Records one system event. End-of-period markers additionally update the
   * SCN window extremes and the time lag, same as a data event would.
   * No-op when stats collection is disabled.
   */
  public void registerSysEvent(DbusEvent e)
  {
    if (! _enabled.get()) return;
    Lock writeLock = acquireWriteLock();
    try
    {
      _event.numSysEvents++;
      _event.sizeSysEvents += e.size();
      long now = System.currentTimeMillis();
      if (e.isEndOfPeriodMarker())
      {
        _event.minSeenWinScn = minValue(_event.minSeenWinScn,e.sequence());
        long eventTsInMs = e.timestampInNanos()/(1000*1000);
        _event.timestampMaxScnEvent = Math.max(_event.timestampMaxScnEvent,eventTsInMs);
        if (e.sequence() > _event.maxSeenWinScn)
        {
          // We have a new max event
          _event.maxSeenWinScn = e.sequence();
          _event.timeLag = (now > eventTsInMs) ? now - eventTsInMs : 0;
        }
      }
      _event.timestampAccessed = now;
    }
    finally
    {
      releaseLock(writeLock);
    }
  }

  /** Resets every statistic to its initial value (superclass handles locking). */
  @Override
  protected void resetData()
  {
    _event.timestampLastResetMs = System.currentTimeMillis();
    _event.timestampAccessed = DEFAULT_MAX_LONG_VALUE;
    _event.timeSinceLastResetMs = 0;
    _event.numPeers = 0;
    _event.numDataEvents = 0;
    _event.sizeDataEvents = 0;
    _event.sizeDataEventsPayload = 0;
    _event.numDataEventsFiltered = 0;
    _event.sizeDataEventsFiltered = 0;
    _event.sizeDataEventsPayloadFiltered = 0;
    _event.maxSeenWinScn = DEFAULT_MAX_LONG_VALUE;
    _event.minSeenWinScn = DEFAULT_MIN_LONG_VALUE;
    _event.numSysEvents = 0;
    _event.sizeSysEvents = 0;
    _event.numErrHeader = 0;
    _event.numErrPayload = 0;
    _event.numInvalidEvents = 0;
    _event.maxFilteredWinScn = 0;
    _event.latencyEvent = 0;
    _event.maxTimeSpan = DEFAULT_MAX_LONG_VALUE; // Makes sense only in the aggregated class.
    _event.minTimeSpan = DEFAULT_MIN_LONG_VALUE; // Makes sense only in the aggregated class.
    _event.maxTimestampAccessed = DEFAULT_MAX_LONG_VALUE; // Makes sense only in the aggregated class.
    _event.minTimestampAccessed = DEFAULT_MIN_LONG_VALUE; // Makes sense only in the aggregated class.
    _event.maxTimestampMaxScnEvent = DEFAULT_MAX_LONG_VALUE; // Makes sense only in the aggregated class.
    _event.minTimestampMaxScnEvent = DEFAULT_MIN_LONG_VALUE; // Makes sense only in the aggregated class.
    _event.timeLag = 0;
    _event.maxTimeLag = DEFAULT_MAX_LONG_VALUE; // Makes sense only in the aggregated class.
    _event.minTimeLag = DEFAULT_MIN_LONG_VALUE; // Makes sense only in the aggregated class.
    resetBufferStats();
    _peers.clear();
  }

  /** Creates an Avro JSON encoder for serializing this bean's stats event. */
  @Override
  public JsonEncoder createJsonEncoder(OutputStream out) throws IOException
  {
    return new JsonEncoder(_event.getSchema(), out);
  }

  /** clone this event to otherEvent atomically **/
  public void cloneData(DbusEventsTotalStats otherEvent)
  {
    Lock writeLock = acquireWriteLock();
    try
    {
      //note: otherEvent is RHS - and is read; _event is written to
      otherEvent.cloneData(_event);
    }
    finally
    {
      releaseLock(writeLock);
    }
  }

  /** Field-by-field copy of this bean's stats into {@code event} (no locking here). */
  @Override
  protected void cloneData(DbusEventsTotalStatsEvent event)
  {
    event.ownerId = _event.ownerId;
    event.dimension = _event.dimension;
    event.timestampLastResetMs = _event.timestampLastResetMs;
    event.timeSinceLastResetMs = System.currentTimeMillis() - _event.timestampLastResetMs;
    event.numPeers = _event.numPeers;
    event.numDataEvents = _event.numDataEvents;
    event.sizeDataEvents = _event.sizeDataEvents;
    event.sizeDataEventsPayload = _event.sizeDataEventsPayload;
    event.numDataEventsFiltered = _event.numDataEventsFiltered;
    event.sizeDataEventsFiltered = _event.sizeDataEventsFiltered;
    event.sizeDataEventsPayloadFiltered = _event.sizeDataEventsPayloadFiltered;
    event.maxSeenWinScn = _event.maxSeenWinScn;
    event.minSeenWinScn = _event.minSeenWinScn;
    event.numSysEvents = _event.numSysEvents;
    event.sizeSysEvents = _event.sizeSysEvents;
    event.minWinScn = _event.minWinScn;
    event.maxWinScn = _event.maxWinScn;
    event.numErrHeader = _event.numErrHeader;
    event.numInvalidEvents = _event.numInvalidEvents;
    event.numErrPayload = _event.numErrPayload;
    event.timestampCreated = _event.timestampCreated;
    event.timestampAccessed = _event.timestampAccessed;
    event.sinceWinScn = _event.sinceWinScn;
    event.timestampMaxScnEvent = _event.timestampMaxScnEvent;
    event.timestampMinScnEvent = _event.timestampMinScnEvent;
    event.maxFilteredWinScn = _event.maxFilteredWinScn;
    event.latencyEvent = _event.latencyEvent;
    event.timeLag = _event.timeLag;
    // aggregate fields
    event.minTimeLag = _event.minTimeLag;
    event.maxTimeLag = _event.maxTimeLag;
    event.maxTimeSpan = _event.maxTimeSpan;
    event.minTimeSpan = _event.minTimeSpan;
    event.maxTimestampAccessed = _event.maxTimestampAccessed;
    event.minTimestampAccessed = _event.minTimestampAccessed;
    event.maxTimestampMaxScnEvent = _event.maxTimestampMaxScnEvent;
    event.minTimestampMaxScnEvent = _event.minTimestampMaxScnEvent;
  }

  @Override
  protected DbusEventsTotalStatsEvent newDataEvent()
  {
    return new DbusEventsTotalStatsEvent();
  }

  @Override
  protected SpecificDatumWriter<DbusEventsTotalStatsEvent> getAvroWriter()
  {
    return new SpecificDatumWriter<DbusEventsTotalStatsEvent>(DbusEventsTotalStatsEvent.class);
  }

  /**
   * Merging is only supported on the aggregated subclass; calling this on a plain
   * instance logs and throws. Also merges the peer sets beyond the event fields.
   */
  @Override
  public void mergeStats(DatabusMonitoringMBean<DbusEventsTotalStatsEvent> other)
  {
    if (!(this instanceof AggregatedDbusEventsTotalStats))
    {
      _log.error("Can use mergeStats only on AggregatedDbusEventsTotalStats");
      throw new RuntimeException("Can use mergeStats only on AggregatedDbusEventsTotalStats");
    }
    super.mergeStats(other);
    if (other instanceof DbusEventsTotalStats)
    {
      mergeClients((DbusEventsTotalStats)other);
    }
  }

  /** Always throws: field-level merge logic lives in AggregatedDbusEventsTotalStats. */
  @Override
  protected void doMergeStats(Object eventData)
  {
    _log.error("Merging statistics into DbusEventsTotalStats not supported. Use AggregatedDbusEventsTotalStats");
    throw new RuntimeException("Merging statistics into DbusEventsTotalStats not supported. Use AggregatedDbusEventsTotalStats");
    // Merge logic is in AggregatedDbusEventsTotalStats
  }

  /** A bit of a hack to merge state outside the event state */
  // NOTE(review): acquires other's read lock before our write lock; two instances
  // cross-merging concurrently could deadlock — confirm callers serialize merges.
  private void mergeClients(DbusEventsTotalStats other)
  {
    Lock otherReadLock = other.acquireReadLock();
    Lock writeLock = acquireWriteLock(otherReadLock);
    try
    {
      _peers.addAll(other._peers);
      _event.numPeers = _peers.size();
    }
    finally
    {
      releaseLock(writeLock);
      releaseLock(otherReadLock);
    }
  }

  /** Builds the JMX ObjectName: base props plus ownerId and dimension. */
  @Override
  public ObjectName generateObjectName() throws MalformedObjectNameException
  {
    Hashtable<String, String> mbeanProps = generateBaseMBeanProps();
    mbeanProps.put("ownerId", Integer.toString(_event.ownerId));
    mbeanProps.put("dimension", _dimension);
    return new ObjectName(AbstractMonitoringMBean.JMX_DOMAIN, mbeanProps);
  }

  public String getDimension()
  {
    return _dimension;
  }

  /** Adds a peer id and refreshes the peer count. */
  // NOTE(review): does not check _enabled like the other register* methods — confirm intended.
  public void registerPeer(String peerId)
  {
    Lock writeLock = acquireWriteLock();
    try
    {
      _peers.add(peerId);
      _event.numPeers = _peers.size();
    }
    finally
    {
      releaseLock(writeLock);
    }
  }

  /**
   * Records an event-scan error: PARTIAL counts as a header error, ERR as a
   * payload error; both bump the invalid-event count. OK status is a no-op.
   */
  public void registerEventError(EventScanStatus writingEventStatus)
  {
    if (writingEventStatus != DbusEventInternalReadable.EventScanStatus.OK) {
      if (! _enabled.get()) return;
      Lock writeLock = acquireWriteLock();
      try
      {
        ++_event.numInvalidEvents;
        switch(writingEventStatus) {
        case PARTIAL:
          ++_event.numErrHeader;
          break;
        case ERR:
          ++_event.numErrPayload;
          break;
        case OK: break;//NOOP
        }
      }
      finally
      {
        releaseLock(writeLock);
      }
    }
  }

  /** Sets the current SCN window [min, max]. No-op when disabled. */
  public void registerScnRange(long min, long max) {
    if (! _enabled.get()) return;
    Lock writeLock = acquireWriteLock();
    try {
      _event.minWinScn = min;
      _event.maxWinScn = max;
    }
    finally
    {
      releaseLock(writeLock);
    }
  }

  @Override
  public long getNumInvalidEvents()
  {
    Lock readLock = acquireReadLock();
    try
    {
      return _event.numInvalidEvents;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  @Override
  public long getNumHeaderErrEvents()
  {
    Lock readLock = acquireReadLock();
    try
    {
      return _event.numErrHeader;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  @Override
  public long getNumPayloadErrEvents()
  {
    Lock readLock = acquireReadLock();
    try
    {
      return _event.numErrPayload;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  @Override
  public long getMinScn()
  {
    Lock readLock = acquireReadLock();
    try
    {
      return _event.minWinScn;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  @Override
  public long getMaxScn()
  {
    Lock readLock = acquireReadLock();
    try
    {
      return _event.maxWinScn;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  /** Ms since last access; DEFAULT_MAX_LONG_VALUE if never accessed (timestamp unset). */
  @Override
  public long getTimeSinceLastAccess()
  {
    Lock readLock = acquireReadLock();
    try
    {
      if(_event.timestampAccessed <= 0)
        return DEFAULT_MAX_LONG_VALUE;
      return System.currentTimeMillis() - _event.timestampAccessed;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  @Override
  public long getTimeSinceCreation()
  {
    Lock readLock = acquireReadLock();
    try
    {
      return System.currentTimeMillis()- _event.timestampCreated;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  /** Overrides the creation timestamp (used when restoring persisted state — TODO confirm). */
  public void registerCreationTime(long s)
  {
    Lock writeLock = acquireWriteLock();
    try
    {
      _event.timestampCreated = s;
    }
    finally
    {
      releaseLock(writeLock);
    }
  }

  @Override
  public long getFreeSpace()
  {
    Lock readLock = acquireReadLock();
    try
    {
      return _event.numFreeBytes;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  /** Updates buffer gauges: SCN window, prevScn ("since") and free bytes. No-op when disabled. */
  public void registerBufferMetrics(long min, long max, long since, long freeSpace)
  {
    if (! _enabled.get()) return;
    Lock writeLock = acquireWriteLock();
    try
    {
      _event.minWinScn = min;
      _event.maxWinScn = max;
      _event.sinceWinScn = since;
      _event.numFreeBytes = freeSpace;
    }
    finally
    {
      releaseLock(writeLock);
    }
  }

  /** Records the timestamp (ms) of the oldest event in the buffer. No-op when disabled. */
  public void registerTimestampOfFirstEvent(long ts)
  {
    if (! _enabled.get()) return;
    Lock writeLock = acquireWriteLock();
    try
    {
      _event.timestampMinScnEvent = ts;
    }
    finally
    {
      releaseLock(writeLock);
    }
  }

  // NOTE(review): unlocked read, unlike the other getters — confirm a stale value is acceptable.
  @Override
  public long getPrevScn() {
    return _event.sinceWinScn;
  }

  /** Lag (ms) between wall clock and the newest event's own timestamp. */
  @Override
  public long getTimeLag()
  {
    Lock readLock = acquireReadLock();
    try
    {
      return _event.timeLag;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  // In the non-aggregated bean min/max lag degenerate to the current lag.
  @Override
  public long getMinTimeLag()
  {
    return getTimeLag();
  }

  @Override
  public long getMaxTimeLag()
  {
    return getTimeLag();
  }

  /** Span (ms) between oldest and newest event timestamps; DEFAULT_MAX_LONG_VALUE if either is unset. */
  @Override
  public long getTimeSpan()
  {
    Lock readLock = acquireReadLock();
    try
    {
      if(_event.timestampMaxScnEvent <=0 || _event.timestampMinScnEvent <= 0) // not a valid timestamps
        return DEFAULT_MAX_LONG_VALUE;
      return (_event.timestampMaxScnEvent - _event.timestampMinScnEvent);
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  /** Ms since the newest event's timestamp; DEFAULT_MAX_LONG_VALUE if no event seen yet. */
  @Override
  public long getTimeSinceLastEvent()
  {
    Lock readLock = acquireReadLock();
    try
    {
      if (_event.timestampMaxScnEvent <= 0)
      {
        return DEFAULT_MAX_LONG_VALUE;
      }
      else
      {
        return System.currentTimeMillis() - _event.timestampMaxScnEvent ;
      }
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  // NOTE(review): unlocked read, unlike the other getters — confirm a stale value is acceptable.
  public long getTimestampMinScnEvent()
  {
    return _event.timestampMinScnEvent;
  }

  /** Average per-event latency (ms); 0 if no data events registered. */
  @Override
  public long getLatencyEvent()
  {
    Lock readLock = acquireReadLock();
    try
    {
      long num = _event.numDataEvents;
      return (num==0) ? 0 : _event.latencyEvent/num;
    }
    finally
    {
      releaseLock(readLock);
    }
  }

  // For the methods that make sense only in the aggregated class, return the same value
  // every time so that we don't make any inferences out of them.
  @Override
  public long getMinTimeSinceLastAccess()
  {
    return 0;
  }

  @Override
  public long getMaxTimeSinceLastAccess()
  {
    return 0;
  }

  @Override
  public long getMinTimeSinceLastEvent()
  {
    return 0;
  }

  @Override
  public long getMaxTimeSinceLastEvent()
  {
    return 0;
  }

  @Override
  public long getMinTimeSpan()
  {
    return 0;
  }

  @Override
  public long getMaxTimeSpan()
  {
    return 0;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.security;
import com.google.common.annotations.VisibleForTesting;
import java.lang.invoke.MethodHandles;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.core.NodeConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Validates URLs based on an allow list or a {@link ClusterState} in SolrCloud. */
/** Validates URLs based on an allow list or a {@link ClusterState} in SolrCloud. */
public class AllowListUrlChecker {

  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  /** {@link org.apache.solr.core.SolrXmlConfig} property to configure the allowed URLs. */
  public static final String URL_ALLOW_LIST = "allowUrls";

  /** System property to disable URL checking and {@link #ALLOW_ALL} instead. */
  public static final String DISABLE_URL_ALLOW_LIST = "solr.disable." + URL_ALLOW_LIST;

  /** Clue given in URL-forbidden exceptions messages. */
  public static final String SET_SOLR_DISABLE_URL_ALLOW_LIST_CLUE =
      "Set -D" + DISABLE_URL_ALLOW_LIST + "=true to disable URL allow-list checks.";

  /** Singleton checker which allows all URLs. {@link #isEnabled()} returns false. */
  public static final AllowListUrlChecker ALLOW_ALL;

  // Initialize ALLOW_ALL as an anonymous subclass whose checkAllowList is a no-op.
  static {
    try {
      ALLOW_ALL =
          new AllowListUrlChecker(Collections.emptyList()) {
            @Override
            public void checkAllowList(List<String> urls, ClusterState clusterState) {
              // Allow.
            }

            @Override
            public boolean isEnabled() {
              return false;
            }

            @Override
            public String toString() {
              return getClass().getSimpleName() + " [allow all]";
            }
          };
    } catch (MalformedURLException e) {
      // Never thrown: the empty list gives parseHostPorts nothing to parse.
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
    }
  }

  /**
   * Regex pattern to match any protocol, e.g. http:// https:// s3://. After a match, regex group 1
   * contains the protocol and group 2 the rest.
   */
  private static final Pattern PROTOCOL_PATTERN = Pattern.compile("(\\w+)(://.*)");

  /** Allow list of hosts. Elements in the list will be host:port (no protocol or context). */
  private final Set<String> hostAllowList;

  /**
   * @param urlAllowList List of allowed URLs. URLs must be well-formed, missing protocol is
   *     tolerated. An empty list means there is no explicit allow-list of URLs, in this case no URL
   *     is allowed unless a {@link ClusterState} is provided in {@link #checkAllowList(List,
   *     ClusterState)}.
   * @throws MalformedURLException If an URL is invalid.
   */
  public AllowListUrlChecker(List<String> urlAllowList) throws MalformedURLException {
    hostAllowList = parseHostPorts(urlAllowList);
  }

  /**
   * Creates a URL checker based on the {@link NodeConfig} property to configure the allowed URLs.
   */
  public static AllowListUrlChecker create(NodeConfig config) {
    if (Boolean.getBoolean(DISABLE_URL_ALLOW_LIST)) {
      return AllowListUrlChecker.ALLOW_ALL;
    } else if (System.getProperty("solr.disable.shardsWhitelist") != null) {
      // Legacy property: warn but do not honor it; only DISABLE_URL_ALLOW_LIST disables checks.
      log.warn(
          "Property 'solr.disable.shardsWhitelist' is deprecated, please use '{}' instead.",
          DISABLE_URL_ALLOW_LIST);
    }
    try {
      return new AllowListUrlChecker(config.getAllowUrls());
    } catch (MalformedURLException e) {
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR,
          "Invalid URL syntax in '" + URL_ALLOW_LIST + "' configuration: " + config.getAllowUrls(),
          e);
    }
  }

  /**
   * @see #checkAllowList(List, ClusterState)
   */
  public void checkAllowList(List<String> urls) throws MalformedURLException {
    checkAllowList(urls, null);
  }

  /**
   * Checks that the given URLs are present in the configured allow-list or in the provided {@link
   * ClusterState} (in case of cloud mode).
   *
   * @param urls The list of urls to check.
   * @param clusterState The up to date {@link ClusterState}, can be null in case of non-cloud mode.
   * @throws MalformedURLException If an URL is invalid.
   * @throws SolrException If an URL is not present in the allow-list or in the provided {@link
   *     ClusterState}.
   */
  public void checkAllowList(List<String> urls, ClusterState clusterState)
      throws MalformedURLException {
    Set<String> clusterHostAllowList =
        clusterState == null ? Collections.emptySet() : clusterState.getHostAllowList();
    for (String url : urls) {
      // Normalize each URL to host:port so comparison ignores protocol and path.
      String hostPort = parseHostPort(url);
      if (!clusterHostAllowList.contains(hostPort) && !hostAllowList.contains(hostPort)) {
        throw new SolrException(
            SolrException.ErrorCode.FORBIDDEN,
            "URL "
                + url
                + " is neither a live node of the cluster nor in the configured '"
                + URL_ALLOW_LIST
                + "' "
                + hostAllowList);
      }
    }
  }

  /** Whether this checker has been created with a non-empty allow-list of URLs. */
  public boolean hasExplicitAllowList() {
    return !hostAllowList.isEmpty();
  }

  /** Whether the URL checking is enabled. Only {@link #ALLOW_ALL} returns false. */
  public boolean isEnabled() {
    return true;
  }

  /** Only for testing. */
  @VisibleForTesting
  public Set<String> getHostAllowList() {
    // Defensive null check; hostAllowList is never null in practice (parseHostPorts
    // returns at least an empty set).
    return hostAllowList == null ? null : Collections.unmodifiableSet(hostAllowList);
  }

  @Override
  public String toString() {
    return getClass().getSimpleName() + " [allowList=" + hostAllowList + "]";
  }

  /**
   * Normalizes each URL in {@code urls} to its host:port form.
   *
   * @return an empty set for a null/empty input, otherwise the set of host:port strings
   * @throws MalformedURLException if any URL cannot be parsed or lacks host/port
   */
  @VisibleForTesting
  static Set<String> parseHostPorts(List<String> urls) throws MalformedURLException {
    if (urls == null || urls.isEmpty()) {
      return Collections.emptySet();
    }
    Set<String> hostPorts = new HashSet<>();
    for (String urlString : urls) {
      hostPorts.add(parseHostPort(urlString));
    }
    return hostPorts;
  }

  /**
   * Extracts "host:port" from {@code url}. Non-http protocols (e.g. s3://) are rewritten
   * to http purely so {@link URL} can parse them; a missing protocol defaults to http://.
   *
   * @throws MalformedURLException if the host is absent or no explicit port is given
   */
  private static String parseHostPort(String url) throws MalformedURLException {
    // Parse the host and port.
    // It doesn't really matter which protocol we set here because we are not going to use it.
    url = url.trim();
    URL u;
    Matcher protocolMatcher = PROTOCOL_PATTERN.matcher(url);
    if (protocolMatcher.matches()) {
      // Replace any protocol unsupported by URL.
      if (!protocolMatcher.group(1).startsWith("http")) {
        url = "http" + protocolMatcher.group(2);
      }
      u = new URL(url);
    } else {
      u = new URL("http://" + url);
    }
    if (u.getHost() == null || u.getPort() < 0) {
      throw new MalformedURLException("Invalid host or port in '" + url + "'");
    }
    return u.getHost() + ":" + u.getPort();
  }
}
| |
/*
* Copyright 2013 gitblit.com.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gitblit.wicket.panels;
import java.text.DateFormat;
import java.text.MessageFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.markup.repeater.data.DataView;
import org.apache.wicket.markup.repeater.data.ListDataProvider;
import org.eclipse.jgit.lib.PersonIdent;
import com.gitblit.Constants;
import com.gitblit.Keys;
import com.gitblit.models.DailyLogEntry;
import com.gitblit.models.RepositoryCommit;
import com.gitblit.utils.StringUtils;
import com.gitblit.utils.TimeUtils;
import com.gitblit.wicket.WicketUtils;
import com.gitblit.wicket.pages.CommitPage;
import com.gitblit.wicket.pages.ComparePage;
import com.gitblit.wicket.pages.SummaryPage;
import com.gitblit.wicket.pages.TagPage;
import com.gitblit.wicket.pages.TreePage;
public class DigestsPanel extends BasePanel {
private static final long serialVersionUID = 1L;
private final boolean hasChanges;
private boolean hasMore;
public DigestsPanel(String wicketId, List<DailyLogEntry> digests) {
super(wicketId);
hasChanges = digests.size() > 0;
final int hashLen = app().settings().getInteger(Keys.web.shortCommitIdLength, 6);
String dateFormat = app().settings().getString(Keys.web.datestampLongFormat, "EEEE, MMMM d, yyyy");
final TimeZone timezone = getTimeZone();
final DateFormat df = new SimpleDateFormat(dateFormat);
df.setTimeZone(timezone);
final Calendar cal = Calendar.getInstance(timezone);
ListDataProvider<DailyLogEntry> dp = new ListDataProvider<DailyLogEntry>(digests);
DataView<DailyLogEntry> pushView = new DataView<DailyLogEntry>("change", dp) {
private static final long serialVersionUID = 1L;
@Override
public void populateItem(final Item<DailyLogEntry> logItem) {
final DailyLogEntry change = logItem.getModelObject();
String fullRefName = change.getChangedRefs().get(0);
String shortRefName = fullRefName;
boolean isTag = false;
if (shortRefName.startsWith(Constants.R_HEADS)) {
shortRefName = shortRefName.substring(Constants.R_HEADS.length());
} else if (shortRefName.startsWith(Constants.R_TAGS)) {
shortRefName = shortRefName.substring(Constants.R_TAGS.length());
isTag = true;
}
String fuzzydate;
TimeUtils tu = getTimeUtils();
Date pushDate = change.date;
if (TimeUtils.isToday(pushDate, timezone)) {
fuzzydate = tu.today();
} else if (TimeUtils.isYesterday(pushDate, timezone)) {
fuzzydate = tu.yesterday();
} else {
// calculate a fuzzy time ago date
cal.setTime(pushDate);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
pushDate = cal.getTime();
fuzzydate = getTimeUtils().timeAgo(pushDate);
}
logItem.add(new Label("whenChanged", fuzzydate + ", " + df.format(pushDate)));
Label changeIcon = new Label("changeIcon");
// use the repository hash color to differentiate the icon.
String color = StringUtils.getColor(StringUtils.stripDotGit(change.repository));
WicketUtils.setCssStyle(changeIcon, "color: " + color);
if (isTag) {
WicketUtils.setCssClass(changeIcon, "iconic-tag");
} else {
WicketUtils.setCssClass(changeIcon, "iconic-loop");
}
logItem.add(changeIcon);
if (isTag) {
// tags are special
PersonIdent ident = change.getCommits().get(0).getAuthorIdent();
if (!StringUtils.isEmpty(ident.getName())) {
logItem.add(new Label("whoChanged", ident.getName()));
} else {
logItem.add(new Label("whoChanged", ident.getEmailAddress()));
}
} else {
logItem.add(new Label("whoChanged").setVisible(false));
}
String preposition = "gb.of";
boolean isDelete = false;
String what;
String by = null;
switch(change.getChangeType(fullRefName)) {
case CREATE:
if (isTag) {
// new tag
what = getString("gb.createdNewTag");
preposition = "gb.in";
} else {
// new branch
what = getString("gb.createdNewBranch");
preposition = "gb.in";
}
break;
case DELETE:
isDelete = true;
if (isTag) {
what = getString("gb.deletedTag");
} else {
what = getString("gb.deletedBranch");
}
preposition = "gb.from";
break;
default:
what = MessageFormat.format(change.getCommitCount() > 1 ? getString("gb.commitsTo") : getString("gb.oneCommitTo"), change.getCommitCount());
if (change.getAuthorCount() == 1) {
by = MessageFormat.format(getString("gb.byOneAuthor"), change.getAuthorIdent().getName());
} else {
by = MessageFormat.format(getString("gb.byNAuthors"), change.getAuthorCount());
}
break;
}
logItem.add(new Label("whatChanged", what));
logItem.add(new Label("byAuthors", by).setVisible(!StringUtils.isEmpty(by)));
if (isDelete) {
// can't link to deleted ref
logItem.add(new Label("refChanged", shortRefName));
} else if (isTag) {
// link to tag
logItem.add(new LinkPanel("refChanged", null, shortRefName,
TagPage.class, WicketUtils.newObjectParameter(change.repository, fullRefName)));
} else {
// link to tree
logItem.add(new LinkPanel("refChanged", null, shortRefName,
TreePage.class, WicketUtils.newObjectParameter(change.repository, fullRefName)));
}
// to/from/etc
logItem.add(new Label("repoPreposition", getString(preposition)));
String repoName = StringUtils.stripDotGit(change.repository);
logItem.add(new LinkPanel("repoChanged", null, repoName,
SummaryPage.class, WicketUtils.newRepositoryParameter(change.repository)));
int maxCommitCount = 5;
List<RepositoryCommit> commits = change.getCommits();
if (commits.size() > maxCommitCount) {
commits = new ArrayList<RepositoryCommit>(commits.subList(0, maxCommitCount));
}
// compare link
String compareLinkText = null;
if ((change.getCommitCount() <= maxCommitCount) && (change.getCommitCount() > 1)) {
compareLinkText = MessageFormat.format(getString("gb.viewComparison"), commits.size());
} else if (change.getCommitCount() > maxCommitCount) {
int diff = change.getCommitCount() - maxCommitCount;
compareLinkText = MessageFormat.format(diff > 1 ? getString("gb.nMoreCommits") : getString("gb.oneMoreCommit"), diff);
}
if (StringUtils.isEmpty(compareLinkText)) {
logItem.add(new Label("compareLink").setVisible(false));
} else {
String endRangeId = change.getNewId(fullRefName);
String startRangeId = change.getOldId(fullRefName);
logItem.add(new LinkPanel("compareLink", null, compareLinkText, ComparePage.class, WicketUtils.newRangeParameter(change.repository, startRangeId, endRangeId)));
}
final boolean showSwatch = app().settings().getBoolean(Keys.web.repositoryListSwatches, true);
ListDataProvider<RepositoryCommit> cdp = new ListDataProvider<RepositoryCommit>(commits);
DataView<RepositoryCommit> commitsView = new DataView<RepositoryCommit>("commit", cdp) {
private static final long serialVersionUID = 1L;
@Override
public void populateItem(final Item<RepositoryCommit> commitItem) {
final RepositoryCommit commit = commitItem.getModelObject();
// author gravatar
commitItem.add(new GravatarImage("commitAuthor", commit.getAuthorIdent().getName(),
commit.getAuthorIdent().getEmailAddress(), null, 16, false, false));
// merge icon
if (commit.getParentCount() > 1) {
commitItem.add(WicketUtils.newImage("commitIcon", "commit_merge_16x16.png"));
} else {
commitItem.add(WicketUtils.newBlankImage("commitIcon"));
}
// short message
String shortMessage = commit.getShortMessage();
String trimmedMessage = shortMessage;
if (commit.getRefs() != null && commit.getRefs().size() > 0) {
trimmedMessage = StringUtils.trimString(shortMessage, Constants.LEN_SHORTLOG_REFS);
} else {
trimmedMessage = StringUtils.trimString(shortMessage, Constants.LEN_SHORTLOG);
}
LinkPanel shortlog = new LinkPanel("commitShortMessage", "list",
trimmedMessage, CommitPage.class, WicketUtils.newObjectParameter(
change.repository, commit.getName()));
if (!shortMessage.equals(trimmedMessage)) {
WicketUtils.setHtmlTooltip(shortlog, shortMessage);
}
commitItem.add(shortlog);
// commit hash link
LinkPanel commitHash = new LinkPanel("hashLink", null, commit.getName().substring(0, hashLen),
CommitPage.class, WicketUtils.newObjectParameter(
change.repository, commit.getName()));
WicketUtils.setCssClass(commitHash, "shortsha1");
WicketUtils.setHtmlTooltip(commitHash, commit.getName());
commitItem.add(commitHash);
if (showSwatch) {
// set repository color
String color = StringUtils.getColor(StringUtils.stripDotGit(change.repository));
WicketUtils.setCssStyle(commitItem, MessageFormat.format("border-left: 2px solid {0};", color));
}
}
};
logItem.add(commitsView);
}
};
add(pushView);
}
/**
 * @return the hasMore flag: whether additional entries exist beyond those
 *         rendered by this panel (set by the constructor, not visible here)
 */
public boolean hasMore() {
    return hasMore;
}
/**
 * Hides this panel when it has no changes to display.
 * Note: this method has a side effect — it sets the panel's visibility.
 *
 * @return true when the panel remains visible (it has changes)
 */
public boolean hideIfEmpty() {
    final boolean visible = hasChanges;
    setVisible(visible);
    return visible;
}
}
| |
/*
* Copyright AllSeen Alliance. All rights reserved.
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
* This is sample code demonstrating how to use AllJoyn messages to pass complex data types.
* It retrieves a String array containing all of the contacts found on the phone,
* or the list of phone number(s) and e-mail addresses for a contact based on their name.
*/
package org.alljoyn.bus.samples.contacts_client;
import org.alljoyn.bus.BusAttachment;
import org.alljoyn.bus.BusException;
import org.alljoyn.bus.BusListener;
import org.alljoyn.bus.Mutable;
import org.alljoyn.bus.ProxyBusObject;
import org.alljoyn.bus.SessionListener;
import org.alljoyn.bus.SessionOpts;
import org.alljoyn.bus.Status;
import android.app.Activity;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.res.Resources;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.provider.ContactsContract;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TableLayout;
import android.widget.TableRow;
import android.widget.TextView;
import android.widget.Toast;
/*
* This is a sample client that will receive information about the contacts stored on
* the phone from a contacts service.
*/
/**
 * Sample AllJoyn client Activity: discovers the contacts service
 * ({@code org.alljoyn.bus.addressbook}), joins its session, and lets the
 * user browse the contacts the service publishes.
 *
 * All AllJoyn calls run on a dedicated background thread via
 * {@link BusHandler}; results are posted back to the UI thread through
 * {@code mHandler}.
 */
public class ContactsClient extends Activity {
    static {
        // Load the native AllJoyn library before any bus classes are touched.
        System.loadLibrary("alljoyn_java");
    }

    /* Dialog identifier. */
    private static final int DIALOG_CONTACT = 1;
    /* UI-handler message codes. */
    private static final int MESSAGE_DISPLAY_ALL_CONTACTS = 1;
    private static final int MESSAGE_DISPLAY_CONTACT = 2;
    private static final int MESSAGE_POST_TOAST = 3;
    private static final int MESSAGE_START_PROGRESS_DIALOG = 4;
    private static final int MESSAGE_STOP_PROGRESS_DIALOG = 5;

    private static final String TAG = "ContactsClient";

    private Button mGetContactsBtn;
    private ArrayAdapter<String> mContactsListAdapter;
    private ListView mContactsListView;
    private Menu menu;
    private Contact mAddressEntry;   // contact currently shown in the dialog
    private NameId[] mContactNames;  // names/ids most recently fetched from the service
    String mSingleName;              // NOTE(review): unused in this file; kept for compatibility
    int mSingleUserId;               // NOTE(review): unused in this file; kept for compatibility
    BusHandler mBusHandler;
    private ProgressDialog mDialog;

    /*
     * Receives results from the bus thread and applies them to the UI.
     * NOTE(review): a non-static Handler keeps an implicit reference to the
     * Activity and can leak it if a message outlives the Activity; a static
     * handler holding a WeakReference would be safer. Kept as-is to match
     * the original sample's behavior.
     */
    private Handler mHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
            case MESSAGE_DISPLAY_ALL_CONTACTS:
                mContactNames = (NameId[]) msg.obj;
                /*
                 * Make sure the contacts list is clear of any old information
                 * before filling the list.
                 */
                mContactsListAdapter.clear();
                /*
                 * Change the name of the button from "Get Contacts List" to
                 * "Update Contacts List".
                 */
                mGetContactsBtn.setText(getString(R.string.update_contacts));
                for (int i = 0; i < mContactNames.length; i++) {
                    mContactsListAdapter.add(mContactNames[i].displayName);
                }
                break;
            case MESSAGE_DISPLAY_CONTACT:
                mAddressEntry = (Contact) msg.obj;
                showDialog(DIALOG_CONTACT);
                break;
            case MESSAGE_POST_TOAST:
                Toast.makeText(getApplicationContext(), (String) msg.obj, Toast.LENGTH_LONG).show();
                break;
            case MESSAGE_START_PROGRESS_DIALOG:
                mDialog = ProgressDialog.show(ContactsClient.this,
                                              "",
                                              "Finding Contacts Service.\nPlease wait...",
                                              true,
                                              true);
                break;
            case MESSAGE_STOP_PROGRESS_DIALOG:
                mDialog.dismiss();
                break;
            default:
                break;
            }
        }
    };

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);

        mContactsListAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1);
        mContactsListView = (ListView) findViewById(R.id.contact_list);
        mContactsListView.setAdapter(mContactsListAdapter);
        mContactsListView.setTextFilterEnabled(true);
        mContactsListView.setOnItemClickListener(new GetContactInformation());

        mGetContactsBtn = (Button) findViewById(R.id.get_contacts_btn);
        mGetContactsBtn.setOnClickListener(new GetContactsListener());

        mAddressEntry = new Contact();

        // All AllJoyn work happens on this dedicated background thread.
        HandlerThread busThread = new HandlerThread("BusHandler");
        busThread.start();
        mBusHandler = new BusHandler(busThread.getLooper());
        mBusHandler.sendEmptyMessage(BusHandler.CONNECT);
        mHandler.sendEmptyMessage(MESSAGE_START_PROGRESS_DIALOG);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.mainmenu, menu);
        this.menu = menu;
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case R.id.quit:
            finish();
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Tear down the session and bus connection on the bus thread.
        mBusHandler.sendEmptyMessage(BusHandler.DISCONNECT);
    }

    /**
     * Implementation of the OnClickListener attached to the
     * "Get Contacts List" / "Update Contacts List" button. When clicked this
     * asks the bus thread to fill mContactsListAdapter with an alphabetized
     * list of all the contacts on the phone.
     */
    private class GetContactsListener implements View.OnClickListener {
        public void onClick(View v) {
            mBusHandler.sendEmptyMessage(BusHandler.GET_ALL_CONTACT_NAMES);
        }
    }

    /**
     * Implementation of the OnItemClickListener for any item in the contacts
     * list. The listener will use the string from the list and use that
     * name to look up an individual contact based on that name.
     */
    private class GetContactInformation implements AdapterView.OnItemClickListener {
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            Message msg = mBusHandler.obtainMessage(BusHandler.GET_CONTACT, mContactNames[position]);
            mBusHandler.sendMessage(msg);
        }
    }

    @Override
    protected Dialog onCreateDialog(int id) {
        Dialog dialog = null;
        try {
            switch (id) {
            case DIALOG_CONTACT: {
                dialog = new Dialog(ContactsClient.this);
                break;
            }
            default: {
                dialog = null;
                break;
            }
            }
        } catch (Throwable ex) {
            // Fixed: dropped a pointless String.format(...) around a
            // constant message.
            Log.e(TAG, "Throwable exception found", ex);
        }
        return dialog;
    }

    @Override
    protected void onPrepareDialog(int id, Dialog dialog) {
        switch (id) {
        /*
         * Build a dialog that shows the contents of an individual contact.
         * The dialog is built dynamically based on how much information is
         * known about the contact.
         */
        case DIALOG_CONTACT: {
            int PHONE_TABLE_OFFSET = 2; /* two columns before the phone numbers start */
            int EMAIL_TABLE_OFFSET = 1 + PHONE_TABLE_OFFSET + mAddressEntry.phone.length;
            /*
             * Reset the dialog to a known starting point.
             */
            dialog.setContentView(R.layout.contact);
            dialog.setTitle(getString(R.string.contact_dialog_title));
            /*
             * Add the contact name to the top of the table.
             */
            TextView contactName = (TextView) dialog.findViewById(R.id.contact_name);
            contactName.setText(mAddressEntry.name);
            /*
             * Get the table layout so items can be added to it.
             */
            TableLayout contactTable = (TableLayout) dialog.findViewById(R.id.contact_table);
            // One row per phone number.
            if (mAddressEntry.phone.length > 0) {
                for (int i = 0; i < mAddressEntry.phone.length; i++) {
                    insertPhoneToTable(contactTable, mAddressEntry.phone[i], i + PHONE_TABLE_OFFSET);
                }
            }
            // One row per e-mail address.
            if (mAddressEntry.email.length > 0) {
                for (int i = 0; i < mAddressEntry.email.length; i++) {
                    insertEmailToTable(contactTable, mAddressEntry.email[i], i + EMAIL_TABLE_OFFSET);
                }
            }
            break;
        }
        default:
            break;
        }
    }

    /**
     * Insert a phone number into the table at the indicated position.
     */
    private void insertPhoneToTable(TableLayout table, Contact.Phone phone, int position) {
        TableRow tr = new TableRow(getApplicationContext());
        TextView type = new TextView(getApplicationContext());
        type.setLayoutParams(new TableRow.LayoutParams(1));
        /*
         * If the phone type has a custom label use that label, otherwise pull
         * the type from the phone_types string array.
         */
        if (phone.type == ContactsContract.CommonDataKinds.Phone.TYPE_CUSTOM) {
            type.setText(phone.label);
        } else {
            Resources res = getResources();
            String[] phoneTypes = res.getStringArray(R.array.phone_types);
            type.setText(phoneTypes[phone.type]);
        }
        TextView number = new TextView(getApplicationContext());
        number.setText(phone.number);
        tr.addView(type);
        tr.addView(number);
        table.addView(tr, position);
    }

    /**
     * Insert an email address into the table at the indicated position.
     */
    private void insertEmailToTable(TableLayout table, Contact.Email email, int position) {
        TableRow tr = new TableRow(getApplicationContext());
        TextView type = new TextView(getApplicationContext());
        type.setLayoutParams(new TableRow.LayoutParams(1));
        /*
         * If the email type has a custom label use that label, otherwise pull
         * the type from the email_types string array.
         */
        if (email.type == ContactsContract.CommonDataKinds.Email.TYPE_CUSTOM) {
            type.setText(email.label);
        } else {
            Resources res = getResources();
            String[] emailTypes = res.getStringArray(R.array.email_types);
            type.setText(emailTypes[email.type]);
        }
        TextView address = new TextView(getApplicationContext());
        address.setText(email.address);
        tr.addView(type);
        tr.addView(address);
        table.addView(tr, position);
    }

    /*
     * Marshals all AllJoyn calls onto the background bus thread.
     * See the SimpleClient sample for a more complete description of the
     * code used to connect this code to the bus.
     */
    class BusHandler extends Handler {
        private static final String SERVICE_NAME = "org.alljoyn.bus.addressbook";
        private static final short CONTACT_PORT = 42;

        public static final int CONNECT = 1;
        public static final int DISCONNECT = 2;
        public static final int GET_CONTACT = 3;
        public static final int GET_ALL_CONTACT_NAMES = 4;
        public static final int JOIN_SESSION = 5;

        private BusAttachment mBus;
        private ProxyBusObject mProxyObj;
        private AddressBookInterface mAddressBookInterface;

        private int mSessionId;
        private boolean mIsConnected;
        private boolean mIsStoppingDiscovery;

        public BusHandler(Looper looper) {
            super(looper);
            mIsConnected = false;
            mIsStoppingDiscovery = false;
        }

        @Override
        public void handleMessage(Message msg) {
            switch(msg.what) {
            // Connect to the bus and start looking for the service.
            case CONNECT: {
                org.alljoyn.bus.alljoyn.DaemonInit.PrepareDaemon(getApplicationContext());
                mBus = new BusAttachment(getPackageName(), BusAttachment.RemoteMessage.Receive);
                mBus.registerBusListener(new BusListener() {
                    @Override
                    public void foundAdvertisedName(String name, short transport, String namePrefix) {
                        logInfo(String.format("MyBusListener.foundAdvertisedName(%s, 0x%04x, %s)", name, transport, namePrefix));
                        /*
                         * This client will only join the first service that it sees advertising
                         * the indicated well-known name. If the program is already a member of
                         * a session (i.e. connected to a service) we will not attempt to join
                         * another session.
                         * It is possible to join multiple sessions, however joining multiple
                         * sessions is not shown in this sample.
                         */
                        if (!mIsConnected) {
                            Message msg = obtainMessage(JOIN_SESSION, name);
                            sendMessage(msg);
                        }
                    }
                });

                Status status = mBus.connect();
                logStatus("BusAttachment.connect()", status);

                status = mBus.findAdvertisedName(SERVICE_NAME);
                logStatus(String.format("BusAttachement.findAdvertisedName(%s)", SERVICE_NAME), status);
                break;
            }
            // Join the advertised session and obtain the remote proxy object.
            case JOIN_SESSION: {
                if (mIsStoppingDiscovery) {
                    break;
                }
                short contactPort = CONTACT_PORT;
                SessionOpts sessionOpts = new SessionOpts();
                Mutable.IntegerValue sessionId = new Mutable.IntegerValue();

                Status status = mBus.joinSession((String) msg.obj, contactPort, sessionId, sessionOpts, new SessionListener(){
                    @Override
                    public void sessionLost(int sessionId, int reason) {
                        // Session dropped: show the progress dialog again while we rediscover.
                        mIsConnected = false;
                        logInfo(String.format("MyBusListener.sessionLost(sessionId = %d, reason = %d)", sessionId, reason));
                        mHandler.sendEmptyMessage(MESSAGE_START_PROGRESS_DIALOG);
                    }
                });
                logStatus("BusAttachment.joinSession()", status);

                if (status == Status.OK) {
                    mProxyObj = mBus.getProxyBusObject(SERVICE_NAME, "/addressbook", sessionId.value,
                                                       new Class<?>[] { AddressBookInterface.class });
                    mAddressBookInterface = mProxyObj.getInterface(AddressBookInterface.class);
                    mSessionId = sessionId.value;
                    mIsConnected = true;
                    mHandler.sendEmptyMessage(MESSAGE_STOP_PROGRESS_DIALOG);
                }
                break;
            }
            // Leave the session (if any), disconnect, and stop the bus thread.
            case DISCONNECT: {
                mIsStoppingDiscovery = true;
                if (mIsConnected) {
                    Status status = mBus.leaveSession(mSessionId);
                    logStatus("BusAttachment.leaveSession()", status);
                }
                mBus.disconnect();
                getLooper().quit();
                break;
            }
            // Call AddressBookInterface.getContact and send the result to the UI handler.
            case GET_CONTACT: {
                if (mAddressBookInterface == null) {
                    break;
                }
                try {
                    NameId nameId = (NameId) msg.obj;
                    Contact reply = mAddressBookInterface.getContact(nameId.displayName, nameId.userId);
                    Message replyMsg = mHandler.obtainMessage(MESSAGE_DISPLAY_CONTACT, reply);
                    mHandler.sendMessage(replyMsg);
                } catch (BusException ex) {
                    logException("AddressBookInterface.getContact()", ex);
                }
                break;
            }
            // Call AddressBookInterface.getAllContactNames and send the result to the UI handler.
            case GET_ALL_CONTACT_NAMES: {
                /*
                 * Fixed: guard against a request arriving before the session
                 * is joined (mirrors the GET_CONTACT case above). Previously
                 * this threw a NullPointerException when the button was
                 * clicked before the service was found.
                 */
                if (mAddressBookInterface == null) {
                    break;
                }
                try {
                    NameId[] reply = mAddressBookInterface.getAllContactNames();
                    Message replyMsg = mHandler.obtainMessage(MESSAGE_DISPLAY_ALL_CONTACTS, (Object) reply);
                    mHandler.sendMessage(replyMsg);
                } catch (BusException ex) {
                    logException("AddressBookInterface.getAllContactNames()", ex);
                }
                break;
            }
            default:
                break;
            }
        }
    }

    /** Log an AllJoyn Status; errors are also surfaced to the user as a toast. */
    private void logStatus(String msg, Status status) {
        String log = String.format("%s: %s", msg, status);
        if (status == Status.OK) {
            Log.i(TAG, log);
        } else {
            Message toastMsg = mHandler.obtainMessage(MESSAGE_POST_TOAST, log);
            mHandler.sendMessage(toastMsg);
            Log.e(TAG, log);
        }
    }

    /** Log a BusException and surface it to the user as a toast. */
    private void logException(String msg, BusException ex) {
        String log = String.format("%s: %s", msg, ex);
        Message toastMsg = mHandler.obtainMessage(MESSAGE_POST_TOAST, log);
        mHandler.sendMessage(toastMsg);
        Log.e(TAG, log, ex);
    }

    /*
     * Simple info logging; when debugging is enabled an implementation could
     * also echo these to the UI.
     */
    private void logInfo(String msg) {
        Log.i(TAG, msg);
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: tensorflow/core/example/feature.proto
package org.tensorflow.proto.example;
/**
* Protobuf type {@code tensorflow.Features}
*/
public final class Features extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:tensorflow.Features)
FeaturesOrBuilder {
private static final long serialVersionUID = 0L;
// Use Features.newBuilder() to construct.
private Features(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Features() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new Features();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/*
 * Generated wire-format parsing constructor: reads tag/value pairs until
 * end of stream (tag 0), lazily creating the feature map on the first map
 * entry (field 1, wire tag 10) and preserving unrecognized fields in
 * unknownFields.
 */
private Features(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // End of stream.
          done = true;
          break;
        case 10: {
          // map<string, Feature> feature = 1 — one MapEntry message per entry.
          if (!((mutable_bitField0_ & 0x00000001) != 0)) {
            // First entry seen: allocate the mutable map field.
            feature_ = com.google.protobuf.MapField.newMapField(
                FeatureDefaultEntryHolder.defaultEntry);
            mutable_bitField0_ |= 0x00000001;
          }
          com.google.protobuf.MapEntry<java.lang.String, org.tensorflow.proto.example.Feature>
          feature__ = input.readMessage(
              FeatureDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry);
          feature_.getMutableMap().put(
              feature__.getKey(), feature__.getValue());
          break;
        }
        default: {
          // Unknown field: keep it unless the parser says we hit end-of-group.
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Always freeze unknown fields and extensions, even on parse failure.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.proto.example.FeatureProtos.internal_static_tensorflow_Features_descriptor;
}
@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapField internalGetMapField(
int number) {
switch (number) {
case 1:
return internalGetFeature();
default:
throw new RuntimeException(
"Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.proto.example.FeatureProtos.internal_static_tensorflow_Features_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.proto.example.Features.class, org.tensorflow.proto.example.Features.Builder.class);
}
public static final int FEATURE_FIELD_NUMBER = 1;
private static final class FeatureDefaultEntryHolder {
static final com.google.protobuf.MapEntry<
java.lang.String, org.tensorflow.proto.example.Feature> defaultEntry =
com.google.protobuf.MapEntry
.<java.lang.String, org.tensorflow.proto.example.Feature>newDefaultInstance(
org.tensorflow.proto.example.FeatureProtos.internal_static_tensorflow_Features_FeatureEntry_descriptor,
com.google.protobuf.WireFormat.FieldType.STRING,
"",
com.google.protobuf.WireFormat.FieldType.MESSAGE,
org.tensorflow.proto.example.Feature.getDefaultInstance());
}
private com.google.protobuf.MapField<
java.lang.String, org.tensorflow.proto.example.Feature> feature_;
private com.google.protobuf.MapField<java.lang.String, org.tensorflow.proto.example.Feature>
internalGetFeature() {
if (feature_ == null) {
return com.google.protobuf.MapField.emptyMapField(
FeatureDefaultEntryHolder.defaultEntry);
}
return feature_;
}
public int getFeatureCount() {
return internalGetFeature().getMap().size();
}
/**
* <pre>
* Map from feature name to feature.
* </pre>
*
* <code>map<string, .tensorflow.Feature> feature = 1;</code>
*/
public boolean containsFeature(
java.lang.String key) {
if (key == null) { throw new java.lang.NullPointerException(); }
return internalGetFeature().getMap().containsKey(key);
}
/**
* Use {@link #getFeatureMap()} instead.
*/
@java.lang.Deprecated
public java.util.Map<java.lang.String, org.tensorflow.proto.example.Feature> getFeature() {
return getFeatureMap();
}
/**
* <pre>
* Map from feature name to feature.
* </pre>
*
* <code>map<string, .tensorflow.Feature> feature = 1;</code>
*/
public java.util.Map<java.lang.String, org.tensorflow.proto.example.Feature> getFeatureMap() {
return internalGetFeature().getMap();
}
/**
* <pre>
* Map from feature name to feature.
* </pre>
*
* <code>map<string, .tensorflow.Feature> feature = 1;</code>
*/
public org.tensorflow.proto.example.Feature getFeatureOrDefault(
java.lang.String key,
org.tensorflow.proto.example.Feature defaultValue) {
if (key == null) { throw new java.lang.NullPointerException(); }
java.util.Map<java.lang.String, org.tensorflow.proto.example.Feature> map =
internalGetFeature().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
* <pre>
* Map from feature name to feature.
* </pre>
*
* <code>map<string, .tensorflow.Feature> feature = 1;</code>
*/
public org.tensorflow.proto.example.Feature getFeatureOrThrow(
java.lang.String key) {
if (key == null) { throw new java.lang.NullPointerException(); }
java.util.Map<java.lang.String, org.tensorflow.proto.example.Feature> map =
internalGetFeature().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Serialize each map entry as a field-1 MapEntry message, then any
  // unknown fields captured at parse time.
  com.google.protobuf.GeneratedMessageV3
    .serializeStringMapTo(
        output,
        internalGetFeature(),
        FeatureDefaultEntryHolder.defaultEntry,
        1);
  unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;  // -1 is the "not yet computed" sentinel
  size = 0;
  // Size each map entry as a synthetic field-1 MapEntry message.
  for (java.util.Map.Entry<java.lang.String, org.tensorflow.proto.example.Feature> entry
       : internalGetFeature().getMap().entrySet()) {
    com.google.protobuf.MapEntry<java.lang.String, org.tensorflow.proto.example.Feature>
    feature__ = FeatureDefaultEntryHolder.defaultEntry.newBuilderForType()
        .setKey(entry.getKey())
        .setValue(entry.getValue())
        .build();
    size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, feature__);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;  // cache for subsequent calls
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.tensorflow.proto.example.Features)) {
    // Defer to GeneratedMessageV3 for non-Features objects.
    return super.equals(obj);
  }
  org.tensorflow.proto.example.Features other = (org.tensorflow.proto.example.Features) obj;
  // Equal iff the feature maps and the unknown field sets both match.
  if (!internalGetFeature().equals(
      other.internalGetFeature())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Memoized: 0 is the "not yet computed" sentinel.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // Hash the descriptor, the feature map (only when non-empty, so an empty
  // map and an absent map hash alike), and the unknown fields.
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (!internalGetFeature().getMap().isEmpty()) {
    hash = (37 * hash) + FEATURE_FIELD_NUMBER;
    hash = (53 * hash) + internalGetFeature().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static org.tensorflow.proto.example.Features parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.proto.example.Features parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.proto.example.Features parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.proto.example.Features parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.proto.example.Features parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.proto.example.Features parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.proto.example.Features parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.proto.example.Features parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.proto.example.Features parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.tensorflow.proto.example.Features parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.proto.example.Features parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.proto.example.Features parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.tensorflow.proto.example.Features prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code tensorflow.Features}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:tensorflow.Features)
org.tensorflow.proto.example.FeaturesOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.proto.example.FeatureProtos.internal_static_tensorflow_Features_descriptor;
}
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapField internalGetMapField(
int number) {
switch (number) {
case 1:
return internalGetFeature();
default:
throw new RuntimeException(
"Invalid map field number: " + number);
}
}
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapField internalGetMutableMapField(
int number) {
switch (number) {
case 1:
return internalGetMutableFeature();
default:
throw new RuntimeException(
"Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.proto.example.FeatureProtos.internal_static_tensorflow_Features_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.proto.example.Features.class, org.tensorflow.proto.example.Features.Builder.class);
}
// Construct using org.tensorflow.proto.example.Features.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
internalGetMutableFeature().clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.tensorflow.proto.example.FeatureProtos.internal_static_tensorflow_Features_descriptor;
}
@java.lang.Override
public org.tensorflow.proto.example.Features getDefaultInstanceForType() {
return org.tensorflow.proto.example.Features.getDefaultInstance();
}
@java.lang.Override
public org.tensorflow.proto.example.Features build() {
org.tensorflow.proto.example.Features result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.tensorflow.proto.example.Features buildPartial() {
  org.tensorflow.proto.example.Features result = new org.tensorflow.proto.example.Features(this);
  // NOTE(review): from_bitField0_ is an unused protoc artifact; left in
  // place because this file is generated ("DO NOT EDIT").
  int from_bitField0_ = bitField0_;
  result.feature_ = internalGetFeature();
  // Freeze the map so the built message is immutable.
  result.feature_.makeImmutable();
  onBuilt();
  return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.tensorflow.proto.example.Features) {
return mergeFrom((org.tensorflow.proto.example.Features)other);
} else {
super.mergeFrom(other);
return this;
}
}
/**
 * Merges {@code other} into this builder: all of other's feature map
 * entries and unknown fields are merged in. Merging the default instance
 * is a no-op.
 */
public Builder mergeFrom(org.tensorflow.proto.example.Features other) {
  if (other == org.tensorflow.proto.example.Features.getDefaultInstance()) return this;
  internalGetMutableFeature().mergeFrom(
      other.internalGetFeature());
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.tensorflow.proto.example.Features parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.tensorflow.proto.example.Features) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private com.google.protobuf.MapField<
java.lang.String, org.tensorflow.proto.example.Feature> feature_;
private com.google.protobuf.MapField<java.lang.String, org.tensorflow.proto.example.Feature>
internalGetFeature() {
if (feature_ == null) {
return com.google.protobuf.MapField.emptyMapField(
FeatureDefaultEntryHolder.defaultEntry);
}
return feature_;
}
/*
 * Returns the mutable map field backing `feature`, creating it on first
 * use and copying it when it is currently immutable, so the builder never
 * mutates a map shared with an already-built message.
 */
private com.google.protobuf.MapField<java.lang.String, org.tensorflow.proto.example.Feature>
internalGetMutableFeature() {
  onChanged();  // fixed: removed stray empty statement ("onChanged();;")
  if (feature_ == null) {
    feature_ = com.google.protobuf.MapField.newMapField(
        FeatureDefaultEntryHolder.defaultEntry);
  }
  if (!feature_.isMutable()) {
    feature_ = feature_.copy();
  }
  return feature_;
}
public int getFeatureCount() {
return internalGetFeature().getMap().size();
}
/**
* <pre>
* Map from feature name to feature.
* </pre>
*
* <code>map<string, .tensorflow.Feature> feature = 1;</code>
*/
public boolean containsFeature(
java.lang.String key) {
if (key == null) { throw new java.lang.NullPointerException(); }
return internalGetFeature().getMap().containsKey(key);
}
/**
* Use {@link #getFeatureMap()} instead.
*/
@java.lang.Deprecated
public java.util.Map<java.lang.String, org.tensorflow.proto.example.Feature> getFeature() {
return getFeatureMap();
}
    /**
     * <pre>
     * Map from feature name to feature.
     * </pre>
     *
     * <code>map&lt;string, .tensorflow.Feature&gt; feature = 1;</code>
     */
    public java.util.Map<java.lang.String, org.tensorflow.proto.example.Feature> getFeatureMap() {
      return internalGetFeature().getMap();
    }
    /**
     * <pre>
     * Map from feature name to feature.
     * </pre>
     *
     * <code>map&lt;string, .tensorflow.Feature&gt; feature = 1;</code>
     *
     * Returns the mapped value, or {@code defaultValue} when the key is absent.
     * @throws java.lang.NullPointerException if {@code key} is null
     */
    public org.tensorflow.proto.example.Feature getFeatureOrDefault(
        java.lang.String key,
        org.tensorflow.proto.example.Feature defaultValue) {
      if (key == null) { throw new java.lang.NullPointerException(); }
      java.util.Map<java.lang.String, org.tensorflow.proto.example.Feature> map =
          internalGetFeature().getMap();
      return map.containsKey(key) ? map.get(key) : defaultValue;
    }
    /**
     * <pre>
     * Map from feature name to feature.
     * </pre>
     *
     * <code>map&lt;string, .tensorflow.Feature&gt; feature = 1;</code>
     *
     * @throws java.lang.NullPointerException if {@code key} is null
     * @throws java.lang.IllegalArgumentException if the key is not present
     */
    public org.tensorflow.proto.example.Feature getFeatureOrThrow(
        java.lang.String key) {
      if (key == null) { throw new java.lang.NullPointerException(); }
      java.util.Map<java.lang.String, org.tensorflow.proto.example.Feature> map =
          internalGetFeature().getMap();
      if (!map.containsKey(key)) {
        throw new java.lang.IllegalArgumentException();
      }
      return map.get(key);
    }
    /** Removes all entries from the feature map. Returns this builder for chaining. */
    public Builder clearFeature() {
      internalGetMutableFeature().getMutableMap()
          .clear();
      return this;
    }
    /**
     * <pre>
     * Map from feature name to feature.
     * </pre>
     *
     * <code>map&lt;string, .tensorflow.Feature&gt; feature = 1;</code>
     *
     * Removes the entry for {@code key}; no-op when the key is absent.
     * @throws java.lang.NullPointerException if {@code key} is null
     */
    public Builder removeFeature(
        java.lang.String key) {
      if (key == null) { throw new java.lang.NullPointerException(); }
      internalGetMutableFeature().getMutableMap()
          .remove(key);
      return this;
    }
    /**
     * Use alternate mutation accessors instead.
     */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, org.tensorflow.proto.example.Feature>
    getMutableFeature() {
      // Deprecated: exposes the live mutable map directly.
      return internalGetMutableFeature().getMutableMap();
    }
    /**
     * <pre>
     * Map from feature name to feature.
     * </pre>
     *
     * <code>map&lt;string, .tensorflow.Feature&gt; feature = 1;</code>
     *
     * Inserts or replaces the entry for {@code key}.
     * @throws java.lang.NullPointerException if {@code key} or {@code value} is null
     */
    public Builder putFeature(
        java.lang.String key,
        org.tensorflow.proto.example.Feature value) {
      if (key == null) { throw new java.lang.NullPointerException(); }
      if (value == null) { throw new java.lang.NullPointerException(); }
      internalGetMutableFeature().getMutableMap()
          .put(key, value);
      return this;
    }
    /**
     * <pre>
     * Map from feature name to feature.
     * </pre>
     *
     * <code>map&lt;string, .tensorflow.Feature&gt; feature = 1;</code>
     *
     * Copies all entries from {@code values} into the feature map.
     */
    public Builder putAllFeature(
        java.util.Map<java.lang.String, org.tensorflow.proto.example.Feature> values) {
      internalGetMutableFeature().getMutableMap()
          .putAll(values);
      return this;
    }
    // Delegates unknown-field replacement to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // Delegates unknown-field merging to the generated superclass.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:tensorflow.Features)
}
// @@protoc_insertion_point(class_scope:tensorflow.Features)
  // Singleton default (all-fields-unset) instance, created once at class load.
  private static final org.tensorflow.proto.example.Features DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.tensorflow.proto.example.Features();
  }
  /** Returns the shared immutable default instance of {@code Features}. */
  public static org.tensorflow.proto.example.Features getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser: constructs a new Features directly from the wire format.
  private static final com.google.protobuf.Parser<Features>
      PARSER = new com.google.protobuf.AbstractParser<Features>() {
    @java.lang.Override
    public Features parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new Features(input, extensionRegistry);
    }
  };
  /** Static accessor for the shared {@link com.google.protobuf.Parser}. */
  public static com.google.protobuf.Parser<Features> parser() {
    return PARSER;
  }
  // Instance-level accessor for the same shared parser.
  @java.lang.Override
  public com.google.protobuf.Parser<Features> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor for the shared default instance.
  @java.lang.Override
  public org.tensorflow.proto.example.Features getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deephacks.tools4j.config.internal.core.xml;
import static org.deephacks.tools4j.config.model.Events.CFG202_XML_SCHEMA_FILE_MISSING;
import static org.deephacks.tools4j.config.model.Events.CFG301_MISSING_RUNTIME_REF;
import static org.deephacks.tools4j.config.model.Events.CFG302_CANNOT_DELETE_BEAN;
import static org.deephacks.tools4j.config.model.Events.CFG303_BEAN_ALREADY_EXIST;
import static org.deephacks.tools4j.config.model.Events.CFG304_BEAN_DOESNT_EXIST;
import static org.deephacks.tools4j.config.model.Events.CFG307_SINGELTON_REMOVAL;
import static org.deephacks.tools4j.config.model.Events.CFG308_SINGELTON_CREATION;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.PropertyException;
import javax.xml.bind.Unmarshaller;
import org.deephacks.tools4j.config.internal.core.xml.XmlBeanAdapter.XmlBeans;
import org.deephacks.tools4j.config.model.Bean;
import org.deephacks.tools4j.config.model.Bean.BeanId;
import org.deephacks.tools4j.config.model.Events;
import org.deephacks.tools4j.config.spi.BeanManager;
import org.deephacks.tools4j.support.ServiceProvider;
import org.deephacks.tools4j.support.SystemProperties;
import org.deephacks.tools4j.support.event.AbortRuntimeException;
import com.google.common.io.Files;
/**
 * XmlBeanManager is responsible for storing config bean instances in
 * XML format.
 *
 * <p>Beans are persisted to a single XML file ({@value #XML_BEAN_FILE_NAME}) in the
 * directory named by the {@value #XML_BEAN_FILE_STORAGE_DIR_PROP} property, falling
 * back to {@code java.io.tmpdir} when unset. Every operation reads the whole file,
 * mutates an in-memory copy and writes the whole file back; there is no partial
 * update and no locking, so concurrent writers may lose updates —
 * NOTE(review): confirm callers serialize access.
 */
@ServiceProvider(service = BeanManager.class)
public class XmlBeanManager extends BeanManager {
    public static final String XML_BEAN_FILE_STORAGE_DIR_PROP = "config.spi.bean.xml.dir";
    public static final String XML_BEAN_FILE_NAME = "bean.xml";
    private static final SystemProperties PROP = SystemProperties.createDefault();
    private static final long serialVersionUID = -4292817727054404604L;

    /**
     * Returns the bean identified by {@code id} with all of its references resolved.
     *
     * @throws AbortRuntimeException CFG304 if the bean does not exist,
     *         CFG301 if one of its references cannot be resolved.
     */
    @Override
    public Bean getEager(BeanId id) {
        List<Bean> all = readValues();
        Bean bean = getEagerly(id, all);
        if (bean == null) {
            throw CFG304_BEAN_DOESNT_EXIST(id);
        }
        return bean;
    }

    /**
     * Looks up {@code id} in {@code all} and, when found, initializes every
     * reference of the bean (recursively, via {@link #getBean(BeanId, List)}).
     * Returns null when the bean is not present.
     */
    private Bean getEagerly(BeanId id, List<Bean> all) {
        Bean result = null;
        for (Bean b : all) {
            if (b.getId().equals(id)) {
                result = b;
                break;
            }
        }
        if (result == null) {
            return null;
        }
        // bean found, initialize references.
        for (String name : result.getReferenceNames()) {
            for (BeanId refId : result.getReference(name)) {
                Bean b = getBean(refId, all);
                if (b == null) {
                    throw CFG301_MISSING_RUNTIME_REF(result.getId(), refId);
                }
                refId.setBean(b);
            }
        }
        return result;
    }

    /**
     * Returns the bean identified by {@code id} WITHOUT resolving its references.
     *
     * @throws AbortRuntimeException CFG304 if the bean does not exist.
     */
    @Override
    public Bean getLazy(BeanId id) throws AbortRuntimeException {
        List<Bean> all = readValues();
        for (Bean b : all) {
            if (b.getId().equals(id)) {
                return b;
            }
        }
        throw CFG304_BEAN_DOESNT_EXIST(id);
    }

    /**
     * No extra validation context is provided by this manager; always returns an
     * empty map.
     */
    @Override
    public Map<BeanId, Bean> getBeanToValidate(Bean bean) throws AbortRuntimeException {
        return new HashMap<BeanId, Bean>();
    }

    /**
     * Returns the singleton bean of {@code schemaName}, eagerly initialized, or
     * null when no bean of that schema exists.
     *
     * @throws IllegalArgumentException if a bean of the schema exists but the
     *         schema is not a singleton.
     */
    @Override
    public Bean getSingleton(String schemaName) throws IllegalArgumentException {
        List<Bean> all = readValues();
        for (Bean bean : all) {
            if (bean.getId().getSchemaName().equals(schemaName)) {
                if (!bean.getId().isSingleton()) {
                    throw new IllegalArgumentException("Schema [" + schemaName
                            + "] is not a singleton.");
                }
                BeanId singletonId = bean.getId();
                Bean singleton = getEagerly(singletonId, all);
                if (singleton == null) {
                    throw CFG304_BEAN_DOESNT_EXIST(singletonId);
                }
                return singleton;
            }
        }
        return null;
    }

    /** Resolves {@code ref} from {@code all}, recursing to initialize its own references. */
    private Bean getBean(BeanId ref, List<Bean> all) {
        for (Bean bean : all) {
            if (ref.equals(bean.getId())) {
                // recurse down the tree.
                return getEagerly(ref, all);
            }
        }
        return null;
    }

    /** Returns all beans of schema {@code name}, each eagerly initialized. */
    @Override
    public Map<BeanId, Bean> list(String name) {
        List<Bean> all = readValues();
        Map<BeanId, Bean> result = new HashMap<BeanId, Bean>();
        for (Bean b : all) {
            if (b.getId().getSchemaName().equals(name)) {
                Bean bean = getEagerly(b.getId(), all);
                result.put(bean.getId(), bean);
            }
        }
        return result;
    }

    /**
     * Creates a single bean.
     *
     * @throws AbortRuntimeException CFG301 on missing references, CFG303 when the
     *         id already exists, CFG308 when a singleton of the schema exists.
     */
    @Override
    public void create(Bean bean) {
        Map<BeanId, Bean> values = readValuesAsMap();
        checkReferencesExist(bean, values);
        checkCreateSingleton(bean, values);
        checkUniqueness(bean, values);
        values.put(bean.getId(), bean);
        writeValues(values);
    }

    /**
     * Creates a set of beans atomically with respect to the file: all beans are
     * added to the in-memory map before references are validated, so beans in
     * the set may reference each other.
     */
    @Override
    public void create(Collection<Bean> set) {
        Map<BeanId, Bean> beans = readValuesAsMap();
        // first check uniqueness towards storage
        for (Bean bean : set) {
            checkUniqueness(bean, beans);
            checkCreateSingleton(bean, beans);
        }
        // TODO: check that provided beans are unique among themselves.
        // references may not exist in storage, but are provided
        // as part of the transactions, so add them before validating references.
        for (Bean bean : set) {
            beans.put(bean.getId(), bean);
        }
        for (Bean bean : set) {
            checkReferencesExist(bean, beans);
        }
        writeValues(beans);
    }

    /** Creates the singleton bean; silently returns when it already exists. */
    @Override
    public void createSingleton(BeanId singleton) {
        Map<BeanId, Bean> values = readValuesAsMap();
        Bean bean = Bean.create(singleton);
        try {
            checkUniqueness(bean, values);
        } catch (AbortRuntimeException e) {
            // singleton already exists: ignore and return silently.
            return;
        }
        values.put(singleton, bean);
        writeValues(values);
    }

    /**
     * Replaces an existing bean entirely.
     *
     * @throws AbortRuntimeException CFG304 when the bean does not exist,
     *         CFG301 on missing references.
     */
    @Override
    public void set(Bean bean) {
        Map<BeanId, Bean> values = readValuesAsMap();
        Bean existing = values.get(bean.getId());
        if (existing == null) {
            throw CFG304_BEAN_DOESNT_EXIST(bean.getId());
        }
        checkReferencesExist(bean, values);
        checkInstanceExist(bean, values);
        values.put(bean.getId(), bean);
        writeValues(values);
    }

    /** Replaces a set of existing beans; beans in the set may reference each other. */
    @Override
    public void set(Collection<Bean> set) {
        Map<BeanId, Bean> beans = readValuesAsMap();
        // TODO: check that provided beans are unique among themselves.
        // references may not exist in storage, but are provided
        // as part of the transactions, so add them before validating references.
        for (Bean bean : set) {
            Bean existing = beans.get(bean.getId());
            if (existing == null) {
                throw CFG304_BEAN_DOESNT_EXIST(bean.getId());
            }
            beans.put(bean.getId(), bean);
        }
        for (Bean bean : set) {
            checkReferencesExist(bean, beans);
        }
        writeValues(beans);
    }

    /**
     * Merges {@code bean} into the stored bean with the same id: provided
     * properties overwrite, null/empty properties remove.
     *
     * @throws AbortRuntimeException CFG304 when the bean does not exist.
     */
    @Override
    public void merge(Bean bean) {
        Map<BeanId, Bean> beans = readValuesAsMap();
        Bean b = beans.get(bean.getId());
        if (b == null) {
            throw CFG304_BEAN_DOESNT_EXIST(bean.getId());
        }
        replace(b, bean, beans);
        writeValues(beans);
    }

    /** Merges each bean in the collection; see {@link #merge(Bean)}. */
    @Override
    public void merge(Collection<Bean> bean) {
        Map<BeanId, Bean> beans = readValuesAsMap();
        for (Bean replace : bean) {
            Bean target = beans.get(replace.getId());
            if (target == null) {
                throw Events.CFG304_BEAN_DOESNT_EXIST(replace.getId());
            }
            replace(target, replace, beans);
        }
        writeValues(beans);
    }

    /**
     * Applies {@code replace} on top of {@code target}: set properties/references
     * overwrite, null or empty ones reset the corresponding property on target.
     */
    private void replace(Bean target, Bean replace, Map<BeanId, Bean> all) {
        if (target == null) {
            // bean did not exist in storage, create it.
            target = replace;
        }
        checkReferencesExist(replace, all);
        for (String name : replace.getPropertyNames()) {
            List<String> values = replace.getValues(name);
            if (values == null || values.size() == 0) {
                // null/empty indicates a remove/reset-to-default op
                target.remove(name);
            } else {
                target.setProperty(name, replace.getValues(name));
            }
        }
        for (String name : replace.getReferenceNames()) {
            List<BeanId> values = replace.getReference(name);
            if (values == null || values.size() == 0) {
                // null/empty indicates a remove/reset-to-default op
                target.remove(name);
            } else {
                target.setReferences(name, values);
            }
        }
    }

    /**
     * Deletes a bean.
     *
     * @throws AbortRuntimeException CFG302 when other beans reference it,
     *         CFG307 when it is a singleton.
     */
    @Override
    public void delete(BeanId id) {
        Map<BeanId, Bean> beans = readValuesAsMap();
        checkNoReferencesExist(id, beans);
        checkDeleteSingleton(beans.get(id));
        beans.remove(id);
        writeValues(beans);
    }

    /** Deletes the given instances of a schema; same constraints as {@link #delete(BeanId)}. */
    @Override
    public void delete(String schemaName, Collection<String> instanceIds) {
        Map<BeanId, Bean> beans = readValuesAsMap();
        for (String instance : instanceIds) {
            BeanId id = BeanId.create(instance, schemaName);
            checkDeleteSingleton(beans.get(id));
            checkNoReferencesExist(id, beans);
            beans.remove(id);
        }
        writeValues(beans);
    }

    /**
     * Reads all beans from the XML storage file, creating an empty storage file
     * on first use. The input stream is always closed.
     */
    private List<Bean> readValues() {
        String dirValue = PROP.get(XML_BEAN_FILE_STORAGE_DIR_PROP);
        if (dirValue == null || "".equals(dirValue)) {
            dirValue = System.getProperty("java.io.tmpdir");
        }
        File file = new File(new File(dirValue), XML_BEAN_FILE_NAME);
        FileInputStream in = null;
        try {
            if (!file.exists()) {
                Files.write("<bean-xml></bean-xml>", file, Charset.defaultCharset());
            }
            in = new FileInputStream(file);
            JAXBContext context = JAXBContext.newInstance(XmlBeans.class);
            Unmarshaller unmarshaller = context.createUnmarshaller();
            XmlBeans beans = (XmlBeans) unmarshaller.unmarshal(in);
            return beans.getBeans();
        } catch (JAXBException e) {
            throw new RuntimeException(e);
        } catch (FileNotFoundException e) {
            throw CFG202_XML_SCHEMA_FILE_MISSING(file);
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            // Fix: the stream was previously never closed (file handle leak).
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignored) {
                    // best-effort close; the read itself already succeeded or thrown.
                }
            }
        }
    }

    /** Reads all beans and indexes them by id. */
    private Map<BeanId, Bean> readValuesAsMap() {
        List<Bean> beans = readValues();
        Map<BeanId, Bean> map = new HashMap<BeanId, Bean>();
        for (Bean bean : beans) {
            map.put(bean.getId(), bean);
        }
        return map;
    }

    private void writeValues(Map<BeanId, Bean> map) {
        writeValues(new ArrayList<Bean>(map.values()));
    }

    /** Marshals all beans to the XML storage file, replacing its previous content. */
    private void writeValues(List<Bean> beans) {
        String dirValue = PROP.get(XML_BEAN_FILE_STORAGE_DIR_PROP);
        if (dirValue == null || "".equals(dirValue)) {
            dirValue = System.getProperty("java.io.tmpdir");
        }
        File dir = new File(dirValue);
        // Fix: previously called dir.createNewFile(), which creates a *file* at the
        // directory path and would make the PrintWriter below fail. mkdirs() creates
        // the directory (and any missing parents).
        if (!dir.exists() && !dir.mkdirs()) {
            throw new RuntimeException("Unable to create storage directory [" + dir + "].");
        }
        File file = new File(dir, XML_BEAN_FILE_NAME);
        PrintWriter pw = null;
        try {
            XmlBeans xmlbeans = new XmlBeans(beans);
            pw = new PrintWriter(file, "UTF-8");
            JAXBContext context = JAXBContext.newInstance(XmlBeans.class);
            Marshaller marshaller = context.createMarshaller();
            marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
            marshaller.marshal(xmlbeans, pw);
        } catch (PropertyException e) {
            throw new RuntimeException(e);
        } catch (JAXBException e) {
            throw new RuntimeException(e);
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            if (pw != null) {
                pw.flush();
                pw.close();
            }
        }
    }

    /**
     * Fails with CFG302 when any stored bean still references the bean about to
     * be deleted.
     */
    private static void checkNoReferencesExist(BeanId deleted, Map<BeanId, Bean> storage) {
        Collection<BeanId> hasReferences = new ArrayList<BeanId>();
        for (Bean b : storage.values()) {
            if (hasReferences(b, deleted)) {
                hasReferences.add(b.getId());
            }
        }
        if (hasReferences.size() > 0) {
            throw CFG302_CANNOT_DELETE_BEAN(Arrays.asList(deleted));
        }
    }

    /**
     * Fails with CFG301 when the bean references ids not present in storage.
     * References without an instance id are skipped.
     */
    private static void checkReferencesExist(final Bean bean, final Map<BeanId, Bean> storage) {
        ArrayList<BeanId> allRefs = new ArrayList<BeanId>();
        for (String name : bean.getReferenceNames()) {
            if (bean.getReference(name) == null) {
                // the reference is about to be removed.
                continue;
            }
            for (BeanId beanId : bean.getReference(name)) {
                allRefs.add(beanId);
            }
        }
        Collection<BeanId> missingReferences = new ArrayList<BeanId>();
        for (BeanId beanId : allRefs) {
            if (beanId.getInstanceId() == null) {
                continue;
            }
            Bean b = storage.get(beanId);
            if (b == null) {
                missingReferences.add(beanId);
            }
        }
        if (missingReferences.size() > 0) {
            throw CFG301_MISSING_RUNTIME_REF(bean.getId(), missingReferences);
        }
    }

    /** Fails with CFG304 when the bean's id is not present in storage. */
    private static void checkInstanceExist(Bean bean, Map<BeanId, Bean> storage) {
        for (Bean existingBean : storage.values()) {
            if (existingBean.getId().equals(bean.getId())) {
                return;
            }
        }
        throw CFG304_BEAN_DOESNT_EXIST(bean.getId());
    }

    /** Fails with CFG303 when a bean with the same id already exists in storage. */
    private static void checkUniqueness(Bean bean, Map<BeanId, Bean> storage) {
        for (Bean existing : storage.values()) {
            if (bean.getId().equals(existing.getId())) {
                throw CFG303_BEAN_ALREADY_EXIST(bean.getId());
            }
        }
    }

    /**
     * Fails with CFG308 when a singleton bean of the same schema already exists:
     * singletons may only be created once.
     */
    private static void checkCreateSingleton(Bean bean, Map<BeanId, Bean> storage) {
        for (Bean b : storage.values()) {
            if (bean.getId().getSchemaName().equals(b.getId().getSchemaName())) {
                if (b.getId().isSingleton()) {
                    throw CFG308_SINGELTON_CREATION(bean.getId());
                }
            }
        }
    }

    /** Fails with CFG307 when attempting to delete a singleton bean. Null is a no-op. */
    private static void checkDeleteSingleton(Bean bean) {
        if (bean == null) {
            return;
        }
        if (bean.getId().isSingleton()) {
            throw CFG307_SINGELTON_REMOVAL(bean.getId());
        }
    }

    /**
     * Returns true when the target bean holds at least one reference to the
     * given bean id.
     */
    private static boolean hasReferences(Bean target, BeanId reference) {
        for (String name : target.getReferenceNames()) {
            for (BeanId ref : target.getReference(name)) {
                if (ref.equals(reference)) {
                    return true;
                }
            }
        }
        return false;
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.indexing;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.module.ModifiableModuleModel;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.projectRoots.ProjectJdkTable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.impl.OrderEntryUtil;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.roots.libraries.LibraryTablesRegistrar;
import com.intellij.openapi.roots.ui.configuration.SdkTestCase;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.testFramework.HeavyPlatformTestCase;
import com.intellij.testFramework.PsiTestUtil;
import com.intellij.util.Function;
import com.intellij.util.ThrowableConsumer;
import com.intellij.util.indexing.roots.IndexableEntityProviderMethods;
import com.intellij.util.indexing.roots.IndexableFilesIterator;
import com.intellij.util.indexing.roots.kind.IndexableSetOrigin;
import com.intellij.workspaceModel.ide.WorkspaceModelChangeListener;
import com.intellij.workspaceModel.ide.WorkspaceModelTopics;
import com.intellij.workspaceModel.storage.EntityChange;
import com.intellij.workspaceModel.ide.impl.legacyBridge.library.LibraryBridge;
import com.intellij.workspaceModel.storage.VersionedStorageChange;
import com.intellij.workspaceModel.storage.bridgeEntities.LibraryId;
import com.intellij.workspaceModel.storage.bridgeEntities.LibraryTableId;
import kotlin.Pair;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
 * Tests that {@code EntityIndexingServiceImpl.getIterators} produces the expected
 * {@link IndexableFilesIterator}s for workspace-model changes: each test creates an
 * entity (module, project/global/module library, SDK), collects the resulting
 * {@link VersionedStorageChange} events, and compares the produced iterators'
 * origins against the expected ones.
 */
public class EntityIndexingServiceTest extends HeavyPlatformTestCase {
  public void testIndexingModule() throws Exception {
    doTest(this::createModuleAndSourceRoot, this::removeModule,
           pair -> IndexableEntityProviderMethods.INSTANCE.createIterators(pair.getFirst(),
                                                                           Collections.singletonList(pair.getSecond())));
  }

  /** Creates a module in a temp dir with one {@code src} source root. */
  @NotNull
  private Pair<Module, VirtualFile> createModuleAndSourceRoot() throws IOException {
    File root = createTempDir("otherModule");
    Module module = createModuleAt("otherModule", getProject(), getModuleType(), root.toPath());
    VirtualFile moduleDir = getOrCreateModuleDir(module);
    VirtualFile src = moduleDir.createChildDirectory(this, "src");
    PsiTestUtil.addSourceRoot(module, src);
    return new Pair<>(module, src);
  }

  /** Disposes the module and deletes its directory on disk. */
  private void removeModule(Pair<Module, VirtualFile> data) throws IOException {
    ModifiableModuleModel modifiableModel = ModuleManager.getInstance(getProject()).getModifiableModel();
    modifiableModel.disposeModule(data.getFirst());
    modifiableModel.commit();
    data.getSecond().getParent().delete(this);
  }

  public void testIndexingProjectLibrary() throws Exception {
    doTest(this::createProjectLibrary, this::removeProjectLibrary, IndexableEntityProviderMethods.INSTANCE::createIterators);
  }

  /** Creates a project-level library and adds it to the module's runtime roots. */
  @NotNull
  private LibraryBridge createProjectLibrary() {
    return (LibraryBridge)createLibrary(LibraryTablesRegistrar.getInstance().getLibraryTable(getProject()));
  }

  /** Creates a library named "lib" in the given table and attaches it to the module. */
  @NotNull
  private Library createLibrary(LibraryTable libraryTable) {
    LibraryTable.ModifiableModel libraryTableModifiableModel = libraryTable.getModifiableModel();
    Library lib = libraryTableModifiableModel.createLibrary("lib");
    libraryTableModifiableModel.commit();
    OrderEntryUtil.addLibraryToRoots(getModule(), lib, DependencyScope.RUNTIME, false);
    return lib;
  }

  private void removeProjectLibrary(LibraryBridge library) {
    removeLibrary(library, LibraryTablesRegistrar.getInstance().getLibraryTable(getProject()));
  }

  /** Detaches the library's order entry from the module, then removes it from its table. */
  private void removeLibrary(Library library, LibraryTable libraryTable) {
    ModifiableRootModel rootModel = ModuleRootManager.getInstance(getModule()).getModifiableModel();
    LibraryOrderEntry libraryOrderEntry = OrderEntryUtil.findLibraryOrderEntry(rootModel, library);
    rootModel.removeOrderEntry(libraryOrderEntry);
    rootModel.commit();
    LibraryTable.ModifiableModel libraryTableModifiableModel = libraryTable.getModifiableModel();
    libraryTableModifiableModel.removeLibrary(library);
    libraryTableModifiableModel.commit();
  }

  public void testIndexingGlobalLibrary() throws Exception {
    doTest(this::createGlobalLibrary, this::removeGlobalLibrary,
           pair -> IndexableEntityProviderMethods.INSTANCE.createIterators(pair.getFirst()));
  }

  /** Creates an application-level (global) library, paired with its workspace-model id. */
  @NotNull
  private Pair<Library, LibraryId> createGlobalLibrary() {
    Library library = createLibrary(LibraryTablesRegistrar.getInstance().getLibraryTable());
    return new Pair<>(library,
                      new LibraryId(library.getName(), new LibraryTableId.GlobalLibraryTableId(library.getTable().getTableLevel())));
  }

  private void removeGlobalLibrary(Pair<Library, LibraryId> libraryPair) {
    removeLibrary(libraryPair.getFirst(), LibraryTablesRegistrar.getInstance().getLibraryTable());
  }

  public void testIndexingModuleLibrary() throws Exception {
    doTest(this::createModuleLibrary, this::removeModuleLibrary, IndexableEntityProviderMethods.INSTANCE::createIterators);
  }

  private void removeModuleLibrary(Library library) {
    ModuleRootManagerEx moduleRootManager = ModuleRootManagerEx.getInstanceEx(getModule());
    ModifiableRootModel model = moduleRootManager.getModifiableModel();
    LibraryTable table = model.getModuleLibraryTable();
    table.removeLibrary(library);
    model.commit();
  }

  /** Creates a module-level library named "lib". */
  @NotNull
  private LibraryBridge createModuleLibrary() {
    ModuleRootManagerEx moduleRootManager = ModuleRootManagerEx.getInstanceEx(getModule());
    ModifiableRootModel model = moduleRootManager.getModifiableModel();
    LibraryTable table = model.getModuleLibraryTable();
    Library lib = table.createLibrary("lib");
    model.commit();
    return (LibraryBridge)lib;
  }

  public void testIndexingSdk() throws Exception {
    doTest(this::createSdk, this::removeSdk, IndexableEntityProviderMethods.INSTANCE::createIterators);
  }

  /** Registers a test SDK and makes it the module's SDK. */
  @NotNull
  private Sdk createSdk() {
    ProjectJdkTable jdkTable = ProjectJdkTable.getInstance();
    Sdk result = jdkTable.createSdk("SDK", SdkTestCase.DependentTestSdkType.INSTANCE);
    jdkTable.addJdk(result);
    ModifiableRootModel model = ModuleRootManager.getInstance(getModule()).getModifiableModel();
    model.setSdk(result);
    model.commit();
    return result;
  }

  private void removeSdk(Sdk sdk) {
    ModifiableRootModel model = ModuleRootManager.getInstance(getModule()).getModifiableModel();
    model.setSdk(null);
    model.commit();
    ProjectJdkTable.getInstance().removeJdk(sdk);
  }

  /**
   * Test driver: subscribes a change listener BEFORE creating the entity (so all
   * resulting workspace-model events are captured), runs {@code generator} in a
   * write action, feeds the collected changes to
   * {@code EntityIndexingServiceImpl.getIterators}, and compares against
   * {@code expectedIteratorsProducer}. The entity is removed in {@code finally}
   * to keep the project clean even on assertion failure; finally the iterators
   * are queued for actual indexing as a smoke test.
   */
  private <T> void doTest(ThrowableComputable<T, Exception> generator,
                          ThrowableConsumer<T, Exception> remover,
                          Function<T, Collection<IndexableFilesIterator>> expectedIteratorsProducer)
    throws Exception {
    MyWorkspaceModelChangeListener listener = new MyWorkspaceModelChangeListener();
    WorkspaceModelTopics.getInstance(getProject())
      .subscribeAfterModuleLoading(getProject().getMessageBus().connect(getTestRootDisposable()), listener);
    T createdEntities = WriteAction.compute(generator);
    List<IndexableFilesIterator> iterators;
    try {
      // Flatten every captured event's changes into a single list.
      List<EntityChange<?>> changes = new ArrayList<>();
      for (VersionedStorageChange event : listener.myEvents) {
        Iterator<EntityChange<?>> iterator = event.getAllChanges().iterator();
        while (iterator.hasNext()) {
          EntityChange<?> next = iterator.next();
          changes.add(next);
        }
      }
      iterators = EntityIndexingServiceImpl.getIterators(getProject(), changes);
      Collection<IndexableFilesIterator> expectedIterators = expectedIteratorsProducer.fun(createdEntities);
      assertSameIterators(iterators, expectedIterators);
    }
    finally {
      WriteAction.run(() -> remover.consume(createdEntities));
    }
    DumbService.getInstance(getProject()).queueTask(new UnindexedFilesUpdater(getProject(), iterators, getTestName(false)));
  }

  /** Iterators are compared by size and by the set of their origins. */
  private static void assertSameIterators(List<IndexableFilesIterator> actualIterators,
                                          Collection<IndexableFilesIterator> expectedIterators) {
    assertEquals(expectedIterators.size(), actualIterators.size());
    Collection<IndexableSetOrigin> expectedOrigins = collectOrigins(expectedIterators);
    Collection<IndexableSetOrigin> actualOrigins = collectOrigins(actualIterators);
    assertSameElements(actualOrigins, expectedOrigins);
  }

  /** Collects each iterator's origin, asserting that no two iterators share one. */
  private static Collection<IndexableSetOrigin> collectOrigins(Collection<IndexableFilesIterator> iterators) {
    Set<IndexableSetOrigin> origins = new HashSet<>();
    for (IndexableFilesIterator iterator : iterators) {
      IndexableSetOrigin origin = iterator.getOrigin();
      assertTrue("Origins should be unique", origins.add(origin));
    }
    return origins;
  }

  /** Records every workspace-model change event for later inspection. */
  private static class MyWorkspaceModelChangeListener implements WorkspaceModelChangeListener {
    final List<VersionedStorageChange> myEvents = new ArrayList<>();

    @Override
    public void beforeChanged(@NotNull VersionedStorageChange event) {
      //ignore
    }

    @Override
    public void changed(@NotNull VersionedStorageChange event) {
      myEvents.add(event);
    }
  }
}
| |
package com.heavyplayer.audioplayerrecorder.service;
import com.heavyplayer.audioplayerrecorder.BuildConfig;
import com.heavyplayer.audioplayerrecorder.widget.AudioRecorderMicrophone;
import com.heavyplayer.audioplayerrecorder.widget.interface_.OnDetachListener;
import android.annotation.SuppressLint;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.media.MediaRecorder;
import android.net.Uri;
import android.os.Binder;
import android.os.Build;
import android.os.Handler;
import android.os.IBinder;
import android.util.Log;
import android.view.View;
public class AudioRecorderService extends Service implements AudioManager.OnAudioFocusChangeListener {
public static final String LOG_TAG = AudioRecorderService.class.getSimpleName();
private final static int UPDATE_INTERVAL_MS = 100;
private final IBinder mBinder = new LocalBinder();
private Handler mHandler;
private AudioRecorderMicrophone mMicrophone;
private MicrophoneAmplitudeUpdater mMicrophoneAmplitudeUpdater = new MicrophoneAmplitudeUpdater();
private AudioRecorderStateListener mStateListener;
private Long mTimeLimit;
private TimeLimitStopper mTimeLimitStopper = new TimeLimitStopper();
private Uri mFileUri;
private MediaRecorder mRecorder;
private boolean mIsRecording;
@Override
public void onCreate() {
mHandler = new Handler();
mIsRecording = false;
if (BuildConfig.DEBUG) {
Log.i(LOG_TAG, "Local service started");
}
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
// We want this service to continue running until it is explicitly stopped, so return sticky.
return START_STICKY;
}
@SuppressLint("InlinedApi")
protected void start(Uri fileUri) {
// If the output file changes, we want to stop the current recording.
if (mFileUri == null || !mFileUri.equals(fileUri)) {
stop();
mFileUri = fileUri;
}
if (!mIsRecording && mFileUri != null) {
gainAudioFocus();
if (mRecorder == null) {
mRecorder = new MediaRecorder();
}
try {
// Configure recorder.
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mRecorder.setOutputFile(mFileUri.getPath());
/* MediaRecorder.AudioEncoder.AAC was hidden in previous versions, but it's 3. */
mRecorder.setAudioEncoder(Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD_MR1 ?
MediaRecorder.AudioEncoder.AAC : 3);
mRecorder.setAudioChannels(1);
mRecorder.setAudioSamplingRate(22050);
mRecorder.setAudioEncodingBitRate(65536);
mRecorder.prepare();
// Start recording.
mRecorder.start();
mIsRecording = true;
scheduleTimeLimitStopper();
updateMicrophoneState();
startMicrophoneUpdater();
if (mStateListener != null) {
mStateListener.onStartRecorder();
}
} catch (Exception e) {
Log.w(LOG_TAG, e);
if (mStateListener != null) {
mStateListener.onStartRecorderFailed(e);
}
}
}
}
protected void stop() {
if (mIsRecording) {
if (mRecorder != null) {
try {
mRecorder.stop();
} catch (Exception e) {
// This can happen, for instance, when stop is called immediately after start.
// We will act like if the stop was successful, since the recording is stopped nonetheless.
Log.w(LOG_TAG, e);
}
mRecorder.reset();
mIsRecording = false;
removeTimeLimitStopper();
updateMicrophoneState();
if (mStateListener != null) {
mStateListener.onStopRecorder();
}
}
abandonAudioFocus();
}
}
protected void destroy() {
stop();
if (mRecorder != null) {
mRecorder.release();
mRecorder = null;
}
}
@Override
public void onDestroy() {
destroy();
if (BuildConfig.DEBUG) {
Log.i(LOG_TAG, "Local service stopped");
}
}
protected void gainAudioFocus() {
final int durationHint;
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
durationHint = AudioManager.AUDIOFOCUS_GAIN_TRANSIENT;
} else {
// Request audio focus for recording without being disturbed by system sounds.
durationHint = AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE;
}
final AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
audioManager.requestAudioFocus(
this,
AudioManager.STREAM_MUSIC,
durationHint);
}
protected void abandonAudioFocus() {
// Abandon audio focus when the recording complete.
final AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
audioManager.abandonAudioFocus(this);
}
@Override
public void onAudioFocusChange(int focusChange) {
// Do nothing.
}
protected void scheduleTimeLimitStopper() {
if (mTimeLimit != null) {
mHandler.postDelayed(mTimeLimitStopper, mTimeLimit);
}
}
protected void removeTimeLimitStopper() {
mHandler.removeCallbacks(mTimeLimitStopper);
}
protected void updateMicrophoneState() {
if (mMicrophone != null) {
mMicrophone.setSelected(mIsRecording);
if (!mIsRecording) {
mMicrophone.updateAmplitude(0, UPDATE_INTERVAL_MS * 3);
}
}
}
protected void startMicrophoneUpdater() {
// Star updating microphones amplitude.
mHandler.removeCallbacks(mMicrophoneAmplitudeUpdater);
mHandler.post(mMicrophoneAmplitudeUpdater);
}
private class MicrophoneAmplitudeUpdater implements Runnable {
@Override
public void run() {
if (mIsRecording && mRecorder != null && mMicrophone != null) {
final int amplitude = mRecorder.getMaxAmplitude();
mMicrophone.updateAmplitude(amplitude, UPDATE_INTERVAL_MS);
// Post animation runnable to update the animation.
mHandler.postDelayed(mMicrophoneAmplitudeUpdater, UPDATE_INTERVAL_MS);
}
}
}
/**
* Stops the recorder if the time limit is reached.
*/
public class TimeLimitStopper implements Runnable {
@Override
public void run() {
stop();
if (mStateListener != null) {
mStateListener.onTimeLimitExceeded();
}
}
}
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
/**
 * Binder exposing recorder control (start/stop/destroy, time limit, state
 * query) to in-process clients.
 */
public class LocalBinder extends Binder {
    /**
     * Attaches a microphone view and a state listener to the service and
     * starts pushing amplitude updates to the view.
     *
     * @param microphone the view that visualizes recording amplitude
     * @param listener   receives recorder lifecycle callbacks
     */
    public void register(AudioRecorderMicrophone microphone, AudioRecorderStateListener listener) {
        mMicrophone = microphone;
        mStateListener = listener;
        // Configure microphone state.
        microphone.setSelected(mIsRecording);
        // Drop our reference when the view leaves the window so we do not
        // keep updating (and leaking) a detached view.
        microphone.setOnDetachListener(new OnDetachListener() {
            @Override
            public void onDetachedFromWindow(View v) {
                if (mMicrophone == v) {
                    mMicrophone = null;
                }
            }
            @Override
            public void onStartTemporaryDetach(View v) {
            }
        });
        // Start microphone update.
        startMicrophoneUpdater();
    }
    /**
     * Time limit will apply the next time you call {@link #startRecorder(android.net.Uri)}.
     */
    public void setTimeLimit(long timeLimit) {
        mTimeLimit = timeLimit;
    }
    /** Starts recording to the given output file. */
    public void startRecorder(Uri fileUri) {
        start(fileUri);
    }
    /** Stops the current recording, if any. */
    public void stopRecorder() {
        stop();
    }
    /** Releases the recorder and its resources. */
    public void destroyRecorder() {
        destroy();
    }
    /** @return whether a recording is currently in progress */
    public boolean isRecording() {
        return mIsRecording;
    }
}
/**
 * Callbacks for observing recorder lifecycle events.
 */
public interface AudioRecorderStateListener {
    /** Called when recording has started successfully. */
    void onStartRecorder();
    /** Called when recording could not be started. */
    void onStartRecorderFailed(Exception e);
    /** Called when recording has stopped. */
    void onStopRecorder();
    /** Called when recording was stopped because the time limit elapsed. */
    void onTimeLimitExceeded();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.seqno;
import com.carrotsearch.hppc.ObjectLongHashMap;
import com.carrotsearch.hppc.ObjectLongMap;
import com.carrotsearch.hppc.cursors.ObjectLongCursor;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.ShardId;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import static org.elasticsearch.index.seqno.SequenceNumbersService.UNASSIGNED_SEQ_NO;
/**
* This class is responsible of tracking the global checkpoint. The global checkpoint is the highest sequence number for which all lower (or
* equal) sequence number have been processed on all shards that are currently active. Since shards count as "active" when the master starts
* them, and before this primary shard has been notified of this fact, we also include shards that have completed recovery. These shards
* have received all old operations via the recovery mechanism and are kept up to date by the various replications actions. The set of
* shards that are taken into account for the global checkpoint calculation are called the "in-sync shards".
* <p>
* The global checkpoint is maintained by the primary shard and is replicated to all the replicas (via {@link GlobalCheckpointSyncAction}).
*/
public class GlobalCheckpointTracker extends AbstractIndexShardComponent {

    /*
     * This map holds the last known local checkpoint for every active shard and initializing shard copy that has been brought up to speed
     * through recovery. These shards are treated as valid copies and participate in determining the global checkpoint. This map is keyed
     * by allocation IDs. All accesses to this map are guarded by a lock on this.
     */
    private final ObjectLongMap<String> inSyncLocalCheckpoints;

    /*
     * This set holds the last set of known valid allocation ids as received by the master. This is important to make sure shards that are
     * failed or relocated are cleaned up from {@link #inSyncLocalCheckpoints} and do not hold the global checkpoint back. All accesses to
     * this set are guarded by a lock on this.
     */
    private final Set<String> assignedAllocationIds;

    /*
     * The current global checkpoint for this shard. Note that this field is guarded by a lock on this and thus this field does not need to
     * be volatile.
     */
    private long globalCheckpoint;

    /**
     * Initialize the global checkpoint service. The specified global checkpoint should be set to the last known global checkpoint, or
     * {@link SequenceNumbersService#UNASSIGNED_SEQ_NO}.
     *
     * @param shardId          the shard ID
     * @param indexSettings    the index settings
     * @param globalCheckpoint the last known global checkpoint for this shard, or {@link SequenceNumbersService#UNASSIGNED_SEQ_NO}
     */
    GlobalCheckpointTracker(final ShardId shardId, final IndexSettings indexSettings, final long globalCheckpoint) {
        super(shardId, indexSettings);
        assert globalCheckpoint >= UNASSIGNED_SEQ_NO : "illegal initial global checkpoint: " + globalCheckpoint;
        // presize for the primary plus its replicas
        inSyncLocalCheckpoints = new ObjectLongHashMap<>(1 + indexSettings.getNumberOfReplicas());
        assignedAllocationIds = new HashSet<>(1 + indexSettings.getNumberOfReplicas());
        this.globalCheckpoint = globalCheckpoint;
    }

    /**
     * Notifies the service to update the local checkpoint for the shard with the provided allocation ID. If the checkpoint is lower than
     * the currently known one, this is a no-op. If the allocation ID is not in sync, it is ignored. This is to prevent late arrivals from
     * shards that are removed to be re-added.
     *
     * @param allocationId the allocation ID of the shard to update the local checkpoint for
     * @param checkpoint   the local checkpoint for the shard
     */
    public synchronized void updateLocalCheckpoint(final String allocationId, final long checkpoint) {
        final int indexOfKey = inSyncLocalCheckpoints.indexOf(allocationId);
        if (indexOfKey >= 0) {
            final long current = inSyncLocalCheckpoints.indexGet(indexOfKey);
            if (current < checkpoint) {
                inSyncLocalCheckpoints.indexReplace(indexOfKey, checkpoint);
                if (logger.isTraceEnabled()) {
                    logger.trace("updated local checkpoint of [{}] to [{}] (was [{}])", allocationId, checkpoint, current);
                }
            } else {
                // fixed: the message previously contained a fourth "type [{}]" placeholder that was fed a duplicate of allocationId
                logger.trace(
                    "skipping update of local checkpoint of [{}], current checkpoint is higher (current [{}], incoming [{}])",
                    allocationId,
                    current,
                    checkpoint);
            }
        } else {
            logger.trace("[{}] isn't marked as in sync. ignoring local checkpoint of [{}].", allocationId, checkpoint);
        }
    }

    /**
     * Scans through the currently known local checkpoints and updates the global checkpoint accordingly.
     *
     * @return {@code true} if the checkpoint has been updated or if it can not be updated since one of the local checkpoints of one of the
     * active allocations is not known.
     */
    synchronized boolean updateCheckpointOnPrimary() {
        long minCheckpoint = Long.MAX_VALUE;
        if (inSyncLocalCheckpoints.isEmpty()) {
            return false;
        }
        for (final ObjectLongCursor<String> cp : inSyncLocalCheckpoints) {
            if (cp.value == UNASSIGNED_SEQ_NO) {
                logger.trace("unknown local checkpoint for active allocationId [{}], requesting a sync", cp.key);
                return true;
            }
            minCheckpoint = Math.min(cp.value, minCheckpoint);
        }
        if (minCheckpoint < globalCheckpoint) {
            // the global checkpoint is monotonically increasing; a lower minimum indicates a bug somewhere
            final String message =
                String.format(Locale.ROOT, "new global checkpoint [%d] is lower than previous one [%d]", minCheckpoint, globalCheckpoint);
            throw new IllegalStateException(message);
        }
        if (globalCheckpoint != minCheckpoint) {
            logger.trace("global checkpoint updated to [{}]", minCheckpoint);
            globalCheckpoint = minCheckpoint;
            return true;
        }
        return false;
    }

    /**
     * Returns the global checkpoint for the shard.
     *
     * @return the global checkpoint
     */
    public synchronized long getCheckpoint() {
        return globalCheckpoint;
    }

    /**
     * Updates the global checkpoint on a replica shard after it has been updated by the primary.
     *
     * @param checkpoint the global checkpoint
     */
    synchronized void updateCheckpointOnReplica(final long checkpoint) {
        /*
         * The global checkpoint here is a local knowledge which is updated under the mandate of the primary. It can happen that the primary
         * information is lagging compared to a replica (e.g., if a replica is promoted to primary but has stale info relative to other
         * replica shards). In these cases, the local knowledge of the global checkpoint could be higher than the sync from the lagging
         * primary.
         */
        if (this.globalCheckpoint <= checkpoint) {
            this.globalCheckpoint = checkpoint;
            logger.trace("global checkpoint updated from primary to [{}]", checkpoint);
        }
    }

    /**
     * Notifies the service of the current allocation IDs in the cluster state. This method trims any shards that have been removed.
     *
     * @param activeAllocationIds       the allocation IDs of the currently active shard copies
     * @param initializingAllocationIds the allocation IDs of the currently initializing shard copies
     */
    public synchronized void updateAllocationIdsFromMaster(final Set<String> activeAllocationIds,
                                                           final Set<String> initializingAllocationIds) {
        // drop IDs the master no longer knows about, then add the current ones
        assignedAllocationIds.removeIf(
            aId -> activeAllocationIds.contains(aId) == false && initializingAllocationIds.contains(aId) == false);
        assignedAllocationIds.addAll(activeAllocationIds);
        assignedAllocationIds.addAll(initializingAllocationIds);
        for (String activeId : activeAllocationIds) {
            if (inSyncLocalCheckpoints.containsKey(activeId) == false) {
                inSyncLocalCheckpoints.put(activeId, UNASSIGNED_SEQ_NO);
            }
        }
        inSyncLocalCheckpoints.removeAll(key -> assignedAllocationIds.contains(key) == false);
    }

    /**
     * Marks the shard with the provided allocation ID as in-sync with the primary shard. This should be called at the end of recovery
     * where the primary knows all operations below the global checkpoint have been completed on this shard.
     *
     * @param allocationId the allocation ID of the shard to mark as in-sync
     */
    public synchronized void markAllocationIdAsInSync(final String allocationId) {
        if (assignedAllocationIds.contains(allocationId) == false) {
            // master has removed this allocation, ignore
            return;
        }
        logger.trace("marked [{}] as in sync", allocationId);
        inSyncLocalCheckpoints.put(allocationId, UNASSIGNED_SEQ_NO);
    }

    /**
     * Returns the local checkpoint for the shard with the specified allocation ID, or {@link SequenceNumbersService#UNASSIGNED_SEQ_NO} if
     * the shard is not in-sync.
     *
     * @param allocationId the allocation ID of the shard to obtain the local checkpoint for
     * @return the local checkpoint, or {@link SequenceNumbersService#UNASSIGNED_SEQ_NO}
     */
    synchronized long getLocalCheckpointForAllocationId(final String allocationId) {
        if (inSyncLocalCheckpoints.containsKey(allocationId)) {
            return inSyncLocalCheckpoints.get(allocationId);
        }
        return UNASSIGNED_SEQ_NO;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.test.rowSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.math.BigDecimal;
import java.math.MathContext;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import org.apache.drill.exec.physical.rowSet.RowSet;
import org.apache.drill.exec.physical.rowSet.RowSetReader;
import org.apache.drill.shaded.guava.com.google.common.base.Optional;
import org.apache.drill.shaded.guava.com.google.common.collect.HashMultiset;
import org.apache.drill.shaded.guava.com.google.common.collect.Multiset;
import org.apache.drill.exec.vector.accessor.ArrayReader;
import org.apache.drill.exec.vector.accessor.ObjectReader;
import org.apache.drill.exec.vector.accessor.ScalarReader;
import org.apache.drill.exec.vector.accessor.TupleReader;
import org.apache.drill.exec.vector.accessor.VariantReader;
import org.bouncycastle.util.Arrays;
import org.junit.Assert;
/**
* For testing, compare the contents of two row sets (record batches)
* to verify that they are identical. Supports masks to exclude certain
* columns from comparison.
* <p>
* Drill rows are analogous to JSON documents: they can have scalars,
* arrays and maps, with maps and lists holding maps, arrays and scalars.
* This class walks the row structure tree to compare each structure
* of two row sets checking counts, types and values to ensure that the
* "actual" result set (result of a test) matches the "expected" result
* set.
* <p>
* This class acts as an example of how to use the suite of reader
* abstractions.
*/
public class RowSetComparison {

  /**
   * Row set with the expected outcome of a test. This is the "golden"
   * copy defined in the test itself.
   */
  private final RowSet expected;

  /**
   * Some tests wish to ignore certain (top-level) columns. If a
   * mask is provided, then only those columns with a <tt>true</tt>
   * will be verified.
   */
  private final boolean[] mask;

  /**
   * Floats and doubles do not compare exactly. This MathContext is used
   * to construct BigDecimals of the desired precision.
   */
  private MathContext scale = new MathContext(3);

  /**
   * Floats and doubles do not compare exactly. This delta is used
   * by JUnit for such comparisons. This is not a general solution;
   * it assumes that tests won't create values that require more than
   * three digits of precision.
   */
  private final double delta = 0.001;

  /**
   * Tests can skip the first n rows.
   */
  private int offset;

  /**
   * Number of rows to compare; -1 means "all remaining rows".
   */
  private int span = -1;

  public RowSetComparison(RowSet expected) {
    this.expected = expected;
    // TODO: The mask only works at the top level presently
    mask = new boolean[expected.schema().size()];
    java.util.Arrays.fill(mask, true);
  }

  /**
   * Mark a specific column as excluded from comparisons.
   * @param colNo the index of the column to exclude
   * @return this builder
   */
  public RowSetComparison exclude(int colNo) {
    mask[colNo] = false;
    return this;
  }

  /**
   * Specifies a "selection" mask that determines which columns
   * to compare. Columns marked as "false" are omitted from the
   * comparison.
   *
   * @param flags variable-length list of column flags
   * @return this builder
   */
  public RowSetComparison withMask(Boolean...flags) {
    for (int i = 0; i < flags.length; i++) {
      mask[i] = flags[i];
    }
    return this;
  }

  /**
   * Specify the precision to use when comparing float or
   * double values.
   *
   * @param scale the precision to use for comparing floats and doubles. See {@link BigDecimal#scale()} for
   * a definition of scale.
   * @return this builder
   */
  public RowSetComparison withScale(int scale) {
    this.scale = new MathContext(scale);
    return this;
  }

  /**
   * Specify an offset into the row sets to start the comparison.
   * Usually combined with {@link #span(int)}.
   *
   * @param offset offset into the row set to start the comparison
   * @return this builder
   */
  public RowSetComparison offset(int offset) {
    this.offset = offset;
    return this;
  }

  /**
   * Specify a subset of rows to compare. Usually combined
   * with {@link #offset(int)}.
   *
   * @param span the number of rows to compare
   * @return this builder
   */
  public RowSetComparison span(int span) {
    this.span = span;
    return this;
  }

  /**
   * Asserts that the actual schema matches the expected one and that both
   * row sets contain at least {@code offset + span} rows.
   */
  private void compareSchemasAndCounts(RowSet actual) {
    if (!expected.schema().isEquivalent(actual.schema())) {
      // Avoid building the error string on every comparison,
      // only build on failures.
      fail("Schemas don't match.\n" +
           "Expected: " + expected.schema().toString() +
           "\nActual: " + actual.schema().toString());
    }
    int testLength = getTestLength();
    int dataLength = offset + testLength;
    assertTrue("Missing expected rows", expected.rowCount() >= dataLength);
    assertTrue("Missing actual rows", actual.rowCount() >= dataLength);
  }

  /** @return the number of rows to compare, honoring {@link #span(int)} */
  private int getTestLength() {
    return span > -1 ? span : expected.rowCount() - offset;
  }

  /**
   * Verifies that the actual rows match the expected rows ignoring order,
   * by comparing multisets of (top-level, unmasked, scalar) row values.
   *
   * @param actual the actual results to verify
   */
  public void unorderedVerify(RowSet actual) {
    compareSchemasAndCounts(actual);
    int testLength = getTestLength();

    RowSetReader er = expected.reader();
    RowSetReader ar = actual.reader();

    // Skip the rows before the comparison window.
    for (int i = 0; i < offset; i++) {
      er.next();
      ar.next();
    }

    final Multiset<List<Object>> expectedSet = HashMultiset.create();
    final Multiset<List<Object>> actualSet = HashMultiset.create();

    for (int rowCounter = 0; rowCounter < testLength; rowCounter++) {
      er.next();
      ar.next();
      expectedSet.add(buildRow(er));
      actualSet.add(buildRow(ar));
    }

    Assert.assertEquals(expectedSet, actualSet);
  }

  /**
   * Verifies the actual results, then frees memory
   * for both the expected and actual result sets.
   * @param actual the actual results to verify
   */
  public void unorderedVerifyAndClearAll(RowSet actual) {
    try {
      unorderedVerify(actual);
    } finally {
      expected.clear();
      actual.clear();
    }
  }

  /**
   * Builds a comparable representation of the current row: one entry per
   * unmasked top-level scalar column.
   */
  private List<Object> buildRow(RowSetReader reader) {
    final List<Object> row = new ArrayList<>();

    for (int i = 0; i < mask.length; i++) {
      if (!mask[i]) {
        continue;
      }
      final ScalarReader scalarReader = reader.column(i).scalar();
      final Object value = getScalar(scalarReader);
      row.add(value);
    }

    return row;
  }

  /**
   * Verify the actual rows using the rules defined in this builder
   * @param actual the actual results to verify
   */
  public void verify(RowSet actual) {
    compareSchemasAndCounts(actual);
    int testLength = getTestLength();

    RowSetReader er = expected.reader();
    RowSetReader ar = actual.reader();
    for (int i = 0; i < offset; i++) {
      er.next();
      ar.next();
    }
    for (int i = 0; i < testLength; i++) {
      er.next();
      ar.next();
      String label = Integer.toString(er.logicalIndex() + 1);
      verifyRow(label, er, ar);
    }
  }

  /**
   * Convenience method to verify the actual results, then free memory
   * for the actual result sets.
   * @param actual the actual results to verify
   */
  public void verifyAndClear(RowSet actual) {
    try {
      verify(actual);
    } finally {
      actual.clear();
    }
  }

  /**
   * Convenience method to verify the actual results, then free memory
   * for both the expected and actual result sets.
   * @param actual the actual results to verify
   */
  public void verifyAndClearAll(RowSet actual) {
    try {
      verify(actual);
    } finally {
      expected.clear();
      actual.clear();
    }
  }

  /** Compares every unmasked column of a single row. */
  private void verifyRow(String label, TupleReader er, TupleReader ar) {
    String prefix = label + ":";
    for (int i = 0; i < mask.length; i++) {
      if (! mask[i]) {
        continue;
      }
      verifyColumn(prefix + i, er.column(i), ar.column(i));
    }
  }

  /** Dispatches on column object type (array, scalar, tuple, variant). */
  private void verifyColumn(String label, ObjectReader ec, ObjectReader ac) {
    assertEquals(label, ec.type(), ac.type());
    switch (ec.type()) {
    case ARRAY:
      verifyArray(label, ec.array(), ac.array());
      break;
    case SCALAR:
      verifyScalar(label, ec.scalar(), ac.scalar());
      break;
    case TUPLE:
      verifyTuple(label, ec.tuple(), ac.tuple());
      break;
    case VARIANT:
      verifyVariant(label, ec.variant(), ac.variant());
      break;
    default:
      throw new IllegalStateException( "Unexpected type: " + ec.type());
    }
  }

  /** Compares two tuples (maps) column by column. */
  private void verifyTuple(String label, TupleReader er, TupleReader ar) {
    assertEquals(label + " - tuple count", er.columnCount(), ar.columnCount());
    String prefix = label + ":";
    for (int i = 0; i < er.columnCount(); i++) {
      verifyColumn(prefix + i, er.column(i), ar.column(i));
    }
  }

  /**
   * Compares two scalar values, handling nulls, bytes and doubles specially.
   */
  private void verifyScalar(String label, ScalarReader ec, ScalarReader ac) {
    assertEquals(label + " - value type", ec.valueType(), ac.valueType());
    if (ec.isNull()) {
      assertTrue(label + " - column not null", ac.isNull());
      return;
    }
    // Expected is non-null here (early return above), so actual must be too.
    assertFalse(label + " - column is null", ac.isNull());
    switch (ec.valueType()) {
    case BYTES:
      // Fix: "expected" must be read from the expected reader (ec), not the
      // actual reader (ac). Previously both arrays came from ac, so this
      // comparison could never fail.
      byte[] expected = ec.getBytes();
      byte[] actual = ac.getBytes();
      assertEquals(label + " - byte lengths differ", expected.length, actual.length);
      assertTrue(label, Arrays.areEqual(expected, actual));
      break;

    // Double must be handled specially since BigDecimal cannot handle
    // INF or NAN double values.
    case DOUBLE:
      assertEquals(label, ec.getDouble(), ac.getDouble(), delta);
      break;

    // repeated_contains is claimed to return a boolean,
    // actually returns a count, but in a bit field. To test
    // this function, we must treat BIT as an integer.
    default:
      assertEquals(label, getScalar(ec), getScalar(ac));
    }
  }

  /**
   * Converts a scalar to a comparable object: an absent Optional for null,
   * a ByteBuffer for bytes (so equals works), a scaled BigDecimal for
   * doubles, or the raw object otherwise.
   */
  private Object getScalar(final ScalarReader scalarReader) {
    if (scalarReader.isNull()) {
      return Optional.absent();
    }

    switch (scalarReader.valueType()) {
    case BYTES:
      return ByteBuffer.wrap(scalarReader.getBytes());
    case DOUBLE:
      return new BigDecimal(scalarReader.getDouble(), this.scale).stripTrailingZeros();
    default:
      return scalarReader.getObject();
    }
  }

  /** Compares two arrays element by element. */
  private void verifyArray(String label, ArrayReader ea, ArrayReader aa) {
    assertEquals(label + " - array element type", ea.entryType(), aa.entryType());
    assertEquals(label + " - array length", ea.size(), aa.size());
    int i = 0;
    while (ea.next()) {
      assertTrue(aa.next());
      verifyColumn(label + "[" + i++ + "]", ea.entry(), aa.entry());
    }
  }

  /** Compares two variant (union) values, dispatching on the data type. */
  private void verifyVariant(String label, VariantReader ev,
      VariantReader av) {
    assertEquals(label + " null", ev.isNull(), av.isNull());
    if (ev.isNull()) {
      return;
    }
    assertEquals(label + " type", ev.dataType(), av.dataType());
    switch (ev.dataType()) {
    case LIST:
      verifyArray(label, ev.array(), av.array());
      break;
    case MAP:
      verifyTuple(label, ev.tuple(), av.tuple());
      break;
    case UNION:
      throw new IllegalStateException("Unions not allowed in unions.");
    case GENERIC_OBJECT:
    case LATE:
    case NULL:
      throw new UnsupportedOperationException(ev.dataType().toString());
    default:
      verifyScalar(label, ev.scalar(), av.scalar());
    }
  }

  // TODO make a native RowSetComparison comparator
  public static class ObjectComparator implements Comparator<Object> {
    public static final ObjectComparator INSTANCE = new ObjectComparator();

    private ObjectComparator() {
    }

    @Override
    public int compare(Object a, Object b) {
      if (a instanceof Integer) {
        int aInt = (Integer) a;
        int bInt = (Integer) b;
        // Fix: subtraction can overflow for large magnitudes; use the
        // overflow-safe library comparison instead.
        return Integer.compare(aInt, bInt);
      } else if (a instanceof Long) {
        Long aLong = (Long) a;
        Long bLong = (Long) b;
        return aLong.compareTo(bLong);
      } else if (a instanceof Float) {
        Float aFloat = (Float) a;
        Float bFloat = (Float) b;
        return aFloat.compareTo(bFloat);
      } else if (a instanceof Double) {
        Double aDouble = (Double) a;
        Double bDouble = (Double) b;
        return aDouble.compareTo(bDouble);
      } else if (a instanceof String) {
        String aString = (String) a;
        String bString = (String) b;
        return aString.compareTo(bString);
      } else {
        throw new UnsupportedOperationException(String.format("Unsupported type %s", a.getClass().getCanonicalName()));
      }
    }
  }
}
| |
/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 5.0 */
/* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
package org.cs3.prolog.connector.internal.cterm.parser;
/**
* An implementation of interface CharStream, where the stream is assumed to
* contain only ASCII characters (without unicode processing).
*/
@SuppressWarnings("javadoc")
public class SimpleCharStream
{
// NOTE: JavaCC-generated code ("Do not edit this line" header above); the
// circular-buffer bookkeeping below is order-sensitive, so only comments
// have been added here.
/** Whether parser is static. */
  public static final boolean staticFlag = false;
  // Current capacity of the circular character buffer.
  int bufsize;
  // Number of buffer slots usable before a refill/expand is needed.
  int available;
  // Index of the first character of the current token (-1 between tokens).
  int tokenBegin;
/** Position in buffer. */
  public int bufpos = -1;
  // Line number recorded for each buffer position.
  protected int bufline[];
  // Column number recorded for each buffer position.
  protected int bufcolumn[];

  protected int column = 0;
  protected int line = 1;

  // Track CR/LF state so CRLF counts as a single line break.
  protected boolean prevCharIsCR = false;
  protected boolean prevCharIsLF = false;

  protected java.io.Reader inputStream;

  // Circular buffer of characters read from inputStream.
  protected char[] buffer;
  // One past the index of the last valid character in the buffer.
  protected int maxNextCharInd = 0;
  // Number of characters backed up (via backup()) available for re-reading.
  protected int inBuf = 0;
  protected int tabSize = 8;

  protected void setTabSize(int i) { tabSize = i; }
  protected int getTabSize(int i) { return tabSize; }

  // Grows the buffer by 2048 chars. When wrapAround is set, the current
  // token's characters (split across the wrap point) are stitched back
  // together at the start of the new buffer.
  protected void ExpandBuff(boolean wrapAround)
  {
    char[] newbuffer = new char[bufsize + 2048];
    int newbufline[] = new int[bufsize + 2048];
    int newbufcolumn[] = new int[bufsize + 2048];

    try
    {
      if (wrapAround)
      {
        System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
        System.arraycopy(buffer, 0, newbuffer, bufsize - tokenBegin, bufpos);
        buffer = newbuffer;

        System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
        System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos);
        bufline = newbufline;

        System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
        System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos);
        bufcolumn = newbufcolumn;

        maxNextCharInd = (bufpos += (bufsize - tokenBegin));
      }
      else
      {
        System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
        buffer = newbuffer;

        System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
        bufline = newbufline;

        System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
        bufcolumn = newbufcolumn;

        maxNextCharInd = (bufpos -= tokenBegin);
      }
    }
    catch (Throwable t)
    {
      throw new Error(t.getMessage());
    }

    bufsize += 2048;
    available = bufsize;
    tokenBegin = 0;
  }

  // Refills the buffer from the reader, expanding or wrapping the circular
  // buffer as needed so the current token (starting at tokenBegin) is kept.
  protected void FillBuff() throws java.io.IOException
  {
    if (maxNextCharInd == available)
    {
      if (available == bufsize)
      {
        if (tokenBegin > 2048)
        {
          // Enough space before tokenBegin: wrap writes to the front.
          bufpos = maxNextCharInd = 0;
          available = tokenBegin;
        }
        else if (tokenBegin < 0)
          bufpos = maxNextCharInd = 0;
        else
          ExpandBuff(false);
      }
      else if (available > tokenBegin)
        available = bufsize;
      else if ((tokenBegin - available) < 2048)
        ExpandBuff(true);
      else
        available = tokenBegin;
    }

    int i;
    try {
      if ((i = inputStream.read(buffer, maxNextCharInd, available - maxNextCharInd)) == -1)
      {
        // End of input is surfaced as an IOException, per JavaCC convention.
        inputStream.close();
        throw new java.io.IOException();
      }
      else
        maxNextCharInd += i;
      return;
    }
    catch(java.io.IOException e) {
      // Undo the speculative position advance made by readChar().
      --bufpos;
      backup(0);
      if (tokenBegin == -1)
        tokenBegin = bufpos;
      throw e;
    }
  }

/** Start. */
  public char BeginToken() throws java.io.IOException
  {
    tokenBegin = -1;
    char c = readChar();
    tokenBegin = bufpos;

    return c;
  }

  // Updates line/column tracking for the character just read, handling
  // CR, LF, CRLF and tab expansion, and records them for bufpos.
  protected void UpdateLineColumn(char c)
  {
    column++;

    if (prevCharIsLF)
    {
      prevCharIsLF = false;
      line += (column = 1);
    }
    else if (prevCharIsCR)
    {
      prevCharIsCR = false;
      if (c == '\n')
      {
        prevCharIsLF = true;
      }
      else
        line += (column = 1);
    }

    switch (c)
    {
      case '\r' :
        prevCharIsCR = true;
        break;
      case '\n' :
        prevCharIsLF = true;
        break;
      case '\t' :
        // Advance to the next tab stop.
        column--;
        column += (tabSize - (column % tabSize));
        break;
      default :
        break;
    }

    bufline[bufpos] = line;
    bufcolumn[bufpos] = column;
  }

/** Read a character. */
  public char readChar() throws java.io.IOException
  {
    if (inBuf > 0)
    {
      // Serve a previously backed-up character; no line/column update needed
      // since it was recorded when first read.
      --inBuf;

      if (++bufpos == bufsize)
        bufpos = 0;

      return buffer[bufpos];
    }

    if (++bufpos >= maxNextCharInd)
      FillBuff();

    char c = buffer[bufpos];

    UpdateLineColumn(c);
    return c;
  }

  @Deprecated
  /**
   * @deprecated
   * @see #getEndColumn
   */
  public int getColumn() {
    return bufcolumn[bufpos];
  }

  @Deprecated
  /**
   * @deprecated
   * @see #getEndLine
   */
  public int getLine() {
    return bufline[bufpos];
  }

  /** Get token end column number. */
  public int getEndColumn() {
    return bufcolumn[bufpos];
  }

  /** Get token end line number. */
  public int getEndLine() {
    return bufline[bufpos];
  }

  /** Get token beginning column number. */
  public int getBeginColumn() {
    return bufcolumn[tokenBegin];
  }

  /** Get token beginning line number. */
  public int getBeginLine() {
    return bufline[tokenBegin];
  }

/** Backup a number of characters. */
  public void backup(int amount) {

    inBuf += amount;
    if ((bufpos -= amount) < 0)
      bufpos += bufsize;
  }

/** Constructor. */
  public SimpleCharStream(java.io.Reader dstream, int startline,
  int startcolumn, int buffersize)
  {
    inputStream = dstream;
    line = startline;
    column = startcolumn - 1;

    available = bufsize = buffersize;
    buffer = new char[buffersize];
    bufline = new int[buffersize];
    bufcolumn = new int[buffersize];
  }

/** Constructor. */
  public SimpleCharStream(java.io.Reader dstream, int startline,
                          int startcolumn)
  {
    this(dstream, startline, startcolumn, 4096);
  }

/** Constructor. */
  public SimpleCharStream(java.io.Reader dstream)
  {
    this(dstream, 1, 1, 4096);
  }

/** Reinitialise. */
  public void ReInit(java.io.Reader dstream, int startline,
  int startcolumn, int buffersize)
  {
    inputStream = dstream;
    line = startline;
    column = startcolumn - 1;

    // Reallocate buffers only when the requested size differs.
    if (buffer == null || buffersize != buffer.length)
    {
      available = bufsize = buffersize;
      buffer = new char[buffersize];
      bufline = new int[buffersize];
      bufcolumn = new int[buffersize];
    }
    prevCharIsLF = prevCharIsCR = false;
    tokenBegin = inBuf = maxNextCharInd = 0;
    bufpos = -1;
  }

/** Reinitialise. */
  public void ReInit(java.io.Reader dstream, int startline,
                     int startcolumn)
  {
    ReInit(dstream, startline, startcolumn, 4096);
  }

/** Reinitialise. */
  public void ReInit(java.io.Reader dstream)
  {
    ReInit(dstream, 1, 1, 4096);
  }

/** Constructor. */
  public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline,
  int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
  {
    this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
  }

/** Constructor. */
  public SimpleCharStream(java.io.InputStream dstream, int startline,
  int startcolumn, int buffersize)
  {
    this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
  }

/** Constructor. */
  public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline,
                          int startcolumn) throws java.io.UnsupportedEncodingException
  {
    this(dstream, encoding, startline, startcolumn, 4096);
  }

/** Constructor. */
  public SimpleCharStream(java.io.InputStream dstream, int startline,
                          int startcolumn)
  {
    this(dstream, startline, startcolumn, 4096);
  }

/** Constructor. */
  public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
  {
    this(dstream, encoding, 1, 1, 4096);
  }

/** Constructor. */
  public SimpleCharStream(java.io.InputStream dstream)
  {
    this(dstream, 1, 1, 4096);
  }

/** Reinitialise. */
  public void ReInit(java.io.InputStream dstream, String encoding, int startline,
                          int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
  {
    ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
  }

/** Reinitialise. */
  public void ReInit(java.io.InputStream dstream, int startline,
                          int startcolumn, int buffersize)
  {
    ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
  }

/** Reinitialise. */
  public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
  {
    ReInit(dstream, encoding, 1, 1, 4096);
  }

/** Reinitialise. */
  public void ReInit(java.io.InputStream dstream)
  {
    ReInit(dstream, 1, 1, 4096);
  }

/** Reinitialise. */
  public void ReInit(java.io.InputStream dstream, String encoding, int startline,
                     int startcolumn) throws java.io.UnsupportedEncodingException
  {
    ReInit(dstream, encoding, startline, startcolumn, 4096);
  }

/** Reinitialise. */
  public void ReInit(java.io.InputStream dstream, int startline,
                     int startcolumn)
  {
    ReInit(dstream, startline, startcolumn, 4096);
  }

/** Get token literal value. */
  public String GetImage()
  {
    // The token may wrap around the end of the circular buffer, in which
    // case it is reassembled from the two segments.
    if (bufpos >= tokenBegin)
      return new String(buffer, tokenBegin, bufpos - tokenBegin + 1);
    else
      return new String(buffer, tokenBegin, bufsize - tokenBegin) +
                            new String(buffer, 0, bufpos + 1);
  }

/** Get the suffix. */
  public char[] GetSuffix(int len)
  {
    char[] ret = new char[len];

    // Copy the last len characters, handling a wrap around the buffer end.
    if ((bufpos + 1) >= len)
      System.arraycopy(buffer, bufpos - len + 1, ret, 0, len);
    else
    {
      System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0,
                                                        len - bufpos - 1);
      System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1);
    }

    return ret;
  }

/** Reset buffer when finished. */
  public void Done()
  {
    buffer = null;
    bufline = null;
    bufcolumn = null;
  }

  /**
   * Method to adjust line and column numbers for the start of a token.
   */
  public void adjustBeginLineColumn(int newLine, int newCol)
  {
    int start = tokenBegin;
    int len;

    if (bufpos >= tokenBegin)
    {
      len = bufpos - tokenBegin + inBuf + 1;
    }
    else
    {
      len = bufsize - tokenBegin + bufpos + 1 + inBuf;
    }

    int i = 0, j = 0, k = 0;
    int nextColDiff = 0, columnDiff = 0;

    // Rewrite positions on the token's first (original) line, shifting
    // columns relative to newCol.
    while (i < len && bufline[j = start % bufsize] == bufline[k = ++start % bufsize])
    {
      bufline[j] = newLine;
      nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j];
      bufcolumn[j] = newCol + columnDiff;
      columnDiff = nextColDiff;
      i++;
    }

    // Renumber any subsequent lines of the token.
    if (i < len)
    {
      bufline[j] = newLine++;
      bufcolumn[j] = newCol + columnDiff;

      while (i++ < len)
      {
        if (bufline[j = start % bufsize] != bufline[++start % bufsize])
          bufline[j] = newLine++;
        else
          bufline[j] = newLine;
      }
    }

    line = bufline[j];
    column = bufcolumn[j];
  }

}
/* JavaCC - OriginalChecksum=5dfe9b496774df99be0eb09f84cd72cd (do not edit this line) */
| |
package com.xaosia.bungeepex.platform.bungee.utils;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import com.xaosia.bungeepex.BungeePEX;
import com.xaosia.bungeepex.platform.bungee.BungeeConfig;
import com.xaosia.bungeepex.PermissionGroup;
import com.xaosia.bungeepex.Statics;
import com.xaosia.bungeepex.PermissionUser;
import com.xaosia.bungeepex.platform.NetworkNotifier;
import net.md_5.bungee.api.ProxyServer;
import net.md_5.bungee.api.config.ServerInfo;
import net.md_5.bungee.api.connection.ProxiedPlayer;
/**
 * BungeeCord-side implementation of {@link NetworkNotifier}: propagates
 * permission changes (user/group deletions and reloads) to the backend
 * servers via plugin messages on the {@code BungeePEX.CHANNEL} channel.
 *
 * Messages targeted at a single user are routed through the server that the
 * user is currently connected to; broadcast messages go to every configured
 * server. The {@code origin} parameter names the server that triggered the
 * change, which is skipped to avoid a notification feedback loop.
 */
@RequiredArgsConstructor
public class BungeeNotifier implements NetworkNotifier
{

    private final BungeeConfig config;

    @Override
    public void deleteUser(PermissionUser u, String origin)
    {
        // address the player by UUID or by name depending on configuration
        if (config.isUseUUIDs())
        {
            sendPM(u.getUUID(), "deleteUser;" + u.getUUID(), origin);
        }
        else
        {
            sendPM(u.getName(), "deleteUser;" + u.getName(), origin);
        }
    }

    @Override
    public void deleteGroup(PermissionGroup g, String origin)
    {
        sendPMAll("deleteGroup;" + g.getName(), origin);
    }

    @Override
    public void reloadUser(PermissionUser u, String origin)
    {
        if (config.isUseUUIDs())
        {
            sendPM(u.getUUID(), "reloadUser;" + u.getUUID(), origin);
        }
        else
        {
            sendPM(u.getName(), "reloadUser;" + u.getName(), origin);
        }
    }

    @Override
    public void reloadGroup(PermissionGroup g, String origin)
    {
        sendPMAll("reloadGroup;" + g.getName(), origin);
    }

    @Override
    public void reloadUsers(String origin)
    {
        sendPMAll("reloadUsers", origin);
    }

    @Override
    public void reloadGroups(String origin)
    {
        sendPMAll("reloadGroups", origin);
    }

    @Override
    public void reloadAll(String origin)
    {
        sendPMAll("reloadall", origin);
    }

    /**
     * Pushes a name/UUID pair to the user's current server for UUID
     * consistency checking (only when UUID mode is enabled).
     */
    public void sendUUIDAndPlayer(String name, UUID uuid)
    {
        if (config.isUseUUIDs())
        {
            sendPM(uuid, "uuidcheck;" + name + ";" + uuid, null);
        }
    }

    //bukkit-bungeeperms reload information functions
    /**
     * Sends a plugin message to the server the named player is connected to,
     * honoring the network-type server filters and skipping the origin
     * server to avoid a feedback loop.
     */
    private void sendPM(String player, String msg, String origin)
    {
        //if standalone no network messages
        if (config.getNetworkType() == NetworkType.Standalone)
        {
            return;
        }
        ProxiedPlayer pp = ProxyServer.getInstance().getPlayer(player);
        if (pp != null && pp.getServer() != null)
        {
            //ignore servers not in config when network type is server dependent
            if (config.getNetworkType() == NetworkType.ServerDependend
                    && !Statics.listContains(config.getNetworkServers(), pp.getServer().getInfo().getName()))
            {
                return;
            }
            //ignore blacklisted servers when network type is the blacklist variant
            if (config.getNetworkType() == NetworkType.ServerDependendBlacklist
                    && Statics.listContains(config.getNetworkServers(), pp.getServer().getInfo().getName()))
            {
                return;
            }
            //no feedback loop
            if (origin != null && pp.getServer().getInfo().getName().equalsIgnoreCase(origin))
            {
                return;
            }
            //send message
            pp.getServer().getInfo().sendData(BungeePEX.CHANNEL, msg.getBytes());
            sendConfig(pp.getServer().getInfo());
        }
    }

    /**
     * Same as {@link #sendPM(String, String, String)} but looks the player
     * up by UUID instead of name.
     */
    private void sendPM(UUID player, String msg, String origin)
    {
        //if standalone no network messages
        if (config.getNetworkType() == NetworkType.Standalone)
        {
            return;
        }
        ProxiedPlayer pp = ProxyServer.getInstance().getPlayer(player);
        if (pp != null && pp.getServer() != null)
        {
            //ignore servers not in config when network type is server dependent
            if (config.getNetworkType() == NetworkType.ServerDependend
                    && !Statics.listContains(config.getNetworkServers(), pp.getServer().getInfo().getName()))
            {
                return;
            }
            //ignore blacklisted servers when network type is the blacklist variant
            if (config.getNetworkType() == NetworkType.ServerDependendBlacklist
                    && Statics.listContains(config.getNetworkServers(), pp.getServer().getInfo().getName()))
            {
                return;
            }
            //no feedback loop
            if (origin != null && pp.getServer().getInfo().getName().equalsIgnoreCase(origin))
            {
                return;
            }
            //send message
            pp.getServer().getInfo().sendData(BungeePEX.CHANNEL, msg.getBytes());
            sendConfig(pp.getServer().getInfo());
        }
    }

    /**
     * Broadcasts a plugin message to every server known to the proxy,
     * honoring the network-type server filters and skipping the origin
     * server to avoid a feedback loop.
     */
    private void sendPMAll(String msg, String origin)
    {
        //if standalone no network messages
        if (config.getNetworkType() == NetworkType.Standalone)
        {
            return;
        }
        for (ServerInfo si : ProxyServer.getInstance().getConfig().getServers().values())
        {
            //ignore servers not in config when network type is server dependent
            //BUGFIX: was "return", which aborted the broadcast for all
            //remaining servers instead of skipping just this one
            if (config.getNetworkType() == NetworkType.ServerDependend
                    && !Statics.listContains(config.getNetworkServers(), si.getName()))
            {
                continue;
            }
            //ignore blacklisted servers when network type is the blacklist variant
            //BUGFIX: was "return" (same problem as above)
            if (config.getNetworkType() == NetworkType.ServerDependendBlacklist
                    && Statics.listContains(config.getNetworkServers(), si.getName()))
            {
                continue;
            }
            //no feedback loop
            if (origin != null && si.getName().equalsIgnoreCase(origin))
            {
                continue;
            }
            //send message
            si.sendData(BungeePEX.CHANNEL, msg.getBytes());
            sendConfig(si);
        }
    }

    //timestamp of the last configcheck push; guarded by synchronized(this)
    private long lastConfigUpdate = 0;

    /**
     * Pushes the proxy-side configuration to a backend server for
     * consistency checking, at most once every 5 minutes across all servers.
     */
    private void sendConfig(ServerInfo info)
    {
        synchronized (this)
        {
            long now = System.currentTimeMillis();
            if (lastConfigUpdate + 5 * 60 * 1000 < now)
            {
                lastConfigUpdate = now;
                //NOTE(review): getBackEndType() is sent twice; the second field
                //looks like it should carry a different value (e.g. a UUID/player
                //DB type) -- confirm against the receiving side before changing.
                info.sendData(BungeePEX.CHANNEL, ("configcheck;" + info.getName() + ";" + config.getBackEndType() + ";" + config.getBackEndType() + ";" + config.isUseUUIDs()).getBytes());
            }
        }
    }
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.debugger.engine;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.DebuggerInvocationUtil;
import com.intellij.debugger.EvaluatingComputable;
import com.intellij.debugger.SourcePosition;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.evaluation.EvaluateExceptionUtil;
import com.intellij.debugger.engine.evaluation.EvaluationContextImpl;
import com.intellij.debugger.engine.evaluation.TextWithImportsImpl;
import com.intellij.debugger.engine.evaluation.expression.*;
import com.intellij.debugger.engine.events.DebuggerContextCommandImpl;
import com.intellij.debugger.engine.events.SuspendContextCommandImpl;
import com.intellij.debugger.impl.DebuggerContextImpl;
import com.intellij.debugger.impl.DebuggerSession;
import com.intellij.debugger.impl.DebuggerUtilsEx;
import com.intellij.debugger.ui.impl.watch.ValueDescriptorImpl;
import com.intellij.openapi.progress.util.ProgressWindow;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.xdebugger.XExpression;
import com.intellij.xdebugger.frame.XValueModifier;
import com.sun.jdi.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import static com.intellij.psi.CommonClassNames.JAVA_LANG_STRING;
/*
 * Class SetValueAction
 * @author Jeka
 *
 * Base XValueModifier for the Java debugger: evaluates a user-supplied
 * expression on the debugger manager thread and assigns the result to the
 * selected value. Subclasses supply the concrete assignment logic via
 * setValueImpl().
 */
public abstract class JavaValueModifier extends XValueModifier {
// the debugger tree value being modified
private final JavaValue myJavaValue;
public JavaValueModifier(JavaValue javaValue) {
myJavaValue = javaValue;
}
/**
 * Supplies the initial text for the in-place value editor. Primitives use
 * the rendered value text (with any trailing hex presentation stripped);
 * strings are fetched from the VM asynchronously on the debugger manager
 * thread and wrapped in double quotes; other references yield null.
 */
@Override
public void calculateInitialValueEditorText(final XInitialValueCallback callback) {
final Value value = myJavaValue.getDescriptor().getValue();
if (value == null || value instanceof PrimitiveValue) {
String valueString = myJavaValue.getDescriptor().getValueText();
int pos = valueString.lastIndexOf('('); //skip hex presentation if any
if (pos > 1) {
valueString = valueString.substring(0, pos).trim();
}
callback.setValue(valueString);
}
else if (value instanceof StringReference) {
// reading the string contents requires a VM round-trip, so schedule it
// on the debugger manager thread instead of doing it inline
final EvaluationContextImpl evaluationContext = myJavaValue.getEvaluationContext();
evaluationContext.getManagerThread().schedule(new SuspendContextCommandImpl(evaluationContext.getSuspendContext()) {
@Override
public Priority getPriority() {
return Priority.NORMAL;
}
@Override
public void contextAction(@NotNull SuspendContextImpl suspendContext) throws Exception {
callback.setValue(
StringUtil.wrapWithDoubleQuote(DebuggerUtils.translateStringValue(DebuggerUtils.getValueAsString(evaluationContext, value))));
}
});
}
else {
callback.setValue(null);
}
}
/**
 * Refreshes the debugger session UI on the EDT after a value change.
 */
protected static void update(final DebuggerContextImpl context) {
DebuggerInvocationUtil.swingInvokeLater(context.getProject(), () -> {
final DebuggerSession session = context.getDebuggerSession();
if (session != null) {
session.refresh(false);
}
});
//node.setState(context);
}
// subclass hook performing the actual assignment for a specific value kind
protected abstract void setValueImpl(@NotNull XExpression expression, @NotNull XModificationCallback callback);
/**
 * Entry point from the XDebugger UI. Bails out silently when the
 * descriptor is read-only, reports an error when the suspend context has
 * already been resumed, and otherwise delegates to setValueImpl().
 */
@Override
public void setValue(@NotNull XExpression expression, @NotNull XModificationCallback callback) {
final ValueDescriptorImpl descriptor = myJavaValue.getDescriptor();
if(!descriptor.canSetValue()) {
return;
}
if (myJavaValue.getEvaluationContext().getSuspendContext().isResumed()) {
callback.errorOccurred(DebuggerBundle.message("error.context.has.changed"));
return;
}
setValueImpl(expression, callback);
}
/**
 * Coerces an evaluated value to the variable's declared type:
 * non-StringReference values assigned to java.lang.String are mirrored as
 * VM strings, doubles narrowed to float when they fit, and values are
 * (un)boxed to match primitive vs. reference target types.
 */
protected static Value preprocessValue(EvaluationContextImpl context, Value value, @NotNull Type varType) throws EvaluateException {
if (value != null && JAVA_LANG_STRING.equals(varType.name()) && !(value instanceof StringReference)) {
String v = DebuggerUtils.getValueAsString(context, value);
if (v != null) {
value = DebuggerUtilsEx.mirrorOfString(v, context.getDebugProcess().getVirtualMachineProxy(), context);
}
}
if (value instanceof DoubleValue) {
double dValue = ((DoubleValue) value).doubleValue();
if(varType instanceof FloatType && Float.MIN_VALUE <= dValue && dValue <= Float.MAX_VALUE){
value = context.getDebugProcess().getVirtualMachineProxy().mirrorOf((float)dValue);
}
}
if (value != null) {
if (varType instanceof PrimitiveType) {
// target is primitive: unbox a boxed value if needed
if (!(value instanceof PrimitiveValue)) {
value = (Value)UnBoxingEvaluator.unbox(value, context);
}
}
else if (varType instanceof ReferenceType) {
// target is a reference type: box a primitive value if needed
if (value instanceof PrimitiveValue) {
value = (Value)BoxingEvaluator.box(value, context);
}
}
}
return value;
}
/**
 * Strategy object describing how to store an evaluated value and what the
 * target (l-value) type is.
 */
protected interface SetValueRunnable {
void setValue(EvaluationContextImpl evaluationContext, Value newValue) throws ClassNotLoadedException,
InvalidTypeException,
EvaluateException,
IncompatibleThreadStateException;
default ClassLoaderReference getClassLoader(EvaluationContextImpl evaluationContext) throws EvaluateException {
return evaluationContext.getClassLoader();
}
@Nullable
Type getLType() throws ClassNotLoadedException, EvaluateException;
}
/**
 * Fast path for long-typed variables: parses the expression text directly
 * via Long.decode (handles decimal/hex/octal literals), avoiding a full
 * expression evaluation. Returns null when not applicable.
 */
@Nullable
private static ExpressionEvaluator tryDirectAssignment(@NotNull XExpression expression,
@Nullable Type varType,
@NotNull EvaluationContextImpl evaluationContext) {
if (varType instanceof LongType) {
try {
return new ExpressionEvaluatorImpl(new IdentityEvaluator(
evaluationContext.getDebugProcess().getVirtualMachineProxy().mirrorOf(Long.decode(expression.getExpression()))));
}
catch (NumberFormatException ignored) {
}
}
return null;
}
/**
 * Evaluates the expression and stores the result via setValueRunnable,
 * translating JDI failures into EvaluateExceptions. On
 * ClassNotLoadedException (with auto-loading enabled) it loads the missing
 * class and retries the whole assignment once per missing class.
 */
private static void setValue(ExpressionEvaluator evaluator, EvaluationContextImpl evaluationContext, SetValueRunnable setValueRunnable) throws EvaluateException {
Value value;
try {
value = evaluator.evaluate(evaluationContext);
setValueRunnable.setValue(evaluationContext, value);
}
catch (IllegalArgumentException ex) {
throw EvaluateExceptionUtil.createEvaluateException(ex.getMessage());
}
catch (InvalidTypeException ex) {
throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.type.mismatch"));
}
catch (IncompatibleThreadStateException e) {
throw EvaluateExceptionUtil.createEvaluateException(e);
}
catch (ClassNotLoadedException ex) {
if (!evaluationContext.isAutoLoadClasses()) {
throw EvaluateExceptionUtil.createEvaluateException(ex);
}
final ReferenceType refType;
try {
refType = evaluationContext.getDebugProcess().loadClass(evaluationContext,
ex.className(),
setValueRunnable.getClassLoader(evaluationContext));
if (refType != null) {
//try again
setValue(evaluator, evaluationContext, setValueRunnable);
}
}
catch (InvocationException | InvalidTypeException | IncompatibleThreadStateException | ClassNotLoadedException e) {
throw EvaluateExceptionUtil.createEvaluateException(e);
}
catch (ObjectCollectedException e) {
throw EvaluateExceptionUtil.OBJECT_WAS_COLLECTED;
}
}
}
/**
 * Schedules the full set-value operation on the debugger manager thread
 * under a progress window: builds (or fast-paths) an evaluator for the
 * expression, performs the assignment unless the user cancelled, and
 * reports success or failure through the callback.
 */
protected void set(@NotNull final XExpression expression,
final XModificationCallback callback,
final DebuggerContextImpl debuggerContext,
final SetValueRunnable setValueRunnable) {
final ProgressWindow progressWindow = new ProgressWindow(true, debuggerContext.getProject());
final EvaluationContextImpl evaluationContext = myJavaValue.getEvaluationContext();
SuspendContextCommandImpl askSetAction = new DebuggerContextCommandImpl(debuggerContext) {
@Override
public Priority getPriority() {
return Priority.HIGH;
}
@Override
public void threadAction(@NotNull SuspendContextImpl suspendContext) {
ExpressionEvaluator evaluator;
try {
evaluator = tryDirectAssignment(expression, setValueRunnable.getLType(), evaluationContext);
if (evaluator == null) {
// no fast path: compile the expression in the source context of
// the current stack frame (requires a read action)
Project project = evaluationContext.getProject();
SourcePosition position = ContextUtil.getSourcePosition(evaluationContext);
PsiElement context = ContextUtil.getContextElement(evaluationContext, position);
evaluator = DebuggerInvocationUtil.commitAndRunReadAction(project, new EvaluatingComputable<ExpressionEvaluator>() {
@Override
public ExpressionEvaluator compute() throws EvaluateException {
return EvaluatorBuilderImpl
.build(TextWithImportsImpl.fromXExpression(expression), context, position, project);
}
});
}
setValue(evaluator, evaluationContext, new SetValueRunnable() {
@Override
public void setValue(EvaluationContextImpl evaluationContext, Value newValue) throws ClassNotLoadedException,
InvalidTypeException,
EvaluateException,
IncompatibleThreadStateException {
// skip the store if the user cancelled via the progress window
if (!progressWindow.isCanceled()) {
setValueRunnable.setValue(evaluationContext, newValue);
//node.calcValue();
}
}
@Nullable
@Override
public Type getLType() throws EvaluateException, ClassNotLoadedException {
return setValueRunnable.getLType();
}
});
callback.valueModified();
}
catch (EvaluateException | ClassNotLoadedException e) {
callback.errorOccurred(e.getMessage());
}
}
};
progressWindow.setTitle(DebuggerBundle.message("title.evaluating"));
evaluationContext.getDebugProcess().getManagerThread().startProgress(askSetAction, progressWindow);
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.spiller;
import com.facebook.presto.common.Page;
import com.facebook.presto.memory.context.LocalMemoryContext;
import com.facebook.presto.operator.SpillContext;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.page.PagesSerde;
import com.facebook.presto.spi.page.PagesSerdeUtil;
import com.facebook.presto.spi.spiller.SpillCipher;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.ImmutableList;
import com.google.common.io.Closer;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import io.airlift.slice.InputStreamSliceInput;
import io.airlift.slice.OutputStreamSliceOutput;
import io.airlift.slice.SliceOutput;
import javax.annotation.concurrent.NotThreadSafe;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import static com.facebook.presto.common.block.PageBuilderStatus.DEFAULT_MAX_PAGE_SIZE_IN_BYTES;
import static com.facebook.presto.execution.buffer.PageSplitterUtil.splitPage;
import static com.facebook.presto.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static com.facebook.presto.spi.page.PagesSerdeUtil.writeSerializedPage;
import static com.facebook.presto.spiller.FileSingleStreamSpillerFactory.SPILL_FILE_PREFIX;
import static com.facebook.presto.spiller.FileSingleStreamSpillerFactory.SPILL_FILE_SUFFIX;
import static com.google.common.base.Preconditions.checkState;
import static java.nio.file.StandardOpenOption.APPEND;
import static java.util.Objects.requireNonNull;
/**
 * Spills a stream of Pages to a single temporary file and reads them back.
 * Writing happens asynchronously on the supplied executor; reading is a
 * one-shot operation that is only valid after all spills have completed.
 */
@NotThreadSafe
public class FileSingleStreamSpiller
implements SingleStreamSpiller
{
@VisibleForTesting
static final int BUFFER_SIZE = 4 * 1024;
// owns the temp spill file; registered with the closer for deletion on close()
private final FileHolder targetFile;
// collects every resource (file, cipher, spill context, memory) for close()
private final Closer closer = Closer.create();
private final PagesSerde serde;
private final SpillerStats spillerStats;
private final SpillContext localSpillContext;
private final LocalMemoryContext memoryContext;
private final Optional<SpillCipher> spillCipher;
private final ListeningExecutorService executor;
// flips to false on first read; guards both repeated reads and late writes
private boolean writable = true;
// running total of the in-memory (uncompressed) size of spilled pages;
// NOTE(review): updated on the spiller thread and read via the getter
// without synchronization -- confirm callers tolerate a stale value
private long spilledPagesInMemorySize;
// future of the most recent spill; immediate-success sentinel when idle
private ListenableFuture<?> spillInProgress = Futures.immediateFuture(null);
public FileSingleStreamSpiller(
PagesSerde serde,
ListeningExecutorService executor,
Path spillPath,
SpillerStats spillerStats,
SpillContext spillContext,
LocalMemoryContext memoryContext,
Optional<SpillCipher> spillCipher)
{
this.serde = requireNonNull(serde, "serde is null");
this.executor = requireNonNull(executor, "executor is null");
this.spillerStats = requireNonNull(spillerStats, "spillerStats is null");
this.localSpillContext = spillContext.newLocalSpillContext();
this.memoryContext = requireNonNull(memoryContext, "memoryContext is null");
this.spillCipher = requireNonNull(spillCipher, "spillCipher is null");
checkState(!spillCipher.isPresent() || !spillCipher.get().isDestroyed(), "spillCipher is already destroyed");
this.spillCipher.ifPresent(cipher -> closer.register(cipher::destroy));
// HACK!
// The writePages() method is called in a separate thread pool and it's possible that
// these spiller thread can run concurrently with the close() method.
// Due to this race when the spiller thread is running, the driver thread:
// 1. Can zero out the memory reservation even though the spiller thread physically holds onto that memory.
// 2. Can close/delete the temp file(s) used for spilling, which doesn't have any visible side effects, but still not desirable.
// To hack around the first issue we reserve the memory in the constructor and we release it in the close() method.
// This means we start accounting for the memory before the spiller thread allocates it, and we release the memory reservation
// before/after the spiller thread allocates that memory -- whether before or after depends on whether writePages() is in the
// middle of execution when close() is called (note that this applies to both readPages() and writePages() methods).
this.memoryContext.setBytes(BUFFER_SIZE);
try {
this.targetFile = closer.register(new FileHolder(Files.createTempFile(spillPath, SPILL_FILE_PREFIX, SPILL_FILE_SUFFIX)));
}
catch (IOException e) {
throw new PrestoException(GENERIC_INTERNAL_ERROR, "Failed to create spill file", e);
}
}
/**
 * Asynchronously appends the given pages to the spill file. Only one spill
 * may be in flight at a time.
 */
@Override
public ListenableFuture<?> spill(Iterator<Page> pageIterator)
{
requireNonNull(pageIterator, "pageIterator is null");
checkNoSpillInProgress();
spillInProgress = executor.submit(() -> writePages(pageIterator));
return spillInProgress;
}
@Override
public long getSpilledPagesInMemorySize()
{
return spilledPagesInMemorySize;
}
/**
 * Returns a lazy, one-shot iterator over the spilled pages; must not be
 * called while a spill is in progress.
 */
@Override
public Iterator<Page> getSpilledPages()
{
checkNoSpillInProgress();
return readPages();
}
@Override
public ListenableFuture<List<Page>> getAllSpilledPages()
{
return executor.submit(() -> ImmutableList.copyOf(getSpilledPages()));
}
// runs on the spiller executor; appends serialized pages to the spill file
private void writePages(Iterator<Page> pageIterator)
{
checkState(writable, "Spilling no longer allowed. The spiller has been made non-writable on first read for subsequent reads to be consistent");
try (SliceOutput output = new OutputStreamSliceOutput(targetFile.newOutputStream(APPEND), BUFFER_SIZE)) {
while (pageIterator.hasNext()) {
Page page = pageIterator.next();
spilledPagesInMemorySize += page.getSizeInBytes();
// page serialization requires page.getSizeInBytes() + Integer.BYTES to fit in an integer
splitPage(page, DEFAULT_MAX_PAGE_SIZE_IN_BYTES).stream()
.map(serde::serialize)
.forEach(serializedPage -> {
long pageSize = serializedPage.getSizeInBytes();
localSpillContext.updateBytes(pageSize);
spillerStats.addToTotalSpilledBytes(pageSize);
writeSerializedPage(output, serializedPage);
});
}
}
catch (UncheckedIOException | IOException e) {
throw new PrestoException(GENERIC_INTERNAL_ERROR, "Failed to spill pages", e);
}
}
// opens the spill file for reading; makes the spiller permanently
// non-writable so the returned iterator's view stays consistent
private Iterator<Page> readPages()
{
checkState(writable, "Repeated reads are disallowed to prevent potential resource leaks");
writable = false;
try {
InputStream input = closer.register(targetFile.newInputStream());
Iterator<Page> pages = PagesSerdeUtil.readPages(serde, new InputStreamSliceInput(input, BUFFER_SIZE));
// close the stream eagerly once fully consumed (closer would otherwise
// hold it until close())
return closeWhenExhausted(pages, input);
}
catch (IOException e) {
throw new PrestoException(GENERIC_INTERNAL_ERROR, "Failed to read spilled pages", e);
}
}
/**
 * Releases every registered resource: spill context, memory reservation,
 * cipher, open streams, and the temp file itself.
 */
@Override
public void close()
{
closer.register(localSpillContext);
// release the memory reserved in the constructor (see HACK note above)
closer.register(() -> memoryContext.setBytes(0));
try {
closer.close();
}
catch (IOException e) {
throw new PrestoException(GENERIC_INTERNAL_ERROR, "Failed to close spiller", e);
}
}
private void checkNoSpillInProgress()
{
checkState(spillInProgress.isDone(), "spill in progress");
}
// wraps an iterator so that the given resource is closed exactly when the
// iterator is exhausted
private static <T> Iterator<T> closeWhenExhausted(Iterator<T> iterator, Closeable resource)
{
requireNonNull(iterator, "iterator is null");
requireNonNull(resource, "resource is null");
return new AbstractIterator<T>()
{
@Override
protected T computeNext()
{
if (iterator.hasNext()) {
return iterator.next();
}
try {
resource.close();
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
return endOfData();
}
};
}
}
| |
/**
* Copyright 2005-2008 Noelios Technologies.
*
* The contents of this file are subject to the terms of the following open
* source licenses: LGPL 3.0 or LGPL 2.1 or CDDL 1.0 (the "Licenses"). You can
* select the license that you prefer but you may not use this file except in
* compliance with one of these Licenses.
*
* You can obtain a copy of the LGPL 3.0 license at
* http://www.gnu.org/licenses/lgpl-3.0.html
*
* You can obtain a copy of the LGPL 2.1 license at
* http://www.gnu.org/licenses/lgpl-2.1.html
*
* You can obtain a copy of the CDDL 1.0 license at
* http://www.sun.com/cddl/cddl.html
*
* See the Licenses for the specific language governing permissions and
* limitations under the Licenses.
*
* Alternatively, you can obtain a royaltee free commercial license with less
* limitations, transferable or non-transferable, directly at
* http://www.noelios.com/products/restlet-engine
*
* Restlet is a registered trademark of Noelios Technologies.
*/
package com.noelios.restlet.http;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.Date;
import java.util.logging.Level;
import org.restlet.Context;
import org.restlet.data.Cookie;
import org.restlet.data.CookieSetting;
import org.restlet.data.Parameter;
import org.restlet.util.DateUtils;
/**
 * Cookie header reader.
 *
 * Parses "Cookie" request headers into {@code Cookie} instances and
 * "Set-Cookie" response headers into {@code CookieSetting} instances,
 * supporting both the old Netscape format (version 0) and RFC 2109
 * (version 1) cookies.
 *
 * @author Jerome Louvel
 */
public class CookieReader extends HeaderReader {
    private static final String NAME_DOMAIN = "$Domain";

    private static final String NAME_PATH = "$Path";

    private static final String NAME_SET_ACCESS_RESTRICTED = "httpOnly";

    private static final String NAME_SET_COMMENT = "comment";

    private static final String NAME_SET_COMMENT_URL = "commentURL";

    private static final String NAME_SET_DISCARD = "discard";

    private static final String NAME_SET_DOMAIN = "domain";

    private static final String NAME_SET_EXPIRES = "expires";

    private static final String NAME_SET_MAX_AGE = "max-age";

    private static final String NAME_SET_PATH = "path";

    private static final String NAME_SET_PORT = "port";

    private static final String NAME_SET_SECURE = "secure";

    private static final String NAME_SET_VERSION = "version";

    private static final String NAME_VERSION = "$Version";

    /** The cached pair. Used by the readPair() method. */
    private volatile Parameter cachedPair;

    /** The global cookie specification version. */
    private volatile int globalVersion;

    /**
     * Constructor.
     *
     * @param header
     *            The header to read.
     */
    public CookieReader(String header) {
        super(header);
        this.cachedPair = null;
        this.globalVersion = -1;
    }

    /**
     * Reads the next cookie available or null.
     *
     * @return The next cookie available or null.
     * @throws IOException
     */
    public Cookie readCookie() throws IOException {
        Cookie result = null;
        Parameter pair = readPair();

        // BUGFIX: readPair() returns null when the header is empty or
        // exhausted; previously pair was dereferenced below without a null
        // check, causing a NullPointerException on empty Cookie headers.
        if ((pair != null) && (this.globalVersion == -1)) {
            // Cookies version not yet detected
            if (pair.getName().equalsIgnoreCase(NAME_VERSION)) {
                if (pair.getValue() != null) {
                    this.globalVersion = Integer.parseInt(pair.getValue());
                } else {
                    throw new IOException(
                            "Empty cookies version attribute detected. Please check your cookie header");
                }
            } else {
                // Set the default version for old Netscape cookies
                this.globalVersion = 0;
            }
        }

        // Skip any leading special ($-prefixed) attributes
        while ((pair != null) && (pair.getName().charAt(0) == '$')) {
            // Unexpected special attribute
            // Silently ignore it as it may have been introduced by new
            // specifications
            pair = readPair();
        }

        if (pair != null) {
            // Set the cookie name and value
            result = new Cookie(this.globalVersion, pair.getName(), pair
                    .getValue());
            pair = readPair();
        }

        // Consume the special attributes that qualify this cookie
        while ((pair != null) && (pair.getName().charAt(0) == '$')) {
            if (pair.getName().equalsIgnoreCase(NAME_PATH)) {
                result.setPath(pair.getValue());
            } else if (pair.getName().equalsIgnoreCase(NAME_DOMAIN)) {
                result.setDomain(pair.getValue());
            } else {
                // Unexpected special attribute
                // Silently ignore it as it may have been introduced by new
                // specifications
            }
            pair = readPair();
        }

        if (pair != null) {
            // We started to read the next cookie
            // So let's put it back into the stream
            this.cachedPair = pair;
        }

        return result;
    }

    /**
     * Reads the next cookie setting available or null.
     *
     * @return The next cookie setting available or null.
     * @throws IOException
     */
    public CookieSetting readCookieSetting() throws IOException {
        CookieSetting result = null;
        Parameter pair = readPair();

        // Skip any leading special ($-prefixed) attributes
        while ((pair != null) && (pair.getName().charAt(0) == '$')) {
            // Unexpected special attribute
            // Silently ignore it as it may have been introduced by new
            // specifications
            pair = readPair();
        }

        if (pair != null) {
            // Set the cookie name and value
            result = new CookieSetting(pair.getName(), pair.getValue());
            pair = readPair();
        }

        // All remaining pairs are attributes of this cookie setting
        while (pair != null) {
            if (pair.getName().equalsIgnoreCase(NAME_SET_PATH)) {
                result.setPath(pair.getValue());
            } else if (pair.getName().equalsIgnoreCase(NAME_SET_DOMAIN)) {
                result.setDomain(pair.getValue());
            } else if (pair.getName().equalsIgnoreCase(NAME_SET_COMMENT)) {
                result.setComment(pair.getValue());
            } else if (pair.getName().equalsIgnoreCase(NAME_SET_COMMENT_URL)) {
                // Not yet supported
            } else if (pair.getName().equalsIgnoreCase(NAME_SET_DISCARD)) {
                result.setMaxAge(-1);
            } else if (pair.getName().equalsIgnoreCase(NAME_SET_EXPIRES)) {
                // Convert the absolute expiration date into a relative max-age,
                // trying the date formats allowed for cookies in turn
                final Date current = new Date(System.currentTimeMillis());
                Date expires = DateUtils.parse(pair.getValue(),
                        DateUtils.FORMAT_RFC_1036);

                if (expires == null) {
                    expires = DateUtils.parse(pair.getValue(),
                            DateUtils.FORMAT_RFC_1123);
                }

                if (expires == null) {
                    expires = DateUtils.parse(pair.getValue(),
                            DateUtils.FORMAT_ASC_TIME);
                }

                if (expires != null) {
                    if (DateUtils.after(current, expires)) {
                        result.setMaxAge((int) ((expires.getTime() - current
                                .getTime()) / 1000));
                    } else {
                        // Already expired
                        result.setMaxAge(0);
                    }
                } else {
                    // Ignore the expires header
                    Context.getCurrentLogger().log(
                            Level.WARNING,
                            "Ignoring cookie setting expiration date. Unable to parse the date: "
                                    + pair.getValue());
                }
            } else if (pair.getName().equalsIgnoreCase(NAME_SET_MAX_AGE)) {
                result.setMaxAge(Integer.valueOf(pair.getValue()));
            } else if (pair.getName().equalsIgnoreCase(NAME_SET_PORT)) {
                // Not yet supported
            } else if (pair.getName().equalsIgnoreCase(NAME_SET_SECURE)) {
                // Flag attribute: present with no value means "secure"
                if ((pair.getValue() == null)
                        || (pair.getValue().length() == 0)) {
                    result.setSecure(true);
                }
            } else if (pair.getName().equalsIgnoreCase(
                    NAME_SET_ACCESS_RESTRICTED)) {
                // Flag attribute: present with no value means "httpOnly"
                if ((pair.getValue() == null)
                        || (pair.getValue().length() == 0)) {
                    result.setAccessRestricted(true);
                }
            } else if (pair.getName().equalsIgnoreCase(NAME_SET_VERSION)) {
                result.setVersion(Integer.valueOf(pair.getValue()));
            } else {
                // Unexpected special attribute
                // Silently ignore it as it may have been introduced by new
                // specifications
            }

            pair = readPair();
        }

        return result;
    }

    /**
     * Reads the next pair as a parameter.
     *
     * @return The next pair as a parameter, or null when the header is
     *         exhausted.
     * @throws IOException
     */
    private Parameter readPair() throws IOException {
        Parameter result = null;

        if (this.cachedPair != null) {
            // A pair was pushed back by readCookie(); return it first
            result = this.cachedPair;
            this.cachedPair = null;
        } else {
            try {
                boolean readingName = true;
                boolean readingValue = false;
                final StringBuilder nameBuffer = new StringBuilder();
                final StringBuilder valueBuffer = new StringBuilder();

                int nextChar = 0;
                while ((result == null) && (nextChar != -1)) {
                    nextChar = read();

                    if (readingName) {
                        if ((HttpUtils.isSpace(nextChar))
                                && (nameBuffer.length() == 0)) {
                            // Skip spaces
                        } else if ((nextChar == -1) || (nextChar == ';')
                                || (nextChar == ',')) {
                            if (nameBuffer.length() > 0) {
                                // End of pair with no value
                                result = HttpUtils.createParameter(nameBuffer,
                                        null);
                            } else if (nextChar == -1) {
                                // Do nothing return null preference
                            } else {
                                throw new IOException(
                                        "Empty cookie name detected. Please check your cookies");
                            }
                        } else if (nextChar == '=') {
                            readingName = false;
                            readingValue = true;
                        } else if (HttpUtils.isTokenChar(nextChar)
                                || (this.globalVersion < 1)) {
                            // version 0 cookies are lenient about token chars
                            nameBuffer.append((char) nextChar);
                        } else {
                            throw new IOException(
                                    "Separator and control characters are not allowed within a token. Please check your cookie header");
                        }
                    } else if (readingValue) {
                        if ((HttpUtils.isSpace(nextChar))
                                && (valueBuffer.length() == 0)) {
                            // Skip spaces
                        } else if ((nextChar == -1) || (nextChar == ';')) {
                            // End of pair
                            result = HttpUtils.createParameter(nameBuffer,
                                    valueBuffer);
                        } else if ((nextChar == '"')
                                && (valueBuffer.length() == 0)) {
                            // Quoted value: read it as a whole
                            valueBuffer.append(readQuotedString());
                        } else if (HttpUtils.isTokenChar(nextChar)
                                || (this.globalVersion < 1)) {
                            // version 0 cookies are lenient about token chars
                            valueBuffer.append((char) nextChar);
                        } else {
                            throw new IOException(
                                    "Separator and control characters are not allowed within a token. Please check your cookie header");
                        }
                    }
                }
            } catch (UnsupportedEncodingException uee) {
                throw new IOException(
                        "Unsupported encoding. Please contact the administrator");
            }
        }

        return result;
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Gui2D.Maps;
import Gui2D.SpriteController.Maps.House1_sprites;
import Gui2D.SpriteController.SingleSprite.PlayerSprite;
import Gui2D.SpriteController.Sprite;
import Gui2D.SpriteController.SpriteController;
import Gui2D.WizardOfTreldan;
import TWoT.Command;
import TWoT.CommandWord;
import TWoT.TWoT;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import javafx.animation.AnimationTimer;
import javafx.geometry.Rectangle2D;
import javafx.scene.Group;
import javafx.scene.Scene;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.control.TextArea;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
public class House1 extends PlayableMaps {
// Arraylist for player movement
private ArrayList<String> input;
// ArrayList for menu key strokes.
private ArrayList<String> menu_input;
// our global TWoT object
private TWoT game;
// our house sprites
private final House1_sprites house_sprites;
/**
 * Constructor for House1.
 *
 * @param world SpriteController supplying the shared sprite resources
 */
public House1(SpriteController world) {
    super();
    // create this map's sprite holder and load its background sprites
    this.house_sprites = new House1_sprites(world);
    this.house_sprites.setHouse1_background_SingleSprites();
}
/**
* @return the scene for this map
*/
@Override
public Scene getScene() {
// Link our globals to super class user inputs since no inheritence in AnimationTimer
input = super.getInput();
menu_input = super.getMenu_input();
//link the TWoT object to our main TWoT object
this.game = WizardOfTreldan.getGame();
//our main Group for all the components
Group root = new Group();
Scene theScene = new Scene(root);
//set background color
theScene.setFill(Color.rgb(83, 83, 83));
//set the styleScheet
theScene.getStylesheets().add("TextAreaStyle.css");
Canvas canvas_background = new Canvas(400, 300);
//relocate the canvas so its centered.
canvas_background.relocate(312, 75);
root.getChildren().add(canvas_background);
//set canvas of our items
Canvas house3_monsters = new Canvas(400, 300);
//relocate the canvas
house3_monsters.relocate(312, 75);
//add the canvas to the group
root.getChildren().add(house3_monsters);
//add a canvas only for the player
Canvas player_canvas = new Canvas(400, 300);
//relocate the canvas
player_canvas.relocate(312, 75);
//add the canvas to the group
root.getChildren().add(player_canvas);
//minimap ontop of everything else
MiniMap miniMap = new MiniMap(game);
//get the group of canvases from minimap object
Group miniMapGroup = miniMap.getMinimap();
//update the minimap correctly with the player canvas size
miniMap.updateMiniMap(400.0, 300.0);
//add the group to the root group
root.getChildren().add(miniMapGroup);
/**
* TextArea used to give the user more information about the game. What
* to do and and what happens.
*/
TextArea infobox = Infobox.getInfoBox();
//adding stackPane with the textarea component.
StackPane infoboxPane = new StackPane(infobox);
infoboxPane.setPrefSize(300, 150);
infoboxPane.relocate(0, 362);
root.getChildren().add(infoboxPane);
//get some of the games welcome message and add to the infobox
HashMap<Integer, String> welcome = game.getWelcomeMessages();
infobox.appendText(welcome.get(3) + "\n");
//Inventory menu
PlayerInventory playerinventory = new PlayerInventory(game, infobox);
AnchorPane menu = playerinventory.getMenu();
//escape menu
GameMenu escmenu = new GameMenu();
AnchorPane gameMenu = escmenu.getMenu();
//get our player from super class since no inheritence in AnimationTimer
PlayerSprite player = super.getPlayer();
player.setPosition(170, 50);
//set the keylisteners to the scene.
theScene.setOnKeyReleased(getOnKeyRelease(player));
theScene.setOnKeyPressed(getOnKeyPress());
//add graphicscontext to each canvas
GraphicsContext background_gc = canvas_background.getGraphicsContext2D();
//create GraphicsContext from our monster canvas containing all
GraphicsContext monster_gc = house3_monsters.getGraphicsContext2D();
//create GraphicsContext from our player_canvas
GraphicsContext moveable_gc = player_canvas.getGraphicsContext2D();
//get all the sprites of monsters
List<Sprite> sprites_interact = house_sprites.getHouse1_monster_sprites();
if (game.checkExisting("man")) {
sprites_interact.get(0).render(monster_gc);
}
//stranger sprite
Sprite stranger_sprite = house_sprites.getStranger_sprite();
if (game.checkExisting("stranger")) {
stranger_sprite.render(monster_gc);
}
//generate all the background sprites
List<Sprite> sprites_still = house_sprites.getHouse1();
for (Sprite sprite : sprites_still) {
sprite.render(background_gc);
}
//set our world boundaries
Rectangle2D worldBoundRight = new Rectangle2D(350, 0, 1, 300);
Rectangle2D worldBoundLeft = new Rectangle2D(0, 0, 1, 300);
Rectangle2D worldBoundBottom = new Rectangle2D(0, 220, 400, 1);
Rectangle2D worldBoundTop = new Rectangle2D(0, 0, 400, 1);
new AnimationTimer() {
//set the current time we started.
private long lastNanoTime = System.nanoTime();
//what to do each cycle
@Override
public void handle(long currentNanoTime) {
//request the focus back
root.requestFocus();
//get how many sec have passed
double elapsedTime = (currentNanoTime - lastNanoTime) / 1000000000.0;
//set the lastNanoTime to the nano time from parameter
lastNanoTime = currentNanoTime;
//set our initial direction standstill
player.setDirection(PlayerSprite.Direction.STANDSTILL);
// <editor-fold defaultstate="collapsed" desc=" LEFT INPUT ">
//now check for the users input
//check if the user wants to walk left.
if (input.contains("LEFT")) {
//check if the user walks into a world boundary
if (player.intersects_left(worldBoundLeft)) {
//Reset the velocity
player.setVelocity(0, 0);
//no collission continue
} else if (player.intersects_left(sprites_still.get(3))
|| player.intersects_left(sprites_still.get(4))
|| player.intersects_left(sprites_still.get(5))
|| player.intersects_left(sprites_still.get(6))) {
//Reset the velocity
player.setVelocity(0, 0);
} else if (game.checkExisting("man") && player.intersects_left(sprites_interact.get(0))) {
player.setVelocity(0, 0);
} else if (game.checkExisting("stranger") && player.intersects_left(stranger_sprite)) {
player.setVelocity(0, 0);
} else {
player.setVelocity(-100, 0);
}
//set the direction the player walks
player.setDirection(PlayerSprite.Direction.WALK_LEFT);
}
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc=" RIGHT INPUT ">
//check if the user wants to walk right.
if (input.contains("RIGHT")) {
//check if the user walks into a world boundary
if (player.intersects_right(worldBoundRight)) {
//Reset the velocity
player.setVelocity(0, 0);
} else if (player.intersects_right(sprites_still.get(3))
|| player.intersects_right(sprites_still.get(4))
|| player.intersects_right(sprites_still.get(5))
|| player.intersects_right(sprites_still.get(6))) {
//Reset the velocity
player.setVelocity(0, 0);
} else if (game.checkExisting("man") && player.intersects_right(sprites_interact.get(0))) {
player.setVelocity(0, 0);
} else if (game.checkExisting("stranger") && player.intersects_right(stranger_sprite)) {
player.setVelocity(0, 0);
} else {
player.setVelocity(100, 0);
}
//set the direction the player walks
player.setDirection(PlayerSprite.Direction.WALK_RIGHT);
}
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc=" UP INPUT ">
//check if the user wants to walk up.
if (input.contains("UP")) {
//check if the user walks into a world boundary
if (player.intersects_top(worldBoundTop)) {
//Reset the velocity
player.setVelocity(0, 0);
} else if (player.intersects_top(sprites_still.get(3))
|| player.intersects_top(sprites_still.get(4))
|| player.intersects_top(sprites_still.get(5))
|| player.intersects_top(sprites_still.get(6))) {
//Reset the velocity
player.setVelocity(0, 0);
} else if (player.intersects_top(sprites_still.get(2))) {
//Reset the velocity
player.setVelocity(0, 0);
game.goTo(new Command(CommandWord.GO, "door"));
//remove all the inputs
input.removeAll(input);
//stop this AnimationTimer
this.stop();
//clear the textarea
infobox.clear();
//set the menu as a scene instead.
setNewScene();
//save the game when we walk out
WizardOfTreldan.saveGame();
} else if (game.checkExisting("man") && player.intersects_top(sprites_interact.get(0))) {
player.setVelocity(0, 0);
} else if (game.checkExisting("stranger") && player.intersects_top(stranger_sprite)) {
player.setVelocity(0, 0);
} else {
player.setVelocity(0, -100);
}
//set the direction the player walks
player.setDirection(PlayerSprite.Direction.WALK_UP);
}
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc=" DOWN INPUT ">
//check if the user wants to walk down.
if (input.contains("DOWN")) {
//check if the user walks into a world boundary
if (player.intersects_bottom(worldBoundBottom)) {
//Reset the velocity
player.setVelocity(0, 0);
} else if (player.intersects_bottom(sprites_still.get(3))
|| player.intersects_bottom(sprites_still.get(4))
|| player.intersects_bottom(sprites_still.get(5))
|| player.intersects_bottom(sprites_still.get(6))) {
//Reset the velocity
player.setVelocity(0, 0);
} else if (game.checkExisting("man") && player.intersects_bottom(sprites_interact.get(0))) {
player.setVelocity(0, 0);
} else if (game.checkExisting("stranger") && player.intersects_bottom(stranger_sprite)) {
player.setVelocity(0, 0);
} else {
player.setVelocity(0, 100);
}
//set the direction the player walks
player.setDirection(PlayerSprite.Direction.WALK_DOWN);
}
// </editor-fold>
//check if the user wanst to pickup an item
if (menu_input.contains("E")) {
//check if the player intersects with the chest and the item exist ingame
if (game.checkExisting("chest") && player.intersect(sprites_still.get(4))) {
//goto the chest and print out what happens
for (String s : game.goTo(new Command(CommandWord.GO, "chest"))) {
infobox.appendText("\n" + s + "\n");
}
//update the inventory since we might have pickedup an item
playerinventory.update(game);
}
//check if the player intersects with the man and the man exist ingame
if (game.checkExisting("man") && player.intersect(sprites_interact.get(0))) {
//goto the man and print what happens
for (String s : game.goTo(new Command(CommandWord.GO, "man"))) {
infobox.appendText("\n" + s + "\n");
}
//update the inventory.
playerinventory.update(game);
}
//check if the player intersects with the stranger and the stranger exist ingame
if (game.checkExisting("stranger") && player.intersect(stranger_sprite)) {
//Reset the velocity
player.setVelocity(0, 0);
game.goTo(new Command(CommandWord.GO, "stranger"));
//remove all the inputs
input.removeAll(input);
//stop this AnimationTimer
this.stop();
//clear the textarea
infobox.clear();
//set the menu as a scene instead.
setNewScene();
//save the game when we walk out
WizardOfTreldan.saveGame();
}
//remove the key E from the list
menu_input.remove("E");
}
//update the players velocity
player.update(elapsedTime);
//clear our player
moveable_gc.clearRect(0, 0, 400, 300);
//render our new player
player.render(moveable_gc);
//clear the monster graphicscontext
monster_gc.clearRect(0, 0, 400, 300);
//render the man ingame
if (game.checkExisting("man")) {
sprites_interact.get(0).render(monster_gc);
}
//render the stranger if he exists
if (game.checkExisting("stranger")) {
stranger_sprite.render(monster_gc);
}
//check if the user wants to see the esc menu.
if (menu_input.contains("ESCAPE")) {
//if the menu is not shown show it
if (!escmenu.isShown()) {
//add the menu to the root
root.getChildren().add(gameMenu);
//set the menu to shown.
escmenu.setShown(true);
}
} else //if the menu is shown remove it
{
if (escmenu.isShown()) {
root.getChildren().remove(gameMenu);
escmenu.setShown(false);
}
}
//check if the user wants to see the inventory
if (menu_input.contains("I")) {
//if the inventory is not shown show it.
if (!playerinventory.isShown()) {
root.getChildren().add(menu);
playerinventory.setShown(true);
}
//if the inventory is shown remove the inventory.
} else if (playerinventory.isShown()) {
root.getChildren().remove(menu);
playerinventory.setShown(false);
}
//update the player on the minimaps position
miniMap.updateMiniMap_player(player.getPositionX(), player.getPositionY());
}
/**
* set the new scene depending on which room you entered
*/
public void setNewScene() {
switch (game.getCurrentRoomId()) {
case 2:
WizardOfTreldan.setVillageScene();
break;
case 11:
WizardOfTreldan.setDungeonScene();
break;
}
}
}.start();
//return the scene
return theScene;
}
}
| |
/*******************************************************************************
* Copyright 2017 Vincenzo-Maria Cappelleri <vincenzo.cappelleri@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
/**
*
*/
package raw.blockChain.services.thinNode.implementations;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.sql.SQLException;
import java.sql.SQLIntegrityConstraintViolationException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import com.google.common.collect.ImmutableList;
import raw.blockChain.api.Block;
import raw.blockChain.api.BlockChainConstants;
import raw.blockChain.api.BlockCompactRepresentation;
import raw.blockChain.api.BlockHeader;
import raw.blockChain.api.HashValue;
import raw.blockChain.api.Merkler;
import raw.blockChain.api.Transaction;
import raw.blockChain.api.implementations.DefaultMerkler;
import raw.blockChain.exceptions.TransactionNotPresentException;
import raw.blockChain.services.dbHelper.BlocksToDataBase;
import raw.blockChain.services.dbHelper.implementations.DefaultBlockToDataBase;
import raw.blockChain.services.miner.messages.types.SubmitTransactionMessage;
import raw.blockChain.services.thickNode.messages.types.BlockCompactRepresentationRequestMessage;
import raw.blockChain.services.thickNode.messages.types.BlockRequestMessage;
import raw.blockChain.services.thickNode.messages.types.CommunicationMessage;
import raw.blockChain.services.thickNode.messages.types.ThickNodeAddressMessage;
import raw.blockChain.services.thickNode.messages.types.TransactionBlockNumberMessage;
import raw.blockChain.services.thickNode.messages.types.CommunicationMessage.Type;
import raw.blockChain.services.thinNode.ThinNode;
import raw.blockChain.services.utils.ThickNodeAddressBookFile;
import raw.logger.Log;
import raw.settings.BlockChainProperties;
import raw.settings.ModuleProperty;
import raw.settings.PropertiesManager;
import raw.utils.RAWServiceUtils;
/**
* Default implementation of {@link ThinNode} interface.
*
* @author vic
*
*/
public class DefaultThinNode implements ThinNode {
// Project logger instance (obtained from Log.getLogger() in the constructor).
private Log log;
// Known-good thick node contacts; read/written under synchronized(thickNodes).
private List<InetSocketAddress> thickNodes;
// Contacts that failed a ping; periodically re-pinged and either promoted
// back into thickNodes or dropped for good.
private List<InetSocketAddress> unresponsiveThickNodes;
// Local block store, used as a cache before asking thick nodes over the network.
private BlocksToDataBase database;
// Main-loop flag: call() iterates while true; stopService() sets it false.
private boolean running;
// Becomes true after the first pass of the main loop (enables the inter-ping sleep).
private boolean initialized;
// This node's own address, advertised inside PING_FROM_THIN messages.
private InetSocketAddress myAddress;
/**
 * Creates a thin node with empty, thread-safe contact lists and a fresh
 * database helper. The node stays flagged as uninitialized until the first
 * pass of the {@link #call()} loop completes.
 */
public DefaultThinNode() {
    log = Log.getLogger();
    initialized = false;
    database = new DefaultBlockToDataBase(this);
    thickNodes = Collections.synchronizedList(new ArrayList<InetSocketAddress>());
    unresponsiveThickNodes = Collections.synchronizedList(new ArrayList<InetSocketAddress>());
}
/* (non-Javadoc)
* @see java.util.concurrent.Callable#call()
*/
/**
 * Main service loop of the thin node; runs until {@link #stopService()}
 * clears {@code running}. Each iteration: ask a random known thick node for
 * a new contact, sleep a randomized interval (once initialized), ping a
 * random known node (demoting it to the unresponsive list on failure), then
 * ping a random unresponsive node (promoting it back on success, dropping
 * it on failure). When both lists run dry, contacts are reloaded from the
 * address-book file.
 *
 * @return always {@code null} (Callable&lt;Void&gt;)
 * @throws Exception propagated from sleeping or the database layer
 */
@Override
public Void call() throws Exception {
    log.info("Starting Thin Node");
    running = true;
    database.open();
    BlockChainProperties props = (BlockChainProperties) PropertiesManager.getManager().getProperties(ModuleProperty.BLOCK_CHAIN);
    // Seed the contact list from the on-disk address book (or its defaults).
    ThickNodeAddressBookFile addressBook = new ThickNodeAddressBookFile();
    thickNodes = Collections.synchronizedList(addressBook.getAddressList());
    // Block until we can resolve our own IP; needed for ping messages.
    InetAddress myIP = null;
    while (myIP == null) {
        try {
            myIP = RAWServiceUtils.resolveIP();
        } catch (Exception e) {
            // NOTE(review): message says "2 seconds" but the sleep is 1850 ms.
            log.debug("Cant resolve my ip. Waiting 2 seconds and trying again.");
            Thread.sleep(1850);
        }
    }
    myAddress = new InetSocketAddress(myIP, props.getListeningSocket());
    Random rand = new Random(System.currentTimeMillis());
    while (running) {
        int thickNodesSize;
        synchronized (thickNodes) {
            thickNodesSize = thickNodes.size();
        }
        if(thickNodesSize>0){
            // Gossip step: ask one random known node for another node's address.
            InetSocketAddress toBeAsked;
            synchronized (thickNodes) {
                toBeAsked = thickNodes.get(rand.nextInt(thickNodes.size()));
            }
            log.debug("Asking a new contact to "+toBeAsked);
            askNodeContactToAnotherNode(toBeAsked);
        } else {
            log.verboseDebug("No nodes to ask new contacts. Skipping.");
        }
        if(initialized){
            // Randomized sleep (base +/- variability) so thin nodes do not
            // ping in lock-step; skipped on the very first iteration.
            int sleeptime = PING_MILLISECONDS_INTERTIME + (rand.nextInt(PING_MILLISECONDS_VARIABILITY*2) - PING_MILLISECONDS_VARIABILITY);
            log.verboseDebug("Gonna sleep for "+sleeptime+" ms.");
            Thread.sleep(sleeptime);
            log.verboseDebug("I woke up!");
        }
        InetSocketAddress lastPinged = null;
        synchronized (thickNodes) {
            thickNodesSize = thickNodes.size();
        }
        if(thickNodesSize>0){
            // Liveness check of one random known node.
            InetSocketAddress toPing;
            synchronized (thickNodes) {
                toPing = thickNodes.get(rand.nextInt(thickNodes.size()));
            }
            log.debug("Try to ping "+toPing);
            boolean success = sendAPing(toPing);
            if(!success){
                // Demote: failed nodes get retried from the unresponsive list.
                moveThickNodeToUnresponsive(toPing);
                lastPinged = toPing;
            }
        } else {
            log.verboseDebug("No nodes to ping. Skipping.");
        }
        int unresponsiveThickNodesSize;
        synchronized (unresponsiveThickNodes) {
            unresponsiveThickNodesSize = unresponsiveThickNodes.size();
        }
        if(unresponsiveThickNodesSize > 0){
            InetSocketAddress toPing;
            synchronized (unresponsiveThickNodes) {
                toPing = unresponsiveThickNodes.get(rand.nextInt(unresponsiveThickNodes.size()));
            }
            if(lastPinged != null){
                // Avoid immediately re-pinging the node that just failed;
                // give up after about half the list size of random draws.
                int counter = 0;
                while(lastPinged.equals(toPing)){
                    toPing = null;
                    if(counter > (unresponsiveThickNodesSize/2)){
                        log.verboseDebug("Cant find a suitable unresponsive node to ping.");
                        break;
                    }
                    synchronized (unresponsiveThickNodes) {
                        toPing = unresponsiveThickNodes.get(rand.nextInt(unresponsiveThickNodes.size()));
                    }
                    counter++;
                }
            }
            if(toPing != null){
                log.debug("Try to ping unresponsive node "+toPing);
                boolean success = sendAPing(toPing);
                if(success){
                    // Promote the node back into the good list.
                    addNodeAddressIfNotPresent(toPing);
                } else {
                    // Second consecutive failure: forget the node entirely.
                    log.verboseDebug(toPing + " is dead. Deleting contact.");
                    synchronized (unresponsiveThickNodes) {
                        unresponsiveThickNodes.remove(toPing);
                    }
                }
            } else {
                log.verboseDebug("I do not have a \"good\" unresponsive node to ping. Skipping.");
            }
        } else {
            log.verboseDebug("No unresponsive nodes to ping. Skipping.");
            synchronized (thickNodes) {
                thickNodesSize = thickNodes.size();
            }
            if(thickNodesSize == 0){
                // Both lists empty: re-seed from the address-book file so the
                // node can bootstrap again.
                log.debug("My contact list is empty. Try to reload contacts from file or defaults.");
                unresponsiveThickNodes = Collections.synchronizedList(new ThickNodeAddressBookFile().getAddressList());
            }
        }
        initialized = true;
    }
    log.verboseDebug("Thin node is done.");
    return null;
}
/**
 * Asks {@code nodeToBeAsked} for the address of another thick node and, on
 * a well-formed reply belonging to our block chain, records that address
 * via {@link #addNodeAddressIfNotPresent}.
 * <p>
 * Rewritten with try-with-resources — matching the style of
 * {@code sendAPing} and {@code sendBlockRequest} in this class — so the
 * socket is closed on every exit path without the per-branch
 * {@code closeIfNotNull} calls the original needed.
 *
 * @param nodeToBeAsked the thick node to query
 */
private void askNodeContactToAnotherNode(InetSocketAddress nodeToBeAsked){
    ThickNodeAddressMessage request = new ThickNodeAddressMessage();
    Object obj = null;
    try (Socket sock = new Socket(nodeToBeAsked.getAddress(), nodeToBeAsked.getPort())){
        ObjectOutputStream oos;
        try {
            oos = new ObjectOutputStream(sock.getOutputStream());
        } catch (IOException e) {
            log.verboseDebug("Cannot establish connection output stream. Aborting request.");
            return;
        }
        try {
            oos.writeObject(request);
        } catch (IOException e) {
            log.verboseDebug("Cannot Send request. Aborting.");
            return;
        }
        // Bound the wait for the reply.
        try {
            sock.setSoTimeout(BlockChainConstants.SOCKETS_MILLISECONDS_TIMEOUT);
        } catch (SocketException e) {
            log.exception(e);
            return;
        }
        ObjectInputStream ois;
        try {
            ois = new ObjectInputStream(sock.getInputStream());
        } catch (IOException e) {
            if(!(e instanceof SocketTimeoutException)){
                log.verboseDebug("Cannot establish connection input stream. Aborting request.");
            } else {
                log.verboseDebug("Socket timeout opening input stream. Aborting request.");
            }
            return;
        }
        try {
            obj = ois.readObject();
        } catch (ClassNotFoundException e) {
            log.exception(e);
            return;
        } catch (IOException e) {
            if (!(e instanceof SocketTimeoutException)) {
                log.verboseDebug("Cannot read reply. Aborting request.");
            } else {
                log.verboseDebug("Read from socket timeout. Aborting request.");
            }
            return;
        }
    } catch (IOException e) {
        // Raised by the Socket constructor (as in the original) or by the
        // implicit close(); either way this contact attempt is over.
        log.verboseDebug("IOException creating socket to "+nodeToBeAsked+". Aborting contact.");
        return;
    }
    // Accept only a non-request reply for our own block chain.
    if(obj instanceof ThickNodeAddressMessage){
        ThickNodeAddressMessage reply = (ThickNodeAddressMessage) obj;
        BlockChainProperties props = (BlockChainProperties) PropertiesManager.getManager().getProperties(ModuleProperty.BLOCK_CHAIN);
        if((!reply.isRequest())&&reply.getChainName().equals(props.getBlockChainName())){
            addNodeAddressIfNotPresent(reply.getAddress());
        }
    }
}
/**
 * Demotes a thick node: removes it from the good list and, if absent,
 * appends it to the unresponsive list.
 * <p>
 * Fix: the original performed the {@code contains} check and the
 * {@code add} under two separate lock acquisitions, so two threads could
 * both pass the check and insert the same address twice. The check-then-add
 * is now atomic under a single {@code synchronized} block.
 *
 * @param address the node that failed a ping
 */
private void moveThickNodeToUnresponsive(InetSocketAddress address){
    synchronized (thickNodes) {
        thickNodes.remove(address);
    }
    synchronized (unresponsiveThickNodes) {
        if(!unresponsiveThickNodes.contains(address)){
            unresponsiveThickNodes.add(address);
        }
    }
}
/**
 * Sends a PING_FROM_THIN message (carrying our own address) to the given
 * thick node and waits for a matching PONG.
 *
 * @param address the thick node to ping
 * @return {@code true} only when a PONG for our block chain name is received
 */
private boolean sendAPing(InetSocketAddress address){
    CommunicationMessage ping = new CommunicationMessage(Type.PING_FROM_THIN);
    ping.attachInetSocketAddress(myAddress);
    // try-with-resources closes the socket on every path; the explicit
    // closeIfNotNull/close calls below are redundant but harmless.
    try (Socket sock = new Socket(address.getAddress(), address.getPort())){
        ObjectOutputStream oos = null;
        try {
            oos = new ObjectOutputStream(sock.getOutputStream());
        } catch (IOException e) {
            closeIfNotNull(sock);
            return false;
        }
        try {
            oos.writeObject(ping);
        } catch (IOException e) {
            closeIfNotNull(sock);
            return false;
        }
        // Bound the wait for the reply.
        try {
            sock.setSoTimeout(BlockChainConstants.SOCKETS_MILLISECONDS_TIMEOUT);
        } catch (SocketException e1) {
            log.exception(e1);
            closeIfNotNull(sock);
            return false;
        }
        ObjectInputStream ois = null;
        try {
            ois = new ObjectInputStream(sock.getInputStream());
        } catch (IOException e) {
            closeIfNotNull(sock);
            return false;
        }
        Object obj = null;
        try {
            obj = ois.readObject();
        } catch (ClassNotFoundException e1) {
            closeIfNotNull(sock);
            return false;
        } catch (IOException e1) {
            closeIfNotNull(sock);
            return false;
        }
        try {
            sock.close();
        } catch (IOException e) {
            log.exception(e);
        }
        // A reply counts as success only if it is a PONG for our chain.
        if(obj instanceof CommunicationMessage){
            CommunicationMessage reply = (CommunicationMessage) obj;
            BlockChainProperties props = (BlockChainProperties) PropertiesManager.getManager().getProperties(ModuleProperty.BLOCK_CHAIN);
            if((!(reply.getMessage() == Type.PONG) || !reply.getChainName().equals(props.getBlockChainName()))){
                return false;
            }
        } else {
            return false;
        }
        return true;
    } catch (Exception e) {
        // Socket creation or close failure; log suppressed close exceptions too.
        log.exception(e);
        for(Throwable t : e.getSuppressed()){
            log.exception(t);
        }
        return false;
    }
}
/**
 * Best-effort socket close: ignores {@code null}, logs (and swallows) any
 * {@link IOException} raised by {@code close()}.
 *
 * @param sock the socket to close; may be {@code null}
 */
private void closeIfNotNull(Socket sock){
    if(sock == null){
        return;
    }
    try {
        sock.close();
    } catch (IOException e) {
        log.exception(e);
    }
}
/**
 * Records a thick node address in the good list, removing it from the
 * unresponsive list first; does nothing when the address is already known.
 * <p>
 * Fix: the original checked {@code contains} and called {@code add} under
 * separate lock acquisitions, so two threads could both pass the check and
 * insert a duplicate. The final check-then-add is now atomic; an early
 * re-check under the same lock preserves the original fast path.
 *
 * @param address the address to record
 */
private void addNodeAddressIfNotPresent(InetSocketAddress address){
    synchronized (thickNodes) {
        if(thickNodes.contains(address)){
            return;
        }
    }
    // remove() is a no-op when the address is absent, so no contains check
    // is needed here.
    synchronized (unresponsiveThickNodes) {
        unresponsiveThickNodes.remove(address);
    }
    synchronized (thickNodes) {
        // Re-check: the first lock was released above, another thread may
        // have added the address in the meantime.
        if(!thickNodes.contains(address)){
            thickNodes.add(address);
        }
    }
}
/* (non-Javadoc)
* @see raw.blockChain.services.CommonNode#getBlockFromHeader(raw.blockChain.api.BlockHeader)
*/
/**
 * Convenience overload: resolves the block via the header's own hash.
 *
 * @param header header identifying the wanted block
 * @return the block, or {@code null} when it cannot be retrieved
 */
@Override
public Block getBlockFromHeader(BlockHeader header) {
    return getBlockFromHash(header.hash());
}
/* (non-Javadoc)
* @see raw.blockChain.services.CommonNode#getBlockFromHash(raw.blockChain.api.HashValue)
*/
/**
 * Looks up a block by hash: first in the local database, then — on a miss —
 * by consensus among up to THICK_NODE_CONSENSUS random thick nodes. A block
 * retrieved from the network is cached in the local database.
 *
 * @param hash hash identifying the wanted block
 * @return the block, or {@code null} when it cannot be retrieved.
 *         NOTE(review): also returns {@code null} when the network copy is
 *         already stored locally (integrity violation on store) — confirm
 *         this is intended rather than returning the block.
 */
@Override
public Block getBlockFromHash(HashValue hash) {
    Block block = localGetBlockFromHash(hash);
    if(block == null){
        // in the database the block is not found.
        // BUT. we could ask to thick nodes.
        ArrayList<InetSocketAddress> nodesToAsk = getNodesToAsk();
        ArrayList<Block> blocks = new ArrayList<Block>();
        for(InetSocketAddress node : nodesToAsk){
            Block received = askBlockByHashToThickNode(node, hash);
            if(received != null){
                blocks.add(received);
            }
        }
        // Majority/consensus pick among the replies.
        block = selectTheGoodBlock(blocks);
        if(block != null){
            try {
                database.storeOnDataBase(block);
            } catch (SQLIntegrityConstraintViolationException e) {
                log.verboseDebug("Retrieved block is possibly duplicated.");
                return null;
            }
        }
    }
    return block;
}
/**
 * Picks the agreed-upon block from a list of replies: when every reply is
 * identical that block wins outright; otherwise the decision is delegated
 * to {@link #moreAttendibleBlock}.
 *
 * @param blocks replies collected from thick nodes (may be empty)
 * @return the selected block, or {@code null} for an empty list
 */
private Block selectTheGoodBlock(ArrayList<Block> blocks){
    if(blocks.isEmpty()){
        return null;
    }
    Block reference = blocks.get(0);
    boolean unanimous = true;
    for(Block candidate : blocks){
        if(!candidate.equals(reference)){
            unanimous = false;
            break;
        }
    }
    return unanimous ? reference : moreAttendibleBlock(blocks);
}
/**
 * Picks up to THICK_NODE_CONSENSUS distinct random thick nodes to query.
 * <p>
 * Fix: the original sized its target from one snapshot of the list but then
 * kept indexing the live list inside an unbounded loop — if another thread
 * shrank the list, {@code rand.nextInt(0)} could throw, or the distinct-pick
 * loop could spin forever. We now copy the list once under the lock and
 * shuffle the copy, which is race-free and still a uniform random choice.
 *
 * @return a list of distinct node addresses (possibly empty)
 */
private ArrayList<InetSocketAddress> getNodesToAsk(){
    ArrayList<InetSocketAddress> snapshot;
    synchronized (thickNodes) {
        snapshot = new ArrayList<InetSocketAddress>(thickNodes);
    }
    Collections.shuffle(snapshot, new Random(System.currentTimeMillis()));
    int numberOfNodesToAsk = Math.min(THICK_NODE_CONSENSUS, snapshot.size());
    ArrayList<InetSocketAddress> nodesToAsk = new ArrayList<InetSocketAddress>();
    for(int i = 0; i < numberOfNodesToAsk; i++){
        nodesToAsk.add(snapshot.get(i));
    }
    return nodesToAsk;
}
/**
 * Local-database-only block lookup by hash; SQL failures are logged and
 * reported as a miss.
 *
 * @param hash hash identifying the wanted block
 * @return the locally stored block, or {@code null} when absent or on error
 */
private Block localGetBlockFromHash(HashValue hash){
    try {
        return database.getBlockFromHash(hash);
    } catch (SQLException e) {
        log.exception(e);
        return null;
    }
}
/**
 * Given a set of candidate blocks with no unanimous agreement, pick the
 * most credible one: the block reported most often; frequency ties are
 * broken by preferring a block whose previous-hash links onto our local
 * chain, and finally at random.
 *
 * @param blocks non-empty list of candidates (duplicates expected)
 * @return the chosen block
 */
private Block moreAttendibleBlock(ArrayList<Block> blocks){
    HashMap<Block, Integer> count = new HashMap<Block, Integer>();
    for(Block block : blocks){
        int freq = Collections.frequency(blocks, block);
        if(!count.containsKey(block)){
            count.put(block, freq);
        }
    }
    ArrayList<Map.Entry<Block, Integer>> countToList = new ArrayList<Map.Entry<Block,Integer>>(count.entrySet());
    // FIX: sort DESCENDING by frequency. The previous ascending sort made
    // the collection loop below accept every entry (each ascending value
    // passed the >= maxValue test), so "papabili" held ALL candidates and
    // the random tie-break could pick a block reported only once.
    Collections.sort(countToList, new Comparator<Map.Entry<Block, Integer>>() {
        @Override
        public int compare(Map.Entry<Block, Integer> o1, Map.Entry<Block, Integer> o2) {
            // Integer.compare also avoids the overflow risk of o1 - o2.
            return Integer.compare(o2.getValue().intValue(), o1.getValue().intValue());
        }
    });
    // Collect every block tied for the highest frequency.
    int maxValue = -1;
    ArrayList<Block> papabili = new ArrayList<Block>();
    for(Map.Entry<Block, Integer> entry : countToList){
        if(entry.getValue().intValue() >= maxValue){
            papabili.add(entry.getKey());
            maxValue = entry.getValue().intValue();
        } else {
            break;
        }
    }
    if(papabili.size() == 1){
        return papabili.get(0);
    }
    // Tie-break: prefer a candidate that chains onto a block we hold locally.
    Block chosen = null;
    for(Block block : papabili){
        BlockHeader myPrevious = localGetBlockHeaderByNumber(block.getHeader().getBlockNumber() - 1 );
        if(myPrevious != null){
            if(block.getHeader().previousBlock().equals(myPrevious.hash())){
                chosen = block;
                break;
            }
        }
    }
    // Last resort: uniform random choice among the tied candidates.
    if(chosen == null){
        Random rand = new Random(System.currentTimeMillis());
        chosen = papabili.get(rand.nextInt(papabili.size()));
    }
    return chosen;
}
/**
 * Local-database-only header lookup by block number; no network fallback.
 *
 * @param blockNumber number of the wanted block
 * @return the locally stored header, or whatever the database helper
 *         returns on a miss (presumably {@code null} — confirm in helper)
 */
private BlockHeader localGetBlockHeaderByNumber(long blockNumber){
    return database.getBlockHeaderByNumber(blockNumber);
}
/**
 * Requests from one thick node the block identified by {@code hash}.
 *
 * @param nodeToAsk thick node to contact
 * @param hash hash identifying the wanted block
 * @return the received block, or {@code null} on any failure
 */
private Block askBlockByHashToThickNode(InetSocketAddress nodeToAsk, HashValue hash){
    BlockRequestMessage request = new BlockRequestMessage(hash);
    return sendBlockRequest(nodeToAsk, request);
}
/**
 * Sends a {@link BlockRequestMessage} to a thick node and returns the block
 * from a positive reply. All network failures are logged and collapse to a
 * {@code null} return.
 *
 * @param nodeToAsk thick node to contact
 * @param request the prepared request (by hash, by number, or chain tip)
 * @return the received block, or {@code null} on any failure
 */
private Block sendBlockRequest(InetSocketAddress nodeToAsk, BlockRequestMessage request){
    log.verboseDebug("Sending Block request to "+nodeToAsk+".");
    // try-with-resources closes the socket on every path; the explicit
    // closeIfNotNull/close calls below are redundant but harmless.
    try (Socket sock = new Socket(nodeToAsk.getAddress(), nodeToAsk.getPort())){
        ObjectOutputStream oos = null;
        try {
            oos = new ObjectOutputStream(sock.getOutputStream());
        } catch (IOException e) {
            log.verboseDebug("Cannot establish connection output stream. Returning.");
            closeIfNotNull(sock);
            return null;
        }
        try {
            oos.writeObject(request);
        } catch (IOException e) {
            log.verboseDebug("Cannot send request. Returning.");
            closeIfNotNull(sock);
            return null;
        }
        // Bound the wait for the reply.
        try {
            sock.setSoTimeout(BlockChainConstants.SOCKETS_MILLISECONDS_TIMEOUT);
        } catch (SocketException e1) {
            log.exception(e1);
            closeIfNotNull(sock);
            return null;
        }
        ObjectInputStream ois = null;
        try {
            ois = new ObjectInputStream(sock.getInputStream());
        } catch (IOException e) {
            log.verboseDebug("Cannot establish connection input stream. Returning.");
            closeIfNotNull(sock);
            return null;
        }
        Object obj = null;
        try {
            obj = ois.readObject();
        } catch (ClassNotFoundException e) {
            log.exception(e);
            closeIfNotNull(sock);
            return null;
        } catch (IOException e) {
            log.verboseDebug("Cannot receive reply. Returning.");
            closeIfNotNull(sock);
            return null;
        }
        try {
            sock.close();
        } catch (IOException e) {
            log.exception(e);
        }
        // Unwrap the block only from a positive BlockRequestMessage reply.
        Block block = null;
        if(obj instanceof BlockRequestMessage){
            BlockRequestMessage reply = (BlockRequestMessage) obj;
            if(reply.isPositiveReply()){
                block = reply.getBlock();
            }
        }
        return block;
    } catch (Exception e) {
        // Socket creation or close failure; log suppressed close exceptions too.
        log.exception(e);
        for(Throwable t : e.getSuppressed()){
            log.exception(t);
        }
        return null;
    }
}
/* (non-Javadoc)
* @see raw.blockChain.services.CommonNode#getLastBlockHeaderInChain()
*/
/**
 * Returns the header of the newest block in the chain, as reported by the
 * thick nodes (a thin node keeps no authoritative chain tip of its own).
 *
 * @return the latest header, or {@code null} when it cannot be retrieved
 */
@Override
public BlockHeader getLastBlockHeaderInChain() {
    Block lastBlock = retrieveLastBlockInChain();
    return (lastBlock == null) ? null : lastBlock.getHeader();
}
/**
 * Asks up to THICK_NODE_CONSENSUS thick nodes for the newest block in the
 * chain, picks the consensus answer and caches it locally. When the store
 * reports a duplicate, the block is returned only if it matches our locally
 * recorded chain tip.
 *
 * @return the latest block, or {@code null} when it cannot be retrieved
 */
private Block retrieveLastBlockInChain(){
    ArrayList<InetSocketAddress> nodesToAsk = getNodesToAsk();
    ArrayList<Block> blocks = new ArrayList<Block>();
    for(InetSocketAddress node : nodesToAsk){
        Block received = askLastBlockInChain(node);
        if(received != null){
            blocks.add(received);
        }
    }
    Block block = selectTheGoodBlock(blocks);
    if(block != null){
        try {
            database.storeOnDataBase(block);
        } catch (SQLIntegrityConstraintViolationException e) {
            // Already stored: accept the block only when it agrees with the
            // tip we have on disk.
            log.verboseDebug("Retrieved block is possibly duplicated.");
            BlockHeader header = database.getLastBlockHeaderInChain(false);
            if(!block.getHeader().equals(header)){
                return null;
            }
        }
    }
    return block;
}
/**
 * Requests from one thick node its latest block in the chain (the no-arg
 * {@link BlockRequestMessage} form denotes a chain-tip request).
 *
 * @param nodeToAsk thick node to contact
 * @return the received block, or {@code null} on any failure
 */
private Block askLastBlockInChain(InetSocketAddress nodeToAsk){
    BlockRequestMessage request = new BlockRequestMessage();
    return sendBlockRequest(nodeToAsk, request);
}
/**
 * Local-database-only lookup of the chain-tip header; no network fallback.
 * (The {@code false} flag's meaning is defined by the database helper.)
 *
 * @return the locally recorded tip header, or {@code null} when unknown
 */
private BlockHeader localGetLastBlockHeaderInChain(){
    return database.getLastBlockHeaderInChain(false);
}
/* (non-Javadoc)
* @see raw.blockChain.services.CommonNode#getLastBlockInChain()
*/
/**
 * Returns the newest block in the chain, preferring the locally cached tip
 * and falling back to the thick nodes when either the header or the full
 * block is missing locally.
 *
 * @return the latest block, or {@code null} when it cannot be retrieved
 */
@Override
public Block getLastBlockInChain() {
    Block cached = null;
    BlockHeader tip = localGetLastBlockHeaderInChain();
    if (tip != null) {
        cached = localGetBlockFromHash(tip.hash());
    }
    return (cached != null) ? cached : retrieveLastBlockInChain();
}
/* (non-Javadoc)
* @see raw.blockChain.services.CommonNode#getBlockHeaderByNumber(long)
*/
/**
 * Looks up a block header by block number: local database first, then by
 * consensus among thick nodes; a network-retrieved block is cached locally.
 *
 * @param blockNumber number of the wanted block
 * @return the header, or {@code null} when it cannot be retrieved.
 *         NOTE(review): like getBlockFromHash, a duplicate-store also
 *         yields {@code null} — confirm this is intended.
 */
@Override
public BlockHeader getBlockHeaderByNumber(long blockNumber) {
    BlockHeader header = localGetBlockHeaderByNumber(blockNumber);
    if(header == null){
        ArrayList<InetSocketAddress> nodesToAsk = getNodesToAsk();
        ArrayList<Block> blocks = new ArrayList<Block>();
        for(InetSocketAddress node : nodesToAsk){
            Block received = askBlockByBumberToThickNode(node, blockNumber);
            if(received != null){
                blocks.add(received);
            }
        }
        Block block = selectTheGoodBlock(blocks);
        if(block != null){
            try {
                database.storeOnDataBase(block);
            } catch (SQLIntegrityConstraintViolationException e) {
                log.verboseDebug("Retrieved block is possibly duplicated.");
                return null;
            }
            header = block.getHeader();
        }
    }
    return header;
}
/**
 * Requests from one thick node the block with the given number.
 * ("Bumber" is a typo for "Number"; kept as-is because the caller in
 * getBlockHeaderByNumber uses this spelling.)
 *
 * @param nodeToAsk thick node to contact
 * @param blockNumber number of the wanted block
 * @return the received block, or {@code null} on any failure
 */
private Block askBlockByBumberToThickNode(InetSocketAddress nodeToAsk, long blockNumber){
    BlockRequestMessage request = new BlockRequestMessage(blockNumber);
    return sendBlockRequest(nodeToAsk, request);
}
/* (non-Javadoc)
* @see raw.blockChain.services.thinNode.ThinNode#stopService()
*/
/**
 * Stops the main loop and persists the current thick-node contacts to the
 * address-book file.
 * NOTE(review): the database is closed before {@code running} is cleared,
 * so an in-flight loop iteration could still touch a closed database —
 * confirm this ordering is intentional.
 *
 * @return always {@code true}
 */
@Override
public boolean stopService() {
    log.verboseDebug("Stop issued.");
    database.close();
    running = false;
    int thickNodesSize;
    synchronized (thickNodes) {
        thickNodesSize = thickNodes.size();
    }
    if(thickNodesSize > 0){
        // Save only when we actually have contacts, so a bad session does
        // not wipe the address book.
        log.verboseDebug("Saving thick nodes addresses");
        ThickNodeAddressBookFile addressBook;
        synchronized (thickNodes) {
            addressBook = new ThickNodeAddressBookFile(thickNodes);
        }
        addressBook.writeToFile();
    }
    return true;
}
/* (non-Javadoc)
* @see raw.blockChain.services.CommonNode#checkTransactionInBlockByHeader(raw.blockChain.api.BlockHeader, raw.blockChain.api.Transaction)
*/
/**
 * Verifies that {@code transaction} belongs to the block identified by
 * {@code blockHeader} by rebuilding the Merkle root from a compact block
 * representation — fetched locally first, then by thick-node consensus.
 *
 * @param blockHeader header of the block to check against
 * @param transaction transaction whose membership is being verified
 * @return {@code true} iff the recomputed Merkle root equals the header's
 */
@Override
public boolean checkTransactionInBlockByHeader(BlockHeader blockHeader, Transaction transaction) {
    BlockCompactRepresentation compact = null;
    try {
        // (sic) "getBlockCompatRepresentation" is the project API's spelling.
        compact = database.getBlockCompatRepresentation(blockHeader, transaction);
    } catch (SQLException e) {
        log.exception(e);
    }
    if(compact == null){
        BlockCompactRepresentationRequestMessage request = new BlockCompactRepresentationRequestMessage(blockHeader, transaction);
        ArrayList<InetSocketAddress> nodes = getNodesToAsk();
        ArrayList<BlockCompactRepresentation> gotCompacts = new ArrayList<BlockCompactRepresentation>();
        for(InetSocketAddress node : nodes){
            BlockCompactRepresentation retrieved = askBlockCompactRepresentation(node, request);
            if(retrieved != null){
                gotCompacts.add(retrieved);
            }
        }
        if(gotCompacts.size() == 0){
            // Nobody could supply a compact representation: treat as absent.
            return false;
        }
        compact = selectGoodBlockCompactRepresentation(gotCompacts);
    }
    Merkler merkler = new DefaultMerkler(1);
    HashValue root = null;
    try {
        root = merkler.getMerkleRootByIntermediate(transaction, compact.getIntermediateValues());
    } catch (TransactionNotPresentException e) {
        log.exception(e);
    }
    // root stays null when the transaction is absent: equals(null) -> false.
    return compact.getHeader().merkleRoot().equals(root);
}
/**
 * Picks the compact representation that the majority of queried nodes agree
 * on: counts how often each distinct value occurs, keeps every value tied
 * for the highest count, and breaks ties at random.
 *
 * @param blocks non-empty list of replies gathered from thick nodes
 * @return the most frequent representation (random choice on a tie)
 */
private BlockCompactRepresentation selectGoodBlockCompactRepresentation(ArrayList<BlockCompactRepresentation> blocks){
    // Count occurrences of each distinct representation.
    HashMap<BlockCompactRepresentation, Integer> freqs = new HashMap<BlockCompactRepresentation, Integer>();
    for(BlockCompactRepresentation compact : blocks){
        if(!freqs.containsKey(compact)){
            freqs.put(compact, Integer.valueOf(Collections.frequency(blocks, compact)));
        }
    }
    ArrayList<Map.Entry<BlockCompactRepresentation, Integer>> freqsList = new ArrayList<Map.Entry<BlockCompactRepresentation, Integer>>(freqs.entrySet());
    // BUGFIX: sort MOST frequent first. The previous ascending sort made the
    // walk below (which stops at the first frequency drop) accept *every*
    // entry, so the method effectively chose uniformly at random instead of
    // by majority. Integer.compare also avoids the subtraction-overflow idiom.
    Collections.sort(freqsList, new Comparator<Map.Entry<BlockCompactRepresentation, Integer>>() {
        @Override
        public int compare(Entry<BlockCompactRepresentation, Integer> o1,
                Entry<BlockCompactRepresentation, Integer> o2) {
            return Integer.compare(o2.getValue().intValue(), o1.getValue().intValue());
        }
    });
    // Collect every entry tied for the top frequency; stop at the first drop.
    ArrayList<BlockCompactRepresentation> papabili = new ArrayList<BlockCompactRepresentation>();
    int maxValue = -1;
    for(Map.Entry<BlockCompactRepresentation, Integer> item : freqsList){
        if(item.getValue().intValue() >= maxValue){
            maxValue = item.getValue().intValue();
            papabili.add(item.getKey());
        } else {
            break;
        }
    }
    if(papabili.size() == 1){
        return papabili.get(0);
    }
    // Tie between equally frequent candidates: pick one at random.
    Random rand = new Random(System.currentTimeMillis());
    return papabili.get(rand.nextInt(papabili.size()));
}
/**
 * Sends a {@link BlockCompactRepresentationRequestMessage} to a single node
 * over a short-lived socket and returns the compact representation carried
 * by a positive reply.
 *
 * @param nodeAddress    address of the thick node to query
 * @param requestMessage the request to send
 * @return the retrieved representation, or {@code null} on any I/O failure,
 *         unexpected reply type, or negative reply
 */
private BlockCompactRepresentation askBlockCompactRepresentation(InetSocketAddress nodeAddress, BlockCompactRepresentationRequestMessage requestMessage){
Object obj = null;
log.verboseDebug("Ready to open socket.");
// try-with-resources closes the socket (and with it both streams) on every path.
try (Socket sock = new Socket(nodeAddress.getAddress(), nodeAddress.getPort())){
log.verboseDebug("Sending request to "+nodeAddress);
ObjectOutputStream oos = new ObjectOutputStream(sock.getOutputStream());
oos.writeObject(requestMessage);
// Bound the wait for the reply so a dead node cannot hang us forever.
sock.setSoTimeout(BlockChainConstants.SOCKETS_MILLISECONDS_TIMEOUT);
ObjectInputStream ois = new ObjectInputStream(sock.getInputStream());
obj = ois.readObject();
log.verboseDebug("Read object reply");
} catch (Exception e) {
log.exception(e);
for(Throwable t : e.getSuppressed()){
log.exception(t);
}
}
BlockCompactRepresentation retrieved = null;
if(obj instanceof BlockCompactRepresentationRequestMessage){
log.verboseDebug("Replyed object is a BlockCompactRepresentationRequestMessage.");
BlockCompactRepresentationRequestMessage reply = (BlockCompactRepresentationRequestMessage) obj;
if(reply.isPositiveReply()){
retrieved = reply.getBlockCompactRepresentation();
log.verboseDebug("Compact representation retrieved.");
}
}
return retrieved;
}
/**
 * Verifies that {@code transaction} is included in the block whose header
 * hashes to {@code headerHash}, by recomputing the Merkle root from a
 * compact block representation (local database first, thick nodes as
 * fallback).
 *
 * @param headerHash  hash identifying the block header
 * @param transaction the transaction whose inclusion is being verified
 * @return {@code true} iff the recomputed Merkle root equals the header's root
 * @see raw.blockChain.services.CommonNode#checkTransactionInBlockByHeaderHash(raw.blockChain.api.HashValue, raw.blockChain.api.Transaction)
 */
@Override
public boolean checkTransactionInBlockByHeaderHash(HashValue headerHash, Transaction transaction) {
    BlockHeader header = database.getBlockHeaderByHash(headerHash);
    BlockCompactRepresentation compact = null;
    if(header != null){
        try {
            // BUGFIX: the lookup result was previously discarded, so 'compact'
            // stayed null and every check needlessly hit the network.
            compact = database.getBlockCompatRepresentation(header, transaction);
        } catch (SQLException e) {
            log.exception(e);
        }
    }
    if(compact == null){
        // No local copy: poll every candidate thick node and keep non-null replies.
        BlockCompactRepresentationRequestMessage request = new BlockCompactRepresentationRequestMessage(headerHash, transaction);
        ArrayList<InetSocketAddress> nodes = getNodesToAsk();
        ArrayList<BlockCompactRepresentation> gotCompacts = new ArrayList<BlockCompactRepresentation>();
        for(InetSocketAddress node : nodes){
            BlockCompactRepresentation retrieved = askBlockCompactRepresentation(node, request);
            if(retrieved != null){
                gotCompacts.add(retrieved);
            }
        }
        if(gotCompacts.size() == 0){
            return false;
        }
        compact = selectGoodBlockCompactRepresentation(gotCompacts);
    }
    Merkler merkler = new DefaultMerkler(1);
    HashValue root = null;
    try {
        // On TransactionNotPresentException root stays null and equals() is false.
        root = merkler.getMerkleRootByIntermediate(transaction, compact.getIntermediateValues());
    } catch (TransactionNotPresentException e) {
        log.exception(e);
    }
    return compact.getHeader().merkleRoot().equals(root);
}
/**
 * Verifies that {@code transaction} is contained in the block at height
 * {@code blockNumber}, by recomputing the Merkle root from a compact block
 * representation (local database first, thick nodes as fallback).
 *
 * @param blockNumber height of the block to check
 * @param transaction the transaction whose inclusion is being verified
 * @return {@code true} iff the recomputed Merkle root equals the header's root
 * @see raw.blockChain.services.CommonNode#checkTransactionInBlockByBlockNumber(long, raw.blockChain.api.Transaction)
 */
@Override
public boolean checkTransactionInBlockByBlockNumber(long blockNumber, Transaction transaction) {
    log.verboseDebug("Checking: "+transaction+" is in block #"+blockNumber+"?");
    BlockHeader header = getBlockHeaderByNumber(blockNumber);
    log.verboseDebug("Header: "+header);
    BlockCompactRepresentation compact = null;
    if(header != null){
        try {
            // BUGFIX: the lookup result was previously discarded, leaving
            // 'compact' null and forcing a network round-trip every time.
            compact = database.getBlockCompatRepresentation(header, transaction);
        } catch (SQLException e) {
            log.exception(e);
        }
    }
    if(compact == null){
        log.verboseDebug("No db-stored compact representation for "+transaction+" in "+header);
        BlockCompactRepresentationRequestMessage request = new BlockCompactRepresentationRequestMessage(blockNumber, transaction);
        ArrayList<InetSocketAddress> nodes = getNodesToAsk();
        ArrayList<BlockCompactRepresentation> gotCompacts = new ArrayList<BlockCompactRepresentation>();
        for(InetSocketAddress node : nodes){
            BlockCompactRepresentation retrieved = askBlockCompactRepresentation(node, request);
            log.verboseDebug("Completed request to "+node);
            if(retrieved != null){
                log.verboseDebug("Compact representation retrieved from "+node);
                gotCompacts.add(retrieved);
            }
        }
        if(gotCompacts.size() == 0){
            return false;
        }
        compact = selectGoodBlockCompactRepresentation(gotCompacts);
        log.verboseDebug("Selected final compact representation!");
    }
    Merkler merkler = new DefaultMerkler(1);
    HashValue root = null;
    try {
        // On TransactionNotPresentException root stays null and equals() is false.
        root = merkler.getMerkleRootByIntermediate(transaction, compact.getIntermediateValues());
    } catch (TransactionNotPresentException e) {
        log.exception(e);
    }
    boolean result = compact.getHeader().merkleRoot().equals(root);
    log.verboseDebug("Check result = "+result+" . (Computated root:"+root+" VS actual root:"+compact.getHeader().merkleRoot()+")");
    return result;
}
/**
 * Broadcasts {@code transaction} to every known thick node (responsive and
 * unresponsive alike). Failures on individual nodes are logged and skipped.
 *
 * @param transaction the transaction to submit
 * @see raw.blockChain.services.CommonNode#submitTransaction(raw.blockChain.api.Transaction)
 */
@Override
public void submitTransaction(Transaction transaction) {
    ArrayList<InetSocketAddress> allThickNodes = new ArrayList<InetSocketAddress>();
    synchronized (thickNodes) {
        allThickNodes.addAll(thickNodes);
    }
    synchronized (unresponsiveThickNodes) {
        allThickNodes.addAll(unresponsiveThickNodes);
    }
    SubmitTransactionMessage submission = new SubmitTransactionMessage(myAddress, transaction);
    for(InetSocketAddress node : allThickNodes){
        // try-with-resources guarantees the socket is closed on every path.
        // BUGFIX: a failure on one node used to 'return', silently aborting
        // the broadcast to all remaining nodes; now it is logged and skipped.
        try (Socket sock = new Socket(node.getAddress(), node.getPort())) {
            ObjectOutputStream oos = new ObjectOutputStream(sock.getOutputStream());
            oos.writeObject(submission);
            // Flush before the socket closes so buffered object data is sent.
            oos.flush();
        } catch (IOException e) {
            log.exception(e);
        }
    }
}
/**
 * Reports whether this node has completed its initialization sequence.
 *
 * @return the value of the {@code initialized} flag
 * @see raw.blockChain.services.CommonNode#isUp()
 */
@Override
public boolean isUp() {
return initialized;
}
/**
 * Returns a defensive copy of the currently known thick-node addresses,
 * taken while holding the {@code thickNodes} lock.
 *
 * @return a snapshot of the thick-node address list
 * @see raw.blockChain.services.CommonNode#getThickNodesList()
 */
@Override
public ArrayList<InetSocketAddress> getThickNodesList() {
    synchronized (thickNodes) {
        return new ArrayList<>(thickNodes);
    }
}
/**
 * Returns the socket address this node presents to the network.
 *
 * @return this node's address
 * @see raw.blockChain.services.CommonNode#getNodeAddress()
 */
@Override
public InetSocketAddress getNodeAddress() {
return myAddress;
}
/**
 * Asks every known thick node for the last block number containing
 * {@code transaction} and returns the most common answer (the smallest one
 * on a tie, because the answers are scanned in ascending order).
 * NOTE(review): "transcation" in the name is a typo for "transaction";
 * kept, since it is part of the interface being overridden.
 *
 * @param transaction the transaction to locate
 * @return the modal block number, or -1 when no node gave an answer
 * @see raw.blockChain.services.CommonNode#transcationLastOccurrence(raw.blockChain.api.Transaction)
 */
@Override
public long transcationLastOccurrence(Transaction transaction) {
    // Snapshot the node list under its lock before doing any network work.
    List<InetSocketAddress> thickNodesCopy;
    synchronized (thickNodes) {
        thickNodesCopy = ImmutableList.copyOf(thickNodes);
    }
    ArrayList<Long> numbers = new ArrayList<Long>();
    for(InetSocketAddress node : thickNodesCopy){
        numbers.add(askTransactionLastOccurrence(node, transaction));
    }
    Collections.sort(numbers);
    // Walk the sorted answers and keep the first value with the longest run.
    long best = -1;
    int bestRun = 0;
    long runValue = -1;
    int runLength = 0;
    for(Long number : numbers){
        if(number != runValue){
            runValue = number;
            runLength = 1;
        } else {
            runLength += 1;
        }
        if(runLength > bestRun){
            bestRun = runLength;
            best = runValue;
        }
    }
    return best;
}
/**
 * Asks one thick node for the last block number in which {@code transaction}
 * appears, over a short-lived socket.
 *
 * @param node        address of the thick node to query
 * @param transaction the transaction to locate
 * @return the block number from the node's reply, or -1 on any failure,
 *         timeout, or unexpected reply type
 */
private long askTransactionLastOccurrence(InetSocketAddress node, Transaction transaction){
TransactionBlockNumberMessage reply = null;
// try-with-resources closes the socket (and both streams) on every path.
try (Socket sock = new Socket(node.getAddress(), node.getPort());){
// Bound the wait for the reply so a dead node cannot hang us forever.
sock.setSoTimeout(BlockChainConstants.SOCKETS_MILLISECONDS_TIMEOUT);
ObjectOutputStream oos = new ObjectOutputStream(sock.getOutputStream());
TransactionBlockNumberMessage request = new TransactionBlockNumberMessage(transaction);
oos.writeObject(request);
ObjectInputStream ois = new ObjectInputStream(sock.getInputStream());
Object received = ois.readObject();
// Only accept a reply-flavored TransactionBlockNumberMessage.
if(received instanceof TransactionBlockNumberMessage){
TransactionBlockNumberMessage converted = (TransactionBlockNumberMessage) received;
if(!converted.isRequest()){
reply = converted;
}
}
} catch (Exception e) {
log.exception(e);
for(Throwable t : e.getSuppressed()){
log.exception(t);
}
}
if(reply == null){
return -1;
}
return reply.getBlockNumber();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.common.task;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.google.common.collect.ImmutableList;
import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;
import org.apache.druid.indexer.partitions.HashedPartitionsSpec;
import org.apache.druid.indexing.common.task.IndexTask.IndexTuningConfig;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.data.CompressionFactory.LongEncodingStrategy;
import org.apache.druid.segment.data.CompressionStrategy;
import org.apache.druid.segment.data.RoaringBitmapSerdeFactory;
import org.apache.druid.segment.indexing.TuningConfig;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.io.IOException;
/**
 * JSON round-trip (serialization/deserialization) tests for
 * {@link IndexTuningConfig}, covering both the current partitionsSpec-based
 * configuration and the deprecated targetPartitionSize / numShards style,
 * plus validation of illegal forceGuaranteedRollup / partitionsSpec
 * combinations.
 */
public class IndexTaskSerdeTest
{
private static final ObjectMapper MAPPER = new DefaultObjectMapper();
@Rule
public ExpectedException expectedException = ExpectedException.none();
@BeforeClass
public static void setup()
{
// Register the polymorphic subtype so TuningConfig JSON tagged "index"
// deserializes into IndexTuningConfig.
MAPPER.registerSubtypes(new NamedType(IndexTuningConfig.class, "index"));
}
// Round trip with an explicit DynamicPartitionsSpec (best-effort rollup).
@Test
public void testSerdeTuningConfigWithDynamicPartitionsSpec() throws IOException
{
final IndexTuningConfig tuningConfig = new IndexTuningConfig(
null,
null,
null,
100,
2000L,
null,
null,
null,
null,
null,
new DynamicPartitionsSpec(1000, 2000L),
new IndexSpec(
new RoaringBitmapSerdeFactory(false),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
null,
null,
false,
null,
null,
100L,
OffHeapMemorySegmentWriteOutMediumFactory.instance(),
true,
10,
100,
1234
);
assertSerdeTuningConfig(tuningConfig);
}
// Round trip with an explicit HashedPartitionsSpec (perfect rollup).
@Test
public void testSerdeTuningConfigWithHashedPartitionsSpec() throws IOException
{
final IndexTuningConfig tuningConfig = new IndexTuningConfig(
null,
null,
null,
100,
2000L,
null,
null,
null,
null,
null,
new HashedPartitionsSpec(null, 10, ImmutableList.of("dim1", "dim2")),
new IndexSpec(
new RoaringBitmapSerdeFactory(false),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
null,
null,
true,
null,
null,
100L,
OffHeapMemorySegmentWriteOutMediumFactory.instance(),
true,
10,
100,
null
);
assertSerdeTuningConfig(tuningConfig);
}
// Round trip using the deprecated targetPartitionSize / maxTotalRows fields
// instead of an explicit partitionsSpec (dynamic partitioning path).
@Test
public void testSerdeTuningConfigWithDeprecatedDynamicPartitionsSpec() throws IOException
{
final IndexTuningConfig tuningConfig = new IndexTuningConfig(
null,
1000,
null,
100,
2000L,
null,
3000L,
null,
null,
null,
null,
new IndexSpec(
new RoaringBitmapSerdeFactory(false),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
null,
null,
false,
null,
null,
100L,
OffHeapMemorySegmentWriteOutMediumFactory.instance(),
true,
10,
100,
null
);
assertSerdeTuningConfig(tuningConfig);
}
// Round trip using the deprecated numShards / partitionDimensions fields
// instead of an explicit partitionsSpec (hashed partitioning path).
@Test
public void testSerdeTuningConfigWithDeprecatedHashedPartitionsSpec() throws IOException
{
final IndexTuningConfig tuningConfig = new IndexTuningConfig(
null,
null,
null,
100,
2000L,
null,
null,
null,
10,
ImmutableList.of("dim1", "dim2"),
null,
new IndexSpec(
new RoaringBitmapSerdeFactory(false),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
null,
null,
false,
null,
null,
100L,
OffHeapMemorySegmentWriteOutMediumFactory.instance(),
true,
10,
100,
1234
);
assertSerdeTuningConfig(tuningConfig);
}
// forceGuaranteedRollup=true with a DynamicPartitionsSpec must be rejected
// by the constructor.
@Test
public void testForceGuaranteedRollupWithDynamicPartitionsSpec()
{
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("DynamicPartitionsSpec cannot be used for perfect rollup");
final IndexTuningConfig tuningConfig = new IndexTuningConfig(
null,
null,
null,
100,
2000L,
null,
null,
null,
null,
null,
new DynamicPartitionsSpec(1000, 2000L),
new IndexSpec(
new RoaringBitmapSerdeFactory(false),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
null,
null,
true,
null,
null,
100L,
OffHeapMemorySegmentWriteOutMediumFactory.instance(),
true,
10,
100,
null
);
}
// forceGuaranteedRollup=false with a HashedPartitionsSpec must be rejected
// by the constructor.
@Test
public void testBestEffortRollupWithHashedPartitionsSpec()
{
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("DynamicPartitionsSpec must be used for best-effort rollup");
final IndexTuningConfig tuningConfig = new IndexTuningConfig(
null,
null,
null,
100,
2000L,
null,
null,
null,
null,
null,
new HashedPartitionsSpec(null, 10, ImmutableList.of("dim1", "dim2")),
new IndexSpec(
new RoaringBitmapSerdeFactory(false),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
null,
null,
false,
null,
null,
100L,
OffHeapMemorySegmentWriteOutMediumFactory.instance(),
true,
10,
100,
null
);
}
// Serializes the config to JSON, reads it back through the polymorphic
// TuningConfig base type, and asserts value equality.
private static void assertSerdeTuningConfig(IndexTuningConfig tuningConfig) throws IOException
{
final byte[] json = MAPPER.writeValueAsBytes(tuningConfig);
final IndexTuningConfig fromJson = (IndexTuningConfig) MAPPER.readValue(json, TuningConfig.class);
Assert.assertEquals(tuningConfig, fromJson);
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.costexplorer.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* The aggregated numbers for your reservation usage.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/ReservationAggregates" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ReservationAggregates implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The percentage of reservation time that you used.
* </p>
*/
private String utilizationPercentage;
/**
* <p>
* The percentage of Amazon EC2 reservation time that you used. It's converted to normalized units. Normalized units
* are available only for Amazon EC2 usage after November 11, 2017.
* </p>
*/
private String utilizationPercentageInUnits;
/**
* <p>
* How many reservation hours that you purchased.
* </p>
*/
private String purchasedHours;
/**
* <p>
* The number of Amazon EC2 reservation hours that you purchased. It's converted to normalized units. Normalized
* units are available only for Amazon EC2 usage after November 11, 2017.
* </p>
*/
private String purchasedUnits;
/**
* <p>
* The total number of reservation hours that you used.
* </p>
*/
private String totalActualHours;
/**
* <p>
* The total number of Amazon EC2 reservation hours that you used. It's converted to normalized units. Normalized
* units are available only for Amazon EC2 usage after November 11, 2017.
* </p>
*/
private String totalActualUnits;
/**
* <p>
* The number of reservation hours that you didn't use.
* </p>
*/
private String unusedHours;
/**
* <p>
* The number of Amazon EC2 reservation hours that you didn't use. It's converted to normalized units. Normalized
* units are available only for Amazon EC2 usage after November 11, 2017.
* </p>
*/
private String unusedUnits;
/**
* <p>
* How much your reservation costs if charged On-Demand rates.
* </p>
*/
private String onDemandCostOfRIHoursUsed;
/**
* <p>
* How much you saved due to purchasing and utilizing reservation. Amazon Web Services calculates this by
* subtracting <code>TotalAmortizedFee</code> from <code>OnDemandCostOfRIHoursUsed</code>.
* </p>
*/
private String netRISavings;
/**
* <p>
* How much you might save if you use your entire reservation.
* </p>
*/
private String totalPotentialRISavings;
/**
* <p>
* The upfront cost of your reservation. It's amortized over the reservation period.
* </p>
*/
private String amortizedUpfrontFee;
/**
* <p>
* The monthly cost of your reservation. It's amortized over the reservation period.
* </p>
*/
private String amortizedRecurringFee;
/**
* <p>
* The total cost of your reservation. It's amortized over the reservation period.
* </p>
*/
private String totalAmortizedFee;
/**
* <p>
* The cost of unused hours for your reservation.
* </p>
*/
private String rICostForUnusedHours;
/**
* <p>
* The realized savings because of purchasing and using a reservation.
* </p>
*/
private String realizedSavings;
/**
* <p>
* The unrealized savings because of purchasing and using a reservation.
* </p>
*/
private String unrealizedSavings;
/**
 * Sets the percentage of reservation time that you used.
 *
 * @param utilizationPercentage the utilization percentage to record
 */
public void setUtilizationPercentage(String utilizationPercentage) {
    this.utilizationPercentage = utilizationPercentage;
}

/**
 * Returns the percentage of reservation time that you used.
 *
 * @return the utilization percentage
 */
public String getUtilizationPercentage() {
    return utilizationPercentage;
}

/**
 * Fluent variant of {@link #setUtilizationPercentage(String)}.
 *
 * @param utilizationPercentage the utilization percentage to record
 * @return this object, so calls can be chained
 */
public ReservationAggregates withUtilizationPercentage(String utilizationPercentage) {
    this.setUtilizationPercentage(utilizationPercentage);
    return this;
}
/**
 * Sets the percentage of Amazon EC2 reservation time that you used, in
 * normalized units (available only for Amazon EC2 usage after November 11, 2017).
 *
 * @param utilizationPercentageInUnits the normalized-unit utilization percentage
 */
public void setUtilizationPercentageInUnits(String utilizationPercentageInUnits) {
    this.utilizationPercentageInUnits = utilizationPercentageInUnits;
}

/**
 * Returns the percentage of Amazon EC2 reservation time that you used, in
 * normalized units (available only for Amazon EC2 usage after November 11, 2017).
 *
 * @return the normalized-unit utilization percentage
 */
public String getUtilizationPercentageInUnits() {
    return utilizationPercentageInUnits;
}

/**
 * Fluent variant of {@link #setUtilizationPercentageInUnits(String)}.
 *
 * @param utilizationPercentageInUnits the normalized-unit utilization percentage
 * @return this object, so calls can be chained
 */
public ReservationAggregates withUtilizationPercentageInUnits(String utilizationPercentageInUnits) {
    this.setUtilizationPercentageInUnits(utilizationPercentageInUnits);
    return this;
}
/**
 * Sets how many reservation hours you purchased.
 *
 * @param purchasedHours the purchased hours
 */
public void setPurchasedHours(String purchasedHours) {
    this.purchasedHours = purchasedHours;
}

/**
 * Returns how many reservation hours you purchased.
 *
 * @return the purchased hours
 */
public String getPurchasedHours() {
    return purchasedHours;
}

/**
 * Fluent variant of {@link #setPurchasedHours(String)}.
 *
 * @param purchasedHours the purchased hours
 * @return this object, so calls can be chained
 */
public ReservationAggregates withPurchasedHours(String purchasedHours) {
    this.setPurchasedHours(purchasedHours);
    return this;
}
/**
 * Sets the number of Amazon EC2 reservation hours you purchased, in
 * normalized units (available only for Amazon EC2 usage after November 11, 2017).
 *
 * @param purchasedUnits the purchased normalized units
 */
public void setPurchasedUnits(String purchasedUnits) {
    this.purchasedUnits = purchasedUnits;
}

/**
 * Returns the number of Amazon EC2 reservation hours you purchased, in
 * normalized units (available only for Amazon EC2 usage after November 11, 2017).
 *
 * @return the purchased normalized units
 */
public String getPurchasedUnits() {
    return purchasedUnits;
}

/**
 * Fluent variant of {@link #setPurchasedUnits(String)}.
 *
 * @param purchasedUnits the purchased normalized units
 * @return this object, so calls can be chained
 */
public ReservationAggregates withPurchasedUnits(String purchasedUnits) {
    this.setPurchasedUnits(purchasedUnits);
    return this;
}
/**
 * Sets the total number of reservation hours that you used.
 *
 * @param totalActualHours the total used hours
 */
public void setTotalActualHours(String totalActualHours) {
    this.totalActualHours = totalActualHours;
}

/**
 * Returns the total number of reservation hours that you used.
 *
 * @return the total used hours
 */
public String getTotalActualHours() {
    return totalActualHours;
}

/**
 * Fluent variant of {@link #setTotalActualHours(String)}.
 *
 * @param totalActualHours the total used hours
 * @return this object, so calls can be chained
 */
public ReservationAggregates withTotalActualHours(String totalActualHours) {
    this.setTotalActualHours(totalActualHours);
    return this;
}
/**
 * Sets the total number of Amazon EC2 reservation hours that you used, in
 * normalized units (available only for Amazon EC2 usage after November 11, 2017).
 *
 * @param totalActualUnits the total used normalized units
 */
public void setTotalActualUnits(String totalActualUnits) {
    this.totalActualUnits = totalActualUnits;
}

/**
 * Returns the total number of Amazon EC2 reservation hours that you used, in
 * normalized units (available only for Amazon EC2 usage after November 11, 2017).
 *
 * @return the total used normalized units
 */
public String getTotalActualUnits() {
    return totalActualUnits;
}

/**
 * Fluent variant of {@link #setTotalActualUnits(String)}.
 *
 * @param totalActualUnits the total used normalized units
 * @return this object, so calls can be chained
 */
public ReservationAggregates withTotalActualUnits(String totalActualUnits) {
    this.setTotalActualUnits(totalActualUnits);
    return this;
}
/**
 * Sets the number of reservation hours that you didn't use.
 *
 * @param unusedHours the unused hours
 */
public void setUnusedHours(String unusedHours) {
    this.unusedHours = unusedHours;
}

/**
 * Returns the number of reservation hours that you didn't use.
 *
 * @return the unused hours
 */
public String getUnusedHours() {
    return unusedHours;
}

/**
 * Fluent variant of {@link #setUnusedHours(String)}.
 *
 * @param unusedHours the unused hours
 * @return this object, so calls can be chained
 */
public ReservationAggregates withUnusedHours(String unusedHours) {
    this.setUnusedHours(unusedHours);
    return this;
}
/**
 * Sets the number of Amazon EC2 reservation hours that you didn't use, in
 * normalized units (available only for Amazon EC2 usage after November 11, 2017).
 *
 * @param unusedUnits the unused normalized units
 */
public void setUnusedUnits(String unusedUnits) {
    this.unusedUnits = unusedUnits;
}

/**
 * Returns the number of Amazon EC2 reservation hours that you didn't use, in
 * normalized units (available only for Amazon EC2 usage after November 11, 2017).
 *
 * @return the unused normalized units
 */
public String getUnusedUnits() {
    return unusedUnits;
}

/**
 * Fluent variant of {@link #setUnusedUnits(String)}.
 *
 * @param unusedUnits the unused normalized units
 * @return this object, so calls can be chained
 */
public ReservationAggregates withUnusedUnits(String unusedUnits) {
    this.setUnusedUnits(unusedUnits);
    return this;
}
/**
 * Sets how much your reservation costs if charged On-Demand rates.
 *
 * @param onDemandCostOfRIHoursUsed the equivalent On-Demand cost
 */
public void setOnDemandCostOfRIHoursUsed(String onDemandCostOfRIHoursUsed) {
    this.onDemandCostOfRIHoursUsed = onDemandCostOfRIHoursUsed;
}

/**
 * Returns how much your reservation costs if charged On-Demand rates.
 *
 * @return the equivalent On-Demand cost
 */
public String getOnDemandCostOfRIHoursUsed() {
    return onDemandCostOfRIHoursUsed;
}

/**
 * Fluent variant of {@link #setOnDemandCostOfRIHoursUsed(String)}.
 *
 * @param onDemandCostOfRIHoursUsed the equivalent On-Demand cost
 * @return this object, so calls can be chained
 */
public ReservationAggregates withOnDemandCostOfRIHoursUsed(String onDemandCostOfRIHoursUsed) {
    this.setOnDemandCostOfRIHoursUsed(onDemandCostOfRIHoursUsed);
    return this;
}
/**
 * Sets how much you saved by purchasing and utilizing a reservation; Amazon
 * Web Services computes it as {@code OnDemandCostOfRIHoursUsed} minus
 * {@code TotalAmortizedFee}.
 *
 * @param netRISavings the net reservation savings
 */
public void setNetRISavings(String netRISavings) {
    this.netRISavings = netRISavings;
}

/**
 * Returns how much you saved by purchasing and utilizing a reservation;
 * computed as {@code OnDemandCostOfRIHoursUsed} minus {@code TotalAmortizedFee}.
 *
 * @return the net reservation savings
 */
public String getNetRISavings() {
    return netRISavings;
}

/**
 * Fluent variant of {@link #setNetRISavings(String)}.
 *
 * @param netRISavings the net reservation savings
 * @return this object, so calls can be chained
 */
public ReservationAggregates withNetRISavings(String netRISavings) {
    this.setNetRISavings(netRISavings);
    return this;
}
/**
* <p>
* How much you might save if you use your entire reservation.
* </p>
*
* @param totalPotentialRISavings
* How much you might save if you use your entire reservation.
*/
public void setTotalPotentialRISavings(String totalPotentialRISavings) {
this.totalPotentialRISavings = totalPotentialRISavings;
}
/**
* <p>
* How much you might save if you use your entire reservation.
* </p>
*
* @return How much you might save if you use your entire reservation.
*/
public String getTotalPotentialRISavings() {
return this.totalPotentialRISavings;
}
/**
* <p>
* How much you might save if you use your entire reservation.
* </p>
*
* @param totalPotentialRISavings
* How much you might save if you use your entire reservation.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ReservationAggregates withTotalPotentialRISavings(String totalPotentialRISavings) {
setTotalPotentialRISavings(totalPotentialRISavings);
return this;
}
/**
* <p>
* The upfront cost of your reservation. It's amortized over the reservation period.
* </p>
*
* @param amortizedUpfrontFee
* The upfront cost of your reservation. It's amortized over the reservation period.
*/
public void setAmortizedUpfrontFee(String amortizedUpfrontFee) {
this.amortizedUpfrontFee = amortizedUpfrontFee;
}
/**
* <p>
* The upfront cost of your reservation. It's amortized over the reservation period.
* </p>
*
* @return The upfront cost of your reservation. It's amortized over the reservation period.
*/
public String getAmortizedUpfrontFee() {
return this.amortizedUpfrontFee;
}
/**
* <p>
* The upfront cost of your reservation. It's amortized over the reservation period.
* </p>
*
* @param amortizedUpfrontFee
* The upfront cost of your reservation. It's amortized over the reservation period.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ReservationAggregates withAmortizedUpfrontFee(String amortizedUpfrontFee) {
setAmortizedUpfrontFee(amortizedUpfrontFee);
return this;
}
/**
* <p>
* The monthly cost of your reservation. It's amortized over the reservation period.
* </p>
*
* @param amortizedRecurringFee
* The monthly cost of your reservation. It's amortized over the reservation period.
*/
public void setAmortizedRecurringFee(String amortizedRecurringFee) {
this.amortizedRecurringFee = amortizedRecurringFee;
}
/**
* <p>
* The monthly cost of your reservation. It's amortized over the reservation period.
* </p>
*
* @return The monthly cost of your reservation. It's amortized over the reservation period.
*/
public String getAmortizedRecurringFee() {
return this.amortizedRecurringFee;
}
/**
* <p>
* The monthly cost of your reservation. It's amortized over the reservation period.
* </p>
*
* @param amortizedRecurringFee
* The monthly cost of your reservation. It's amortized over the reservation period.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ReservationAggregates withAmortizedRecurringFee(String amortizedRecurringFee) {
setAmortizedRecurringFee(amortizedRecurringFee);
return this;
}
/**
* <p>
* The total cost of your reservation. It's amortized over the reservation period.
* </p>
*
* @param totalAmortizedFee
* The total cost of your reservation. It's amortized over the reservation period.
*/
public void setTotalAmortizedFee(String totalAmortizedFee) {
this.totalAmortizedFee = totalAmortizedFee;
}
/**
* <p>
* The total cost of your reservation. It's amortized over the reservation period.
* </p>
*
* @return The total cost of your reservation. It's amortized over the reservation period.
*/
public String getTotalAmortizedFee() {
return this.totalAmortizedFee;
}
/**
* <p>
* The total cost of your reservation. It's amortized over the reservation period.
* </p>
*
* @param totalAmortizedFee
* The total cost of your reservation. It's amortized over the reservation period.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ReservationAggregates withTotalAmortizedFee(String totalAmortizedFee) {
setTotalAmortizedFee(totalAmortizedFee);
return this;
}
/**
* <p>
* The cost of unused hours for your reservation.
* </p>
*
* @param rICostForUnusedHours
* The cost of unused hours for your reservation.
*/
public void setRICostForUnusedHours(String rICostForUnusedHours) {
this.rICostForUnusedHours = rICostForUnusedHours;
}
/**
* <p>
* The cost of unused hours for your reservation.
* </p>
*
* @return The cost of unused hours for your reservation.
*/
public String getRICostForUnusedHours() {
return this.rICostForUnusedHours;
}
/**
* <p>
* The cost of unused hours for your reservation.
* </p>
*
* @param rICostForUnusedHours
* The cost of unused hours for your reservation.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ReservationAggregates withRICostForUnusedHours(String rICostForUnusedHours) {
setRICostForUnusedHours(rICostForUnusedHours);
return this;
}
/**
* <p>
* The realized savings because of purchasing and using a reservation.
* </p>
*
* @param realizedSavings
* The realized savings because of purchasing and using a reservation.
*/
public void setRealizedSavings(String realizedSavings) {
this.realizedSavings = realizedSavings;
}
/**
* <p>
* The realized savings because of purchasing and using a reservation.
* </p>
*
* @return The realized savings because of purchasing and using a reservation.
*/
public String getRealizedSavings() {
return this.realizedSavings;
}
/**
* <p>
* The realized savings because of purchasing and using a reservation.
* </p>
*
* @param realizedSavings
* The realized savings because of purchasing and using a reservation.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ReservationAggregates withRealizedSavings(String realizedSavings) {
setRealizedSavings(realizedSavings);
return this;
}
/**
* <p>
* The unrealized savings because of purchasing and using a reservation.
* </p>
*
* @param unrealizedSavings
* The unrealized savings because of purchasing and using a reservation.
*/
public void setUnrealizedSavings(String unrealizedSavings) {
this.unrealizedSavings = unrealizedSavings;
}
/**
* <p>
* The unrealized savings because of purchasing and using a reservation.
* </p>
*
* @return The unrealized savings because of purchasing and using a reservation.
*/
public String getUnrealizedSavings() {
return this.unrealizedSavings;
}
/**
* <p>
* The unrealized savings because of purchasing and using a reservation.
* </p>
*
* @param unrealizedSavings
* The unrealized savings because of purchasing and using a reservation.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ReservationAggregates withUnrealizedSavings(String unrealizedSavings) {
setUnrealizedSavings(unrealizedSavings);
return this;
}
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        // Each non-null member is rendered as "Name: value," — every entry but the
        // final one (UnrealizedSavings) carries a trailing comma, matching the
        // generated-SDK format. Null members are omitted entirely.
        if (getUtilizationPercentage() != null)
            sb.append("UtilizationPercentage: ").append(getUtilizationPercentage()).append(",");
        if (getUtilizationPercentageInUnits() != null)
            sb.append("UtilizationPercentageInUnits: ").append(getUtilizationPercentageInUnits()).append(",");
        if (getPurchasedHours() != null)
            sb.append("PurchasedHours: ").append(getPurchasedHours()).append(",");
        if (getPurchasedUnits() != null)
            sb.append("PurchasedUnits: ").append(getPurchasedUnits()).append(",");
        if (getTotalActualHours() != null)
            sb.append("TotalActualHours: ").append(getTotalActualHours()).append(",");
        if (getTotalActualUnits() != null)
            sb.append("TotalActualUnits: ").append(getTotalActualUnits()).append(",");
        if (getUnusedHours() != null)
            sb.append("UnusedHours: ").append(getUnusedHours()).append(",");
        if (getUnusedUnits() != null)
            sb.append("UnusedUnits: ").append(getUnusedUnits()).append(",");
        if (getOnDemandCostOfRIHoursUsed() != null)
            sb.append("OnDemandCostOfRIHoursUsed: ").append(getOnDemandCostOfRIHoursUsed()).append(",");
        if (getNetRISavings() != null)
            sb.append("NetRISavings: ").append(getNetRISavings()).append(",");
        if (getTotalPotentialRISavings() != null)
            sb.append("TotalPotentialRISavings: ").append(getTotalPotentialRISavings()).append(",");
        if (getAmortizedUpfrontFee() != null)
            sb.append("AmortizedUpfrontFee: ").append(getAmortizedUpfrontFee()).append(",");
        if (getAmortizedRecurringFee() != null)
            sb.append("AmortizedRecurringFee: ").append(getAmortizedRecurringFee()).append(",");
        if (getTotalAmortizedFee() != null)
            sb.append("TotalAmortizedFee: ").append(getTotalAmortizedFee()).append(",");
        if (getRICostForUnusedHours() != null)
            sb.append("RICostForUnusedHours: ").append(getRICostForUnusedHours()).append(",");
        if (getRealizedSavings() != null)
            sb.append("RealizedSavings: ").append(getRealizedSavings()).append(",");
        if (getUnrealizedSavings() != null)
            sb.append("UnrealizedSavings: ").append(getUnrealizedSavings());
        sb.append("}");
        return sb.toString();
    }
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ReservationAggregates == false)
return false;
ReservationAggregates other = (ReservationAggregates) obj;
if (other.getUtilizationPercentage() == null ^ this.getUtilizationPercentage() == null)
return false;
if (other.getUtilizationPercentage() != null && other.getUtilizationPercentage().equals(this.getUtilizationPercentage()) == false)
return false;
if (other.getUtilizationPercentageInUnits() == null ^ this.getUtilizationPercentageInUnits() == null)
return false;
if (other.getUtilizationPercentageInUnits() != null && other.getUtilizationPercentageInUnits().equals(this.getUtilizationPercentageInUnits()) == false)
return false;
if (other.getPurchasedHours() == null ^ this.getPurchasedHours() == null)
return false;
if (other.getPurchasedHours() != null && other.getPurchasedHours().equals(this.getPurchasedHours()) == false)
return false;
if (other.getPurchasedUnits() == null ^ this.getPurchasedUnits() == null)
return false;
if (other.getPurchasedUnits() != null && other.getPurchasedUnits().equals(this.getPurchasedUnits()) == false)
return false;
if (other.getTotalActualHours() == null ^ this.getTotalActualHours() == null)
return false;
if (other.getTotalActualHours() != null && other.getTotalActualHours().equals(this.getTotalActualHours()) == false)
return false;
if (other.getTotalActualUnits() == null ^ this.getTotalActualUnits() == null)
return false;
if (other.getTotalActualUnits() != null && other.getTotalActualUnits().equals(this.getTotalActualUnits()) == false)
return false;
if (other.getUnusedHours() == null ^ this.getUnusedHours() == null)
return false;
if (other.getUnusedHours() != null && other.getUnusedHours().equals(this.getUnusedHours()) == false)
return false;
if (other.getUnusedUnits() == null ^ this.getUnusedUnits() == null)
return false;
if (other.getUnusedUnits() != null && other.getUnusedUnits().equals(this.getUnusedUnits()) == false)
return false;
if (other.getOnDemandCostOfRIHoursUsed() == null ^ this.getOnDemandCostOfRIHoursUsed() == null)
return false;
if (other.getOnDemandCostOfRIHoursUsed() != null && other.getOnDemandCostOfRIHoursUsed().equals(this.getOnDemandCostOfRIHoursUsed()) == false)
return false;
if (other.getNetRISavings() == null ^ this.getNetRISavings() == null)
return false;
if (other.getNetRISavings() != null && other.getNetRISavings().equals(this.getNetRISavings()) == false)
return false;
if (other.getTotalPotentialRISavings() == null ^ this.getTotalPotentialRISavings() == null)
return false;
if (other.getTotalPotentialRISavings() != null && other.getTotalPotentialRISavings().equals(this.getTotalPotentialRISavings()) == false)
return false;
if (other.getAmortizedUpfrontFee() == null ^ this.getAmortizedUpfrontFee() == null)
return false;
if (other.getAmortizedUpfrontFee() != null && other.getAmortizedUpfrontFee().equals(this.getAmortizedUpfrontFee()) == false)
return false;
if (other.getAmortizedRecurringFee() == null ^ this.getAmortizedRecurringFee() == null)
return false;
if (other.getAmortizedRecurringFee() != null && other.getAmortizedRecurringFee().equals(this.getAmortizedRecurringFee()) == false)
return false;
if (other.getTotalAmortizedFee() == null ^ this.getTotalAmortizedFee() == null)
return false;
if (other.getTotalAmortizedFee() != null && other.getTotalAmortizedFee().equals(this.getTotalAmortizedFee()) == false)
return false;
if (other.getRICostForUnusedHours() == null ^ this.getRICostForUnusedHours() == null)
return false;
if (other.getRICostForUnusedHours() != null && other.getRICostForUnusedHours().equals(this.getRICostForUnusedHours()) == false)
return false;
if (other.getRealizedSavings() == null ^ this.getRealizedSavings() == null)
return false;
if (other.getRealizedSavings() != null && other.getRealizedSavings().equals(this.getRealizedSavings()) == false)
return false;
if (other.getUnrealizedSavings() == null ^ this.getUnrealizedSavings() == null)
return false;
if (other.getUnrealizedSavings() != null && other.getUnrealizedSavings().equals(this.getUnrealizedSavings()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getUtilizationPercentage() == null) ? 0 : getUtilizationPercentage().hashCode());
hashCode = prime * hashCode + ((getUtilizationPercentageInUnits() == null) ? 0 : getUtilizationPercentageInUnits().hashCode());
hashCode = prime * hashCode + ((getPurchasedHours() == null) ? 0 : getPurchasedHours().hashCode());
hashCode = prime * hashCode + ((getPurchasedUnits() == null) ? 0 : getPurchasedUnits().hashCode());
hashCode = prime * hashCode + ((getTotalActualHours() == null) ? 0 : getTotalActualHours().hashCode());
hashCode = prime * hashCode + ((getTotalActualUnits() == null) ? 0 : getTotalActualUnits().hashCode());
hashCode = prime * hashCode + ((getUnusedHours() == null) ? 0 : getUnusedHours().hashCode());
hashCode = prime * hashCode + ((getUnusedUnits() == null) ? 0 : getUnusedUnits().hashCode());
hashCode = prime * hashCode + ((getOnDemandCostOfRIHoursUsed() == null) ? 0 : getOnDemandCostOfRIHoursUsed().hashCode());
hashCode = prime * hashCode + ((getNetRISavings() == null) ? 0 : getNetRISavings().hashCode());
hashCode = prime * hashCode + ((getTotalPotentialRISavings() == null) ? 0 : getTotalPotentialRISavings().hashCode());
hashCode = prime * hashCode + ((getAmortizedUpfrontFee() == null) ? 0 : getAmortizedUpfrontFee().hashCode());
hashCode = prime * hashCode + ((getAmortizedRecurringFee() == null) ? 0 : getAmortizedRecurringFee().hashCode());
hashCode = prime * hashCode + ((getTotalAmortizedFee() == null) ? 0 : getTotalAmortizedFee().hashCode());
hashCode = prime * hashCode + ((getRICostForUnusedHours() == null) ? 0 : getRICostForUnusedHours().hashCode());
hashCode = prime * hashCode + ((getRealizedSavings() == null) ? 0 : getRealizedSavings().hashCode());
hashCode = prime * hashCode + ((getUnrealizedSavings() == null) ? 0 : getUnrealizedSavings().hashCode());
return hashCode;
}
@Override
public ReservationAggregates clone() {
try {
return (ReservationAggregates) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
    /**
     * Marshalls this object's members onto the given {@code ProtocolMarshaller}
     * via the generated {@code ReservationAggregatesMarshaller}. Internal SDK
     * API — not intended for use by client code.
     */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.costexplorer.model.transform.ReservationAggregatesMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Copyright 2015 Justin W. Flory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.justinwflory.withershot;
import org.bukkit.Bukkit;
import org.bukkit.configuration.InvalidConfigurationException;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.PluginDescriptionFile;
import org.bukkit.scheduler.BukkitTask;
import java.io.*;
import java.net.Proxy;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.util.UUID;
import java.util.logging.Level;
import java.util.zip.GZIPOutputStream;
/**
 * Lightweight client for the MCStats / Plugin Metrics reporting service.
 * Gathers basic, anonymous server statistics (plugin version, server version,
 * player count, OS/Java details) and POSTs them, gzip-compressed, to the
 * metrics backend on a repeating asynchronous Bukkit task. Server owners can
 * opt out through {@code plugins/PluginMetrics/config.yml}.
 */
public class MetricsLite {
    /**
     * The current revision number
     */
    private final static int REVISION = 7;
    /**
     * The base url of the metrics domain
     */
    private static final String BASE_URL = "http://report.mcstats.org";
    /**
     * The url used to report a server's status
     */
    private static final String REPORT_URL = "/plugin/%s";
    /**
     * Interval of time to ping (in minutes)
     */
    private final static int PING_INTERVAL = 15;
    /**
     * The plugin this metrics submits for
     */
    private final Plugin plugin;
    /**
     * The plugin configuration file
     */
    private final YamlConfiguration configuration;
    /**
     * The plugin configuration file on disk (shared by all plugins using Plugin Metrics)
     */
    private final File configurationFile;
    /**
     * Unique server id
     */
    private final String guid;
    /**
     * Debug mode
     */
    private final boolean debug;
    /**
     * Lock for synchronization between start/enable/disable and the posting task
     */
    private final Object optOutLock = new Object();
    /**
     * Id of the scheduled task; null when metrics is not running
     */
    private volatile BukkitTask task = null;

    /**
     * Creates a metrics submitter for the given plugin, loading (and creating
     * on first use) the shared PluginMetrics configuration file.
     *
     * @param plugin the plugin to report for; must not be null
     * @throws IOException if the configuration file cannot be written
     */
    public MetricsLite(Plugin plugin) throws IOException {
        if (plugin == null) {
            throw new IllegalArgumentException("Plugin cannot be null");
        }
        this.plugin = plugin;
        // load the config
        configurationFile = getConfigFile();
        configuration = YamlConfiguration.loadConfiguration(configurationFile);
        // add some defaults
        configuration.addDefault("opt-out", false);
        configuration.addDefault("guid", UUID.randomUUID().toString());
        configuration.addDefault("debug", false);
        // Do we need to create the file? A missing guid means the file has never been saved.
        if (configuration.get("guid", null) == null) {
            configuration.options().header("http://mcstats.org").copyDefaults(true);
            configuration.save(configurationFile);
        }
        // Load the guid then
        guid = configuration.getString("guid");
        debug = configuration.getBoolean("debug", false);
    }

    /**
     * Start measuring statistics. This will immediately create an async repeating task for the plugin and send
     * the initial data to the metrics backend, and then after that it will post in increments of
     * PING_INTERVAL * 1200 ticks.
     *
     * @return True if statistics measuring is running, otherwise false.
     */
    public boolean start() {
        synchronized (optOutLock) {
            // Did we opt out?
            if (isOptOut()) {
                return false;
            }
            // Is metrics already running?
            if (task != null) {
                return true;
            }
            // Begin hitting the server with glorious data
            // (PING_INTERVAL minutes * 60 s * 20 ticks/s = PING_INTERVAL * 1200 ticks)
            task = plugin.getServer().getScheduler().runTaskTimerAsynchronously(plugin, new Runnable() {
                private boolean firstPost = true;

                public void run() {
                    try {
                        // This has to be synchronized or it can collide with the disable method.
                        synchronized (optOutLock) {
                            // Disable Task, if it is running and the server owner decided to opt-out
                            if (isOptOut() && task != null) {
                                task.cancel();
                                task = null;
                            }
                        }
                        // We use the inverse of firstPost because if it is the first time we are posting,
                        // it is not a interval ping, so it evaluates to FALSE
                        // Each time thereafter it will evaluate to TRUE, i.e PING!
                        postPlugin(!firstPost);
                        // After the first post we set firstPost to false
                        // Each post thereafter will be a ping
                        firstPost = false;
                    } catch (IOException e) {
                        // Network failures are expected in the wild; only surface them in debug mode.
                        if (debug) {
                            Bukkit.getLogger().log(Level.INFO, "[Metrics] " + e.getMessage());
                        }
                    }
                }
            }, 0, PING_INTERVAL * 1200);
            return true;
        }
    }

    /**
     * Has the server owner denied plugin metrics?
     *
     * @return true if metrics are opted out of (including when the config cannot be read)
     */
    public boolean isOptOut() {
        synchronized (optOutLock) {
            try {
                // Reload the metrics file so an edit made while the server runs takes effect.
                configuration.load(getConfigFile());
            } catch (IOException ex) {
                if (debug) {
                    Bukkit.getLogger().log(Level.INFO, "[Metrics] " + ex.getMessage());
                }
                // Fail closed: if the config is unreadable, treat the server as opted out.
                return true;
            } catch (InvalidConfigurationException ex) {
                if (debug) {
                    Bukkit.getLogger().log(Level.INFO, "[Metrics] " + ex.getMessage());
                }
                return true;
            }
            return configuration.getBoolean("opt-out", false);
        }
    }

    /**
     * Enables metrics for the server by setting "opt-out" to false in the config file and starting the metrics task.
     *
     * @throws java.io.IOException
     */
    public void enable() throws IOException {
        // This has to be synchronized or it can collide with the check in the task.
        synchronized (optOutLock) {
            // Check if the server owner has already set opt-out, if not, set it.
            if (isOptOut()) {
                configuration.set("opt-out", false);
                configuration.save(configurationFile);
            }
            // Enable Task, if it is not running
            if (task == null) {
                start();
            }
        }
    }

    /**
     * Disables metrics for the server by setting "opt-out" to true in the config file and canceling the metrics task.
     *
     * @throws java.io.IOException
     */
    public void disable() throws IOException {
        // This has to be synchronized or it can collide with the check in the task.
        synchronized (optOutLock) {
            // Check if the server owner has already set opt-out, if not, set it.
            if (!isOptOut()) {
                configuration.set("opt-out", true);
                configuration.save(configurationFile);
            }
            // Disable Task, if it is running
            if (task != null) {
                task.cancel();
                task = null;
            }
        }
    }

    /**
     * Gets the File object of the config file that should be used to store data such as the GUID and opt-out status
     *
     * @return the File object for the config file
     */
    public File getConfigFile() {
        // I believe the easiest way to get the base folder (e.g craftbukkit set via -P) for plugins to use
        // is to abuse the plugin object we already have
        // plugin.getDataFolder() => base/plugins/PluginA/
        // pluginsFolder => base/plugins/
        // The base is not necessarily relative to the startup directory.
        File pluginsFolder = plugin.getDataFolder().getParentFile();
        // return => base/plugins/PluginMetrics/config.yml
        return new File(new File(pluginsFolder, "PluginMetrics"), "config.yml");
    }

    /**
     * Generic method that posts a plugin to the metrics website.
     *
     * @param isPing true for an interval ping, false for the initial post
     * @throws IOException if the HTTP request fails or the backend returns an error response
     */
    private void postPlugin(boolean isPing) throws IOException {
        // Server software specific section
        PluginDescriptionFile description = plugin.getDescription();
        String pluginName = description.getName();
        boolean onlineMode = Bukkit.getServer().getOnlineMode(); // TRUE if online mode is enabled
        String pluginVersion = description.getVersion();
        String serverVersion = Bukkit.getVersion();
        int playersOnline = Bukkit.getServer().getOnlinePlayers().size();
        // END server software specific section -- all code below does not use any code outside of this class / Java
        // Construct the post data
        StringBuilder json = new StringBuilder(1024);
        json.append('{');
        // The plugin's description file containg all of the plugin data such as name, version, author, etc
        appendJSONPair(json, "guid", guid);
        appendJSONPair(json, "plugin_version", pluginVersion);
        appendJSONPair(json, "server_version", serverVersion);
        appendJSONPair(json, "players_online", Integer.toString(playersOnline));
        // New data as of R6
        String osname = System.getProperty("os.name");
        String osarch = System.getProperty("os.arch");
        String osversion = System.getProperty("os.version");
        String java_version = System.getProperty("java.version");
        int coreCount = Runtime.getRuntime().availableProcessors();
        // normalize os arch .. amd64 -> x86_64
        if (osarch.equals("amd64")) {
            osarch = "x86_64";
        }
        appendJSONPair(json, "osname", osname);
        appendJSONPair(json, "osarch", osarch);
        appendJSONPair(json, "osversion", osversion);
        appendJSONPair(json, "cores", Integer.toString(coreCount));
        appendJSONPair(json, "auth_mode", onlineMode ? "1" : "0");
        appendJSONPair(json, "java_version", java_version);
        // If we're pinging, append it
        if (isPing) {
            appendJSONPair(json, "ping", "1");
        }
        // close json
        json.append('}');
        // Create the url
        URL url = new URL(BASE_URL + String.format(REPORT_URL, urlEncode(pluginName)));
        // Connect to the website
        URLConnection connection;
        // Mineshafter creates a socks proxy, so we can safely bypass it
        // It does not reroute POST requests so we need to go around it
        if (isMineshafterPresent()) {
            connection = url.openConnection(Proxy.NO_PROXY);
        } else {
            connection = url.openConnection();
        }
        // NOTE(review): getBytes() uses the platform default charset while gzip(...) encodes
        // UTF-8; uncompressed is only used for the debug log below, but the counts can
        // disagree on non-UTF-8 platforms — TODO confirm intended.
        byte[] uncompressed = json.toString().getBytes();
        byte[] compressed = gzip(json.toString());
        // Headers
        connection.addRequestProperty("User-Agent", "MCStats/" + REVISION);
        connection.addRequestProperty("Content-Type", "application/json");
        connection.addRequestProperty("Content-Encoding", "gzip");
        connection.addRequestProperty("Content-Length", Integer.toString(compressed.length));
        connection.addRequestProperty("Accept", "application/json");
        connection.addRequestProperty("Connection", "close");
        connection.setDoOutput(true);
        if (debug) {
            System.out.println("[Metrics] Prepared request for " + pluginName + " uncompressed=" + uncompressed.length + " compressed=" + compressed.length);
        }
        // Write the data
        // NOTE(review): os and reader are not closed if an exception is thrown before the
        // explicit close() calls below — consider try-with-resources.
        OutputStream os = connection.getOutputStream();
        os.write(compressed);
        os.flush();
        // Now read the response
        final BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
        String response = reader.readLine();
        // close resources
        os.close();
        reader.close();
        // The backend signals failure with a null line, an "ERR..." line, or a "7[,]..." line.
        if (response == null || response.startsWith("ERR") || response.startsWith("7")) {
            if (response == null) {
                response = "null";
            } else if (response.startsWith("7")) {
                // Strip the "7" / "7," status prefix, leaving the error message.
                response = response.substring(response.startsWith("7,") ? 2 : 1);
            }
            throw new IOException(response);
        }
    }

    /**
     * GZip compress a string of bytes
     *
     * @param input the string to compress (encoded as UTF-8)
     * @return the gzip-compressed bytes; empty-ish output (never null) if compression failed
     */
    public static byte[] gzip(String input) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        GZIPOutputStream gzos = null;
        try {
            gzos = new GZIPOutputStream(baos);
            gzos.write(input.getBytes("UTF-8"));
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // close() also flushes the gzip trailer into baos.
            if (gzos != null) try {
                gzos.close();
            } catch (IOException ignore) {
            }
        }
        return baos.toByteArray();
    }

    /**
     * Check if mineshafter is present. If it is, we need to bypass it to send POST requests
     *
     * @return true if mineshafter is installed on the server
     */
    private boolean isMineshafterPresent() {
        try {
            // Presence of the Mineshafter proxy class is the only detection signal available.
            Class.forName("mineshafter.MineServer");
            return true;
        } catch (Exception e) {
            return false;
        }
    }

    /**
     * Appends a json encoded key/value pair to the given string builder.
     *
     * @param json the builder holding the JSON object under construction (must start with '{')
     * @param key the pair's key; always JSON-escaped
     * @param value the pair's value; emitted unquoted when it looks numeric, escaped otherwise
     * @throws UnsupportedEncodingException
     */
    private static void appendJSONPair(StringBuilder json, String key, String value) throws UnsupportedEncodingException {
        boolean isValueNumeric = false;
        try {
            // A value is emitted as a bare number only if it parses as a double AND does
            // not end in '0' (the literal "0" excepted) — presumably to avoid emitting
            // numbers whose trailing zeros would be altered by parsing; TODO confirm.
            if (value.equals("0") || !value.endsWith("0")) {
                Double.parseDouble(value);
                isValueNumeric = true;
            }
        } catch (NumberFormatException e) {
            isValueNumeric = false;
        }
        // Separate pairs with commas; '{' at the end means this is the first pair.
        if (json.charAt(json.length() - 1) != '{') {
            json.append(',');
        }
        json.append(escapeJSON(key));
        json.append(':');
        if (isValueNumeric) {
            json.append(value);
        } else {
            json.append(escapeJSON(value));
        }
    }

    /**
     * Escape a string to create a valid JSON string
     *
     * @param text the raw text
     * @return the text wrapped in double quotes with JSON special characters escaped
     */
    private static String escapeJSON(String text) {
        StringBuilder builder = new StringBuilder();
        builder.append('"');
        for (int index = 0; index < text.length(); index++) {
            char chr = text.charAt(index);
            switch (chr) {
                case '"':
                case '\\':
                    builder.append('\\');
                    builder.append(chr);
                    break;
                case '\b':
                    builder.append("\\b");
                    break;
                case '\t':
                    builder.append("\\t");
                    break;
                case '\n':
                    builder.append("\\n");
                    break;
                case '\r':
                    builder.append("\\r");
                    break;
                default:
                    if (chr < ' ') {
                        // Remaining control characters become \ u00XX escapes.
                        String t = "000" + Integer.toHexString(chr);
                        builder.append("\\u" + t.substring(t.length() - 4));
                    } else {
                        builder.append(chr);
                    }
                    break;
            }
        }
        builder.append('"');
        return builder.toString();
    }

    /**
     * Encode text as UTF-8
     *
     * @param text the text to encode
     * @return the encoded text, as UTF-8
     */
    private static String urlEncode(final String text) throws UnsupportedEncodingException {
        return URLEncoder.encode(text, "UTF-8");
    }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package jta;
import java.util.*;
import java.sql.*;
import javax.naming.Context;
import javax.sql.DataSource;
import javax.transaction.UserTransaction;
import javax.transaction.RollbackException;
import com.gemstone.gemfire.InternalGemFireError;
import com.gemstone.gemfire.cache.*;
import com.gemstone.gemfire.internal.OSProcess;
import com.gemstone.gemfire.internal.cache.*;
import com.gemstone.gemfire.internal.datasource.DataSourceFactory;
import com.gemstone.gemfire.internal.jta.GlobalTransaction;
import dunit.DistributedTestCase;
import hydra.*;
import util.*;
/**
* @author nandk
*
* Reworked by Lynn Hughes-Godfrey (6.1)
*/
public class JtaCacheCallbackHydraTest {
    // Name of the database table exercised by this test.
    static final String TABLENAME = "CacheTest";
    // Whether the test runs its tasks serially; presumably set by task code
    // outside this chunk — TODO confirm against the rest of the class.
    static boolean isSerialExecution;
/**
* Start Task (to create database)
*
* @throws Exception
*/
public static void createDatabase() {
try {
if (JtaPrms.useDerbyNetworkServer()) {
GlobalTransaction.DISABLE_TRANSACTION_TIMEOUT_SETTING = true;
DataSourceFactory.setTestConnectionUrl(getDerbyURL());
DataSourceFactory.setTestConnectionHost(DerbyServerHelper.getEndpoint().getHost());
DataSourceFactory.setTestConnectionPort(String.valueOf(DerbyServerHelper.getEndpoint().getPort()));
}
Cache cache = CacheHelper.createCacheFromXml(JtaPrms.getCacheXmlFile());
Log.getLogWriter().info("Creating database ...");
DBUtil.createTable(TABLENAME);
Log.getLogWriter().info("Created database.");
CacheHelper.closeCache();
} catch (Exception e) {
throw new TestException("Error in createDatabase()" + TestHelper.getStackTrace(e));
}
}
/**
* Init task for bridgeServers working with JTA edge clients
*/
public synchronized static void HydraTask_initializeBridgeServer() {
if (CacheHelper.getCache() == null) {
Cache c = CacheHelper.createCache(ConfigPrms.getCacheConfig());
// Install the TransactionListener, if configured
TransactionListener txListener = JtaPrms.getTxListener();
if (txListener != null) {
c.getCacheTransactionManager().setListener(txListener);
Log.getLogWriter().info("Installed TransactionListener " + txListener);
}
// Install the TransactionWriter, if configured
TransactionWriter txWriter = JtaPrms.getTxWriter();
if (txWriter != null) {
((CacheTransactionManager)c.getCacheTransactionManager()).setWriter(txWriter);
Log.getLogWriter().info("Installed TransactionWriter " + txWriter);
}
try {
// create root region (baseed on RegionPrms)
Region root = RegionHelper.createRegion("root", ConfigPrms.getRegionConfig());
// create employee subregion
RegionAttributes ratts = RegionHelper.getRegionAttributes("bridgeSubregion");
Log.getLogWriter().info("Creating employee subregion ...");
Region employee = root.createSubregion("employee", ratts);
Log.getLogWriter().info("Created employee subregion.");
} catch (Exception e) {
throw new TestException("Error in initTask() " + TestHelper.getStackTrace(e));
}
}
BridgeHelper.startBridgeServer(ConfigPrms.getBridgeConfig());
}
/**
* EndTask (to display final values in database)
*
* @throws Exception
*/
public static void dumpDatabase() {
try {
if (JtaPrms.useDerbyNetworkServer()) {
GlobalTransaction.DISABLE_TRANSACTION_TIMEOUT_SETTING = true;
DataSourceFactory.setTestConnectionUrl(getDerbyURL());
DataSourceFactory.setTestConnectionHost(DerbyServerHelper.getEndpoint().getHost());
DataSourceFactory.setTestConnectionPort(String.valueOf(DerbyServerHelper.getEndpoint().getPort()));
}
Cache cache = CacheHelper.createCacheFromXml(JtaPrms.getCacheXmlFile());
DBUtil.displayData(TABLENAME);
CacheHelper.closeCache();
} catch (Exception e) {
throw new TestException("Error in dumpDatabase()" + TestHelper.getStackTrace(e));
}
}
/**
* Use a loader to get the entries from the database into the cache
*/
public synchronized static void loadCache() {
Cache cache = CacheHelper.getCache();
Region employee = RegionHelper.getRegion("/root/employee");
if (employee.isEmpty()) {
Log.getLogWriter().info("Loading database entries into cache ....");
for (int i=1; i <=4; i++) {
String key = "key"+i;
employee.get(key);
}
}
Log.getLogWriter().info("Loaded database entries into cache.");
}
/**
* Initialize Cache, region and database table
*/
public synchronized static void initTask() {
// initialize cache, regions and database
Cache cache = CacheHelper.getCache();
if (cache == null) {
try {
System.setProperty("derby.locks.waitTimeout","180");
if (JtaPrms.useDerbyNetworkServer()) {
GlobalTransaction.DISABLE_TRANSACTION_TIMEOUT_SETTING = true;
DataSourceFactory.setTestConnectionUrl(getDerbyURL());
DataSourceFactory.setTestConnectionHost(DerbyServerHelper.getEndpoint().getHost());
DataSourceFactory.setTestConnectionPort(String.valueOf(DerbyServerHelper.getEndpoint().getPort()));
}
cache = CacheHelper.createCacheFromXml(JtaPrms.getCacheXmlFile());
// Install the TransactionListener, if configured
TransactionListener txListener = JtaPrms.getTxListener();
if (txListener != null) {
cache.getCacheTransactionManager().setListener(txListener);
Log.getLogWriter().info("Installed TransactionListener " + txListener);
}
// Install the TransactionWriter, if configured
TransactionWriter txWriter = JtaPrms.getTxWriter();
if (txWriter != null) {
((CacheTransactionManager)cache.getCacheTransactionManager()).setWriter(txWriter);
Log.getLogWriter().info("Installed TransactionWriter " + txWriter);
}
// create employee subregion
Region root = cache.getRegion("root");
RegionAttributes ratts = RegionHelper.getRegionAttributes(ConfigPrms.getRegionConfig());
// For tests with edgeClients, create the pool before creating the subregion
String poolConfig = ratts.getPoolName();
if (poolConfig != null) {
PoolHelper.createPool(poolConfig);
}
Log.getLogWriter().info("Creating employee subregion ...");
Region employee = root.createSubregion("employee", ratts);
if (employee.getAttributes().getPoolName() != null) {
employee.registerInterest( "ALL_KEYS", InterestResultPolicy.KEYS_VALUES );
Log.getLogWriter().info("registered interest in ALL_KEYS for " + employee.getFullPath());
}
Log.getLogWriter().info("Created employee subregion.");
} catch (Exception e) {
throw new TestException("Error in initTask() " + TestHelper.getStackTrace(e));
}
isSerialExecution = TestConfig.tab().booleanAt(Prms.serialExecution, false);
}
}
/**
* All threads update values transactionally.
*/
public static void testTask() {
long localCounter = 0;
String key = null;
String dbValue = null;
String newValue = null;
String regionVal = null;
boolean isRollBack = false;
// check for any listener exceptions thrown by previous method execution
TestHelper.checkForEventError(JtaBB.getBB());
Cache cache = CacheHelper.getCache();
Region employee = cache.getRegion("root/employee");
((DBLoader)employee.getAttributes().getCacheLoader()).setTxLoad(true);
try {
// Step 1: begin a transaction and update an entry in via region.put()
Context ctx = null;
UserTransaction utx = null;
ctx = cache.getJNDIContext();
utx = (UserTransaction) ctx.lookup("java:/UserTransaction");
boolean rolledback = false;
utx.begin();
localCounter = JtaBB.getBB().getSharedCounters().incrementAndRead(JtaBB.COUNTER);
key = "key" + ((localCounter % 4) + 1); // key1 ... key4
newValue = "value" + localCounter;
dbValue = DBUtil.getDBValue(TABLENAME, key);
if (TestConfig.tab().getRandGen().nextBoolean()) {
regionVal = (String)employee.get(key);
Log.getLogWriter().info("region value for key (" + key + ") = " + regionVal);
}
Log.getLogWriter().info("Selected " + key + " for update, original dbValue is " + dbValue + " new value will be " + newValue);
employee.put(key, newValue);
// With PRs (aswith bridgeServers), the data could be hosted in a remote VM which would cause the writer
// to also be invoked in the remote VM, which means we will not have our DB connection
// (since associated with the utx thread), so we cannot do the db update/commit in
// a callback in these configurations. We can only use the callbacks to update the db
// with a single VM OR with replicate peers (since this is the only time we can guarantee
// that the writer will be invoked locally in the same thread that established the db connection.
if (JtaPrms.executeDBOpsInline()) {
try {
Connection conn = DBUtil.getXADSConnection();
String sql = "UPDATE " + TABLENAME + " SET name = '" + newValue + "' WHERE id = ('" + key + "')";
int i = DBUtil.executeUpdate(sql, conn);
Log.getLogWriter().info("rows updated = " + i);
conn.close();
} catch(Exception e) {
synchronized(JtaCacheCallbackHydraTest.class) { // synchronize to prevent interleaving
Log.getLogWriter().info("testTask caught " + e, e);
if (e.getMessage().indexOf("A lock could not be obtained") >= 0) {
DistributedTestCase.dumpMyThreads(Log.getLogWriter());
}
}
throw new TestException("testTask caught " + e);
}
}
try {
if (TestConfig.tab().getRandGen().nextInt(1,100) < 25) {
Log.getLogWriter().info("ROLLING BACK transaction with " + key + "(" + newValue + ")");
utx.rollback();
Log.getLogWriter().info("ROLLED BACK.");
isRollBack = true;
} else {
Log.getLogWriter().info("COMMITTING transaction with " + key + "(" + newValue + ")");
utx.commit();
Log.getLogWriter().info("COMMITTED.");
}
} catch (RollbackException r) {
Throwable causedBy = r.getCause();
String errStr = causedBy.toString();
boolean isCausedByTransactionWriterException = errStr.indexOf("intentionally throw") >= 0;
if (isSerialExecution && !isCausedByTransactionWriterException) {
throw new TestException("Unexpected exception " + r + " caught in serialExecution test");
} else {
Log.getLogWriter().info("Caught RolledbackException " + r + " for " + key + "(" + newValue + "): expected with concurrent operations, continuing test");
isRollBack = true;
}
} catch (Exception e) {
Log.getLogWriter().error("testTask caught exception during commit/rollback ", e);
throw new TestException("Error in testTask during commit/rollback " + TestHelper.getStackTrace(e));
}
// Verify region <-> database consistency (only for serialExecution tests)
if (isSerialExecution) {
try {
Log.getLogWriter().info("Validating data consistency for " + (isRollBack?"Rollback":"Commit") + " with " + key + "(" + newValue + ")");
if (!checkData(employee, key, newValue, !isRollBack)) {
String s = "Region/DataBase inconsistent for " +(isRollBack?"Rollback":"Commit") + " with " + key + "(" + newValue + ")";
Log.getLogWriter().info(s);
throw new TestException(s + " " + TestHelper.getStackTrace());
}
Log.getLogWriter().info("data consistency verified after " + (isRollBack?"Rollback":"Commit") + " for " + key + "(" + newValue + ")");
} catch(Exception e) {
String s = "Exception while retrieving the Data for validation of commit for " + key + "(" + newValue + ")";
Log.getLogWriter().info(s, e);
throw new TestException(s + " " + TestHelper.getStackTrace(e));
}
}
} catch (TransactionDataNodeHasDepartedException e) {
Log.getLogWriter().info("Caught TransactionDataNodeHasDepartedException. Expected with concurrent execution, continuing test.");
} catch (TransactionDataRebalancedException e) {
Log.getLogWriter().info("Caught Exception " + e + ". Expected with concurrent execution, continuing test.");
} catch (Exception e) {
Log.getLogWriter().error("Exception caught in testTask() for " + key + "(" + newValue + ")", e);
throw new TestException("Error in testTask() " + TestHelper.getStackTrace(e));
}
}
/** Validate value for key in region
*
* @param region - GemFire region
* @param key - entry key in region, database and valuesMap
* @param value - expected value
* @param isCommit - true if tx committed (false if rolled back)
*/
public static boolean checkData(Region region, String key, String value, boolean isCommit) throws Exception {
String regionVal = (String)region.get(key);
Log.getLogWriter().info("checkData (" + ((isCommit)?"commit":"rollback") + ") for " + key + " and newValue " + value + " found region value " + regionVal);
if (isCommit) {
if (!(regionVal.equals(value))) {
Log.getLogWriter().info("checkData (on commit): region value for " + key + " is " + regionVal + ", expected newValue " + value);
return false;
}
} else { // rollback
if (regionVal.equals(value)) {
Log.getLogWriter().info("checkData (on rollback): region value for " + key + " is " + regionVal + ", did not expect it to be updated to newValue " + value + " after rollback");
return false;
}
}
return true;
}
/**
* CloseTask to verify that all values in the database are consistent with the GemFire cache
*/
public static void closeTask() {
validateFinally();
}
/** verify that all validates in the database and consistent with the values
* in the Cache.
*/
public static void validateFinally() {
Cache cache = CacheHelper.getCache();
Region employee = cache.getRegion("root/employee");
Set keySet = employee.keySet();
StringBuffer aStr = new StringBuffer();
Map map = null;
try {
map = DBUtil.getData(TABLENAME);
} catch(Exception e){
throw new TestException("Caught unexpected Exception in validateFinally " + e);
}
Set dbKeys = map.keySet();
if (dbKeys.size() != keySet.size()) {
aStr.append("Inconsistency detected with database size = " + dbKeys.size() + " and cache size = " + keySet.size());
}
// What keys are missing from keySet?
for (Iterator ks = keySet.iterator(); ks.hasNext(); ) {
String key = (String)ks.next();
if (!dbKeys.contains(key)) {
aStr.append(key + " missing from database\n");
}
}
// What keys are missing from the database?
for (Iterator db = dbKeys.iterator(); db.hasNext(); ) {
String key = (String)db.next();
if (!keySet.contains(key)) {
aStr.append(key + " missing from GemFire cache\n");
}
}
StringBuffer displayStr = new StringBuffer();
try {
Log.getLogWriter().info("In validateFinally(), verifying " + map.size() + " entries");
for (Iterator ks = keySet.iterator(); ks.hasNext(); ) {
String key = (String)ks.next();
String regionValue = (String)employee.get(key);
String dbValue = DBUtil.getDBValue(TABLENAME, key);
displayStr.append(" " + key + " dbValue = " + dbValue + " regionValue = " + regionValue + "\n");
if(!dbValue.equals(regionValue)){
aStr.append("Data inconsistency detected for " + key + " dbValue = " + dbValue + " and regionValue " + regionValue);
}
}
} catch(Exception e){
throw new TestException("Caught unexpected Exception in validateFinally " + e);
}
if (aStr.length() > 0) {
Log.getLogWriter().info("Validation FAILED\n" + displayStr.toString());
throw new TestException(aStr.toString() + TestHelper.getStackTrace());
} else {
Log.getLogWriter().info("Validation SUCCESSFUL\n" + displayStr.toString());
}
}
/** For multiVM JTA tests (which require the derby network server)
* create a derby network server URL to look like:
* "jdbc:derby://<host>:<port>/<dbName>;create=true"
*/
private static String getDerbyURL() {
String protocol = "jdbc:derby:";
String dbName = "newDB";
String host = DerbyServerHelper.getEndpoint().getHost();
int port = DerbyServerHelper.getEndpoint().getPort();
StringBuffer url = new StringBuffer();
url.append(protocol);
url.append("//" + host + ":" + port + "/");
url.append(dbName);
url.append(";create=true");
Log.getLogWriter().info("Derby URL = " + url.toString());
return (url.toString());
}
/**
* Randomly stop and restart vms which are not rebalancing (rebalance must
* be part of the clientName). Tests which invoke this must also call
* util.StopStartVMs.StopStart_init as a INITTASK (to write clientNames
* to the Blackboard).
*/
public static void HydraTask_stopStartServerVM() {
int numVMsToStop = TestConfig.tab().intAt(StopStartPrms.numVMsToStop);
int randInt = TestConfig.tab().getRandGen().nextInt(1, numVMsToStop);
Object[] objArr = StopStartVMs.getOtherVMsWithExclude(randInt, "edge");
List clientVmInfoList = (List)(objArr[0]);
List stopModeList = (List)(objArr[1]);
StopStartVMs.stopStartVMs(clientVmInfoList, stopModeList);
}
}
| |
/*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.sqoop;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Appender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.database.DatabaseInterface;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.hadoop.HadoopConfigurationBootstrap;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.namedcluster.NamedClusterManager;
import org.pentaho.di.core.namedcluster.model.NamedCluster;
import org.pentaho.di.core.util.StringUtil;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.AbstractJobEntry;
import org.pentaho.di.job.JobEntryMode;
import org.pentaho.di.job.JobEntryUtils;
import org.pentaho.di.job.LoggingProxy;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.hadoop.shim.ConfigurationException;
import org.pentaho.hadoop.shim.HadoopConfiguration;
import org.pentaho.hadoop.shim.api.Configuration;
import org.pentaho.hadoop.shim.spi.HadoopShim;
import org.pentaho.hadoop.shim.spi.SqoopShim;
import org.pentaho.metastore.api.exceptions.MetaStoreException;
import org.w3c.dom.Node;
/**
* Base class for all Sqoop job entries.
*/
public abstract class AbstractSqoopJobEntry<S extends SqoopConfig> extends AbstractJobEntry<S> implements Cloneable,
  JobEntryInterface {
  /**
   * Log4j appender that redirects all Log4j logging to a Kettle {@link org.pentaho.di.core.logging.LogChannel}
   */
  private Appender sqoopToKettleAppender;
  /**
   * Logging proxy that redirects all {@link java.io.PrintStream} output to a Log4j logger.
   */
  private LoggingProxy stdErrProxy;
  /**
   * Logging categories to monitor and log within Kettle.
   * Made static final: it is an immutable class-wide constant (the original
   * declared it as a mutable per-instance field despite its constant naming).
   */
  private static final String[] LOGS_TO_MONITOR = new String[] { "org.apache.sqoop", "org.apache.hadoop" };
  /**
   * Cache for the levels of loggers we changed so we can revert them when we remove our appender
   */
  private final Map<String, Level> logLevelCache = new HashMap<String, Level>();
  /**
   * Build a configuration object that contains all configuration settings for this job entry. This will be configured
   * by {@link #createJobConfig} and is not intended to be used directly.
   *
   * @return a {@link SqoopConfig} object that contains all configuration settings for this job entry
   */
  protected abstract S buildSqoopConfig();
  /**
   * Declare the Sqoop tool used in this job entry.
   *
   * @return the name of the sqoop tool to use, e.g. "import"
   */
  protected abstract String getToolName();
  public AbstractSqoopJobEntry() {
    super();
  }
  protected AbstractSqoopJobEntry( LogChannelInterface logChannelInterface ) {
    super( logChannelInterface );
  }
  /**
   * @return a {@link SqoopConfig} that contains all configuration settings for this job entry
   */
  @Override
  protected final S createJobConfig() {
    S config = buildSqoopConfig();
    try {
      HadoopShim shim = getActiveHadoopConfiguration().getHadoopShim();
      Configuration hadoopConfig = shim.createConfiguration();
      loadNamedCluster( config );
      SqoopUtils.configureConnectionInformation( config, shim, hadoopConfig );
    } catch ( Exception ex ) {
      // Error loading connection information from Hadoop Configuration. Just log the error and leave the configuration
      // as is.
      logError( BaseMessages.getString( AbstractSqoopJobEntry.class, "ErrorLoadingHadoopConnectionInformation" ), ex );
    }
    return config;
  }
  @Override
  public void loadXML( Node node, List<DatabaseMeta> databaseMetas, List<SlaveServer> slaveServers,
      Repository repository ) throws KettleXMLException {
    super.loadXML( node, databaseMetas, slaveServers, repository );
    // sync up the advanced configuration if no database type is set
    if ( getJobConfig().getDatabase() == null ) {
      getJobConfig().copyConnectionInfoToAdvanced();
    }
  }
  @Override
  public void loadRep( Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases,
      List<SlaveServer> slaveServers ) throws KettleException {
    super.loadRep( rep, id_jobentry, databases, slaveServers );
    // sync up the advanced configuration if no database type is set
    if ( getJobConfig().getDatabase() == null ) {
      getJobConfig().copyConnectionInfoToAdvanced();
    }
  }
  /**
   * Attach a log appender to all Loggers used by Sqoop so we can redirect the output to Kettle's logging facilities.
   */
  @SuppressWarnings( "deprecation" )
  public void attachLoggingAppenders() {
    sqoopToKettleAppender = new org.pentaho.di.core.logging.KettleLogChannelAppender( log );
    try {
      // Redirect all stderr logging to the first log to monitor so it shows up in the Kettle LogChannel
      Logger sqoopLogger = JobEntryUtils.findLogger( LOGS_TO_MONITOR[0] );
      if ( sqoopLogger != null ) {
        stdErrProxy = new LoggingProxy( System.err, sqoopLogger, Level.ERROR );
        System.setErr( stdErrProxy );
      }
      JobEntryUtils.attachAppenderTo( sqoopToKettleAppender, getLogLevel(), logLevelCache, LOGS_TO_MONITOR );
    } catch ( Exception ex ) {
      logMinimal( BaseMessages.getString( AbstractSqoopJobEntry.class, "ErrorAttachingLogging" ) );
      logDebug( Const.getStackTracker( ex ) );
      // Attempt to clean up logging if we failed
      try {
        JobEntryUtils.removeAppenderFrom( sqoopToKettleAppender, logLevelCache, LOGS_TO_MONITOR );
      } catch ( Exception e ) {
        // Ignore any exceptions while trying to clean up
      }
      sqoopToKettleAppender = null;
    }
  }
  /**
   * Remove our log appender from all loggers used by Sqoop and restore the original stderr stream.
   */
  public void removeLoggingAppenders() {
    try {
      if ( sqoopToKettleAppender != null ) {
        JobEntryUtils.removeAppenderFrom( sqoopToKettleAppender, logLevelCache, LOGS_TO_MONITOR );
        sqoopToKettleAppender = null;
      }
      if ( stdErrProxy != null ) {
        System.setErr( stdErrProxy.getWrappedStream() );
        stdErrProxy = null;
      }
    } catch ( Exception ex ) {
      logError( BaseMessages.getString( AbstractSqoopJobEntry.class, "ErrorDetachingLogging" ) );
      logError( Const.getStackTracker( ex ) );
    }
  }
  /**
   * Validate any configuration option we use directly that could be invalid at runtime.
   *
   * @param config
   *          Configuration to validate
   * @return List of warning messages for any invalid configuration options we use directly in this job entry.
   */
  @Override
  public List<String> getValidationWarnings( SqoopConfig config ) {
    List<String> warnings = new ArrayList<String>();
    if ( StringUtil.isEmpty( config.getConnect() ) ) {
      warnings.add( BaseMessages.getString( AbstractSqoopJobEntry.class, "ValidationError.Connect.Message", config
          .getConnect() ) );
    }
    try {
      JobEntryUtils.asLong( config.getBlockingPollingInterval(), variables );
    } catch ( NumberFormatException ex ) {
      warnings.add( BaseMessages.getString( AbstractSqoopJobEntry.class,
          "ValidationError.BlockingPollingInterval.Message", config.getBlockingPollingInterval() ) );
    }
    return warnings;
  }
  /**
   * Handle any clean up required when our execution thread encounters an unexpected {@link Exception}.
   *
   * @param t
   *          Thread that encountered the uncaught exception
   * @param e
   *          Exception that was encountered
   * @param jobResult
   *          Job result for the execution that spawned the thread
   */
  @Override
  protected void handleUncaughtThreadException( Thread t, Throwable e, Result jobResult ) {
    logError( BaseMessages.getString( AbstractSqoopJobEntry.class, "ErrorRunningSqoopTool" ), e );
    removeLoggingAppenders();
    setJobResultFailed( jobResult );
  }
  /**
   * @param shim
   *          Hadoop shim to load configuration from
   * @return the Hadoop configuration object for this Sqoop execution
   */
  protected Configuration getHadoopConfiguration( HadoopShim shim ) {
    return shim.createConfiguration();
  }
  @Override
  protected Runnable getExecutionRunnable( final Result jobResult ) throws KettleException {
    try {
      HadoopConfiguration activeConfig = getActiveHadoopConfiguration();
      final HadoopShim hadoopShim = activeConfig.getHadoopShim();
      final SqoopShim sqoopShim = activeConfig.getSqoopShim();
      Runnable runnable = new Runnable() {
        @Override
        public void run() {
          executeSqoop( hadoopShim, sqoopShim, getJobConfig(), getHadoopConfiguration( hadoopShim ), jobResult );
        }
      };
      return runnable;
    } catch ( ConfigurationException ex ) {
      throw new KettleException( ex );
    }
  }
  /**
   * Executes Sqoop using the provided configuration objects. The {@code jobResult} will accurately reflect the
   * completed execution state when finished.
   *
   * @param hadoopShim
   *          Hadoop Shim to use
   * @param sqoopShim
   *          Sqoop Shim to use
   * @param config
   *          Sqoop configuration settings
   * @param hadoopConfig
   *          Hadoop configuration settings. This will be additionally configured using
   *          {@link #configure(HadoopShim, SqoopConfig, Configuration)}.
   * @param jobResult
   *          Result to update based on feedback from the Sqoop tool
   */
  protected void executeSqoop( HadoopShim hadoopShim, SqoopShim sqoopShim, S config, Configuration hadoopConfig,
      Result jobResult ) {
    // Make sure Sqoop throws exceptions instead of returning a status of 1
    System.setProperty( "sqoop.throwOnError", "true" );
    attachLoggingAppenders();
    try {
      configure( hadoopShim, config, hadoopConfig );
      List<String> args = SqoopUtils.getCommandLineArgs( config, getVariables() );
      args.add( 0, getToolName() ); // push the tool command-line argument on the top of the args list
      int result = sqoopShim.runTool( args.toArray( new String[args.size()] ), hadoopConfig );
      if ( result != 0 ) {
        setJobResultFailed( jobResult );
      }
    } catch ( Exception ex ) {
      logError( BaseMessages.getString( AbstractSqoopJobEntry.class, "ErrorRunningSqoopTool" ), ex );
      setJobResultFailed( jobResult );
    } finally {
      removeLoggingAppenders();
    }
  }
  /**
   * Configure the Hadoop environment
   *
   * @param shim
   *          Hadoop Shim
   * @param sqoopConfig
   *          Sqoop configuration settings
   * @param conf
   *          Hadoop configuration
   * @throws org.pentaho.di.core.exception.KettleException
   *
   */
  public void configure( HadoopShim shim, S sqoopConfig, Configuration conf ) throws KettleException {
    configureDatabase( sqoopConfig );
    configureShim( shim, sqoopConfig, conf );
  }
  /**
   * Configure database connection information
   * @param sqoopConfig - Sqoop configuration
   */
  public void configureDatabase( S sqoopConfig ) throws KettleException {
    DatabaseMeta databaseMeta = getParentJob().getJobMeta().findDatabase( sqoopConfig.getDatabase() );
    // if databaseMeta == null we assume "USE_ADVANCED_MODE" is selected on QUICK_SETUP
    if ( sqoopConfig.getModeAsEnum() == JobEntryMode.QUICK_SETUP && databaseMeta != null ) {
      sqoopConfig.setConnectionInfo(
        databaseMeta.getName(),
        databaseMeta.getURL(),
        databaseMeta.getUsername(),
        databaseMeta.getPassword() );
    }
  }
  /**
   * Configure Hadoop related parameters
   * @param shim - shim in use
   * @param sqoopConfig - Sqoop configuration
   * @param conf - Hadoop client configuration
   */
  public void configureShim( HadoopShim shim, S sqoopConfig, Configuration conf ) throws KettleException {
    try {
      List<String> messages = new ArrayList<String>();
      NamedCluster nc = loadNamedCluster( sqoopConfig );
      if ( nc != null ) {
        if ( nc.isMapr() ) {
          // MapR clusters resolve their own endpoints; pass empty host/port info
          shim.configureConnectionInformation( "", "", "", "", conf, messages );
        } else {
          shim.configureConnectionInformation(
            environmentSubstitute( nc.getHdfsHost() ),
            environmentSubstitute( nc.getHdfsPort() ),
            environmentSubstitute( nc.getJobTrackerHost() ),
            environmentSubstitute( nc.getJobTrackerPort() ), conf, messages );
        }
      } else {
        shim.configureConnectionInformation( environmentSubstitute( sqoopConfig.getNamenodeHost() ),
          environmentSubstitute( sqoopConfig.getNamenodePort() ), environmentSubstitute( sqoopConfig
            .getJobtrackerHost() ), environmentSubstitute( sqoopConfig.getJobtrackerPort() ), conf, messages );
      }
      for ( String m : messages ) {
        logBasic( m );
      }
    } catch ( Exception e ) {
      throw new KettleException( BaseMessages.getString( AbstractSqoopJobEntry.class,
        "ErrorConfiguringHadoopEnvironment" ), e );
    }
  }
  /**
   * Determine if a database type is supported.
   *
   * @param databaseType
   *          Database type to check for compatibility
   * @return {@code true} if this database is supported for this tool
   */
  public boolean isDatabaseSupported( Class<? extends DatabaseInterface> databaseType ) {
    // For now all database types are supported
    return true;
  }
  /**
   * Resolve the named cluster referenced by the config (if any) from the repository
   * metastore and store it on the config.
   *
   * @param config Sqoop configuration to update
   * @return the named cluster set on the config, or {@code null} if none could be loaded
   */
  NamedCluster loadNamedCluster( S config ) {
    if ( rep != null && !Const.isEmpty( config.getClusterName() ) ) {
      try {
        config.setNamedCluster( NamedClusterManager.getInstance().read( config.getClusterName(), rep.getMetaStore() ) );
      } catch ( MetaStoreException e ) {
        logError( BaseMessages.getString(
          AbstractSqoopJobEntry.class, "ErrorLoadNamedCluster", config.getClusterName() ),
          e );
      }
    }
    return config.getNamedCluster();
  }
  /**
   * @return the currently active Hadoop configuration
   * @throws ConfigurationException if no active configuration can be resolved
   */
  protected HadoopConfiguration getActiveHadoopConfiguration() throws ConfigurationException {
    return HadoopConfigurationBootstrap.getHadoopConfigurationProvider().getActiveConfiguration();
  }
}
| |
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) 1999-2006 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <p>
*/
package org.olat.lms.commons.mediaresource;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.olat.data.commons.fileutil.FileUtils;
import org.olat.presentation.commons.session.UserSession;
import org.olat.presentation.framework.core.Windows;
import org.olat.presentation.framework.core.util.bandwidth.SlowBandWidthSimulator;
import org.olat.system.commons.Settings;
import org.olat.system.commons.StringHelper;
import org.olat.system.logging.log4j.LoggerHelper;
/**
* @author Felix Jost
*/
public class ServletUtil {
// Class-wide log4j logger, resolved for this class by the OLAT logging helper.
private static final Logger log = LoggerHelper.getLogger();
/**
* @param httpReq
* @param httpResp
* @param mr
*/
/**
 * Serves the given media resource on the response, honoring conditional GETs
 * (If-Modified-Since) and switching to flash pseudo-streaming when the request
 * carries a seek parameter. The resource is always released afterwards.
 *
 * @param httpReq incoming request
 * @param httpResp response to write to
 * @param mr resource to serve
 */
public static void serveResource(HttpServletRequest httpReq, HttpServletResponse httpResp, MediaResource mr) {
    final boolean verbose = log.isDebugEnabled();
    try {
        final Long lastModified = mr.getLastModified();
        if (lastModified != null) {
            // Honor the browser's conditional GET so cached images are not re-sent.
            final long modifiedMillis = lastModified.longValue();
            final long ifModifiedSince = httpReq.getDateHeader("If-Modified-Since");
            // Compare at whole-second granularity: the HTTP date header carries no millis.
            if (ifModifiedSince >= (modifiedMillis / 1000L) * 1000L) {
                httpResp.setStatus(HttpServletResponse.SC_NOT_MODIFIED);
                return;
            }
            httpResp.setDateHeader("Last-Modified", modifiedMillis);
        }
        if (!isFlashPseudoStreaming(httpReq)) {
            final String mime = mr.getContentType();
            if (mime != null) {
                httpResp.setContentType(mime);
            }
            serveFullResource(httpReq, httpResp, mr);
        } else {
            httpResp.setContentType("video/x-flv");
            pseudoStreamFlashResource(httpReq, httpResp, mr);
        }
        // Having no stream at all is legal, e.g. a 302 redirect carries only headers.
    } catch (Exception e) {
        // Usually the client aborted the transfer; only worth noting while debugging.
        if (verbose) {
            log.warn("client browser abort when serving media resource", e);
        }
    } finally {
        try {
            mr.release();
        } catch (Exception e) {
            // we did our best here to clean up
        }
    }
}
/**
 * A request is a flash pseudo-streaming request when the player passed a seek
 * offset via the "undefined" or "start" query parameter.
 */
private static boolean isFlashPseudoStreaming(HttpServletRequest httpReq) {
    return StringHelper.containsNonWhitespace(httpReq.getParameter("undefined"))
            || StringHelper.containsNonWhitespace(httpReq.getParameter("start"));
}
/**
 * Streams the complete media resource to the response, optionally throttled by
 * the slow-bandwidth simulator in debug deployments.
 *
 * @param httpReq incoming request (used to locate the user session for throttling)
 * @param httpResp response to write the resource body to
 * @param mr resource providing size, headers and the input stream
 */
private static void serveFullResource(HttpServletRequest httpReq, HttpServletResponse httpResp, MediaResource mr) {
    boolean debug = log.isDebugEnabled();
    InputStream in = null;
    OutputStream out = null;
    BufferedInputStream bis = null;
    try {
        Long size = mr.getSize();
        // if the size is known, set it to make browser's life easier
        if (size != null) {
            httpResp.setContentLength(size.intValue());
        }
        // maybe some more preparations
        mr.prepare(httpResp);
        in = mr.getInputStream();
        // serve the Resource
        if (in != null) {
            long rstart = 0;
            if (debug) {
                rstart = System.currentTimeMillis();
            }
            if (Settings.isDebuging()) {
                SlowBandWidthSimulator sbs = Windows.getWindows(UserSession.getUserSession(httpReq)).getSlowBandWidthSimulator();
                out = sbs.wrapOutputStream(httpResp.getOutputStream());
            } else {
                out = httpResp.getOutputStream();
            }
            // buffer input stream
            bis = new BufferedInputStream(in);
            FileUtils.copy(bis, out);
            if (debug) {
                long rstop = System.currentTimeMillis();
                log.debug("time to serve (mr=" + mr.getClass().getName() + ") " + (size == null ? "n/a" : "" + size) + " bytes: " + (rstop - rstart));
            }
        }
    } catch (IOException e) {
        FileUtils.closeSafely(out);
        log.error("client browser probably abort when serving media resource", e);
    } finally {
        // BUGFIX: the original closed the streams only inside the catch block,
        // leaking the resource's input stream on every successful request.
        // The response output stream is deliberately left to the container.
        FileUtils.closeSafely(bis);
        FileUtils.closeSafely(in);
    }
}
/**
 * Serves an FLV resource with flash pseudo-streaming support: when the request
 * carries a seek offset, skips to that position and prepends a minimal 13-byte
 * FLV header so the player can resume mid-file.
 *
 * @param httpReq incoming request (carries the optional seek offset)
 * @param httpResp response to stream the video to
 * @param mr FLV media resource
 */
private static void pseudoStreamFlashResource(HttpServletRequest httpReq, HttpServletResponse httpResp, MediaResource mr) {
    Long range = getRange(httpReq);
    long seekPos = range == null ? 0l : range.longValue();
    long fileSize = mr.getSize() - ((seekPos > 0) ? seekPos + 1 : 0);
    InputStream s = null;
    OutputStream out = null;
    try {
        s = new BufferedInputStream(mr.getInputStream());
        out = httpResp.getOutputStream();
        if (seekPos == 0) {
            httpResp.addHeader("Content-Length", Long.toString(fileSize));
        } else {
            // account for the 13-byte FLV header we prepend when seeking
            httpResp.addHeader("Content-Length", Long.toString(fileSize + 13));
            byte[] flvHeader = new byte[] { 70, 76, 86, 1, 1, 0, 0, 0, 9, 0, 0, 0, 9 };
            out.write(flvHeader);
        }
        s.skip(seekPos);
        final int bufferSize = 1024 * 10;
        long left = fileSize;
        while (left > 0) {
            int howMuch = bufferSize;
            if (howMuch > left) {
                howMuch = (int) left;
            }
            byte[] buf = new byte[howMuch];
            int numRead = s.read(buf);
            // BUGFIX: check for end-of-stream BEFORE using numRead. The original
            // executed out.write(buf, 0, -1) first, which throws
            // IndexOutOfBoundsException instead of terminating cleanly.
            if (numRead == -1) {
                break;
            }
            out.write(buf, 0, numRead);
            httpResp.flushBuffer();
            left -= numRead;
        }
    } catch (Exception e) {
        if (e.getClass().getName().contains("Eof")) {
            // client closed the connection mid-stream: ignore
        } else {
            log.error("error while pseudo-streaming flash resource", e);
            throw new RuntimeException(e);
        }
    } finally {
        // the response output stream is left to the servlet container
        FileUtils.closeSafely(s);
    }
}
/**
 * Reads the requested seek offset from the {@code start} parameter (or the
 * {@code undefined} parameter some players send instead).
 *
 * @return the parsed offset, or {@code null} when neither parameter is present
 */
private static Long getRange(HttpServletRequest httpReq) {
  String start = httpReq.getParameter("start");
  if (start != null) {
    return Long.parseLong(start);
  }
  String undefined = httpReq.getParameter("undefined");
  return undefined != null ? Long.parseLong(undefined) : null;
}
/**
* @param response
* @param result
*/
/**
 * Writes the given markup string as a complete, non-cacheable, UTF-8 encoded
 * HTTP response. The string is encoded into a byte buffer first so the exact
 * Content-Length can be set before any data is sent.
 *
 * @param httpReq  current request (used to locate the user session in debug mode)
 * @param response response to write to
 * @param result   the page content to send
 */
public static void serveStringResource(HttpServletRequest httpReq, HttpServletResponse response, String result) {
  // We deliberately ignore the request's accept-charset and always answer in
  // UTF-8: one application mixes many content languages and translations.
  response.setContentType("text/html;charset=utf-8");
  // Pages contain a timestamp valid only once - forbid any caching.
  // HTTP 1.1
  response.setHeader("Cache-Control", "private, no-cache, no-store, must-revalidate, proxy-revalidate, s-maxage=0, max-age=0");
  // HTTP 1.0
  response.setHeader("Pragma", "no-cache");
  response.setDateHeader("Expires", 0);
  boolean isDebug = log.isDebugEnabled();
  // Log some response headers before the output is sent.
  if (isDebug) {
    log.debug("\nResponse headers (some)\ncontent type:" + response.getContentType() + "\ncharacterencoding:" + response.getCharacterEncoding() + "\nlocale:"
        + response.getLocale());
  }
  try {
    long startMillis = isDebug ? System.currentTimeMillis() : 0;
    // Pre-size assuming at most two bytes per char on average.
    ByteArrayOutputStream encoded = new ByteArrayOutputStream(result.length() * 2);
    OutputStreamWriter writer = new OutputStreamWriter(encoded, "utf-8");
    writer.write(result);
    writer.close();
    // The content is now UTF-8 encoded in the buffer; its size is exact.
    int encLen = encoded.size();
    response.setContentLength(encLen);
    OutputStream target;
    if (Settings.isDebuging()) {
      // Debug setups throttle the output to simulate a slow connection.
      SlowBandWidthSimulator sbs = Windows.getWindows(UserSession.getUserSession(httpReq)).getSlowBandWidthSimulator();
      target = sbs.wrapOutputStream(response.getOutputStream());
    } else {
      target = response.getOutputStream();
    }
    target.write(encoded.toByteArray());
    target.close();
    if (isDebug) {
      long stopMillis = System.currentTimeMillis();
      log.debug("time to serve inline-resource " + result.length() + " chars / " + encLen + " bytes: " + (stopMillis - startMillis));
    }
  } catch (IOException e) {
    // Most likely the client aborted the request; only worth a note.
    if (isDebug) {
      log.warn("client browser abort when serving inline", e);
    }
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.processing.graphgenerator;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.metadata.CarbonMetadata;
import org.apache.carbondata.core.metadata.encoder.Encoding;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.processing.api.dataloader.DataLoadModel;
import org.apache.carbondata.processing.api.dataloader.SchemaInfo;
import org.apache.carbondata.processing.csvreaderstep.BlockDetails;
import org.apache.carbondata.processing.csvreaderstep.CsvInputMeta;
import org.apache.carbondata.processing.graphgenerator.configuration.GraphConfigurationInfo;
import org.apache.carbondata.processing.mdkeygen.MDKeyGenStepMeta;
import org.apache.carbondata.processing.merger.step.CarbonSliceMergerStepMeta;
import org.apache.carbondata.processing.model.CarbonDataLoadSchema;
import org.apache.carbondata.processing.schema.metadata.TableOptionWrapper;
import org.apache.carbondata.processing.sortandgroupby.sortdatastep.SortKeyStepMeta;
import org.apache.carbondata.processing.surrogatekeysgenerator.csvbased.CarbonCSVBasedSeqGenMeta;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
import org.apache.carbondata.processing.util.CarbonSchemaParser;
import org.apache.carbondata.processing.util.NonDictionaryUtil;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.util.EnvUtil;
import org.pentaho.di.trans.TransHopMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.steps.hadoopfileinput.HadoopFileInputMeta;
import org.pentaho.di.trans.steps.selectvalues.SelectMetadataChange;
import org.pentaho.di.trans.steps.selectvalues.SelectValuesMeta;
import org.pentaho.di.trans.steps.tableinput.TableInputMeta;
public class GraphGenerator {
// Block details per input, shared with the CSV reader step via the blocks id.
public static final HashMap<String, BlockDetails[]> blockInfo = new HashMap<>();
// Default blocklet size (rows). NOTE: constant name keeps its historic typo.
private static final String DEFAUL_BLOCKLET_SIZE = "8192";
// Default maximum number of blocklets per file. NOTE: historic typo in name.
private static final String DEFAULE_MAX_BLOCKLET_IN_FILE = "100";
// Default number of cores.
private static final String DEFAULT_NUMBER_CORES = "2";
// Default batch size.
private static final String DEFAULT_BATCH_SIZE = "1000";
// Default sort size.
private static final String DEFAULT_SORT_SIZE = "100000";
// JDBC driver class -> database type constant; also used as the lock object
// guarding Kettle initialization (see validateAndInitialiseKettelEngine).
private static final Map<String, String> DRIVERS;
// Class logger.
private static final LogService LOGGER =
LogServiceFactory.getLogService(GraphGenerator.class.getName());
// Whether the Kettle engine has been initialized in this JVM; guarded by DRIVERS.
private static boolean kettleInitialized = false;
static {
DRIVERS = new HashMap<String, String>(1);
DRIVERS.put("oracle.jdbc.OracleDriver", CarbonCommonConstants.TYPE_ORACLE);
DRIVERS.put("com.mysql.jdbc.Driver", CarbonCommonConstants.TYPE_MYSQL);
DRIVERS.put("org.gjt.mm.mysql.Driver", CarbonCommonConstants.TYPE_MYSQL);
DRIVERS.put("com.microsoft.sqlserver.jdbc.SQLServerDriver", CarbonCommonConstants.TYPE_MSSQL);
DRIVERS.put("com.sybase.jdbc3.jdbc.SybDriver", CarbonCommonConstants.TYPE_SYBASE);
}
// Base directory the generated .ktr graph files are written under.
private String outputLocation = "";
// X coordinate for step placement on the Kettle canvas; advanced by 120 per step.
private int xAxixLocation = 50;
// Y coordinate for step placement on the Kettle canvas.
private int yAxixLocation = 100;
// Database (schema) name of the table being loaded.
private String databaseName;
// Carbon properties singleton, set in initialise().
private CarbonProperties instance;
// Schema information for the current load.
private SchemaInfo schemaInfo;
// Table name.
private String tableName;
// Whether this is a CSV load request (controls input step and hops).
private boolean isCSVLoad;
// Modified dimensions, passed through to the surrogate key step.
private String[] modifiedDimension;
// Whether this is an auto-aggregate request.
private boolean isAutoAggRequest;
// Load schema describing the carbon table.
private CarbonDataLoadSchema carbonDataLoadSchema;
// Whether this is an update-member request (passed to the sort step).
private boolean isUpdateMemberRequest;
// Whether the CSV file is read from HDFS (selects the Hadoop input step).
private boolean isHDFSReadMode;
// Partition id of this load.
private String partitionID;
// Whether columnar storage is used (currently always the default value).
private boolean isColumnar;
// Fact table name, set in initialise().
private String factTableName;
// Fact store location.
private String factStoreLocation;
// Id identifying this task's entry in blockInfo.
private String blocksID;
// CSV escape character.
private String escapeCharacter;
// CSV quote character.
private String quoteCharacter;
// CSV comment character.
private String commentCharacter;
// Date format for the load.
private String dateFormat;
// Task id; each spark task has a unique id.
private String taskNo;
// Load (segment) id.
private String segmentId;
// New load start time.
private String factTimeStamp;
// Max number of columns configured by the user to be parsed in a row.
private String maxColumns;
// Key identifying the RDD iterator feeding the CSV input step, if any.
private String rddIteratorKey;
/**
 * Creates a graph generator for one data-load task.
 *
 * @param dataLoadModel        load request parameters (table, CSV options, task id, ...)
 * @param partitionID          partition id of this load
 * @param factStoreLocation    location of the fact store
 * @param carbonDataLoadSchema schema of the table being loaded
 * @param segmentId            id of the segment (load) being created
 */
public GraphGenerator(DataLoadModel dataLoadModel, String partitionID, String factStoreLocation,
CarbonDataLoadSchema carbonDataLoadSchema, String segmentId) {
// Register the table in the CarbonMetadata singleton so later lookups by name work.
CarbonMetadata.getInstance().addCarbonTable(carbonDataLoadSchema.getCarbonTable());
this.schemaInfo = dataLoadModel.getSchemaInfo();
this.tableName = dataLoadModel.getTableName();
this.isCSVLoad = dataLoadModel.isCsvLoad();
this.isAutoAggRequest = schemaInfo.isAutoAggregateRequest();
this.carbonDataLoadSchema = carbonDataLoadSchema;
this.databaseName = carbonDataLoadSchema.getCarbonTable().getDatabaseName();
this.partitionID = partitionID;
this.factStoreLocation = factStoreLocation;
// Columnar storage is always on (parsed from the constant's default value).
this.isColumnar = Boolean.parseBoolean(CarbonCommonConstants.IS_COLUMNAR_STORAGE_DEFAULTVALUE);
this.blocksID = dataLoadModel.getBlocksID();
this.taskNo = dataLoadModel.getTaskNo();
this.quoteCharacter = dataLoadModel.getQuoteCharacter();
this.commentCharacter = dataLoadModel.getCommentCharacter();
this.dateFormat = dataLoadModel.getDateFormat();
this.factTimeStamp = dataLoadModel.getFactTimeStamp();
this.segmentId = segmentId;
this.escapeCharacter = dataLoadModel.getEscapeCharacter();
this.maxColumns = dataLoadModel.getMaxColumns();
initialise();
LOGGER.info("************* Is Columnar Storage" + isColumnar);
}
/**
 * Same as the five-argument constructor, additionally setting the output
 * location for the generated graph file and the RDD iterator key used by the
 * CSV input step.
 *
 * @param outputLocation base directory the .ktr graph file is written under
 */
public GraphGenerator(DataLoadModel dataLoadModel, String partitionID, String factStoreLocation,
CarbonDataLoadSchema carbonDataLoadSchema, String segmentId, String outputLocation) {
this(dataLoadModel, partitionID, factStoreLocation, carbonDataLoadSchema, segmentId);
this.outputLocation = outputLocation;
this.rddIteratorKey = dataLoadModel.getRddIteratorKey();
}
/**
* Generate the graph file ...
*
* @param transMeta
* @param graphFile
* @throws KettleException
*/
/**
 * Serializes the given transformation to its Kettle XML representation and
 * writes it to {@code graphFile}.
 *
 * @param transMeta transformation to serialize
 * @param graphFile path of the .ktr file to write
 * @throws GraphGeneratorException if the XML cannot be produced or written
 */
private static void generateGraphFile(TransMeta transMeta, String graphFile)
    throws GraphGeneratorException {
  try {
    String xml = transMeta.getXML();
    // try-with-resources replaces the old manual close in finally, whose
    // IOException was silently swallowed (the catch only called e.getMessage()).
    try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(new File(graphFile)))) {
      dos.write(xml.getBytes(CarbonCommonConstants.DEFAULT_CHARSET));
    }
  } catch (KettleException kettleException) {
    throw new GraphGeneratorException("Error while getting the graph XML", kettleException);
  } catch (FileNotFoundException e) {
    // FIX: message typo ("graph fileL").
    throw new GraphGeneratorException("Unable to find the graph file", e);
  } catch (UnsupportedEncodingException ue) {
    throw new GraphGeneratorException("Error while Converting the graph xml string to bytes", ue);
  } catch (IOException ioe) {
    throw new GraphGeneratorException("Error while writing the graph file", ioe);
  }
}
/** Caches the CarbonProperties singleton and resolves the fact table name. */
private void initialise() {
this.instance = CarbonProperties.getInstance();
//TO-DO need to take care while supporting aggregate table using new schema.
//aggregateTable = CarbonSchemaParser.getAggregateTable(table, schema);
this.factTableName = carbonDataLoadSchema.getCarbonTable().getFactTableName();
}
/**
 * Entry point: initializes the Kettle engine (and the working directory),
 * builds the fact-table graph configuration, and generates the graph file.
 *
 * @throws GraphGeneratorException if engine init or graph generation fails
 */
public void generateGraph() throws GraphGeneratorException {
validateAndInitialiseKettelEngine();
GraphConfigurationInfo graphConfigInfoForFact = getGraphConfigInfoForFact(carbonDataLoadSchema);
generateGraph(graphConfigInfoForFact, graphConfigInfoForFact.getTableName() + ": Graph",
isCSVLoad, graphConfigInfoForFact);
}
/**
 * Ensures the per-task working directory exists and initializes the Kettle
 * engine exactly once per JVM.
 *
 * @throws GraphGeneratorException if the directory cannot be created or
 *                                 Kettle initialization fails
 */
private void validateAndInitialiseKettelEngine() throws GraphGeneratorException {
  // Working directory: <output>/<database>/<table>/<segment>/<task>/
  File file = new File(
      outputLocation + File.separator + schemaInfo.getDatabaseName() + File.separator
          + this.tableName + File.separator + this.segmentId + File.separator + this.taskNo
          + File.separator);
  // FIX: mkdirs() returns false both on real failure and when another thread
  // created the directory concurrently - re-check existence before failing.
  // (The old message "Unable to create directory or directory already exist"
  // conflated the two cases and lacked a separator before the path.)
  if (!file.exists() && !file.mkdirs() && !file.exists()) {
    LOGGER.error("Unable to create directory " + file.getAbsolutePath());
    throw new GraphGeneratorException("INTERNAL_SYSTEM_ERROR");
  }
  // Kettle environment initialization must happen once per JVM; DRIVERS
  // doubles as the class-wide lock guarding kettleInitialized.
  synchronized (DRIVERS) {
    try {
      if (!kettleInitialized) {
        EnvUtil.environmentInit();
        KettleEnvironment.init();
        kettleInitialized = true;
      }
    } catch (KettleException kettlExp) {
      LOGGER.error(kettlExp);
      throw new GraphGeneratorException("Error While Initializing the Kettel Engine ", kettlExp);
    }
  }
}
/**
 * Builds the Kettle transformation for one table -
 * input -> [type-select (non-CSV only)] -> surrogate key -> sort -> MDKey ->
 * slice merger - and writes it to the graph (.ktr) file.
 *
 * @param configurationInfo        configuration for this table
 * @param transName                display name of the transformation
 * @param isCSV                    whether input comes from CSV (vs. a table)
 * @param configurationInfoForFact configuration of the fact table
 * @throws GraphGeneratorException if the graph file cannot be written
 */
private void generateGraph(GraphConfigurationInfo configurationInfo, String transName,
    boolean isCSV, GraphConfigurationInfo configurationInfoForFact)
    throws GraphGeneratorException {
  TransMeta trans = new TransMeta();
  trans.setName(transName);
  if (!isCSV) {
    trans.addDatabase(getDatabaseMeta(configurationInfo));
  }
  trans.setSizeRowset(Integer.parseInt(instance
      .getProperty(CarbonCommonConstants.GRAPH_ROWSET_SIZE,
          CarbonCommonConstants.GRAPH_ROWSET_SIZE_DEFAULT)));
  // Build the individual steps. (Cleanup: removed the dead null-then-assign
  // locals of the original.)
  StepMeta inputStep;
  StepMeta selectValueToChangeTheDataType = null;
  if (isCSV) {
    inputStep = isHDFSReadMode ? getHadoopInputStep(configurationInfo)
        : getCSVInputStep(configurationInfo);
  } else {
    inputStep = getTableInputStep(configurationInfo);
    // Table input needs an extra step converting measure column data types.
    selectValueToChangeTheDataType = getSelectValueToChangeTheDataType(configurationInfo, 1);
  }
  StepMeta carbonSurrogateKeyStep = getCarbonCSVBasedSurrogateKeyStep(configurationInfo);
  StepMeta sortStep = getSortStep(configurationInfo);
  StepMeta carbonMDKeyStep = getMDKeyStep(configurationInfo);
  StepMeta carbonSliceMergerStep =
      getSliceMeregerStep(configurationInfo, configurationInfoForFact);
  // Register all steps with the transformation.
  trans.addStep(inputStep);
  if (!isCSV) {
    trans.addStep(selectValueToChangeTheDataType);
  }
  trans.addStep(carbonSurrogateKeyStep);
  trans.addStep(sortStep);
  trans.addStep(carbonMDKeyStep);
  trans.addStep(carbonSliceMergerStep);
  // Wire the hops between consecutive steps, in the same registration order
  // as before. (Cleanup: the duplicated addTransHop in both branches of the
  // original isCSV conditional is folded together.)
  if (isCSV) {
    trans.addTransHop(new TransHopMeta(inputStep, carbonSurrogateKeyStep));
  } else {
    trans.addTransHop(new TransHopMeta(inputStep, selectValueToChangeTheDataType));
    trans.addTransHop(new TransHopMeta(selectValueToChangeTheDataType, carbonSurrogateKeyStep));
  }
  trans.addTransHop(new TransHopMeta(carbonSurrogateKeyStep, sortStep));
  trans.addTransHop(new TransHopMeta(sortStep, carbonMDKeyStep));
  trans.addTransHop(new TransHopMeta(carbonMDKeyStep, carbonSliceMergerStep));
  String graphFilePath =
      outputLocation + File.separator + schemaInfo.getDatabaseName() + File.separator
          + this.tableName + File.separator + segmentId + File.separator + this.taskNo
          + File.separator + this.tableName + ".ktr";
  generateGraphFile(trans, graphFilePath);
}
/**
 * Creates the Hadoop file input step used when the CSV source is on HDFS.
 * The file path is resolved from the ${csvInputFilePath} variable at run time.
 *
 * @param graphConfiguration configuration for this table (currently unused here)
 * @return the configured input step
 * @throws GraphGeneratorException declared for signature symmetry with the
 *                                 other input-step factories
 */
private StepMeta getHadoopInputStep(GraphConfigurationInfo graphConfiguration)
throws GraphGeneratorException {
HadoopFileInputMeta fileInputMeta = new HadoopFileInputMeta();
fileInputMeta.setFilenameField("filename");
fileInputMeta.setFileName(new String[] { "${csvInputFilePath}" });
fileInputMeta.setDefault();
fileInputMeta.setEncoding(CarbonCommonConstants.DEFAULT_CHARSET);
fileInputMeta.setEnclosure("\"");
fileInputMeta.setHeader(true);
fileInputMeta.setSeparator(",");
fileInputMeta.setAcceptingFilenames(true);
fileInputMeta.setAcceptingStepName("getFileNames");
fileInputMeta.setFileFormat("mixed");
StepMeta csvDataStep = new StepMeta("HadoopFileInputPlugin", (StepMetaInterface) fileInputMeta);
csvDataStep.setLocation(100, 100);
int copies = Integer.parseInt(instance.getProperty(CarbonCommonConstants.NUM_CORES_LOADING,
CarbonCommonConstants.DEFAULT_NUMBER_CORES));
// NOTE(review): 'copies' is parsed from NUM_CORES_LOADING but the step is
// hard-coded to 4 copies whenever copies > 1 - confirm whether
// setCopies(copies) was intended.
if (copies > 1) {
csvDataStep.setCopies(4);
}
csvDataStep.setDraw(true);
csvDataStep.setDescription("Read raw data from " + GraphGeneratorConstants.CSV_INPUT);
return csvDataStep;
}
/**
 * Creates the CSV input step that reads the raw load file. The file path is
 * resolved from the ${csvInputFilePath} variable at run time.
 *
 * @param graphConfiguration configuration for this table (currently unused here)
 * @return the configured CSV input step
 * @throws GraphGeneratorException declared for signature symmetry with the
 *                                 other input-step factories
 */
private StepMeta getCSVInputStep(GraphConfigurationInfo graphConfiguration)
    throws GraphGeneratorException {
  CsvInputMeta inputMeta = new CsvInputMeta();
  // Init the filename, then apply the step defaults.
  inputMeta.setFilename("${csvInputFilePath}");
  inputMeta.setDefault();
  inputMeta.setEncoding(CarbonCommonConstants.DEFAULT_CHARSET);
  inputMeta.setEnclosure("\"");
  inputMeta.setHeaderPresent(true);
  inputMeta.setMaxColumns(maxColumns);
  inputMeta.setFilenameField("filename");
  inputMeta.setLazyConversionActive(false);
  inputMeta.setBufferSize(instance.getProperty(CarbonCommonConstants.CSV_READ_BUFFER_SIZE,
      CarbonCommonConstants.CSV_READ_BUFFER_SIZE_DEFAULT));
  // Identify the blocks and partition this task is responsible for.
  inputMeta.setBlocksID(this.blocksID);
  inputMeta.setPartitionID(this.partitionID);
  inputMeta.setEscapeCharacter(this.escapeCharacter);
  inputMeta.setQuoteCharacter(this.quoteCharacter);
  inputMeta.setCommentCharacter(this.commentCharacter);
  inputMeta.setRddIteratorKey(this.rddIteratorKey == null ? "" : this.rddIteratorKey);
  StepMeta csvStep =
      new StepMeta(GraphGeneratorConstants.CSV_INPUT, (StepMetaInterface) inputMeta);
  csvStep.setLocation(100, 100);
  csvStep.setDraw(true);
  csvStep.setDescription("Read raw data from " + GraphGeneratorConstants.CSV_INPUT);
  return csvStep;
}
/**
 * Creates the slice merger step that merges the written data files at the end
 * of the load.
 *
 * @param configurationInfo         configuration for this table
 * @param graphjConfigurationForFact fact-table configuration (currently unused here)
 * @return the configured slice merger step
 */
private StepMeta getSliceMeregerStep(GraphConfigurationInfo configurationInfo,
GraphConfigurationInfo graphjConfigurationForFact) {
CarbonSliceMergerStepMeta sliceMerger = new CarbonSliceMergerStepMeta();
sliceMerger.setDefault();
sliceMerger.setPartitionID(partitionID);
sliceMerger.setSegmentId(segmentId);
sliceMerger.setTaskNo(taskNo);
sliceMerger.setHeirAndKeySize(configurationInfo.getHeirAndKeySizeString());
sliceMerger.setMdkeySize(configurationInfo.getMdkeySize());
sliceMerger.setMeasureCount(configurationInfo.getMeasureCount());
// setTabelName (sic, API typo) carries the fact table name; setTableName the
// schema-level table name.
sliceMerger.setTabelName(configurationInfo.getTableName());
sliceMerger.setTableName(schemaInfo.getTableName());
sliceMerger.setDatabaseName(schemaInfo.getDatabaseName());
sliceMerger.setGroupByEnabled(isAutoAggRequest + "");
if (isAutoAggRequest) {
// Build the '#'-separated aggregator list, mapping COUNT to SUM.
// NOTE(review): the substitution is applied only to the first n-1 entries;
// the last aggType entry is appended unmapped - confirm this is intended.
// Also assumes aggType/aggClass are non-empty (aggType[length - 1] would
// throw on an empty array).
String[] aggType = configurationInfo.getAggType();
StringBuilder builder = new StringBuilder();
for (int i = 0; i < aggType.length - 1; i++) {
if (aggType[i].equals(CarbonCommonConstants.COUNT)) {
builder.append(CarbonCommonConstants.SUM);
} else {
builder.append(aggType[i]);
}
builder.append(CarbonCommonConstants.HASH_SPC_CHARACTER);
}
builder.append(aggType[aggType.length - 1]);
sliceMerger.setAggregatorString(builder.toString());
String[] aggClass = configurationInfo.getAggClass();
builder = new StringBuilder();
for (int i = 0; i < aggClass.length - 1; i++) {
builder.append(aggClass[i]);
builder.append(CarbonCommonConstants.HASH_SPC_CHARACTER);
}
builder.append(aggClass[aggClass.length - 1]);
sliceMerger.setAggregatorClassString(builder.toString());
} else {
// Non-aggregate loads get placeholder separator-only strings.
sliceMerger.setAggregatorClassString(CarbonCommonConstants.HASH_SPC_CHARACTER);
sliceMerger.setAggregatorString(CarbonCommonConstants.HASH_SPC_CHARACTER);
}
sliceMerger.setFactDimLensString("");
sliceMerger.setLevelAnddataTypeString(configurationInfo.getLevelAnddataType());
StepMeta sliceMergerMeta =
new StepMeta(GraphGeneratorConstants.CARBON_SLICE_MERGER + configurationInfo.getTableName(),
(StepMetaInterface) sliceMerger);
sliceMergerMeta.setStepID(GraphGeneratorConstants.CARBON_SLICE_MERGER_ID);
// Place the step on the Kettle canvas, 120px right of the previous one.
xAxixLocation += 120;
sliceMergerMeta.setLocation(xAxixLocation, yAxixLocation);
sliceMergerMeta.setDraw(true);
sliceMergerMeta.setDescription(
"SliceMerger: " + GraphGeneratorConstants.CARBON_SLICE_MERGER + configurationInfo
.getTableName());
return sliceMergerMeta;
}
/**
 * Returns a default (empty) DatabaseMeta for non-CSV loads.
 * The declared exception is never thrown here; it is kept so callers'
 * handling stays uniform with the other step factories.
 */
private DatabaseMeta getDatabaseMeta(GraphConfigurationInfo configurationInfo)
throws GraphGeneratorException {
return new DatabaseMeta();
}
/**
 * Creates the table input step that reads the fact table through the
 * configured SQL query (non-CSV loads).
 *
 * @param configurationInfo configuration carrying the input SQL query
 * @return the configured table input step
 * @throws GraphGeneratorException propagated from getDatabaseMeta
 */
private StepMeta getTableInputStep(GraphConfigurationInfo configurationInfo)
    throws GraphGeneratorException {
  TableInputMeta inputMeta = new TableInputMeta();
  inputMeta.setDatabaseMeta(getDatabaseMeta(configurationInfo));
  inputMeta.setSQL(configurationInfo.getTableInputSqlQuery());
  StepMeta step =
      new StepMeta(GraphGeneratorConstants.TABLE_INPUT, (StepMetaInterface) inputMeta);
  // Place the step on the Kettle canvas, 120px right of the previous one.
  xAxixLocation += 120;
  step.setLocation(xAxixLocation, yAxixLocation);
  step.setDraw(true);
  step.setDescription("Read Data From Fact Table: " + GraphGeneratorConstants.TABLE_INPUT);
  return step;
}
/**
 * Creates the surrogate key generator step, which replaces dictionary
 * dimension values by their surrogate keys. Virtually all of the graph
 * configuration is serialized into this step's meta as strings.
 *
 * @param graphConfiguration configuration for this table
 * @return the configured surrogate key step
 */
private StepMeta getCarbonCSVBasedSurrogateKeyStep(GraphConfigurationInfo graphConfiguration) {
CarbonCSVBasedSeqGenMeta seqMeta = new CarbonCSVBasedSeqGenMeta();
seqMeta.setPartitionID(partitionID);
seqMeta.setSegmentId(segmentId);
seqMeta.setTaskNo(taskNo);
seqMeta.setCarbondim(graphConfiguration.getDimensionString());
seqMeta.setComplexTypeString(graphConfiguration.getComplexTypeString());
seqMeta.setColumnPropertiesString(graphConfiguration.getColumnPropertiesString());
seqMeta.setBatchSize(Integer.parseInt(graphConfiguration.getBatchSize()));
seqMeta.setNoDictionaryDims(graphConfiguration.getNoDictionaryDims());
seqMeta.setDimensionColumnsDataType(graphConfiguration.getDimensionColumnsDataType());
// NOTE(review): setTableName is called twice - here with the schema's table
// name and further below with graphConfiguration.getTableName(), which wins.
// Confirm which value is intended.
seqMeta.setTableName(schemaInfo.getTableName());
seqMeta.setDatabaseName(schemaInfo.getDatabaseName());
seqMeta.setComplexDelimiterLevel1(schemaInfo.getComplexDelimiterLevel1());
seqMeta.setComplexDelimiterLevel2(schemaInfo.getComplexDelimiterLevel2());
seqMeta.setCarbonmsr(graphConfiguration.getMeasuresString());
seqMeta.setCarbonProps(graphConfiguration.getPropertiesString());
seqMeta.setCarbonhier(graphConfiguration.getHiersString());
seqMeta.setCarbonhierColumn(graphConfiguration.getHierColumnString());
seqMeta.setDimensionColumnIds(graphConfiguration.getDimensionColumnIds());
seqMeta.setMetaMetaHeirSQLQueries(graphConfiguration.getDimensionSqlQuery());
seqMeta.setColumnAndTableNameColumnMapForAggString(
graphConfiguration.getColumnAndTableNameColumnMapForAgg());
seqMeta.setForgienKeyPrimayKeyString(graphConfiguration.getForgienKeyAndPrimaryKeyMapString());
// Overrides the earlier setTableName(schemaInfo.getTableName()) call above.
seqMeta.setTableName(graphConfiguration.getTableName());
seqMeta.setDateFormat(dateFormat);
seqMeta.setModifiedDimension(modifiedDimension);
seqMeta.setForeignKeyHierarchyString(graphConfiguration.getForeignKeyHierarchyString());
seqMeta.setPrimaryKeysString(graphConfiguration.getPrimaryKeyString());
seqMeta.setCarbonMeasureNames(graphConfiguration.getMeasureNamesString());
seqMeta.setHeirNadDimsLensString(graphConfiguration.getHeirAndDimLens());
seqMeta.setActualDimNames(graphConfiguration.getActualDimensionColumns());
seqMeta.setNormHiers(graphConfiguration.getNormHiers());
seqMeta.setHeirKeySize(graphConfiguration.getHeirAndKeySizeString());
seqMeta.setColumnSchemaDetails(graphConfiguration.getColumnSchemaDetails().toString());
seqMeta.setTableOption(graphConfiguration.getTableOptionWrapper().toString());
// Serialize the aggregator types as a ';'-separated (constant) list.
String[] aggType = graphConfiguration.getAggType();
StringBuilder builder = new StringBuilder();
for (int i = 0; i < aggType.length; i++) {
builder.append(aggType[i]);
builder.append(CarbonCommonConstants.SEMICOLON_SPC_CHARACTER);
}
seqMeta.setMsrAggregatorString(builder.toString());
seqMeta.setDriverClass(graphConfiguration.getDriverclass());
seqMeta.setConnectionURL(graphConfiguration.getConnectionUrl());
seqMeta.setUserName(graphConfiguration.getUsername());
seqMeta.setPassword(graphConfiguration.getPassword());
seqMeta.setMeasureDataType(graphConfiguration.getMeasureDataTypeInfo());
seqMeta.setDenormColumNames(graphConfiguration.getDenormColumns());
seqMeta.setAggregate(graphConfiguration.isAGG());
seqMeta.setTableNames(graphConfiguration.getDimensionTableNames());
StepMeta mdkeyStepMeta = new StepMeta(GraphGeneratorConstants.CARBON_SURROGATE_KEY_GENERATOR,
(StepMetaInterface) seqMeta);
mdkeyStepMeta.setStepID(GraphGeneratorConstants.CARBON_CSV_BASED_SURROAGATEGEN_ID);
// Place the step on the Kettle canvas, 120px right of the previous one.
xAxixLocation += 120;
mdkeyStepMeta.setLocation(xAxixLocation, yAxixLocation);
mdkeyStepMeta.setDraw(true);
mdkeyStepMeta.setDescription("Generate Surrogate For Table Data: "
+ GraphGeneratorConstants.CARBON_SURROGATE_KEY_GENERATOR);
return mdkeyStepMeta;
}
/**
 * Creates the MDKey generation step, which packs the surrogate keys of each
 * row into the multi-dimensional key.
 *
 * @param graphConfiguration configuration for this table
 * @return the configured MDKey step
 */
private StepMeta getMDKeyStep(GraphConfigurationInfo graphConfiguration) {
MDKeyGenStepMeta carbonMdKey = new MDKeyGenStepMeta();
carbonMdKey.setIsUseInvertedIndex(
NonDictionaryUtil.convertBooleanArrToString(graphConfiguration.getIsUseInvertedIndex()));
carbonMdKey.setPartitionID(partitionID);
carbonMdKey.setSegmentId(segmentId);
carbonMdKey.setNumberOfCores(graphConfiguration.getNumberOfCores());
// NOTE(review): setTableName is called twice - first with the graph
// configuration's name, then with schemaInfo's, which wins. This is the
// opposite order from getCarbonCSVBasedSurrogateKeyStep; confirm intended.
carbonMdKey.setTableName(graphConfiguration.getTableName());
carbonMdKey.setDatabaseName(schemaInfo.getDatabaseName());
carbonMdKey.setTableName(schemaInfo.getTableName());
carbonMdKey.setComplexTypeString(graphConfiguration.getComplexTypeString());
carbonMdKey.setAggregateLevels(CarbonDataProcessorUtil
.getLevelCardinalitiesString(graphConfiguration.getDimCardinalities(),
graphConfiguration.getDimensions()));
carbonMdKey.setNoDictionaryDimsMapping(NonDictionaryUtil
.convertBooleanArrToString(graphConfiguration.getIsNoDictionaryDimMapping()));
carbonMdKey.setMeasureCount(graphConfiguration.getMeasureCount() + "");
carbonMdKey.setColumnGroupsString(graphConfiguration.getColumnGroupsString());
carbonMdKey.setDimensionCount(graphConfiguration.getActualDims().length + "");
// Complex dimension count = number of ';'-separated entries, 0 when empty.
carbonMdKey.setComplexDimsCount(graphConfiguration.getComplexTypeString().isEmpty() ?
"0" :
graphConfiguration.getComplexTypeString()
.split(CarbonCommonConstants.SEMICOLON_SPC_CHARACTER).length + "");
carbonMdKey.setMeasureDataType(graphConfiguration.getMeasureDataTypeInfo());
carbonMdKey.setTaskNo(taskNo);
carbonMdKey.setFactTimeStamp(factTimeStamp);
StepMeta mdkeyStepMeta =
new StepMeta(GraphGeneratorConstants.MDKEY_GENERATOR + graphConfiguration.getTableName(),
(StepMetaInterface) carbonMdKey);
mdkeyStepMeta
.setName(GraphGeneratorConstants.MDKEY_GENERATOR_ID + graphConfiguration.getTableName());
mdkeyStepMeta.setStepID(GraphGeneratorConstants.MDKEY_GENERATOR_ID);
// Place the step on the Kettle canvas, 120px right of the previous one.
xAxixLocation += 120;
mdkeyStepMeta.setLocation(xAxixLocation, yAxixLocation);
mdkeyStepMeta.setDraw(true);
mdkeyStepMeta.setDescription(
"Generate MDKey For Table Data: " + GraphGeneratorConstants.MDKEY_GENERATOR
+ graphConfiguration.getTableName());
carbonMdKey.setNoDictionaryDims(graphConfiguration.getNoDictionaryDims());
return mdkeyStepMeta;
}
/**
 * Creates the select-values step that rewrites column meta data types for
 * table-input loads: every column defaults to type id 2; measure columns that
 * are not dimensions and are not flagged as string get type id 6.
 *
 * @param graphConfiguration configuration carrying the input query and measures
 * @param counter            suffix making the generated step name unique
 * @return the configured select-values step
 */
private StepMeta getSelectValueToChangeTheDataType(GraphConfigurationInfo graphConfiguration,
    int counter) {
  SelectValuesMeta selectValues = new SelectValuesMeta();
  selectValues.allocate(0, 0, 0);
  StepMeta selectValueMeta = new StepMeta(
      GraphGeneratorConstants.SELECT_REQUIRED_VALUE + "Change Dimension And Measure DataType"
          + System.currentTimeMillis() + counter, (StepMetaInterface) selectValues);
  // Place the step on the Kettle canvas, 120px right of the previous one.
  xAxixLocation += 120;
  selectValueMeta.setName("SelectValueToChangeChangeData");
  selectValueMeta.setLocation(xAxixLocation, yAxixLocation);
  selectValueMeta.setDraw(true);
  selectValueMeta.setDescription(
      "Change The Data Type For Measures: " + GraphGeneratorConstants.SELECT_REQUIRED_VALUE);
  String[] columns = parseQueryAndReturnColumns(graphConfiguration.getTableInputSqlQuery());
  Map<String, Boolean> measureDatatypeMap =
      getMeasureDatatypeMap(graphConfiguration.getMeasureDataTypeInfo());
  String[] measures = graphConfiguration.getMeasures();
  String[] dimension = graphConfiguration.getActualDimensionColumns()
      .split(CarbonCommonConstants.AMPERSAND_SPC_CHARACTER);
  SelectMetadataChange[] changeMeta = new SelectMetadataChange[columns.length];
  for (int i = 0; i < columns.length; i++) {
    SelectMetadataChange change = new SelectMetadataChange(selectValues);
    change.setName(columns[i]);
    // Default type id is 2; non-string pure measures become type id 6.
    int targetType = 2;
    if (isMeasureColumn(measures, columns[i]) && isNotDimesnionColumn(dimension, columns[i])) {
      Boolean isString = measureDatatypeMap.get(columns[i]);
      targetType = (isString != null && isString) ? 2 : 6;
    }
    change.setType(targetType);
    change.setStorageType(0);
    changeMeta[i] = change;
  }
  selectValues.setMeta(changeMeta);
  return selectValueMeta;
}
/** Returns true when {@code column} exactly matches one of the measure names. */
private boolean isMeasureColumn(String[] measures, String column) {
  for (String measure : measures) {
    if (measure.equals(column)) {
      return true;
    }
  }
  return false;
}
/** Returns true when {@code column} matches none of the dimension names. */
private boolean isNotDimesnionColumn(String[] dimension, String column) {
  for (String dim : dimension) {
    if (dim.equals(column)) {
      return false;
    }
  }
  return true;
}
/**
 * Parses the serialized measure data-type info ("name:flag" pairs joined by
 * the ampersand separator) into a name -> isString map. An absent or empty
 * input yields an empty (mutable) map.
 */
private Map<String, Boolean> getMeasureDatatypeMap(String measureDataType) {
  Map<String, Boolean> resultMap = new HashMap<String, Boolean>(1);
  if (measureDataType == null || "".equals(measureDataType)) {
    return resultMap;
  }
  for (String entry : measureDataType.split(CarbonCommonConstants.AMPERSAND_SPC_CHARACTER)) {
    String[] pair = entry.split(CarbonCommonConstants.COLON_SPC_CHARACTER);
    resultMap.put(pair[0], Boolean.valueOf(pair[1]));
  }
  return resultMap;
}
/**
* @param inputQuery
* @return
*/
/**
 * Extracts the column names between SELECT and FROM of the generated input
 * query, stripping surrounding quotes and table prefixes. The result preserves
 * order and drops duplicates. Assumes an upper-case SELECT ... FROM query as
 * produced by the schema parser.
 *
 * @param inputQuery the table-input SQL query
 * @return the distinct column names in query order
 */
private String[] parseQueryAndReturnColumns(String inputQuery) {
  Set<String> cols = new LinkedHashSet<String>();
  int columnsStart = inputQuery.indexOf("SELECT") + 6;
  String columnString = inputQuery.substring(columnsStart, inputQuery.indexOf("FROM"));
  for (String rawColumn : columnString.split(",")) {
    String column = rawColumn;
    if (column.indexOf("\"") > -1) {
      column = column.replace("\"", "");
      // Quoted columns may be table-qualified; keep only the column part.
      if (column.contains(".")) {
        column = column.split("\\.")[1];
      }
    }
    cols.add(column.replaceAll(System.getProperty("line.separator"), "").trim());
  }
  return cols.toArray(new String[cols.size()]);
}
/**
 * Creates the sort step that orders rows by their surrogate keys before MDKey
 * generation.
 *
 * @param graphConfiguration configuration for this table
 * @return the configured sort step
 * @throws GraphGeneratorException declared for signature symmetry with the
 *                                 other step factories
 */
private StepMeta getSortStep(GraphConfigurationInfo graphConfiguration)
    throws GraphGeneratorException {
  String[] actualMeasures = graphConfiguration.getMeasures();
  SortKeyStepMeta sortMeta = new SortKeyStepMeta();
  sortMeta.setPartitionID(partitionID);
  sortMeta.setSegmentId(segmentId);
  sortMeta.setTaskNo(taskNo);
  // setTabelName (sic, API typo) carries the fact table name; setTableName
  // the schema-level table name.
  sortMeta.setTabelName(graphConfiguration.getTableName());
  sortMeta.setTableName(schemaInfo.getTableName());
  sortMeta.setDatabaseName(schemaInfo.getDatabaseName());
  // Output row = all measures plus the MDKey column.
  sortMeta.setOutputRowSize(actualMeasures.length + 1 + "");
  sortMeta.setDimensionCount(graphConfiguration.getDimensions().length + "");
  // Complex dimension count = number of ';'-separated entries, 0 when empty.
  sortMeta.setComplexDimensionCount(graphConfiguration.getComplexTypeString().isEmpty() ?
      "0" :
      graphConfiguration.getComplexTypeString()
          .split(CarbonCommonConstants.SEMICOLON_SPC_CHARACTER).length + "");
  sortMeta.setIsUpdateMemberRequest(isUpdateMemberRequest + "");
  sortMeta.setMeasureCount(graphConfiguration.getMeasureCount() + "");
  sortMeta.setNoDictionaryDims(graphConfiguration.getNoDictionaryDims());
  sortMeta.setMeasureDataType(graphConfiguration.getMeasureDataTypeInfo());
  sortMeta.setNoDictionaryDimsMapping(NonDictionaryUtil
      .convertBooleanArrToString(graphConfiguration.getIsNoDictionaryDimMapping()));
  StepMeta sortStep = new StepMeta(
      GraphGeneratorConstants.SORT_KEY_AND_GROUPBY + graphConfiguration.getTableName(),
      (StepMetaInterface) sortMeta);
  // Place the step on the Kettle canvas, 120px right of the previous one.
  xAxixLocation += 120;
  sortStep.setDraw(true);
  sortStep.setLocation(xAxixLocation, yAxixLocation);
  sortStep.setStepID(GraphGeneratorConstants.SORTKEY_ID);
  sortStep.setDescription(
      "Sort Key: " + GraphGeneratorConstants.SORT_KEY + graphConfiguration.getTableName());
  sortStep.setName(
      "Sort Key: " + GraphGeneratorConstants.SORT_KEY + graphConfiguration.getTableName());
  return sortStep;
}
  /**
   * Assembles the {@link GraphConfigurationInfo} for the fact table of the given
   * load schema: dimension/measure strings, hierarchy and key metadata, SQL
   * queries, and load-tuning properties read from {@code instance}.
   *
   * Statement order matters here: {@code getDimensionString} returns a running
   * counter ({@code currentCount}) that is fed into {@code getMeasureString}
   * below, so the dimension block must run before the measure block.
   *
   * @param carbonDataLoadSchema schema describing the table being loaded
   * @return the populated fact-table graph configuration (AGG flag set to false)
   * @throws GraphGeneratorException declared by the graph-generation contract
   */
  private GraphConfigurationInfo getGraphConfigInfoForFact(
      CarbonDataLoadSchema carbonDataLoadSchema) throws GraphGeneratorException {
    GraphConfigurationInfo graphConfiguration = new GraphConfigurationInfo();
    List<CarbonDimension> dimensions = carbonDataLoadSchema.getCarbonTable()
        .getDimensionByTableName(carbonDataLoadSchema.getCarbonTable().getFactTableName());
    prepareIsUseInvertedIndex(dimensions, graphConfiguration);
    graphConfiguration
        .setDimensions(CarbonSchemaParser.getTableDimensions(dimensions, carbonDataLoadSchema));
    graphConfiguration
        .setActualDims(CarbonSchemaParser.getTableDimensions(dimensions, carbonDataLoadSchema));
    graphConfiguration
        .setColumnPropertiesString(CarbonSchemaParser.getColumnPropertiesString(dimensions));
    graphConfiguration.setComplexTypeString(CarbonSchemaParser.getComplexTypeString(dimensions));
    prepareNoDictionaryMapping(dimensions, graphConfiguration);
    graphConfiguration
        .setColumnSchemaDetails(CarbonSchemaParser.getColumnSchemaDetails(dimensions));
    graphConfiguration.setTableOptionWrapper(getTableOptionWrapper());
    String factTableName = carbonDataLoadSchema.getCarbonTable().getFactTableName();
    graphConfiguration.setTableName(factTableName);
    StringBuilder dimString = new StringBuilder();
    // getDimensionString appends into dimString and returns the running count
    // consumed by getMeasureString further down.
    int currentCount =
        CarbonSchemaParser.getDimensionString(dimensions, dimString, 0, carbonDataLoadSchema);
    StringBuilder noDictionarydimString = new StringBuilder();
    CarbonSchemaParser
        .getNoDictionaryDimensionString(dimensions, noDictionarydimString, 0, carbonDataLoadSchema);
    graphConfiguration.setNoDictionaryDims(noDictionarydimString.toString());
    String tableString =
        CarbonSchemaParser.getTableNameString(dimensions, carbonDataLoadSchema);
    String dimensionColumnIds = CarbonSchemaParser.getColumnIdString(dimensions);
    graphConfiguration.setDimensionTableNames(tableString);
    graphConfiguration.setDimensionString(dimString.toString());
    graphConfiguration.setDimensionColumnIds(dimensionColumnIds);
    graphConfiguration
        .setForignKey(CarbonSchemaParser.getForeignKeyForTables(dimensions, carbonDataLoadSchema));
    List<CarbonMeasure> measures = carbonDataLoadSchema.getCarbonTable()
        .getMeasureByTableName(carbonDataLoadSchema.getCarbonTable().getFactTableName());
    graphConfiguration
        .setMeasuresString(CarbonSchemaParser.getMeasureString(measures, currentCount));
    graphConfiguration
        .setHiersString(CarbonSchemaParser.getHierarchyString(dimensions, carbonDataLoadSchema));
    graphConfiguration.setHierColumnString(
        CarbonSchemaParser.getHierarchyStringWithColumnNames(dimensions, carbonDataLoadSchema));
    graphConfiguration.setMeasureUniqueColumnNamesString(
        CarbonSchemaParser.getMeasuresUniqueColumnNamesString(measures));
    graphConfiguration.setForeignKeyHierarchyString(CarbonSchemaParser
        .getForeignKeyHierarchyString(dimensions, carbonDataLoadSchema, factTableName));
    graphConfiguration.setConnectionName("target");
    graphConfiguration.setHeirAndDimLens(
        CarbonSchemaParser.getHeirAndCardinalityString(dimensions, carbonDataLoadSchema));
    //setting dimension store types
    graphConfiguration.setColumnGroupsString(CarbonSchemaParser.getColumnGroups(dimensions));
    graphConfiguration.setPrimaryKeyString(
        CarbonSchemaParser.getPrimaryKeyString(dimensions, carbonDataLoadSchema));
    graphConfiguration
        .setDenormColumns(CarbonSchemaParser.getDenormColNames(dimensions, carbonDataLoadSchema));
    graphConfiguration.setLevelAnddataType(
        CarbonSchemaParser.getLevelAndDataTypeMapString(dimensions, carbonDataLoadSchema));
    graphConfiguration.setForgienKeyAndPrimaryKeyMapString(CarbonSchemaParser
        .getForeignKeyAndPrimaryKeyMapString(carbonDataLoadSchema.getDimensionRelationList()));
    graphConfiguration.setMdkeySize(CarbonSchemaParser.getMdkeySizeForFact(dimensions));
    // De-duplicate measure column names, then mark every measure as type 'n'.
    Set<String> measureColumn = new HashSet<String>(measures.size());
    for (int i = 0; i < measures.size(); i++) {
      measureColumn.add(measures.get(i).getColName());
    }
    char[] type = new char[measureColumn.size()];
    Arrays.fill(type, 'n');
    graphConfiguration.setType(type);
    graphConfiguration.setMeasureCount(measureColumn.size() + "");
    graphConfiguration.setHeirAndKeySizeString(
        CarbonSchemaParser.getHeirAndKeySizeMapForFact(dimensions, carbonDataLoadSchema));
    graphConfiguration.setAggType(CarbonSchemaParser.getMeasuresAggragatorArray(measures));
    graphConfiguration.setMeasureNamesString(CarbonSchemaParser.getMeasuresNamesString(measures));
    graphConfiguration
        .setActualDimensionColumns(CarbonSchemaParser.getActualDimensions(dimensions));
    graphConfiguration
        .setDimensionColumnsDataType(CarbonSchemaParser.getDimensionsDataTypes(dimensions));
    //graphConfiguration.setNormHiers(CarbonSchemaParser.getNormHiers(table, schema));
    graphConfiguration.setMeasureDataTypeInfo(CarbonSchemaParser.getMeasuresDataType(measures));
    graphConfiguration.setStoreLocation(
        this.databaseName + '/' + carbonDataLoadSchema.getCarbonTable().getFactTableName());
    // Load-tuning knobs, read from the properties instance with defaults.
    graphConfiguration.setBlockletSize(
        (instance.getProperty("com.huawei.unibi.carbon.blocklet.size", DEFAUL_BLOCKLET_SIZE)));
    graphConfiguration.setMaxBlockletInFile(
        (instance.getProperty("carbon.max.blocklet.in.file", DEFAULE_MAX_BLOCKLET_IN_FILE)));
    graphConfiguration.setNumberOfCores(
        (instance.getProperty(CarbonCommonConstants.NUM_CORES_LOADING, DEFAULT_NUMBER_CORES)));
    // check quotes required in query or Not
    boolean isQuotesRequired = true;
    String quote = CarbonSchemaParser.QUOTES;
    graphConfiguration.setTableInputSqlQuery(CarbonSchemaParser
        .getTableInputSQLQuery(dimensions, measures,
            carbonDataLoadSchema.getCarbonTable().getFactTableName(), isQuotesRequired,
            carbonDataLoadSchema));
    graphConfiguration
        .setBatchSize((instance.getProperty("carbon.batch.size", DEFAULT_BATCH_SIZE)));
    graphConfiguration.setSortSize((instance.getProperty("carbon.sort.size", DEFAULT_SORT_SIZE)));
    graphConfiguration.setDimensionSqlQuery(CarbonSchemaParser
        .getDimensionSQLQueries(dimensions, carbonDataLoadSchema, isQuotesRequired, quote));
    graphConfiguration.setMetaHeirString(
        CarbonSchemaParser.getMetaHeirString(dimensions, carbonDataLoadSchema.getCarbonTable()));
    graphConfiguration
        .setDimCardinalities(CarbonSchemaParser.getCardinalities(dimensions, carbonDataLoadSchema));
    graphConfiguration.setMeasures(CarbonSchemaParser.getMeasures(measures));
    // This configuration describes the fact table, not an aggregate table.
    graphConfiguration.setAGG(false);
    return graphConfiguration;
  }
  /**
   * Returns the shared {@link TableOptionWrapper} instance, populated with the
   * schema's serialization-null-format and bad-records logger options.
   * (The accessor name suggests a singleton — each call re-applies the options
   * to the same shared instance.)
   *
   * @return the configured table option wrapper
   */
  private TableOptionWrapper getTableOptionWrapper() {
    TableOptionWrapper tableOptionWrapper = TableOptionWrapper.getTableOptionWrapperInstance();
    tableOptionWrapper.setTableOption(schemaInfo.getSerializationNullFormat());
    tableOptionWrapper.setTableOption(schemaInfo.getBadRecordsLoggerEnable());
    tableOptionWrapper.setTableOption(schemaInfo.getBadRecordsLoggerAction());
    return tableOptionWrapper;
  }
  /**
   * @return the {@link CarbonTable} held by this generator's load schema
   */
  public CarbonTable getTable() {
    return carbonDataLoadSchema.getCarbonTable();
  }
/**
* Preparing the boolean [] to map whether the dimension is no Dictionary or not.
*
* @param dims
* @param graphConfig
*/
private void prepareNoDictionaryMapping(List<CarbonDimension> dims,
GraphConfigurationInfo graphConfig) {
List<Boolean> noDictionaryMapping = new ArrayList<Boolean>();
for (CarbonDimension dimension : dims) {
// for complex type need to break the loop
if (dimension.getNumberOfChild() > 0) {
break;
}
if (!dimension.getEncoder().contains(Encoding.DICTIONARY)) {
noDictionaryMapping.add(true);
//NoDictionaryMapping[index] = true;
} else {
noDictionaryMapping.add(false);
}
}
graphConfig.setIsNoDictionaryDimMapping(
noDictionaryMapping.toArray(new Boolean[noDictionaryMapping.size()]));
}
/**
* Preparing the boolean [] to map whether the dimension use inverted index or not.
*
* @param dims
* @param graphConfig
*/
private void prepareIsUseInvertedIndex(List<CarbonDimension> dims,
GraphConfigurationInfo graphConfig) {
List<Boolean> isUseInvertedIndexList = new ArrayList<Boolean>();
for (CarbonDimension dimension : dims) {
if (dimension.isUseInvertedIndex()) {
isUseInvertedIndexList.add(true);
} else {
isUseInvertedIndexList.add(false);
}
}
graphConfig.setIsUseInvertedIndex(
isUseInvertedIndexList.toArray(new Boolean[isUseInvertedIndexList.size()]));
}
}
| |
package kademlia.operation;
import kademlia.message.Receiver;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import kademlia.JKademliaNode;
import kademlia.dht.GetParameter;
import kademlia.KadConfiguration;
import kademlia.KadServer;
import kademlia.dht.JKademliaStorageEntry;
import kademlia.dht.KademliaStorageEntry;
import kademlia.exceptions.ContentNotFoundException;
import kademlia.exceptions.RoutingException;
import kademlia.exceptions.UnknownMessageException;
import kademlia.message.ContentLookupMessage;
import kademlia.message.ContentMessage;
import kademlia.message.Message;
import kademlia.message.NodeReplyMessage;
import kademlia.node.KeyComparator;
import kademlia.node.Node;
import kademlia.util.RouteLengthChecker;
/**
 * Looks up a specified identifier and returns the value associated with it.
 *
 * Thread-safety: the public entry points ({@code execute}, {@code receive},
 * {@code timeout}) are synchronized on this operation instance.
 *
 * @author Joshua Kissoon
 * @since 20140226
 */
public class ContentLookupOperation implements Operation, Receiver
{

    /* Constants: per-node query state tracked in the nodes map */
    private static final Byte UNASKED = (byte) 0x00;
    private static final Byte AWAITING = (byte) 0x01;
    private static final Byte ASKED = (byte) 0x02;
    private static final Byte FAILED = (byte) 0x03;

    private final KadServer server;
    private final JKademliaNode localNode;
    private JKademliaStorageEntry contentFound = null;
    private final KadConfiguration config;

    private final ContentLookupMessage lookupMessage;

    private boolean isContentFound;

    /* All nodes seen so far, keyed by closeness to the lookup key */
    private final SortedMap<Node, Byte> nodes;

    /* Tracks messages in transit and awaiting reply */
    private final Map<Integer, Node> messagesTransiting;

    /* Used to sort nodes by distance to the lookup key */
    private final Comparator<Node> comparator;

    /* Statistical information */
    private final RouteLengthChecker routeLengthChecker;

    {
        messagesTransiting = new HashMap<>();
        isContentFound = false;
        routeLengthChecker = new RouteLengthChecker();
    }

    /**
     * @param server    Server used to send lookup messages and register this receiver
     * @param localNode The local node on whose behalf the lookup runs
     * @param params    The parameters to search for the content which we need to find
     * @param config    Operation settings (timeout, concurrency, K)
     */
    public ContentLookupOperation(KadServer server, JKademliaNode localNode, GetParameter params, KadConfiguration config)
    {
        /* Construct our lookup message */
        this.lookupMessage = new ContentLookupMessage(localNode.getNode(), params);

        this.server = server;
        this.localNode = localNode;
        this.config = config;

        /*
         * We initialize a TreeMap to store nodes.
         * This map will be sorted by which nodes are closest to the lookupId.
         */
        this.comparator = new KeyComparator(params.getKey());
        this.nodes = new TreeMap<>(this.comparator);
    }

    /**
     * Runs the lookup: seeds the candidate set from the routing table, then
     * polls {@link #askNodesorFinish()} until the operation finishes or the
     * configured timeout expires.
     *
     * @throws java.io.IOException
     * @throws kademlia.exceptions.RoutingException
     */
    @Override
    public synchronized void execute() throws IOException, RoutingException
    {
        try
        {
            /* Set the local node as already asked */
            nodes.put(this.localNode.getNode(), ASKED);

            /*
             * We add all nodes here instead of the K-Closest because there may be the case that the K-Closest are offline
             * - The operation takes care of looking at the K-Closest.
             */
            List<Node> allNodes = this.localNode.getRoutingTable().getAllNodes();
            this.addNodes(allNodes);

            /* Also add the initial set of nodes to the routeLengthChecker */
            this.routeLengthChecker.addInitialNodes(allNodes);

            /*
             * If we haven't found the requested amount of content as yet,
             * keep trying until config.operationTimeout() time has expired.
             */
            int totalTimeWaited = 0;
            int timeInterval = 10;     // We re-check every n milliseconds
            while (totalTimeWaited < this.config.operationTimeout())
            {
                if (!this.askNodesorFinish() && !isContentFound)
                {
                    wait(timeInterval);
                    totalTimeWaited += timeInterval;
                }
                else
                {
                    break;
                }
            }
        }
        catch (InterruptedException e)
        {
            /* Preserve the interrupt status for callers further up the stack */
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }

    /**
     * Add nodes from this list to the set of nodes to lookup
     *
     * @param list The list from which to add nodes
     */
    public void addNodes(List<Node> list)
    {
        for (Node o : list)
        {
            /* If this node is not in the list, add the node */
            if (!nodes.containsKey(o))
            {
                nodes.put(o, UNASKED);
            }
        }
    }

    /**
     * Asks some of the K closest nodes seen but not yet queried.
     * Assures that no more than DefaultConfiguration.CONCURRENCY messages are in transit at a time
     *
     * This method should be called every time a reply is received or a timeout occurs.
     *
     * If all K closest nodes have been asked and there are no messages in transit,
     * the algorithm is finished.
     *
     * @return <code>true</code> if finished OR <code>false</code> otherwise
     */
    private boolean askNodesorFinish() throws IOException
    {
        /* If >= CONCURRENCY nodes are in transit, don't do anything */
        if (this.config.maxConcurrentMessagesTransiting() <= this.messagesTransiting.size())
        {
            return false;
        }

        /* Get unqueried nodes among the K closest seen that have not FAILED */
        List<Node> unasked = this.closestNodesNotFailed(UNASKED);

        if (unasked.isEmpty() && this.messagesTransiting.isEmpty())
        {
            /* We have no unasked nodes nor any messages in transit, we're finished! */
            return true;
        }

        /* Sort nodes according to criteria */
        Collections.sort(unasked, this.comparator);

        /*
         * Send messages to nodes in the list;
         * making sure that no more than CONCURRENCY messages are in transit.
         */
        for (int i = 0; (this.messagesTransiting.size() < this.config.maxConcurrentMessagesTransiting()) && (i < unasked.size()); i++)
        {
            Node n = unasked.get(i);

            int comm = server.sendMessage(n, lookupMessage, this);

            this.nodes.put(n, AWAITING);
            this.messagesTransiting.put(comm, n);
        }

        /* We're not finished as yet, return false */
        return false;
    }

    /**
     * Find The K closest nodes to the target lookupId given that have not FAILED.
     * From those K, get those that have the specified status
     *
     * @param status The status of the nodes to return
     *
     * @return A List of the closest nodes
     */
    private List<Node> closestNodesNotFailed(Byte status)
    {
        List<Node> closestNodes = new ArrayList<>(this.config.k());
        int remainingSpaces = this.config.k();

        /* nodes is sorted by closeness, so the first K non-failed entries are the K closest */
        for (Map.Entry<Node, Byte> e : this.nodes.entrySet())
        {
            if (!FAILED.equals(e.getValue()))
            {
                if (status.equals(e.getValue()))
                {
                    /* We got one with the required status, now add it */
                    closestNodes.add(e.getKey());
                }

                if (--remainingSpaces == 0)
                {
                    break;
                }
            }
        }

        return closestNodes;
    }

    /**
     * Handles a reply: either the content itself (finishes the lookup) or a
     * list of closer nodes to continue querying.
     *
     * @param incoming The received message
     * @param comm     Communication id the reply belongs to
     */
    @Override
    public synchronized void receive(Message incoming, int comm) throws IOException, RoutingException
    {
        if (this.isContentFound)
        {
            return;
        }

        if (incoming instanceof ContentMessage)
        {
            /* The reply received is a content message with the required content, take it in */
            ContentMessage msg = (ContentMessage) incoming;

            /* Add the origin node to our routing table */
            this.localNode.getRoutingTable().insert(msg.getOrigin());

            /* Get the Content and check if it satisfies the required parameters */
            JKademliaStorageEntry content = msg.getContent();
            this.contentFound = content;
            this.isContentFound = true;
        }
        else
        {
            /* The reply received is a NodeReplyMessage with nodes closest to the content needed */
            NodeReplyMessage msg = (NodeReplyMessage) incoming;

            /* Add the origin node to our routing table */
            Node origin = msg.getOrigin();
            this.localNode.getRoutingTable().insert(origin);

            /* Set that we've completed ASKing the origin node */
            this.nodes.put(origin, ASKED);

            /* Remove this msg from messagesTransiting since it's completed now */
            this.messagesTransiting.remove(comm);

            /* Add the received nodes to the routeLengthChecker */
            this.routeLengthChecker.addNodes(msg.getNodes(), origin);

            /* Add the received nodes to our nodes list to query */
            this.addNodes(msg.getNodes());
            this.askNodesorFinish();
        }
    }

    /**
     * A node does not respond or a packet was lost, we set this node as failed
     *
     * @param comm
     *
     * @throws java.io.IOException
     */
    @Override
    public synchronized void timeout(int comm) throws IOException
    {
        /* Get the node associated with this communication (autoboxing; the
         * deprecated new Integer(int) constructor is unnecessary here) */
        Node n = this.messagesTransiting.get(comm);

        if (n == null)
        {
            throw new UnknownMessageException("Unknown comm: " + comm);
        }

        /* Mark this node as failed and inform the routing table that it's unresponsive */
        this.nodes.put(n, FAILED);
        this.localNode.getRoutingTable().setUnresponsiveContact(n);
        this.messagesTransiting.remove(comm);

        this.askNodesorFinish();
    }

    /**
     * @return Whether the content was found or not.
     */
    public boolean isContentFound()
    {
        return this.isContentFound;
    }

    /**
     * @return The list of all content found during the lookup operation
     *
     * @throws kademlia.exceptions.ContentNotFoundException
     */
    public JKademliaStorageEntry getContentFound() throws ContentNotFoundException
    {
        if (this.isContentFound)
        {
            return this.contentFound;
        }
        else
        {
            throw new ContentNotFoundException("No Value was found for the given key.");
        }
    }

    /**
     * @return How many hops it took in order to get to the content.
     */
    public int routeLength()
    {
        return this.routeLengthChecker.getRouteLength();
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for GlobalOperations.Wait. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.WaitGlobalOperationRequest}
*/
public final class WaitGlobalOperationRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.WaitGlobalOperationRequest)
WaitGlobalOperationRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use WaitGlobalOperationRequest.newBuilder() to construct.
private WaitGlobalOperationRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private WaitGlobalOperationRequest() {
operation_ = "";
project_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new WaitGlobalOperationRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private WaitGlobalOperationRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 416721722:
{
java.lang.String s = input.readStringRequireUtf8();
operation_ = s;
break;
}
case 1820481738:
{
java.lang.String s = input.readStringRequireUtf8();
project_ = s;
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_WaitGlobalOperationRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_WaitGlobalOperationRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.WaitGlobalOperationRequest.class,
com.google.cloud.compute.v1.WaitGlobalOperationRequest.Builder.class);
}
public static final int OPERATION_FIELD_NUMBER = 52090215;
private volatile java.lang.Object operation_;
/**
*
*
* <pre>
* Name of the Operations resource to return.
* </pre>
*
* <code>string operation = 52090215 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The operation.
*/
@java.lang.Override
public java.lang.String getOperation() {
java.lang.Object ref = operation_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
operation_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name of the Operations resource to return.
* </pre>
*
* <code>string operation = 52090215 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for operation.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOperationBytes() {
java.lang.Object ref = operation_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
operation_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PROJECT_FIELD_NUMBER = 227560217;
private volatile java.lang.Object project_;
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
@java.lang.Override
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(operation_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 52090215, operation_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(operation_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(52090215, operation_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.WaitGlobalOperationRequest)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.WaitGlobalOperationRequest other =
(com.google.cloud.compute.v1.WaitGlobalOperationRequest) obj;
if (!getOperation().equals(other.getOperation())) return false;
if (!getProject().equals(other.getProject())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + OPERATION_FIELD_NUMBER;
hash = (53 * hash) + getOperation().hashCode();
hash = (37 * hash) + PROJECT_FIELD_NUMBER;
hash = (53 * hash) + getProject().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.WaitGlobalOperationRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.WaitGlobalOperationRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.WaitGlobalOperationRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.WaitGlobalOperationRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.WaitGlobalOperationRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.WaitGlobalOperationRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.WaitGlobalOperationRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.WaitGlobalOperationRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.WaitGlobalOperationRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.WaitGlobalOperationRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.WaitGlobalOperationRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.WaitGlobalOperationRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.WaitGlobalOperationRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A request message for GlobalOperations.Wait. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.WaitGlobalOperationRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.WaitGlobalOperationRequest)
com.google.cloud.compute.v1.WaitGlobalOperationRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_WaitGlobalOperationRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_WaitGlobalOperationRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.WaitGlobalOperationRequest.class,
com.google.cloud.compute.v1.WaitGlobalOperationRequest.Builder.class);
}
// Construct using com.google.cloud.compute.v1.WaitGlobalOperationRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
// Generated protobuf builder plumbing for WaitGlobalOperationRequest (see the
// @@protoc_insertion_point markers in this class) — do not hand-edit; changes
// belong in the .proto source, then regenerate.

// Resets every field to its proto3 default (empty string for both fields).
public Builder clear() {
  super.clear();
  operation_ = "";
  project_ = "";
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.compute.v1.Compute
      .internal_static_google_cloud_compute_v1_WaitGlobalOperationRequest_descriptor;
}

@java.lang.Override
public com.google.cloud.compute.v1.WaitGlobalOperationRequest getDefaultInstanceForType() {
  return com.google.cloud.compute.v1.WaitGlobalOperationRequest.getDefaultInstance();
}

// Builds and validates; throws if the partial message reports uninitialized.
@java.lang.Override
public com.google.cloud.compute.v1.WaitGlobalOperationRequest build() {
  com.google.cloud.compute.v1.WaitGlobalOperationRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

// Copies the builder's field values into a new message without validation.
@java.lang.Override
public com.google.cloud.compute.v1.WaitGlobalOperationRequest buildPartial() {
  com.google.cloud.compute.v1.WaitGlobalOperationRequest result =
      new com.google.cloud.compute.v1.WaitGlobalOperationRequest(this);
  result.operation_ = operation_;
  result.project_ = project_;
  onBuilt();
  return result;
}

@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

// Dispatches to the typed overload when the other message is the same type.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.compute.v1.WaitGlobalOperationRequest) {
    return mergeFrom((com.google.cloud.compute.v1.WaitGlobalOperationRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Field-by-field merge; proto3 strings are copied only when non-empty on the source.
public Builder mergeFrom(com.google.cloud.compute.v1.WaitGlobalOperationRequest other) {
  if (other == com.google.cloud.compute.v1.WaitGlobalOperationRequest.getDefaultInstance())
    return this;
  if (!other.getOperation().isEmpty()) {
    operation_ = other.operation_;
    onChanged();
  }
  if (!other.getProject().isEmpty()) {
    project_ = other.project_;
    onChanged();
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  return true;
}

// Parses from the wire format; on parse failure the partially-read message is
// still merged (via the finally block) before the exception propagates.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  com.google.cloud.compute.v1.WaitGlobalOperationRequest parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    parsedMessage =
        (com.google.cloud.compute.v1.WaitGlobalOperationRequest) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// Stored as Object: holds either the lazily-decoded String or the raw ByteString.
private java.lang.Object operation_ = "";
/**
 *
 *
 * <pre>
 * Name of the Operations resource to return.
 * </pre>
 *
 * <code>string operation = 52090215 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The operation.
 */
public java.lang.String getOperation() {
  java.lang.Object ref = operation_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so later reads skip the UTF-8 decode.
    operation_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 *
 *
 * <pre>
 * Name of the Operations resource to return.
 * </pre>
 *
 * <code>string operation = 52090215 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for operation.
 */
public com.google.protobuf.ByteString getOperationBytes() {
  java.lang.Object ref = operation_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString form symmetrically to getOperation().
    operation_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * Name of the Operations resource to return.
 * </pre>
 *
 * <code>string operation = 52090215 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The operation to set.
 * @return This builder for chaining.
 */
public Builder setOperation(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  operation_ = value;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Name of the Operations resource to return.
 * </pre>
 *
 * <code>string operation = 52090215 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearOperation() {
  operation_ = getDefaultInstance().getOperation();
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Name of the Operations resource to return.
 * </pre>
 *
 * <code>string operation = 52090215 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The bytes for operation to set.
 * @return This builder for chaining.
 */
public Builder setOperationBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  operation_ = value;
  onChanged();
  return this;
}
// Same lazy String/ByteString caching scheme as operation_ above.
private java.lang.Object project_ = "";
/**
 *
 *
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The project.
 */
public java.lang.String getProject() {
  java.lang.Object ref = project_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    project_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 *
 *
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for project.
 */
public com.google.protobuf.ByteString getProjectBytes() {
  java.lang.Object ref = project_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    project_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The project to set.
 * @return This builder for chaining.
 */
public Builder setProject(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  project_ = value;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearProject() {
  project_ = getDefaultInstance().getProject();
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The bytes for project to set.
 * @return This builder for chaining.
 */
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  project_ = value;
  onChanged();
  return this;
}
// Unknown-field handling is delegated entirely to GeneratedMessageV3.Builder.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.WaitGlobalOperationRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.WaitGlobalOperationRequest)
// Shared immutable default instance; all empty messages alias this singleton.
private static final com.google.cloud.compute.v1.WaitGlobalOperationRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.compute.v1.WaitGlobalOperationRequest();
}

public static com.google.cloud.compute.v1.WaitGlobalOperationRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Stateless parser singleton; delegates to the wire-format constructor.
private static final com.google.protobuf.Parser<WaitGlobalOperationRequest> PARSER =
    new com.google.protobuf.AbstractParser<WaitGlobalOperationRequest>() {
      @java.lang.Override
      public WaitGlobalOperationRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new WaitGlobalOperationRequest(input, extensionRegistry);
      }
    };

public static com.google.protobuf.Parser<WaitGlobalOperationRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<WaitGlobalOperationRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.compute.v1.WaitGlobalOperationRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*******************************************************************************
* Copyright 2013 Marcel Walch, Florian Schaub
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package de.uulm.graphicalpasswords.openpassgo;
import de.uulm.graphicalpasswords.R;
import android.os.Bundle;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.support.v4.app.NavUtils;
import android.annotation.TargetApi;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.content.SharedPreferences;
import android.os.Build;
import android.preference.PreferenceManager;
public class PassGoCreatePasswordActivity extends Activity implements PassGo {
private PatternView patternView;
private Button save;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_pass_go_create_password);
SharedPreferences sharedPref = PreferenceManager
.getDefaultSharedPreferences(this);
int length = Integer.parseInt(sharedPref
.getString("passgo_length", "6"));
patternView = (PatternView) findViewById(R.id.passgo_patternview);
patternView.setActivity((PassGo) this);
patternView.setLength(length);
save = (Button) findViewById(R.id.passgo_save);
setReady(false);
Bundle bundle = new Bundle();
bundle.putInt("length", length);
DialogFragment intro = new IntroDialogFragment();
intro.setArguments(bundle);
intro.show(getFragmentManager(), "intro");
// Show the Up button in the action bar.
setupActionBar();
}
/**
* Set up the {@link android.app.ActionBar}, if the API is available.
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
private void setupActionBar() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
getActionBar().setDisplayHomeAsUpEnabled(true);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.pass_go_create_password, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
// This ID represents the Home or Up button. In the case of this
// activity, the Up button is shown. Use NavUtils to allow users
// to navigate up one level in the application structure. For
// more details, see the Navigation pattern on Android Design:
//
// http://developer.android.com/design/patterns/navigation.html#up-vs-back
//
NavUtils.navigateUpFromSameTask(this);
return true;
}
return super.onOptionsItemSelected(item);
}
public void clearAll(View view) {
DialogFragment dialog = new DeletePatternDialogFragment();
dialog.show(getFragmentManager(), "delete");
}
public void clearAll() {
patternView.clear();
}
public void submit(View view) {
SharedPreferences sharedPref = PreferenceManager
.getDefaultSharedPreferences(this);
SharedPreferences.Editor edit = sharedPref.edit();
edit.putString("passgo_pw", patternView.getInput().toString());
edit.commit();
DialogFragment dialog = new RememberPasswordDialogFragment();
dialog.show(getFragmentManager(), "remember");
}
public static class IntroDialogFragment extends DialogFragment {
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
String message = getString(
R.string.msg_dialog_pass_go_createpw,
getArguments().getInt("length"));
builder.setTitle(R.string.title_dialog_pass_go_createpw)
.setMessage(message)
.setNeutralButton(R.string.btn_ok, new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
return builder.create();
}
}
public static class DeletePatternDialogFragment extends DialogFragment {
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
builder.setMessage(
R.string.msg_dialog_pass_go_deletepattern)
.setCancelable(false)
.setPositiveButton(R.string.btn_yes, new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int id) {
((PassGoCreatePasswordActivity) getActivity())
.clearAll();
}
})
.setNegativeButton(R.string.btn_no, new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
return builder.create();
}
}
public static class RememberPasswordDialogFragment extends DialogFragment {
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
LayoutInflater layoutInflater = (LayoutInflater) getActivity()
.getSystemService(LAYOUT_INFLATER_SERVICE);
View layout = layoutInflater.inflate(
R.layout.passgo_rememberpw_dialog,
(ViewGroup) getActivity().findViewById(
R.id.passgo_rememberpw_layout));
PatternView patternView = ((PassGoCreatePasswordActivity) getActivity()).patternView;
DialogPatternView patternViewDialog = (DialogPatternView) layout
.findViewById(R.id.passgo_rememberpw_patternView);
patternViewDialog.setFixedPath(patternView.getFixedPath());
patternViewDialog.setDotPath(patternView.getDotPath());
patternViewDialog.setOriginalDimensions(patternView.getWidth(),
patternView.getHeight());
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
builder.setView(layout)
.setTitle(R.string.title_dialog_pass_go_rememberpw)
.setPositiveButton(R.string.btn_done,
new OnClickListener() {
@Override
public void onClick(DialogInterface dialog,
int which) {
dialog.dismiss();
getActivity().finish();
}
});
return builder.create();
}
}
public void setReady(boolean ready) {
save.setClickable(ready);
save.setEnabled(ready);
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vision/v1p4beta1/image_annotator.proto
package com.google.cloud.vision.v1p4beta1;
/**
*
*
* <pre>
* Multiple async file annotation requests are batched into a single service
* call.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest}
*/
public final class AsyncBatchAnnotateFilesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest)
AsyncBatchAnnotateFilesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use AsyncBatchAnnotateFilesRequest.newBuilder() to construct.
private AsyncBatchAnnotateFilesRequest(
    com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default instance: empty immutable requests list.
private AsyncBatchAnnotateFilesRequest() {
  requests_ = java.util.Collections.emptyList();
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new AsyncBatchAnnotateFilesRequest();
}

@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}

// Wire-format constructor: reads tag/value pairs until end of stream (tag 0).
// Field 1 (tag 10) appends AsyncAnnotateFileRequest messages; anything else is
// preserved in unknownFields. The finally block freezes the list and unknown
// fields even when parsing aborts mid-stream.
private AsyncBatchAnnotateFilesRequest(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            // Lazily switch from the shared empty list to a mutable ArrayList
            // the first time a requests element is seen.
            if (!((mutable_bitField0_ & 0x00000001) != 0)) {
              requests_ =
                  new java.util.ArrayList<
                      com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest>();
              mutable_bitField0_ |= 0x00000001;
            }
            requests_.add(
                input.readMessage(
                    com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.parser(),
                    extensionRegistry));
            break;
          }
        default:
          {
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    if (((mutable_bitField0_ & 0x00000001) != 0)) {
      requests_ = java.util.Collections.unmodifiableList(requests_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
      .internal_static_google_cloud_vision_v1p4beta1_AsyncBatchAnnotateFilesRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
      .internal_static_google_cloud_vision_v1p4beta1_AsyncBatchAnnotateFilesRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest.class,
          com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest.Builder.class);
}

public static final int REQUESTS_FIELD_NUMBER = 1;
// Immutable after construction (frozen by the wire-format ctor / buildPartial).
private java.util.List<com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest> requests_;
/**
 *
 *
 * <pre>
 * Required. Individual async file annotation requests for this batch.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest>
    getRequestsList() {
  return requests_;
}
/**
 *
 *
 * <pre>
 * Required. Individual async file annotation requests for this batch.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public java.util.List<
        ? extends com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequestOrBuilder>
    getRequestsOrBuilderList() {
  return requests_;
}
/**
 *
 *
 * <pre>
 * Required. Individual async file annotation requests for this batch.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public int getRequestsCount() {
  return requests_.size();
}
/**
 *
 *
 * <pre>
 * Required. Individual async file annotation requests for this batch.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest getRequests(int index) {
  return requests_.get(index);
}
/**
 *
 *
 * <pre>
 * Required. Individual async file annotation requests for this batch.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequestOrBuilder getRequestsOrBuilder(
    int index) {
  return requests_.get(index);
}
// -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

// Serializes field 1 elements in order, then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < requests_.size(); i++) {
    output.writeMessage(1, requests_.get(i));
  }
  unknownFields.writeTo(output);
}

// Byte size of the serialized message, memoized after the first computation.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  for (int i = 0; i < requests_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, requests_.get(i));
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}

// Value equality over the requests list and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest other =
      (com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest) obj;

  if (!getRequestsList().equals(other.getRequestsList())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}

// Hash is memoized; consistent with equals() above.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getRequestsCount() > 0) {
    hash = (37 * hash) + REQUESTS_FIELD_NUMBER;
    hash = (53 * hash) + getRequestsList().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parseFrom overloads: ByteBuffer/ByteString/byte[] variants
// throw InvalidProtocolBufferException; stream variants throw IOException.
public static com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Multiple async file annotation requests are batched into a single service
* call.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest)
com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequestOrBuilder {
// Generated Builder core for AsyncBatchAnnotateFilesRequest — do not hand-edit;
// regenerate from image_annotator.proto instead.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
      .internal_static_google_cloud_vision_v1p4beta1_AsyncBatchAnnotateFilesRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
      .internal_static_google_cloud_vision_v1p4beta1_AsyncBatchAnnotateFilesRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest.class,
          com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest.Builder.class);
}

// Construct using com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}

private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
    getRequestsFieldBuilder();
  }
}

// Resets the requests field; the list path and the field-builder path are
// mutually exclusive throughout this Builder.
@java.lang.Override
public Builder clear() {
  super.clear();
  if (requestsBuilder_ == null) {
    requests_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000001);
  } else {
    requestsBuilder_.clear();
  }
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
      .internal_static_google_cloud_vision_v1p4beta1_AsyncBatchAnnotateFilesRequest_descriptor;
}

@java.lang.Override
public com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest
    getDefaultInstanceForType() {
  return com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest.getDefaultInstance();
}

@java.lang.Override
public com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest build() {
  com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

// Freezes the list (when building from the plain list) or delegates to the
// repeated-field builder, then hands the snapshot to the new message.
@java.lang.Override
public com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest buildPartial() {
  com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest result =
      new com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest(this);
  int from_bitField0_ = bitField0_;
  if (requestsBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      requests_ = java.util.Collections.unmodifiableList(requests_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.requests_ = requests_;
  } else {
    result.requests_ = requestsBuilder_.build();
  }
  onBuilt();
  return result;
}

@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest) {
    return mergeFrom((com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Merges the other message's requests; when this builder is empty it aliases
// the other's (immutable) list instead of copying.
public Builder mergeFrom(
    com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest other) {
  if (other
      == com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest.getDefaultInstance())
    return this;
  if (requestsBuilder_ == null) {
    if (!other.requests_.isEmpty()) {
      if (requests_.isEmpty()) {
        requests_ = other.requests_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureRequestsIsMutable();
        requests_.addAll(other.requests_);
      }
      onChanged();
    }
  } else {
    if (!other.requests_.isEmpty()) {
      if (requestsBuilder_.isEmpty()) {
        requestsBuilder_.dispose();
        requestsBuilder_ = null;
        requests_ = other.requests_;
        bitField0_ = (bitField0_ & ~0x00000001);
        requestsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getRequestsFieldBuilder()
                : null;
      } else {
        requestsBuilder_.addAllMessages(other.requests_);
      }
    }
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  return true;
}

@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    parsedMessage =
        (com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest)
            e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}

// Bit 0 tracks whether requests_ is a private mutable copy.
private int bitField0_;

private java.util.List<com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest> requests_ =
    java.util.Collections.emptyList();

// Copy-on-write: clones the (possibly shared) list before the first mutation.
private void ensureRequestsIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    requests_ =
        new java.util.ArrayList<com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest>(
            requests_);
    bitField0_ |= 0x00000001;
  }
}

// Non-null once nested builders are in use; then requests_ is ignored.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest,
        com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.Builder,
        com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequestOrBuilder>
    requestsBuilder_;
/**
 *
 *
 * <pre>
 * Required. Individual async file annotation requests for this batch.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public java.util.List<com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest>
    getRequestsList() {
  // Each accessor consults the field builder first when one is active.
  if (requestsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(requests_);
  } else {
    return requestsBuilder_.getMessageList();
  }
}
/**
 *
 *
 * <pre>
 * Required. Individual async file annotation requests for this batch.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public int getRequestsCount() {
  if (requestsBuilder_ == null) {
    return requests_.size();
  } else {
    return requestsBuilder_.getCount();
  }
}
/**
 *
 *
 * <pre>
 * Required. Individual async file annotation requests for this batch.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest getRequests(int index) {
  if (requestsBuilder_ == null) {
    return requests_.get(index);
  } else {
    return requestsBuilder_.getMessage(index);
  }
}
/**
 *
 *
 * <pre>
 * Required. Individual async file annotation requests for this batch.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setRequests(
    int index, com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest value) {
  if (requestsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureRequestsIsMutable();
    requests_.set(index, value);
    onChanged();
  } else {
    requestsBuilder_.setMessage(index, value);
  }
  return this;
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRequests(
int index,
com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.Builder builderForValue) {
if (requestsBuilder_ == null) {
ensureRequestsIsMutable();
requests_.set(index, builderForValue.build());
onChanged();
} else {
requestsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder addRequests(com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest value) {
if (requestsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRequestsIsMutable();
requests_.add(value);
onChanged();
} else {
requestsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder addRequests(
int index, com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest value) {
if (requestsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRequestsIsMutable();
requests_.add(index, value);
onChanged();
} else {
requestsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder addRequests(
com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.Builder builderForValue) {
if (requestsBuilder_ == null) {
ensureRequestsIsMutable();
requests_.add(builderForValue.build());
onChanged();
} else {
requestsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder addRequests(
int index,
com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.Builder builderForValue) {
if (requestsBuilder_ == null) {
ensureRequestsIsMutable();
requests_.add(index, builderForValue.build());
onChanged();
} else {
requestsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder addAllRequests(
java.lang.Iterable<? extends com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest>
values) {
if (requestsBuilder_ == null) {
ensureRequestsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, requests_);
onChanged();
} else {
requestsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearRequests() {
if (requestsBuilder_ == null) {
requests_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
requestsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder removeRequests(int index) {
if (requestsBuilder_ == null) {
ensureRequestsIsMutable();
requests_.remove(index);
onChanged();
} else {
requestsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.Builder getRequestsBuilder(
int index) {
return getRequestsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequestOrBuilder getRequestsOrBuilder(
int index) {
if (requestsBuilder_ == null) {
return requests_.get(index);
} else {
return requestsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public java.util.List<
? extends com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequestOrBuilder>
getRequestsOrBuilderList() {
if (requestsBuilder_ != null) {
return requestsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(requests_);
}
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.Builder addRequestsBuilder() {
return getRequestsFieldBuilder()
.addBuilder(
com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.getDefaultInstance());
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.Builder addRequestsBuilder(
int index) {
return getRequestsFieldBuilder()
.addBuilder(
index,
com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.getDefaultInstance());
}
/**
*
*
* <pre>
* Required. Individual async file annotation requests for this batch.
* </pre>
*
* <code>
* repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest requests = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public java.util.List<com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.Builder>
getRequestsBuilderList() {
return getRequestsFieldBuilder().getBuilderList();
}
    // Lazily creates the repeated-field builder on first use.  Ownership of the
    // element list transfers to the builder, so requests_ is nulled afterwards;
    // all accessors must go through the builder from that point on.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest,
        com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.Builder,
        com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequestOrBuilder>
        getRequestsFieldBuilder() {
      if (requestsBuilder_ == null) {
        requestsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest,
                com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.Builder,
                com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequestOrBuilder>(
                requests_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        requests_ = null;
      }
      return requestsBuilder_;
    }
    // Generated pass-throughs to the base builder's unknown-field handling.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest)
  // Eagerly created singleton default instance shared by all callers.
  private static final com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest();
  }
  public static com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser singleton; delegates to the parsing constructor.
  private static final com.google.protobuf.Parser<AsyncBatchAnnotateFilesRequest> PARSER =
      new com.google.protobuf.AbstractParser<AsyncBatchAnnotateFilesRequest>() {
        @java.lang.Override
        public AsyncBatchAnnotateFilesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new AsyncBatchAnnotateFilesRequest(input, extensionRegistry);
        }
      };
  // Static and per-instance accessors for the shared parser and default instance.
  public static com.google.protobuf.Parser<AsyncBatchAnnotateFilesRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<AsyncBatchAnnotateFilesRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
package edu.washington.nsre.crawl;
import com.google.gson.Gson;
import de.l3s.boilerpipe.extractors.ArticleExtractor;
import org.xml.sax.InputSource;
import java.io.*;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
class BroilNews {
    // Flattened article record: metadata copied from the crawled page plus the
    // extracted main text, which is filled in later by the caller.
    public Date date;
    public String title;
    public String querytitle;
    public String url;
    public String desc;
    public String text;
    public String source;

    public BroilNews(HtmlNews hn) {
        // Copy the descriptive metadata; 'text' is assigned after extraction.
        this.url = hn.url;
        this.source = hn.source;
        this.date = hn.date;
        this.title = hn.title;
        this.querytitle = hn.querytitle;
        this.desc = hn.desc;
    }
}
class HtmlNews {
    // Intermediate record: Bing search metadata plus the raw downloaded HTML
    // ('html' is assigned by the caller once the page has been fetched).
    Date date;
    String title;
    String querytitle;
    String url;
    String desc;
    String html;
    String source;

    public HtmlNews(BingNews bn) {
        this.url = bn.url;
        this.source = bn.source;
        this.date = bn.date;
        this.title = bn.title;
        this.querytitle = bn.querytitle;
        this.desc = bn.desc;
    }
}
public class CrawlHtmlWithBroil {

    /**
     * Normalizes a news headline for duplicate detection:
     * drops everything after the last dash (typically a trailing "- source"),
     * drops a one-word leading "Source:" prefix, and truncates at an embedded
     * "http" URL.  Surrounding whitespace is intentionally left untouched to
     * preserve the original matching behaviour.
     */
    public static String normTitle(String title) {
        String ret = title;
        // Strip a trailing attribution such as " - CNN".
        int dash = ret.lastIndexOf("-");
        if (dash > 0) {
            ret = ret.substring(0, dash);
        }
        // Strip a single-word leading prefix such as "CNN:".
        int colon = ret.indexOf(":");
        if (colon > 0) {
            String head = ret.substring(0, colon);
            if (!head.contains(" ")) {
                ret = ret.substring(colon + 1);
            }
        }
        // Truncate at an inlined URL.
        int http = ret.indexOf("http");
        if (http > 0) {
            ret = ret.substring(0, http);
        }
        return ret;
    }

    /** Closes a stream if non-null, swallowing any close error (best effort). */
    private static void closeQuietly(Closeable c) {
        if (c != null) {
            try {
                c.close();
            } catch (IOException ignored) {
                // nothing useful to do on close failure
            }
        }
    }

    /**
     * Endless crawl loop: every hour, reads BingNews JSON lines from
     * {root}/bing, downloads the HTML of every not-yet-crawled URL, extracts
     * the article text with boilerpipe, appends BroilNews JSON lines to
     * {root}/broil/{yyyyMMdd} and records crawled URLs under
     * {root}/index_html/{yyyyMMdd}.
     *
     * args[0] = root directory; args[1] = optional restart flag (parsed but
     * currently unused -- TODO act on it or drop it).
     */
    public static void main(String[] args) {
        String root = args[0];
        boolean restart = false; // TODO: parsed but never acted upon
        if (args.length > 1) {
            restart = Boolean.parseBoolean(args[1]);
        }
        Gson gson = new Gson();
        String readfrom = "bing";
        String outputto = "broil";
        String index = "index_html";
        File outRoot = new File(root + File.separator + outputto);
        if (!outRoot.exists()) {
            outRoot.mkdir();
        }
        // Seed the de-duplication set from all previously written index files.
        HashSet<String> crawledurl = new HashSet<String>();
        List<String> already_list = new ArrayList<String>();
        Util.leafFiles(root + File.separator + index, already_list);
        for (String f : already_list) {
            BufferedReader br = null;
            try {
                br = new BufferedReader(
                        new InputStreamReader(new FileInputStream(f), "utf-8"));
                String l;
                while ((l = br.readLine()) != null) {
                    crawledurl.add(l);
                }
            } catch (Exception e) {
                // Best effort: an unreadable index file only weakens de-duplication.
            } finally {
                closeQuietly(br);
            }
        }
        System.err.println("crawledurl\t" + crawledurl.size());
        while (true) {
            int target = 0;
            try {
                System.err.println("run again!");
                Date date = new Date();
                SimpleDateFormat dateformatYYYYMMDD = new SimpleDateFormat(
                        "yyyyMMdd");
                String today = dateformatYYYYMMDD.format(date);
                String outputdir = root + File.separator + outputto
                        + File.separator + today;
                String output_index_dir = root + File.separator + index
                        + File.separator + today;
                if (!new File(outputdir).exists()) {
                    new File(outputdir).mkdirs();
                }
                if (!new File(output_index_dir).exists()) {
                    new File(output_index_dir).mkdirs();
                }
                // Collect every not-yet-crawled BingNews record.
                List<String> inputs = new ArrayList<String>();
                Util.leafFiles(root + File.separator + readfrom, inputs);
                List<BingNews> buffer = new ArrayList<BingNews>();
                for (String f : inputs) {
                    BufferedReader br = null;
                    try {
                        br = new BufferedReader(
                                new InputStreamReader(new FileInputStream(f),
                                        "utf-8"));
                        String l;
                        while ((l = br.readLine()) != null) {
                            try {
                                BingNews on = gson.fromJson(l, BingNews.class);
                                if (!crawledurl.contains(on.url)) {
                                    buffer.add(on);
                                    crawledurl.add(on.url);
                                    target++;
                                }
                            } catch (Exception e) {
                                // Skip malformed JSON lines.
                            }
                        }
                    } catch (Exception e) {
                        // Skip unreadable input files.
                    } finally {
                        closeQuietly(br);
                    }
                }
                System.err.println("buffer size:" + buffer.size());
                BufferedWriter bw = null;
                BufferedWriter bw_index = null;
                try {
                    for (int i = 0; i < buffer.size(); i++) {
                        if (i % 10000 == 0) {
                            // Roll over to a fresh output/index file pair.
                            // BUGFIX: close BOTH old writers (the index writer
                            // previously leaked) and take a fresh timestamp so
                            // a second rollover does not truncate the file
                            // written by the first one.
                            closeQuietly(bw);
                            closeQuietly(bw_index);
                            long stamp = System.currentTimeMillis();
                            bw = new BufferedWriter(new OutputStreamWriter(
                                    new FileOutputStream(outputdir + File.separator
                                            + stamp), "utf-8"));
                            bw_index = new BufferedWriter(new OutputStreamWriter(
                                    new FileOutputStream(output_index_dir
                                            + File.separator + stamp),
                                    "utf-8"));
                        }
                        try {
                            BingNews on = buffer.get(i);
                            System.err.println(on.url + "\t" + on.title + "\t"
                                    + (new Date()).toString() + "\t" + i + "\t"
                                    + target);
                            HtmlNews hn = new HtmlNews(on);
                            try {
                                hn.html = Util.readUrl(hn.url);
                                BroilNews bn = new BroilNews(hn);
                                crawledurl.add(on.url);
                                String html = hn.html;
                                String text = "";
                                try {
                                    InputStream is = new ByteArrayInputStream(
                                            html.getBytes("utf-8"));
                                    text = ArticleExtractor.INSTANCE
                                            .getText(new InputSource(is));
                                } catch (Exception e) {
                                    e.printStackTrace();
                                }
                                bn.text = text;
                                bw.write(gson.toJson(bn) + "\n");
                                bw_index.write(hn.url + "\n");
                                System.err.println(bn.title + "\t"
                                        + (new Date()).toString());
                                bw.flush();
                                // BUGFIX: flush the index too, otherwise crawled
                                // URLs are lost on a crash and re-downloaded.
                                bw_index.flush();
                            } catch (Exception e) {
                                hn.html = "";
                                // Download failed; URL stays eligible for retry.
                            }
                        } catch (Exception e) {
                            // Never let one bad record abort the whole batch.
                        }
                    }
                } finally {
                    // BUGFIX: always release the current writer pair at the end
                    // of a cycle (previously left open / commented out).
                    closeQuietly(bw);
                    closeQuietly(bw_index);
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            try {
                System.err.println("start sleeping!\t" + target);
                Thread.sleep(3600 * 1000);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
| |
/* JAT: Java Astrodynamics Toolkit
*
* Copyright (c) 2003 The JAT Project. All rights reserved.
*
* This file is part of JAT. JAT is free software; you can
* redistribute it and/or modify it under the terms of the
* NASA Open Source Agreement, version 1.3 or later.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* NASA Open Source Agreement for more details.
*
* You should have received a copy of the NASA Open Source Agreement
* along with this program; if not, write to the NASA Goddard
* Space Flight Center at opensource@gsfc.nasa.gov.
*
*
* File Created on Sep 25, 2003
*/
package jat.gps.filters.absolute;
import jat.matvec.data.*;
import jat.alg.estimators.*;
import jat.alg.integrators.*;
//import jat.cm.*;
import jat.gps.*;
import jat.gps.filters.*;
//import jat.gps_ins.*;
import jat.forces.*;
import jat.timeRef.*;
/**
* The GPS_EOM provides the equations of motion for the
* GPS-only EKF without a thruster model.
*
* @author <a href="mailto:dgaylor@users.sourceforge.net">Dave Gaylor
* @version 1.0
*/
public class GPS_EOM implements Derivatives {

	private IonoModel iono;
	private ReceiverFilterModel rcvr;
	// MJD epoch corresponding to t = 0 in derivs().
	private double t_mjd0 = 51969.0;
	// Vehicle drag-model constants: mass, area, drag coefficient.
	private double stsMass = 104328.0;
	private double stsArea = 454.4;
	private double stsCd = 2.0;
	private CIRA_ExponentialDrag sts_ced = new CIRA_ExponentialDrag(this.stsCd, this.stsArea, this.stsMass);
	private URE_Model ure;
	private int nsv;
	private int numberOfStates;

	/**
	 * Constructor
	 * @param nsat number of GPS SVs
	 * @param nstates number of states
	 * @param io IonoModel
	 * @param rc ReceiverFilterModel
	 * @param ur URE_Model
	 */
	public GPS_EOM(int nsat, int nstates, IonoModel io, ReceiverFilterModel rc, URE_Model ur) {
		this.nsv = nsat;
		this.numberOfStates = nstates;
		this.ure = ur;
		this.iono = io;
		this.rcvr = rc;
	}

	/**
	 * Computes the derivatives of the filter state and state transition matrix.
	 * State layout: [0-2] position, [3-5] velocity, [6-7] receiver clock
	 * bias/drift, [8] drag parameter, [9] iono delay, [10..10+nsv-1] URE
	 * states, followed by the n x n STM elements in row-major order.
	 *
	 * @see jat.alg.integrators.Derivatives#derivs(double, double[])
	 */
	public double[] derivs(double t, double[] x) {
		int n = this.numberOfStates;
		VectorN out = new VectorN(x.length);
		// strip out the incoming data
		VectorN r = new VectorN(x[0], x[1], x[2]);
		VectorN v = new VectorN(x[3], x[4], x[5]);
		EstSTM stm = new EstSTM(x, n);
		Matrix phi = stm.phi();
		// receiver clock bias and drift
		VectorN clock1 = new VectorN(2);
		clock1.set(0, x[6]);
		clock1.set(1, x[7]);
		double stsdrag = x[8];
		double del_iono = x[9]; // iono state
		// incoming URE states, one per tracked SV
		VectorN urevec = new VectorN(this.nsv);
		for (int i = 0; i < this.nsv; i++) {
			urevec.set(i, x[i+10]);
		}
		// position derivatives: rdot = v
		out.set(0, v);
		// velocity derivatives: J2 gravity plus scaled exponential drag
		J2Gravity j2chaser = new J2Gravity(r);
		VectorN g = j2chaser.local_gravity();
		double Mjd = this.t_mjd0 + t/86400.0;
		EarthRef ref = new EarthRef(Mjd);
		sts_ced.compute(ref, r, v);
		VectorN sts_drag0 = sts_ced.dragAccel();
		double dragfactor1 = 1.0 + stsdrag;
		VectorN drag = sts_drag0.times(dragfactor1);
		VectorN vdot = g.plus(drag);
		out.set(3, vdot);
		// GPS receiver clock model derivatives
		VectorN bcdot = rcvr.biasProcess(clock1);
		out.set(6, bcdot);
		// drag parameter derivative
		double dragdot = DragProcessModel.dragProcess(stsdrag);
		out.set(8, dragdot);
		// iono derivative
		double ionodot = iono.ionoProcess(del_iono);
		out.set(9, ionodot);
		// ure derivatives
		VectorN uredot = ure.ureProcess(urevec);
		out.set(10, uredot);
		// integer ambiguity derivs = 0
		// Build the Jacobian A used to propagate the STM.
		Matrix A = new Matrix(n, n);
		// position rows: d(rdot)/dv = identity (Matrix(3) is used as the
		// 3x3 identity here, as the rdot = v relation requires)
		Matrix eye = new Matrix(3);
		A.setMatrix(0, 3, eye);
		// velocity rows
		Matrix G = j2chaser.gravityGradient();
		Matrix D = sts_ced.partialR().times(dragfactor1);
		Matrix GD = G.plus(D);
		A.setMatrix(3, 0, GD);
		D = sts_ced.partialV().times(dragfactor1);
		A.setMatrix(3, 3, D);
		// partials of drag accel wrt drag state
		A.set(3, 8, sts_drag0.x[0]);
		A.set(4, 8, sts_drag0.x[1]);
		A.set(5, 8, sts_drag0.x[2]);
		// clock drift row
		A.set(6, 7, 1.0);
		// drag row
		double tau_drag = -1.0/DragProcessModel.correlationTime;
		A.set(8, 8, tau_drag);
		// iono row
		double tau_iono = -1.0/iono.correlationTime;
		A.set(9, 9, tau_iono);
		// ure rows: scaled nsv x nsv identity block.
		// BUGFIX: the original scaled the 3x3 'eye' here, leaving 'bigeye'
		// unused and the URE block of A wrong whenever nsv != 3.
		Matrix bigeye = new Matrix(this.nsv);
		Matrix tau_ure = bigeye.times(-1.0/URE_Model.correlationTime);
		A.setMatrix(10, 10, tau_ure);
		// phi derivatives, appended row-major after the state derivatives
		Matrix phidot = A.times(phi);
		int k = n;
		for (int i = 0; i < n; i++) {
			for (int j = 0; j < n; j++) {
				out.x[k] = phidot.A[i][j];
				k = k + 1;
			}
		}
		return out.x;
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.file.remote;
import org.apache.camel.spi.CamelLogger;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.StopWatch;
import org.apache.camel.util.StringHelper;
import org.apache.camel.util.TimeUtils;
import org.apache.commons.net.io.CopyStreamEvent;
import org.apache.commons.net.io.CopyStreamListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DefaultFtpClientActivityListener implements FtpClientActivityListener, CopyStreamListener {

    // NOTE(review): the logger is keyed on the interface type, so every
    // implementation shares one logger category -- presumed intentional for
    // configuration; confirm before changing.
    private static final Logger LOG = LoggerFactory.getLogger(FtpClientActivityListener.class);

    private final CamelLogger logger;
    private final String host;
    private final FtpEndpoint endpoint;

    // Current transfer direction and resume state (set by the onBegin* callbacks).
    private boolean download = true;
    private boolean resume;
    private long resumeOffset;

    private String fileName;
    private long fileSize;
    private String fileSizeText;

    private String lastLogActivity;
    private String lastVerboseLogActivity;
    private long lastLogActivityTimestamp = -1;
    private long lastVerboseLogActivityTimestamp = -1;
    private long transferredBytes;

    // watch times the whole transfer; interval throttles periodic progress logging.
    private final StopWatch watch = new StopWatch();
    private final StopWatch interval = new StopWatch();

    public DefaultFtpClientActivityListener(FtpEndpoint endpoint, String host) {
        this.logger = new CamelLogger(LOG);
        this.endpoint = endpoint;
        this.host = host;
    }

    @Override
    public void setDownload(boolean download) {
        this.download = download;
    }

    @Override
    public void setRemoteFileName(String fileName) {
        this.fileName = fileName;
    }

    @Override
    public void setRemoteFileSize(long fileSize) {
        this.fileSize = fileSize;
        this.fileSizeText = StringHelper.humanReadableBytes(fileSize);
    }

    @Override
    public String getLastLogActivity() {
        return lastLogActivity;
    }

    @Override
    public long getLastLogActivityTimestamp() {
        return lastLogActivityTimestamp;
    }

    @Override
    public String getLastVerboseLogActivity() {
        return lastVerboseLogActivity;
    }

    @Override
    public long getLastVerboseLogActivityTimestamp() {
        return lastVerboseLogActivityTimestamp;
    }

    @Override
    public void onGeneralError(String host, String errorMessage) {
        doLogVerbose("General error when communicating with host: " + host + " error: " + errorMessage);
    }

    @Override
    public void onConnecting(String host) {
        doLogVerbose("Connecting to host: " + host);
    }

    @Override
    public void onConnected(String host) {
        doLogVerbose("Connected to host: " + host);
    }

    @Override
    public void onLogin(String host) {
        doLogVerbose("Login on host: " + host);
    }

    @Override
    public void onLoginComplete(String host) {
        doLogVerbose("Login on host: " + host + " complete");
    }

    @Override
    public void onLoginFailed(int replyCode, String replyMessage) {
        doLogVerbose("Login on host: " + host + " failed (code: " + replyCode + ", message: " + replyMessage + ")");
    }

    @Override
    public void onDisconnecting(String host) {
        doLogVerbose("Disconnecting from host: " + host);
    }

    @Override
    public void onDisconnected(String host) {
        doLogVerbose("Disconnected from host: " + host);
    }

    @Override
    public void onScanningForFiles(String host, String directory) {
        if (ObjectHelper.isEmpty(directory)) {
            doLogVerbose("Scanning for new files to download from host: " + host);
        } else {
            doLogVerbose("Scanning for new files to download from host: " + host + " in directory: " + directory);
        }
    }

    @Override
    public void onBeginDownloading(String host, String file) {
        download = true;
        resume = false;
        resumeOffset = 0;
        watch.restart();
        interval.restart();
        // trailing space deliberately aligns "starting" with the later "completed" message
        String msg = "Downloading from host: " + host + " file: " + file + " starting ";
        if (fileSize > 0) {
            msg += " (size: " + fileSizeText + ")";
        }
        doLog(msg);
    }

    @Override
    public void onResumeDownloading(String host, String file, long position) {
        download = true;
        resume = true;
        resumeOffset = position;
        watch.restart();
        interval.restart();
        String msg = "Resume downloading from host: " + host + " file: " + file + " at position: " + position + " bytes/"
                     + StringHelper.humanReadableBytes(position);
        if (fileSize > 0) {
            float percent = ((float) resumeOffset / (float) fileSize) * 100L;
            String num = String.format("%.1f", percent);
            msg += "/" + num + "% (size: " + fileSizeText + ")";
        }
        doLog(msg);
    }

    @Override
    public void onDownload(String host, String file, long chunkSize, long totalChunkSize, long fileSize) {
        // include any resumed offset so progress reflects the whole file
        totalChunkSize = totalChunkSize + resumeOffset;
        transferredBytes = totalChunkSize;
        String prefix = resume ? "Resume downloading" : "Downloading";
        logChunkProgress(chunkMessage(prefix + " from", host, file, chunkSize, totalChunkSize, fileSize, "downloaded"));
    }

    @Override
    public void onDownloadComplete(String host, String file) {
        String prefix = resume ? "Resume downloading" : "Downloading";
        String msg = prefix + " from host: " + host + " file: " + file + " completed";
        if (transferredBytes > 0) {
            msg += " (size: " + StringHelper.humanReadableBytes(transferredBytes) + ")";
        }
        long taken = watch.taken();
        String time = TimeUtils.printDuration(taken);
        msg += " (took: " + time + ")";
        doLog(msg);
    }

    @Override
    public void onBeginUploading(String host, String file) {
        download = false;
        watch.restart();
        interval.restart();
        String msg = "Uploading to host: " + host + " file: " + file + " starting";
        if (fileSize > 0) {
            msg += " (size: " + fileSizeText + ")";
        }
        doLog(msg);
    }

    @Override
    public void onUpload(String host, String file, long chunkSize, long totalChunkSize, long fileSize) {
        transferredBytes = totalChunkSize;
        logChunkProgress(chunkMessage("Uploading to", host, file, chunkSize, totalChunkSize, fileSize, "uploaded"));
    }

    @Override
    public void onUploadComplete(String host, String file) {
        String msg = "Uploading to host: " + host + " file: " + file + " completed";
        if (transferredBytes > 0) {
            msg += " (size: " + StringHelper.humanReadableBytes(transferredBytes) + ")";
        }
        long taken = watch.taken();
        String time = TimeUtils.printDuration(taken);
        msg += " (took: " + time + ")";
        doLog(msg);
    }

    @Override
    public void bytesTransferred(CopyStreamEvent event) {
        // not in use
    }

    @Override
    public void bytesTransferred(long totalBytesTransferred, int bytesTransferred, long streamSize) {
        // stream size is always -1, so use pre-calculated fileSize instead
        if (download) {
            onDownload(host, fileName, bytesTransferred, totalBytesTransferred, fileSize);
        } else {
            onUpload(host, fileName, bytesTransferred, totalBytesTransferred, fileSize);
        }
    }

    /**
     * Builds the per-chunk progress message shared by download and upload
     * (extracted from the previously duplicated onDownload/onUpload bodies).
     *
     * @param action leading phrase incl. direction, e.g. "Downloading from" or "Uploading to"
     * @param soFarWord label used when the total file size is unknown ("downloaded"/"uploaded")
     */
    private String chunkMessage(String action, String host, String file, long chunkSize,
                                long totalChunkSize, long fileSize, String soFarWord) {
        String msg
                = action + " host: " + host + " file: " + file + " chunk (" + chunkSize + "/" + totalChunkSize + " bytes)";
        if (fileSize > 0) {
            float percent = ((float) totalChunkSize / (float) fileSize) * 100L;
            String num = String.format("%.1f", percent);
            // avoid reporting 100.0 before the matching onXXXComplete callback fires
            if (totalChunkSize < fileSize && "100.0".equals(num)) {
                num = "99.9";
            }
            String size = StringHelper.humanReadableBytes(totalChunkSize);
            msg += " (progress: " + size + "/" + num + "%)";
        } else {
            // total size unknown: report the human readable amount transferred so far
            String size = StringHelper.humanReadableBytes(totalChunkSize);
            msg += " (" + soFarWord + ": " + size + ")";
        }
        return msg;
    }

    /**
     * Logs each chunk verbosely and, for slow transfers, promotes the message
     * to a regular log entry once per configured interval.
     */
    private void logChunkProgress(String msg) {
        doLogVerbose(msg);
        // 1000L avoids int overflow for very large configured intervals
        if (interval.taken() > endpoint.getTransferLoggingIntervalSeconds() * 1000L) {
            doLog(msg);
            interval.restart();
        }
    }

    protected void doLog(String message) {
        lastLogActivity = message;
        lastLogActivityTimestamp = System.currentTimeMillis();
        // verbose implies regular log as well
        lastVerboseLogActivity = lastLogActivity;
        lastVerboseLogActivityTimestamp = lastLogActivityTimestamp;
        logger.log(message, endpoint.getTransferLoggingLevel());
    }

    protected void doLogVerbose(String message) {
        lastVerboseLogActivity = message;
        lastVerboseLogActivityTimestamp = System.currentTimeMillis();
        if (endpoint.isTransferLoggingVerbose()) {
            logger.log(message, endpoint.getTransferLoggingLevel());
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.pageStore;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.pageStore.PageWindowManager.PageWindow;
import org.apache.wicket.util.file.Files;
import org.apache.wicket.util.io.IOUtils;
import org.apache.wicket.util.lang.Args;
import org.apache.wicket.util.lang.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A data store implementation which stores the data on disk (in a file system)
*/
public class DiskDataStore implements IDataStore
{
	private static final Logger log = LoggerFactory.getLogger(DiskDataStore.class);
	/** File name used to persist the session index inside the store folder. */
	private static final String INDEX_FILE_NAME = "DiskDataStoreIndex";
	/** Name of the owning application; namespaces the store folder on disk. */
	private final String applicationName;
	/** Maximum number of bytes kept per page/session. */
	private final Bytes maxSizePerPageSession;
	/** Base folder under which the session data files are created. */
	private final File fileStoreFolder;
	/** Session id -&gt; entry managing that session's page files; thread safe. */
	private final ConcurrentMap<String, SessionEntry> sessionEntryMap;
/**
* Construct.
*
* @param applicationName
* @param fileStoreFolder
* @param maxSizePerSession
*/
public DiskDataStore(final String applicationName, final File fileStoreFolder,
final Bytes maxSizePerSession)
{
this.applicationName = applicationName;
this.fileStoreFolder = fileStoreFolder;
maxSizePerPageSession = Args.notNull(maxSizePerSession, "maxSizePerSession");
sessionEntryMap = new ConcurrentHashMap<String, SessionEntry>();
try
{
if (this.fileStoreFolder.exists() || this.fileStoreFolder.mkdirs())
{
loadIndex();
}
else
{
log.warn("Cannot create file store folder for some reason.");
}
}
catch (SecurityException e)
{
throw new WicketRuntimeException(
"SecurityException occurred while creating DiskDataStore. Consider using a non-disk based IDataStore implementation. "
+ "See org.apache.wicket.Application.setPageManagerProvider(IPageManagerProvider)",
e);
}
}
/**
* @see org.apache.wicket.pageStore.IDataStore#destroy()
*/
@Override
public void destroy()
{
log.debug("Destroying...");
saveIndex();
log.debug("Destroyed.");
}
/**
* @see org.apache.wicket.pageStore.IDataStore#getData(java.lang.String, int)
*/
@Override
public byte[] getData(final String sessionId, final int id)
{
byte[] pageData = null;
SessionEntry sessionEntry = getSessionEntry(sessionId, false);
if (sessionEntry != null)
{
pageData = sessionEntry.loadPage(id);
}
log.debug("Returning data{} for page with id '{}' in session with id '{}'", new Object[] {
pageData != null ? "" : "(null)", id, sessionId });
return pageData;
}
/**
* @see org.apache.wicket.pageStore.IDataStore#isReplicated()
*/
@Override
public boolean isReplicated()
{
return false;
}
/**
* @see org.apache.wicket.pageStore.IDataStore#removeData(java.lang.String, int)
*/
@Override
public void removeData(final String sessionId, final int id)
{
SessionEntry sessionEntry = getSessionEntry(sessionId, false);
if (sessionEntry != null)
{
log.debug("Removing data for page with id '{}' in session with id '{}'", new Object[] {
id, sessionId });
sessionEntry.removePage(id);
}
}
/**
* @see org.apache.wicket.pageStore.IDataStore#removeData(java.lang.String)
*/
@Override
public void removeData(final String sessionId)
{
SessionEntry sessionEntry = getSessionEntry(sessionId, false);
if (sessionEntry != null)
{
log.debug("Removing data for pages in session with id '{}'", sessionId);
synchronized (sessionEntry)
{
sessionEntryMap.remove(sessionEntry.sessionId);
sessionEntry.unbind();
}
}
}
/**
* @see org.apache.wicket.pageStore.IDataStore#storeData(java.lang.String, int, byte[])
*/
@Override
public void storeData(final String sessionId, final int id, final byte[] data)
{
SessionEntry sessionEntry = getSessionEntry(sessionId, true);
if (sessionEntry != null)
{
log.debug("Storing data for page with id '{}' in session with id '{}'", new Object[] {
id, sessionId });
sessionEntry.savePage(id, data);
}
}
/**
*
* @param sessionId
* @param create
* @return the session entry
*/
protected SessionEntry getSessionEntry(final String sessionId, final boolean create)
{
if (!create)
{
return sessionEntryMap.get(sessionId);
}
SessionEntry entry = new SessionEntry(this, sessionId);
SessionEntry existing = sessionEntryMap.putIfAbsent(sessionId, entry);
return existing != null ? existing : entry;
}
/**
* Load the index
*/
@SuppressWarnings("unchecked")
private void loadIndex()
{
File storeFolder = getStoreFolder();
File index = new File(storeFolder, INDEX_FILE_NAME);
if (index.exists() && index.length() > 0)
{
try
{
InputStream stream = new FileInputStream(index);
ObjectInputStream ois = new ObjectInputStream(stream);
try
{
Map<String, SessionEntry> map = (Map<String, SessionEntry>)ois.readObject();
sessionEntryMap.clear();
sessionEntryMap.putAll(map);
for (Entry<String, SessionEntry> entry : sessionEntryMap.entrySet())
{
// initialize the diskPageStore reference
SessionEntry sessionEntry = entry.getValue();
sessionEntry.diskDataStore = this;
}
} finally {
stream.close();
ois.close();
}
}
catch (Exception e)
{
log.error("Couldn't load DiskDataStore index from file " + index + ".", e);
}
}
Files.remove(index);
}
/**
*
*/
private void saveIndex()
{
File storeFolder = getStoreFolder();
if (storeFolder.exists())
{
File index = new File(storeFolder, INDEX_FILE_NAME);
Files.remove(index);
try
{
OutputStream stream = new FileOutputStream(index);
ObjectOutputStream oos = new ObjectOutputStream(stream);
try
{
Map<String, SessionEntry> map = new HashMap<String, SessionEntry>(
sessionEntryMap.size());
for (Entry<String, SessionEntry> e : sessionEntryMap.entrySet())
{
if (e.getValue().unbound == false)
{
map.put(e.getKey(), e.getValue());
}
}
oos.writeObject(map);
} finally {
stream.close();
oos.close();
}
}
catch (Exception e)
{
log.error("Couldn't write DiskDataStore index to file " + index + ".", e);
}
}
}
/**
*
*/
protected static class SessionEntry implements Serializable
{
private static final long serialVersionUID = 1L;
private final String sessionId;
private transient DiskDataStore diskDataStore;
private String fileName;
private PageWindowManager manager;
private boolean unbound = false;
protected SessionEntry(DiskDataStore diskDataStore, String sessionId)
{
this.diskDataStore = diskDataStore;
this.sessionId = sessionId;
}
public PageWindowManager getManager()
{
if (manager == null)
{
manager = new PageWindowManager(diskDataStore.maxSizePerPageSession.bytes());
}
return manager;
}
private String getFileName()
{
if (fileName == null)
{
fileName = diskDataStore.getSessionFileName(sessionId, true);
}
return fileName;
}
/**
* @return session id
*/
public String getSessionId()
{
return sessionId;
}
/**
* Saves the serialized page to appropriate file.
*
* @param pageId
* @param data
*/
public synchronized void savePage(int pageId, byte data[])
{
if (unbound)
{
return;
}
// only save page that has some data
if (data != null)
{
// allocate window for page
PageWindow window = getManager().createPageWindow(pageId, data.length);
FileChannel channel = getFileChannel(true);
if (channel != null)
{
try
{
// write the content
channel.write(ByteBuffer.wrap(data), window.getFilePartOffset());
}
catch (IOException e)
{
log.error("Error writing to a channel " + channel, e);
}
finally
{
IOUtils.closeQuietly(channel);
}
}
else
{
log.warn(
"Cannot save page with id '{}' because the data file cannot be opened.",
pageId);
}
}
}
/**
* Removes the page from pagemap file.
*
* @param pageId
*/
public synchronized void removePage(int pageId)
{
if (unbound)
{
return;
}
getManager().removePage(pageId);
}
/**
* Loads the part of pagemap file specified by the given PageWindow.
*
* @param window
* @return serialized page data
*/
public byte[] loadPage(PageWindow window)
{
byte[] result = null;
FileChannel channel = getFileChannel(false);
if (channel != null)
{
ByteBuffer buffer = ByteBuffer.allocate(window.getFilePartSize());
try
{
channel.read(buffer, window.getFilePartOffset());
if (buffer.hasArray())
{
result = buffer.array();
}
}
catch (IOException e)
{
log.error("Error reading from file channel " + channel, e);
}
finally
{
IOUtils.closeQuietly(channel);
}
}
return result;
}
private FileChannel getFileChannel(boolean create)
{
FileChannel channel = null;
File file = new File(getFileName());
if (create || file.exists())
{
String mode = create ? "rw" : "r";
try
{
RandomAccessFile randomAccessFile = new RandomAccessFile(file, mode);
channel = randomAccessFile.getChannel();
}
catch (FileNotFoundException fnfx)
{
// can happen if the file is locked. WICKET-4176
log.error(fnfx.getMessage(), fnfx);
}
}
return channel;
}
/**
* Loads the specified page data.
*
* @param id
* @return page data or null if the page is no longer in pagemap file
*/
public synchronized byte[] loadPage(int id)
{
if (unbound)
{
return null;
}
byte[] result = null;
PageWindow window = getManager().getPageWindow(id);
if (window != null)
{
result = loadPage(window);
}
return result;
}
/**
* Deletes all files for this session.
*/
public synchronized void unbind()
{
File sessionFolder = diskDataStore.getSessionFolder(sessionId, false);
if (sessionFolder.exists())
{
Files.removeFolder(sessionFolder);
}
unbound = true;
}
}
/**
* Returns the file name for specified session. If the session folder (folder that contains the
* file) does not exist and createSessionFolder is true, the folder will be created.
*
* @param sessionId
* @param createSessionFolder
* @return file name for pagemap
*/
private String getSessionFileName(String sessionId, boolean createSessionFolder)
{
File sessionFolder = getSessionFolder(sessionId, createSessionFolder);
return new File(sessionFolder, "data").getAbsolutePath();
}
/**
* This folder contains sub-folders named as the session id for which they hold the data.
*
* @return the folder where the pages are stored
*/
protected File getStoreFolder()
{
return new File(fileStoreFolder, applicationName + "-filestore");
}
/**
* Returns the folder for the specified sessions. If the folder doesn't exist and the create
* flag is set, the folder will be created.
*
* @param sessionId
* @param create
* @return folder used to store session data
*/
protected File getSessionFolder(String sessionId, final boolean create)
{
File storeFolder = getStoreFolder();
sessionId = sessionId.replace('*', '_');
sessionId = sessionId.replace('/', '_');
sessionId = sessionId.replace(':', '_');
File sessionFolder = new File(storeFolder, sessionId);
if (create && sessionFolder.exists() == false)
{
Files.mkdirs(sessionFolder);
}
return sessionFolder;
}
@Override
public boolean canBeAsynchronous()
{
return true;
}
}
| |
// Modifications copyright (C) 2017, Baidu.com, Inc.
// Copyright 2017 The Apache Software Foundation
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.baidu.palo.catalog;
import com.baidu.palo.common.DdlException;
import com.baidu.palo.common.FeConstants;
import com.google.common.base.Predicate;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.easymock.PowerMock;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;
import java.util.Map;
@RunWith(PowerMockRunner.class)
@PowerMockIgnore("org.apache.log4j.*")
@PrepareForTest(Catalog.class)
public class MysqlTableTest {
    private List<Column> columns;
    private Map<String, String> properties;
    private Catalog catalog;

    @Before
    public void setUp() {
        // a single BIGINT key column is enough for MysqlTable construction
        columns = Lists.newArrayList();
        Column column = new Column("col1", PrimitiveType.BIGINT);
        column.setIsKey(true);
        columns.add(column);

        // a complete, valid property set; the negative tests each remove one key
        properties = Maps.newHashMap();
        properties.put("host", "127.0.0.1");
        properties.put("port", "3306");
        properties.put("user", "root");
        properties.put("password", "root");
        properties.put("database", "db");
        properties.put("table", "tbl");

        // mock the Catalog singleton so Table.read() sees the current meta version
        catalog = EasyMock.createMock(Catalog.class);
        PowerMock.mockStatic(Catalog.class);
        EasyMock.expect(Catalog.getInstance()).andReturn(catalog).anyTimes();
        EasyMock.expect(Catalog.getCurrentCatalogJournalVersion()).andReturn(FeConstants.meta_version).anyTimes();
        PowerMock.replay(Catalog.class);
    }

    // Deletes every regular file directly inside 'dir'. Null-safe: listFiles() returns
    // null when the directory vanished or cannot be read.
    private static void deleteFilesIn(File dir) {
        File[] files = dir.listFiles();
        if (files != null) {
            for (File file : files) {
                if (file.isFile()) {
                    file.delete();
                }
            }
        }
    }

    // Returns a view of 'properties' with the given key (case-insensitively) filtered out.
    private Map<String, String> propertiesWithout(final String key) {
        return Maps.filterKeys(properties, new Predicate<String>() {
            @Override
            public boolean apply(String s) {
                return !s.equalsIgnoreCase(key);
            }
        });
    }

    /**
     * Round-trip test: serializes a MysqlTable into a scratch file, reads it back via
     * Table.read(), and verifies the thrift representations match. Streams are closed in
     * finally blocks so a failed assertion cannot leak file handles (the original leaked
     * both streams on failure), and the scratch directory is removed afterwards.
     */
    @Test
    public void testNormal() throws DdlException, IOException {
        MysqlTable mysqlTable = new MysqlTable(1000, "mysqlTable", columns, properties);
        Assert.assertEquals("tbl", mysqlTable.getMysqlTableName());

        String dirString = "mysqlTableFamilyGroup";
        File dir = new File(dirString);
        if (!dir.exists()) {
            dir.mkdir();
        } else {
            deleteFilesIn(dir);
        }
        File file = new File(dir, "image");
        file.createNewFile();

        // write phase
        DataOutputStream dos = new DataOutputStream(new FileOutputStream(file));
        try {
            mysqlTable.write(dos);
        } finally {
            dos.close();
        }

        // read-back and compare phase
        DataInputStream dis = new DataInputStream(new BufferedInputStream(new FileInputStream(file)));
        try {
            MysqlTable table1 = (MysqlTable) Table.read(dis);
            Assert.assertEquals(mysqlTable.toThrift(), table1.toThrift());
        } finally {
            dis.close();
        }

        // clean up the scratch directory
        if (dir.exists()) {
            deleteFilesIn(dir);
            dir.delete();
        }
    }

    @Test(expected = DdlException.class)
    public void testNoHost() throws DdlException {
        new MysqlTable(1000, "mysqlTable", columns, propertiesWithout("host"));
        Assert.fail("No exception throws.");
    }

    @Test(expected = DdlException.class)
    public void testNoPort() throws DdlException {
        new MysqlTable(1000, "mysqlTable", columns, propertiesWithout("port"));
        Assert.fail("No exception throws.");
    }

    @Test(expected = DdlException.class)
    public void testPortNotNumber() throws DdlException {
        // keep all keys but replace the port value with a non-numeric string
        Map<String, String> pro = Maps.transformEntries(properties,
                new Maps.EntryTransformer<String, String, String>() {
                    @Override
                    public String transformEntry(String s, String s2) {
                        if (s.equalsIgnoreCase("port")) {
                            return "abc";
                        }
                        return s2;
                    }
                });
        new MysqlTable(1000, "mysqlTable", columns, pro);
        Assert.fail("No exception throws.");
    }

    @Test(expected = DdlException.class)
    public void testNoUser() throws DdlException {
        new MysqlTable(1000, "mysqlTable", columns, propertiesWithout("user"));
        Assert.fail("No exception throws.");
    }

    @Test(expected = DdlException.class)
    public void testNoPass() throws DdlException {
        new MysqlTable(1000, "mysqlTable", columns, propertiesWithout("password"));
        Assert.fail("No exception throws.");
    }

    @Test(expected = DdlException.class)
    public void testNoDb() throws DdlException {
        new MysqlTable(1000, "mysqlTable", columns, propertiesWithout("database"));
        Assert.fail("No exception throws.");
    }

    @Test(expected = DdlException.class)
    public void testNoTbl() throws DdlException {
        new MysqlTable(1000, "mysqlTable", columns, propertiesWithout("table"));
        Assert.fail("No exception throws.");
    }

    @Test(expected = DdlException.class)
    public void testNoPro() throws DdlException {
        new MysqlTable(1000, "mysqlTable", columns, null);
        Assert.fail("No exception throws.");
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vfs.impl.local;
import com.intellij.diagnostic.PluginException;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.SystemInfoRt;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.util.text.StringUtilRt;
import com.intellij.openapi.vfs.VFileProperty;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFilePointerCapableFileSystem;
import com.intellij.openapi.vfs.newvfs.ManagingFS;
import com.intellij.openapi.vfs.newvfs.NewVirtualFile;
import com.intellij.openapi.vfs.newvfs.RefreshQueue;
import com.intellij.openapi.vfs.newvfs.VfsImplUtil;
import com.intellij.openapi.vfs.newvfs.impl.VirtualFileSystemEntry;
import com.intellij.util.concurrency.AppExecutorUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.*;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static java.util.Objects.requireNonNullElse;
/**
 * Local file system implementation wired to the native {@link FileWatcher}: paths the watcher
 * reports as dirty are periodically flushed into the VFS as dirty marks so the refresh
 * machinery picks them up. Also maintains watch roots and symlink bookkeeping via
 * {@link WatchRootsManager}.
 */
public class LocalFileSystemImpl extends LocalFileSystemBase implements Disposable, VirtualFilePointerCapableFileSystem {
  // interval (ms) between flushes of watcher-reported dirty paths into the VFS
  private static final int STATUS_UPDATE_PERIOD = 1000;

  private final ManagingFS myManagingFS;
  private final FileWatcher myWatcher;
  private final WatchRootsManager myWatchRootsManager;
  // optional hook run after a flush actually marked something dirty (see constructor)
  private final Runnable myAfterMarkDirtyCallback;
  private volatile boolean myDisposed;

  public LocalFileSystemImpl() {
    this(null);
  }

  public LocalFileSystemImpl(@Nullable Runnable afterMarkDirtyCallback) {
    myAfterMarkDirtyCallback = afterMarkDirtyCallback;
    myManagingFS = ManagingFS.getInstance();
    // When the watcher starts up, schedule a periodic task that pulls its dirty paths into
    // the VFS; the callback fires only when the flush marked at least one file dirty.
    myWatcher = new FileWatcher(myManagingFS, () -> {
      AppExecutorUtil.getAppScheduledExecutorService().scheduleWithFixedDelay(() -> {
        if (!ApplicationManager.getApplication().isDisposed()) {
          if (storeRefreshStatusToFiles() && myAfterMarkDirtyCallback != null) {
            myAfterMarkDirtyCallback.run();
          }
        }
      },
      STATUS_UPDATE_PERIOD, STATUS_UPDATE_PERIOD, TimeUnit.MILLISECONDS);
    });
    // initialize pluggable content loaders; a failing loader is logged, not fatal
    for (PluggableLocalFileSystemContentLoader contentLoader : PLUGGABLE_CONTENT_LOADER_EP_NAME.getExtensionList()) {
      try {
        contentLoader.initialize();
        Disposer.register(this, contentLoader);
      }
      catch (Exception e) {
        LOG.error(PluginException.createByClass(e, contentLoader.getClass()));
      }
    }
    myWatchRootsManager = new WatchRootsManager(myWatcher, this);
    Disposer.register(ApplicationManager.getApplication(), this);
    new SymbolicLinkRefresher(this);
  }

  public @NotNull FileWatcher getFileWatcher() {
    return myWatcher;
  }

  @Override
  public void dispose() {
    myDisposed = true;
    myWatcher.dispose();
  }

  /**
   * Drains the watcher's accumulated dirty paths and marks the corresponding cached VFS
   * entries dirty. Returns {@code true} when anything was reported (i.e. something may have
   * been marked); {@code false} when the watcher is not operational or reported nothing.
   */
  private boolean storeRefreshStatusToFiles() {
    if (myWatcher.isOperational()) {
      FileWatcher.DirtyPaths dirtyPaths = myWatcher.getDirtyPaths();
      markPathsDirty(dirtyPaths.dirtyPaths);
      markFlatDirsDirty(dirtyPaths.dirtyDirectories);
      markRecursiveDirsDirty(dirtyPaths.dirtyPathsRecursive);
      return !dirtyPaths.dirtyPaths.isEmpty() || !dirtyPaths.dirtyDirectories.isEmpty() || !dirtyPaths.dirtyPathsRecursive.isEmpty();
    }
    return false;
  }

  // Marks each individual cached file dirty (non-recursive).
  private void markPathsDirty(@NotNull Iterable<String> dirtyPaths) {
    for (String dirtyPath : dirtyPaths) {
      VirtualFile file = findFileByPathIfCached(dirtyPath);
      if (file instanceof NewVirtualFile) {
        ((NewVirtualFile)file).markDirty();
      }
    }
  }

  // Marks a directory and its immediate cached children dirty.
  // NOTE(review): pair.first appears to be the file itself when cached and pair.second a
  // cached ancestor fallback — confirm against VfsImplUtil.findCachedFileByPath.
  private void markFlatDirsDirty(@NotNull Iterable<String> dirtyPaths) {
    for (String dirtyPath : dirtyPaths) {
      Pair<NewVirtualFile, NewVirtualFile> pair = VfsImplUtil.findCachedFileByPath(this, dirtyPath);
      if (pair.first != null) {
        pair.first.markDirty();
        for (VirtualFile child : pair.first.getCachedChildren()) {
          ((NewVirtualFile)child).markDirty();
        }
      }
      else if (pair.second != null) {
        pair.second.markDirty();
      }
    }
  }

  // Marks a directory subtree dirty recursively (or the cached fallback non-recursively).
  private void markRecursiveDirsDirty(@NotNull Iterable<String> dirtyPaths) {
    for (String dirtyPath : dirtyPaths) {
      Pair<NewVirtualFile, NewVirtualFile> pair = VfsImplUtil.findCachedFileByPath(this, dirtyPath);
      if (pair.first != null) {
        pair.first.markDirtyRecursively();
      }
      else if (pair.second != null) {
        pair.second.markDirty();
      }
    }
  }

  /**
   * Flushes pending watcher state, then marks "suspicious" files dirty: when the watcher is
   * operational, the manually-watched roots; otherwise, the given files belonging to this
   * file system.
   */
  public void markSuspiciousFilesDirty(@NotNull List<? extends VirtualFile> files) {
    storeRefreshStatusToFiles();

    if (myWatcher.isOperational()) {
      for (String root : myWatcher.getManualWatchRoots()) {
        VirtualFile suspiciousRoot = findFileByPathIfCached(root);
        if (suspiciousRoot != null) {
          ((NewVirtualFile)suspiciousRoot).markDirtyRecursively();
        }
      }
    }
    else {
      for (VirtualFile file : files) {
        if (file.getFileSystem() == this) {
          ((NewVirtualFile)file).markDirtyRecursively();
        }
      }
    }
  }

  @Override
  public @NotNull Iterable<@NotNull VirtualFile> findCachedFilesForPath(@NotNull String path) {
    return ContainerUtil.mapNotNull(getAliasedPaths(path), this::findFileByPathIfCached);
  }

  // Finds paths that denote the same physical file (canonical path + symlinks)
  // Returns [canonical_path + symlinks], if path is canonical
  // [path], otherwise
  private @NotNull List<@NotNull @SystemDependent String> getAliasedPaths(@NotNull String path) {
    path = FileUtil.toSystemDependentName(path);
    List<@NotNull String> aliases = new ArrayList<>(getFileWatcher().mapToAllSymlinks(path));
    assert !aliases.contains(path);
    aliases.add(0, path);
    return aliases;
  }

  @Override
  public @NotNull Set<WatchRequest> replaceWatchedRoots(@NotNull Collection<WatchRequest> watchRequestsToRemove,
                                                        @Nullable Collection<String> recursiveRootsToAdd,
                                                        @Nullable Collection<String> flatRootsToAdd) {
    if (myDisposed) return Collections.emptySet();

    Collection<WatchRequest> nonNullWatchRequestsToRemove = ContainerUtil.skipNulls(watchRequestsToRemove);
    LOG.assertTrue(nonNullWatchRequestsToRemove.size() == watchRequestsToRemove.size(), "watch requests collection should not contain `null` elements");

    return myWatchRootsManager.replaceWatchedRoots(nonNullWatchRequestsToRemove,
                                                   requireNonNullElse(recursiveRootsToAdd, Collections.emptyList()),
                                                   requireNonNullElse(flatRootsToAdd, Collections.emptyList()));
  }

  @Override
  public void refreshWithoutFileWatcher(final boolean asynchronous) {
    // mark every root dirty recursively, then refresh; async only when the watcher works
    Runnable heavyRefresh = () -> {
      for (VirtualFile root : myManagingFS.getRoots(this)) {
        ((NewVirtualFile)root).markDirtyRecursively();
      }
      refresh(asynchronous);
    };

    if (asynchronous && myWatcher.isOperational()) {
      RefreshQueue.getInstance().refresh(true, true, heavyRefresh, myManagingFS.getRoots(this));
    }
    else {
      heavyRefresh.run();
    }
  }

  /** Notifies the watch-roots manager about a created/updated symlink, unless it is recursive or circular. */
  @ApiStatus.Internal
  public final void symlinkUpdated(int fileId,
                                   @Nullable VirtualFile parent,
                                   @NotNull CharSequence name,
                                   @NotNull String linkPath,
                                   @Nullable String linkTarget) {
    if (linkTarget == null || !isRecursiveOrCircularSymlink(parent, name, linkTarget)) {
      myWatchRootsManager.updateSymlink(fileId, linkPath, linkTarget);
    }
  }

  @ApiStatus.Internal
  public final void symlinkRemoved(int fileId) {
    myWatchRootsManager.removeSymlink(fileId);
  }

  @Override
  public String toString() {
    return "LocalFileSystem";
  }

  @Override
  @TestOnly
  public void cleanupForNextTest() {
    super.cleanupForNextTest();
    myWatchRootsManager.clear();
  }

  /**
   * Returns {@code true} when the symlink points at its own ancestor (recursive) or when any
   * symlink ancestor resolves to the same target (circular).
   */
  private static boolean isRecursiveOrCircularSymlink(@Nullable VirtualFile parent,
                                                      @NotNull CharSequence name,
                                                      @NotNull String symlinkTarget) {
    if (startsWith(parent, name, symlinkTarget)) return true;
    if (!(parent instanceof VirtualFileSystemEntry)) {
      return false;
    }
    // check if it's circular - any symlink above resolves to my target too
    for (VirtualFileSystemEntry p = (VirtualFileSystemEntry)parent; p != null; p = p.getParent()) {
      // optimization: when the file has no symlinks up the hierarchy, it's not circular
      if (!p.thisOrParentHaveSymlink()) return false;
      if (p.is(VFileProperty.SYMLINK)) {
        String parentResolved = p.getCanonicalPath();
        if (symlinkTarget.equals(parentResolved)) {
          return true;
        }
      }
    }
    return false;
  }

  // True when the symlink's target is an ancestor of (or equal to) the link's own location.
  private static boolean startsWith(@Nullable VirtualFile parent,
                                    @NotNull CharSequence name,
                                    @NotNull String symlinkTarget) {
    if (parent != null) {
      String symlinkTargetParent = StringUtil.trimEnd(symlinkTarget, "/" + name);
      return VfsUtilCore.isAncestorOrSelf(symlinkTargetParent, parent);
    }
    // parent == null means name is root
    return StringUtilRt.equal(name, symlinkTarget, SystemInfoRt.isFileSystemCaseSensitive);
  }
}
| |
/*
* Copyright 2001-2008 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.juddi.config;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import java.util.Properties;
import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
import org.apache.commons.configuration.CompositeConfiguration;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.MapConfiguration;
import org.apache.commons.configuration.SystemConfiguration;
import org.apache.commons.configuration.XMLConfiguration;
import org.apache.commons.configuration.reloading.FileChangedReloadingStrategy;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.juddi.ClassUtil;
import org.apache.juddi.Registry;
import org.apache.juddi.keygen.KeyGenerator;
import org.apache.juddi.model.UddiEntityPublisher;
/**
* Handles the application level configuration for jUDDI. By default it first
* looks at system properties (juddi.propertiesFile)
* @author <a href="mailto:kstam@apache.org">Kurt T Stam</a>
* @author <a href="mailto:jfaath@apache.org">Jeff Faath</a>
*/
public class AppConfig
{
/**
* This system property's value should be a path to a configuration file
*/
public static final String JUDDI_CONFIGURATION_FILE_SYSTEM_PROPERTY="juddi.propertiesFile";
/**
* The default configuration file name for juddi
*/
public static final String JUDDI_PROPERTIES = "juddiv3.xml";
private Log log = LogFactory.getLog(AppConfig.class);
private Configuration config;
private static AppConfig instance=null;
private static URL loadedFrom=null;
private static XMLConfiguration propConfig=null;
/**
 * Enables an administrator to identify the physical location of the configuration file from
 * which the active settings were loaded.<br>
 * Always call via the singleton function AppConfig.getInstance().getConfigFileURL()
 *
 * @since 3.2
 * @return the source URL, or {@code null} when no configuration file was found
 */
public static URL getConfigFileURL()
{
    // set as a side effect of loadConfiguration(); null until a config file is located
    return AppConfig.loadedFrom;
}
/**
 * Constructor (note Singleton pattern): callers obtain the instance only through the static
 * accessors, which cache it in {@code instance}.
 *
 * @throws ConfigurationException
 *             if the configuration file cannot be read or the persistence layer cannot be
 *             initialized
 */
private AppConfig() throws ConfigurationException
{
    // reads system properties and juddiv3.xml, and wires up the composite configuration
    loadConfiguration();
}
/**
 * Sets (or replaces) a single property in the XML configuration and immediately persists the
 * file to disk.
 * <p>
 * NOTE(review): {@code instance} is lazily created here without synchronization; concurrent
 * first calls could race — confirm single-threaded initialization is guaranteed.
 *
 * @param key the configuration key to set
 * @param val the value to store
 * @throws ConfigurationException if the configuration cannot be loaded or saved
 */
public static void setJuddiProperty(String key, Object val) throws ConfigurationException{
    if (instance==null) {
        instance = new AppConfig();
    }
    propConfig.setProperty(key, val);
    propConfig.save();
}
/**
 * Persists the currently loaded XML configuration back to its file.
 *
 * @throws ConfigurationException if the configuration cannot be initialized or written
 */
public static void saveConfiguration() throws ConfigurationException{
    // side effect matters: ensures the singleton (and propConfig) is initialized
    getConfiguration(); //findbugs will flag this as useless, but its not
    propConfig.save();
}
/**
* Does the actual work of reading the configuration from System
* Properties and/or juddiv3.xml file. When the juddiv3.xml
* file is updated the file will be reloaded. By default the reloadDelay is
* set to 1 second to prevent excessive date stamp checking.
*/
private void loadConfiguration() throws ConfigurationException
{
    // Lookup precedence: system properties first, then the XML file, then values persisted
    // in the database (appended last below).
    //Properties from system properties
    CompositeConfiguration compositeConfig = new CompositeConfiguration();
    compositeConfig.addConfiguration(new SystemConfiguration());
    //Properties from file
    //changed 7-19-2013 AO for JUDDI-627
    propConfig = null;
    final String filename = System.getProperty(JUDDI_CONFIGURATION_FILE_SYSTEM_PROPERTY);
    if (filename != null) {
        propConfig = new XMLConfiguration (filename);
        try {
            loadedFrom = new File(filename).toURI().toURL();
            // propConfig = new PropertiesConfiguration(filename);
        } catch (MalformedURLException ex) {
            try {
                // fall back to a naive file URL when the path cannot be resolved
                loadedFrom = new URL("file://" + filename);
            } catch (MalformedURLException ex1) {
                log.warn("unable to get an absolute path to " + filename + ". This may be ignorable if everything works properly.", ex1);
            }
        }
    } else {
        // no system property set: load juddiv3.xml from the classpath
        //propConfig = new PropertiesConfiguration(JUDDI_PROPERTIES);
        propConfig = new XMLConfiguration(JUDDI_PROPERTIES);
        loadedFrom = ClassUtil.getResource(JUDDI_PROPERTIES, this.getClass());
    }
    // NOTE(review): auto-save writes the XML file back on every property change — the
    // original comment flagged this as risky ("this may break things"); confirm intended.
    propConfig.setAutoSave(true);
    log.info("Reading from jUDDI config file from: " + loadedFrom);
    long refreshDelay = propConfig.getLong(Property.JUDDI_CONFIGURATION_RELOAD_DELAY, 1000l);
    log.debug("Setting refreshDelay to " + refreshDelay);
    // re-read the file when its timestamp changes, at most once per refreshDelay ms
    FileChangedReloadingStrategy fileChangedReloadingStrategy = new FileChangedReloadingStrategy();
    fileChangedReloadingStrategy.setRefreshDelay(refreshDelay);
    propConfig.setReloadingStrategy(fileChangedReloadingStrategy);
    compositeConfig.addConfiguration(propConfig);
    // pass Hibernate-specific settings through to the persistence unit, when configured
    Properties properties = new Properties();
    if ("Hibernate".equals(propConfig.getString(Property.PERSISTENCE_PROVIDER))) {
        if (propConfig.containsKey(Property.DATASOURCE))
            properties.put("hibernate.connection.datasource",propConfig.getString(Property.DATASOURCE));
        if (propConfig.containsKey(Property.HBM_DDL_AUTO))
            properties.put("hibernate.hbm2ddl.auto",propConfig.getString(Property.HBM_DDL_AUTO));
        if (propConfig.containsKey(Property.DEFAULT_SCHEMA))
            properties.put("hibernate.default_schema",propConfig.getString(Property.DEFAULT_SCHEMA));
        if (propConfig.containsKey(Property.HIBERNATE_DIALECT))
            properties.put("hibernate.dialect",propConfig.getString(Property.HIBERNATE_DIALECT));
    }
    // initialize the entityManagerFactory.
    PersistenceManager.initializeEntityManagerFactory(propConfig.getString(Property.JUDDI_PERSISTENCEUNIT_NAME), properties);
    // Properties from the persistence layer
    MapConfiguration persistentConfig = new MapConfiguration(getPersistentConfiguration(compositeConfig));
    compositeConfig.addConfiguration(persistentConfig);
    //Making the new configuration globally accessible.
    config = compositeConfig;
}
/**
 * Builds the "persisted" properties, i.e. those stored in the database rather than
 * in a configuration file. These values are written when the application is installed;
 * if they cannot be found, installation is considered broken and an error is raised.
 *
 * @param config the already-loaded file/system configuration to read bootstrap keys from
 * @return properties derived from the database (root partition, node id, root business)
 * @throws ConfigurationException if seeding fails or a required persisted value is missing
 */
private Properties getPersistentConfiguration(Configuration config) throws ConfigurationException {
    Properties result = new Properties();
    EntityManager em = PersistenceManager.getEntityManager();
    EntityTransaction tx = em.getTransaction();
    try {
        // "juddi.seed.always" forces the seed data to be (re)installed on every startup;
        // otherwise installation only happens when the 'root' publisher is absent.
        boolean seedAlways = config.getBoolean("juddi.seed.always",false);
        if (seedAlways || !Install.alreadyInstalled(config)) {
            if (seedAlways) {
                log.info("Installing UDDI seed data, loading...");
            } else {
                log.info("The 'root' publisher was not found, loading...");
            }
            try {
                Install.install(config);
            } catch (Exception e) {
                throw new ConfigurationException(e);
            } catch (Throwable t) {
                // Wrap even Errors so callers only ever see ConfigurationException.
                throw new ConfigurationException(t);
            }
        }
        // Read-only lookups below still run inside a transaction so the rollback in
        // the finally block can clean up on any failure path.
        tx.begin();
        String rootPublisherStr = config.getString(Property.JUDDI_ROOT_PUBLISHER);
        UddiEntityPublisher rootPublisher = new UddiEntityPublisher(rootPublisherStr);
        rootPublisher.populateKeyGeneratorKeys(em);
        List<String> rootKeyGenList = rootPublisher.getKeyGeneratorKeys();
        if (rootKeyGenList == null || rootKeyGenList.size() == 0)
            throw new ConfigurationException("The 'root' publisher key generator was not found. Please make sure that the application is properly installed.");
        String rootKeyGen = rootKeyGenList.iterator().next();
        //rootKeyGen = rootKeyGen.substring((KeyGenerator.UDDI_SCHEME + KeyGenerator.PARTITION_SEPARATOR).length());
        // Strip the trailing ":keygenerator" suffix to obtain the root partition.
        rootKeyGen = rootKeyGen.substring(0, rootKeyGen.length() - (KeyGenerator.PARTITION_SEPARATOR + KeyGenerator.KEYGENERATOR_SUFFIX).length());
        log.debug("root partition: " + rootKeyGen);
        result.setProperty(Property.JUDDI_ROOT_PARTITION, rootKeyGen);
        // The node Id is defined as the business key of the business entity categorized as a node. This entity is saved as part of the install.
        // Only one business entity should be categorized as a node.
        String nodeId = config.getString(Property.JUDDI_NODE_ID);
        if (nodeId==null)
            log.fatal("Error! " + Property.JUDDI_NODE_ID + " is not defined in the config!");
        else
            result.setProperty(Property.JUDDI_NODE_ID, nodeId);
        /*
        CategoryBag categoryBag = new CategoryBag();
        KeyedReference keyedRef = new KeyedReference();
        keyedRef.setTModelKey(Constants.NODE_CATEGORY_TMODEL);
        keyedRef.setKeyValue(Constants.NODE_KEYVALUE);
        categoryBag.getKeyedReference().add(keyedRef);
        List<?> keyList = FindBusinessByCategoryQuery.select(em, new FindQualifiers(), categoryBag, null);
        if (keyList != null && keyList.size() > 1)
        {
            StringBuilder sb = new StringBuilder();
            Iterator<?> iterator = keyList.iterator();
            while(iterator.hasNext()){
                sb.append(iterator.next()).append(",");
            }
            //
            //throw new ConfigurationException("Only one business entity can be categorized as the node. Config loaded from " + loadedFrom + " Key's listed at the node: " + sb.toString());
            //unless of course, we are in a replicated environment
        }
        if (keyList != null && keyList.size() > 0) {
            nodeId = (String)keyList.get(0);
        }
        else
            throw new ConfigurationException("A node business entity was not found. Please make sure that the application is properly installed.");
        */
        String rootbiz=config.getString(Property.JUDDI_NODE_ROOT_BUSINESS);
        if (rootbiz==null)
            log.fatal("Error! " + Property.JUDDI_NODE_ROOT_BUSINESS + " is not defined in the config");
        else
            result.setProperty(Property.JUDDI_NODE_ROOT_BUSINESS, rootbiz);
        tx.commit();
        return result;
    } finally {
        // If anything above threw before commit, roll the transaction back and
        // always release the EntityManager.
        if (tx.isActive()) {
            tx.rollback();
        }
        em.close();
    }
}
/**
 * Obtains the reference to the Singleton instance, creating it on first use.
 * <p>
 * Declared {@code synchronized} because the original lazy initialization was not
 * thread-safe: two threads racing through the {@code null} check could each
 * construct an {@code AppConfig} (and run its configuration loading twice).
 *
 * @return the AppConfig Singleton instance.
 * @throws ConfigurationException if the configuration cannot be loaded
 */
public static synchronized AppConfig getInstance() throws ConfigurationException
{
    if (instance == null) {
        instance = new AppConfig();
    }
    return instance;
}
/**
 * Hook to receive configuration reload events from an external application.
 * <p>
 * Stops the registry, reloads the configuration, then restarts the registry;
 * the ordering matters so the registry never runs against a half-loaded config.
 *
 * @throws ConfigurationException if the configuration cannot be reloaded
 */
public static void reloadConfig() throws ConfigurationException
{
    Registry.stop();
    getInstance().loadConfiguration();
    Registry.start();
}
/**
 * Reloads the configuration in place without stopping/restarting the registry
 * (contrast with {@code reloadConfig()}).
 *
 * @throws ConfigurationException if the configuration cannot be reloaded
 */
public static void triggerReload() throws ConfigurationException{
    getInstance().loadConfiguration();
}
/**
 * The object from which property values can be obtained.
 *
 * @return the commons Configuration interface backing this application
 * @throws ConfigurationException if the singleton cannot be created/loaded
 */
public static Configuration getConfiguration() throws ConfigurationException
{
    return getInstance().config;
}
}
| |
/*
* Copyright (C) 2016. Jared Rummler <jared.rummler@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.jaredrummler.android.processes;
import android.app.ActivityManager;
import android.app.ActivityManager.RunningAppProcessInfo;
import android.content.Context;
import android.content.pm.PackageManager;
import android.os.Build;
import android.util.Log;
import com.jaredrummler.android.processes.models.AndroidAppProcess;
import com.jaredrummler.android.processes.models.AndroidProcess;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
/**
* <p>Helper class to get a list of processes on Android.</p>
* <hr>
* <p><strong>Usage:</strong></p>
*
* <p>Get a list of running apps:</p>
* <pre>
 * List&lt;AndroidAppProcess&gt; processes = AndroidProcesses.getRunningAppProcesses();
* </pre>
*
* <p>Get some information about a process:</p>
* <pre>
* AndroidAppProcess process = processes.get(location);
* String processName = process.name;
*
* Stat stat = process.stat();
* int pid = stat.getPid();
* int parentProcessId = stat.ppid();
* long startTime = stat.stime();
* int policy = stat.policy();
* char state = stat.state();
*
* Statm statm = process.statm();
* long totalSizeOfProcess = statm.getSize();
* long residentSetSize = statm.getResidentSetSize();
*
* PackageInfo packageInfo = process.getPackageInfo(context, 0);
* String appName = packageInfo.applicationInfo.loadLabel(pm).toString();
* </pre>
*
* <p>Check if your app is in the foreground:</p>
* <pre>
* if (AndroidProcesses.isMyProcessInTheForeground()) {
* // do stuff
* }
* </pre>
*
* <p>Get a list of application processes that are running on the device:</p>
* <pre>
 * List&lt;ActivityManager.RunningAppProcessInfo&gt; processes = AndroidProcesses.getRunningAppProcessInfo(context);
* </pre>
*
* <hr>
* <p><strong>Limitations</strong></p>
*
* <p>System apps may not be visible because they have a higher SELinux context than third party apps.</p>
* <p>Some information that was available through {@link ActivityManager#getRunningAppProcesses()} is not available
* using this library
* ({@link RunningAppProcessInfo#pkgList},
* {@link RunningAppProcessInfo#lru},
* {@link RunningAppProcessInfo#importance},
* etc.).</p>
* <p>This is currently not working on the N developer preview.</p>
* <hr>
* <p><b>Note:</b> You should avoid running methods from this class on the UI thread.</p>
*/
public class AndroidProcesses {

    public static final String TAG = "AndroidProcesses";

    /** GID of the "readproc" group that grants /proc visibility on Android 7.0+. */
    private static final int AID_READPROC = 3009;

    /** Guards all logcat output from {@link #log(String, Object...)}. */
    private static boolean loggingEnabled;

    /**
     * Toggle whether debug logging is enabled.
     *
     * @param enabled
     *     {@code true} to enable logging. This should be only be used for debugging purposes.
     * @see #isLoggingEnabled()
     * @see #log(String, Object...)
     * @see #log(Throwable, String, Object...)
     */
    public static void setLoggingEnabled(boolean enabled) {
        loggingEnabled = enabled;
    }

    /**
     * @return {@code true} if logging is enabled.
     * @see #setLoggingEnabled(boolean)
     */
    public static boolean isLoggingEnabled() {
        return loggingEnabled;
    }

    /**
     * Send a log message if logging is enabled.
     *
     * @param message
     *     the message to log
     * @param args
     *     list of arguments to pass to the formatter
     */
    public static void log(String message, Object... args) {
        if (loggingEnabled) {
            // Skip String.format when there are no args so literal '%' in messages is safe.
            Log.d(TAG, args.length == 0 ? message : String.format(message, args));
        }
    }

    /**
     * Send a log message if logging is enabled.
     *
     * @param error
     *     An exception to log
     * @param message
     *     the message to log
     * @param args
     *     list of arguments to pass to the formatter
     */
    public static void log(Throwable error, String message, Object... args) {
        if (loggingEnabled) {
            Log.d(TAG, args.length == 0 ? message : String.format(message, args), error);
        }
    }

    /**
     * On Android 7.0+ the procfs filesystem is now mounted with hidepid=2, eliminating access to the
     * /proc/PID directories of other users. There's a group ("readproc") for making exceptions but it's
     * not exposed as a permission. To get a list of processes on Android 7.0+ you must use
     * {@link android.app.usage.UsageStatsManager} or have root access.
     *
     * @return {@code true} if procfs is mounted with hidepid=1 or hidepid=2
     */
    public static boolean isProcessInfoHidden() {
        BufferedReader reader = null;
        try {
            reader = new BufferedReader(new FileReader("/proc/mounts"));
            for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                // /proc/mounts columns: device mount-point fs-type options dump pass
                String[] columns = line.split("\\s+");
                if (columns.length == 6 && columns[1].equals("/proc")) {
                    return columns[3].contains("hidepid=1") || columns[3].contains("hidepid=2");
                }
            }
        } catch (IOException e) {
            Log.d(TAG, "Error reading /proc/mounts. Checking if UID 'readproc' exists.");
        } finally {
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException ignored) {
                    // Best-effort close; nothing useful to do on failure.
                }
            }
        }
        // Fallback heuristic: presence of the "readproc" group implies hidepid is in effect.
        return android.os.Process.getUidForName("readproc") == AID_READPROC;
    }

    /**
     * Lists the contents of /proc, never returning {@code null}.
     * <p>
     * {@link File#listFiles()} returns {@code null} when the directory cannot be read
     * (e.g. restricted SELinux context); the original code would then NPE in the
     * for-each loops below, so we substitute an empty array.
     *
     * @return the entries under /proc, possibly empty
     */
    private static File[] listProcFiles() {
        File[] files = new File("/proc").listFiles();
        return files != null ? files : new File[0];
    }

    /**
     * @return a list of <i>all</i> processes running on the device.
     */
    public static List<AndroidProcess> getRunningProcesses() {
        List<AndroidProcess> processes = new ArrayList<>();
        for (File file : listProcFiles()) {
            if (file.isDirectory()) {
                int pid;
                try {
                    // Process directories are named by their numeric PID; skip everything else.
                    pid = Integer.parseInt(file.getName());
                } catch (NumberFormatException e) {
                    continue;
                }
                try {
                    processes.add(new AndroidProcess(pid));
                } catch (IOException e) {
                    log(e, "Error reading from /proc/%d.", pid);
                    // System apps will not be readable on Android 5.0+ if SELinux is enforcing.
                    // You will need root access or an elevated SELinux context to read all files under /proc.
                }
            }
        }
        return processes;
    }

    /**
     * @return a list of all running app processes on the device.
     */
    public static List<AndroidAppProcess> getRunningAppProcesses() {
        List<AndroidAppProcess> processes = new ArrayList<>();
        for (File file : listProcFiles()) {
            if (file.isDirectory()) {
                int pid;
                try {
                    pid = Integer.parseInt(file.getName());
                } catch (NumberFormatException e) {
                    continue;
                }
                try {
                    processes.add(new AndroidAppProcess(pid));
                } catch (AndroidAppProcess.NotAndroidAppProcessException ignored) {
                    // Not an app process (kernel/system daemon); silently skip.
                } catch (IOException e) {
                    log(e, "Error reading from /proc/%d.", pid);
                    // System apps will not be readable on Android 5.0+ if SELinux is enforcing.
                    // You will need root access or an elevated SELinux context to read all files under /proc.
                }
            }
        }
        return processes;
    }

    /**
     * Get a list of user apps running in the foreground.
     *
     * @param context
     *     the application context
     * @return a list of user apps that are in the foreground.
     */
    public static List<AndroidAppProcess> getRunningForegroundApps(Context context) {
        List<AndroidAppProcess> processes = new ArrayList<>();
        PackageManager pm = context.getPackageManager();
        for (File file : listProcFiles()) {
            if (file.isDirectory()) {
                int pid;
                try {
                    pid = Integer.parseInt(file.getName());
                } catch (NumberFormatException e) {
                    continue;
                }
                try {
                    AndroidAppProcess process = new AndroidAppProcess(pid);
                    if (process.foreground
                        // ignore system processes. First app user starts at 10000.
                        && (process.uid < 1000 || process.uid > 9999)
                        // ignore processes that are not running in the default app process.
                        && !process.name.contains(":")
                        // Ignore processes that the user cannot launch.
                        && pm.getLaunchIntentForPackage(process.getPackageName()) != null) {
                        processes.add(process);
                    }
                } catch (AndroidAppProcess.NotAndroidAppProcessException ignored) {
                    // Not an app process; silently skip.
                } catch (IOException e) {
                    log(e, "Error reading from /proc/%d.", pid);
                    // System apps will not be readable on Android 5.0+ if SELinux is enforcing.
                    // You will need root access or an elevated SELinux context to read all files under /proc.
                }
            }
        }
        return processes;
    }

    /**
     * @return {@code true} if this process is in the foreground.
     */
    public static boolean isMyProcessInTheForeground() {
        try {
            return new AndroidAppProcess(android.os.Process.myPid()).foreground;
        } catch (Exception e) {
            log(e, "Error finding our own process");
        }
        return false;
    }

    /**
     * Returns a list of application processes that are running on the device.
     *
     * <p><b>NOTE:</b> On Lollipop (SDK 22) this does not provide
     * {@link RunningAppProcessInfo#pkgList},
     * {@link RunningAppProcessInfo#importance},
     * {@link RunningAppProcessInfo#lru},
     * {@link RunningAppProcessInfo#importanceReasonCode},
     * {@link RunningAppProcessInfo#importanceReasonComponent},
     * {@link RunningAppProcessInfo#importanceReasonPid},
     * etc. If you need more process information try using
     * {@link #getRunningAppProcesses()} or {@link android.app.usage.UsageStatsManager}</p>
     *
     * @param context
     *     the application context
     * @return a list of RunningAppProcessInfo records, or null if there are no
     *     running processes (it will not return an empty list). This list ordering is not
     *     specified.
     */
    public static List<RunningAppProcessInfo> getRunningAppProcessInfo(Context context) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP_MR1) {
            // On 5.1+ ActivityManager only reports our own process, so synthesize the
            // records from /proc instead.
            List<AndroidAppProcess> runningAppProcesses = AndroidProcesses.getRunningAppProcesses();
            List<RunningAppProcessInfo> appProcessInfos = new ArrayList<>();
            for (AndroidAppProcess process : runningAppProcesses) {
                RunningAppProcessInfo info = new RunningAppProcessInfo(process.name, process.pid, null);
                info.uid = process.uid;
                appProcessInfos.add(info);
            }
            return appProcessInfos;
        }
        ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
        return am.getRunningAppProcesses();
    }

    /** Utility class; not instantiable. */
    /* package */ AndroidProcesses() {
        throw new AssertionError("no instances");
    }

    /**
     * A {@link Comparator} to list processes by name
     */
    public static final class ProcessComparator implements Comparator<AndroidProcess> {

        @Override public int compare(AndroidProcess p1, AndroidProcess p2) {
            return p1.name.compareToIgnoreCase(p2.name);
        }
    }
}
| |
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.voiceengine;
import java.lang.Thread;
import java.nio.ByteBuffer;
import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Process;
import android.util.Log;
class WebRtcAudioTrack {

    private static final boolean DEBUG = false;

    private static final String TAG = "WebRtcAudioTrack";

    // Default audio data format is PCM 16 bit per sample.
    // Guaranteed to be supported by all devices.
    private static final int BITS_PER_SAMPLE = 16;

    // Requested size of each recorded buffer provided to the client.
    private static final int CALLBACK_BUFFER_SIZE_MS = 10;

    // Average number of callbacks per second.
    private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;

    private final Context context;
    // Address of the native WebRTC AudioTrack counterpart, passed back on every JNI call.
    private final long nativeAudioTrack;
    private final AudioManager audioManager;

    // Direct buffer shared with the native layer; (re)allocated in InitPlayout().
    private ByteBuffer byteBuffer;
    private AudioTrack audioTrack = null;
    private AudioTrackThread audioThread = null;

    /**
     * Audio thread which keeps calling AudioTrack.write() to stream audio.
     * Data is periodically acquired from the native WebRTC layer using the
     * nativeGetPlayoutData callback function.
     * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority.
     */
    private class AudioTrackThread extends Thread {

        private volatile boolean keepAlive = true;

        public AudioTrackThread(String name) {
            super(name);
        }

        @Override
        public void run() {
            Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
            Logd("AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());
            try {
                // In MODE_STREAM mode we can optionally prime the output buffer by
                // writing up to bufferSizeInBytes (from constructor) before starting.
                // This priming will avoid an immediate underrun, but is not required.
                // TODO(henrika): initial tests have shown that priming is not required.
                audioTrack.play();
                assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
            } catch (IllegalStateException e) {
                Loge("AudioTrack.play failed: " + e.getMessage());
                return;
            }
            // Fixed size in bytes of each 10ms block of audio data that we ask for
            // using callbacks to the native WebRTC client.
            final int sizeInBytes = byteBuffer.capacity();
            while (keepAlive) {
                // Get 10ms of PCM data from the native WebRTC client. Audio data is
                // written into the common ByteBuffer using the address that was
                // cached at construction.
                nativeGetPlayoutData(sizeInBytes, nativeAudioTrack);
                // Write data until all data has been written to the audio sink.
                // Upon return, the buffer position will have been advanced to reflect
                // the amount of data that was successfully written to the AudioTrack.
                assertTrue(sizeInBytes <= byteBuffer.remaining());
                int bytesWritten = 0;
                if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
                    // Blocking ByteBuffer overload is only available on API 21+.
                    bytesWritten = audioTrack.write(byteBuffer,
                                                    sizeInBytes,
                                                    AudioTrack.WRITE_BLOCKING);
                } else {
                    // NOTE(review): relies on the direct buffer exposing a backing
                    // array on pre-Lollipop devices — verify byteBuffer.hasArray().
                    bytesWritten = audioTrack.write(byteBuffer.array(),
                                                    byteBuffer.arrayOffset(),
                                                    sizeInBytes);
                }
                if (bytesWritten != sizeInBytes) {
                    Loge("AudioTrack.write failed: " + bytesWritten);
                    if (bytesWritten == AudioTrack.ERROR_INVALID_OPERATION) {
                        keepAlive = false;
                    }
                }
                // The byte buffer must be rewinded since byteBuffer.position() is
                // increased at each call to AudioTrack.write(). If we don't do this,
                // next call to AudioTrack.write() will fail.
                byteBuffer.rewind();
                // TODO(henrika): it is possible to create a delay estimate here by
                // counting number of written frames and subtracting the result from
                // audioTrack.getPlaybackHeadPosition().
            }
            try {
                audioTrack.stop();
            } catch (IllegalStateException e) {
                Loge("AudioTrack.stop failed: " + e.getMessage());
            }
            assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_STOPPED);
            audioTrack.flush();
        }

        /** Signals the loop to exit and blocks until the thread has terminated. */
        public void joinThread() {
            keepAlive = false;
            while (isAlive()) {
                try {
                    join();
                } catch (InterruptedException e) {
                    // Ignore.
                }
            }
        }
    }

    WebRtcAudioTrack(Context context, long nativeAudioTrack) {
        Logd("ctor" + WebRtcAudioUtils.getThreadInfo());
        this.context = context;
        this.nativeAudioTrack = nativeAudioTrack;
        audioManager = (AudioManager) context.getSystemService(
            Context.AUDIO_SERVICE);
        if (DEBUG) {
            WebRtcAudioUtils.logDeviceInfo(TAG);
        }
    }

    /**
     * Allocates the shared playout buffer and the AudioTrack.
     * Called from the native layer before StartPlayout().
     *
     * @param sampleRate playout sample rate in Hz
     * @param channels number of playout channels
     * @return a rough delay estimate in milliseconds, or -1 on failure
     */
    private int InitPlayout(int sampleRate, int channels) {
        Logd("InitPlayout(sampleRate=" + sampleRate + ", channels=" +
             channels + ")");
        final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
        // FIX: allocateDirect is static; the original invoked it through the (null)
        // byteBuffer instance reference, which is misleading. Call it on the class.
        byteBuffer = ByteBuffer.allocateDirect(
            bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
        Logd("byteBuffer.capacity: " + byteBuffer.capacity());
        // Rather than passing the ByteBuffer with every callback (requiring
        // the potentially expensive GetDirectBufferAddress) we simply have the
        // the native class cache the address to the memory once.
        nativeCacheDirectBufferAddress(byteBuffer, nativeAudioTrack);
        // Get the minimum buffer size required for the successful creation of an
        // AudioTrack object to be created in the MODE_STREAM mode.
        // Note that this size doesn't guarantee a smooth playback under load.
        // TODO(henrika): should we extend the buffer size to avoid glitches?
        final int minBufferSizeInBytes = AudioTrack.getMinBufferSize(
            sampleRate,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
        Logd("AudioTrack.getMinBufferSize: " + minBufferSizeInBytes);
        assertTrue(audioTrack == null);
        // For the streaming mode, data must be written to the audio sink in
        // chunks of size (given by byteBuffer.capacity()) less than or equal
        // to the total buffer size |minBufferSizeInBytes|.
        assertTrue(byteBuffer.capacity() < minBufferSizeInBytes);
        try {
            // Create an AudioTrack object and initialize its associated audio buffer.
            // The size of this buffer determines how long an AudioTrack can play
            // before running out of data.
            audioTrack = new AudioTrack(AudioManager.STREAM_VOICE_CALL,
                                        sampleRate,
                                        AudioFormat.CHANNEL_OUT_MONO,
                                        AudioFormat.ENCODING_PCM_16BIT,
                                        minBufferSizeInBytes,
                                        AudioTrack.MODE_STREAM);
        } catch (IllegalArgumentException e) {
            Logd(e.getMessage());
            return -1;
        }
        assertTrue(audioTrack.getState() == AudioTrack.STATE_INITIALIZED);
        assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_STOPPED);
        assertTrue(audioTrack.getStreamType() == AudioManager.STREAM_VOICE_CALL);
        // Return a delay estimate in milliseconds given the minimum buffer size.
        // TODO(henrika): improve estimate and use real measurements of total
        // latency instead. We can most likely ignore this value.
        return (1000 * (minBufferSizeInBytes / bytesPerFrame) / sampleRate);
    }

    /** Starts the playout thread. InitPlayout() must have succeeded first. */
    private boolean StartPlayout() {
        Logd("StartPlayout");
        assertTrue(audioTrack != null);
        assertTrue(audioThread == null);
        audioThread = new AudioTrackThread("AudioTrackJavaThread");
        audioThread.start();
        return true;
    }

    /** Stops the playout thread and releases the AudioTrack. */
    private boolean StopPlayout() {
        Logd("StopPlayout");
        assertTrue(audioThread != null);
        audioThread.joinThread();
        audioThread = null;
        if (audioTrack != null) {
            audioTrack.release();
            audioTrack = null;
        }
        return true;
    }

    /** Get max possible volume index for a phone call audio stream. */
    private int GetStreamMaxVolume() {
        Logd("GetStreamMaxVolume");
        assertTrue(audioManager != null);
        return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
    }

    /** Set current volume level for a phone call audio stream. */
    private boolean SetStreamVolume(int volume) {
        Logd("SetStreamVolume(" + volume + ")");
        assertTrue(audioManager != null);
        if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
            // Fixed-volume devices (API 21+) reject volume changes; fail fast.
            if (audioManager.isVolumeFixed()) {
                Loge("The device implements a fixed volume policy.");
                return false;
            }
        }
        audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0);
        return true;
    }

    /** Get current volume level for a phone call audio stream. */
    private int GetStreamVolume() {
        Logd("GetStreamVolume");
        assertTrue(audioManager != null);
        return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
    }

    /** Helper method which throws an exception when an assertion has failed. */
    private static void assertTrue(boolean condition) {
        if (!condition) {
            throw new AssertionError("Expected condition to be true");
        }
    }

    private static void Logd(String msg) {
        Log.d(TAG, msg);
    }

    private static void Loge(String msg) {
        Log.e(TAG, msg);
    }

    private native void nativeCacheDirectBufferAddress(
        ByteBuffer byteBuffer, long nativeAudioRecord);

    private native void nativeGetPlayoutData(int bytes, long nativeAudioRecord);
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tuscany.sdo.helper;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.tuscany.sdo.impl.DynamicDataObjectImpl;
import org.apache.tuscany.sdo.util.DataObjectUtil;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EClassifier;
import org.eclipse.emf.ecore.EModelElement;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.impl.EPackageRegistryImpl;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.emf.ecore.util.ExtendedMetaData;
import org.eclipse.xsd.XSDSchema;
import org.eclipse.xsd.ecore.XSDEcoreBuilder;
import org.eclipse.xsd.util.XSDResourceFactoryImpl;
import org.eclipse.xsd.util.XSDResourceImpl;
import org.xml.sax.InputSource;
import commonj.sdo.Property;
import commonj.sdo.Type;
import commonj.sdo.helper.HelperContext;
import commonj.sdo.helper.TypeHelper;
import commonj.sdo.helper.XSDHelper;
/**
* Provides access to additional information when the Type or Property is
* defined by an XML Schema (XSD). Methods return null/false otherwise or if the
* information is unavailable. Defines Types from an XSD.
*/
public class XSDHelperImpl implements XSDHelper {
protected boolean extensibleNamespaces = false;
protected HelperContext helperContext;
protected SDOXSDEcoreBuilder nondelegatingEcoreBuilder = null;
protected HashMap tcclToEcoreBuilderMap = null;
private ExtendedMetaData extendedMetaData;
public XSDHelperImpl(HelperContext hc, String redefineBuiltIn, boolean extensibleNamespaces) {
this.helperContext = hc;
this.extensibleNamespaces = extensibleNamespaces;
extendedMetaData = ((HelperContextImpl)helperContext).extendedMetaData;
SDOXSDEcoreBuilder ecoreBuilder = createEcoreBuilder();
if (extendedMetaData instanceof SDOExtendedMetaDataImpl && ((SDOExtendedMetaDataImpl)extendedMetaData)
.getRegistry() instanceof EPackageRegistryImpl.Delegator) {
tcclToEcoreBuilderMap = new HashMap();
putTCCLEcoreBuilder(ecoreBuilder);
} else {
nondelegatingEcoreBuilder = ecoreBuilder;
}
if (redefineBuiltIn != null) { // Redefining/regenerating this built-in
// model
ecoreBuilder.getTargetNamespaceToEPackageMap().remove(redefineBuiltIn);
}
}
public XSDHelperImpl(HelperContext hc) {
this(hc, null, false);
}
/**
* Redefine/regenerating the built-in model
* @param redefineBuiltIn
*/
public void setRedefineBuiltIn(String redefineBuiltIn) {
if (redefineBuiltIn != null) {
getEcoreBuilder().getTargetNamespaceToEPackageMap().remove(redefineBuiltIn);
}
}
public void setExtensibleNamespaces(boolean extensibleNamespaces) {
this.extensibleNamespaces = extensibleNamespaces;
}
protected SDOXSDEcoreBuilder createEcoreBuilder() {
SDOXSDEcoreBuilder ecoreBuilder = new SDOXSDEcoreBuilder(extendedMetaData, extensibleNamespaces);
// Add the built-in models to the targetNamespaceToEPackageMap so they
// can't be (re)defined/overridden
for (Iterator iter = TypeHelperImpl.getBuiltInModels().iterator(); iter.hasNext();) {
EPackage ePackage = (EPackage)iter.next();
ecoreBuilder.getTargetNamespaceToEPackageMap().put(ePackage.getNsURI(), ePackage);
}
return ecoreBuilder;
}
protected void putTCCLEcoreBuilder(XSDEcoreBuilder ecoreBuilder) {
ClassLoader tccl = Thread.currentThread().getContextClassLoader();
if (tcclToEcoreBuilderMap.get(tccl) == null) {
tcclToEcoreBuilderMap.put(tccl, ecoreBuilder);
}
}
protected SDOXSDEcoreBuilder getEcoreBuilder() {
if (nondelegatingEcoreBuilder != null)
return nondelegatingEcoreBuilder;
SDOXSDEcoreBuilder result = null;
try {
for (ClassLoader tccl = Thread.currentThread().getContextClassLoader(); tccl != null; tccl =
tccl.getParent()) {
result = (SDOXSDEcoreBuilder)tcclToEcoreBuilderMap.get(tccl);
if (result != null)
return result;
} // for
} catch (SecurityException exception) {
// exception.printStackTrace();
}
result = createEcoreBuilder();
putTCCLEcoreBuilder(result);
return result;
}
public String getLocalName(Type type) {
return extendedMetaData.getName((EClassifier)type);
}
public String getLocalName(Property property) {
return extendedMetaData.getName((EStructuralFeature)property);
}
public String getNamespaceURI(Property property) {
return extendedMetaData.getNamespace((EStructuralFeature)property);
}
public boolean isAttribute(Property property) {
return extendedMetaData.getFeatureKind((EStructuralFeature)property) == ExtendedMetaData.ATTRIBUTE_FEATURE;
}
public boolean isElement(Property property) {
return extendedMetaData.getFeatureKind((EStructuralFeature)property) == ExtendedMetaData.ELEMENT_FEATURE;
}
public boolean isMixed(Type type) {
if (type instanceof EClass) {
return extendedMetaData.getContentKind((EClass)type) == ExtendedMetaData.MIXED_CONTENT;
} else {
return false;
}
}
public boolean isXSD(Type type) {
return ((EModelElement)type).getEAnnotation(ExtendedMetaData.ANNOTATION_URI) != null;
}
public Property getGlobalProperty(String uri, String propertyName, boolean isElement) {
if (isElement) {
return (Property)extendedMetaData.getElement(uri, propertyName);
} else {
return (Property)extendedMetaData.getAttribute(uri, propertyName);
}
}
public String getAppinfo(Type type, String source) {
return getAppinfo((EModelElement)type, source);
}
public String getAppinfo(Property property, String source) {
return getAppinfo((EModelElement)property, source);
}
protected String getAppinfo(EModelElement eModelElement, String source) {
return (String)eModelElement.getEAnnotation(source).getDetails().get("appinfo");
}
public List /* Type */define(String xsd) {
InputStream inputStream = new ByteArrayInputStream(xsd.getBytes());
return define(inputStream, "*.xsd");
}
public List /* Type */define(Reader xsdReader, String schemaLocation) {
InputSource inputSource = new InputSource(xsdReader);
return define(inputSource, schemaLocation);
}
public List /* Type */define(InputStream xsdInputStream, String schemaLocation) {
InputSource inputSource = new InputSource(xsdInputStream);
return define(inputSource, schemaLocation);
}
protected List /* Type */define(InputSource inputSource, String schemaLocation) {
try {
SDOXSDEcoreBuilder ecoreBuilder = getEcoreBuilder();
ResourceSet resourceSet = ecoreBuilder.createResourceSet();
Resource model =
resourceSet.createResource(URI.createURI(schemaLocation != null ? schemaLocation : "null.xsd"));
((XSDResourceImpl)model).load(inputSource, null);
List newTypes = new ArrayList();
for (Iterator schemaIter = model.getContents().iterator(); schemaIter.hasNext();) {
XSDSchema schema = (XSDSchema)schemaIter.next();
String targetNamespace = schema.getTargetNamespace();
EPackage ePackage = extendedMetaData.getPackage(targetNamespace);
if (extensibleNamespaces || ePackage == null || TypeHelperImpl.getBuiltInModels().contains(ePackage)) {
Map targetNamespaceToEPackageMap = ecoreBuilder.getTargetNamespaceToEPackageMap();
targetNamespaceToEPackageMap.remove(targetNamespace);
Collection originalEPackages = new HashSet(targetNamespaceToEPackageMap.values());
ecoreBuilder.generate(schema);
Collection newEPackages = ecoreBuilder.getTargetNamespaceToEPackageMap().values();
for (Iterator iter = newEPackages.iterator(); iter.hasNext();) {
EPackage currentPackage = (EPackage)iter.next();
if (!originalEPackages.contains(currentPackage)) {
currentPackage.setEFactoryInstance(new DynamicDataObjectImpl.FactoryImpl());
EcoreUtil.freeze(currentPackage);
newTypes.addAll(currentPackage.getEClassifiers());
}
}
}
}
return newTypes;
} catch (Exception e) {
e.printStackTrace();
throw new IllegalArgumentException(e.getMessage());
}
}
public String generate(List /* Type */types) throws IllegalArgumentException {
return generate(types, new Hashtable());
}
/**
 * Generates XML Schema text for the given SDO types.
 * <p>
 * Each type is fed through a {@code SchemaBuilder}; the resulting
 * {@code XSDSchema} instances are then serialized with an identity XSLT
 * transform (indented, no XML declaration) and concatenated into a single
 * string.
 *
 * @param types                     the SDO {@code Type} instances to render
 * @param namespaceToSchemaLocation maps target namespaces to schemaLocation
 *                                  values used when emitting imports/includes
 * @return the generated schema text, or "" when {@code types} is null/empty
 * @throws IllegalArgumentException if any step of schema building or
 *                                  serialization fails (cause is preserved)
 */
public String generate(List /* Type */types, Map /* String, String */namespaceToSchemaLocation)
    throws IllegalArgumentException {
    if (types != null && !types.isEmpty()) {
        Hashtable schemaMap = new Hashtable();
        Hashtable nsPrefixMap = new Hashtable();
        TypeTable typeTable = new TypeTable();
        SchemaBuilder schemaBuilder =
            new SchemaBuilder(schemaMap, nsPrefixMap, typeTable, namespaceToSchemaLocation);
        Iterator iterator = types.iterator();
        Type dataType = null;
        try {
            // Build one schema per target namespace; schemaMap collects them.
            while (iterator.hasNext()) {
                dataType = (Type)iterator.next();
                schemaBuilder.buildSchema(dataType);
            }
            XSDSchema xmlSchema = null;
            iterator = schemaMap.values().iterator();
            StringWriter writer = new StringWriter();
            // Identity transform used purely for DOM -> text serialization.
            TransformerFactory transformerFactory = TransformerFactory.newInstance();
            Transformer transformer = transformerFactory.newTransformer();
            transformer.setOutputProperty(OutputKeys.INDENT, "yes");
            transformer.setOutputProperty(OutputKeys.METHOD, "xml");
            transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
            while (iterator.hasNext()) {
                xmlSchema = (XSDSchema)iterator.next();
                // Lazily materialize the DOM element if it was never built.
                if (xmlSchema.getElement() == null) {
                    xmlSchema.updateElement();
                }
                transformer.transform(new DOMSource(xmlSchema.getElement().getOwnerDocument()),
                                      new StreamResult(writer));
            }
            writer.close();
            return writer.getBuffer().toString();
        } catch (Exception e) {
            // Chain the cause so callers can see the underlying failure.
            throw new IllegalArgumentException(e.getMessage(), e);
        }
    } else {
        // No SDO types supplied: nothing to generate.
        return "";
    }
}
/**
 * Returns the {@link HelperContext} this generator is associated with.
 *
 * @return the helper context held by this instance
 */
public HelperContext getHelperContext() {
    return helperContext;
}
}
| |
/**
* Copyright (C) 2012 Ness Computing, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.opentable.jackson;
import java.text.DateFormat;
import java.util.Set;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.util.StdDateFormat;
import com.fasterxml.jackson.datatype.guava.GuavaModule;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import com.fasterxml.jackson.module.afterburner.AfterburnerModule;
import com.fasterxml.jackson.module.mrbean.MrBeanModule;
import com.fasterxml.jackson.module.paramnames.ParameterNamesModule;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class OpenTableJacksonConfiguration
{
public enum JacksonTimeFormat {
MILLIS, ISO8601;
}
@Value("${ot.jackson.time-format:ISO8601}")
JacksonTimeFormat timeFormat = JacksonTimeFormat.ISO8601;
@Value("${ot.jackson.afterburner:#{false}}")
private boolean enableAfterBurner;
@Value("${ot.jackson.mrbean:#{false}}")
private boolean enableMrBean;
@Value("${ot.jackson.relaxed-parser:#{false}}")
private boolean relaxedParser = false;
@Value("${ot.jackson.register-module-beans:#{true}}")
private boolean registerModuleBeans;
@Autowired
private ObjectProvider<Module> modules = null;
@Autowired(required = false)
private Set<OpenTableJacksonCustomizer> customizerSet;
// See https://github.com/FasterXML/jackson-databind/issues/2643 for why the custom dateformat
private DateFormat dateFormat = new StdDateFormat().withColonInTimeZone(false);
@Bean
public ObjectMapper objectMapper() {
ObjectMapper mapper = new ObjectMapper().setDateFormat(dateFormat);
mapper.registerModules( guavaModule(),
javaTimeModule(),
jdk8Module(),
parameterNamesModule());
if (enableMrBean) {
mapper.registerModule(mrBeanModule());
}
if (enableAfterBurner) {
mapper.registerModule(afterburnerModule());
}
if (registerModuleBeans && (modules != null)) {
mapper.registerModules(modules);
}
// This needs to be set, otherwise the mapper will fail on every new property showing up.
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
// Don't write out nulls by default -- if you really want them, you can change it with setOptions later.
//MJB: old deprecated version
//mapper.configure(SerializationFeature.WRITE_NULL_MAP_VALUES, false);
mapper.configOverride(Map.class).setInclude(JsonInclude.Value.construct(Include.NON_NULL, Include.NON_NULL));
// No need to flush after every value, which cuts throughput by ~30%
mapper.configure(SerializationFeature.FLUSH_AFTER_WRITE_VALUE, false);
// Awful JAXB shit
mapper.configure(MapperFeature.USE_GETTERS_AS_SETTERS, false);
switch(timeFormat) {
case MILLIS:
mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, Boolean.TRUE);
break;
case ISO8601:
mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, Boolean.FALSE);
break;
default:
throw new IllegalStateException("Unknown time format: " +timeFormat);
}
// by default, don't serialize null values.
mapper.setSerializationInclusion(Include.NON_NULL);
// Relaxed parsing
if (relaxedParser) {
// Single quotes
mapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
// Unquoted field names
mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
}
if (customizerSet != null && !customizerSet.isEmpty()) {
customizerSet.forEach(customizer -> {
customizer.accept(mapper);
});
}
return mapper;
}
GuavaModule guavaModule() {
return new GuavaModule();
}
JavaTimeModule javaTimeModule() {
return new JavaTimeModule();
}
MrBeanModule mrBeanModule() {
return new MrBeanModule();
}
AfterburnerModule afterburnerModule() {
return new AfterburnerModule();
}
Jdk8Module jdk8Module() {
return new Jdk8Module();
}
ParameterNamesModule parameterNamesModule() {
return new ParameterNamesModule();
}
public OpenTableJacksonConfiguration setEnableAfterBurner(final boolean enableAfterBurner) {
this.enableAfterBurner = enableAfterBurner;
return this;
}
public OpenTableJacksonConfiguration setEnableMrBean(final boolean enableMrBean) {
this.enableMrBean = enableMrBean;
return this;
}
public OpenTableJacksonConfiguration setRelaxedParser(final boolean relaxedParser) {
this.relaxedParser = relaxedParser;
return this;
}
public OpenTableJacksonConfiguration setTimeFormat(final JacksonTimeFormat timeFormat) {
this.timeFormat = timeFormat;
return this;
}
public OpenTableJacksonConfiguration setDateFormat(final DateFormat dateFormat) {
this.dateFormat = dateFormat;
return this;
}
public OpenTableJacksonConfiguration setRegisterModuleBeans(boolean registerModuleBeans) {
this.registerModuleBeans = registerModuleBeans;
return this;
}
public OpenTableJacksonConfiguration setModules(ObjectProvider<Module> modules) {
this.modules = modules;
return this;
}
public OpenTableJacksonConfiguration setCustomizerSet(Set<OpenTableJacksonCustomizer> customizerSet) {
this.customizerSet = customizerSet;
return this;
}
// Encourage folks to use this version instead of new OpenTableJacksonConfiguration which
// places limitations on the customization choices we can make
public static ObjectMapper testing() {
return new OpenTableJacksonConfiguration().objectMapper();
}
}
| |
package org.apache.maven.scm.provider.svn.svnjava;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.scm.CommandParameters;
import org.apache.maven.scm.ScmException;
import org.apache.maven.scm.ScmFileSet;
import org.apache.maven.scm.ScmVersion;
import org.apache.maven.scm.command.list.ListScmResult;
import org.apache.maven.scm.command.remoteinfo.RemoteInfoScmResult;
import org.apache.maven.scm.provider.ScmProviderRepository;
import org.apache.maven.scm.provider.svn.AbstractSvnScmProvider;
import org.apache.maven.scm.provider.svn.command.SvnCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.add.SvnJavaAddCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.blame.SvnJavaBlameCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.branch.SvnJavaBranchCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.changelog.SvnJavaChangeLogCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.checkin.SvnJavaCheckInCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.checkout.SvnJavaCheckOutCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.diff.SvnJavaDiffCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.export.SvnJavaExportCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.info.SvnJavaInfoCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.list.SvnJavaListCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.mkdir.SvnJavaMkdirCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.remoteinfo.SvnJavaRemoteInfoCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.remove.SvnJavaRemoveCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.status.SvnJavaStatusCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.tag.SvnTagCommand;
import org.apache.maven.scm.provider.svn.svnjava.command.update.SvnJavaUpdateCommand;
import org.apache.maven.scm.provider.svn.svnjava.repository.SvnJavaScmProviderRepository;
import org.apache.maven.scm.repository.ScmRepository;
import org.apache.maven.scm.repository.ScmRepositoryException;
import org.codehaus.plexus.util.StringUtils;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.internal.io.dav.DAVRepositoryFactory;
import org.tmatesoft.svn.core.internal.io.fs.FSRepositoryFactory;
import org.tmatesoft.svn.core.internal.io.svn.SVNRepositoryFactoryImpl;
import java.io.File;
import java.net.URI;
import java.util.Collections;
/**
* @author <a href="mailto:dh-maven@famhq.com">David Hawkins</a>
* @version $Id: SvnJavaScmProvider.java 439 2010-06-19 17:21:10Z oliver.lamy $
* @plexus.component role="org.apache.maven.scm.provider.ScmProvider" role-hint="javasvn"
*/
public class SvnJavaScmProvider
    extends AbstractSvnScmProvider
{
    public static final String COMMAND_LINE = "JavaSVN Library";
    // One-time SVNKit setup flag; only read/written inside the synchronized
    // initialize(), so the class lock guards it.
    private static boolean initialized = false;
    /**
     * {@inheritDoc}
     */
    public ScmProviderRepository makeProviderScmRepository( String scmSpecificUrl, char delimiter )
        throws ScmRepositoryException
    {
        initialize();
        try
        {
            File f;
            // "file..." URLs may be a file: URI or a plain path; try the URI
            // form first and fall back to treating the string as a path.
            if ( scmSpecificUrl.trim().startsWith( "file" ) )
            {
                try
                {
                    f = new File( new URI( scmSpecificUrl ) );
                }
                catch ( Exception e )
                {
                    // Not a parseable URI; use the raw string as a path.
                    f = new File( scmSpecificUrl );
                }
            }
            else
            {
                f = new File( scmSpecificUrl );
            }
            SVNURL url;
            if ( f.exists() )
            {
                url = SVNURL.fromFile( f );
            }
            else
            {
                url = SVNURL.parseURIEncoded( scmSpecificUrl );
            }
            // The existing svn provider pattern is to strip the username (if any)
            // from the url.
            String strUrl = url.toString();
            if ( url.getUserInfo() != null )
            {
                strUrl = StringUtils.replace( strUrl, url.getUserInfo() + "@", "" );
            }
            return new SvnJavaScmProviderRepository( url, strUrl );
        }
        catch ( SVNException e )
        {
            throw new ScmRepositoryException( "The scm url is invalid: " + e.getMessage(),
                                              Collections.singletonList( e.getMessage() ) );
        }
    }
    /**
     * {@inheritDoc}
     */
    protected SvnCommand getAddCommand()
    {
        return new SvnJavaAddCommand();
    }
    /**
     * {@inheritDoc}
     */
    protected SvnCommand getChangeLogCommand()
    {
        return new SvnJavaChangeLogCommand();
    }
    /**
     * {@inheritDoc}
     */
    protected SvnCommand getCheckInCommand()
    {
        return new SvnJavaCheckInCommand();
    }
    /**
     * {@inheritDoc}
     */
    protected SvnCommand getCheckOutCommand()
    {
        return new SvnJavaCheckOutCommand();
    }
    /**
     * {@inheritDoc}
     */
    protected SvnCommand getDiffCommand()
    {
        return new SvnJavaDiffCommand();
    }
    /**
     * {@inheritDoc}
     */
    protected SvnCommand getRemoveCommand()
    {
        return new SvnJavaRemoveCommand();
    }
    /**
     * {@inheritDoc}
     */
    protected SvnCommand getStatusCommand()
    {
        return new SvnJavaStatusCommand();
    }
    /**
     * {@inheritDoc}
     */
    protected SvnCommand getTagCommand()
    {
        return new SvnTagCommand();
    }
    /**
     * {@inheritDoc}
     */
    protected SvnCommand getUpdateCommand()
    {
        return new SvnJavaUpdateCommand();
    }
    /**
     * {@inheritDoc}
     */
    protected SvnCommand getBranchCommand()
    {
        return new SvnJavaBranchCommand();
    }
    /**
     * {@inheritDoc}
     */
    protected SvnCommand getExportCommand()
    {
        return new SvnJavaExportCommand();
    }
    /**
     * {@inheritDoc}
     */
    protected SvnCommand getInfoCommand()
    {
        return new SvnJavaInfoCommand();
    }
    /**
     * {@inheritDoc}
     * @since 2.0.6
     */
    protected SvnCommand getListCommand()
    {
        return new SvnJavaListCommand();
    }
    @Override
    protected SvnCommand getBlameCommand()
    {
        return new SvnJavaBlameCommand();
    }
    /**
     * {@inheritDoc}
     */
    protected String getRepositoryURL( File path )
        throws ScmException
    {
        try
        {
            return SVNURL.fromFile( path ).getURIEncodedPath();
        }
        catch ( SVNException e )
        {
            // Chain the SVNException so the underlying failure is not lost.
            throw new IllegalArgumentException( e.getMessage(), e );
        }
    }
    /**
     * Initializes the library to work with a repository either via svn:// (and
     * svn+ssh://) or via http:// (and https://).
     * Synchronized so concurrent first calls cannot race on the setup of the
     * SVNKit repository factories.
     */
    private static synchronized void initialize()
    {
        if ( initialized )
        {
            return;
        }
        /*
         * for DAV (over http and https)
         */
        DAVRepositoryFactory.setup();
        /*
         * for svn (over svn and svn+ssh)
         */
        SVNRepositoryFactoryImpl.setup();
        /*
         * for file
         */
        FSRepositoryFactory.setup();
        initialized = true;
    }
    @Override
    protected SvnCommand getMkdirCommand()
    {
        return new SvnJavaMkdirCommand();
    }
    @Override
    public RemoteInfoScmResult remoteInfo( ScmProviderRepository repository, ScmFileSet fileSet,
                                           CommandParameters parameters )
        throws ScmException
    {
        return new SvnJavaRemoteInfoCommand().executeRemoteInfoCommand( repository, fileSet, parameters );
    }
    @Override
    public boolean remoteUrlExist( ScmProviderRepository repository, CommandParameters parameters )
        throws ScmException
    {
        return new SvnJavaRemoteInfoCommand().remoteUrlExist( repository, parameters );
    }
}
| |
package org.nutz.dao.impl.sql;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.nutz.dao.Condition;
import org.nutz.dao.entity.Entity;
import org.nutz.dao.impl.sql.pojo.AbstractPItem;
import org.nutz.dao.impl.sql.pojo.StaticPItem;
import org.nutz.dao.jdbc.Jdbcs;
import org.nutz.dao.jdbc.ValueAdaptor;
import org.nutz.dao.pager.Pager;
import org.nutz.dao.sql.DaoStatement;
import org.nutz.dao.sql.PItem;
import org.nutz.dao.sql.Sql;
import org.nutz.dao.sql.SqlCallback;
import org.nutz.dao.sql.VarIndex;
import org.nutz.dao.sql.VarSet;
import org.nutz.dao.util.Pojos;
import org.nutz.lang.Each;
import org.nutz.lang.Lang;
import org.nutz.lang.Strings;
/**
 * Customized-SQL implementation of {@link Sql}. The source statement is
 * parsed once into an ordered list of items: $var placeholders are spliced
 * directly into the SQL text, @param placeholders become JDBC "?" parameters,
 * and everything else is static text. Batch execution is supported by
 * accumulating one {@link VarSet} per row.
 */
public class NutSql extends NutStatement implements Sql {
    protected String sourceSql;
    // Values substituted into the SQL text itself ($var placeholders).
    protected VarSet vars;
    // One VarSet per batch row; rows.get(0) drives SQL shape and adaptors.
    protected List<VarSet> rows;
    // Parameter values (@param placeholders) of the current batch row.
    protected VarSet params;
    protected SqlCallback callback;
    protected VarIndex varIndex;
    protected VarIndex paramIndex;
    // User-registered adaptors keyed by parameter name; these override the
    // adaptor otherwise inferred from the parameter value.
    protected Map<String, ValueAdaptor> customValueAdaptor;
    // Parsed statement: static text, $var and @param items in source order.
    protected List<PItem> items;
    public NutSql(String source) {
        this(source, null);
    }
    public NutSql(String source, SqlCallback callback) {
        this.setSourceSql(source);
        this.callback = callback;
        this.vars = new SimpleVarSet();
        this.rows = new ArrayList<VarSet>();
        this.params = new SimpleVarSet();
        this.rows.add(params);
        customValueAdaptor = new HashMap<String, ValueAdaptor>();
    }
    /**
     * Parses the source SQL and rebuilds the item list: each position named
     * in varIndex becomes a SqlVarPItem, each in paramIndex a SqlParamPItem,
     * and every remaining segment stays as static text.
     */
    public void setSourceSql(String sql) {
        this.sourceSql = sql;
        SqlLiteral literal = new SqlLiteral().valueOf(sql);
        this.varIndex = literal.getVarIndexes();
        this.paramIndex = literal.getParamIndexes();
        // Keep an explicitly set statement type; otherwise infer from the SQL.
        if (getSqlType() == null)
            setSqlType(literal.getType());
        String[] ss = literal.stack.cloneChain();
        PItem[] tmp = new PItem[ss.length];
        for (String var : varIndex.getOrders()) {
            int[] is = varIndex.indexesOf(var);
            if (is != null) {
                for (int i : is) {
                    tmp[i] = new SqlVarPItem(var);
                }
            }
        }
        for (String param : paramIndex.getOrders()) {
            int[] is = paramIndex.indexesOf(param);
            if (is != null) {
                for (int i : is) {
                    tmp[i] = new SqlParamPItem(param);
                }
            }
        }
        // Whatever was not claimed by a var/param is plain SQL text.
        for (int i = 0; i < tmp.length; i++) {
            if (tmp[i] == null) {
                tmp[i] = new StaticPItem(ss[i], true);
            }
        }
        this.items = Arrays.asList(tmp);
    }
    // Total number of JDBC parameters the current statement will bind.
    protected int _params_count() {
        int count = 0;
        Entity<?> en = getEntity();
        for (PItem item : items) {
            count += item.paramCount(en);
        }
        return count;
    }
    /** Collects one ValueAdaptor per JDBC parameter, in statement order. */
    public ValueAdaptor[] getAdaptors() {
        ValueAdaptor[] adaptors = new ValueAdaptor[_params_count()];
        int i = 0;
        for (PItem item : items)
            i = item.joinAdaptor(getEntity(), adaptors, i);
        return adaptors;
    }
    /**
     * Builds the parameter matrix: one row per batch entry. A trailing empty
     * row (left behind by addBatch() when nothing was set afterwards) is
     * dropped so it does not produce a bogus batch entry.
     */
    public Object[][] getParamMatrix() {
        int pc = _params_count();
        int row_count = rows.size();
        if (rows.size() > 1 && params.size() == 0 && rows.get(0).size() != 0) {
            row_count--;
        }
        Object[][] re = new Object[row_count][pc];
        for (int z = 0; z < row_count; z++) {
            VarSet row = rows.get(z);
            int i = 0;
            for (PItem item : items)
                i = item.joinParams(getEntity(), row, re[z], i);
        }
        return re;
    }
    /** Renders the statement with "?" markers, ready for PreparedStatement. */
    public String toPreparedStatement() {
        StringBuilder sb = new StringBuilder();
        for (PItem item : items)
            item.joinSql(getEntity(), sb);
        return sb.toString();
    }
    public void onBefore(Connection conn) throws SQLException {}
    public void onAfter(Connection conn, ResultSet rs) throws SQLException {
        // Delegate result extraction to the user-supplied callback, if any.
        if (callback != null)
            getContext().setResult(callback.invoke(conn, rs, this));
    }
    public DaoStatement setPager(Pager pager) {
        getContext().setPager(pager);
        return this;
    }
    public VarSet vars() {
        return vars;
    }
    public VarSet params() {
        return params;
    }
    /** Registers a custom adaptor for the named parameter. */
    public void setValueAdaptor(String name, ValueAdaptor adaptor) {
        this.customValueAdaptor.put(name, adaptor);
    }
    public VarIndex varIndex() {
        return varIndex;
    }
    public VarIndex paramIndex() {
        return paramIndex;
    }
    /** Starts a new batch row; subsequent params() calls fill the new row. */
    public void addBatch() {
        params = new SimpleVarSet();
        rows.add(params);
    }
    /** Discards all batch rows and starts over with a single empty row. */
    public void clearBatch() {
        params = new SimpleVarSet();
        rows.clear();
        rows.add(params);
    }
    public Sql setEntity(Entity<?> entity) {
        super.setEntity(entity);
        return this;
    }
    public Sql setCallback(SqlCallback callback) {
        this.callback = callback;
        return this;
    }
    public Sql setCondition(Condition cnd) {
        vars.set("condition", cnd);
        return this;
    }
    /**
     * Creates a fresh NutSql with the same source and callback. Note: vars,
     * params, batch rows and custom adaptors are NOT copied.
     */
    public Sql duplicate() {
        return new NutSql(sourceSql, callback);
    }
    public String getSourceSql() {
        return sourceSql;
    }
    public String toString() {
        return super.toStatement(this.getParamMatrix(), this.toPreparedStatement());
    }
    public String forPrint() {
        return super.toString();
    }
    /** Item for a $var placeholder: its value is spliced into the SQL text. */
    class SqlVarPItem extends AbstractPItem {
        public String name;
        public SqlVarPItem(String name) {
            this.name = name;
        }
        public void joinSql(Entity<?> en, StringBuilder sb) {
            Object val = vars.get(name);
            if (val != null) {
                if (val instanceof PItem) {
                    ((PItem) val).joinSql(en, sb);
                }
                else if (val instanceof Condition) {
                    sb.append(' ').append(Pojos.formatCondition(en, (Condition) val));
                } else {
                    // Plain values are appended verbatim into the SQL text.
                    sb.append(val);
                }
            }
        }
        public int joinAdaptor(Entity<?> en, ValueAdaptor[] adaptors, int off) {
            Object val = vars.get(name);
            if (val != null) {
                if (val instanceof PItem) {
                    return ((PItem) val).joinAdaptor(en, adaptors, off);
                }
            }
            // Non-PItem vars contribute no JDBC parameters.
            return off;
        }
        public int paramCount(Entity<?> en) {
            Object val = vars.get(name);
            if (val != null) {
                if (val instanceof PItem) {
                    return ((PItem) val).paramCount(en);
                }
            }
            return 0;
        }
        public int joinParams(Entity<?> en, Object obj, Object[] params, int off) {
            Object val = vars.get(name);
            if (val != null) {
                if (val instanceof PItem) {
                    return ((PItem) val).joinParams(en, obj, params, off);
                }
            }
            return off;
        }
    }
    /** Item for an @param placeholder: bound as a JDBC "?" parameter. */
    class SqlParamPItem extends AbstractPItem {
        public String name;
        public SqlParamPItem(String name) {
            this.name = name;
        }
        public void joinSql(Entity<?> en, StringBuilder sb) {
            // The first row decides the SQL shape (e.g. how many "?" an
            // array expands to); later batch rows must match it.
            Object val = rows.get(0).get(name);
            if (val == null) {
                sb.append("?");
            } else if (val instanceof PItem) {
                ((PItem) val).joinSql(en, sb);
            } else if (val.getClass().isArray()) {
                // One "?" per element. Guard the empty array: previously
                // setLength(-1) would chop a character belonging to the
                // preceding SQL text when the array had no elements.
                int len = Lang.length(val);
                if (len > 0) {
                    sb.append(Strings.dup("?,", len));
                    sb.setLength(sb.length() - 1);
                }
            } else if (val instanceof Condition) {
                sb.append(' ').append(Pojos.formatCondition(en, (Condition) val));
            } else {
                sb.append("?");
            }
        }
        public int joinAdaptor(final Entity<?> en, final ValueAdaptor[] adaptors, final int off) {
            // A user-registered adaptor wins over value-based inference.
            if (!customValueAdaptor.isEmpty()) {
                ValueAdaptor custom = customValueAdaptor.get(name);
                if (custom != null) {
                    adaptors[off] = custom;
                    return off + 1;
                }
            }
            Object val = rows.get(0).get(name);
            if (val == null) {
                adaptors[off] = Jdbcs.getAdaptorBy(null);
                return off + 1;
            } else if (val instanceof PItem) {
                return ((PItem) val).joinAdaptor(en, adaptors, off);
            } else if (val.getClass().isArray()) {
                // One adaptor per element, chosen from each element's type.
                int len = Lang.length(val);
                Lang.each(val, new Each<Object>() {
                    public void invoke(int index, Object ele, int length) {
                        adaptors[off + index] = Jdbcs.getAdaptorBy(ele);
                    }
                });
                return off + len;
            // } else if (val instanceof Condition) {
            } else {
                adaptors[off] = Jdbcs.getAdaptorBy(val);
                return off + 1;
            }
        }
        public int joinParams(Entity<?> en, Object obj, final Object[] params, final int off) {
            VarSet row = (VarSet) obj;
            Object val = row.get(name);
            if (val == null) {
                // Slot stays null; the adaptor handles the SQL NULL binding.
                return off + 1;
            } else if (val instanceof PItem) {
                return ((PItem) val).joinParams(en, null, params, off);
            } else if (val.getClass().isArray()) {
                int len = Lang.length(val);
                Lang.each(val, new Each<Object>() {
                    public void invoke(int index, Object ele, int length) {
                        params[off + index] = ele;
                    }
                });
                return off + len;
            // } else if (val instanceof Condition) {
            } else {
                params[off] = val;
                return off + 1;
            }
        }
        public int paramCount(Entity<?> en) {
            Object val = rows.get(0).get(name);
            if (val == null) {
                return 1;
            } else if (val instanceof PItem) {
                return ((PItem) val).paramCount(en);
            } else if (val.getClass().isArray()) {
                return Lang.length(val);
            } else if (val instanceof Condition) {
                // Conditions are rendered into the SQL text, not bound.
                return 0;
            } else {
                return 1;
            }
        }
    }
    // Re-parses the source SQL; package-private helper for diagnostics/tests.
    SqlLiteral literal() {
        return new SqlLiteral().valueOf(sourceSql);
    }
}
| |
package backend.ir;
/*
* Extremely Compiler Collection
* Copyright (c) 2015-2020, Jianping Zeng.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
import backend.support.LLVMContext;
import backend.transform.utils.ConstantFolder;
import backend.type.Type;
import backend.value.*;
import backend.value.Instruction.*;
import backend.value.Instruction.CmpInst.Predicate;
import tools.Util;
import java.util.ArrayList;
import java.util.LinkedList;
import static backend.value.Instruction.CmpInst.Predicate.*;
/**
* This file defines a class "IRBuilder" that responsible for converting each
* kind of AST node (Expression or Statement) into LLVM IR code. For instance,
* converting all of statement (except for expression statement) into control
* flow IR, e.g. br, icmp, fcmp, goto etc. In the another greatly important aspect,
* the handling of Expression is the focus. We uses some arithmetic, logical,
* comparison, shift-wise operation to accompolish this purpose.
*
* @author Jianping Zeng
* @version 0.4
*/
public class IRBuilder {
/**
 * The basic block where all instruction will be inserted.
 */
private BasicBlock curBB;
// Insertion cursor inside curBB; null means "append at the end of curBB".
private Instruction insertPtr;
// The owning LLVM context handed in at construction.
private LLVMContext context;
public IRBuilder(LLVMContext ctx) {
    super();
    context = ctx;
}
public LLVMContext getLLVMContext() { return context; }
// NOTE(review): this constructor leaves `context` null — presumably callers
// using it never call getLLVMContext(); confirm.
public IRBuilder(BasicBlock bb) {
    setInsertPoint(bb);
}
/**
 * Positions the builder at the end of {@code insertPoint}: subsequently
 * created instructions are appended to that block.
 */
public void setInsertPoint(BasicBlock insertPoint) {
    curBB = insertPoint;
    // null indicates the new instruction should be inserted at the end of block.
    insertPtr = null;
    //Util.assertion(insertPtr != null, "Can not set an empty BasicBlock as insert point");
}
/**
 * Positions the builder immediately before {@code pos}, which must belong
 * to {@code theBB} (asserted).
 */
public void setInsertPoint(BasicBlock theBB, Instruction pos) {
    curBB = theBB;
    Util.assertion(pos.getParent().equals(theBB));
    insertPtr = pos;
}
// Inserts the instruction at the current insertion point and returns it.
private <InstTy extends Instruction> InstTy insert(InstTy inst) {
    insertHelper(inst, curBB, insertPtr);
    return inst;
}
// Same as insert(inst), but also assigns the given name to the instruction.
private <InstTy extends Instruction> InstTy insert(InstTy inst, String name) {
    insertHelper(inst, curBB, insertPtr);
    inst.setName(name);
    return inst;
}
// Appends to the block when no cursor is set; otherwise inserts before it.
private <InstTy extends Instruction> void insertHelper(
    InstTy inst,
    BasicBlock bb,
    Instruction insertPtr) {
    if (insertPtr == null)
        bb.appendInst(inst);
    else
        bb.insertBefore(inst, insertPtr);
}
/** Returns the block currently receiving new instructions (may be null). */
public BasicBlock getInsertBlock() {
    return curBB;
}
/** Returns the instruction new instructions go before, or null for append. */
public Instruction getInsertPoint() {
    return insertPtr;
}
/**
 * Clears the current insertion point so newly created instructions will not
 * be inserted into any block until a new insert point is set.
 */
public void clearInsertPoint() {
    curBB = null;
}
//============================================================//
// Cast instruction. //
//============================================================//
// The helpers below are thin wrappers over createCast, each fixing one
// LLVM cast opcode.
public Value createTrunc(Value val, Type destType, String name) {
    return createCast(Operator.Trunc, val, destType, name);
}
public Value createZExt(Value val, Type destType, String name) {
    return createCast(Operator.ZExt, val, destType, name);
}
public Value createSExt(Value val, Type destType, String name) {
    return createCast(Operator.SExt, val, destType, name);
}
public Value createFPToUI(Value val, Type destType, String name) {
    return createCast(Operator.FPToUI, val, destType, name);
}
public Value createFPToSI(Value val, Type destType, String name) {
    return createCast(Operator.FPToSI, val, destType, name);
}
public Value createUIToFP(Value val, Type destType, String name) {
    return createCast(Operator.UIToFP, val, destType, name);
}
public Value createSIToFP(Value val, Type destType, String name) {
    return createCast(Operator.SIToFP, val, destType, name);
}
public Value createFPTrunc(Value val, Type destType, String name) {
    return createCast(Operator.FPTrunc, val, destType, name);
}
public Value createFPExt(Value val, Type destType, String name) {
    return createCast(Operator.FPExt, val, destType, name);
}
public Value createPtrToInt(Value val, Type destType, String name) {
    return createCast(Operator.PtrToInt, val, destType, name);
}
public Value createIntToPtr(Value val, Type destType, String name) {
    return createCast(Operator.IntToPtr, val, destType, name);
}
public Value createBitCast(Value val, Type destType, String name) {
    return createCast(Operator.BitCast, val, destType, name);
}
/** Unnamed variant of {@link #createIntCast(Value, Type, boolean, String)}. */
public Value createIntCast(Value value, backend.type.Type destTy,
                           boolean isSigned) {
    return createIntCast(value, destTy, isSigned, "");
}
/**
 * Creates an integer cast (trunc/ext chosen by CastInst) from {@code value}
 * to {@code destTy}; returns {@code value} unchanged when the types already
 * match. Constant operands are not folded yet (see TODO).
 */
public Value createIntCast(Value value, backend.type.Type destTy,
                           boolean isSigned, String name) {
    // if the type of source is equal to destination type
    // just return original value.
    if (value.getType() == destTy)
        return value;
    if (value instanceof Constant) {
        // TODO make constant folding.
    }
    return insert(CastInst.createIntegerCast(value, destTy, isSigned),
                  name);
}
/**
 * Creates a cast instruction with the given opcode, or returns {@code val}
 * unchanged when the types already match. Constant operands are not folded
 * yet (see TODO).
 */
public Value createCast(Operator op, Value val, Type destType, String name) {
    if (val.getType() == destType)
        return val;
    if (val instanceof Constant) {
        // TODO make constant folding.
    }
    // The (Instruction) null cast selects the "insert before" overload with
    // no insertion position; insert(...) then places the instruction.
    return insert(CastInst.create(op, val, destType, "", (Instruction) null), name);
}
/** Unnamed variant of {@link #createBitCast(Value, Type, String)}. */
public Value createBitCast(Value value, Type destTy) {
    return createBitCast(value, destTy, "");
}
/**
 * create an unconditional branch instruction-'br label X'.
 *
 * @param targetBB the sole successor block to branch to
 * @return the inserted branch instruction
 */
public BranchInst createBr(BasicBlock targetBB) {
    return insert(new BranchInst(targetBB));
}
/**
 * creates a branch instruction, like 'br cond trueBB, falseBB' on the
 * specified condition.
 *
 * @param condVal the i1 condition value
 * @param trueBB  successor taken when the condition is true
 * @param falseBB successor taken when the condition is false
 * @return the inserted conditional branch
 */
public Value createCondBr(Value condVal, BasicBlock trueBB,
                          BasicBlock falseBB) {
    return insert(new BranchInst(trueBB, falseBB, condVal));
}
/**
 * creates a switch instruction with the specified value.
 * default dest, and with a hint for the number of cases that will
 * be added (for efficient allocation). Uses a default hint of 10 cases.
 *
 * @param condV     the value being switched on
 * @param defaultBB destination when no case matches
 * @return the inserted switch instruction (cases added by the caller)
 */
public SwitchInst createSwitch(Value condV, BasicBlock defaultBB) {
    return createSwitch(condV, defaultBB, 10);
}
/** Variant of createSwitch with an explicit expected-case-count hint. */
public SwitchInst createSwitch(Value condV, BasicBlock defaultBB,
                               int numCases) {
    return insert(new SwitchInst(condV, defaultBB, numCases, ""));
}
//===--------------------------------------------------------------------===//
// Instruction creation methods: Compare Instructions
//===--------------------------------------------------------------------===//
// The createICmpXX / createFCmpXX helpers below are thin wrappers that fix
// the predicate and delegate to createICmp / createFCmp respectively.
public Value createICmpEQ(Value lhs, Value rhs, final String name) {
    return createICmp(ICMP_EQ, lhs, rhs, name);
}
public Value createICmpNE(Value lhs, Value rhs, final String name) {
    return createICmp(ICMP_NE, lhs, rhs, name);
}
public Value createICmpUGT(Value lhs, Value rhs, final String name) {
    return createICmp(ICMP_UGT, lhs, rhs, name);
}
public Value createICmpUGE(Value lhs, Value rhs, final String name) {
    return createICmp(ICMP_UGE, lhs, rhs, name);
}
public Value createICmpULT(Value lhs, Value rhs, final String name) {
    return createICmp(ICMP_ULT, lhs, rhs, name);
}
public Value createICmpULE(Value lhs, Value rhs, final String name) {
    return createICmp(ICMP_ULE, lhs, rhs, name);
}
public Value createICmpSGT(Value lhs, Value rhs, final String name) {
    return createICmp(ICMP_SGT, lhs, rhs, name);
}
public Value createICmpSGE(Value lhs, Value rhs, final String name) {
    return createICmp(ICMP_SGE, lhs, rhs, name);
}
public Value createICmpSLT(Value lhs, Value rhs, final String name) {
    return createICmp(ICMP_SLT, lhs, rhs, name);
}
public Value createICmpSLE(Value lhs, Value rhs, final String name) {
    return createICmp(ICMP_SLE, lhs, rhs, name);
}
public Value createFCmpOEQ(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_OEQ, lhs, rhs, name);
}
public Value createFCmpOGT(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_OGT, lhs, rhs, name);
}
public Value createFCmpOGE(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_OGE, lhs, rhs, name);
}
public Value createFCmpOLT(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_OLT, lhs, rhs, name);
}
public Value createFCmpOLE(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_OLE, lhs, rhs, name);
}
public Value createFCmpONE(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_ONE, lhs, rhs, name);
}
public Value createFCmpORD(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_ORD, lhs, rhs, name);
}
public Value createFCmpUNO(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_UNO, lhs, rhs, name);
}
public Value createFCmpUEQ(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_UEQ, lhs, rhs, name);
}
public Value createFCmpUGT(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_UGT, lhs, rhs, name);
}
public Value createFCmpUGE(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_UGE, lhs, rhs, name);
}
public Value createFCmpULT(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_ULT, lhs, rhs, name);
}
public Value createFCmpULE(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_ULE, lhs, rhs, name);
}
public Value createFCmpUNE(Value lhs, Value rhs, final String name) {
    return createFCmp(FCMP_UNE, lhs, rhs, name);
}
/**
 * Creates an integer compare with the given predicate. When both operands
 * are constants the comparison is folded via ConstantFolder instead of
 * emitting an instruction.
 */
public Value createICmp(Predicate pred, Value lhs, Value rhs, final String name) {
    if (lhs instanceof Constant) {
        Constant lc = (Constant) lhs;
        if (rhs instanceof Constant) {
            Constant rc = (Constant) rhs;
            return ConstantFolder.createICmp(pred, lc, rc);
        }
    }
    return insert(new Instruction.ICmpInst(pred, lhs, rhs, ""), name);
}
/**
 * Creates a floating-point compare with the given predicate. Unlike
 * createICmp, constant operands are not folded yet (stub kept below).
 */
public Value createFCmp(Predicate P, Value lhs, Value rhs, final String name) {
    /* if (lhs instanceof Constant)
    {
        Constant LC = (Constant) lhs;
        if (rhs instanceof Constant)
        {
            Constant RC = (Constant) rhs;
            return null; // TODO folder.createFCmp(P, LC, RC);
        }
    }*/
    return insert(new Instruction.FCmpInst(P, lhs, rhs, ""), name);
}
//===--------------------------------------------------------------------===//
// Instruction creation methods: Binary Operators
//===--------------------------------------------------------------------===//
/** Emits an integer {@code add} instruction; {@code name} labels the result. */
public Value createAdd(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createAdd(lhs, rhs, ""), name);
}
/** Emits a floating-point {@code fadd} instruction; {@code name} labels the result. */
public Value createFAdd(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createFAdd(lhs, rhs, ""), name);
}
/** Emits an integer {@code sub} instruction; {@code name} labels the result. */
public Value createSub(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createSub(lhs, rhs, ""), name);
}
/** Emits a floating-point {@code fsub} instruction; {@code name} labels the result. */
public Value createFSub(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createFSub(lhs, rhs, ""), name);
}
/** Emits an integer {@code mul} instruction; {@code name} labels the result. */
public Value createMul(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createMul(lhs, rhs, ""), name);
}
/** Emits a floating-point {@code fmul} instruction; {@code name} labels the result. */
public Value createFMul(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createFMul(lhs, rhs, ""), name);
}
/** Emits an unsigned integer {@code udiv} instruction; {@code name} labels the result. */
public Value createUDiv(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createUDiv(lhs, rhs, ""), name);
}
/** Emits a signed integer {@code sdiv} instruction; {@code name} labels the result. */
public Value createSDiv(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createSDiv(lhs, rhs, ""), name);
}
/** Emits a floating-point {@code fdiv} instruction; {@code name} labels the result. */
public Value createFDiv(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createFDiv(lhs, rhs, ""), name);
}
/** Emits an unsigned integer {@code urem} instruction; {@code name} labels the result. */
public Value createURem(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createURem(lhs, rhs, ""), name);
}
/** Emits a signed integer {@code srem} instruction; {@code name} labels the result. */
public Value createSRem(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createSRem(lhs, rhs, ""), name);
}
/** Emits a floating-point {@code frem} instruction; {@code name} labels the result. */
public Value createFRem(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createFRem(lhs, rhs, ""), name);
}
/** Emits a shift-left ({@code shl}) instruction; {@code name} labels the result. */
public Value createShl(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createShl(lhs, rhs, ""), name);
}
/** Emits a logical shift-right ({@code lshr}) instruction; {@code name} labels the result. */
public Value createLShr(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createLShr(lhs, rhs, ""), name);
}
/** Emits an arithmetic shift-right ({@code ashr}) instruction; {@code name} labels the result. */
public Value createAShr(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createAShr(lhs, rhs, ""), name);
}
/** Emits a bitwise {@code and} instruction; {@code name} labels the result. */
public Value createAnd(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createAnd(lhs, rhs, ""), name);
}
/** Emits a bitwise {@code or} instruction; {@code name} labels the result. */
public Value createOr(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createOr(lhs, rhs, ""), name);
}
/** Emits a bitwise {@code xor} instruction; {@code name} labels the result. */
public Value createXor(Value lhs, Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.createXor(lhs, rhs, ""), name);
}
/**
 * Emits a binary instruction whose opcode is selected at runtime.
 *
 * @param opc  the binary opcode to emit
 * @param lhs  the left operand
 * @param rhs  the right operand
 * @param name the name assigned to the resulting instruction
 * @return the emitted instruction
 */
public Value createBinOp(Operator opc, Value lhs,
Value rhs, final String name) {
// TODO: constant-fold when both operands are Constant, as createICmp does.
return insert(BinaryOperator.create(opc, lhs, rhs, ""), name);
}
/** Emits an integer negation of {@code v}; {@code name} labels the result. */
public Value createNeg(Value v, final String name) {
// TODO: constant-fold when the operand is Constant, as createICmp does.
return insert(BinaryOperator.createNeg(v), name);
}
/** Emits a floating-point negation of {@code v}; {@code name} labels the result. */
public Value createFNeg(Value v, final String name) {
// TODO: constant-fold when the operand is Constant, as createICmp does.
return insert(BinaryOperator.createFNeg(v), name);
}
/** Emits a bitwise complement of {@code v}; {@code name} labels the result. */
public Value createNot(Value v, final String name) {
// TODO: constant-fold when the operand is Constant, as createICmp does.
return insert(BinaryOperator.createNot(v), name);
}
/** Emits a non-volatile, unnamed load from {@code addr}. */
public LoadInst createLoad(Value addr) {
return createLoad(addr, false, "");
}
/**
 * Emits a load from {@code addr}.
 *
 * @param addr       the pointer to load from
 * @param isVolatile whether the load is volatile
 * @param name       the name assigned to the resulting instruction
 * @return the emitted load instruction
 */
public LoadInst createLoad(Value addr, boolean isVolatile, String name) {
// FIX: the isVolatile argument was previously ignored (false was hard-coded),
// so volatile loads were silently emitted as non-volatile.
return insert(new LoadInst(addr, "", isVolatile, 0), name);
}
/** Emits a store of {@code val} to the pointer {@code ptr}. */
public Instruction.StoreInst createStore(Value val, Value ptr) {
return insert(new Instruction.StoreInst(val, ptr, ""), "");
}
/**
 * Emits a stack allocation of {@code arraySize} elements of type {@code ty}.
 *
 * @param ty        the element type to allocate
 * @param arraySize the number of elements (may be a runtime value)
 * @param name      the name assigned to the resulting instruction
 * @return the emitted alloca instruction
 */
public AllocaInst createAlloca(final Type ty, Value arraySize,
final String name) {
return insert(new AllocaInst(ty, arraySize, ""), name);
}
/** Emits a getelementptr of {@code ptr} with the single index {@code idx}. */
public Value createGEP(Value ptr, Value idx, String name) {
return insert(new GetElementPtrInst(ptr, idx, ""), name);
}
/** Emits a getelementptr of {@code ptr} with index {@code idx}, marked in-bounds. */
public Value createInBoundsGEP(Value ptr, Value idx, String name) {
GetElementPtrInst gep = new GetElementPtrInst(ptr, idx, "");
gep.setIsInBounds(true);
return insert(gep, name);
}
/**
 * This method is a variant of {@linkplain #createCall4(Value, Value, Value, Value, Value, String)}
 * with an empty instruction name by default.
 *
 * @param callee the function to call
 * @param arg1 the first argument
 * @param arg2 the second argument
 * @param arg3 the third argument
 * @param arg4 the fourth argument
 * @return the emitted call instruction
 */
public CallInst createCall4(
Value callee,
Value arg1,
Value arg2,
Value arg3,
Value arg4) {
Value[] args = {arg1, arg2, arg3, arg4};
return insert(CallInst.create(callee, args, "", (Instruction)null));
}
/**
 * Call the function specified by {@code callee} with four arguments.
 *
 * @param callee the function to call
 * @param arg1 the first argument
 * @param arg2 the second argument
 * @param arg3 the third argument
 * @param arg4 the fourth argument
 * @param name the name assigned to the resulting instruction
 * @return the emitted call instruction
 */
public CallInst createCall4(
Value callee,
Value arg1,
Value arg2,
Value arg3,
Value arg4,
String name) {
Value[] args = {arg1, arg2, arg3, arg4};
return insert(CallInst.create(callee, args, "", (Instruction)null), name);
}
/**
 * Emits an unnamed call to {@code callee}, passing {@code args} in order.
 *
 * @param callee the function to call
 * @param args   the call arguments
 * @return the emitted call instruction
 */
public CallInst createCall(Value callee, LinkedList<Value> args) {
    return insert(CallInst.create(callee, args.toArray(new Value[args.size()]), "", (Instruction) null));
}
/** Emits an in-bounds struct GEP {@code base[0][idx]} using 32-bit indices. */
public Value createStructGEPInbounds(Value base, int idx, String name) {
return createStructGEP32Inbounds(base, 0, idx, name);
}
/**
 * Emits an in-bounds struct GEP {@code base[idx1][idx2]} using 32-bit constant indices.
 *
 * @param base the base pointer
 * @param idx1 the first (outer) index
 * @param idx2 the second (member) index
 * @param name the name assigned to the resulting instruction
 * @return the emitted GEP instruction
 */
public Value createStructGEP32Inbounds(Value base, int idx1, int idx2, String name) {
    Type i32 = Type.getInt32Ty(base.getContext());
    ArrayList<Value> indices = new ArrayList<>();
    indices.add(ConstantInt.get(i32, idx1));
    indices.add(ConstantInt.get(i32, idx2));
    GetElementPtrInst gep = new GetElementPtrInst(base, indices, "");
    gep.setIsInBounds(true);
    return insert(gep, name);
}
/** Emits an in-bounds struct GEP {@code base[0][idx]} using 64-bit indices. */
public Value createStructGEPInbounds(Value base, long idx, String name) {
return createStructGEP64Inbounds(base, 0, idx, name);
}
/**
 * Emits an in-bounds struct GEP {@code base[idx1][idx2]} using 64-bit constant indices.
 *
 * @param base the base pointer
 * @param idx1 the first (outer) index
 * @param idx2 the second (member) index
 * @param name the name assigned to the resulting instruction
 * @return the emitted GEP instruction
 */
public Value createStructGEP64Inbounds(Value base, long idx1, long idx2, String name) {
    Type i64 = Type.getInt64Ty(base.getContext());
    ArrayList<Value> indices = new ArrayList<>();
    indices.add(ConstantInt.get(i64, idx1));
    indices.add(ConstantInt.get(i64, idx2));
    GetElementPtrInst gep = new GetElementPtrInst(base, indices, "");
    gep.setIsInBounds(true);
    return insert(gep, name);
}
/**
 * Creates a return instruction {@code 'ret <value>'} which returns the
 * specified value.
 *
 * @param context the LLVM context to create the instruction in
 * @param value   the value to return
 * @return the emitted return instruction
 */
public ReturnInst createRet(LLVMContext context, Value value) {
return insert(new ReturnInst(context, value));
}
/**
 * Creates a ReturnInst {@code 'ret void'} with no return value.
 *
 * @param context the LLVM context to create the instruction in
 * @return the emitted return instruction
 */
public ReturnInst createRetVoid(LLVMContext context) {
return insert(new ReturnInst(context));
}
/**
 * Create a select LLVM instruction ({@code cond ? lhs : rhs}).
 *
 * @param cond The condition of this instruction.
 * @param lhs  The left hand expression.
 * @param rhs  The right hand expression.
 * @param name The name of this instruction, as printed into assembly.
 * @return the emitted select instruction
 */
public Value createSelect(Value cond, Value lhs, Value rhs, String name) {
// FIX: pass "" to the constructor and let insert() apply the name once,
// consistent with every other factory method in this builder (previously
// the name was applied twice: by the constructor and again by insert()).
return insert(new SelectInst(cond, lhs, rhs, ""), name);
}
/** Emits an {@code unreachable} terminator instruction. */
public UnreachableInst createUnreachable() {
return insert(new UnreachableInst(context));
}
/** Emits a phi node of the given type, reserving space for {@code numVals} incoming values. */
public PhiNode createPhiNode(Type type, int numVals, String name) {
return insert(new PhiNode(type, numVals, name));
}
/** Emits a landing pad with the given result type, personality function, and clause capacity. */
public LandingPadInst createLandingPad(Type ty, Value persFn, int numClauses, String name) {
return insert(LandingPadInst.create(ty, persFn, numClauses, name));
}
/**
 * Emits an extractvalue instruction that reads the member of aggregate
 * {@code agg} addressed by the index path {@code idxs}.
 *
 * @param agg  the aggregate value to read from
 * @param name the name assigned to the resulting instruction
 * @param idxs the index path into the aggregate
 * @return the emitted extractvalue instruction
 */
public Value createExtractValue(Value agg, String name, int ... idxs) {
// FIX: parameter renamed from the typo "aag" to "agg", matching createInsertValue.
return insert(new ExtractValueInst(agg, idxs, name));
}
/** Emits an insertvalue instruction that writes {@code op} into aggregate {@code agg} at index path {@code idxs}. */
public Value createInsertValue(Value agg, Value op, String name, int ...idxs) {
return insert(new InsertValueInst(agg, op, idxs, name));
}
/** Emits a {@code resume} terminator that re-raises the in-flight exception {@code val}. */
public void createResume(Value val) {
insert(ResumeInst.create(val));
}
}
| |
package net.bytebuddy.implementation.bind.annotation;
import lombok.EqualsAndHashCode;
import net.bytebuddy.ByteBuddy;
import net.bytebuddy.ClassFileVersion;
import net.bytebuddy.description.annotation.AnnotationDescription;
import net.bytebuddy.description.field.FieldDescription;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.description.method.MethodList;
import net.bytebuddy.description.method.ParameterDescription;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.dynamic.DynamicType;
import net.bytebuddy.dynamic.scaffold.InstrumentedType;
import net.bytebuddy.dynamic.scaffold.subclass.ConstructorStrategy;
import net.bytebuddy.implementation.ExceptionMethod;
import net.bytebuddy.implementation.Implementation;
import net.bytebuddy.implementation.MethodAccessorFactory;
import net.bytebuddy.implementation.auxiliary.AuxiliaryType;
import net.bytebuddy.implementation.bind.MethodDelegationBinder;
import net.bytebuddy.implementation.bytecode.ByteCodeAppender;
import net.bytebuddy.implementation.bytecode.Duplication;
import net.bytebuddy.implementation.bytecode.StackManipulation;
import net.bytebuddy.implementation.bytecode.TypeCreation;
import net.bytebuddy.implementation.bytecode.assign.Assigner;
import net.bytebuddy.implementation.bytecode.member.FieldAccess;
import net.bytebuddy.implementation.bytecode.member.MethodInvocation;
import net.bytebuddy.implementation.bytecode.member.MethodReturn;
import net.bytebuddy.implementation.bytecode.member.MethodVariableAccess;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import java.io.Serializable;
import java.lang.annotation.*;
import java.util.Collections;
import static net.bytebuddy.matcher.ElementMatchers.*;
/**
* Using this annotation it is possible to access fields by getter and setter types. Before this annotation can be
* used, it needs to be installed with two types. The getter type must be defined in a single-method interface
* with a single method that returns an {@link java.lang.Object} type and takes no arguments. The setter interface
* must similarly return {@code void} and take a single {@link java.lang.Object} argument. After installing these
* interfaces with the {@link FieldProxy.Binder}, this
* binder needs to be registered with a {@link net.bytebuddy.implementation.MethodDelegation} before it can be used.
*
* @see net.bytebuddy.implementation.MethodDelegation
* @see net.bytebuddy.implementation.bind.annotation.TargetMethodAnnotationDrivenBinder
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.PARAMETER)
public @interface FieldProxy {
/**
 * Determines if the proxy should be serializable.
 *
 * @return {@code true} if the proxy should be serializable.
 */
boolean serializableProxy() default false;
/**
 * Determines the name of the field that is to be accessed. If this property is not set, a field name is inferred
 * from the intercepted method according to the Java beans naming conventions.
 *
 * @return The name of the field to be accessed.
 */
String value() default TargetMethodAnnotationDrivenBinder.ParameterBinder.ForFieldBinding.BEAN_PROPERTY;
/**
 * Determines which type defines the field that is to be accessed. If this property is not set, the field
 * that is defined highest in the type hierarchy is accessed.
 *
 * @return The type that defines the accessed field.
 */
Class<?> declaringType() default void.class;
/**
* A binder for the {@link FieldProxy} annotation.
*/
@EqualsAndHashCode(callSuper = false)
class Binder extends TargetMethodAnnotationDrivenBinder.ParameterBinder.ForFieldBinding<FieldProxy> {
/**
* A reference to the method that declares the field annotation's defining type property.
*/
private static final MethodDescription.InDefinedShape DECLARING_TYPE;
/**
* A reference to the method that declares the field annotation's field name property.
*/
private static final MethodDescription.InDefinedShape FIELD_NAME;
/**
* A reference to the method that declares the field annotation's serializable proxy property.
*/
private static final MethodDescription.InDefinedShape SERIALIZABLE_PROXY;
/*
 * Fetches a reference to all annotation properties, resolved once per class load
 * so that annotation values can be read without reflection at bind time.
 */
static {
MethodList<MethodDescription.InDefinedShape> methodList = new TypeDescription.ForLoadedType(FieldProxy.class).getDeclaredMethods();
DECLARING_TYPE = methodList.filter(named("declaringType")).getOnly();
FIELD_NAME = methodList.filter(named("value")).getOnly();
SERIALIZABLE_PROXY = methodList.filter(named("serializableProxy")).getOnly();
}
/**
 * Creates a binder by installing a single proxy type where annotating a parameter with {@link FieldProxy} allows
 * getting and setting values for a given field.
 *
 * @param type A type which declares exactly one abstract getter and an abstract setter for the {@link Object}
 *             type. The type is allowed to be generic.
 * @return A binder for the {@link FieldProxy} annotation.
 */
public static TargetMethodAnnotationDrivenBinder.ParameterBinder<FieldProxy> install(Class<?> type) {
// Convenience overload: wrap the loaded class and delegate to the TypeDescription variant.
return install(new TypeDescription.ForLoadedType(type));
}
/**
 * Creates a binder by installing a single proxy type where annotating a parameter with {@link FieldProxy} allows
 * getting and setting values for a given field.
 *
 * @param typeDescription A type which declares exactly one abstract getter and an abstract setter for the {@link Object}
 *                        type. The type is allowed to be generic.
 * @return A binder for the {@link FieldProxy} annotation.
 */
public static TargetMethodAnnotationDrivenBinder.ParameterBinder<FieldProxy> install(TypeDescription typeDescription) {
    // Validate the structural requirements of the proxy interface.
    if (!typeDescription.isInterface()) {
        throw new IllegalArgumentException(typeDescription + " is not an interface");
    } else if (!typeDescription.getInterfaces().isEmpty()) {
        throw new IllegalArgumentException(typeDescription + " must not extend other interfaces");
    } else if (!typeDescription.isPublic()) {
        throw new IllegalArgumentException(typeDescription + " is not public");
    }
    MethodList<MethodDescription.InDefinedShape> methodCandidates = typeDescription.getDeclaredMethods().filter(isAbstract());
    if (methodCandidates.size() != 2) {
        // FIX: the candidates are filtered with isAbstract(), so the previous message
        // ("exactly two non-abstract methods") described the opposite of what is checked.
        throw new IllegalArgumentException(typeDescription + " does not declare exactly two abstract methods");
    }
    MethodList<MethodDescription.InDefinedShape> getterCandidates = methodCandidates.filter(isGetter(Object.class));
    if (getterCandidates.size() != 1) {
        throw new IllegalArgumentException(typeDescription + " does not declare a getter with an Object type");
    }
    MethodList<MethodDescription.InDefinedShape> setterCandidates = methodCandidates.filter(isSetter(Object.class));
    if (setterCandidates.size() != 1) {
        throw new IllegalArgumentException(typeDescription + " does not declare a setter with an Object type");
    }
    return new Binder(typeDescription, getterCandidates.getOnly(), setterCandidates.getOnly());
}
/**
 * Creates a binder by installing two proxy types which are implemented by this binder if a field getter
 * or a field setter is requested by using the
 * {@link FieldProxy} annotation.
 *
 * @param getterType The type which should be used for getter proxies. The type must
 *                   represent an interface which defines a single method which returns an
 *                   {@link java.lang.Object} return type and does not take any arguments. The use of generics
 *                   is permitted.
 * @param setterType The type which should be used for setter proxies. The type must
 *                   represent an interface which defines a single method which returns {@code void}
 *                   and takes a single {@link java.lang.Object}-typed argument. The use of generics
 *                   is permitted.
 * @return A binder for the {@link FieldProxy} annotation.
 */
public static TargetMethodAnnotationDrivenBinder.ParameterBinder<FieldProxy> install(Class<?> getterType, Class<?> setterType) {
return install(new TypeDescription.ForLoadedType(getterType), new TypeDescription.ForLoadedType(setterType));
}
/**
 * Creates a binder by installing two proxy types which are implemented by this binder if a field getter
 * or a field setter is requested by using the
 * {@link FieldProxy} annotation.
 *
 * @param getterType The type which should be used for getter proxies. The type must
 *                   represent an interface which defines a single method which returns an
 *                   {@link java.lang.Object} return type and does not take any arguments. The use of generics
 *                   is permitted.
 * @param setterType The type which should be used for setter proxies. The type must
 *                   represent an interface which defines a single method which returns {@code void}
 *                   and takes a single {@link java.lang.Object}-typed argument. The use of generics
 *                   is permitted.
 * @return A binder for the {@link FieldProxy} annotation.
 */
public static TargetMethodAnnotationDrivenBinder.ParameterBinder<FieldProxy> install(TypeDescription getterType, TypeDescription setterType) {
    MethodDescription.InDefinedShape getterMethod = onlyMethod(getterType);
    if (!getterMethod.getReturnType().asErasure().represents(Object.class)) {
        // FIX: this branch validates the getter's RETURN type; the previous message
        // ("must take a single Object-typed parameter") described the setter's check.
        throw new IllegalArgumentException(getterMethod + " must return an Object-typed value");
    } else if (getterMethod.getParameters().size() != 0) {
        throw new IllegalArgumentException(getterMethod + " must not declare parameters");
    }
    MethodDescription.InDefinedShape setterMethod = onlyMethod(setterType);
    if (!setterMethod.getReturnType().asErasure().represents(void.class)) {
        throw new IllegalArgumentException(setterMethod + " must return void");
    } else if (setterMethod.getParameters().size() != 1 || !setterMethod.getParameters().get(0).getType().asErasure().represents(Object.class)) {
        // FIX: grammar — "a single ... parameter", not "parameters".
        throw new IllegalArgumentException(setterMethod + " must declare a single Object-typed parameter");
    }
    return new Binder(getterMethod, setterMethod);
}
/**
 * Extracts the only abstract method from a given type description which is validated for the required
 * properties for using the type as a proxy base type.
 *
 * @param typeDescription The type description to evaluate.
 * @return The only method which was found to be compatible to the proxy requirements.
 */
private static MethodDescription.InDefinedShape onlyMethod(TypeDescription typeDescription) {
// The proxy base must be a public, top-of-hierarchy interface.
if (!typeDescription.isInterface()) {
throw new IllegalArgumentException(typeDescription + " is not an interface");
} else if (!typeDescription.getInterfaces().isEmpty()) {
throw new IllegalArgumentException(typeDescription + " must not extend other interfaces");
} else if (!typeDescription.isPublic()) {
throw new IllegalArgumentException(typeDescription + " is not public");
}
MethodList<MethodDescription.InDefinedShape> methodCandidates = typeDescription.getDeclaredMethods().filter(isAbstract());
if (methodCandidates.size() != 1) {
throw new IllegalArgumentException(typeDescription + " must declare exactly one abstract method");
}
return methodCandidates.getOnly();
}
/**
* The field resolver factory to apply by this binder.
*/
private final FieldResolver.Factory fieldResolverFactory;
/**
 * Creates a new binder for a {@link FieldProxy} in simplex mode, i.e. with
 * separate proxy types for getting and setting a field value.
 *
 * @param getterMethod The getter method.
 * @param setterMethod The setter method.
 */
protected Binder(MethodDescription.InDefinedShape getterMethod, MethodDescription.InDefinedShape setterMethod) {
this(new FieldResolver.Factory.Simplex(getterMethod, setterMethod));
}
/**
 * Creates a new binder for a {@link FieldProxy} in duplex mode, i.e. with a
 * single proxy type that both gets and sets a field value.
 *
 * @param proxyType    The proxy type.
 * @param getterMethod The getter method.
 * @param setterMethod The setter method.
 */
protected Binder(TypeDescription proxyType, MethodDescription.InDefinedShape getterMethod, MethodDescription.InDefinedShape setterMethod) {
this(new FieldResolver.Factory.Duplex(proxyType, getterMethod, setterMethod));
}
/**
 * Creates a new binder for a {@link FieldProxy}.
 *
 * @param fieldResolverFactory The field resolver factory to apply by this binder.
 */
protected Binder(FieldResolver.Factory fieldResolverFactory) {
this.fieldResolverFactory = fieldResolverFactory;
}
/** Returns the annotation type handled by this binder. */
@Override
public Class<FieldProxy> getHandledType() {
return FieldProxy.class;
}
/** Reads the field name from the annotation's {@code value} property. */
@Override
protected String fieldName(AnnotationDescription.Loadable<FieldProxy> annotation) {
return annotation.getValue(FIELD_NAME).resolve(String.class);
}
/** Reads the declaring type from the annotation's {@code declaringType} property. */
@Override
protected TypeDescription declaringType(AnnotationDescription.Loadable<FieldProxy> annotation) {
return annotation.getValue(DECLARING_TYPE).resolve(TypeDescription.class);
}
/**
 * Binds the annotated parameter to an accessor proxy for the resolved field, or
 * reports an illegal binding when the parameter type cannot be resolved to a
 * getter/setter proxy (e.g. a setter proxy requested for a final field).
 */
@Override
protected MethodDelegationBinder.ParameterBinding<?> bind(FieldDescription fieldDescription,
AnnotationDescription.Loadable<FieldProxy> annotation,
MethodDescription source,
ParameterDescription target,
Implementation.Target implementationTarget,
Assigner assigner) {
FieldResolver fieldResolver = fieldResolverFactory.resolve(target.getType().asErasure(), fieldDescription);
if (fieldResolver.isResolved()) {
return new MethodDelegationBinder.ParameterBinding.Anonymous(new AccessorProxy(fieldDescription,
implementationTarget.getInstrumentedType(),
fieldResolver,
assigner,
annotation.getValue(SERIALIZABLE_PROXY).resolve(Boolean.class)));
} else {
return MethodDelegationBinder.ParameterBinding.Illegal.INSTANCE;
}
}
/**
* A resolver for creating an instrumentation for a field access.
*/
protected interface FieldResolver {
/**
 * Returns {@code true} if the field access can be established.
 *
 * @return {@code true} if the field access can be established.
 */
boolean isResolved();
/**
* Returns the type of the field access proxy.
*
* @return The type of the field access proxy.
*/
TypeDescription getProxyType();
/**
* Applies this field resolver to a dynamic type.
*
* @param builder The dynamic type builder to use.
* @param fieldDescription The accessed field.
* @param assigner The assigner to use.
* @param methodAccessorFactory The method accessor factory to use.
* @return The builder for creating the field accessor proxy type.
*/
DynamicType.Builder<?> apply(DynamicType.Builder<?> builder,
FieldDescription fieldDescription,
Assigner assigner,
MethodAccessorFactory methodAccessorFactory);
/**
* A factory for creating a field resolver.
*/
interface Factory {
/**
* Creates a field resolver.
*
* @param parameterType The type of the annotated parameter.
* @param fieldDescription The field being proxied.
* @return An appropriate field resolver.
*/
FieldResolver resolve(TypeDescription parameterType, FieldDescription fieldDescription);
/**
 * A duplex factory for a single proxy type that both sets and gets a field value.
 */
@EqualsAndHashCode
class Duplex implements Factory {

    /** The type of the accessor proxy. */
    private final TypeDescription proxyType;

    /** The getter method declared by the proxy type. */
    private final MethodDescription.InDefinedShape getterMethod;

    /** The setter method declared by the proxy type. */
    private final MethodDescription.InDefinedShape setterMethod;

    /**
     * Creates a new duplex factory.
     *
     * @param proxyType    The type of the accessor proxy.
     * @param getterMethod The getter method.
     * @param setterMethod The setter method.
     */
    protected Duplex(TypeDescription proxyType,
                     MethodDescription.InDefinedShape getterMethod,
                     MethodDescription.InDefinedShape setterMethod) {
        this.proxyType = proxyType;
        this.getterMethod = getterMethod;
        this.setterMethod = setterMethod;
    }

    @Override
    public FieldResolver resolve(TypeDescription parameterType, FieldDescription fieldDescription) {
        // Only the single installed proxy type can be bound in duplex mode.
        if (!parameterType.equals(proxyType)) {
            throw new IllegalStateException("Cannot use @FieldProxy on a non-installed type");
        }
        return new ForGetterSetterPair(proxyType, getterMethod, setterMethod);
    }
}
/**
* A simplex factory where field getters and setters both have their own type.
*/
@EqualsAndHashCode
class Simplex implements Factory {
/**
* The getter method.
*/
private final MethodDescription.InDefinedShape getterMethod;
/**
* The setter method.
*/
private final MethodDescription.InDefinedShape setterMethod;
/**
* Creates a simplex factory.
*
* @param getterMethod The getter method.
* @param setterMethod The setter method.
*/
protected Simplex(MethodDescription.InDefinedShape getterMethod, MethodDescription.InDefinedShape setterMethod) {
this.getterMethod = getterMethod;
this.setterMethod = setterMethod;
}
@Override
public FieldResolver resolve(TypeDescription parameterType, FieldDescription fieldDescription) {
if (parameterType.equals(getterMethod.getDeclaringType())) {
return new ForGetter(getterMethod);
} else if (parameterType.equals(setterMethod.getDeclaringType())) {
return fieldDescription.isFinal()
? Unresolved.INSTANCE
: new ForSetter(setterMethod);
} else {
throw new IllegalStateException("Cannot use @FieldProxy on a non-installed type");
}
}
}
}
/**
 * An unresolved field resolver. Returned when no proxy can be built, e.g. when a
 * setter proxy is requested for a final field (see {@code Factory.Simplex#resolve}).
 */
enum Unresolved implements FieldResolver {
/**
 * The singleton instance.
 */
INSTANCE;
@Override
public boolean isResolved() {
return false;
}
@Override
public TypeDescription getProxyType() {
throw new IllegalStateException("Cannot read type for unresolved field resolver");
}
@Override
public DynamicType.Builder<?> apply(DynamicType.Builder<?> builder,
FieldDescription fieldDescription,
Assigner assigner,
MethodAccessorFactory methodAccessorFactory) {
throw new IllegalStateException("Cannot apply unresolved field resolver");
}
}
/**
 * A field resolver for a getter accessor.
 */
@EqualsAndHashCode
class ForGetter implements FieldResolver {
/**
 * The getter method.
 */
private final MethodDescription.InDefinedShape getterMethod;
/**
 * Creates a new getter field resolver.
 *
 * @param getterMethod The getter method.
 */
protected ForGetter(MethodDescription.InDefinedShape getterMethod) {
this.getterMethod = getterMethod;
}
@Override
public boolean isResolved() {
return true;
}
@Override
public TypeDescription getProxyType() {
return getterMethod.getDeclaringType();
}
@Override
public DynamicType.Builder<?> apply(DynamicType.Builder<?> builder,
FieldDescription fieldDescription,
Assigner assigner,
MethodAccessorFactory methodAccessorFactory) {
// NOTE(review): this matcher wraps is(...) in definedMethod(...), unlike ForSetter
// and ForGetterSetterPair which use a bare is(...) — confirm this asymmetry is intended.
return builder.method(definedMethod(is(getterMethod))).intercept(new FieldGetter(fieldDescription, assigner, methodAccessorFactory));
}
}
/**
 * A field resolver for a setter accessor. Only selected for non-final fields
 * (see {@code Factory.Simplex#resolve}).
 */
@EqualsAndHashCode
class ForSetter implements FieldResolver {
/**
 * The setter method.
 */
private final MethodDescription.InDefinedShape setterMethod;
/**
 * Creates a new field resolver for a setter accessor.
 *
 * @param setterMethod The setter method.
 */
protected ForSetter(MethodDescription.InDefinedShape setterMethod) {
this.setterMethod = setterMethod;
}
@Override
public boolean isResolved() {
return true;
}
@Override
public TypeDescription getProxyType() {
return setterMethod.getDeclaringType();
}
@Override
public DynamicType.Builder<?> apply(DynamicType.Builder<?> builder,
FieldDescription fieldDescription,
Assigner assigner,
MethodAccessorFactory methodAccessorFactory) {
return builder.method(is(setterMethod)).intercept(new FieldSetter(fieldDescription, assigner, methodAccessorFactory));
}
}
/**
 * A field resolver for an accessor that both gets and sets a field value.
 */
@EqualsAndHashCode
class ForGetterSetterPair implements FieldResolver {
/**
 * The type of the accessor proxy.
 */
private final TypeDescription proxyType;
/**
 * The getter method.
 */
private final MethodDescription.InDefinedShape getterMethod;
/**
 * The setter method.
 */
private final MethodDescription.InDefinedShape setterMethod;
/**
 * Creates a new field resolver for an accessor that both gets and sets a field value.
 *
 * @param proxyType    The type of the accessor proxy.
 * @param getterMethod The getter method.
 * @param setterMethod The setter method.
 */
protected ForGetterSetterPair(TypeDescription proxyType,
MethodDescription.InDefinedShape getterMethod,
MethodDescription.InDefinedShape setterMethod) {
this.proxyType = proxyType;
this.getterMethod = getterMethod;
this.setterMethod = setterMethod;
}
@Override
public boolean isResolved() {
return true;
}
@Override
public TypeDescription getProxyType() {
return proxyType;
}
@Override
public DynamicType.Builder<?> apply(DynamicType.Builder<?> builder,
FieldDescription fieldDescription,
Assigner assigner,
MethodAccessorFactory methodAccessorFactory) {
// For a final field the setter proxy is still implemented, but throws at runtime.
return builder
.method(is(getterMethod)).intercept(new FieldGetter(fieldDescription, assigner, methodAccessorFactory))
.method(is(setterMethod)).intercept(fieldDescription.isFinal()
? ExceptionMethod.throwing(UnsupportedOperationException.class, "Cannot set final field " + fieldDescription)
: new FieldSetter(fieldDescription, assigner, methodAccessorFactory));
}
}
}
/**
 * Represents an implementation for implementing a proxy type constructor when a static field is accessed.
 */
protected enum StaticFieldConstructor implements Implementation {
/**
 * The singleton instance.
 */
INSTANCE;
/**
 * A reference of the {@link Object} type default constructor.
 */
private final MethodDescription objectTypeDefaultConstructor;
/**
 * Creates the constructor call singleton.
 */
StaticFieldConstructor() {
objectTypeDefaultConstructor = TypeDescription.OBJECT.getDeclaredMethods().filter(isConstructor()).getOnly();
}
@Override
public InstrumentedType prepare(InstrumentedType instrumentedType) {
// No instance field is needed for static field access.
return instrumentedType;
}
@Override
public ByteCodeAppender appender(Target implementationTarget) {
// Loads `this`, invokes Object's default constructor, and returns void.
return new ByteCodeAppender.Simple(MethodVariableAccess.loadThis(), MethodInvocation.invoke(objectTypeDefaultConstructor), MethodReturn.VOID);
}
}
/**
* Represents an implementation for implementing a proxy type constructor when a non-static field is accessed.
*/
@EqualsAndHashCode
protected static class InstanceFieldConstructor implements Implementation {
/**
* The instrumented type from which a field is to be accessed.
*/
private final TypeDescription instrumentedType;
/**
* Creates a new implementation for implementing a field accessor proxy's constructor when accessing
* a non-static field.
*
* @param instrumentedType The instrumented type from which a field is to be accessed.
*/
protected InstanceFieldConstructor(TypeDescription instrumentedType) {
this.instrumentedType = instrumentedType;
}
@Override
public InstrumentedType prepare(InstrumentedType instrumentedType) {
// Declares the private final 'instance' field on the proxy that will hold the
// object whose field is being accessed.
return instrumentedType.withField(new FieldDescription.Token(AccessorProxy.FIELD_NAME,
Opcodes.ACC_FINAL | Opcodes.ACC_PRIVATE,
this.instrumentedType.asGenericType()));
}
@Override
public ByteCodeAppender appender(Target implementationTarget) {
return new Appender(implementationTarget);
}
/**
* An appender for implementing an
* {@link FieldProxy.Binder.InstanceFieldConstructor}.
*/
@EqualsAndHashCode
protected static class Appender implements ByteCodeAppender {
/**
* The field to be set within the constructor.
*/
private final FieldDescription fieldDescription;
/**
* Creates a new appender.
*
* @param implementationTarget The implementation target of the current implementation.
*/
protected Appender(Target implementationTarget) {
// Resolves the 'instance' field that prepare() declared on the proxy type.
fieldDescription = implementationTarget.getInstrumentedType()
.getDeclaredFields()
.filter((named(AccessorProxy.FIELD_NAME)))
.getOnly();
}
@Override
public Size apply(MethodVisitor methodVisitor,
Context implementationContext,
MethodDescription instrumentedMethod) {
// Emits, in order: super(); this.instance = <constructor argument>; return;
StackManipulation.Size stackSize = new StackManipulation.Compound(
MethodVariableAccess.loadThis(),
MethodInvocation.invoke(StaticFieldConstructor.INSTANCE.objectTypeDefaultConstructor),
MethodVariableAccess.allArgumentsOf(instrumentedMethod.asDefined()).prependThisReference(),
FieldAccess.forField(fieldDescription).write(),
MethodReturn.VOID
).apply(methodVisitor, implementationContext);
return new Size(stackSize.getMaximalSize(), instrumentedMethod.getStackSize());
}
}
}
/**
* Implementation for a getter method.
*/
@EqualsAndHashCode
protected static class FieldGetter implements Implementation {
/**
* The field that is being accessed.
*/
private final FieldDescription fieldDescription;
/**
* The assigner to use.
*/
private final Assigner assigner;
/**
* The accessed type's method accessor factory.
*/
private final MethodAccessorFactory methodAccessorFactory;
/**
* Creates a new getter implementation.
*
* @param fieldDescription The field that is being accessed.
* @param assigner The assigner to use.
* @param methodAccessorFactory The accessed type's method accessor factory.
*/
protected FieldGetter(FieldDescription fieldDescription,
Assigner assigner,
MethodAccessorFactory methodAccessorFactory) {
this.fieldDescription = fieldDescription;
this.assigner = assigner;
this.methodAccessorFactory = methodAccessorFactory;
}
@Override
public InstrumentedType prepare(InstrumentedType instrumentedType) {
// No additional state is required on the proxy for reading a field.
return instrumentedType;
}
@Override
public ByteCodeAppender appender(Target implementationTarget) {
return new Appender(implementationTarget);
}
/**
* A byte code appender for a getter method.
* Deliberately a non-static inner class: its identity includes the enclosing
* FieldGetter (see getOuter()), which equals/hashCode take into account.
*/
protected class Appender implements ByteCodeAppender {
/**
* The generated accessor type.
*/
private final TypeDescription typeDescription;
/**
* Creates a new appender for a setter method.
*
* @param implementationTarget The implementation target of the current instrumentation.
*/
protected Appender(Target implementationTarget) {
typeDescription = implementationTarget.getInstrumentedType();
}
@Override
public Size apply(MethodVisitor methodVisitor,
Context implementationContext,
MethodDescription instrumentedMethod) {
// Obtains (or registers) a synthetic accessor on the accessed type that reads the field.
MethodDescription getterMethod = methodAccessorFactory.registerGetterFor(fieldDescription, MethodAccessorFactory.AccessType.DEFAULT);
// Stack: load the accessed instance from the proxy's 'instance' field (skipped
// for static fields), invoke the synthetic getter, adapt the result to the
// proxy method's return type, then return it.
StackManipulation.Size stackSize = new StackManipulation.Compound(
fieldDescription.isStatic()
? StackManipulation.Trivial.INSTANCE
: new StackManipulation.Compound(
MethodVariableAccess.loadThis(),
FieldAccess.forField(typeDescription.getDeclaredFields().filter((named(AccessorProxy.FIELD_NAME))).getOnly()).read()),
MethodInvocation.invoke(getterMethod),
assigner.assign(getterMethod.getReturnType(), instrumentedMethod.getReturnType(), Assigner.Typing.DYNAMIC),
MethodReturn.of(instrumentedMethod.getReturnType().asErasure())
).apply(methodVisitor, implementationContext);
return new Size(stackSize.getMaximalSize(), instrumentedMethod.getStackSize());
}
/**
* Returns the outer instance.
*
* @return The outer instance.
*/
private FieldGetter getOuter() {
return FieldGetter.this;
}
@Override // HE: Remove when Lombok support for getOuter is added.
public boolean equals(Object object) {
if (this == object) return true;
if (object == null || getClass() != object.getClass()) return false;
Appender appender = (Appender) object;
return typeDescription.equals(appender.typeDescription) && FieldGetter.this.equals(appender.getOuter());
}
@Override // HE: Remove when Lombok support for getOuter is added.
public int hashCode() {
return typeDescription.hashCode() + 31 * FieldGetter.this.hashCode();
}
}
}
/**
* Implementation for a setter method.
*/
@EqualsAndHashCode
protected static class FieldSetter implements Implementation {
/**
* The field that is being accessed.
*/
private final FieldDescription fieldDescription;
/**
* The assigner to use.
*/
private final Assigner assigner;
/**
* The accessed type's method accessor factory.
*/
private final MethodAccessorFactory methodAccessorFactory;
/**
* Creates a new setter implementation.
*
* @param fieldDescription The field that is being accessed.
* @param assigner The assigner to use.
* @param methodAccessorFactory The accessed type's method accessor factory.
*/
protected FieldSetter(FieldDescription fieldDescription,
Assigner assigner,
MethodAccessorFactory methodAccessorFactory) {
this.fieldDescription = fieldDescription;
this.assigner = assigner;
this.methodAccessorFactory = methodAccessorFactory;
}
@Override
public InstrumentedType prepare(InstrumentedType instrumentedType) {
// No additional state is required on the proxy for writing a field.
return instrumentedType;
}
@Override
public ByteCodeAppender appender(Target implementationTarget) {
return new Appender(implementationTarget);
}
/**
* A byte code appender for a setter method.
* Deliberately a non-static inner class: its identity includes the enclosing
* FieldSetter (see getOuter()), which equals/hashCode take into account.
*/
protected class Appender implements ByteCodeAppender {
/**
* The generated accessor type.
*/
private final TypeDescription typeDescription;
/**
* Creates a new appender for a setter method.
*
* @param implementationTarget The implementation target of the current instrumentation.
*/
protected Appender(Target implementationTarget) {
typeDescription = implementationTarget.getInstrumentedType();
}
@Override
public Size apply(MethodVisitor methodVisitor,
Context implementationContext,
MethodDescription instrumentedMethod) {
TypeDescription.Generic parameterType = instrumentedMethod.getParameters().get(0).getType();
// Obtains (or registers) a synthetic accessor on the accessed type that writes the field.
MethodDescription setterMethod = methodAccessorFactory.registerSetterFor(fieldDescription, MethodAccessorFactory.AccessType.DEFAULT);
// Stack: load the accessed instance from the proxy's 'instance' field (skipped
// for static fields), load the new value from local slot 1, adapt it to the
// synthetic setter's parameter type, invoke the setter, return void.
StackManipulation.Size stackSize = new StackManipulation.Compound(
fieldDescription.isStatic()
? StackManipulation.Trivial.INSTANCE
: new StackManipulation.Compound(
MethodVariableAccess.loadThis(),
FieldAccess.forField(typeDescription.getDeclaredFields()
.filter((named(AccessorProxy.FIELD_NAME))).getOnly()).read()),
MethodVariableAccess.of(parameterType).loadFrom(1),
assigner.assign(parameterType, setterMethod.getParameters().get(0).getType(), Assigner.Typing.DYNAMIC),
MethodInvocation.invoke(setterMethod),
MethodReturn.VOID
).apply(methodVisitor, implementationContext);
return new Size(stackSize.getMaximalSize(), instrumentedMethod.getStackSize());
}
/**
* Returns the outer instance.
*
* @return The outer instance.
*/
private FieldSetter getOuter() {
return FieldSetter.this;
}
@Override // HE: Remove when Lombok support for getOuter is added.
public boolean equals(Object object) {
if (this == object) return true;
if (object == null || getClass() != object.getClass()) return false;
Appender appender = (Appender) object;
return typeDescription.equals(appender.typeDescription) && FieldSetter.this.equals(appender.getOuter());
}
@Override // HE: Remove when Lombok support for getOuter is added.
public int hashCode() {
return typeDescription.hashCode() + 31 * FieldSetter.this.hashCode();
}
}
}
/**
* A proxy type for accessing a field either by a getter or a setter.
*/
protected class AccessorProxy implements AuxiliaryType, StackManipulation {
/**
* The name of the field that stores the accessed instance if any.
*/
protected static final String FIELD_NAME = "instance";
/**
* The field that is being accessed.
*/
private final FieldDescription fieldDescription;
/**
* The type which is accessed.
*/
private final TypeDescription instrumentedType;
/**
* The field resolver to use.
*/
private final FieldResolver fieldResolver;
/**
* The assigner to use.
*/
private final Assigner assigner;
/**
* {@code true} if the generated proxy should be serializable.
*/
private final boolean serializableProxy;
/**
* @param fieldDescription The field that is being accessed.
* @param instrumentedType The type which is accessed.
* @param fieldResolver The field resolver to use.
* @param assigner The assigner to use.
* @param serializableProxy {@code true} if the generated proxy should be serializable.
*/
protected AccessorProxy(FieldDescription fieldDescription,
TypeDescription instrumentedType,
FieldResolver fieldResolver,
Assigner assigner,
boolean serializableProxy) {
this.fieldDescription = fieldDescription;
this.instrumentedType = instrumentedType;
this.fieldResolver = fieldResolver;
this.assigner = assigner;
this.serializableProxy = serializableProxy;
}
@Override
public DynamicType make(String auxiliaryTypeName,
ClassFileVersion classFileVersion,
MethodAccessorFactory methodAccessorFactory) {
// Builds the proxy class: a subclass of the resolver's proxy type with an
// optional Serializable marker and a constructor that takes the accessed
// instance (no arguments for static fields). The field resolver then adds
// the actual getter/setter implementations before the type is made.
return fieldResolver.apply(new ByteBuddy(classFileVersion)
.subclass(fieldResolver.getProxyType(), ConstructorStrategy.Default.NO_CONSTRUCTORS)
.name(auxiliaryTypeName)
.modifiers(DEFAULT_TYPE_MODIFIER)
.implement(serializableProxy ? new Class<?>[]{Serializable.class} : new Class<?>[0])
.defineConstructor().withParameters(fieldDescription.isStatic()
? Collections.<TypeDescription>emptyList()
: Collections.singletonList(instrumentedType))
.intercept(fieldDescription.isStatic()
? StaticFieldConstructor.INSTANCE
: new InstanceFieldConstructor(instrumentedType)), fieldDescription, assigner, methodAccessorFactory).make();
}
@Override
public boolean isValid() {
// Instantiating the proxy is always a valid stack manipulation.
return true;
}
@Override
public Size apply(MethodVisitor methodVisitor, Implementation.Context implementationContext) {
TypeDescription auxiliaryType = implementationContext.register(this);
// Emits: new Proxy(); dup; [load this unless the field is static];
// invokespecial <init> - leaving the initialized proxy on the stack.
return new Compound(
TypeCreation.of(auxiliaryType),
Duplication.SINGLE,
fieldDescription.isStatic()
? Trivial.INSTANCE
: MethodVariableAccess.loadThis(),
MethodInvocation.invoke(auxiliaryType.getDeclaredMethods().filter(isConstructor()).getOnly())
).apply(methodVisitor, implementationContext);
}
/**
* Returns the outer instance.
*
* @return The outer instance.
*/
private Binder getOuter() {
return Binder.this;
}
@Override // HE: Remove when Lombok support for getOuter is added.
public boolean equals(Object object) {
if (this == object) return true;
if (object == null || getClass() != object.getClass()) return false;
AccessorProxy that = (AccessorProxy) object;
return serializableProxy == that.serializableProxy
&& fieldDescription.equals(that.fieldDescription)
&& instrumentedType.equals(that.instrumentedType)
&& fieldResolver.equals(that.fieldResolver)
&& assigner.equals(that.assigner)
&& Binder.this.equals(that.getOuter());
}
@Override // HE: Remove when Lombok support for getOuter is added.
public int hashCode() {
int result = fieldDescription.hashCode();
result = 31 * result + Binder.this.hashCode();
result = 31 * result + instrumentedType.hashCode();
result = 31 * result + fieldResolver.hashCode();
result = 31 * result + assigner.hashCode();
result = 31 * result + (serializableProxy ? 1 : 0);
return result;
}
}
}
}
| |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.explore_sites;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.view.ContextMenu;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.TextView;
import androidx.annotation.VisibleForTesting;
import org.chromium.base.metrics.RecordHistogram;
import org.chromium.base.metrics.RecordUserAction;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.native_page.ContextMenuManager;
import org.chromium.chrome.browser.native_page.NativePageNavigationDelegate;
import org.chromium.chrome.browser.profiles.Profile;
import org.chromium.chrome.browser.suggestions.tile.TileGridLayout;
import org.chromium.components.browser_ui.widget.RoundedIconGenerator;
import org.chromium.content_public.browser.LoadUrlParams;
import org.chromium.ui.base.PageTransition;
import org.chromium.ui.modelutil.PropertyKey;
import org.chromium.ui.modelutil.PropertyModel;
import org.chromium.ui.modelutil.PropertyModelChangeProcessor;
import org.chromium.ui.mojom.WindowOpenDisposition;
import org.chromium.url.GURL;
import java.util.ArrayList;
import java.util.List;
/**
* View for a category name and site tiles.
*/
public class ExploreSitesCategoryCardView extends LinearLayout {
private static final String TAG = "ExploreSitesCategoryCardView";
// Binder shared by all tiles; wires PropertyModel changes into each tile view.
private final ExploreSitesSiteViewBinder mSiteViewBinder;
// Category title label; bound in onFinishInflate().
private TextView mTitleView;
// Grid holding the individual site tiles; bound in onFinishInflate().
private TileGridLayout mTileView;
private RoundedIconGenerator mIconGenerator;
private ContextMenuManager mContextMenuManager;
private NativePageNavigationDelegate mNavigationDelegate;
private Profile mProfile;
// One model-change processor per visible tile; destroyed and rebuilt by updateTileViews().
private List<PropertyModelChangeProcessor<PropertyModel, ExploreSitesTileView, PropertyKey>>
mModelChangeProcessors;
private ExploreSitesCategory mCategory;
// Zero-based position of this card within the page; used for UMA click metrics.
private int mCategoryCardIndex;
// Layout resource inflated for each tile; configured via setTileResource().
private int mTileViewLayout;
// Whether the dense tile variation is active; affects row/tile-count rules below.
private boolean mIsDense;
private int mMaxRows;
private int mMaxColumns;
private int mMaxTileCount;
/** Returns the tile view at the given grid position. */
public View getTileViewAt(int tilePosition) {
return mTileView.getChildAt(tilePosition);
}
/**
* Returns the index of the tile that currently has focus, or
* {@code defaultIndex} if no tile in this card is focused.
*/
public int getFocusedTileIndex(int defaultIndex) {
if (mTileView.getFocusedChild() != null) {
for (int i = 0; i < mTileView.getChildCount(); i++) {
if (mTileView.getChildAt(i).hasFocus()) {
return i;
}
}
}
return defaultIndex;
}
/** Sets the layout resource to inflate for each site tile. */
public void setTileResource(int tileResource) {
mTileViewLayout = tileResource;
}
/**
* Handles click, context-menu, and focus interactions for a single site tile.
*/
protected class CategoryCardInteractionDelegate
implements ContextMenuManager.Delegate, OnClickListener, OnCreateContextMenuListener,
OnFocusChangeListener {
private GURL mSiteUrl;
private int mTileIndex;
public CategoryCardInteractionDelegate(GURL siteUrl, int tileIndex) {
mSiteUrl = siteUrl;
mTileIndex = tileIndex;
}
@Override
public void onClick(View view) {
// Record UMA and native-side metrics before navigating to the site.
recordCategoryClick(mCategory.getType());
recordTileIndexClick(mCategoryCardIndex, mTileIndex);
RecordUserAction.record("Android.ExploreSitesPage.ClickOnSiteIcon");
ExploreSitesBridge.recordClick(mProfile, mSiteUrl.getSpec(), mCategory.getType());
mNavigationDelegate.openUrl(WindowOpenDisposition.CURRENT_TAB,
new LoadUrlParams(getUrl(), PageTransition.AUTO_BOOKMARK));
}
@Override
public void onCreateContextMenu(
ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {
mContextMenuManager.createContextMenu(menu, v, this);
}
@Override
public void openItem(int windowDisposition) {
mNavigationDelegate.openUrl(
windowDisposition, new LoadUrlParams(getUrl(), PageTransition.AUTO_BOOKMARK));
}
@Override
public void openItemInGroup(int windowDisposition) {
mNavigationDelegate.openUrlInGroup(
windowDisposition, new LoadUrlParams(getUrl(), PageTransition.AUTO_BOOKMARK));
}
@Override
public void removeItem() {
// Update the database on the C++ side.
ExploreSitesBridge.blockSite(mProfile, mSiteUrl.getSpec());
// Remove from model (category).
mCategory.removeSite(mTileIndex);
// Update the view. This may add sites that we didn't have room for before. It
// should reset the tile indexes for views we keep.
updateTileViews(mCategory);
}
@Override
public GURL getUrl() {
return mSiteUrl;
}
@Override
public String getContextMenuTitle() {
// No custom title; the context menu falls back to its default behavior.
return null;
}
@Override
public boolean isItemSupported(@ContextMenuManager.ContextMenuItemId int menuItemId) {
// All context-menu items are available for site tiles.
return true;
}
@Override
public void onContextMenuCreated() {}
@Override
public void onFocusChange(View v, boolean hasFocus) {
if (hasFocus) {
// Ensures the whole category card is scrolled to view when a child site has focus.
// Immediate should be false so scrolling will not interfere with any existing
// scrollers running to make the view visible.
getParent().requestChildRectangleOnScreen(ExploreSitesCategoryCardView.this,
new Rect(/* left= */ 0, /* top= */ 0, /* right= */ getWidth(),
/* bottom= */ getHeight()),
/* immediate= */ false);
}
}
}
/** Creates the interaction delegate for the site described by {@code model}. */
protected CategoryCardInteractionDelegate createInteractionDelegate(PropertyModel model) {
return new CategoryCardInteractionDelegate(
model.get(ExploreSitesSite.URL_KEY), model.get(ExploreSitesSite.TILE_INDEX_KEY));
}
// We use the MVC paradigm for the site tiles inside the category card. We don't use the MVC
// paradigm for the category card view itself since it is mismatched to the needs of the
// recycler view that we use for category cards. The controller for MVC is actually here, the
// bind code inside the view class.
protected class ExploreSitesSiteViewBinder
implements PropertyModelChangeProcessor
.ViewBinder<PropertyModel, ExploreSitesTileView, PropertyKey> {
@Override
public void bind(PropertyModel model, ExploreSitesTileView view, PropertyKey key) {
if (key == ExploreSitesSite.ICON_KEY) {
view.updateIcon(model.get(ExploreSitesSite.ICON_KEY),
model.get(ExploreSitesSite.TITLE_KEY));
} else if (key == ExploreSitesSite.TITLE_KEY) {
view.setTitle(model.get(ExploreSitesSite.TITLE_KEY));
} else if (key == ExploreSitesSite.URL_KEY) {
// Attach click handlers.
CategoryCardInteractionDelegate interactionDelegate =
createInteractionDelegate(model);
view.setOnClickListener(interactionDelegate);
view.setOnCreateContextMenuListener(interactionDelegate);
view.setOnFocusChangeListener(interactionDelegate);
}
}
}
public ExploreSitesCategoryCardView(Context context, AttributeSet attrs) {
super(context, attrs);
mModelChangeProcessors = new ArrayList<>();
mSiteViewBinder = new ExploreSitesSiteViewBinder();
}
@Override
protected void onFinishInflate() {
super.onFinishInflate();
mTitleView = findViewById(R.id.category_title);
mTileView = findViewById(R.id.category_sites);
}
/**
* Configures the grid dimensions and density variation for this card.
* NOTE(review): any processors already in mModelChangeProcessors are dropped
* here without destroy(); confirm setCategory()/updateTileViews() always
* follows so the observers are rebuilt.
*/
public void setTileGridParams(int maxRows, int maxColumns, @DenseVariation int denseVariation) {
mIsDense = ExploreSitesBridge.isDense(denseVariation);
mMaxRows = maxRows;
mMaxColumns = maxColumns;
mMaxTileCount = mMaxRows * mMaxColumns;
mModelChangeProcessors.clear();
mModelChangeProcessors = new ArrayList<>(mMaxTileCount);
mTileView.setMaxColumns(mMaxColumns);
}
/** Populates this card with the given category's title and site tiles. */
public void setCategory(ExploreSitesCategory category, int categoryCardIndex,
RoundedIconGenerator iconGenerator, ContextMenuManager contextMenuManager,
NativePageNavigationDelegate navigationDelegate, Profile profile) {
mIconGenerator = iconGenerator;
mContextMenuManager = contextMenuManager;
mNavigationDelegate = navigationDelegate;
mProfile = profile;
mCategoryCardIndex = categoryCardIndex;
mCategory = category;
updateTitle(mCategory.getTitle());
updateTileViews(mCategory);
}
/** Sets the card's title text. */
public void updateTitle(String categoryTitle) {
mTitleView.setText(categoryTitle);
}
/**
* Rebuilds the tile views for the given category: destroys old observers,
* grows/shrinks the set of tile views to match the display count, and
* re-binds each non-blocked site to a tile (fetching icons as needed).
*/
public void updateTileViews(ExploreSitesCategory category) {
// Clear observers.
for (PropertyModelChangeProcessor<PropertyModel, ExploreSitesTileView, PropertyKey>
observer : mModelChangeProcessors) {
observer.destroy();
}
mModelChangeProcessors.clear();
boolean incompleteAllowed = allowIncompleteRow(category);
int tileMax = tilesToDisplay(category, incompleteAllowed);
mTileView.setMaxRows(rowsToDisplay(category, incompleteAllowed));
// Remove extra tiles if too many.
if (mTileView.getChildCount() > tileMax) {
mTileView.removeViews(tileMax, mTileView.getChildCount() - tileMax);
}
// Add tiles if too few
if (mTileView.getChildCount() < tileMax) {
for (int i = mTileView.getChildCount(); i < tileMax; i++) {
mTileView.addView(LayoutInflater.from(getContext())
.inflate(mTileViewLayout, mTileView,
/* attachToRoot = */ false));
}
}
// Initialize all the non-empty tiles again to update.
int tileIndex = 0;
for (ExploreSitesSite site : category.getSites()) {
if (tileIndex >= tileMax) break;
final PropertyModel siteModel = site.getModel();
// Skip blocked sites.
if (siteModel.get(ExploreSitesSite.BLOCKED_KEY)) continue;
ExploreSitesTileView tileView = (ExploreSitesTileView) mTileView.getChildAt(tileIndex);
tileView.initialize(mIconGenerator);
siteModel.set(ExploreSitesSite.TILE_INDEX_KEY, tileIndex);
mModelChangeProcessors.add(
PropertyModelChangeProcessor.create(siteModel, tileView, mSiteViewBinder));
// Fetch icon if not present already.
if (siteModel.get(ExploreSitesSite.ICON_KEY) == null) {
ExploreSitesBridge.getSiteImage(mProfile, siteModel.get(ExploreSitesSite.ID_KEY),
(Bitmap icon) -> siteModel.set(ExploreSitesSite.ICON_KEY, icon));
}
tileIndex++;
}
}
/**
* Records UMA data for which category when the user clicks a tile in that category.
* @param category The category the user picked.
*/
public static void recordCategoryClick(int category) {
RecordHistogram.recordEnumeratedHistogram("ExploreSites.CategoryClick", category,
ExploreSitesCategory.CategoryType.NUM_ENTRIES);
}
/**
* Records UMA data for how far down the EoS page the picked tile was.
* @param cardIndex The number card (zero based) of the tile that was picked.
* @param tileIndex The number of the tile within the card.
*/
public void recordTileIndexClick(int cardIndex, int tileIndex) {
// TODO(petewil): Should I get the number of sites in this category from the model instead
// of using MAX_TILE_COUNT?
RecordHistogram.recordLinearCountHistogram("ExploreSites.SiteTilesClickIndex2",
cardIndex * ExploreSitesPage.MAX_TILE_COUNT_ALL_VARIATIONS + tileIndex, 1, 100,
100);
}
/**
* Determine if an incomplete row will be allowed when the view is dense.
*
* An incomplete row is not allowed regardless of the below constraints if:
* - The view is not dense.
* - There are more sites to display than mMaxTileCount.
* - The last row forms a complete row of sites.
*
* An incomplete row is allowed if any of the following constraints are satisfied:
* - There are not enough sites to populate the first row.
* - There is more than one site in the last row.
* - There is one site in the last row as a result of the user blocking a site.
*
* @param category The category from which the number of incomplete row will be calculated.
*/
@VisibleForTesting
boolean allowIncompleteRow(ExploreSitesCategory category) {
if (!mIsDense) return false;
// Do not allow incomplete row if category has more sites than mMaxTileCount.
if (category.getNumDisplayed() > mMaxTileCount) return false;
final int numSitesLastRow = category.getNumDisplayed() % mMaxColumns;
// Do not allow incomplete row if last row forms a complete row anyway.
if (numSitesLastRow == 0) return false;
// Allow incomplete row if category does not have enough sites to populate first row.
if (category.getNumDisplayed() < mMaxColumns) return true;
return (category.getNumberRemoved() > 0 || numSitesLastRow > 1);
}
/** Returns the number of grid rows to show for the category, capped at mMaxRows. */
@VisibleForTesting
int rowsToDisplay(ExploreSitesCategory category, boolean incompleteAllowed) {
if (mIsDense) {
int displayedRows = category.getNumDisplayed() / mMaxColumns;
return Math.min(displayedRows + (incompleteAllowed ? 1 : 0), mMaxRows);
} else {
return Math.min(category.getMaxRows(mMaxColumns), mMaxRows);
}
}
/** Returns the number of tiles to show, capped by rows, available sites, and mMaxTileCount. */
@VisibleForTesting
int tilesToDisplay(ExploreSitesCategory category, boolean incompleteAllowed) {
return incompleteAllowed ? Math.min(category.getNumDisplayed(), mMaxTileCount)
: Math.min(Math.min(category.getMaxRows(mMaxColumns) * mMaxColumns,
category.getNumDisplayed()),
mMaxTileCount);
}
}
| |
package com.hotf.server.action;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import javax.jdo.PersistenceManager;
import net.customware.gwt.dispatch.server.ActionHandler;
import net.customware.gwt.dispatch.server.ExecutionContext;
import net.customware.gwt.dispatch.shared.DispatchException;
import com.google.appengine.api.memcache.InvalidValueException;
import com.google.appengine.api.memcache.MemcacheService;
import com.google.appengine.api.memcache.MemcacheServiceFactory;
import com.hotf.client.action.GetGameAction;
import com.hotf.client.action.result.GameArmourResult;
import com.hotf.client.action.result.GameArtifactResult;
import com.hotf.client.action.result.GameGeneralSkillResult;
import com.hotf.client.action.result.GameResult;
import com.hotf.client.action.result.GameSkillResult;
import com.hotf.client.action.result.GameWeaponResult;
import com.hotf.server.PMUtils;
import com.hotf.server.model.Account;
import com.hotf.server.model.Game;
import com.hotf.server.model.GameArmour;
import com.hotf.server.model.GameArtifact;
import com.hotf.server.model.GameWeapon;
public class GetGameHandler implements ActionHandler<GetGameAction, GameResult> {
private static final Logger log = Logger.getLogger(GetGameHandler.class.getName());
private GetAccountHandler getAccountHandler;
private GetCharacterHandler getCharacterHandler;
public GetGameHandler() {
// Collaborating handlers are injected via the setters below.
}
/**
 * Loads the requested game and converts it into a client-facing result.
 *
 * @param action carries the id of the game to load
 * @param context dispatch execution context (unused)
 * @return the populated game result
 * @throws DispatchException if the dispatch framework reports a failure
 */
@Override
public GameResult execute(GetGameAction action, ExecutionContext context) throws DispatchException {
    Account requester = getAccountHandler.getMyAccount();
    Game requestedGame = getGame(action.getId());
    return getResult(requester, requestedGame);
}
/**
 * Converts a persisted {@link Game} entity into the {@link GameResult} DTO
 * returned to the client, including its weapons, armour, artifacts and the
 * static general-skill table.
 *
 * @param account the requesting account; may be {@code null} for anonymous callers
 * @param game the game entity to convert
 * @return a fully populated result object
 */
public GameResult getResult(Account account, Game game) {
    GameResult gameResult = new GameResult();
    gameResult.setDescription(game.getDescription());
    gameResult.setGameMasterCharacterId(game.getGameMasterCharacterId());
    gameResult.setId(game.getId());
    gameResult.setTitle(game.getTitle());
    gameResult.setCreated(game.getCreated());
    gameResult.setUpdated(game.getUpdated());
    // The game master's character supplies the owner name and the account id
    // used for the update-permission check.
    com.hotf.server.model.Character gm = getCharacterHandler.getCharacter(game.getGameMasterCharacterId());
    gameResult.setOwner(gm.getName());
    gameResult.setUpdatePermission(getUpdatePermission(account, game, gm.getGameMasterAccountId()));
    gameResult.setWeapons(toWeaponResults(game));
    gameResult.setArmour(toArmourResults(game));
    gameResult.setArtifacts(toArtifactResults(game));
    gameResult.setGeneralSkills(new ArrayList<GameGeneralSkillResult>(getGeneralSkills().values()));
    return gameResult;
}

/** Maps each persisted weapon of the game to its client-facing DTO. */
private List<GameWeaponResult> toWeaponResults(Game game) {
    List<GameWeaponResult> weapons = new ArrayList<GameWeaponResult>();
    for (GameWeapon w : game.getWeapons()) {
        GameWeaponResult wr = new GameWeaponResult();
        wr.setName(w.getName());
        wr.setSlashDamage(w.getSlashDamage());
        wr.setCrushDamage(w.getCrushDamage());
        wr.setPierceDamage(w.getPierceDamage());
        wr.setDefence(w.getDefence());
        wr.setTwoHanded(w.getTwoHanded());
        wr.setMaxRange(w.getMaxRange());
        wr.setMinRange(w.getMinRange());
        wr.setShotMaxRange(w.getShotMaxRange());
        wr.setShotMinRange(w.getShotMinRange());
        wr.setDamageRating(w.getDamageRating());
        wr.setStrengthRating(w.getStrengthRating());
        wr.setInitiative(w.getInitiative());
        // Copy into a fresh ArrayList so the DTO is detached from the
        // fixed-size view returned by Arrays.asList.
        wr.setSkillNames(new ArrayList<String>(Arrays.asList(w.getSkillNames())));
        weapons.add(wr);
    }
    return weapons;
}

/** Maps each persisted armour piece of the game to its client-facing DTO. */
private List<GameArmourResult> toArmourResults(Game game) {
    List<GameArmourResult> armour = new ArrayList<GameArmourResult>();
    for (GameArmour a : game.getArmour()) {
        GameArmourResult ar = new GameArmourResult();
        ar.setName(a.getName());
        ar.setSlashDefence(a.getSlashDefence());
        ar.setCrushDefence(a.getCrushDefence());
        ar.setPierceDefence(a.getPierceDefence());
        ar.setInitiative(a.getInitiative());
        armour.add(ar);
    }
    return armour;
}

/** Maps each persisted artifact of the game to its client-facing DTO. */
private List<GameArtifactResult> toArtifactResults(Game game) {
    List<GameArtifactResult> artifacts = new ArrayList<GameArtifactResult>();
    for (GameArtifact a : game.getArtifacts()) {
        GameArtifactResult ar = new GameArtifactResult();
        ar.setName(a.getName());
        ar.setEffect(a.getEffect());
        ar.setSkillNames(new ArrayList<String>(Arrays.asList(a.getSkillNames())));
        artifacts.add(ar);
    }
    return artifacts;
}
/**
 * Returns the general-skill table, building it lazily on first access.
 * NOTE(review): the lazy initialisation is not synchronised; confirm each
 * handler instance is confined to a single thread.
 *
 * @return map of general-skill name to result, never {@code null}
 */
public Map<String, GameGeneralSkillResult> getGeneralSkills() {
if (generalSkills == null) {
initialiseSkills();
}
return generalSkills;
}
/**
 * Loads a game by id, consulting memcache first and falling back to the
 * datastore (repopulating the cache on a miss).
 *
 * @param gameId to find
 * @return Game
 */
public Game getGame(String gameId) {
    MemcacheService cache = MemcacheServiceFactory.getMemcacheService();
    // Use a single get() rather than contains()+get(): the entry can be
    // evicted between the two calls, and one round trip is cheaper anyway.
    try {
        Game cached = (Game) cache.get(gameId);
        if (cached != null) {
            log.info("Getting Game from Memcache");
            return cached;
        }
    } catch (InvalidValueException e) {
        // The cached bytes no longer deserialize (e.g. after a model change);
        // drop the cache and reload from the datastore.
        log.warning("Clearing cache");
        cache.clearAll();
    }
    PersistenceManager pm = PMUtils.getPersistenceManager();
    try {
        log.info("Getting game from Datastore by id");
        Game game = pm.getObjectById(Game.class, gameId);
        cache.put(gameId, game);
        return game;
    } catch (RuntimeException t) {
        log.severe(t.getMessage());
        // roll-back transactions and re-throw
        if (pm.currentTransaction().isActive()) {
            pm.currentTransaction().rollback();
        }
        throw t;
    }
}
/**
 * Looks up a weapon of the game by its name.
 *
 * @param game the game whose weapons are searched
 * @param name the weapon name to match; may be {@code null}
 * @return the matching weapon, or {@code null} if the name is {@code null} or not found
 */
public GameWeapon getWeaponByName(Game game, String name) {
    if (name == null) {
        return null;
    }
    for (GameWeapon candidate : game.getWeapons()) {
        if (name.equals(candidate.getName())) {
            return candidate;
        }
    }
    return null;
}
/**
 * Looks up an armour piece of the game by its name.
 *
 * @param game the game whose armour is searched
 * @param name the armour name to match; may be {@code null}
 * @return the matching armour, or {@code null} if the name is {@code null} or not found
 */
public GameArmour getArmourByName(Game game, String name) {
    if (name == null) {
        return null;
    }
    for (GameArmour candidate : game.getArmour()) {
        if (name.equals(candidate.getName())) {
            return candidate;
        }
    }
    return null;
}
/**
 * Determines whether the given account may update the game: only the game
 * master's own account has update permission.
 *
 * @return {@code true} if the user has update permission, otherwise {@code false}
 */
public Boolean getUpdatePermission(Account account, Game game, String gmAccountId) {
    return gmAccountId != null && account != null && gmAccountId.equals(account.getId());
}
@Override
public Class<GetGameAction> getActionType() {
// Identifies which action type this handler serves in the dispatch registry.
return GetGameAction.class;
}
@Override
public void rollback(GetGameAction action, GameResult result, ExecutionContext context) throws DispatchException {
// Read-only action: there is nothing to roll back.
}
/**
 * Injects the handler used to resolve the calling user's account.
 *
 * @param getAccountHandler the getAccountHandler to set
 */
public void setGetAccountHandler(GetAccountHandler getAccountHandler) {
this.getAccountHandler = getAccountHandler;
}
/**
 * Injects the handler used to load characters (e.g. the game master's character).
 *
 * @param getCharacterHandler the getCharacterHandler to set
 */
public void setGetCharacterHandler(GetCharacterHandler getCharacterHandler) {
this.getCharacterHandler = getCharacterHandler;
}
private Map<String, GameGeneralSkillResult> generalSkills;
private Map<String, GameSkillResult> skills;
/**
 * Creates a general skill with the given name, registers it in the supplied
 * map, and gives it an empty, mutable skill list.
 *
 * @param name display name of the general skill (also used as the map key)
 * @param generalSkills the map in which to register the new skill
 * @return the newly created general skill
 */
private GameGeneralSkillResult addGeneralSkill(String name, Map<String, GameGeneralSkillResult> generalSkills) {
    GameGeneralSkillResult gs = new GameGeneralSkillResult();
    gs.setName(name);
    // Register in the map that was passed in. Previously the parameter was
    // ignored in favour of getGeneralSkills(); every caller passes that same
    // field, so behaviour is unchanged, but the dependency is now explicit.
    generalSkills.put(gs.getName(), gs);
    gs.setSkills(new ArrayList<GameSkillResult>());
    return gs;
}
/**
 * Creates a skill governed by {@code attr}, registers it in the flat skill
 * map, and appends it to the parent general skill's list.
 *
 * @param generalSkill the parent general skill
 * @param name skill name, used as the lookup key
 * @param attr governing attribute (e.g. "Vigor", "Wit", "Glamour")
 */
private void addSkill(GameGeneralSkillResult generalSkill, String name, String attr) {
GameSkillResult s = new GameSkillResult();
s.setGeneralSkill(generalSkill);
s.setName(name);
s.setAttribute(attr);
skills.put(s.getName(), s);
generalSkill.getSkills().add(s);
}
/** Returns the general skill with the given name, or {@code null} if unknown. */
public GameGeneralSkillResult getGeneralSkill(String name) {
return getGeneralSkills().get(name);
}
/**
 * Returns the skill with the given name, or {@code null} if unknown.
 *
 * @param name the skill name used at registration time
 * @return the matching skill, or {@code null}
 */
public GameSkillResult getSkill(String name) {
    // Ensure the skill tables exist: previously, calling this before
    // getGeneralSkills()/initialiseSkills() threw a NullPointerException
    // because 'skills' was still null.
    if (skills == null) {
        initialiseSkills();
    }
    return skills.get(name);
}
/**
 * Builds the static skill tables: every general skill category and the
 * individual skills (with their governing attribute) belonging to each.
 * Populates both the generalSkills and skills maps.
 */
private void initialiseSkills() {
    generalSkills = new HashMap<String, GameGeneralSkillResult>();
    skills = new HashMap<String, GameSkillResult>();
    GameGeneralSkillResult closeCombat = addGeneralSkill("Close Combat", generalSkills);
    addSkill(closeCombat, "Swords Requiring Strength", "Vigor");
    addSkill(closeCombat, "Swords Requiring Agility", "Vigor");
    addSkill(closeCombat, "Daggers", "Vigor");
    addSkill(closeCombat, "Concussion Weapons", "Vigor");
    addSkill(closeCombat, "Flails", "Vigor");
    addSkill(closeCombat, "Axes", "Vigor");
    addSkill(closeCombat, "Spears Under 8'", "Vigor");
    addSkill(closeCombat, "Spears 8' or Over", "Vigor");
    addSkill(closeCombat, "Polearms", "Vigor");
    addSkill(closeCombat, "Lances From Horseback", "Vigor");
    addSkill(closeCombat, "Shields", "Vigor");
    addSkill(closeCombat, "Dodge", "Vigor");
    addSkill(closeCombat, "Weapon Purchase", "Wit");
    addSkill(closeCombat, "Intimidation", "Glamour");
    GameGeneralSkillResult rangedCombat = addGeneralSkill("Ranged Combat", generalSkills);
    addSkill(rangedCombat, "Hand Thrown Shafted Weapons", "Vigor");
    addSkill(rangedCombat, "Bow", "Vigor");
    addSkill(rangedCombat, "Thrown Dagger", "Vigor");
    addSkill(rangedCombat, "Thrown Fransisca", "Vigor");
    addSkill(rangedCombat, "Hand Slings", "Vigor");
    addSkill(rangedCombat, "Crossbows and Ballistae", "Vigor");
    addSkill(rangedCombat, "Ranged Weapon Purchase", "Wit");
    GameGeneralSkillResult larceny = addGeneralSkill("Larceny", generalSkills);
    addSkill(larceny, "Cut Purse", "Wit");
    addSkill(larceny, "Climbing", "Vigor");
    addSkill(larceny, "Disguise", "Wit");
    addSkill(larceny, "Pick Lock", "Wit");
    addSkill(larceny, "Valuation", "Wit");
    addSkill(larceny, "Stealth", "Wit");
    addSkill(larceny, "Bluff", "Glamour");
    addSkill(larceny, "Search", "Wit");
    addSkill(larceny, "Underworld Connections", "Glamour");
    addSkill(larceny, "Awareness Urban", "Wit");
    GameGeneralSkillResult hunting = addGeneralSkill("Hunting", generalSkills);
    addSkill(hunting, "Tracking", "Wit");
    addSkill(hunting, "Stalking - Wilderness", "Wit");
    addSkill(hunting, "Trap Setting", "Wit");
    addSkill(hunting, "Land Navigation", "Wit");
    addSkill(hunting, "Bow", "Vigor");
    addSkill(hunting, "Crossbows and Ballistae", "Vigor");
    addSkill(hunting, "Hand Thrown Shafted Weapons", "Vigor");
    addSkill(hunting, "Spears Under 8'", "Vigor");
    addSkill(hunting, "Herb Lore", "Wit");
    addSkill(hunting, "Weather Lore", "Wit");
    addSkill(hunting, "Hand Slings", "Vigor");
    addSkill(hunting, "Skinning and Butchery", "Vigor");
    addSkill(hunting, "Awareness - Wilderness", "Wit");
    GameGeneralSkillResult espionage = addGeneralSkill("Espionage", generalSkills);
    addSkill(espionage, "Stealth", "Wit");
    addSkill(espionage, "Stalking - Urban", "Wit");
    addSkill(espionage, "Pick Lock", "Wit");
    addSkill(espionage, "Bluff", "Glamour");
    // NOTE(review): "Pursuade" is spelled "Persuade" elsewhere (trading,
    // administration). Left unchanged because skill names are lookup keys.
    addSkill(espionage, "Pursuade", "Glamour");
    addSkill(espionage, "Etiquette", "Glamour");
    addSkill(espionage, "Languages", "Wit");
    addSkill(espionage, "Literacy", "Wit");
    addSkill(espionage, "Map Making", "Wit");
    addSkill(espionage, "Search", "Wit");
    addSkill(espionage, "Poisons", "Wit");
    addSkill(espionage, "Forgery", "Wit");
    addSkill(espionage, "Underworld Connections - Urban", "Glamour");
    addSkill(espionage, "Awareness - Urban", "Wit");
    GameGeneralSkillResult boatCraft = addGeneralSkill("Boat Craft", generalSkills);
    addSkill(boatCraft, "Sailing", "Vigor");
    addSkill(boatCraft, "Rowing", "Vigor");
    addSkill(boatCraft, "Sea Navigation", "Wit");
    addSkill(boatCraft, "Rope Craft", "Wit");
    addSkill(boatCraft, "Boat Building", "Wit");
    addSkill(boatCraft, "Swimming", "Vigor");
    addSkill(boatCraft, "Weather Lore", "Wit");
    addSkill(boatCraft, "Boat Purchase", "Wit");
    GameGeneralSkillResult assassination = addGeneralSkill("Assassination", generalSkills);
    addSkill(assassination, "Disguise", "Wit");
    addSkill(assassination, "Climbing", "Vigor");
    addSkill(assassination, "Garrotte", "Vigor");
    addSkill(assassination, "Poisons", "Wit");
    addSkill(assassination, "Bow", "Vigor");
    addSkill(assassination, "Crossbows and Ballistae", "Vigor");
    addSkill(assassination, "Daggers", "Vigor");
    addSkill(assassination, "Thrown Dagger", "Vigor");
    addSkill(assassination, "Stalking - Urban", "Wit");
    addSkill(assassination, "Stealth", "Wit");
    addSkill(assassination, "Awareness - Urban", "Wit");
    GameGeneralSkillResult horsemanship = addGeneralSkill("Horsemanship", generalSkills);
    addSkill(horsemanship, "Riding", "Vigor");
    addSkill(horsemanship, "Horse Care", "Wit");
    addSkill(horsemanship, "Horse Purchase", "Wit");
    GameGeneralSkillResult performance = addGeneralSkill("Performance", generalSkills);
    addSkill(performance, "Oratory", "Glamour");
    addSkill(performance, "Juggling", "Vigor");
    addSkill(performance, "Escapology", "Vigor");
    addSkill(performance, "Acrobatics", "Vigor");
    // NOTE(review): "Instrament" looks like a typo for "Instrument"; left
    // unchanged because skill names are lookup keys.
    addSkill(performance, "Musical Instrament", "Wit");
    addSkill(performance, "Bluff", "Glamour");
    addSkill(performance, "Disguise", "Glamour");
    // Fixed: a second duplicate addSkill(performance, "Bluff", "Glamour")
    // was removed; it added a redundant duplicate entry to the category list.
    GameGeneralSkillResult command = addGeneralSkill("Command", generalSkills);
    addSkill(command, "Leadership", "Glamour");
    addSkill(command, "Oratory", "Glamour");
    addSkill(command, "Siege Craft", "Wit");
    addSkill(command, "Drill", "Wit");
    addSkill(command, "Logistics", "Wit");
    addSkill(command, "Intimidation", "Glamour");
    addSkill(command, "Awareness - Military", "Wit");
    GameGeneralSkillResult scholarship = addGeneralSkill("Scholarship", generalSkills);
    addSkill(scholarship, "Languages", "Wit");
    addSkill(scholarship, "Dead Languages", "Wit");
    addSkill(scholarship, "Magic Lore", "Wit");
    addSkill(scholarship, "Herb Lore", "Wit");
    addSkill(scholarship, "Literacy", "Wit");
    addSkill(scholarship, "Healing", "Wit");
    addSkill(scholarship, "Alchemy", "Wit");
    addSkill(scholarship, "Map Making", "Wit");
    addSkill(scholarship, "History", "Wit");
    GameGeneralSkillResult shamanism = addGeneralSkill("Shamanism", generalSkills);
    addSkill(shamanism, "Herb Lore", "Wit");
    addSkill(shamanism, "Magic Lore", "Wit");
    addSkill(shamanism, "Healing", "Wit");
    addSkill(shamanism, "Weather Lore", "Wit");
    addSkill(shamanism, "Land Navigation", "Wit");
    addSkill(shamanism, "Tracking", "Wit");
    addSkill(shamanism, "Animal Lore", "Wit");
    addSkill(shamanism, "Awareness - Wilderness", "Wit");
    addSkill(shamanism, "Stalking - Wilderness", "Wit");
    addSkill(shamanism, "Oratory", "Glamour");
    GameGeneralSkillResult unarmedCombat = addGeneralSkill("Unarmed Combat", generalSkills);
    addSkill(unarmedCombat, "Strike", "Vigor");
    addSkill(unarmedCombat, "Grapple", "Vigor");
    addSkill(unarmedCombat, "Dodge", "Vigor");
    addSkill(unarmedCombat, "Intimidation", "Glamour");
    GameGeneralSkillResult artifice = addGeneralSkill("Artifice", generalSkills);
    addSkill(artifice, "Alchemy", "Wit");
    addSkill(artifice, "Weapon Purchase", "Wit");
    addSkill(artifice, "Pick Lock", "Wit");
    addSkill(artifice, "Trap Setting", "Wit");
    addSkill(artifice, "Rope Craft", "Wit");
    addSkill(artifice, "Siege Craft", "Wit");
    addSkill(artifice, "Carpentry", "Wit");
    addSkill(artifice, "Metal Working", "Wit");
    addSkill(artifice, "Stone Mason", "Wit");
    addSkill(artifice, "Armourer", "Wit");
    GameGeneralSkillResult pathfinding = addGeneralSkill("Pathfinding", generalSkills);
    addSkill(pathfinding, "Talent", "Wit");
    addSkill(pathfinding, "Land Navigation", "Wit");
    addSkill(pathfinding, "Sea Navigation", "Wit");
    addSkill(pathfinding, "Tracking", "Wit");
    addSkill(pathfinding, "Stalking", "Wit");
    addSkill(pathfinding, "Awareness - Urban and Wilderness", "Wit");
    addSkill(pathfinding, "Stealth", "Wit");
    GameGeneralSkillResult pathfinderArtificer = addGeneralSkill("Pathfinder Artificer", generalSkills);
    addSkill(pathfinderArtificer, "Alchemy", "Wit");
    addSkill(pathfinderArtificer, "Pick Lock", "Wit");
    addSkill(pathfinderArtificer, "Trap Setting", "Wit");
    addSkill(pathfinderArtificer, "Rope Craft", "Wit");
    addSkill(pathfinderArtificer, "Siege Craft", "Wit");
    addSkill(pathfinderArtificer, "Dead Languages", "Wit");
    addSkill(pathfinderArtificer, "Literacy", "Wit");
    addSkill(pathfinderArtificer, "Magic Lore", "Wit");
    addSkill(pathfinderArtificer, "Carpentry", "Wit");
    addSkill(pathfinderArtificer, "Metal Working", "Wit");
    addSkill(pathfinderArtificer, "Stone Mason", "Wit");
    addSkill(pathfinderArtificer, "Armourer", "Wit");
    GameGeneralSkillResult trading = addGeneralSkill("Trading", generalSkills);
    addSkill(trading, "Languages", "Wit");
    addSkill(trading, "Assess Value", "Wit");
    addSkill(trading, "Purchase", "Wit");
    addSkill(trading, "Persuade", "Glamour");
    addSkill(trading, "Oratory", "Glamour");
    addSkill(trading, "Bluff", "Glamour");
    addSkill(trading, "Trade Connections", "Glamour");
    // NOTE(review): the entries below ("Ministry", "Rituals", "Dead
    // Languages", "Magic Lore", "Leadership", "Etiquette") look out of place
    // for a Trading category and may belong to a missing priestly category -
    // verify against the game rules before changing.
    addSkill(trading, "Ministry", "Wit");
    addSkill(trading, "Rituals", "Wit");
    addSkill(trading, "Dead Languages", "Wit");
    addSkill(trading, "Literacy", "Wit");
    addSkill(trading, "Magic Lore", "Wit");
    // Fixed: a second duplicate addSkill(trading, "Oratory", "Glamour")
    // was removed; it added a redundant duplicate entry to the category list.
    addSkill(trading, "Leadership", "Glamour");
    addSkill(trading, "Etiquette", "Glamour");
    GameGeneralSkillResult nobility = addGeneralSkill("Nobility", generalSkills);
    addSkill(nobility, "Languages", "Wit");
    addSkill(nobility, "Literacy", "Wit");
    addSkill(nobility, "Etiquette", "Wit");
    addSkill(nobility, "Arts", "Wit");
    addSkill(nobility, "Leadership", "Glamour");
    GameGeneralSkillResult diplomacy = addGeneralSkill("Diplomacy", generalSkills);
    addSkill(diplomacy, "Oratory", "Glamour");
    addSkill(diplomacy, "Connections", "Glamour");
    addSkill(diplomacy, "Riding", "Vigor");
    GameGeneralSkillResult administration = addGeneralSkill("Administration", generalSkills);
    addSkill(administration, "Persuade", "Glamour");
    addSkill(administration, "Bluff", "Glamour");
    addSkill(administration, "Etiquette", "Glamour");
    addSkill(administration, "Administrative Connections", "Glamour");
    addSkill(administration, "Literacy", "Wit");
    addSkill(administration, "Languages", "Wit");
    addSkill(administration, "Logistics", "Wit");
    addSkill(administration, "Forgery", "Wit");
}
}
| |
/*
* Copyright 2009-2020 Aarhus University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dk.brics.tajs.analysis.nativeobjects;
import dk.brics.tajs.analysis.Conversion;
import dk.brics.tajs.analysis.Exceptions;
import dk.brics.tajs.analysis.FunctionCalls;
import dk.brics.tajs.analysis.FunctionCalls.CallInfo;
import dk.brics.tajs.analysis.InitialStateBuilder;
import dk.brics.tajs.analysis.ParallelTransfer;
import dk.brics.tajs.analysis.PropVarOperations;
import dk.brics.tajs.analysis.Solver;
import dk.brics.tajs.flowgraph.AbstractNode;
import dk.brics.tajs.flowgraph.jsnodes.CallNode;
import dk.brics.tajs.lattice.ExecutionContext;
import dk.brics.tajs.lattice.FunctionPartitions;
import dk.brics.tajs.lattice.FunctionTypeSignatures;
import dk.brics.tajs.lattice.ObjectLabel;
import dk.brics.tajs.lattice.ObjectLabel.Kind;
import dk.brics.tajs.lattice.State;
import dk.brics.tajs.lattice.UnknownValueResolver;
import dk.brics.tajs.lattice.Value;
import dk.brics.tajs.solver.Message.Severity;
import dk.brics.tajs.unevalizer.SimpleUnevalizerAPI;
import dk.brics.tajs.unevalizer.UnevalizerLimitations;
import dk.brics.tajs.util.AnalysisException;
import dk.brics.tajs.util.AnalysisLimitationException;
import dk.brics.tajs.util.Collectors;
import org.apache.log4j.Logger;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import static dk.brics.tajs.util.Collections.newList;
import static dk.brics.tajs.util.Collections.newSet;
import static dk.brics.tajs.util.Collections.singleton;
/**
 * 15.3 native Function functions.
 * Transfer functions for the native Function constructor and the
 * Function.prototype methods (toString, apply, call).
 */
public class JSFunction {
// Class logger (not referenced by the methods in this class).
private static Logger log = Logger.getLogger(JSFunction.class);
// Static-only utility class; never instantiated.
private JSFunction() {
}
/**
 * Evaluates the given native function.
 *
 * @param nativeobject the native Function-related function being invoked
 * @param call         information about the concrete call site
 * @param c            solver interface giving access to state, monitoring, analysis options
 * @return the abstract result value, or null if nativeobject is not handled here
 */
public static Value evaluate(ECMAScriptObjects nativeobject, final CallInfo call, final Solver.SolverInterface c) {
State state = c.getState();
switch (nativeobject) {
case FUNCTION: { // 15.3.1 / 15.3.2 (no difference between function and constructor)
if (c.isScanning())
return Value.makeNone();
//First parse the argument string
if (call.isUnknownNumberOfArgs()) {
return UnevalizerLimitations.handle("Unable to handle unknown args to Function", call.getSourceNode(), c);
}
if (!(call.getSourceNode() instanceof CallNode)) {
if (c.getAnalysis().getUnsoundness().mayIgnoreEvalCallAtNonCallNode(call.getSourceNode())) {
return Value.makeUndef();
}
throw new AnalysisLimitationException.AnalysisModelLimitationException(call.getSourceNode().getSourceLocation() + ": Invoking Function from non-CallNode - unevalizer can't handle that"); // TODO: generalize unevalizer to handle calls from EventDispatcherNode and implicit calls?
}
CallNode callNode = (CallNode) call.getSourceNode();
int nrArgs = call.getNumberOfArgs();
// All arguments except the last are (possibly comma-separated) parameter names.
List<Value> vParameterNames = newList();
if (nrArgs > 1) { // if only one arg: no parameters!
for (int i = 0; i < nrArgs - 1; i++) {
Value parameterName = Conversion.toString(FunctionCalls.readParameter(call, state, i), c);
vParameterNames.add(parameterName);
}
}
// The last argument (if any) is the function body source; no args means an empty body.
Value vBody;
if (nrArgs > 0) {
vBody = Conversion.toString(FunctionCalls.readParameter(call, state, nrArgs - 1), c);
} else {
vBody = Value.makeStr("");
}
Set<Value> toStringedArguments = newSet();
toStringedArguments.add(vBody);
toStringedArguments.addAll(vParameterNames);
// If any argument coerced to the bottom value, this call is unreachable.
if (toStringedArguments.stream().anyMatch(Value::isNone)) {
return Value.makeNone();
}
// Fuzzy strings without an explicit string set cannot be unevalized precisely.
if (toStringedArguments.stream().anyMatch(v -> v.isMaybeFuzzyStr() && v.getIncludedStrings() == null)) {
if (c.getAnalysis().getUnsoundness().maySimplifyImpreciseFunctionConstructor(callNode)) {
vParameterNames.clear();
vBody = Value.makeStr("");
} else {
throw new AnalysisLimitationException.AnalysisPrecisionLimitationException(call.getJSSourceNode().getSourceLocation() + ": Too imprecise calls to Function");
}
}
if (vParameterNames.stream().anyMatch(v -> !v.isMaybeSingleStr())) {
throw new AnalysisLimitationException.AnalysisPrecisionLimitationException(call.getJSSourceNode().getSourceLocation() + ": Too imprecise calls to Function: Known but not single string arguments");
}
// Split each parameter-name string on commas, e.g. Function("a,b", body).
List<String> parameterNames = vParameterNames.stream()
.flatMap(v -> Arrays.stream(v.getStr().split(",")))
.map(String::trim)
.collect(Collectors.toList());
Set<String> bodies = vBody.getIncludedStrings() != null ? vBody.getIncludedStrings() : singleton(vBody.getStr());
// Create one synthetic function per possible body string and join the results.
Set<Value> evaledFunctions = bodies.stream()
.map(body -> SimpleUnevalizerAPI.evaluateFunctionCall(call.getSourceNode(), parameterNames, body, c))
.collect(Collectors.toSet());
return UnknownValueResolver.join(evaledFunctions, c.getState());
}
case FUNCTION_PROTOTYPE: { // 15.3.4
return Value.makeUndef();
}
case FUNCTION_TOSTRING: { // 15.3.4.2
return evaluateToString(state.readThis(), c);
}
case FUNCTION_APPLY: { // 15.3.4.3
final PropVarOperations pv = c.getAnalysis().getPropVarOperations();
Value argarray = FunctionCalls.readParameter(call, state, 1);
// handle bad arguments
boolean hasBadPrimitives = !argarray.restrictToNotObject().restrictToNotNull().restrictToNotUndef().isNone();
boolean hasOnlyBadPrimitives = !argarray.isMaybeObject() && !argarray.isMaybeNull() && !argarray.isMaybeUndef();
if (hasBadPrimitives) {
Exceptions.throwTypeError(c);
c.getMonitoring().addMessage(c.getNode(), Severity.HIGH, "TypeError, invalid arguments to 'apply'");
if (hasOnlyBadPrimitives) {
return Value.makeNone();
}
}
// group the array-like arguments by their lengths. This will reduce the number of calls with an unknown number of arguments.
Map<Value, List<ObjectLabel>> lengths = argarray.getObjectLabels().stream().collect(Collectors.groupingBy(l -> getLengthAsArrayIndex(l, c)));
lengths.remove(Value.makeNone()); // skip objects with none length
// special case: null and undefined count as an empty array
boolean maybeEmpty = argarray.isNullOrUndef();
if (maybeEmpty) {
Value key = Value.makeNum(0);
if (!lengths.containsKey(key)) {
lengths.put(key, newList());
}
}
// The function being applied is the 'this' value of the apply call itself.
Value functionValue = state.readThis();
// Perform one callFunction per distinct argument-array length, in parallel transfer.
ParallelTransfer.process(lengths.entrySet(), entry -> {
Value lengthValue = entry.getKey();
List<ObjectLabel> argumentObjectsForLength = entry.getValue();
c.getMonitoring().visitPropertyRead(call.getSourceNode(), newSet(argumentObjectsForLength), lengthValue, state, false);
FunctionCalls.callFunction(new CallInfo() { // TODO: possible infinite recursion of callFunction with apply/call? (see test109.js)
@Override
public AbstractNode getSourceNode() {
return call.getSourceNode();
}
@Override
public AbstractNode getJSSourceNode() {
return call.getJSSourceNode();
}
@Override
public boolean isConstructorCall() {
return false;
}
@Override
public Value getFunctionValue() {
return functionValue;
}
@Override
public Value getThis() {
// First argument of apply becomes the callee's 'this'.
return FunctionCalls.readParameter(call, c.getState(), 0);
}
@Override
public Value getArg(int i) {
if (!isUnknownNumberOfArgs() && lengthValue.getNum() <= i) {
return Value.makeAbsent(); // asking out of bounds
}
Value result = c.withState(state, () -> pv.readPropertyValue(argumentObjectsForLength, Integer.toString(i)));
if (maybeEmpty && lengthValue.getNum() == 0) {
result = result.joinAbsent(); // special case with null and undef acting as an empty array
}
return result;
}
@Override
public int getNumberOfArgs() {
if (isUnknownNumberOfArgs()) {
throw new AnalysisException("Number of arguments is unknown!");
}
return lengthValue.getNum().intValue(); // coercions have made this safe
}
@Override
public Value getUnknownArg() {
// Any numeric-index property of the argument objects may be an argument.
return c.withState(state, () -> pv.readPropertyValue(argumentObjectsForLength, Value.makeAnyStrUInt()));
}
@Override
public boolean isUnknownNumberOfArgs() {
return !lengthValue.isMaybeSingleNum();
}
@Override
public int getResultRegister() {
return call.getResultRegister();
}
@Override
public ExecutionContext getExecutionContext() {
return call.getExecutionContext();
}
@Override
public boolean assumeFunction() {
return false; // TODO: could do filtering like for ordinary calls
}
@Override
public FunctionPartitions getFunctionPartitions(ObjectLabel function) {
if (functionValue.getFunctionPartitions() == null)
return null;
return functionValue.getFunctionPartitions().filterByFunction(function);
}
@Override
public FunctionTypeSignatures getFunctionTypeSignatures() {
return functionValue.getFunctionTypeSignatures();
}
}, c);
}, c);
return Value.makeNone();
}
case FUNCTION_CALL: { // 15.3.4.4
FunctionCalls.callFunction(new CallInfo() {
@Override
public AbstractNode getSourceNode() {
return call.getSourceNode();
}
@Override
public AbstractNode getJSSourceNode() {
return call.getJSSourceNode();
}
@Override
public boolean isConstructorCall() {
return false;
}
@Override
public Value getFunctionValue() {
return state.readThis();
}
@Override
public Value getThis() {
// First argument of 'call' becomes the callee's 'this'.
return FunctionCalls.readParameter(call, c.getState(), 0);
}
@Override
public Value getArg(int i) {
// Arguments are shifted by one (argument 0 is the 'this' value).
return call.getArg(i + 1);
}
@Override
public int getNumberOfArgs() {
int n = call.getNumberOfArgs();
return n > 0 ? n - 1 : 0;
}
@Override
public Value getUnknownArg() {
return call.getUnknownArg();
}
@Override
public boolean isUnknownNumberOfArgs() {
return call.isUnknownNumberOfArgs();
}
@Override
public int getResultRegister() {
return call.getResultRegister();
}
@Override
public ExecutionContext getExecutionContext() {
return call.getExecutionContext();
}
@Override
public boolean assumeFunction() {
return false; // TODO: could do filtering like for ordinary calls
}
@Override
public FunctionPartitions getFunctionPartitions(ObjectLabel function) {
// NOTE(review): this uses call.getThis() while getFunctionValue() above
// uses state.readThis() - confirm these coincide for 'call' invocations
// (the 'apply' case above consistently uses the state's 'this').
if (call.getThis().getFunctionPartitions() == null)
return null;
return call.getThis().getFunctionPartitions().filterByFunction(function);
}
@Override
public FunctionTypeSignatures getFunctionTypeSignatures() {
return call.getThis().getFunctionTypeSignatures();
}
}, c);
return Value.makeNone(); // no direct flow to the successor
}
default:
// Not a Function-related native; the caller dispatches elsewhere.
return null;
}
}
/**
 * Reads the 'length' property of the given object label and coerces it to an
 * array index: ToInt32 for single numbers, 0 for NaN/Infinity; fuzzy or
 * bottom numbers are returned unchanged.
 */
private static Value getLengthAsArrayIndex(ObjectLabel l, Solver.SolverInterface c) {
Value v = c.getAnalysis().getPropVarOperations().readPropertyValue(Collections.singleton(l), "length");
v = UnknownValueResolver.getRealValue(v, c.getState());
Value n = Conversion.toNumber(v, c);
if (n.isMaybeFuzzyNum() || n.isNone()) {
return n;
}
if (n.isMaybeSingleNum()) {
return Value.makeNum(Conversion.toInt32(n.getNum()));
}
if (n.isMaybeNaN() || n.isMaybeInf()) {
return Value.makeNum(0);
}
throw new AnalysisException("Unhandled coerced-number case: " + n);
}
/**
 * 15.3.4.2 Function.prototype.toString: builds the set of possible string
 * representations of the given 'this' value, and issues a TypeError if
 * 'this' may be a primitive or a non-function object.
 */
public static Value evaluateToString(Value thisval, Solver.SolverInterface c) {
List<Value> strs = newList();
boolean is_maybe_typeerror = thisval.isMaybePrimitive();
for (ObjectLabel thisObj : thisval.getObjectLabels()) {
if (thisObj.getKind() != Kind.FUNCTION) {
is_maybe_typeerror = true;
} else {
boolean isNativeFunction = false;
// Host-environment functions print as "function <name>() { [native code] }".
if (c.getFlowGraph().isHostEnvironmentSource(thisObj.getSourceLocation())) {
PropVarOperations pv = c.getAnalysis().getPropVarOperations();
Value prototype = UnknownValueResolver.getRealValue(pv.readPropertyValue(singleton(thisObj), "prototype"), c.getState());
Value toStringTag = UnknownValueResolver.getRealValue(pv.readPropertyValue(prototype.getObjectLabels(), Value.makeObject(InitialStateBuilder.WELLKNOWN_SYMBOL_TO_STRING_TAG)), c.getState());
if (toStringTag.isMaybeSingleStr()) {
strs.add(Value.makeStr("function " + toStringTag.getStr() + "() { [native code] }"));
isNativeFunction = true;
} else {
Value name = UnknownValueResolver.getRealValue(pv.readPropertyValue(singleton(thisObj), "name"), c.getState());
if (name.isMaybeSingleStr()) {
strs.add(Value.makeStr("function " + name.getStr() + "() { [native code] }"));
isNativeFunction = true;
}
}
}
if (isNativeFunction)
continue;
// User functions: ask the unsoundness policy for a concrete rendering,
// otherwise fall back to any-string.
Optional<String> toString = c.getAnalysis().getUnsoundness().evaluate_FunctionToString(c.getNode(), thisObj);
if (toString.isPresent()) {
strs.add(Value.makeStr(toString.get()));
} else {
strs.add(Value.makeAnyStr());
}
}
}
if (is_maybe_typeerror) {
Exceptions.throwTypeError(c);
}
return Value.join(strs);
}
}
| |
/*
* Copyright 2016 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gaffer.user;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.hamcrest.core.IsCollectionContaining;
import org.junit.Test;
/**
 * Unit tests for {@link User} and its builder: construction, default id
 * handling, immutability of auth collections, and equals/hashCode contracts.
 */
public class UserTest {
    @Test
    public void shouldBuildUser() {
        // Given
        final String userId = "user 01";
        final String dataAuth1 = "dataAuth 1";
        final String dataAuth2 = "dataAuth 2";
        final String opAuth1 = "opAuth 1";
        final String opAuth2 = "opAuth 2";
        // When
        final User user = new User.Builder()
                .userId(userId)
                .dataAuth(dataAuth1)
                .dataAuth(dataAuth2)
                .opAuth(opAuth1)
                .opAuth(opAuth2)
                .build();
        // Then
        assertEquals(userId, user.getUserId());
        assertEquals(2, user.getDataAuths().size());
        assertThat(user.getDataAuths(), IsCollectionContaining.hasItems(
                dataAuth1, dataAuth2
        ));
        assertEquals(2, user.getOpAuths().size());
        // Fixed: previously asserted hasItems(opAuth1, opAuth1), which never
        // verified that opAuth2 was actually added.
        assertThat(user.getOpAuths(), IsCollectionContaining.hasItems(
                opAuth1, opAuth2
        ));
    }
    @Test
    public void shouldReplaceNullIdWithUnknownIdWhenBuildingUser() {
        // Given
        final String userId = null;
        // When
        final User user = new User.Builder()
                .userId(userId)
                .build();
        // Then
        assertEquals(User.UNKNOWN_USER_ID, user.getUserId());
    }
    @Test
    public void shouldReplaceEmptyIdWithUnknownIdWhenBuildingUser() {
        // Given
        final String userId = "";
        // When
        final User user = new User.Builder()
                .userId(userId)
                .build();
        // Then
        assertEquals(User.UNKNOWN_USER_ID, user.getUserId());
    }
    @Test
    public void shouldSetUnknownIdWhenBuildingUser() {
        // Given
        // When
        final User user = new User.Builder()
                .build();
        // Then
        assertEquals(User.UNKNOWN_USER_ID, user.getUserId());
    }
    @Test
    public void shouldNotAllowChangingDataAuths() {
        // Given
        final String userId = "user 01";
        final String dataAuth1 = "dataAuth 1";
        final String dataAuth2 = "dataAuth 2";
        final String newDataAuth = "new dataAuth";
        final User user = new User.Builder()
                .userId(userId)
                .dataAuth(dataAuth1)
                .dataAuth(dataAuth2)
                .build();
        // When
        try {
            user.getDataAuths().add(newDataAuth);
            fail("Exception expected");
        } catch (final UnsupportedOperationException e) {
            assertNotNull(e);
        }
        // Then
        assertFalse(user.getDataAuths().contains(newDataAuth));
    }
    @Test
    public void shouldNotAllowChangingOpAuths() {
        // Given
        final String userId = "user 01";
        final String opAuth1 = "opAuth 1";
        final String opAuth2 = "opAuth 2";
        final String newOpAuth = "new opAuth";
        final User user = new User.Builder()
                .userId(userId)
                .opAuth(opAuth1)
                .opAuth(opAuth2)
                .build();
        // When
        try {
            user.getOpAuths().add(newOpAuth);
            fail("Exception expected");
        } catch (final UnsupportedOperationException e) {
            assertNotNull(e);
        }
        // Then
        assertFalse(user.getOpAuths().contains(newOpAuth));
    }
    @Test
    public void shouldBeEqualWhen2UsersHaveSameFields() {
        // Given
        final String userId = "user 01";
        final String dataAuth1 = "dataAuth 1";
        final String dataAuth2 = "dataAuth 2";
        final String opAuth1 = "opAuth 1";
        final String opAuth2 = "opAuth 2";
        final User userLocked = new User.Builder()
                .userId(userId)
                .dataAuth(dataAuth1)
                .dataAuth(dataAuth2)
                .opAuth(opAuth1)
                .opAuth(opAuth2)
                .build();
        final User userUnlocked = new User.Builder()
                .userId(userId)
                .dataAuth(dataAuth1)
                .dataAuth(dataAuth2)
                .opAuth(opAuth1)
                .opAuth(opAuth2)
                .build();
        // When
        final boolean isEqual = userLocked.equals(userUnlocked);
        // Then
        assertTrue(isEqual);
        assertEquals(userLocked.hashCode(), userUnlocked.hashCode());
    }
    @Test
    public void shouldNotBeEqualWhen2UsersHaveDifferentUserIds() {
        // Given
        final String userId1 = "user 01";
        final String userId2 = "user 02";
        final String dataAuth1 = "dataAuth 1";
        final String dataAuth2 = "dataAuth 2";
        final String opAuth1 = "opAuth 1";
        final String opAuth2 = "opAuth 2";
        final User user1 = new User.Builder()
                .userId(userId1)
                .dataAuth(dataAuth1)
                .dataAuth(dataAuth2)
                .opAuth(opAuth1)
                .opAuth(opAuth2)
                .build();
        final User user2 = new User.Builder()
                .userId(userId2)
                .dataAuth(dataAuth1)
                .dataAuth(dataAuth2)
                .opAuth(opAuth1)
                .opAuth(opAuth2)
                .build();
        // When
        final boolean isEqual = user1.equals(user2);
        // Then
        assertFalse(isEqual);
        assertNotEquals(user1.hashCode(), user2.hashCode());
    }
    @Test
    public void shouldNotBeEqualWhen2UsersHaveDifferentDataAuths() {
        // Given
        final String userId = "user 01";
        final String dataAuth1 = "dataAuth 1";
        final String dataAuth2a = "dataAuth 2a";
        final String dataAuth2b = "dataAuth 2b";
        final User user1 = new User.Builder()
                .userId(userId)
                .dataAuth(dataAuth1)
                .dataAuth(dataAuth2a)
                .build();
        final User user2 = new User.Builder()
                .userId(userId)
                .dataAuth(dataAuth1)
                .dataAuth(dataAuth2b)
                .build();
        // When
        final boolean isEqual = user1.equals(user2);
        // Then
        assertFalse(isEqual);
        assertNotEquals(user1.hashCode(), user2.hashCode());
    }
    @Test
    public void shouldNotBeEqualWhen2UsersHaveDifferentOpAuths() {
        // Given
        final String userId = "user 01";
        final String opAuth1 = "opAuth 1";
        final String opAuth2a = "opAuth 2a";
        final String opAuth2b = "opAuth 2b";
        final User user1 = new User.Builder()
                .userId(userId)
                .opAuth(opAuth1)
                .opAuth(opAuth2a)
                .build();
        final User user2 = new User.Builder()
                .userId(userId)
                .opAuth(opAuth1)
                .opAuth(opAuth2b)
                .build();
        // When
        final boolean isEqual = user1.equals(user2);
        // Then
        assertFalse(isEqual);
        assertNotEquals(user1.hashCode(), user2.hashCode());
    }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.bitpatterns.gui;
import java.awt.*;
import java.util.List;
import javax.swing.*;
import javax.swing.border.TitledBorder;
import docking.*;
import docking.action.DockingAction;
import docking.action.MenuData;
import docking.widgets.table.GFilterTable;
import ghidra.bitpatterns.info.*;
import ghidra.util.HelpLocation;
import ghidra.util.bytesearch.DittedBitSequence;
import resources.ResourceManager;
/**
* This is a base class for providers which allow the user to analyze sequences of bytes.
*/
public abstract class ByteSequenceAnalyzerProvider extends DialogComponentProvider {
// Table model holding the byte sequences under analysis.
protected ByteSequenceTableModel byteSequenceTable;
protected FunctionBitPatternsExplorerPlugin plugin;
protected JPanel mainPanel;
// Top strip of the dialog holding the three text fields below.
private JPanel infoPanel;
// Displays the result of merging the selected sequences.
private JTextField mergedSeqTextField;
private JTextField bitsOfCheckField;
// Editable note attached to patterns sent to the clipboard.
private JTextField noteField;
private DittedBitSequence merged;
protected PatternType type;
protected ContextRegisterFilter cRegFilter;
protected String note;
protected String title;
// Set when a merged sequence is ready to be sent to the clipboard.
private boolean mergedToSend = false;
private DockingAction sendSelectedToClipboardAction;
private DockingAction mergeAction;
private DockingAction sendMergedToClipboardAction;
/**
 * Creates a dialog for analyzing sequences of bytes.
 * @param title dialog title
 * @param plugin plugin
 * @param rowObjects row objects representing sequences to analyze
 * @param parent parent component
 * @param type type of sequences
 * @param cRegFilter context register filter
 * @param note note for clipboard
 */
public ByteSequenceAnalyzerProvider(String title, FunctionBitPatternsExplorerPlugin plugin,
List<ByteSequenceRowObject> rowObjects, Component parent, PatternType type,
ContextRegisterFilter cRegFilter, String note) {
// Boolean flags configure the DialogComponentProvider base class - see its
// javadoc for their meaning.
super(title, false, true, true, false);
this.plugin = plugin;
this.type = type;
this.cRegFilter = cRegFilter;
this.note = note;
this.title = title;
// Build the UI: the table of sequences, the info strip above it, and the
// combined work panel.
byteSequenceTable = createByteSequenceTable(plugin, rowObjects);
infoPanel = createInfoPanel();
mainPanel = createMainPanel();
addWorkPanel(mainPanel);
addCancelButton();
// Register the table actions (selection-to-clipboard, merge, merged-to-clipboard).
addSendSelectedToClipboardAction();
addMergeAction();
addSendMergedToClipboardAction();
cancelButton.setText("Dismiss");
HelpLocation helpLocation =
new HelpLocation("FunctionBitPatternsExplorerPlugin", "Analyzing_Byte_Sequences");
setHelpLocation(helpLocation);
this.setDefaultSize(1200, 800);
// Display the dialog once fully constructed.
DockingWindowManager.showDialog(parent, this);
}
/**
 * Lays out the dialog's work panel: the info fields along the top and the
 * filterable byte-sequence table filling the remaining space.
 *
 * @return the assembled main panel
 */
private JPanel createMainPanel() {
    JPanel workArea = new JPanel(new BorderLayout());
    workArea.add(infoPanel, BorderLayout.NORTH);
    workArea.add(new GFilterTable<ByteSequenceRowObject>(byteSequenceTable), BorderLayout.CENTER);
    return workArea;
}
/**
 * Builds the strip of informational fields: the merged-sequence read-out,
 * the bits-of-check read-out, and the user-editable note.
 *
 * @return the assembled info panel
 */
private JPanel createInfoPanel() {
	mergedSeqTextField = buildTitledField(60, "Merged Selections", false);
	bitsOfCheckField = buildTitledField(5, "Bits of Check", false);
	noteField = buildTitledField(60, "Note", true);
	noteField.setText(note);
	JPanel infoContainer = new JPanel(new BorderLayout());
	infoContainer.add(mergedSeqTextField, BorderLayout.NORTH);
	infoContainer.add(bitsOfCheckField, BorderLayout.CENTER);
	infoContainer.add(noteField, BorderLayout.SOUTH);
	return infoContainer;
}

/**
 * Creates a text field with a titled border.
 *
 * @param columns preferred width in columns
 * @param borderTitle title shown on the border
 * @param editable whether the user may edit the field
 * @return the configured field
 */
private JTextField buildTitledField(int columns, String borderTitle, boolean editable) {
	JTextField field = new JTextField(columns);
	field.setEditable(editable);
	field.setBorder(new TitledBorder(borderTitle));
	return field;
}
/**
 * Installs the action that turns each selected byte sequence into a
 * {@link PatternInfoRowObject} (noted with its disassembly) and pushes the
 * batch to the plugin's pattern clipboard.
 */
private void addSendSelectedToClipboardAction() {
	sendSelectedToClipboardAction = new DockingAction("Send Selected to Clipboard", title) {
		@Override
		public void actionPerformed(ActionContext context) {
			List<ByteSequenceRowObject> selected = byteSequenceTable.getLastSelectedObjects();
			for (ByteSequenceRowObject rowObject : selected) {
				DittedBitSequence sequence = new DittedBitSequence(rowObject.getSequence(), true);
				PatternInfoRowObject patternInfo = new PatternInfoRowObject(type, sequence, cRegFilter);
				patternInfo.setNote(rowObject.getDisassembly());
				plugin.addPattern(patternInfo);
			}
			plugin.updateClipboard();
		}

		@Override
		public boolean isEnabledForContext(ActionContext context) {
			// Only meaningful when at least one row is selected.
			List<ByteSequenceRowObject> selected = byteSequenceTable.getLastSelectedObjects();
			return selected != null && !selected.isEmpty();
		}

		@Override
		public boolean isAddToPopup(ActionContext context) {
			return true;
		}
	};
	ImageIcon icon = ResourceManager.loadImage("images/2rightarrow.png");
	sendSelectedToClipboardAction.setPopupMenuData(
		new MenuData(new String[] { "Send Selected to Clipboard" }, icon));
	sendSelectedToClipboardAction.setDescription(
		"Creates patterns for the currently-selected strings of " +
		"bytes and sends them to the clipboard");
	sendSelectedToClipboardAction.setHelpLocation(
		new HelpLocation("FunctionBitPatternsExplorerPlugin", "Analyzing_Byte_Sequences"));
	addAction(sendSelectedToClipboardAction);
}
/**
 * Installs the action that merges the currently-selected rows into a single
 * {@link DittedBitSequence} and displays it (and its number of fixed bits)
 * in the info fields, arming the "Send Merged to Clipboard" action.
 */
private void addMergeAction() {
	mergeAction = new DockingAction("Merge Selected Rows", title) {
		@Override
		public void actionPerformed(ActionContext context) {
			merged = byteSequenceTable.mergeSelectedRows();
			if (merged == null) {
				return; // nothing selected or merge not possible
			}
			mergedSeqTextField.setText(merged.getHexString());
			bitsOfCheckField.setText(Integer.toString(merged.getNumFixedBits()));
			// White signals a fresh, not-yet-sent merge result.
			mergedSeqTextField.setBackground(Color.WHITE);
			bitsOfCheckField.setBackground(Color.WHITE);
			noteField.setBackground(Color.WHITE);
			mergedToSend = true;
		}

		@Override
		public boolean isAddToPopup(ActionContext context) {
			return true;
		}

		@Override
		public boolean isEnabledForContext(ActionContext context) {
			// getLastSelectedObjects() can return null (the other actions in this
			// provider guard against it), so null-check before isEmpty() to avoid an NPE.
			List<ByteSequenceRowObject> rows = byteSequenceTable.getLastSelectedObjects();
			return rows != null && !rows.isEmpty();
		}
	};
	ImageIcon icon = ResourceManager.loadImage("images/xor.png");
	mergeAction.setPopupMenuData(new MenuData(new String[] { "Merge Selected Rows" }, icon));
	mergeAction.setDescription("Merges the currently selected rows");
	mergeAction.setHelpLocation(
		new HelpLocation("FunctionBitPatternsExplorerPlugin", "Analyzing_Byte_Sequences"));
	this.addAction(mergeAction);
}
/**
 * Installs the action that sends the current merge result to the plugin's
 * pattern clipboard, tagging it with the (possibly user-edited) note.
 * Enabled only while an unsent merge result exists.
 */
private void addSendMergedToClipboardAction() {
	sendMergedToClipboardAction = new DockingAction("Send Merged to Clipboard", title) {
		@Override
		public void actionPerformed(ActionContext context) {
			if (merged != null) {
				PatternInfoRowObject mergedInfo =
					new PatternInfoRowObject(type, merged, cRegFilter);
				// Pick up any edits the user made to the note field.
				note = noteField.getText();
				mergedInfo.setNote(note);
				plugin.addPattern(mergedInfo);
				plugin.updateClipboard();
				// Gray out the fields to signal the merge result has been sent.
				// (Was Color.lightGray on the first line — use the modern
				// LIGHT_GRAY constant consistently; they are the same object.)
				mergedSeqTextField.setBackground(Color.LIGHT_GRAY);
				bitsOfCheckField.setBackground(Color.LIGHT_GRAY);
				noteField.setBackground(Color.LIGHT_GRAY);
				mergedToSend = false;
			}
		}

		@Override
		public boolean isAddToPopup(ActionContext context) {
			return true;
		}

		@Override
		public boolean isEnabledForContext(ActionContext context) {
			return mergedToSend;
		}
	};
	ImageIcon icon = ResourceManager.loadImage("images/smallRightArrow.png");
	sendMergedToClipboardAction.setPopupMenuData(
		new MenuData(new String[] { "Send Merged to Clipboard" }, icon));
	sendMergedToClipboardAction.setDescription("Sends the Merge Patterns to the Clipboard");
	sendMergedToClipboardAction.setHelpLocation(
		new HelpLocation("FunctionBitPatternsExplorerPlugin", "Analyzing_Byte_Sequences"));
	this.addAction(sendMergedToClipboardAction);
}
/**
 * Creates the table model of byte sequences to analyze.
 * @param FBPplugin the owning plugin
 * @param rows row objects containing the sequences to analyze
 * @return the table model displayed in this dialog
 */
abstract ByteSequenceTableModel createByteSequenceTable(
FunctionBitPatternsExplorerPlugin FBPplugin, List<ByteSequenceRowObject> rows);
}
| |
/**
* Copyright 2015-2017 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.undertow;
import org.wildfly.swarm.config.Undertow;
import org.wildfly.swarm.config.runtime.AttributeDocumentation;
import org.wildfly.swarm.config.undertow.BufferCache;
import org.wildfly.swarm.config.undertow.HandlerConfiguration;
import org.wildfly.swarm.config.undertow.Server;
import org.wildfly.swarm.config.undertow.ServletContainer;
import org.wildfly.swarm.config.undertow.server.Host;
import org.wildfly.swarm.config.undertow.servlet_container.JSPSetting;
import org.wildfly.swarm.config.undertow.servlet_container.WebsocketsSetting;
import org.wildfly.swarm.spi.api.Defaultable;
import org.wildfly.swarm.spi.api.Fraction;
import org.wildfly.swarm.spi.api.annotations.Configurable;
import org.wildfly.swarm.spi.api.annotations.DeploymentModule;
import org.wildfly.swarm.spi.api.annotations.MarshalDMR;
import org.wildfly.swarm.spi.api.annotations.WildFlyExtension;
import static org.wildfly.swarm.spi.api.Defaultable.bool;
import static org.wildfly.swarm.spi.api.Defaultable.ifAnyExplicitlySet;
import static org.wildfly.swarm.spi.api.Defaultable.integer;
import static org.wildfly.swarm.undertow.UndertowProperties.DEFAULT_AJP_PORT;
import static org.wildfly.swarm.undertow.UndertowProperties.DEFAULT_BUFFER_CACHE;
import static org.wildfly.swarm.undertow.UndertowProperties.DEFAULT_HOST;
import static org.wildfly.swarm.undertow.UndertowProperties.DEFAULT_HTTPS_PORT;
import static org.wildfly.swarm.undertow.UndertowProperties.DEFAULT_HTTP_LISTENER;
import static org.wildfly.swarm.undertow.UndertowProperties.DEFAULT_HTTP_PORT;
import static org.wildfly.swarm.undertow.UndertowProperties.DEFAULT_SERVER;
import static org.wildfly.swarm.undertow.UndertowProperties.DEFAULT_SERVLET_CONTAINER;
/**
* @author Bob McWhirter
*/
@MarshalDMR
@WildFlyExtension(module = "org.wildfly.extension.undertow")
@DeploymentModule(name = "org.jboss.modules")
public class UndertowFraction extends Undertow<UndertowFraction> implements Fraction {
/**
 * Create the default, HTTP-only fraction.
 *
 * @return The configured fraction.
 */
public static UndertowFraction createDefaultFraction() {
UndertowFraction fraction = new UndertowFraction();
return fraction.applyDefaults();
}
/**
 * Apply the stock configuration to this fraction: the default server with
 * an HTTP listener bound to the "http" socket binding and the default
 * virtual host, plus the default servlet container (websockets and JSP
 * settings), buffer cache and handler configuration.
 *
 * @return This fraction.
 */
public UndertowFraction applyDefaults() {
defaultServer(DEFAULT_SERVER)
.defaultVirtualHost(DEFAULT_HOST)
.defaultServletContainer(DEFAULT_SERVLET_CONTAINER)
.defaultSecurityDomain("other")
.server(new Server(DEFAULT_SERVER)
.httpListener(DEFAULT_HTTP_LISTENER, (listener) -> {
listener.socketBinding("http");
})
.host(new Host(DEFAULT_HOST)))
.bufferCache(new BufferCache(DEFAULT_BUFFER_CACHE))
.servletContainer(new ServletContainer(DEFAULT_SERVLET_CONTAINER)
.websocketsSetting(new WebsocketsSetting())
.jspSetting(new JSPSetting()))
.handlerConfiguration(new HandlerConfiguration());
return this;
}
/**
 * Create the default HTTP and HTTPS fraction.
 *
 * <p>This default requires configuration for accessing a keystore.
 * The application also <b>must</b> include the <code>management</code>
 * fraction in its dependencies.</p>
 *
 * @param path The keystore path.
 * @param password The keystore password.
 * @param alias The server certificate alias.
 * @return The configured fraction.
 * @see #enableHTTPS(String, String, String)
 */
public static UndertowFraction createDefaultFraction(String path, String password, String alias) {
return createDefaultFraction()
.enableHTTPS(path, password, alias);
}
/**
 * Create the default HTTP and AJP fraction.
 *
 * @return The configured fraction.
 * @see #enableAJP()
 */
public static UndertowFraction createDefaultAndEnableAJPFraction() {
return createDefaultFraction()
.enableAJP();
}
/**
 * Create the default HTTPS-only fraction.
 *
 * <p>This default inhibits the non-SSL HTTP endpoint, and only creates
 * the default HTTPS endpoint. The application also <b>must</b> include
 * the <code>management</code> fraction in its dependencies.</p>
 *
 * @param path The keystore path.
 * @param password The keystore password.
 * @param alias The server certificate alias.
 * @return The configured fraction.
 * @see #enableHTTPS(String, String, String)
 */
public static UndertowFraction createDefaultHTTPSOnlyFraction(String path, String password, String alias) {
UndertowFraction fraction = createDefaultFraction();
fraction.removeHttpListenersFromDefaultServer()
.enableHTTPS(path, password, alias);
return fraction;
}
/**
 * Create the default AJP-only fraction.
 *
 * <p>This default inhibits the HTTP endpoint, and only creates
 * the default AJP endpoint.</p>
 *
 * @return The configured fraction.
 * @see #enableAJP()
 */
public static UndertowFraction createDefaultAJPOnlyFraction() {
UndertowFraction fraction = createDefaultFraction();
fraction.removeHttpListenersFromDefaultServer()
.enableAJP();
return fraction;
}
/**
 * Enable HTTPS on this fraction.
 *
 * <p>This will enable HTTPS of the fraction. The keystore password is also
 * used as the key password. The application also
 * <b>must</b> include the <code>management</code> fraction in its
 * dependencies.</p>
 *
 * @param path The keystore path.
 * @param password The keystore password.
 * @param alias The server certificate alias.
 * @return This fraction.
 */
public UndertowFraction enableHTTPS(String path, String password, String alias) {
return enableHTTPS(path, password, password, alias);
}
/**
 * Enable HTTPS on this fraction.
 *
 * <p>This will enable HTTPS of the fraction. The application also
 * <b>must</b> include the <code>management</code> fraction in its
 * dependencies.</p>
 *
 * @param path The keystore path.
 * @param keystorePassword The keystore password.
 * @param keyPassword The key password inside the keystore.
 * @param alias The server certificate alias.
 * @return This fraction.
 */
public UndertowFraction enableHTTPS(String path, String keystorePassword, String keyPassword, String alias) {
this.keystorePath = path;
this.keystorePassword = keystorePassword;
this.keyPassword = keyPassword;
this.alias = alias;
return this;
}
/**
 * Enable AJP on this fraction.
 *
 * @return This fraction.
 */
public UndertowFraction enableAJP() {
this.enableAJP.set(true);
return this;
}
/**
 * Inhibit the plain HTTP listener so only the HTTPS listener is enabled.
 *
 * @return This fraction.
 */
public UndertowFraction onlyHTTPS() {
this.onlyHTTPS.set(true);
return this;
}
/** @return The keystore password. */
public String keystorePassword() {
return this.keystorePassword;
}
/** @return The key password inside the keystore. */
public String keyPassword() {
return this.keyPassword;
}
/** @return The keystore path. */
public String keystorePath() {
return this.keystorePath;
}
/** @return The server certificate alias. */
public String alias() {
return this.alias;
}
/** @return Whether only the HTTPS listener is enabled. */
public boolean isOnlyHTTPS() {
return this.onlyHTTPS.get();
}
/** @return Whether AJP is enabled. */
public boolean isEnableAJP() {
return this.enableAJP.get();
}
/**
 * Remove all HTTP listeners from the default server (used by the
 * HTTPS-only and AJP-only factories).
 *
 * <p>NOTE(review): uses the literal "default-server" rather than
 * {@code DEFAULT_SERVER} — presumably the same value; confirm.</p>
 *
 * @return This fraction.
 */
public UndertowFraction removeHttpListenersFromDefaultServer() {
this.subresources().server("default-server")
.subresources().httpListeners().clear();
return this;
}
/**
 * Set the port for the default HTTP listener.
 *
 * @param httpPort The HTTP port.
 * @return This fraction.
 */
public UndertowFraction httpPort(int httpPort) {
this.httpPort.set(httpPort);
return this;
}
/** @return The port of the default HTTP listener. */
public int httpPort() {
return this.httpPort.get();
}
/**
 * Set the port for the default HTTPS listener.
 *
 * @param httpsPort The HTTPS port.
 * @return This fraction.
 */
public UndertowFraction httpsPort(int httpsPort) {
this.httpsPort.set(httpsPort);
return this;
}
/** @return The port of the default HTTPS listener. */
public int httpsPort() {
return this.httpsPort.get();
}
/**
 * Set the port for the default AJP listener.
 *
 * @param ajpPort The AJP port.
 * @return This fraction.
 */
public UndertowFraction ajpPort(int ajpPort) {
this.ajpPort.set(ajpPort);
return this;
}
/** @return The port of the default AJP listener. */
public int ajpPort() {
return this.ajpPort.get();
}
@Configurable("thorntail.http.port")
@AttributeDocumentation("Set the port for the default HTTP listener")
private Defaultable<Integer> httpPort = integer(DEFAULT_HTTP_PORT);
@Configurable("thorntail.https.port")
@AttributeDocumentation("Set the port for the default HTTPS listener")
private Defaultable<Integer> httpsPort = integer(DEFAULT_HTTPS_PORT);
@Configurable("thorntail.ajp.port")
@AttributeDocumentation("Set the port for the default AJP listener")
private Defaultable<Integer> ajpPort = integer(DEFAULT_AJP_PORT);
/**
 * Path to the keystore.
 *
 * <p>The doubled {@code @Configurable} exposes two property aliases —
 * presumably the {@code thorntail.http.*} names are legacy spellings;
 * confirm against the configuration docs.</p>
 */
@Configurable("thorntail.https.keystore.path")
@Configurable("thorntail.http.keystore.path")
@AttributeDocumentation("Path to the server keystore")
private String keystorePath;
/**
 * Password for the keystore (two property aliases, as above).
 */
@Configurable("thorntail.https.keystore.password")
@Configurable("thorntail.http.keystore.password")
@AttributeDocumentation("Password to the server keystore")
private String keystorePassword;
/**
 * Password for the key (two property aliases, as above).
 */
@Configurable("thorntail.https.key.password")
@Configurable("thorntail.http.key.password")
@AttributeDocumentation("Password to the server certificate")
private String keyPassword;
/**
 * Alias of Server certificate key entry in the keystore
 * (two property aliases, as above).
 */
@Configurable("thorntail.https.key.alias")
@Configurable("thorntail.http.certificate.alias")
@AttributeDocumentation("Alias to the server certificate key entry in the keystore")
private String alias;
/**
 * Whether or not disable HTTP interface.
 */
@Configurable("thorntail.https.only")
@AttributeDocumentation("Only enable the HTTPS Listener")
private Defaultable<Boolean> onlyHTTPS = bool(false);
/**
 * Whether or not enabling AJP; defaults to true when an explicit AJP port
 * has been set.
 */
@Configurable("thorntail.ajp.enable")
@AttributeDocumentation("Determine if AJP should be enabled")
private Defaultable<Boolean> enableAJP = ifAnyExplicitlySet(this.ajpPort);
}
| |
package com.beardedhen.androidbootstrap;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.beardedhen.androidbootstrap.utils.ImageUtils;
/**
 * A circular, Bootstrap-styled thumbnail view.  Shows either a round-cropped
 * image or, when no image is supplied, a circular placeholder with a text
 * label.  Size and padding come from the {@code bct_size} XML attribute.
 */
public class BootstrapCircleThumbnail extends FrameLayout
{
// Supported thumbnail sizes; padding and diameter are in dp.
private enum BootstrapCircleType {
SMALL( "small", 2, 48), //padding adjustment for small thumbnails
MEDIUM( "medium", 4, 80),
LARGE( "large", 6, 112),
XLARGE( "xlarge", 8, 176);
private String type;
private int padding;
private int diameter;
private BootstrapCircleType(String type, int padding, int diameter) {
this.type = type;
this.padding = padding;
this.diameter = diameter;
}
public int getDiameter() { // dp
return diameter;
}
public int getPadding() {
return padding;
}
// Maps the XML attribute value to a size; falls back to MEDIUM when unknown.
public static BootstrapCircleType getBootstrapCircleTypeFromString(String type) {
for (BootstrapCircleType value : BootstrapCircleType.values()) {
if (value.type.equals(type)) {
return value;
}
}
return MEDIUM;
}
}
private ImageView image;
private boolean minimal = false;//minimal means display just the image, no padding
// Logical image size in dp, derived from the chosen BootstrapCircleType.
private int imageWidth;
private int imageHeight;
private int padding = 0;
// Shown (with dimensionsLabel) when no image has been set.
private LinearLayout placeholder;
private TextView dimensionsLabel;
public BootstrapCircleThumbnail(Context context)
{
super(context);
initialise(null);
}
public BootstrapCircleThumbnail(Context context, AttributeSet attrs)
{
super(context, attrs);
initialise(attrs);
}
public BootstrapCircleThumbnail(Context context, AttributeSet attrs, int defStyle)
{
super(context, attrs, defStyle);
initialise(attrs);
}
/**
 * Reads the XML attributes, inflates the layout and sizes the circle.
 * When no {@code bct_image} is given, the placeholder circle with the
 * {@code android:text} label is shown instead of the image.
 */
private void initialise( AttributeSet attrs )
{
LayoutInflater inflater = LayoutInflater.from(getContext());
TypedArray a = getContext().obtainStyledAttributes(attrs,
R.styleable.BootstrapCircleThumbnail);
String size;
String text = "";
int imageDrawable;
try {
imageDrawable = a.getResourceId(R.styleable.BootstrapCircleThumbnail_bct_image, 0);
text = a.getString(R.styleable.BootstrapCircleThumbnail_android_text);
text = (text == null) ? "" : text;
size = a.getString(R.styleable.BootstrapCircleThumbnail_bct_size);
size = (size == null) ? "" : size;
minimal = a.getBoolean(R.styleable.BootstrapCircleThumbnail_bct_minimal, false);
}
finally {
// TypedArrays are pooled and must always be recycled.
a.recycle();
}
View v = inflater.inflate(R.layout.bootstrap_thumbnail_circle, this, false);
dimensionsLabel = (TextView) v.findViewById(R.id.dimensionsLabel);
LinearLayout container = (LinearLayout) v.findViewById(R.id.container);
placeholder = (LinearLayout) v.findViewById(R.id.placeholder);
image = (ImageView) v.findViewById(R.id.image);
// Density factor for the dp -> px conversions below.
float scale = getResources().getDisplayMetrics().density;
//small image
BootstrapCircleType type = BootstrapCircleType.getBootstrapCircleTypeFromString(size);
padding = type.getPadding();
imageWidth = type.getDiameter();
imageHeight = type.getDiameter();
//convert padding to pixels
int paddingPX = (int)((padding * scale) + 0.5);
//convert image size to pixels
int imageSizeWidthPX = (int)((imageWidth * scale) + 0.5);
int imageSizeHeightPX = (int)((imageHeight * scale) + 0.5);
//make inner image smaller to compensate for the padding so that entire circle including padding equals the size
//ex. small image = 48dp, small padding = 4dp, inner image = 48 - (4 * 2) = 40
if(!this.minimal)
{
imageSizeWidthPX = imageSizeWidthPX - (paddingPX * 2);
imageSizeHeightPX = imageSizeHeightPX - (paddingPX * 2);
container.setPadding(paddingPX, paddingPX, paddingPX, paddingPX);
container.setBackgroundResource(R.drawable.thumbnail_circle_container);
}
else
{
container.setBackgroundResource(R.drawable.thumbnail_circle_minimal);
}
//if no image is given
if(imageDrawable == 0)
{
this.image.setVisibility(View.GONE);
placeholder.setLayoutParams(new LinearLayout.LayoutParams(imageSizeWidthPX, imageSizeHeightPX));
placeholder.setPadding(paddingPX, paddingPX, paddingPX, paddingPX);
//set placeholder image
placeholder.setBackgroundResource(R.drawable.thumbnail_circle);
dimensionsLabel.setText(text);
}
else
{
placeholder.setPadding(0, 0, 0, 0);
dimensionsLabel.setVisibility(View.GONE);
Bitmap bitmap = BitmapFactory.decodeResource(getContext().getResources(), imageDrawable);
Bitmap roundBitmap = ImageUtils.getCircleBitmap(bitmap, imageSizeWidthPX, imageSizeHeightPX);
image.setImageBitmap(roundBitmap);
}
this.addView(v);
}
/**
 * Sets the thumbnail image from a drawable resource id.
 */
public void setImage(int drawable)
{
Bitmap bitmap = BitmapFactory.decodeResource(getContext().getResources(), drawable);
setImage(bitmap);
}
/**
 * Sets the thumbnail image from a bitmap, replacing the placeholder.  The
 * bitmap is cropped to a circle sized by the configured thumbnail type
 * (shrunk by the padding unless in minimal mode).
 */
public void setImage(Bitmap bitmap)
{
placeholder.setPadding(0, 0, 0, 0);
this.dimensionsLabel.setVisibility(View.GONE);
this.image.setVisibility(View.VISIBLE);
float scale = getResources().getDisplayMetrics().density;
//convert image size to pixels
int widthPX = (int)((this.imageWidth * scale) + 0.5);
int heightPX = (int)((this.imageHeight * scale) + 0.5);
int paddingPX = (int)((this.padding * scale) + 0.5);
// Mirror the shrink-by-padding logic used in initialise().
if(!this.minimal)
{
widthPX = widthPX - (paddingPX * 2);
heightPX = heightPX - (paddingPX * 2);
}
Bitmap roundBitmap = ImageUtils.getCircleBitmap(bitmap, widthPX, heightPX);
image.setVisibility(View.VISIBLE);
image.setImageBitmap(roundBitmap);
requestLayout();
invalidate();
}
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.cassandra.cql3.validation.operations;
import java.util.Arrays;
import org.junit.BeforeClass;
import org.junit.Test;
import static junit.framework.Assert.assertNull;
import static org.junit.Assert.assertEquals;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.cql3.CQLTester;
import org.apache.cassandra.cql3.UntypedResultSet;
import org.apache.cassandra.cql3.restrictions.StatementRestrictions;
import org.apache.cassandra.dht.ByteOrderedPartitioner;
/**
* SELECT statement tests that require a ByteOrderedPartitioner
*/
public class SelectOrderedPartitionerTest extends CQLTester
{
@BeforeClass
public static void setUp()
{
// All tests in this class need token order == byte order of the partition key.
DatabaseDescriptor.setPartitionerUnsafe(ByteOrderedPartitioner.instance);
}
/**
 * Checks ALLOW FILTERING on partition-key columns combined with token()
 * range restrictions, both before and after flush.
 */
@Test
public void testFilteringOnPartitionKeyWithToken() throws Throwable
{
createTable("CREATE TABLE %s (a int, b int, c int, d int, PRIMARY KEY ((a, b), c))");
// Two rows per i: (i, i, ...) and (i, i+10, ...).
for (int i = 0; i < 10; i++)
{
execute("INSERT INTO %s (a,b,c,d) VALUES (?, ?, ?, ?)", i, i, i, i);
execute("INSERT INTO %s (a,b,c,d) VALUES (?, ?, ?, ?)", i, i + 10, i + 10, i + 10);
}
beforeAndAfterFlush(() -> {
assertRowsIgnoringOrder(execute("SELECT * FROM %s WHERE token(a, b) > token(5, 10) AND b < 8 ALLOW FILTERING"),
row(6, 6, 6, 6),
row(7, 7, 7, 7));
assertRows(execute("SELECT * FROM %s WHERE token(a, b) > token(8, 10) AND a = 9 ALLOW FILTERING"),
row(9, 9, 9, 9),
row(9, 19, 19, 19));
assertRows(execute("SELECT * FROM %s WHERE token(a, b) > token(8, 10) AND a = 9 AND c = 19 ALLOW FILTERING"),
row(9, 19, 19, 19));
assertEmpty(execute("SELECT * FROM %s WHERE token(a, b) = token(8, 8) AND b = 9 ALLOW FILTERING"));
});
}
/**
 * Valid and invalid token() restrictions on a single-column partition key:
 * duplicate bounds, mixed equality/inequality and IN are all rejected.
 */
@Test
public void testTokenFunctionWithSingleColumnPartitionKey() throws Throwable
{
createTable("CREATE TABLE IF NOT EXISTS %s (a int PRIMARY KEY, b text)");
execute("INSERT INTO %s (a, b) VALUES (0, 'a')");
assertRows(execute("SELECT * FROM %s WHERE token(a) >= token(?)", 0), row(0, "a"));
assertRows(execute("SELECT * FROM %s WHERE token(a) >= token(?) and token(a) < token(?)", 0, 1), row(0, "a"));
// Wrong argument type for token().
assertInvalid("SELECT * FROM %s WHERE token(a) > token(?)", "a");
assertInvalidMessage("The token() function must contains only partition key components",
"SELECT * FROM %s WHERE token(a, b) >= token(?, ?)", "b", 0);
assertInvalidMessage("More than one restriction was found for the start bound on a",
"SELECT * FROM %s WHERE token(a) >= token(?) and token(a) >= token(?)", 0, 1);
assertInvalidMessage("Columns \"a\" cannot be restricted by both an equality and an inequality relation",
"SELECT * FROM %s WHERE token(a) >= token(?) and token(a) = token(?)", 0, 1);
assertInvalidSyntax("SELECT * FROM %s WHERE token(a) = token(?) and token(a) IN (token(?))", 0, 1);
assertInvalidMessage("More than one restriction was found for the start bound on a",
"SELECT * FROM %s WHERE token(a) > token(?) AND token(a) > token(?)", 1, 2);
assertInvalidMessage("More than one restriction was found for the end bound on a",
"SELECT * FROM %s WHERE token(a) <= token(?) AND token(a) < token(?)", 1, 2);
assertInvalidMessage("Columns \"a\" cannot be restricted by both an equality and an inequality relation",
"SELECT * FROM %s WHERE token(a) > token(?) AND token(a) = token(?)", 1, 2);
assertInvalidMessage("a cannot be restricted by more than one relation if it includes an Equal",
"SELECT * FROM %s WHERE token(a) = token(?) AND token(a) > token(?)", 1, 2);
}
/**
 * token() must not accept clustering columns as arguments.
 */
@Test
public void testTokenFunctionWithPartitionKeyAndClusteringKeyArguments() throws Throwable
{
createTable("CREATE TABLE IF NOT EXISTS %s (a int, b text, PRIMARY KEY (a, b))");
assertInvalidMessage("The token() function must contains only partition key components",
"SELECT * FROM %s WHERE token(a, b) > token(0, 'c')");
}
/**
 * token() over a composite partition key: must cover all key components,
 * in partition-key order, or be rejected.
 */
@Test
public void testTokenFunctionWithMultiColumnPartitionKey() throws Throwable
{
createTable("CREATE TABLE IF NOT EXISTS %s (a int, b text, PRIMARY KEY ((a, b)))");
execute("INSERT INTO %s (a, b) VALUES (0, 'a')");
execute("INSERT INTO %s (a, b) VALUES (0, 'b')");
execute("INSERT INTO %s (a, b) VALUES (0, 'c')");
assertRows(execute("SELECT * FROM %s WHERE token(a, b) > token(?, ?)", 0, "a"),
row(0, "b"),
row(0, "c"));
assertRows(execute("SELECT * FROM %s WHERE token(a, b) > token(?, ?) and token(a, b) < token(?, ?)",
0, "a",
0, "d"),
row(0, "b"),
row(0, "c"));
assertInvalidMessage("The token() function must be applied to all partition key components or none of them",
"SELECT * FROM %s WHERE token(a) > token(?) and token(b) > token(?)", 0, "a");
assertInvalidMessage("The token() function must be applied to all partition key components or none of them",
"SELECT * FROM %s WHERE token(a) > token(?, ?) and token(a) < token(?, ?) and token(b) > token(?, ?) ",
0, "a", 0, "d", 0, "a");
assertInvalidMessage("The token function arguments must be in the partition key order: a, b",
"SELECT * FROM %s WHERE token(b, a) > token(0, 'c')");
assertInvalidMessage("The token() function must be applied to all partition key components or none of them",
"SELECT * FROM %s WHERE token(a, b) > token(?, ?) and token(b) < token(?, ?)", 0, "a", 0, "a");
assertInvalidMessage("The token() function must be applied to all partition key components or none of them",
"SELECT * FROM %s WHERE token(a) > token(?, ?) and token(b) > token(?, ?)", 0, "a", 0, "a");
}
/**
 * Mixes token() range restrictions with non-token restrictions (=, IN) on a
 * single-column partition key; with a byte-ordered partitioner the token
 * ranges translate directly into key ranges.
 */
@Test
public void testSingleColumnPartitionKeyWithTokenNonTokenRestrictionsMix() throws Throwable
{
createTable("CREATE TABLE %s (a int primary key, b int)");
execute("INSERT INTO %s (a, b) VALUES (0, 0);");
execute("INSERT INTO %s (a, b) VALUES (1, 1);");
execute("INSERT INTO %s (a, b) VALUES (2, 2);");
execute("INSERT INTO %s (a, b) VALUES (3, 3);");
execute("INSERT INTO %s (a, b) VALUES (4, 4);");
assertRows(execute("SELECT * FROM %s WHERE a IN (?, ?);", 1, 3),
row(1, 1),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a)> token(?) and token(a) <= token(?);", 1, 3),
row(2, 2),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a)= token(2);"),
row(2, 2));
// Token range intersected with IN — only the overlap is returned.
assertRows(execute("SELECT * FROM %s WHERE token(a) > token(?) AND token(a) <= token(?) AND a IN (?, ?);",
1, 3, 1, 3),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a) < token(?) AND token(a) >= token(?) AND a IN (?, ?);",
1, 3, 1, 3),
row(3, 3));
assertInvalidMessage(StatementRestrictions.REQUIRES_ALLOW_FILTERING_MESSAGE,
"SELECT * FROM %s WHERE token(a) > token(?) AND token(a) <= token(?) AND a > ?;", 1, 3, 1);
assertRows(execute("SELECT * FROM %s WHERE token(a) > token(?) AND token(a) <= token(?) AND a IN ?;",
1, 3, Arrays.asList(1, 3)),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a) > token(?) AND a = ?;", 1, 3),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE a = ? AND token(a) > token(?);", 3, 1),
row(3, 3));
assertEmpty(execute("SELECT * FROM %s WHERE token(a) > token(?) AND a = ?;", 3, 1));
assertEmpty(execute("SELECT * FROM %s WHERE a = ? AND token(a) > token(?);", 1, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a) > token(?) AND a IN (?, ?);", 2, 1, 3),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a) > token(?) AND token(a) < token(?) AND a IN (?, ?) ;", 2, 5, 1, 3),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE a IN (?, ?) AND token(a) > token(?) AND token(a) < token(?);", 1, 3, 2, 5),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a) > token(?) AND a IN (?, ?) AND token(a) < token(?);", 2, 1, 3, 5),
row(3, 3));
assertEmpty(execute("SELECT * FROM %s WHERE a IN (?, ?) AND token(a) > token(?);", 1, 3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a) <= token(?) AND a = ?;", 2, 2),
row(2, 2));
assertEmpty(execute("SELECT * FROM %s WHERE token(a) <= token(?) AND a = ?;", 2, 3));
assertEmpty(execute("SELECT * FROM %s WHERE token(a) = token(?) AND a = ?;", 2, 3));
// Inclusive vs. exclusive bounds around a single key value.
assertRows(execute("SELECT * FROM %s WHERE token(a) >= token(?) AND token(a) <= token(?) AND a = ?;", 2, 2, 2),
row(2, 2));
assertEmpty(execute("SELECT * FROM %s WHERE token(a) >= token(?) AND token(a) < token(?) AND a = ?;", 2, 2, 2));
assertEmpty(execute("SELECT * FROM %s WHERE token(a) > token(?) AND token(a) <= token(?) AND a = ?;", 2, 2, 2));
assertEmpty(execute("SELECT * FROM %s WHERE token(a) > token(?) AND token(a) < token(?) AND a = ?;", 2, 2, 2));
}
/**
 * Mixes token() restrictions with per-column restrictions on a composite
 * partition key; restrictions on a strict subset of the key require
 * ALLOW FILTERING.
 */
@Test
public void testMultiColumnPartitionKeyWithTokenNonTokenRestrictionsMix() throws Throwable
{
createTable("CREATE TABLE %s (a int, b int, c int, primary key((a, b)))");
execute("INSERT INTO %s (a, b, c) VALUES (0, 0, 0);");
execute("INSERT INTO %s (a, b, c) VALUES (0, 1, 1);");
execute("INSERT INTO %s (a, b, c) VALUES (0, 2, 2);");
execute("INSERT INTO %s (a, b, c) VALUES (1, 0, 3);");
execute("INSERT INTO %s (a, b, c) VALUES (1, 1, 4);");
assertRows(execute("SELECT * FROM %s WHERE token(a, b) > token(?, ?);", 0, 0),
row(0, 1, 1),
row(0, 2, 2),
row(1, 0, 3),
row(1, 1, 4));
assertRows(execute("SELECT * FROM %s WHERE token(a, b) > token(?, ?) AND a = ? AND b IN (?, ?);",
0, 0, 1, 0, 1),
row(1, 0, 3),
row(1, 1, 4));
assertRows(execute("SELECT * FROM %s WHERE a = ? AND token(a, b) > token(?, ?) AND b IN (?, ?);",
1, 0, 0, 0, 1),
row(1, 0, 3),
row(1, 1, 4));
assertRows(execute("SELECT * FROM %s WHERE b IN (?, ?) AND token(a, b) > token(?, ?) AND a = ?;",
0, 1, 0, 0, 1),
row(1, 0, 3),
row(1, 1, 4));
assertEmpty(execute("SELECT * FROM %s WHERE b IN (?, ?) AND token(a, b) > token(?, ?) AND token(a, b) < token(?, ?) AND a = ?;",
0, 1, 0, 0, 0, 0, 1));
assertEmpty(execute("SELECT * FROM %s WHERE b IN (?, ?) AND token(a, b) > token(?, ?) AND token(a, b) <= token(?, ?) AND a = ?;",
0, 1, 0, 0, 0, 0, 1));
assertEmpty(execute("SELECT * FROM %s WHERE b IN (?, ?) AND token(a, b) >= token(?, ?) AND token(a, b) < token(?, ?) AND a = ?;",
0, 1, 0, 0, 0, 0, 1));
assertEmpty(execute("SELECT * FROM %s WHERE b IN (?, ?) AND token(a, b) = token(?, ?) AND a = ?;",
0, 1, 0, 0, 1));
assertInvalidMessage(StatementRestrictions.REQUIRES_ALLOW_FILTERING_MESSAGE,
"SELECT * FROM %s WHERE token(a, b) > token(?, ?) AND a = ?;", 0, 0, 1);
}
/**
 * Same mix as above but with secondary indexes on b and c, which make the
 * partial partition-key restrictions legal (via the index).
 */
@Test
public void testMultiColumnPartitionKeyWithIndexAndTokenNonTokenRestrictionsMix() throws Throwable
{
createTable("CREATE TABLE %s (a int, b int, c int, primary key((a, b)))");
createIndex("CREATE INDEX ON %s(b)");
createIndex("CREATE INDEX ON %s(c)");
execute("INSERT INTO %s (a, b, c) VALUES (0, 0, 0);");
execute("INSERT INTO %s (a, b, c) VALUES (0, 1, 1);");
execute("INSERT INTO %s (a, b, c) VALUES (0, 2, 2);");
execute("INSERT INTO %s (a, b, c) VALUES (1, 0, 3);");
execute("INSERT INTO %s (a, b, c) VALUES (1, 1, 4);");
assertRows(execute("SELECT * FROM %s WHERE b = ?;", 1),
row(0, 1, 1),
row(1, 1, 4));
assertRows(execute("SELECT * FROM %s WHERE token(a, b) > token(?, ?) AND b = ?;", 0, 0, 1),
row(0, 1, 1),
row(1, 1, 4));
assertRows(execute("SELECT * FROM %s WHERE b = ? AND token(a, b) > token(?, ?);", 1, 0, 0),
row(0, 1, 1),
row(1, 1, 4));
assertRows(execute("SELECT * FROM %s WHERE b = ? AND token(a, b) > token(?, ?) and c = ? ALLOW FILTERING;", 1, 0, 0, 4),
row(1, 1, 4));
}
/**
 * token() restrictions combined with clustering-column restrictions must
 * parse and execute (with ALLOW FILTERING) in either order.
 */
@Test
public void testTokenFunctionWithCompoundPartitionAndClusteringCols() throws Throwable
{
createTable("CREATE TABLE IF NOT EXISTS %s (a int, b int, c int, d int, PRIMARY KEY ((a, b), c, d))");
// just test that the queries don't error
execute("SELECT * FROM %s WHERE token(a, b) > token(0, 0) AND c > 10 ALLOW FILTERING;");
execute("SELECT * FROM %s WHERE c > 10 AND token(a, b) > token(0, 0) ALLOW FILTERING;");
execute("SELECT * FROM %s WHERE token(a, b) > token(0, 0) AND (c, d) > (0, 0) ALLOW FILTERING;");
execute("SELECT * FROM %s WHERE (c, d) > (0, 0) AND token(a, b) > token(0, 0) ALLOW FILTERING;");
}
/**
* Test undefined columns
* migrated from cql_tests.py:TestCQL.undefined_column_handling_test()
*/
@Test
public void testUndefinedColumns() throws Throwable
{
createTable("CREATE TABLE %s (k int PRIMARY KEY, v1 int, v2 int,)");
execute("INSERT INTO %s (k, v1, v2) VALUES (0, 0, 0)");
execute("INSERT INTO %s (k, v1) VALUES (1, 1)");
execute("INSERT INTO %s (k, v1, v2) VALUES (2, 2, 2)");
Object[][] rows = getRows(execute("SELECT v2 FROM %s"));
assertEquals(0, rows[0][0]);
assertEquals(null, rows[1][0]);
assertEquals(2, rows[2][0]);
rows = getRows(execute("SELECT v2 FROM %s WHERE k = 1"));
assertEquals(1, rows.length);
assertNull(rows[0][0]);
}
/**
* Check table with only a PK (#4361),
* migrated from cql_tests.py:TestCQL.only_pk_test()
*/
@Test
public void testPrimaryKeyOnly() throws Throwable
{
createTable("CREATE TABLE %s (k int, c int, PRIMARY KEY (k, c))");
for (int k = 0; k < 2; k++)
for (int c = 0; c < 2; c++)
execute("INSERT INTO %s (k, c) VALUES (?, ?)", k, c);
assertRows(execute("SELECT * FROM %s"),
row(0, 0),
row(0, 1),
row(1, 0),
row(1, 1));
// Check for dense tables too
createTable(" CREATE TABLE %s (k int, c int, PRIMARY KEY (k, c)) WITH COMPACT STORAGE");
for (int k = 0; k < 2; k++)
for (int c = 0; c < 2; c++)
execute("INSERT INTO %s (k, c) VALUES (?, ?)", k, c);
assertRows(execute("SELECT * FROM %s"),
row(0, 0),
row(0, 1),
row(1, 0),
row(1, 1));
}
/**
* Migrated from cql_tests.py:TestCQL.composite_index_with_pk_test()
*/
@Test
public void testCompositeIndexWithPK() throws Throwable
{
createTable("CREATE TABLE %s (blog_id int, time1 int, time2 int, author text, content text, PRIMARY KEY (blog_id, time1, time2))");
createIndex("CREATE INDEX ON %s(author)");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, ?)", 1, 0, 0, "foo", "bar1");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, ?)", 1, 0, 1, "foo", "bar2");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, ?)", 2, 1, 0, "foo", "baz");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, ?)", 3, 0, 1, "gux", "qux");
assertRows(execute("SELECT blog_id, content FROM %s WHERE author='foo'"),
row(1, "bar1"),
row(1, "bar2"),
row(2, "baz"));
assertRows(execute("SELECT blog_id, content FROM %s WHERE time1 > 0 AND author='foo' ALLOW FILTERING"),
row(2, "baz"));
assertRows(execute("SELECT blog_id, content FROM %s WHERE time1 = 1 AND author='foo' ALLOW FILTERING"),
row(2, "baz"));
assertRows(execute("SELECT blog_id, content FROM %s WHERE time1 = 1 AND time2 = 0 AND author='foo' ALLOW FILTERING"),
row(2, "baz"));
assertEmpty(execute("SELECT content FROM %s WHERE time1 = 1 AND time2 = 1 AND author='foo' ALLOW FILTERING"));
assertEmpty(execute("SELECT content FROM %s WHERE time1 = 1 AND time2 > 0 AND author='foo' ALLOW FILTERING"));
assertInvalid("SELECT content FROM %s WHERE time2 >= 0 AND author='foo'");
assertInvalid("SELECT blog_id, content FROM %s WHERE time1 > 0 AND author='foo'");
assertInvalid("SELECT blog_id, content FROM %s WHERE time1 = 1 AND author='foo'");
assertInvalid("SELECT blog_id, content FROM %s WHERE time1 = 1 AND time2 = 0 AND author='foo'");
assertInvalid("SELECT content FROM %s WHERE time1 = 1 AND time2 = 1 AND author='foo'");
assertInvalid("SELECT content FROM %s WHERE time1 = 1 AND time2 > 0 AND author='foo'");
}
/**
* Test for LIMIT bugs from 4579,
* migrated from cql_tests.py:TestCQL.limit_bugs_test()
*/
@Test
public void testLimitBug() throws Throwable
{
createTable("CREATE TABLE %s (a int, b int, c int, d int, e int, PRIMARY KEY (a, b))");
execute("INSERT INTO %s (a, b, c, d, e) VALUES (1, 1, 1, 1, 1);");
execute("INSERT INTO %s (a, b, c, d, e) VALUES (2, 2, 2, 2, 2);");
execute("INSERT INTO %s (a, b, c, d, e) VALUES (3, 3, 3, 3, 3);");
execute("INSERT INTO %s (a, b, c, d, e) VALUES (4, 4, 4, 4, 4);");
assertRows(execute("SELECT * FROM %s"),
row(1, 1, 1, 1, 1),
row(2, 2, 2, 2, 2),
row(3, 3, 3, 3, 3),
row(4, 4, 4, 4, 4));
assertRows(execute("SELECT * FROM %s LIMIT 1"),
row(1, 1, 1, 1, 1));
assertRows(execute("SELECT * FROM %s LIMIT 2"),
row(1, 1, 1, 1, 1),
row(2, 2, 2, 2, 2));
createTable("CREATE TABLE %s (a int primary key, b int, c int,)");
execute("INSERT INTO %s (a, b, c) VALUES (1, 1, 1)");
execute("INSERT INTO %s (a, b, c) VALUES (2, 2, 2)");
execute("INSERT INTO %s (a, b, c) VALUES (3, 3, 3)");
execute("INSERT INTO %s (a, b, c) VALUES (4, 4, 4)");
assertRows(execute("SELECT * FROM %s"),
row(1, 1, 1),
row(2, 2, 2),
row(3, 3, 3),
row(4, 4, 4));
assertRows(execute("SELECT * FROM %s LIMIT 1"),
row(1, 1, 1));
assertRows(execute("SELECT * FROM %s LIMIT 2"),
row(1, 1, 1),
row(2, 2, 2));
assertRows(execute("SELECT * FROM %s LIMIT 3"),
row(1, 1, 1),
row(2, 2, 2),
row(3, 3, 3));
assertRows(execute("SELECT * FROM %s LIMIT 4"),
row(1, 1, 1),
row(2, 2, 2),
row(3, 3, 3),
row(4, 4, 4));
assertRows(execute("SELECT * FROM %s LIMIT 5"),
row(1, 1, 1),
row(2, 2, 2),
row(3, 3, 3),
row(4, 4, 4));
}
/**
* Test for #4612 bug and more generally order by when multiple C* rows are queried
* migrated from cql_tests.py:TestCQL.order_by_multikey_test()
*/
@Test
public void testOrderByMultikey() throws Throwable
{
createTable("CREATE TABLE %s (my_id varchar, col1 int, col2 int, value varchar, PRIMARY KEY (my_id, col1, col2))");
execute("INSERT INTO %s (my_id, col1, col2, value) VALUES ( 'key1', 1, 1, 'a');");
execute("INSERT INTO %s (my_id, col1, col2, value) VALUES ( 'key2', 3, 3, 'a');");
execute("INSERT INTO %s (my_id, col1, col2, value) VALUES ( 'key3', 2, 2, 'b');");
execute("INSERT INTO %s (my_id, col1, col2, value) VALUES ( 'key4', 2, 1, 'b');");
assertRows(execute("SELECT col1 FROM %s WHERE my_id in('key1', 'key2', 'key3') ORDER BY col1"),
row(1), row(2), row(3));
assertRows(execute("SELECT col1, value, my_id, col2 FROM %s WHERE my_id in('key3', 'key4') ORDER BY col1, col2"),
row(2, "b", "key4", 1), row(2, "b", "key3", 2));
assertInvalid("SELECT col1 FROM %s ORDER BY col1");
assertInvalid("SELECT col1 FROM %s WHERE my_id > 'key1' ORDER BY col1");
}
/**
* Migrated from cql_tests.py:TestCQL.composite_index_collections_test()
*/
@Test
public void testIndexOnCompositeWithCollections() throws Throwable
{
createTable("CREATE TABLE %s (blog_id int, time1 int, time2 int, author text, content set<text>, PRIMARY KEY (blog_id, time1, time2))");
createIndex("CREATE INDEX ON %s (author)");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, { 'bar1', 'bar2' })", 1, 0, 0, "foo");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, { 'bar2', 'bar3' })", 1, 0, 1, "foo");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, { 'baz' })", 2, 1, 0, "foo");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, { 'qux' })", 3, 0, 1, "gux");
assertRows(execute("SELECT blog_id, content FROM %s WHERE author='foo'"),
row(1, set("bar1", "bar2")),
row(1, set("bar2", "bar3")),
row(2, set("baz")));
}
/**
* Migrated from cql_tests.py:TestCQL.truncate_clean_cache_test()
*/
@Test
public void testTruncateWithCaching() throws Throwable
{
createTable("CREATE TABLE %s (k int PRIMARY KEY, v1 int, v2 int) WITH CACHING = { 'keys': 'ALL', 'rows_per_partition': 'ALL' };");
for (int i = 0; i < 3; i++)
execute("INSERT INTO %s (k, v1, v2) VALUES (?, ?, ?)", i, i, i * 2);
assertRows(execute("SELECT v1, v2 FROM %s WHERE k IN (0, 1, 2)"),
row(0, 0),
row(1, 2),
row(2, 4));
execute("TRUNCATE %s");
assertEmpty(execute("SELECT v1, v2 FROM %s WHERE k IN (0, 1, 2)"));
}
/**
* Migrated from cql_tests.py:TestCQL.range_key_ordered_test()
*/
@Test
public void testRangeKey() throws Throwable
{
createTable("CREATE TABLE %s (k int PRIMARY KEY)");
execute("INSERT INTO %s (k) VALUES (-1)");
execute("INSERT INTO %s (k) VALUES ( 0)");
execute("INSERT INTO %s (k) VALUES ( 1)");
assertRows(execute("SELECT * FROM %s"),
row(0),
row(1),
row(-1));
assertInvalid("SELECT * FROM %s WHERE k >= -1 AND k < 1");
}
@Test
public void testTokenFunctionWithInvalidColumnNames() throws Throwable
{
createTable("CREATE TABLE %s (a int, b int, c int, d int, PRIMARY KEY ((a, b), c))");
assertInvalidMessage("Undefined column name e", "SELECT * FROM %s WHERE token(a, e) = token(0, 0)");
assertInvalidMessage("Undefined column name e", "SELECT * FROM %s WHERE token(a, e) > token(0, 1)");
assertInvalidMessage("Undefined column name e", "SELECT b AS e FROM %s WHERE token(a, e) = token(0, 0)");
assertInvalidMessage("Undefined column name e", "SELECT b AS e FROM %s WHERE token(a, e) > token(0, 1)");
}
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.jpa.entities;
import org.hibernate.annotations.Nationalized;
import javax.persistence.Access;
import javax.persistence.AccessType;
import javax.persistence.CascadeType;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.MapKeyColumn;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
@Entity
@Table(name="CLIENT", uniqueConstraints = {@UniqueConstraint(columnNames = {"REALM_ID", "CLIENT_ID"})})
@NamedQueries({
@NamedQuery(name="getClientsByRealm", query="select client from ClientEntity client where client.realmId = :realm"),
@NamedQuery(name="getClientById", query="select client from ClientEntity client where client.id = :id and client.realmId = :realm"),
@NamedQuery(name="getClientIdsByRealm", query="select client.id from ClientEntity client where client.realmId = :realm order by client.clientId"),
@NamedQuery(name="getAlwaysDisplayInConsoleClients", query="select client.id from ClientEntity client where client.alwaysDisplayInConsole = true and client.realmId = :realm order by client.clientId"),
@NamedQuery(name="findClientIdByClientId", query="select client.id from ClientEntity client where client.clientId = :clientId and client.realmId = :realm"),
@NamedQuery(name="searchClientsByClientId", query="select client.id from ClientEntity client where lower(client.clientId) like lower(concat('%',:clientId,'%')) and client.realmId = :realm order by client.clientId"),
@NamedQuery(name="getRealmClientsCount", query="select count(client) from ClientEntity client where client.realmId = :realm"),
@NamedQuery(name="findClientByClientId", query="select client from ClientEntity client where client.clientId = :clientId and client.realmId = :realm"),
})
public class ClientEntity {
@Id
@Column(name="ID", length = 36)
@Access(AccessType.PROPERTY) // we do this because relationships often fetch id, but not entity. This avoids an extra SQL
private String id;
@Nationalized
@Column(name = "NAME")
private String name;
@Nationalized
@Column(name = "DESCRIPTION")
private String description;
@Column(name = "CLIENT_ID")
private String clientId;
@Column(name="ENABLED")
private boolean enabled;
@Column(name = "ALWAYS_DISPLAY_IN_CONSOLE")
private boolean alwaysDisplayInConsole;
@Column(name="SECRET")
private String secret;
@Column(name="REGISTRATION_TOKEN")
private String registrationToken;
@Column(name="CLIENT_AUTHENTICATOR_TYPE")
private String clientAuthenticatorType;
@Column(name="NOT_BEFORE")
private int notBefore;
@Column(name="PUBLIC_CLIENT")
private boolean publicClient;
@Column(name="PROTOCOL")
private String protocol;
@Column(name="FRONTCHANNEL_LOGOUT")
private boolean frontchannelLogout;
@Column(name="FULL_SCOPE_ALLOWED")
private boolean fullScopeAllowed;
@Column(name = "REALM_ID")
protected String realmId;
@ElementCollection
@Column(name="VALUE")
@CollectionTable(name = "WEB_ORIGINS", joinColumns={ @JoinColumn(name="CLIENT_ID") })
protected Set<String> webOrigins;
@ElementCollection
@Column(name="VALUE")
@CollectionTable(name = "REDIRECT_URIS", joinColumns={ @JoinColumn(name="CLIENT_ID") })
protected Set<String> redirectUris;
@OneToMany(cascade ={CascadeType.REMOVE}, orphanRemoval = true, mappedBy = "client")
protected Collection<ClientAttributeEntity> attributes;
@ElementCollection
@MapKeyColumn(name="BINDING_NAME")
@Column(name="FLOW_ID", length = 4000)
@CollectionTable(name="CLIENT_AUTH_FLOW_BINDINGS", joinColumns={ @JoinColumn(name="CLIENT_ID") })
protected Map<String, String> authFlowBindings;
@OneToMany(cascade ={CascadeType.REMOVE}, orphanRemoval = true, mappedBy = "client")
Collection<ProtocolMapperEntity> protocolMappers;
@Column(name="SURROGATE_AUTH_REQUIRED")
private boolean surrogateAuthRequired;
@Column(name="ROOT_URL")
private String rootUrl;
@Column(name="BASE_URL")
private String baseUrl;
@Column(name="MANAGEMENT_URL")
private String managementUrl;
@Column(name="BEARER_ONLY")
private boolean bearerOnly;
@Column(name="CONSENT_REQUIRED")
private boolean consentRequired;
@Column(name="STANDARD_FLOW_ENABLED")
private boolean standardFlowEnabled;
@Column(name="IMPLICIT_FLOW_ENABLED")
private boolean implicitFlowEnabled;
@Column(name="DIRECT_ACCESS_GRANTS_ENABLED")
private boolean directAccessGrantsEnabled;
@Column(name="SERVICE_ACCOUNTS_ENABLED")
private boolean serviceAccountsEnabled;
@Column(name="NODE_REREG_TIMEOUT")
private int nodeReRegistrationTimeout;
@ElementCollection
@Column(name="ROLE_ID")
@CollectionTable(name="SCOPE_MAPPING", joinColumns = { @JoinColumn(name="CLIENT_ID")})
private Set<String> scopeMappingIds;
@ElementCollection
@MapKeyColumn(name="NAME")
@Column(name="VALUE")
@CollectionTable(name="CLIENT_NODE_REGISTRATIONS", joinColumns={ @JoinColumn(name="CLIENT_ID") })
Map<String, Integer> registeredNodes;
public String getRealmId() {
return realmId;
}
public void setRealmId(String realmId) {
this.realmId = realmId;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public boolean isEnabled() {
return enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
public boolean isAlwaysDisplayInConsole() {
return alwaysDisplayInConsole;
}
public void setAlwaysDisplayInConsole(boolean alwaysDisplayInConsole) {
this.alwaysDisplayInConsole = alwaysDisplayInConsole;
}
public String getClientId() {
return clientId;
}
public void setClientId(String clientId) {
this.clientId = clientId;
}
public Set<String> getWebOrigins() {
if (webOrigins == null) {
webOrigins = new HashSet<>();
}
return webOrigins;
}
public void setWebOrigins(Set<String> webOrigins) {
this.webOrigins = webOrigins;
}
public Set<String> getRedirectUris() {
if (redirectUris == null) {
redirectUris = new HashSet<>();
}
return redirectUris;
}
public void setRedirectUris(Set<String> redirectUris) {
this.redirectUris = redirectUris;
}
public String getClientAuthenticatorType() {
return clientAuthenticatorType;
}
public void setClientAuthenticatorType(String clientAuthenticatorType) {
this.clientAuthenticatorType = clientAuthenticatorType;
}
public String getSecret() {
return secret;
}
public void setSecret(String secret) {
this.secret = secret;
}
public String getRegistrationToken() {
return registrationToken;
}
public void setRegistrationToken(String registrationToken) {
this.registrationToken = registrationToken;
}
public int getNotBefore() {
return notBefore;
}
public void setNotBefore(int notBefore) {
this.notBefore = notBefore;
}
public boolean isPublicClient() {
return publicClient;
}
public void setPublicClient(boolean publicClient) {
this.publicClient = publicClient;
}
public boolean isFullScopeAllowed() {
return fullScopeAllowed;
}
public void setFullScopeAllowed(boolean fullScopeAllowed) {
this.fullScopeAllowed = fullScopeAllowed;
}
public Collection<ClientAttributeEntity> getAttributes() {
if (attributes == null) {
attributes = new LinkedList<>();
}
return attributes;
}
public void setAttributes(Collection<ClientAttributeEntity> attributes) {
this.attributes = attributes;
}
public Map<String, String> getAuthFlowBindings() {
if (authFlowBindings == null) {
authFlowBindings = new HashMap<>();
}
return authFlowBindings;
}
public void setAuthFlowBindings(Map<String, String> authFlowBindings) {
this.authFlowBindings = authFlowBindings;
}
public String getProtocol() {
return protocol;
}
public void setProtocol(String protocol) {
this.protocol = protocol;
}
public boolean isFrontchannelLogout() {
return frontchannelLogout;
}
public void setFrontchannelLogout(boolean frontchannelLogout) {
this.frontchannelLogout = frontchannelLogout;
}
public Collection<ProtocolMapperEntity> getProtocolMappers() {
if (protocolMappers == null) {
protocolMappers = new LinkedList<>();
}
return protocolMappers;
}
public void setProtocolMappers(Collection<ProtocolMapperEntity> protocolMappers) {
this.protocolMappers = protocolMappers;
}
public boolean isSurrogateAuthRequired() {
return surrogateAuthRequired;
}
public void setSurrogateAuthRequired(boolean surrogateAuthRequired) {
this.surrogateAuthRequired = surrogateAuthRequired;
}
public String getRootUrl() {
return rootUrl;
}
public void setRootUrl(String rootUrl) {
this.rootUrl = rootUrl;
}
public String getBaseUrl() {
return baseUrl;
}
public void setBaseUrl(String baseUrl) {
this.baseUrl = baseUrl;
}
public String getManagementUrl() {
return managementUrl;
}
public void setManagementUrl(String managementUrl) {
this.managementUrl = managementUrl;
}
public boolean isBearerOnly() {
return bearerOnly;
}
public void setBearerOnly(boolean bearerOnly) {
this.bearerOnly = bearerOnly;
}
public boolean isConsentRequired() {
return consentRequired;
}
public void setConsentRequired(boolean consentRequired) {
this.consentRequired = consentRequired;
}
public boolean isStandardFlowEnabled() {
return standardFlowEnabled;
}
public void setStandardFlowEnabled(boolean standardFlowEnabled) {
this.standardFlowEnabled = standardFlowEnabled;
}
public boolean isImplicitFlowEnabled() {
return implicitFlowEnabled;
}
public void setImplicitFlowEnabled(boolean implicitFlowEnabled) {
this.implicitFlowEnabled = implicitFlowEnabled;
}
public boolean isDirectAccessGrantsEnabled() {
return directAccessGrantsEnabled;
}
public void setDirectAccessGrantsEnabled(boolean directAccessGrantsEnabled) {
this.directAccessGrantsEnabled = directAccessGrantsEnabled;
}
public boolean isServiceAccountsEnabled() {
return serviceAccountsEnabled;
}
public void setServiceAccountsEnabled(boolean serviceAccountsEnabled) {
this.serviceAccountsEnabled = serviceAccountsEnabled;
}
public int getNodeReRegistrationTimeout() {
return nodeReRegistrationTimeout;
}
public void setNodeReRegistrationTimeout(int nodeReRegistrationTimeout) {
this.nodeReRegistrationTimeout = nodeReRegistrationTimeout;
}
public Map<String, Integer> getRegisteredNodes() {
if (registeredNodes == null) {
registeredNodes = new HashMap<>();
}
return registeredNodes;
}
public void setRegisteredNodes(Map<String, Integer> registeredNodes) {
this.registeredNodes = registeredNodes;
}
public Set<String> getScopeMappingIds() {
if (scopeMappingIds == null) {
scopeMappingIds = new HashSet<>();
}
return scopeMappingIds;
}
public void setScopeMapping(Set<String> scopeMappingIds) {
this.scopeMappingIds = scopeMappingIds;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null) return false;
if (!(o instanceof ClientEntity)) return false;
ClientEntity that = (ClientEntity) o;
if (!id.equals(that.getId())) return false;
return true;
}
@Override
public int hashCode() {
return id.hashCode();
}
}
| |
/*
* This file is part of choco-solver, http://choco-solver.org/
*
* Copyright (c) 2019, IMT Atlantique. All rights reserved.
*
* Licensed under the BSD 4-clause license.
*
* See LICENSE file in the project root for full license information.
*/
package org.chocosolver.solver.constraints.real;
import org.chocosolver.solver.constraints.Operator;
import org.chocosolver.solver.constraints.Propagator;
import org.chocosolver.solver.constraints.PropagatorPriority;
import org.chocosolver.solver.exception.ContradictionException;
import org.chocosolver.solver.variables.Variable;
import org.chocosolver.solver.variables.events.IntEventType;
import org.chocosolver.solver.variables.events.RealEventType;
import org.chocosolver.util.ESat;
import org.chocosolver.util.tools.VariableUtils;
import java.util.Arrays;
import java.util.OptionalDouble;
/**
* A propagator for SUM(x_i*c_i) = b <br/> Based on "Bounds Consistency Techniques for Long Linear
* Constraint" </br> W. Harvey and J. Schimpf <p>
*
* @author Charles Prud'homme
* @since 18/03/11
*/
public class PropScalarMixed extends Propagator<Variable> {
/**
* Number of variables
*/
protected final int l;
/**
* Bound to respect
*/
protected final double b;
/**
* Variability of each variable (ie domain amplitude)
*/
protected final double[] I;
/**
* Stores the maximal variability
*/
protected double maxI;
/**
* SUm of lower bounds
*/
protected double sumLB;
/**
* Sum of upper bounds
*/
protected double sumUB;
/**
* The operator among EQ, LE, GE and NE
*/
protected final Operator o;
/**
* The coefficients
*/
private final double[] c;
/**
* Smallest precision
*/
private final double sprc;
/**
* Create a scalar product: SCALAR(x_i*c_i) o b
*
* @param variables list of variables
* @param coeffs list of coefficients
* @param o operator
* @param b bound to respect.
*/
public PropScalarMixed(Variable[] variables, double[] coeffs, Operator o, double b) {
super(variables, PropagatorPriority.LINEAR, false);
this.c = coeffs;
l = variables.length;
OptionalDouble d = Arrays.stream(vars)
.filter(VariableUtils::isReal)
.mapToDouble(r -> r.asRealVar().getPrecision())
.min();
if (d.isPresent()) {
sprc = d.getAsDouble();
} else {
sprc = variables[0].getModel().getPrecision();
}
this.o = o;
this.b = b;
I = new double[l];
maxI = 0;
}
@Override
public int getPropagationConditions(int vIdx) {
switch (o) {
case LE:
if (VariableUtils.isReal(vars[vIdx])) {
return c[vIdx] > 0 ? RealEventType.INCLOW.getMask() : RealEventType.DECUPP.getMask();
} else {
return IntEventType.combine(IntEventType.INSTANTIATE, c[vIdx] > 0 ? IntEventType.INCLOW : IntEventType.DECUPP);
}
case GE:
if (VariableUtils.isReal(vars[vIdx])) {
return c[vIdx] > 0 ? RealEventType.DECUPP.getMask() : RealEventType.INCLOW.getMask();
} else {
return IntEventType.combine(IntEventType.INSTANTIATE, c[vIdx] > 0 ? IntEventType.DECUPP : IntEventType.INCLOW);
}
default:
if (VariableUtils.isReal(vars[vIdx])) {
return RealEventType.BOUND.getMask();
} else {
return IntEventType.boundAndInst();
}
}
}
@Override
public void propagate(int evtmask) throws ContradictionException {
filter();
}
/**
* Execute filtering wrt the operator
*
* @throws ContradictionException if contradiction is detected
*/
protected void filter() throws ContradictionException {
prepare();
switch (o) {
case LE:
filterOnLeq();
break;
case GE:
filterOnGeq();
break;
default:
filterOnEq();
break;
}
}
protected void prepare() {
sumLB = sumUB = 0;
double lb, ub;
maxI = 0;
for (int i = 0; i < l; i++) { // first the positive coefficients
if (VariableUtils.isReal(vars[i])) {
if (c[i] > 0) {
lb = vars[i].asRealVar().getLB() * c[i];
ub = vars[i].asRealVar().getUB() * c[i];
} else {
lb = vars[i].asRealVar().getUB() * c[i];
ub = vars[i].asRealVar().getLB() * c[i];
}
} else {
if (c[i] > 0) {
lb = vars[i].asIntVar().getLB() * c[i];
ub = vars[i].asIntVar().getUB() * c[i];
} else {
lb = vars[i].asIntVar().getUB() * c[i];
ub = vars[i].asIntVar().getLB() * c[i];
}
}
sumLB += lb;
sumUB += ub;
I[i] = (ub - lb);
if (maxI < I[i]) maxI = I[i];
}
}
protected void filterOnEq() throws ContradictionException {
boolean anychange;
double F = b - sumLB;
double E = sumUB - b;
do {
anychange = false;
if (F < 0 || E < 0) {
fails();
}
if (maxI - F > sprc || maxI - E > sprc) {
maxI = 0;
double lb, ub;
for (int i = 0; i < l; i++) {
if (I[i] - F > 0) {
if (c[i] > 0) {
if (VariableUtils.isReal(vars[i])) {
lb = vars[i].asRealVar().getLB() * c[i];
ub = lb + I[i];
if (vars[i].asRealVar().updateUpperBound((F + lb) / c[i], this)) {
double nub = vars[i].asRealVar().getUB() * c[i];
E += nub - ub;
I[i] = nub - lb;
anychange = true;
}
} else {
lb = vars[i].asIntVar().getLB() * c[i];
ub = lb + I[i];
if (vars[i].asIntVar().updateUpperBound(divFloor(F + lb, c[i]), this)) {
double nub = vars[i].asIntVar().getUB() * c[i];
E += nub - ub;
I[i] = nub - lb;
anychange = true;
}
}
} else {
if (VariableUtils.isReal(vars[i])) {
lb = vars[i].asRealVar().getUB() * c[i];
ub = lb + I[i];
if (vars[i].asRealVar().updateLowerBound((-F - lb) / -c[i], this)) {
double nub = vars[i].asRealVar().getLB() * c[i];
E += nub - ub;
I[i] = nub - lb;
anychange = true;
}
} else {
lb = vars[i].asIntVar().getUB() * c[i];
ub = lb + I[i];
if (vars[i].asIntVar().updateLowerBound(divCeil(-F - lb, -c[i]), this)) {
double nub = vars[i].asIntVar().getLB() * c[i];
E += nub - ub;
I[i] = nub - lb;
anychange = true;
}
}
}
}
if (I[i] - E > 0) {
if (c[i] > 0) {
if (VariableUtils.isReal(vars[i])) {
ub = vars[i].asRealVar().getUB() * c[i];
lb = ub - I[i];
if (vars[i].asRealVar().updateLowerBound((ub - E) / c[i], this)) {
double nlb = vars[i].asRealVar().getLB() * c[i];
F -= nlb - lb;
I[i] = ub - nlb;
anychange = true;
}
} else {
ub = vars[i].asIntVar().getUB() * c[i];
lb = ub - I[i];
if (vars[i].asIntVar().updateLowerBound(divCeil(ub - E, c[i]), this)) {
double nlb = vars[i].asIntVar().getLB() * c[i];
F -= nlb - lb;
I[i] = ub - nlb;
anychange = true;
}
}
} else {
if (VariableUtils.isReal(vars[i])) {
ub = vars[i].asRealVar().getLB() * c[i];
lb = ub - I[i];
if (vars[i].asRealVar().updateUpperBound((-ub + E) / -c[i], this)) {
double nlb = vars[i].asRealVar().getUB() * c[i];
F -= nlb - lb;
I[i] = ub - nlb;
anychange = true;
}
} else {
ub = vars[i].asIntVar().getLB() * c[i];
lb = ub - I[i];
if (vars[i].asIntVar().updateUpperBound(divFloor(-ub + E, -c[i]), this)) {
double nlb = vars[i].asIntVar().getUB() * c[i];
F -= nlb - lb;
I[i] = ub - nlb;
anychange = true;
}
}
}
}
if (maxI < I[i]) maxI = I[i];
}
}
if (F < 0 && E < 0) {
this.setPassive();
return;
}
} while (anychange);
}
protected void filterOnLeq() throws ContradictionException {
double F = b - sumLB;
double E = sumUB - b;
if (F < 0) {
fails();
}
if (maxI - F > sprc) {
maxI = 0;
double lb, ub;
for (int i = 0; i < l; i++) {
if (I[i] - F > 0) {
if (c[i] > 0) {
if (VariableUtils.isReal(vars[i])) {
lb = vars[i].asRealVar().getLB() * c[i];
ub = lb + I[i];
if (vars[i].asRealVar().updateUpperBound((F + lb) / c[i], this)) {
double nub = vars[i].asRealVar().getUB() * c[i];
E += nub - ub;
I[i] = nub - lb;
}
} else {
lb = vars[i].asIntVar().getLB() * c[i];
ub = lb + I[i];
if (vars[i].asIntVar().updateUpperBound(divFloor(F + lb, c[i]), this)) {
double nub = vars[i].asIntVar().getUB() * c[i];
E += nub - ub;
I[i] = nub - lb;
}
}
} else {
if (VariableUtils.isReal(vars[i])) {
lb = vars[i].asRealVar().getUB() * c[i];
ub = lb + I[i];
if (vars[i].asRealVar().updateLowerBound((-F - lb) / -c[i], this)) {
double nub = vars[i].asRealVar().getLB() * c[i];
E += nub - ub;
I[i] = nub - lb;
}
} else {
lb = vars[i].asIntVar().getUB() * c[i];
ub = lb + I[i];
if (vars[i].asIntVar().updateLowerBound(divCeil(-F - lb, -c[i]), this)) {
double nub = vars[i].asIntVar().getLB() * c[i];
E += nub - ub;
I[i] = nub - lb;
}
}
}
}
if (maxI < I[i]) maxI = I[i];
}
}
if (E < 0) {
this.setPassive();
}
}
protected void filterOnGeq() throws ContradictionException {
double F = b - sumLB;
double E = sumUB - b;
if (E < 0) {
fails();
}
if (maxI - E > sprc) {
maxI = 0;
double lb, ub;
for (int i = 0; i < l; i++) {
if (I[i] - E > 0) {
if (c[i] > 0) {
if (VariableUtils.isReal(vars[i])) {
ub = vars[i].asRealVar().getUB() * c[i];
lb = ub - I[i];
if (vars[i].asRealVar().updateLowerBound((ub - E) / c[i], this)) {
double nlb = vars[i].asRealVar().getLB() * c[i];
F -= nlb - lb;
I[i] = ub - nlb;
}
} else {
ub = vars[i].asIntVar().getUB() * c[i];
lb = ub - I[i];
if (vars[i].asIntVar().updateLowerBound(divCeil(ub - E, c[i]), this)) {
double nlb = vars[i].asIntVar().getLB() * c[i];
F -= nlb - lb;
I[i] = ub - nlb;
}
}
} else {
if (VariableUtils.isReal(vars[i])) {
ub = vars[i].asRealVar().getLB() * c[i];
lb = ub - I[i];
if (vars[i].asRealVar().updateUpperBound((-ub + E) / -c[i], this)) {
double nlb = vars[i].asRealVar().getUB() * c[i];
F -= nlb - lb;
I[i] = ub - nlb;
}
} else {
ub = vars[i].asIntVar().getLB() * c[i];
lb = ub - I[i];
if (vars[i].asIntVar().updateUpperBound(divFloor(-ub + E, -c[i]), this)) {
double nlb = vars[i].asIntVar().getUB() * c[i];
F -= nlb - lb;
I[i] = ub - nlb;
}
}
}
}
if (maxI < I[i]) maxI = I[i];
}
}
if (F < 0) {
this.setPassive();
}
}
@Override
public ESat isEntailed() {
double sumUB = 0, sumLB = 0;
for (int i = 0; i < l; i++) { // first the positive coefficients
if (VariableUtils.isReal(vars[i])) {
if (c[i] > 0) {
sumLB += vars[i].asRealVar().getLB() * c[i];
sumUB += vars[i].asRealVar().getUB() * c[i];
} else {
sumLB += vars[i].asRealVar().getUB() * c[i];
sumUB += vars[i].asRealVar().getLB() * c[i];
}
} else {
if (c[i] > 0) {
sumLB += vars[i].asIntVar().getLB() * c[i];
sumUB += vars[i].asIntVar().getUB() * c[i];
} else {
sumLB += vars[i].asIntVar().getUB() * c[i];
sumUB += vars[i].asIntVar().getLB() * c[i];
}
}
}
return check(sumLB, sumUB);
}
/**
* Whether the current state of the scalar product is entailed
*
* @param sumLB sum of lower bounds
* @param sumUB sum of upper bounds
* @return the entailment check
*/
@SuppressWarnings("Duplicates")
protected ESat check(double sumLB, double sumUB) {
switch (o) {
case LE:
if (sumLB <= b) {
return ESat.TRUE;
}
if (sumLB > b) {
return ESat.FALSE;
}
return ESat.UNDEFINED;
case GE:
if (sumUB >= b) {
return ESat.TRUE;
}
if (sumUB < b) {
return ESat.FALSE;
}
return ESat.UNDEFINED;
default:
if (sumLB <= b && b <= sumUB) {
return ESat.TRUE;
}
if (sumUB < b || sumLB > b) {
return ESat.FALSE;
}
return ESat.UNDEFINED;
}
}
@Override
public String toString() {
StringBuilder linComb = new StringBuilder(20);
linComb.append(c[0]).append('.').append(vars[0].getName());
int i = 1;
for (; i < l; i++) {
if (c[i] > 0) {
linComb.append(" + ").append(c[i]);
} else {
linComb.append(" - ").append(-c[i]);
}
linComb.append('.').append(vars[i].getName());
}
linComb.append(" ").append(o).append(" ");
linComb.append(b);
return linComb.toString();
}
private int divFloor(double a, double b) {
// <!> we assume b > 0
if (a >= 0) {
return (int) (a / b);
} else {
return (int) ((a - b + 1) / b);
}
}
private int divCeil(double a, double b) {
// <!> we assume b > 0
if (a >= 0) {
return (int) ((a + b - 1) / b);
} else {
return (int) (a / b);
}
}
}
| |
//%LICENSE////////////////////////////////////////////////////////////////
//
// Licensed to The Open Group (TOG) under one or more contributor license
// agreements. Refer to the OpenPegasusNOTICE.txt file distributed with
// this work for additional information regarding copyright ownership.
// Each contributor licenses this file to you under the OpenPegasus Open
// Source License; you may not use this file except in compliance with the
// License.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//////////////////////////////////////////////////////////////////////////
package Client;
import org.pegasus.jmpi.CIMClass;
import org.pegasus.jmpi.CIMClient;
import org.pegasus.jmpi.CIMDataType;
import org.pegasus.jmpi.CIMException;
import org.pegasus.jmpi.CIMMethod;
import org.pegasus.jmpi.CIMObjectPath;
import org.pegasus.jmpi.CIMParameter;
/**
 * Testcase for the JMPI {@code CIMParameter} binding.
 * <p>
 * Fetches the class "JMPIExpInstance_TestPropertyTypes" from namespace
 * "root/SampleProvider", looks up its "enableModifications" method, and
 * validates the first parameter's name ("fState"), non-array-ness,
 * (empty) reference class name, and BOOLEAN data type. Results are
 * reported on stdout with "SUCCESS:"/"FAILURE:" prefixes, as the
 * surrounding test harness expects.
 */
public class testCIMParameter
{
   // When true, progress is traced to stdout (enabled via the "debug" arg).
   private boolean DEBUG = false;

   /**
    * This returns the group name.
    *
    * @return String "class" testcase belongs in.
    */
   public String getGroup ()
   {
      return "CIMParameter";
   }

   /** Turns debug tracing on or off. */
   public void setDebug (boolean fDebug)
   {
      DEBUG = fDebug;
   }

   /**
    * Testcase entry point invoked by the harness.
    *
    * @param args      command line arguments; "debug" enables tracing.
    * @param cimClient connected client used to run the testcase.
    * @return true if the testcase passed, false otherwise.
    */
   public boolean main (String args[], CIMClient cimClient)
   {
      boolean fExecuted = false;
      boolean fRet = true;

      for (int i = 0; i < args.length; i++)
      {
         if (args[i].equalsIgnoreCase ("debug"))
         {
            setDebug (true);
         }
      }

      // NOTE(review): fExecuted is never set to true, so the testcase always
      // runs — presumably kept for symmetry with sibling testcases; confirm.
      if (!fExecuted)
      {
         fRet = testCIMParameter (cimClient);
      }

      return fRet;
   }

   /**
    * Performs the actual CIMParameter checks against the server.
    *
    * @param client connected CIM client; must not be null.
    * @return true on success, false on the first failed check.
    */
   private boolean testCIMParameter (CIMClient client)
   {
      if (client == null)
      {
         System.out.println ("FAILURE: testCIMParameter: client == null");

         return false;
      }

      // -----
      // Fetch the test class and navigate to the method's first parameter.

      CIMObjectPath cop = null;
      CIMClass      cc  = null;
      String        pszMethodName = "enableModifications";
      CIMMethod     cm = null;
      CIMParameter  cp = null;

      cop = new CIMObjectPath ("JMPIExpInstance_TestPropertyTypes",
                               "root/SampleProvider");

      if (DEBUG)
      {
         System.out.println ("testCIMParameter: cop = " + cop);
      }

      try
      {
         cc = client.getClass (cop,
                               true,  // propagated
                               true,  // includeQualifiers
                               true,  // includeClassOrigin
                               null); // propertyList
      }
      catch (CIMException e)
      {
         System.out.println ("FAILURE: testCIMParameter: client.getClass: caught " + e);

         return false;
      }

      if (DEBUG)
      {
         System.out.println ("testCIMParameter: cc = " + cc);
      }

      if (cc == null)
      {
         System.out.println ("FAILURE: testCIMParameter: cc == null");

         return false;
      }

      cm = cc.getMethod (pszMethodName);

      if (DEBUG)
      {
         System.out.println ("testCIMParameter: cm = " + cm);
      }

      if (cm == null)
      {
         System.out.println ("FAILURE: testCIMParameter: cm == null");

         return false;
      }

      try
      {
         cp = cm.getParameter (0);
      }
      catch (CIMException e)
      {
         System.out.println ("FAILURE: testCIMParameter: cm.getParameter (0) unknown exception " + e);

         return false;
      }

      if (DEBUG)
      {
         System.out.println ("testCIMParameter: cp = " + cp);
      }

      if (cp == null)
      {
         System.out.println ("FAILURE: testCIMParameter: cp == null");

         return false;
      }

      // -----
      // The first parameter of enableModifications must be named "fState".

      String pszParameterName = "fState";
      String pszName          = null;

      pszName = cp.getName ();

      if (DEBUG)
      {
         System.out.println ("testCIMParameter: pszName = " + pszName);
      }

      if (pszName == null)
      {
         System.out.println ("FAILURE: testCIMParameter: cp.getName () 1");

         return false;
      }

      if (!pszName.equals (pszParameterName))
      {
         System.out.println ("FAILURE: testCIMParameter: cp.getName () 2");

         return false;
      }

      // -----
      // The parameter must be a scalar, not an array.

      if (DEBUG)
      {
         System.out.println ("testCIMParameter: cp.isArray () = " + cp.isArray ());
      }

      if (cp.isArray ())
      {
         System.out.println ("FAILURE: testCIMParameter: cp.isArray () 1");

         return false;
      }

      // @TBD - make a function that has an array parameter. test if so here.

      // -----

      // @TBD - make a function that has an array parameter. test the size here.

      // -----
      // A non-reference parameter should report an empty reference class name.

      String pszReferenceClassName = null;

      pszReferenceClassName = cp.getReferenceClassName ();

      if (DEBUG)
      {
         System.out.println ("testCIMParameter: cp.getReferenceClassName () = " + pszReferenceClassName);
      }

      if (pszReferenceClassName == null)
      {
         System.out.println ("FAILURE: testCIMParameter: cp.getReferenceClassName () 1");

         return false;
      }

      if (!pszReferenceClassName.equals (""))
      {
         System.out.println ("FAILURE: testCIMParameter: cp.getReferenceClassName () 2");

         return false;
      }

      // -----
      // The parameter's declared data type must be BOOLEAN.

      CIMDataType cdt = null;

      cdt = cp.getType ();

      if (DEBUG)
      {
         System.out.println ("testCIMParameter: cp.getType () = " + cdt);
      }

      if (cdt == null)
      {
         System.out.println ("FAILURE: testCIMParameter: cp.getType () 1");

         return false;
      }

      if (cdt.getType () != CIMDataType.BOOLEAN)
      {
         System.out.println ("FAILURE: testCIMParameter: cp.getType () 2");

         return false;
      }

      // -----

      System.out.println ("SUCCESS: testCIMParameter");

      return true;
   }
}
| |
package com.clearlyspam23.GLE.GUI.template;
import java.awt.BorderLayout;
import java.awt.FlowLayout;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTabbedPane;
import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
import javax.swing.border.EmptyBorder;
import com.clearlyspam23.GLE.JAnGLEData;
import com.clearlyspam23.GLE.PluginManager;
import com.clearlyspam23.GLE.Template;
import com.clearlyspam23.GLE.basic.compression.NoCompression;
import com.clearlyspam23.GLE.basic.compression.ZipCompression;
import com.clearlyspam23.GLE.basic.coordinates.BottomLeft;
import com.clearlyspam23.GLE.basic.coordinates.CenteredDown;
import com.clearlyspam23.GLE.basic.coordinates.CenteredUp;
import com.clearlyspam23.GLE.basic.coordinates.TopLeft;
import com.clearlyspam23.GLE.basic.languages.JavaLanguageOptions;
import com.clearlyspam23.GLE.basic.layers.tile.TileLayerDefinition;
import com.clearlyspam23.GLE.basic.parameters.CurrentLevelMacro;
import com.clearlyspam23.GLE.basic.parameters.CurrentTemplateMacro;
import com.clearlyspam23.GLE.basic.parameters.ExecutableDirectoryMacro;
import com.clearlyspam23.GLE.basic.parameters.ExecutableLocationMacro;
import com.clearlyspam23.GLE.basic.parameters.WorkingDirectoryMacro;
import com.clearlyspam23.GLE.basic.properties.IntPropertyDefinition;
import com.clearlyspam23.GLE.basic.properties.VectorPropertyDefinition;
import com.clearlyspam23.GLE.basic.serializers.JsonSerializer;
import com.clearlyspam23.GLE.level.LayerDefinition;
import com.clearlyspam23.GLE.level.LayerTemplate;
import com.clearlyspam23.GLE.util.Utility;
/**
 * Modal dialog for creating or editing a {@link Template}.
 * <p>
 * The dialog is composed of pluggable {@link TemplateSubPanel}s: the regular
 * panels become top-level tabs, the "advanced" panels are grouped under a
 * single "Advanced" tab. On OK, every sub-panel first verifies its input;
 * only when there are no problems is the template generated, post-processed
 * by each distinct {@link LayerDefinition}, and saved via
 * {@link JAnGLEData#saveTemplate}.
 */
public class TemplateDialog extends JDialog implements ActionListener{

	private static final long serialVersionUID = 1L;

	/** Root panel hosting the tab pane. */
	private final JPanel contentPanel = new JPanel();
	/** All sub-panels (regular and advanced) contributing to the template. */
	private final List<TemplateSubPanel> subPanels = new ArrayList<TemplateSubPanel>();
	/** True once the user confirmed with OK and the template was saved. */
	private boolean accepted = false;
	/** The template last edited/created by this dialog. */
	private Template template;
	private JTabbedPane tabbedPane;

	/**
	 * Launch the application (manual test harness for this dialog).
	 */
	public static void main(String[] args) {
		try {
			UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
		} catch (ClassNotFoundException | InstantiationException
				| IllegalAccessException | UnsupportedLookAndFeelException e1) {
			//honestly, if this doesnt work, whatever we'll use default. should fail silently.
		}
		try {
			// Register the standard plugins so the dialog has content to show.
			PluginManager manager = new PluginManager();
			JAnGLEData data = new JAnGLEData(manager);
			manager.addCoordinateSystems(new TopLeft());
			manager.addCoordinateSystems(new BottomLeft());
			manager.addCoordinateSystems(new CenteredDown());
			manager.addCoordinateSystems(new CenteredUp());
			manager.addProgrammingLanguage(new JavaLanguageOptions());
			manager.addMacro(new CurrentLevelMacro());
			manager.addMacro(new CurrentTemplateMacro());
			manager.addMacro(new ExecutableDirectoryMacro());
			manager.addMacro(new ExecutableLocationMacro());
			manager.addMacro(new WorkingDirectoryMacro());
			manager.addProperty(new IntPropertyDefinition());
			manager.addProperty(new VectorPropertyDefinition());
			manager.addCompression(new NoCompression());
			manager.addCompression(new ZipCompression());
			manager.addSerializer(new JsonSerializer());
			manager.addLayerDefinition(new TileLayerDefinition());
			manager.addTemplatePanel(new GeneralPanel(data.getPlugins()));
			manager.addTemplatePanel(new LayerPanel(data.getPlugins()));
			manager.addAdvancedTemplatePanel(new PLangPanel(data.getPlugins()));
			TemplateDialog dialog = new TemplateDialog(data);
			dialog.setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE);
			dialog.showDialog();
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/** Application data used to populate the panels and persist templates. */
	private JAnGLEData data;

	/**
	 * Create the dialog.
	 *
	 * @param data application data providing the plugin-registered sub-panels
	 */
	public TemplateDialog(JAnGLEData data) {
		setModal(true);
		this.data = data;
		//this code should be moved somewhere else eventually
		for(TemplateSubPanel p : data.getPlugins().getTemplatePanels()){
			subPanels.add(p);
		}
		for(TemplateSubPanel p : data.getPlugins().getAdvancedTemplatePanels())
			subPanels.add(p);
		setResizable(false);
		setBounds(100, 100, 580, 680);
		getContentPane().setLayout(new BorderLayout());
		contentPanel.setBorder(new EmptyBorder(5, 5, 5, 5));
		getContentPane().add(contentPanel, BorderLayout.CENTER);
		contentPanel.setLayout(new GridLayout(0, 1, 0, 0));
		tabbedPane = new JTabbedPane(JTabbedPane.TOP);
		contentPanel.add(tabbedPane);
		// Regular panels become top-level tabs ...
		for(TemplateSubPanel panel : data.getPlugins().getTemplatePanels())
			tabbedPane.addTab(panel.getPanelName(), panel);
		// ... while advanced panels share a nested tab pane under "Advanced".
		JPanel aPanel = new JPanel();
		aPanel.setLayout(new BorderLayout());
		JTabbedPane advanced = new JTabbedPane();
		aPanel.add(advanced, BorderLayout.CENTER);
		for(TemplateSubPanel p : data.getPlugins().getAdvancedTemplatePanels()){
			advanced.addTab(p.getPanelName(), p);
		}
		tabbedPane.addTab("Advanced", advanced);
		{
			JPanel buttonPane = new JPanel();
			buttonPane.setLayout(new FlowLayout(FlowLayout.RIGHT));
			getContentPane().add(buttonPane, BorderLayout.SOUTH);
			{
				JButton okButton = new JButton("OK");
				okButton.setActionCommand("OK");
				buttonPane.add(okButton);
				okButton.addActionListener(this);
			}
			{
				JButton cancelButton = new JButton("Cancel");
				cancelButton.setActionCommand("Cancel");
				buttonPane.add(cancelButton);
				// NOTE(review): Cancel (not OK) is the default Enter button —
				// confirm this is intentional.
				getRootPane().setDefaultButton(cancelButton);
				cancelButton.addActionListener(this);
			}
		}
	}

	/** @return true if the user confirmed with OK and the template was saved */
	public boolean isAccepted(){
		return accepted;
	}

	/**
	 * Shows the dialog (modally) primed with the given template.
	 *
	 * @param template template to edit; its values are pushed into every
	 *                 sub-panel before the dialog becomes visible
	 */
	public void showDialog(Template template){
		accepted = false;
		this.template = template;
		for(TemplateSubPanel p : subPanels)
			p.reset();
		if(template!=null)
			for(TemplateSubPanel p : subPanels)
				p.setToTemplate(template);
		// Select the first tab BEFORE showing the dialog: this dialog is
		// modal, so setVisible(true) blocks until it is closed — selecting
		// afterwards (as the old code did) only took effect for the NEXT
		// opening, leaving the previously selected tab visible.
		tabbedPane.setSelectedIndex(0);
		setVisible(true);
	}

	/** Shows the dialog primed with a brand-new, empty template. */
	public void showDialog(){
		showDialog(new Template());
	}

	@SuppressWarnings("rawtypes")
	@Override
	public void actionPerformed(ActionEvent e) {
		accepted = "OK".equals(e.getActionCommand());
		if(accepted)
		{
			Template template = new Template();
			// Collect verification problems from every sub-panel first; the
			// template is only generated when there are none.
			List<String> problems = new ArrayList<String>();
			for(TemplateSubPanel p : subPanels){
				List<String> issues = p.verify();
				if(issues!=null&&!issues.isEmpty()){
					for(String s : issues){
						problems.add(p.getPanelName() + " : " + s);
					}
				}
			}
			if(!problems.isEmpty()){
				StringBuilder b = new StringBuilder();
				for(String s : problems)
					b.append(s).append(Utility.NEWLINE);
				JOptionPane.showMessageDialog(this, "Unable to save the Template for the following reasons:" +
						Utility.NEWLINE + Utility.NEWLINE + b.toString() + Utility.NEWLINE +
						"Please fix these issues before continuing");
				return; // keep the dialog open so the user can fix the input
			}
			for(TemplateSubPanel p : subPanels)
				p.generateTemplate(template);
			// Notify each DISTINCT layer definition exactly once.
			Set<LayerDefinition> seen = new HashSet<LayerDefinition>();
			for(LayerTemplate lt : template.getLayers()){
				if(!seen.contains(lt.getDefinition())){
					lt.getDefinition().onTemplateCreation(template);
					seen.add(lt.getDefinition());
				}
			}
			this.template = template;
			if(!data.saveTemplate(template)){
				//TODO this is an error, handle it somehow
				return;
			}
		}
		setVisible(false);
	}

	/** @return the template produced by the last OK, or the one last edited */
	public Template getTemplate() {
		return template;
	}

	/** Resets every sub-panel to its pristine state. */
	public void reset(){
		for(TemplateSubPanel p : subPanels)
			p.reset();
	}
}
| |
/* $Id: ExpireStufferThread.java 988245 2010-08-23 18:39:35Z kwright $ */
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.manifoldcf.crawler.system;
import org.apache.manifoldcf.core.interfaces.*;
import org.apache.manifoldcf.agents.interfaces.*;
import org.apache.manifoldcf.crawler.interfaces.*;
import org.apache.manifoldcf.crawler.system.Logging;
import java.util.*;
import java.lang.reflect.*;
/** This class represents the stuffer thread. This thread's job is to request documents from the database and add them to the
* document queue. The thread then sleeps until the document queue is empty again.
*/
public class ExpireStufferThread extends Thread
{
  public static final String _rcsid = "@(#)$Id: ExpireStufferThread.java 988245 2010-08-23 18:39:35Z kwright $";

  // Local data
  /** This is a reference to the static main document expiration queue */
  protected final DocumentCleanupQueue documentQueue;
  /** Worker thread pool reset manager */
  protected final WorkerResetManager resetManager;
  /** This is the number of entries we want to stuff at any one time. */
  protected final int n;
  /** Process ID */
  protected final String processID;

  /** Constructor.
  *@param documentQueue is the document queue we'll be stuffing.
  *@param n represents the number of threads that will be processing queued stuff, NOT the
  * number of documents to be done at once!
  */
  public ExpireStufferThread(DocumentCleanupQueue documentQueue, int n, WorkerResetManager resetManager, String processID)
    throws ManifoldCFException
  {
    super();
    this.documentQueue = documentQueue;
    this.n = n;
    this.resetManager = resetManager;
    this.processID = processID;
    setName("Expire stuffer thread");
    setDaemon(true);
    // The priority of this thread is higher than most others.  We want stuffing to proceed even if the machine
    // is pretty busy already.
    setPriority(getPriority()+1);
  }

  /**
   * Main loop: repeatedly pulls batches of expired documents from the job
   * manager and pushes them, grouped by job, onto the shared cleanup queue.
   * Runs until interrupted; recoverable errors are logged and the loop
   * continues, while unrecoverable ones terminate the whole process.
   */
  public void run()
  {
    // Register with the reset manager so this thread participates in
    // coordinated pause/reset cycles with the worker pool.
    resetManager.registerMe();

    try
    {
      // Create a thread context object.
      IThreadContext threadContext = ThreadContextFactory.make();
      IJobManager jobManager = JobManagerFactory.make(threadContext);

      Logging.threads.debug("Expire stuffer thread: Maximum document count per check is "+Integer.toString(n));

      // Hashmap keyed by jobid and containing ArrayLists.
      // This way we can guarantee priority will do the right thing, because the
      // priority is per-job.  We CANNOT guarantee anything about scheduling order, however,
      // other than that it falls in the time window.
      HashMap documentSets = new HashMap();

      // Job description map (local) - designed to improve performance.
      // Cleared and reloaded on every batch of documents.
      HashMap jobDescriptionMap = new HashMap();

      IDBInterface database = DBInterfaceFactory.make(threadContext,
        ManifoldCF.getMasterDatabaseName(),
        ManifoldCF.getMasterDatabaseUsername(),
        ManifoldCF.getMasterDatabasePassword());

      // Batch size is bounded by the database's maximum IN-clause length.
      int deleteChunkSize = database.getMaxInClause();

      // Loop
      while (true)
      {
        // Do another try/catch around everything in the loop
        try
        {
          if (Thread.currentThread().isInterrupted())
            throw new ManifoldCFException("Interrupted",ManifoldCFException.INTERRUPTED);

          // Check if we're okay
          resetManager.waitForReset(threadContext);

          // System.out.println("Waiting...");
          // Wait until queue is empty enough.
          boolean isEmpty = documentQueue.checkIfEmpty(n*3);
          if (isEmpty == false)
          {
            sleep(1000);
            continue;
          }

          Logging.threads.debug("Expiration stuffer thread woke up");

          // What we want to do is load enough documents to completely fill n queued document sets.
          // The number n passed in here thus cannot be used in a query to limit the number of returned
          // results.  Instead, it must be factored into the limit portion of the query.
          long currentTime = System.currentTimeMillis();
          DocumentSetAndFlags docsAndFlags = jobManager.getExpiredDocuments(processID,deleteChunkSize,currentTime);
          DocumentDescription[] descs = docsAndFlags.getDocumentSet();
          boolean[] deleteFromIndex = docsAndFlags.getFlags();

          if (Thread.currentThread().isInterrupted())
            throw new ManifoldCFException("Interrupted",ManifoldCFException.INTERRUPTED);

          if (Logging.threads.isDebugEnabled())
          {
            Logging.threads.debug("Expiration stuffer thread: Found "+Integer.toString(descs.length)+" documents to expire");
          }

          // If there are no documents at all, then we can sleep for a while.
          // The theory is that we need to allow stuff to accumulate.
          if (descs.length == 0)
          {
            ManifoldCF.sleep(5000L);      // 5 seconds
            continue;
          }

          // Do the stuffing.  Each set must be segregated by job, since we need the job ID in the doc set.
          Map jobMap = new HashMap();
          int k = 0;
          while (k < descs.length)
          {
            CleanupQueuedDocument x = new CleanupQueuedDocument(descs[k],deleteFromIndex[k]);
            Long jobID = descs[k].getJobID();
            List y = (List)jobMap.get(jobID);
            if (y == null)
            {
              y = new ArrayList();
              jobMap.put(jobID,y);
            }
            y.add(x);
            k++;
          }

          // Queue one DocumentCleanupSet per job, attaching the (cached)
          // job description needed by the downstream cleanup workers.
          Iterator iter = jobMap.keySet().iterator();
          while (iter.hasNext())
          {
            Long jobID = (Long)iter.next();
            IJobDescription jobDescription = jobManager.load(jobID,true);
            List y = (List)jobMap.get(jobID);
            CleanupQueuedDocument[] docDescs = new CleanupQueuedDocument[y.size()];
            k = 0;
            while (k < docDescs.length)
            {
              docDescs[k] = (CleanupQueuedDocument)y.get(k);
              k++;
            }
            DocumentCleanupSet set = new DocumentCleanupSet(docDescs,jobDescription);
            documentQueue.addDocuments(set);
          }

          // Give other threads a chance to drain what we just queued.
          yield();
        }
        catch (ManifoldCFException e)
        {
          if (e.getErrorCode() == ManifoldCFException.INTERRUPTED)
            break;

          if (e.getErrorCode() == ManifoldCFException.DATABASE_CONNECTION_ERROR)
          {
            // Database trouble: notify the reset manager so the pool pauses,
            // then back off before retrying.
            resetManager.noteEvent();

            Logging.threads.error("Expiration stuffer thread aborting and restarting due to database connection reset: "+e.getMessage(),e);
            try
            {
              // Give the database a chance to catch up/wake up
              ManifoldCF.sleep(10000L);
            }
            catch (InterruptedException se)
            {
              break;
            }
            continue;
          }

          // Log it, but keep the thread alive
          Logging.threads.error("Exception tossed: "+e.getMessage(),e);

          if (e.getErrorCode() == ManifoldCFException.SETUP_ERROR)
          {
            // Configuration problems cannot be retried; stop the process.
            System.exit(1);
          }

        }
        catch (InterruptedException e)
        {
          // We're supposed to quit
          break;
        }
        catch (OutOfMemoryError e)
        {
          System.err.println("agents process ran out of memory - shutting down");
          e.printStackTrace(System.err);
          System.exit(-200);
        }
        catch (Throwable e)
        {
          // A more severe error - but stay alive
          Logging.threads.fatal("Error tossed: "+e.getMessage(),e);
        }
      }
    }
    catch (Throwable e)
    {
      // Severe error on initialization
      System.err.println("agents process could not start - shutting down");
      Logging.threads.fatal("ExpirationStufferThread initialization error tossed: "+e.getMessage(),e);
      System.exit(-300);
    }
  }

}
| |
/*
* Copyright (c) 2011 University of Tartu
*/
package com.turn.tpmml.evaluator;
import com.turn.tpmml.Array;
import com.turn.tpmml.CompoundPredicate;
import com.turn.tpmml.False;
import com.turn.tpmml.Predicate;
import com.turn.tpmml.SimplePredicate;
import com.turn.tpmml.SimpleSetPredicate;
import com.turn.tpmml.True;
import com.turn.tpmml.manager.TPMMLException.TPMMLCause;
import java.util.List;
/**
 * Evaluates PMML predicates using three-valued (Kleene) logic:
 * {@code Boolean.TRUE}, {@code Boolean.FALSE}, or {@code null} meaning
 * "unknown" (e.g. a missing field value).
 */
public class PredicateUtil {

	private PredicateUtil() {
	}

	/**
	 * Dispatches the predicate to the evaluator matching its concrete type.
	 *
	 * @return TRUE/FALSE, or {@code null} when the outcome cannot be decided
	 * @throws EvaluationException for unsupported predicate types
	 */
	public static Boolean evaluate(Predicate predicate, EvaluationContext context)
			throws EvaluationException {
		if (predicate instanceof SimplePredicate) {
			return evaluateSimplePredicate((SimplePredicate) predicate, context);
		} else
		if (predicate instanceof CompoundPredicate) {
			return evaluateCompoundPredicate((CompoundPredicate) predicate, context);
		} else
		if (predicate instanceof SimpleSetPredicate) {
			return evaluateSimpleSetPredicate((SimpleSetPredicate) predicate, context);
		} else
		if (predicate instanceof True) {
			return evaluateTrue((True) predicate);
		} else
		if (predicate instanceof False) {
			return evaluateFalse((False) predicate);
		} else {
			throw new EvaluationException(TPMMLCause.UNSUPPORTED_OPERATION,
					predicate.toString());
		}
	}

	/**
	 * Evaluates a single field/operator/value comparison.
	 * <p>
	 * IS_MISSING / IS_NOT_MISSING are answered from the nullness of the field
	 * value alone; for every other operator a missing value yields
	 * {@code null} (unknown).
	 *
	 * @throws EvaluationException for unsupported comparison operators
	 */
	public static Boolean evaluateSimplePredicate(SimplePredicate simplePredicate,
			EvaluationContext context) throws EvaluationException {
		Object value = ExpressionUtil.evaluate(simplePredicate.getField(), context);

		switch (simplePredicate.getOperator()) {
		case IS_MISSING:
			return Boolean.valueOf(value == null);
		case IS_NOT_MISSING:
			return Boolean.valueOf(value != null);
		default:
			break;
		}

		if (value == null) {
			// Unknown field value: comparison outcome is unknown.
			return null;
		}

		int order = ParameterUtil.compare(value, simplePredicate.getValue());

		SimplePredicate.Operator operator = simplePredicate.getOperator();
		switch (operator) {
		case EQUAL:
			return Boolean.valueOf(order == 0);
		case NOT_EQUAL:
			return Boolean.valueOf(order != 0);
		case LESS_THAN:
			return Boolean.valueOf(order < 0);
		case LESS_OR_EQUAL:
			return Boolean.valueOf(order <= 0);
		case GREATER_THAN:
			return Boolean.valueOf(order > 0);
		case GREATER_OR_EQUAL:
			return Boolean.valueOf(order >= 0);
		default:
			throw new EvaluationException(TPMMLCause.UNSUPPORTED_OPERATION,
					operator.name());
		}
	}

	/**
	 * Folds the child predicates together with the compound's boolean
	 * operator. SURROGATE returns the first non-null (decidable) child
	 * result; AND/OR/XOR combine all children with three-valued logic.
	 */
	public static Boolean evaluateCompoundPredicate(CompoundPredicate compoundPredicate,
			EvaluationContext context) throws EvaluationException {
		List<Predicate> predicates = compoundPredicate.getContent();

		Boolean result = evaluate(predicates.get(0), context);

		switch (compoundPredicate.getBooleanOperator()) {
		case AND:
		case OR:
		case XOR:
			break;
		case SURROGATE:
			if (result != null) {
				return result;
			}
			break;
		}

		for (Predicate predicate : predicates.subList(1, predicates.size())) {
			Boolean value = evaluate(predicate, context);

			switch (compoundPredicate.getBooleanOperator()) {
			case AND:
				result = PredicateUtil.binaryAnd(result, value);
				break;
			case OR:
				result = PredicateUtil.binaryOr(result, value);
				break;
			case XOR:
				result = PredicateUtil.binaryXor(result, value);
				break;
			case SURROGATE:
				if (value != null) {
					return value;
				}
				break;
			}
		}

		return result;
	}

	/**
	 * Tests set membership of the field value against the predicate's array.
	 *
	 * @throws EvaluationException if the field value is missing or the
	 *         operator is unsupported
	 */
	public static Boolean evaluateSimpleSetPredicate(SimpleSetPredicate simpleSetPredicate,
			EvaluationContext context) throws EvaluationException {
		Object value = ExpressionUtil.evaluate(simpleSetPredicate.getField(), context);
		if (value == null) {
			throw new EvaluationException(TPMMLCause.MISSING_PARAMETER,
					simpleSetPredicate.getField());
		}

		Array array = simpleSetPredicate.getArray();

		SimpleSetPredicate.BooleanOperator operator = simpleSetPredicate.getBooleanOperator();
		switch (operator) {
		case IS_IN:
			return ArrayUtil.isIn(array, value);
		case IS_NOT_IN:
			return ArrayUtil.isNotIn(array, value);
		default:
			throw new EvaluationException(TPMMLCause.UNSUPPORTED_OPERATION,
					operator.name());
		}
	}

	/** The constant {@code true} predicate. */
	public static Boolean evaluateTrue(True truePredicate) {
		return Boolean.TRUE;
	}

	/** The constant {@code false} predicate. */
	public static Boolean evaluateFalse(False falsePredicate) {
		return Boolean.FALSE;
	}

	/**
	 * Three-valued AND: FALSE dominates unknown ({@code null}), i.e.
	 * {@code null AND false == false}, {@code null AND true == null}.
	 */
	public static Boolean binaryAnd(Boolean left, Boolean right) {
		if (left == null) {
			return (right == null || right.booleanValue()) ? null : Boolean.FALSE;
		}
		if (right == null) {
			// (The old code re-checked left == null here, which is
			// unreachable at this point.)
			return left.booleanValue() ? null : Boolean.FALSE;
		}
		return Boolean.valueOf(left.booleanValue() & right.booleanValue());
	}

	/**
	 * Three-valued OR: TRUE dominates unknown ({@code null}), i.e.
	 * {@code null OR true == true}, {@code null OR false == null}.
	 */
	public static Boolean binaryOr(Boolean left, Boolean right) {
		if (left != null && left.booleanValue()) {
			return Boolean.TRUE;
		} else if (right != null && right.booleanValue()) {
			return Boolean.TRUE;
		} else if (left == null || right == null) {
			return null;
		} else {
			return Boolean.valueOf(left.booleanValue() | right.booleanValue());
		}
	}

	/**
	 * Three-valued XOR: any unknown operand makes the result unknown.
	 */
	public static Boolean binaryXor(Boolean left, Boolean right) {
		if (left == null || right == null) {
			return null;
		} else {
			return Boolean.valueOf(left.booleanValue() ^ right.booleanValue());
		}
	}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/cloudtrace/v1/trace.proto
package com.google.devtools.cloudtrace.v1;
/**
* <pre>
* The request message for the `GetTrace` method.
* </pre>
*
* Protobuf type {@code google.devtools.cloudtrace.v1.GetTraceRequest}
*/
public final class GetTraceRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.devtools.cloudtrace.v1.GetTraceRequest)
GetTraceRequestOrBuilder {
// Use GetTraceRequest.newBuilder() to construct.
private GetTraceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetTraceRequest() {
projectId_ = "";
traceId_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
private GetTraceRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 10: {
java.lang.String s = input.readStringRequireUtf8();
projectId_ = s;
break;
}
case 18: {
java.lang.String s = input.readStringRequireUtf8();
traceId_ = s;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.devtools.cloudtrace.v1.TraceProto.internal_static_google_devtools_cloudtrace_v1_GetTraceRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.devtools.cloudtrace.v1.TraceProto.internal_static_google_devtools_cloudtrace_v1_GetTraceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.devtools.cloudtrace.v1.GetTraceRequest.class, com.google.devtools.cloudtrace.v1.GetTraceRequest.Builder.class);
}
public static final int PROJECT_ID_FIELD_NUMBER = 1;
private volatile java.lang.Object projectId_;
/**
* <pre>
* ID of the Cloud project where the trace data is stored.
* </pre>
*
* <code>optional string project_id = 1;</code>
*/
public java.lang.String getProjectId() {
java.lang.Object ref = projectId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
projectId_ = s;
return s;
}
}
/**
* <pre>
* ID of the Cloud project where the trace data is stored.
* </pre>
*
* <code>optional string project_id = 1;</code>
*/
public com.google.protobuf.ByteString
getProjectIdBytes() {
java.lang.Object ref = projectId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
projectId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TRACE_ID_FIELD_NUMBER = 2;
private volatile java.lang.Object traceId_;
/**
* <pre>
* ID of the trace to return.
* </pre>
*
* <code>optional string trace_id = 2;</code>
*/
public java.lang.String getTraceId() {
java.lang.Object ref = traceId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
traceId_ = s;
return s;
}
}
/**
* <pre>
* ID of the trace to return.
* </pre>
*
* <code>optional string trace_id = 2;</code>
*/
public com.google.protobuf.ByteString
getTraceIdBytes() {
java.lang.Object ref = traceId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
traceId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getProjectIdBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, projectId_);
}
if (!getTraceIdBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, traceId_);
}
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getProjectIdBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, projectId_);
}
if (!getTraceIdBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, traceId_);
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.devtools.cloudtrace.v1.GetTraceRequest)) {
return super.equals(obj);
}
com.google.devtools.cloudtrace.v1.GetTraceRequest other = (com.google.devtools.cloudtrace.v1.GetTraceRequest) obj;
boolean result = true;
result = result && getProjectId()
.equals(other.getProjectId());
result = result && getTraceId()
.equals(other.getTraceId());
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER;
hash = (53 * hash) + getProjectId().hashCode();
hash = (37 * hash) + TRACE_ID_FIELD_NUMBER;
hash = (53 * hash) + getTraceId().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// ---------------------------------------------------------------------------
// Generated static parse entry points. All overloads delegate to PARSER
// (defined below); the IOException-throwing variants wrap parsing of streams,
// the InvalidProtocolBufferException variants parse in-memory data.
// ---------------------------------------------------------------------------
public static com.google.devtools.cloudtrace.v1.GetTraceRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.devtools.cloudtrace.v1.GetTraceRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.cloudtrace.v1.GetTraceRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.devtools.cloudtrace.v1.GetTraceRequest parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.cloudtrace.v1.GetTraceRequest parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.devtools.cloudtrace.v1.GetTraceRequest parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static com.google.devtools.cloudtrace.v1.GetTraceRequest parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static com.google.devtools.cloudtrace.v1.GetTraceRequest parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.devtools.cloudtrace.v1.GetTraceRequest parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.devtools.cloudtrace.v1.GetTraceRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Generated builder factories. A fresh Builder is obtained from the default
// instance; newBuilder(prototype) pre-populates it from an existing message.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.devtools.cloudtrace.v1.GetTraceRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
  // Avoid the mergeFrom pass when converting the (empty) default instance.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * <pre>
 * The request message for the `GetTrace` method.
 * </pre>
 *
 * Protobuf type {@code google.devtools.cloudtrace.v1.GetTraceRequest}
 *
 * Mutable builder for {@code GetTraceRequest}. Generated by the protocol
 * buffer compiler — do not hand-edit; regenerate from the .proto instead.
 */
public static final class Builder extends
    com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:google.devtools.cloudtrace.v1.GetTraceRequest)
    com.google.devtools.cloudtrace.v1.GetTraceRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.devtools.cloudtrace.v1.TraceProto.internal_static_google_devtools_cloudtrace_v1_GetTraceRequest_descriptor;
  }
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.devtools.cloudtrace.v1.TraceProto.internal_static_google_devtools_cloudtrace_v1_GetTraceRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.devtools.cloudtrace.v1.GetTraceRequest.class, com.google.devtools.cloudtrace.v1.GetTraceRequest.Builder.class);
  }
  // Construct using com.google.devtools.cloudtrace.v1.GetTraceRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  // No repeated/message fields here, so there are no nested field builders to
  // eagerly initialize; this hook is emitted by the generator regardless.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3
        .alwaysUseFieldBuilders) {
    }
  }
  /** Resets both fields to their proto3 defaults (empty string). */
  public Builder clear() {
    super.clear();
    projectId_ = "";
    traceId_ = "";
    return this;
  }
  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return com.google.devtools.cloudtrace.v1.TraceProto.internal_static_google_devtools_cloudtrace_v1_GetTraceRequest_descriptor;
  }
  public com.google.devtools.cloudtrace.v1.GetTraceRequest getDefaultInstanceForType() {
    return com.google.devtools.cloudtrace.v1.GetTraceRequest.getDefaultInstance();
  }
  /** Builds the message, throwing if it is not initialized (never here: no required fields). */
  public com.google.devtools.cloudtrace.v1.GetTraceRequest build() {
    com.google.devtools.cloudtrace.v1.GetTraceRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  /** Copies the builder state into a new immutable message without validation. */
  public com.google.devtools.cloudtrace.v1.GetTraceRequest buildPartial() {
    com.google.devtools.cloudtrace.v1.GetTraceRequest result = new com.google.devtools.cloudtrace.v1.GetTraceRequest(this);
    result.projectId_ = projectId_;
    result.traceId_ = traceId_;
    onBuilt();
    return result;
  }
  public Builder clone() {
    return (Builder) super.clone();
  }
  // Reflective field mutators: generated covariant-return overrides that
  // simply narrow the return type of the GeneratedMessageV3.Builder versions.
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      Object value) {
    return (Builder) super.setField(field, value);
  }
  public Builder clearField(
      com.google.protobuf.Descriptors.FieldDescriptor field) {
    return (Builder) super.clearField(field);
  }
  public Builder clearOneof(
      com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return (Builder) super.clearOneof(oneof);
  }
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      int index, Object value) {
    return (Builder) super.setRepeatedField(field, index, value);
  }
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      Object value) {
    return (Builder) super.addRepeatedField(field, value);
  }
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.devtools.cloudtrace.v1.GetTraceRequest) {
      return mergeFrom((com.google.devtools.cloudtrace.v1.GetTraceRequest)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  /** Merges non-default (non-empty) fields from {@code other} into this builder. */
  public Builder mergeFrom(com.google.devtools.cloudtrace.v1.GetTraceRequest other) {
    if (other == com.google.devtools.cloudtrace.v1.GetTraceRequest.getDefaultInstance()) return this;
    if (!other.getProjectId().isEmpty()) {
      projectId_ = other.projectId_;
      onChanged();
    }
    if (!other.getTraceId().isEmpty()) {
      traceId_ = other.traceId_;
      onChanged();
    }
    onChanged();
    return this;
  }
  // Always true: proto3 messages have no required fields.
  public final boolean isInitialized() {
    return true;
  }
  /**
   * Parses from a stream and merges the result. On a parse error, any
   * partially-parsed message is still merged (see the finally block) before
   * the exception is rethrown as an IOException.
   */
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.devtools.cloudtrace.v1.GetTraceRequest parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (com.google.devtools.cloudtrace.v1.GetTraceRequest) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  // Stored as Object: either a String or a ByteString; getters lazily convert
  // and cache the other representation (standard protobuf lazy-UTF-8 scheme).
  private java.lang.Object projectId_ = "";
  /**
   * <pre>
   * ID of the Cloud project where the trace data is stored.
   * </pre>
   *
   * <code>optional string project_id = 1;</code>
   */
  public java.lang.String getProjectId() {
    java.lang.Object ref = projectId_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      projectId_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <pre>
   * ID of the Cloud project where the trace data is stored.
   * </pre>
   *
   * <code>optional string project_id = 1;</code>
   */
  public com.google.protobuf.ByteString
      getProjectIdBytes() {
    java.lang.Object ref = projectId_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      projectId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * <pre>
   * ID of the Cloud project where the trace data is stored.
   * </pre>
   *
   * <code>optional string project_id = 1;</code>
   */
  public Builder setProjectId(
      java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    projectId_ = value;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * ID of the Cloud project where the trace data is stored.
   * </pre>
   *
   * <code>optional string project_id = 1;</code>
   */
  public Builder clearProjectId() {
    projectId_ = getDefaultInstance().getProjectId();
    onChanged();
    return this;
  }
  /**
   * <pre>
   * ID of the Cloud project where the trace data is stored.
   * </pre>
   *
   * <code>optional string project_id = 1;</code>
   */
  public Builder setProjectIdBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    projectId_ = value;
    onChanged();
    return this;
  }
  // Same lazy String/ByteString scheme as projectId_ above.
  private java.lang.Object traceId_ = "";
  /**
   * <pre>
   * ID of the trace to return.
   * </pre>
   *
   * <code>optional string trace_id = 2;</code>
   */
  public java.lang.String getTraceId() {
    java.lang.Object ref = traceId_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      traceId_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <pre>
   * ID of the trace to return.
   * </pre>
   *
   * <code>optional string trace_id = 2;</code>
   */
  public com.google.protobuf.ByteString
      getTraceIdBytes() {
    java.lang.Object ref = traceId_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      traceId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * <pre>
   * ID of the trace to return.
   * </pre>
   *
   * <code>optional string trace_id = 2;</code>
   */
  public Builder setTraceId(
      java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    traceId_ = value;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * ID of the trace to return.
   * </pre>
   *
   * <code>optional string trace_id = 2;</code>
   */
  public Builder clearTraceId() {
    traceId_ = getDefaultInstance().getTraceId();
    onChanged();
    return this;
  }
  /**
   * <pre>
   * ID of the trace to return.
   * </pre>
   *
   * <code>optional string trace_id = 2;</code>
   */
  public Builder setTraceIdBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    traceId_ = value;
    onChanged();
    return this;
  }
  // Unknown fields are intentionally dropped for this message type
  // (generator emitted no-op overrides).
  public final Builder setUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return this;
  }
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return this;
  }
  // @@protoc_insertion_point(builder_scope:google.devtools.cloudtrace.v1.GetTraceRequest)
}
// @@protoc_insertion_point(class_scope:google.devtools.cloudtrace.v1.GetTraceRequest)
// Singleton default instance: the canonical "empty" GetTraceRequest.
private static final com.google.devtools.cloudtrace.v1.GetTraceRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.devtools.cloudtrace.v1.GetTraceRequest();
}
public static com.google.devtools.cloudtrace.v1.GetTraceRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Shared parser instance backing all the static parseFrom overloads.
private static final com.google.protobuf.Parser<GetTraceRequest>
    PARSER = new com.google.protobuf.AbstractParser<GetTraceRequest>() {
  public GetTraceRequest parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new GetTraceRequest(input, extensionRegistry);
  }
};
public static com.google.protobuf.Parser<GetTraceRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GetTraceRequest> getParserForType() {
  return PARSER;
}
public com.google.devtools.cloudtrace.v1.GetTraceRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.QueryBuilder;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lucene.all.AllTermQuery;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.support.QueryParsers;
import java.io.IOException;
/**
 * Builds Lucene queries for analyzed "match"-style text queries: boolean,
 * phrase and phrase-prefix. The analysis chain is resolved from the mapping
 * (or forced via {@link #setAnalyzer}); term construction can be customised
 * by subclasses through {@link #blendTermQuery} and {@link #blendTermsQuery}.
 */
public class MatchQuery {

    /** How the analyzed terms are combined into the final query. */
    public enum Type implements Writeable {
        /**
         * The text is analyzed and terms are added to a boolean query.
         */
        BOOLEAN(0),
        /**
         * The text is analyzed and used as a phrase query.
         */
        PHRASE(1),
        /**
         * The text is analyzed and used in a phrase query, with the last term acting as a prefix.
         */
        PHRASE_PREFIX(2);

        /** Stable wire id, intentionally decoupled from {@link Enum#ordinal()}. */
        private final int ordinal;

        Type(int ordinal) {
            this.ordinal = ordinal;
        }

        /**
         * Reads a {@link Type} previously written with {@link #writeTo}.
         *
         * @throws ElasticsearchException if the wire id does not match any constant
         */
        public static Type readFromStream(StreamInput in) throws IOException {
            int ord = in.readVInt();
            for (Type type : Type.values()) {
                if (type.ordinal == ord) {
                    return type;
                }
            }
            throw new ElasticsearchException("unknown serialized type [" + ord + "]");
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVInt(this.ordinal);
        }
    }

    /** What to match when analysis of the query text produces zero tokens. */
    public enum ZeroTermsQuery implements Writeable {
        NONE(0),
        ALL(1);

        /** Stable wire id, intentionally decoupled from {@link Enum#ordinal()}. */
        private final int ordinal;

        ZeroTermsQuery(int ordinal) {
            this.ordinal = ordinal;
        }

        /**
         * Reads a {@link ZeroTermsQuery} previously written with {@link #writeTo}.
         *
         * @throws ElasticsearchException if the wire id does not match any constant
         */
        public static ZeroTermsQuery readFromStream(StreamInput in) throws IOException {
            int ord = in.readVInt();
            for (ZeroTermsQuery zeroTermsQuery : ZeroTermsQuery.values()) {
                if (zeroTermsQuery.ordinal == ord) {
                    return zeroTermsQuery;
                }
            }
            throw new ElasticsearchException("unknown serialized type [" + ord + "]");
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVInt(this.ordinal);
        }
    }

    /** the default phrase slop */
    public static final int DEFAULT_PHRASE_SLOP = 0;
    /** the default leniency setting */
    public static final boolean DEFAULT_LENIENCY = false;
    /** the default zero terms query */
    public static final ZeroTermsQuery DEFAULT_ZERO_TERMS_QUERY = ZeroTermsQuery.NONE;

    protected final QueryShardContext context;
    /** Forced analyzer name; null means "resolve from the field mapping". */
    protected String analyzer;
    protected BooleanClause.Occur occur = BooleanClause.Occur.SHOULD;
    protected boolean enablePositionIncrements = true;
    protected int phraseSlop = DEFAULT_PHRASE_SLOP;
    /** Non-null enables fuzzy term blending in {@link #blendTermQuery}. */
    protected Fuzziness fuzziness = null;
    protected int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength;
    protected int maxExpansions = FuzzyQuery.defaultMaxExpansions;
    protected boolean transpositions = FuzzyQuery.defaultTranspositions;
    protected MultiTermQuery.RewriteMethod fuzzyRewriteMethod;
    protected boolean lenient = DEFAULT_LENIENCY;
    protected ZeroTermsQuery zeroTermsQuery = DEFAULT_ZERO_TERMS_QUERY;
    /** Non-null switches BOOLEAN parsing to an {@link ExtendedCommonTermsQuery}. */
    protected Float commonTermsCutoff = null;

    public MatchQuery(QueryShardContext context) {
        this.context = context;
    }

    public void setAnalyzer(String analyzer) {
        this.analyzer = analyzer;
    }

    public void setOccur(BooleanClause.Occur occur) {
        this.occur = occur;
    }

    public void setCommonTermsCutoff(Float cutoff) {
        this.commonTermsCutoff = cutoff;
    }

    public void setEnablePositionIncrements(boolean enablePositionIncrements) {
        this.enablePositionIncrements = enablePositionIncrements;
    }

    public void setPhraseSlop(int phraseSlop) {
        this.phraseSlop = phraseSlop;
    }

    public void setFuzziness(Fuzziness fuzziness) {
        this.fuzziness = fuzziness;
    }

    public void setFuzzyPrefixLength(int fuzzyPrefixLength) {
        this.fuzzyPrefixLength = fuzzyPrefixLength;
    }

    public void setMaxExpansions(int maxExpansions) {
        this.maxExpansions = maxExpansions;
    }

    public void setTranspositions(boolean transpositions) {
        this.transpositions = transpositions;
    }

    public void setFuzzyRewriteMethod(MultiTermQuery.RewriteMethod fuzzyRewriteMethod) {
        this.fuzzyRewriteMethod = fuzzyRewriteMethod;
    }

    public void setLenient(boolean lenient) {
        this.lenient = lenient;
    }

    public void setZeroTermsQuery(ZeroTermsQuery zeroTermsQuery) {
        this.zeroTermsQuery = zeroTermsQuery;
    }

    /**
     * Resolves the analyzer to use: the explicitly forced one if set (failing
     * loudly when unknown), otherwise the field's search analyzer, otherwise
     * the index-wide default search analyzer.
     */
    protected Analyzer getAnalyzer(MappedFieldType fieldType) {
        if (this.analyzer == null) {
            if (fieldType != null) {
                return context.getSearchAnalyzer(fieldType);
            }
            return context.getMapperService().searchAnalyzer();
        } else {
            Analyzer analyzer = context.getMapperService().getIndexAnalyzers().get(this.analyzer);
            if (analyzer == null) {
                throw new IllegalArgumentException("No analyzer found for [" + this.analyzer + "]");
            }
            return analyzer;
        }
    }

    /**
     * Builds the match query of the given {@code type} for {@code fieldName}
     * over {@code value}. Untokenized fields (without a forced analyzer) fall
     * back to a single blended term query; text that analyzes to zero tokens
     * falls back to {@link #zeroTermsQuery()}.
     */
    public Query parse(Type type, String fieldName, Object value) throws IOException {
        final String field;
        MappedFieldType fieldType = context.fieldMapper(fieldName);
        if (fieldType != null) {
            field = fieldType.name();
        } else {
            field = fieldName;
        }
        /*
         * If the user forced an analyzer we really don't care if they are
         * searching a type that wants term queries to be used with query string
         * because the QueryBuilder will take care of it. If they haven't forced
         * an analyzer then types like NumberFieldType that want terms with
         * query string will blow up because their analyzer isn't capable of
         * passing through QueryBuilder.
         */
        boolean noForcedAnalyzer = this.analyzer == null;
        if (fieldType != null && fieldType.tokenized() == false && noForcedAnalyzer) {
            // Fix: use the resolved field name (fieldType.name()), consistent
            // with every analyzed branch below, instead of the raw request name.
            return blendTermQuery(new Term(field, value.toString()), fieldType);
        }
        Analyzer analyzer = getAnalyzer(fieldType);
        assert analyzer != null;
        MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType);
        builder.setEnablePositionIncrements(this.enablePositionIncrements);

        Query query = null;
        switch (type) {
            case BOOLEAN:
                if (commonTermsCutoff == null) {
                    query = builder.createBooleanQuery(field, value.toString(), occur);
                } else {
                    query = builder.createCommonTermsQuery(field, value.toString(), occur, occur, commonTermsCutoff, fieldType);
                }
                break;
            case PHRASE:
                query = builder.createPhraseQuery(field, value.toString(), phraseSlop);
                break;
            case PHRASE_PREFIX:
                query = builder.createPhrasePrefixQuery(field, value.toString(), phraseSlop, maxExpansions);
                break;
            default:
                throw new IllegalStateException("No type found for [" + type + "]");
        }

        if (query == null) {
            // All tokens were removed by analysis (e.g. stopwords only).
            return zeroTermsQuery();
        } else {
            return query;
        }
    }

    /**
     * Delegates to the field type's term query; in lenient mode a failure
     * (e.g. a malformed value for a numeric field) yields null instead of
     * propagating.
     */
    protected final Query termQuery(MappedFieldType fieldType, Object value, boolean lenient) {
        try {
            return fieldType.termQuery(value, context);
        } catch (RuntimeException e) {
            if (lenient) {
                return null;
            }
            throw e;
        }
    }

    /** Match-none for {@link ZeroTermsQuery#NONE} (the default), match-all otherwise. */
    protected Query zeroTermsQuery() {
        if (zeroTermsQuery == DEFAULT_ZERO_TERMS_QUERY) {
            return Queries.newMatchNoDocsQuery("Matching no documents because no terms present.");
        }
        return Queries.newMatchAllQuery();
    }

    /**
     * Lucene {@link QueryBuilder} that routes term creation through
     * {@link MatchQuery#blendTermQuery}/{@link MatchQuery#blendTermsQuery} and
     * adds phrase-prefix and common-terms construction.
     */
    private class MatchQueryBuilder extends QueryBuilder {

        private final MappedFieldType mapper;

        /**
         * Creates a new QueryBuilder using the given analyzer.
         */
        public MatchQueryBuilder(Analyzer analyzer, @Nullable MappedFieldType mapper) {
            super(analyzer);
            this.mapper = mapper;
        }

        @Override
        protected Query newTermQuery(Term term) {
            return blendTermQuery(term, mapper);
        }

        @Override
        protected Query newSynonymQuery(Term[] terms) {
            return blendTermsQuery(terms, mapper);
        }

        /**
         * Builds a {@link MultiPhrasePrefixQuery} from the analyzed phrase,
         * preserving positions and any boost wrapped around the inner query.
         * Falls back to the raw analyzed query for shapes it cannot convert.
         */
        public Query createPhrasePrefixQuery(String field, String queryText, int phraseSlop, int maxExpansions) {
            final Query query = createFieldQuery(getAnalyzer(), Occur.MUST, field, queryText, true, phraseSlop);
            // Unwrap (possibly nested) BoostQuery layers, accumulating the boost.
            float boost = 1;
            Query innerQuery = query;
            while (innerQuery instanceof BoostQuery) {
                BoostQuery bq = (BoostQuery) innerQuery;
                boost *= bq.getBoost();
                innerQuery = bq.getQuery();
            }
            final MultiPhrasePrefixQuery prefixQuery = new MultiPhrasePrefixQuery();
            prefixQuery.setMaxExpansions(maxExpansions);
            prefixQuery.setSlop(phraseSlop);
            if (innerQuery instanceof PhraseQuery) {
                PhraseQuery pq = (PhraseQuery) innerQuery;
                Term[] terms = pq.getTerms();
                int[] positions = pq.getPositions();
                for (int i = 0; i < terms.length; i++) {
                    prefixQuery.add(new Term[] {terms[i]}, positions[i]);
                }
                return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost);
            } else if (innerQuery instanceof MultiPhraseQuery) {
                MultiPhraseQuery pq = (MultiPhraseQuery) innerQuery;
                Term[][] terms = pq.getTermArrays();
                int[] positions = pq.getPositions();
                for (int i = 0; i < terms.length; i++) {
                    prefixQuery.add(terms[i], positions[i]);
                }
                return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost);
            } else if (innerQuery instanceof TermQuery) {
                prefixQuery.add(((TermQuery) innerQuery).getTerm());
                return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost);
            } else if (innerQuery instanceof AllTermQuery) {
                prefixQuery.add(((AllTermQuery) innerQuery).getTerm());
                return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost);
            }
            return query;
        }

        /**
         * Converts the analyzed boolean query into an
         * {@link ExtendedCommonTermsQuery} when every clause is a plain term
         * query; otherwise (null, non-boolean, or mixed clause types) returns
         * the original query unchanged.
         */
        public Query createCommonTermsQuery(String field, String queryText, Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, MappedFieldType fieldType) {
            Query booleanQuery = createBooleanQuery(field, queryText, lowFreqOccur);
            // instanceof is false for null, so no separate null check is needed.
            if (booleanQuery instanceof BooleanQuery) {
                BooleanQuery bq = (BooleanQuery) booleanQuery;
                ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, bq.isCoordDisabled(), fieldType);
                for (BooleanClause clause : bq.clauses()) {
                    if (!(clause.getQuery() instanceof TermQuery)) {
                        return booleanQuery;
                    }
                    query.add(((TermQuery) clause.getQuery()).getTerm());
                }
                return query;
            }
            return booleanQuery;
        }
    }

    /** Hook for subclasses; by default synonym positions become a {@link SynonymQuery}. */
    protected Query blendTermsQuery(Term[] terms, MappedFieldType fieldType) {
        return new SynonymQuery(terms);
    }

    /**
     * Builds the per-term query: a fuzzy query when fuzziness is configured
     * (field-type aware when possible, lenient failures degrade to a plain
     * term query), otherwise the field type's term query, otherwise a plain
     * Lucene {@link TermQuery}.
     */
    protected Query blendTermQuery(Term term, MappedFieldType fieldType) {
        if (fuzziness != null) {
            if (fieldType != null) {
                try {
                    Query query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions);
                    if (query instanceof FuzzyQuery) {
                        QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod);
                    }
                    return query;
                } catch (RuntimeException e) {
                    if (lenient) {
                        return new TermQuery(term);
                    } else {
                        throw e;
                    }
                }
            }
            int edits = fuzziness.asDistance(term.text());
            FuzzyQuery query = new FuzzyQuery(term, edits, fuzzyPrefixLength, maxExpansions, transpositions);
            QueryParsers.setRewriteMethod(query, fuzzyRewriteMethod);
            return query;
        }
        if (fieldType != null) {
            Query query = termQuery(fieldType, term.bytes(), lenient);
            if (query != null) {
                return query;
            }
        }
        return new TermQuery(term);
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.index.fielddata.BooleanScriptFieldData;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.script.BooleanFieldScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.runtime.BooleanScriptFieldExistsQuery;
import org.elasticsearch.search.runtime.BooleanScriptFieldTermQuery;
import java.time.ZoneId;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.function.Supplier;
/**
 * Runtime ("script") field type producing boolean values. Queries are executed
 * by running the script per document, so every query path first calls
 * {@code checkAllowExpensiveQueries}. Because a boolean field has only two
 * possible values, range and terms queries reduce to "is true allowed / is
 * false allowed" and are answered by {@link #termsQuery(boolean, boolean, SearchExecutionContext)}.
 */
public final class BooleanScriptFieldType extends AbstractScriptFieldType<BooleanFieldScript.LeafFactory> {
    // Mapping parser: builds this field type from a runtime-field definition,
    // defaulting to parsing values straight from _source when no script is given.
    public static final RuntimeField.Parser PARSER = new RuntimeField.Parser(name ->
        new Builder<>(name, BooleanFieldScript.CONTEXT, BooleanFieldScript.PARSE_FROM_SOURCE) {
            @Override
            RuntimeField newRuntimeField(BooleanFieldScript.Factory scriptFactory) {
                return new BooleanScriptFieldType(name, scriptFactory, getScript(), meta(), this);
            }
        });
    public BooleanScriptFieldType(String name) {
        this(name, BooleanFieldScript.PARSE_FROM_SOURCE, null, Collections.emptyMap(), (builder, params) -> builder);
    }
    BooleanScriptFieldType(
        String name,
        BooleanFieldScript.Factory scriptFactory,
        Script script,
        Map<String, String> meta,
        ToXContent toXContent
    ) {
        super(name, searchLookup -> scriptFactory.newFactory(name, script.getParams(), searchLookup), script, meta, toXContent);
    }
    @Override
    public String typeName() {
        return BooleanFieldMapper.CONTENT_TYPE;
    }
    // Maps the single-character indexed representation back to a Boolean:
    // "T" -> true, "F" -> false; anything else is rejected.
    @Override
    public Object valueForDisplay(Object value) {
        if (value == null) {
            return null;
        }
        switch (value.toString()) {
            case "F":
                return false;
            case "T":
                return true;
            default:
                throw new IllegalArgumentException("Expected [T] or [F] but got [" + value + "]");
        }
    }
    // Booleans have exactly one doc-value representation; custom formats and
    // time zones are explicitly rejected rather than silently ignored.
    @Override
    public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
        if (format != null) {
            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
        }
        if (timeZone != null) {
            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones");
        }
        return DocValueFormat.BOOLEAN;
    }
    @Override
    public BooleanScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
        return new BooleanScriptFieldData.Builder(name(), leafFactory(searchLookup.get()));
    }
    @Override
    public Query existsQuery(SearchExecutionContext context) {
        checkAllowExpensiveQueries(context);
        return new BooleanScriptFieldExistsQuery(script, leafFactory(context), name());
    }
    // Reduces the range over {false < true} to the pair (trueAllowed,
    // falseAllowed) by applying the lower bound first, then intersecting
    // (&=) with what the upper bound permits. A null bound coerces to false
    // via toBoolean, which behaves like an unbounded edge on that side.
    @Override
    public Query rangeQuery(
        Object lowerTerm,
        Object upperTerm,
        boolean includeLower,
        boolean includeUpper,
        ZoneId timeZone,
        DateMathParser parser,
        SearchExecutionContext context
    ) {
        boolean trueAllowed;
        boolean falseAllowed;

        /*
         * gte: true --- true matches
         * gt: true ---- none match
         * gte: false -- both match
         * gt: false --- true matches
         */
        if (toBoolean(lowerTerm)) {
            if (includeLower) {
                trueAllowed = true;
                falseAllowed = false;
            } else {
                trueAllowed = false;
                falseAllowed = false;
            }
        } else {
            if (includeLower) {
                trueAllowed = true;
                falseAllowed = true;
            } else {
                trueAllowed = true;
                falseAllowed = false;
            }
        }

        /*
         * This is how the indexed version works:
         * lte: true --- both match
         * lt: true ---- false matches
         * lte: false -- false matches
         * lt: false --- none match
         */
        if (toBoolean(upperTerm)) {
            if (includeUpper) {
                trueAllowed &= true;
                falseAllowed &= true;
            } else {
                trueAllowed &= false;
                falseAllowed &= true;
            }
        } else {
            if (includeUpper) {
                trueAllowed &= false;
                falseAllowed &= true;
            } else {
                trueAllowed &= false;
                falseAllowed &= false;
            }
        }

        return termsQuery(trueAllowed, falseAllowed, context);
    }
    // NOTE(review): passes leafFactory(context.lookup()) while termQuery below
    // passes leafFactory(context) — presumably two overloads resolving to the
    // same script factory; confirm against AbstractScriptFieldType.
    @Override
    public Query termQueryCaseInsensitive(Object value, SearchExecutionContext context) {
        checkAllowExpensiveQueries(context);
        return new BooleanScriptFieldTermQuery(script, leafFactory(context.lookup()), name(), toBoolean(value, true));
    }
    @Override
    public Query termQuery(Object value, SearchExecutionContext context) {
        checkAllowExpensiveQueries(context);
        return new BooleanScriptFieldTermQuery(script, leafFactory(context), name(), toBoolean(value, false));
    }
    @Override
    public Query termsQuery(Collection<?> values, SearchExecutionContext context) {
        if (values.isEmpty()) {
            return Queries.newMatchNoDocsQuery("Empty terms query");
        }
        // Collapse the terms into the two possible boolean values.
        boolean trueAllowed = false;
        boolean falseAllowed = false;
        for (Object value : values) {
            if (toBoolean(value, false)) {
                trueAllowed = true;
            } else {
                falseAllowed = true;
            }
        }
        return termsQuery(trueAllowed, falseAllowed, context);
    }
    // Shared tail for range/terms queries: both allowed == "field exists",
    // one allowed == term query on that value, neither == match nothing.
    private Query termsQuery(boolean trueAllowed, boolean falseAllowed, SearchExecutionContext context) {
        if (trueAllowed) {
            if (falseAllowed) {
                // Either true or false
                return existsQuery(context);
            }
            checkAllowExpensiveQueries(context);
            return new BooleanScriptFieldTermQuery(script, leafFactory(context), name(), true);
        }
        if (falseAllowed) {
            checkAllowExpensiveQueries(context);
            return new BooleanScriptFieldTermQuery(script, leafFactory(context), name(), false);
        }
        return new MatchNoDocsQuery("neither true nor false allowed");
    }
    private static boolean toBoolean(Object value) {
        return toBoolean(value, false);
    }
    /**
     * Convert the term into a boolean. Inspired by {@link BooleanFieldMapper.BooleanFieldType#indexedValueForSearch(Object)}.
     * Null coerces to false; Booleans pass through; everything else is parsed
     * from its string form (lower-cased first when {@code caseInsensitive}).
     */
    private static boolean toBoolean(Object value, boolean caseInsensitive) {
        if (value == null) {
            return false;
        }
        if (value instanceof Boolean) {
            return (Boolean) value;
        }
        String sValue;
        if (value instanceof BytesRef) {
            sValue = ((BytesRef) value).utf8ToString();
        } else {
            sValue = value.toString();
        }
        if (caseInsensitive) {
            sValue = Strings.toLowercaseAscii(sValue);
        }
        return Booleans.parseBoolean(sValue);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import javax.management.ObjectName;
import org.apache.logging.log4j.Logger;
import org.apache.geode.DataSerializer;
import org.apache.geode.internal.serialization.DataSerializableFixedID;
import org.apache.geode.internal.serialization.DeserializationContext;
import org.apache.geode.internal.serialization.KnownVersion;
import org.apache.geode.internal.serialization.SerializationContext;
import org.apache.geode.logging.internal.log4j.api.LogService;
/**
 * Central component for federation. It consists of an Object State as well as some meta data for
 * the Object being federated.
 *
 * <p>Instances are serialized (via both {@code java.io.Serializable} and
 * {@code DataSerializableFixedID}) and replicated from managed nodes to the managing node.
 */
public class FederationComponent
    implements java.io.Serializable, DataSerializableFixedID {
  private static final Logger logger = LogService.getLogger();
  private static final String THIS_COMPONENT = FederationComponent.class.getName();
  private static final long serialVersionUID = 3123549507449088591L;
  /**
   * Name of the MBean. This name will be replicated at Managing Node
   */
  private String objectName;
  /**
   * Name of the interface class. It will determine the interface for MBean at Managing Node side
   */
  private String interfaceClassName;
  /**
   * Flag to determine if MBean emits notification or not.
   */
  private boolean notificationEmitter;
  /**
   * This Map holds the object state as property-value. Every component should be serializable
   */
  private Map<String, Object> objectState = new HashMap<>();
  // Attribute name -> getter method; rebuilt locally by initGetters(), never serialized.
  private transient Map<String, Method> getterMethodMap;
  private transient Object mbeanObject;
  private transient Class mbeanInterfaceClass;
  // Snapshot of objectState taken at the start of refreshObjectState(true).
  private final transient Map<String, Object> oldObjectState = new HashMap<>();
  // Getter method -> converter that maps its return value to an open type.
  private final transient Map<Method, OpenMethod> methodHandlerMap = OpenTypeUtil.newMap();
  // Whether the previous refresh detected a change; see refreshObjectState() for why it is kept.
  private transient boolean prevRefreshChangeDetected = false;

  /**
   * @param object the MBean instance whose getters will be invoked on refresh
   * @param objectName ObjectName of the MBean
   * @param interfaceClass interface class of the MBean
   * @param notificationEmitter specifies whether this MBean is going to emit notifications
   */
  public FederationComponent(Object object, ObjectName objectName, Class interfaceClass,
      boolean notificationEmitter) {
    this.objectName = objectName.toString();
    interfaceClassName = interfaceClass.getCanonicalName();
    mbeanInterfaceClass = interfaceClass;
    this.notificationEmitter = notificationEmitter;
    mbeanObject = object;
    getterMethodMap = new HashMap<>();
    initGetters(interfaceClass);
  }

  /** No-arg constructor required for deserialization. */
  public FederationComponent() {}

  // Introspect the mbeanInterface and initialize this object's maps:
  // every no-arg, non-void getX()/isX() method is registered as attribute "X".
  private void initGetters(Class<?> mbeanInterface) {
    final Method[] methodArray = mbeanInterface.getMethods();
    for (Method m : methodArray) {
      String name = m.getName();
      String attrName = "";
      if (name.startsWith("get")) {
        attrName = name.substring(3);
      } else if (name.startsWith("is") && m.getReturnType() == boolean.class) {
        attrName = name.substring(2);
      }
      if (attrName.length() != 0 && m.getParameterTypes().length == 0
          && m.getReturnType() != void.class) { // For Getters
        m.setAccessible(true);
        getterMethodMap.put(attrName, m);
        methodHandlerMap.put(m, OpenMethod.from(m));
      }
    }
  }

  /**
   * gets the Canonical name of the MBean interface
   *
   * @return mbean interface class name
   */
  public String getMBeanInterfaceClass() {
    return interfaceClassName;
  }

  /**
   * True if this MBean is a notification emitter.
   *
   * @return whether its a notification emitter or not
   */
  public boolean isNotificationEmitter() {
    return notificationEmitter;
  }

  /**
   * This method will get called from Management Thread. This will dynamically invoke the MBeans
   * getter methods and set them in ObjectState Map.
   *
   * In Future releases we can implement the delta propagation here
   *
   * @param keepOldState if true, the current state is copied into {@code oldObjectState} before
   *        being overwritten
   * @return true if the refresh detects that the state changed. It will return false if two
   *         consecutive refresh calls results in no state change. This indicates to the
   *         LocalManager whether to send the MBean state to Manager or not.
   */
  public boolean refreshObjectState(boolean keepOldState) {
    boolean changeDetected = false;
    Object[] args = null;
    if (keepOldState) {
      oldObjectState.putAll(objectState);
    }
    for (Map.Entry<String, Method> gettorMethodEntry : getterMethodMap.entrySet()) {
      String property = gettorMethodEntry.getKey();
      Object propertyValue = null;
      try {
        Method m = gettorMethodEntry.getValue();
        propertyValue = m.invoke(mbeanObject, args);
        // To Handle open types in getter values
        OpenMethod op = methodHandlerMap.get(m);
        propertyValue = op.toOpenReturnValue(propertyValue);
      } catch (Exception e) {
        // a failing getter degrades to a null attribute value rather than failing the refresh
        propertyValue = null;
        if (logger.isTraceEnabled()) {
          logger.trace(e.getMessage());
        }
      }
      Object oldValue = objectState.put(property, propertyValue);
      if (!changeDetected) {
        // null-safe inequality check between the new and previous attribute value
        if (propertyValue != null) {
          if (!propertyValue.equals(oldValue)) {
            changeDetected = true;
          }
        } else { // new value is null
          if (oldValue != null) {
            changeDetected = true;
          }
        }
      }
    }
    // Report "changed" for one extra cycle after the last real change, so the manager
    // receives the final settled state.
    boolean retVal = prevRefreshChangeDetected || changeDetected;
    prevRefreshChangeDetected = changeDetected;
    return retVal;
  }

  @Override
  public boolean equals(Object anObject) {
    if (this == anObject) {
      return true;
    }
    if (anObject instanceof FederationComponent) {
      FederationComponent anotherFedComp = (FederationComponent) anObject;
      return anotherFedComp.interfaceClassName.equals(interfaceClassName)
          && anotherFedComp.notificationEmitter == notificationEmitter
          && anotherFedComp.objectState.equals(objectState)
          && anotherFedComp.objectName.equals(objectName);
    }
    return false;
  }

  @Override
  public int hashCode() {
    // Consistent with equals(): equal components always share the same objectName.
    return objectName.hashCode();
  }

  /**
   * Managing node will get Object state by calling this method
   *
   * @param propertyName attribute name as registered by {@link #initGetters(Class)}
   * @return value of the given property
   */
  public Object getValue(String propertyName) {
    return objectState.get(propertyName);
  }

  @Override
  public String toString() {
    if (Boolean.getBoolean("debug.Management")) {
      return " ObjectName = " + objectName + ",InterfaceClassName = " + interfaceClassName
          + ", NotificationEmitter = " + notificationEmitter + ", ObjectState = "
          + objectState.toString();
    } else {
      return "ObjectName = " + objectName;
    }
  }

  public Map<String, Object> getObjectState() {
    return objectState;
  }

  public Map<String, Object> getOldState() {
    return oldObjectState;
  }

  @Override
  public void fromData(DataInput in,
      DeserializationContext context) throws IOException, ClassNotFoundException {
    // Field order must mirror toData() exactly.
    notificationEmitter = DataSerializer.readPrimitiveBoolean(in);
    interfaceClassName = DataSerializer.readString(in);
    objectState = DataSerializer.readHashMap(in);
    objectName = DataSerializer.readString(in);
  }

  @Override
  public void toData(DataOutput out,
      SerializationContext context) throws IOException {
    DataSerializer.writePrimitiveBoolean(notificationEmitter, out);
    DataSerializer.writeString(interfaceClassName, out);
    DataSerializer.writeHashMap(objectState, out);
    DataSerializer.writeString(objectName, out);
  }

  @Override
  public int getDSFID() {
    return DataSerializableFixedID.MGMT_FEDERATION_COMPONENT;
  }

  public Object getMBeanObject() {
    return mbeanObject;
  }

  public Class getInterfaceClass() {
    return mbeanInterfaceClass;
  }

  @Override
  public KnownVersion[] getSerializationVersions() {
    // No version-specific serialization changes for this class.
    return null;
  }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.ovr;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* Field Of View (FOV) in tangent of the angle units. As an example, for a standard 90 degree vertical FOV, we would have:
*
* <pre><code>{ UpTan = tan(90 degrees / 2), DownTan = tan(90 degrees / 2) }</code></pre>
*
* <h3>Member documentation</h3>
*
* <ul>
* <li>{@code UpTan} – the tangent of the angle between the viewing vector and the top edge of the field of view</li>
* <li>{@code DownTan} – the tangent of the angle between the viewing vector and the bottom edge of the field of view</li>
* <li>{@code LeftTan} – the tangent of the angle between the viewing vector and the left edge of the field of view</li>
* <li>{@code RightTan} – the tangent of the angle between the viewing vector and the right edge of the field of view</li>
* </ul>
*
* <h3>Layout</h3>
*
* <pre><code>struct ovrFovPort {
float UpTan;
float DownTan;
float LeftTan;
float RightTan;
}</code></pre>
*/
public class OVRFovPort extends Struct implements NativeResource {
	/** The struct size in bytes. */
	public static final int SIZEOF;
	/** The struct alignment in bytes. */
	public static final int ALIGNOF;
	/** The struct member offsets. */
	public static final int
		UPTAN,
		DOWNTAN,
		LEFTTAN,
		RIGHTTAN;
	static {
		// Four consecutive 4-byte float members; offsets are computed from the layout.
		Layout layout = __struct(
			__member(4),
			__member(4),
			__member(4),
			__member(4)
		);
		SIZEOF = layout.getSize();
		ALIGNOF = layout.getAlignment();
		UPTAN = layout.offsetof(0);
		DOWNTAN = layout.offsetof(1);
		LEFTTAN = layout.offsetof(2);
		RIGHTTAN = layout.offsetof(3);
	}
	// Wraps an existing native allocation; container (possibly null) keeps a strong reference.
	OVRFovPort(long address, ByteBuffer container) {
		super(address, container);
	}
	/**
	 * Creates a {@link OVRFovPort} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
	 * visible to the struct instance and vice versa.
	 *
	 * <p>The created instance holds a strong reference to the container object.</p>
	 */
	public OVRFovPort(ByteBuffer container) {
		this(memAddress(container), checkContainer(container, SIZEOF));
	}
	@Override
	public int sizeof() { return SIZEOF; }
	/** Returns the value of the {@code UpTan} field. */
	public float UpTan() { return nUpTan(address()); }
	/** Returns the value of the {@code DownTan} field. */
	public float DownTan() { return nDownTan(address()); }
	/** Returns the value of the {@code LeftTan} field. */
	public float LeftTan() { return nLeftTan(address()); }
	/** Returns the value of the {@code RightTan} field. */
	public float RightTan() { return nRightTan(address()); }
	/** Sets the specified value to the {@code UpTan} field. */
	public OVRFovPort UpTan(float value) { nUpTan(address(), value); return this; }
	/** Sets the specified value to the {@code DownTan} field. */
	public OVRFovPort DownTan(float value) { nDownTan(address(), value); return this; }
	/** Sets the specified value to the {@code LeftTan} field. */
	public OVRFovPort LeftTan(float value) { nLeftTan(address(), value); return this; }
	/** Sets the specified value to the {@code RightTan} field. */
	public OVRFovPort RightTan(float value) { nRightTan(address(), value); return this; }
	/** Initializes this struct with the specified values. */
	public OVRFovPort set(
		float UpTan,
		float DownTan,
		float LeftTan,
		float RightTan
	) {
		UpTan(UpTan);
		DownTan(DownTan);
		LeftTan(LeftTan);
		RightTan(RightTan);
		return this;
	}
	/** Unsafe version of {@link #set(OVRFovPort) set}. */
	public OVRFovPort nset(long struct) {
		memCopy(struct, address(), SIZEOF);
		return this;
	}
	/**
	 * Copies the specified struct data to this struct.
	 *
	 * @param src the source struct
	 *
	 * @return this struct
	 */
	public OVRFovPort set(OVRFovPort src) {
		return nset(src.address());
	}
	// -----------------------------------
	/** Returns a new {@link OVRFovPort} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
	public static OVRFovPort malloc() {
		return create(nmemAlloc(SIZEOF));
	}
	/** Returns a new {@link OVRFovPort} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
	public static OVRFovPort calloc() {
		return create(nmemCalloc(1, SIZEOF));
	}
	/** Returns a new {@link OVRFovPort} instance allocated with {@link BufferUtils}. */
	public static OVRFovPort create() {
		return new OVRFovPort(BufferUtils.createByteBuffer(SIZEOF));
	}
	/** Returns a new {@link OVRFovPort} instance for the specified memory address or {@code null} if the address is {@code NULL}. */
	public static OVRFovPort create(long address) {
		return address == NULL ? null : new OVRFovPort(address, null);
	}
	/**
	 * Returns a new {@link OVRFovPort.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer malloc(int capacity) {
		return create(nmemAlloc(capacity * SIZEOF), capacity);
	}
	/**
	 * Returns a new {@link OVRFovPort.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer calloc(int capacity) {
		return create(nmemCalloc(capacity, SIZEOF), capacity);
	}
	/**
	 * Returns a new {@link OVRFovPort.Buffer} instance allocated with {@link BufferUtils}.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer create(int capacity) {
		return new Buffer(BufferUtils.createByteBuffer(capacity * SIZEOF));
	}
	/**
	 * Create a {@link OVRFovPort.Buffer} instance at the specified memory.
	 *
	 * @param address the memory address
	 * @param capacity the buffer capacity
	 */
	public static Buffer create(long address, int capacity) {
		return address == NULL ? null : new Buffer(address, null, -1, 0, capacity, capacity);
	}
	// -----------------------------------
	/** Returns a new {@link OVRFovPort} instance allocated on the thread-local {@link MemoryStack}. */
	public static OVRFovPort mallocStack() {
		return mallocStack(stackGet());
	}
	/** Returns a new {@link OVRFovPort} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero. */
	public static OVRFovPort callocStack() {
		return callocStack(stackGet());
	}
	/**
	 * Returns a new {@link OVRFovPort} instance allocated on the specified {@link MemoryStack}.
	 *
	 * @param stack the stack from which to allocate
	 */
	public static OVRFovPort mallocStack(MemoryStack stack) {
		return create(stack.nmalloc(ALIGNOF, SIZEOF));
	}
	/**
	 * Returns a new {@link OVRFovPort} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
	 *
	 * @param stack the stack from which to allocate
	 */
	public static OVRFovPort callocStack(MemoryStack stack) {
		return create(stack.ncalloc(ALIGNOF, 1, SIZEOF));
	}
	/**
	 * Returns a new {@link OVRFovPort.Buffer} instance allocated on the thread-local {@link MemoryStack}.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer mallocStack(int capacity) {
		return mallocStack(capacity, stackGet());
	}
	/**
	 * Returns a new {@link OVRFovPort.Buffer} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer callocStack(int capacity) {
		return callocStack(capacity, stackGet());
	}
	/**
	 * Returns a new {@link OVRFovPort.Buffer} instance allocated on the specified {@link MemoryStack}.
	 *
	 * @param stack the stack from which to allocate
	 * @param capacity the buffer capacity
	 */
	public static Buffer mallocStack(int capacity, MemoryStack stack) {
		return create(stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
	}
	/**
	 * Returns a new {@link OVRFovPort.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
	 *
	 * @param stack the stack from which to allocate
	 * @param capacity the buffer capacity
	 */
	public static Buffer callocStack(int capacity, MemoryStack stack) {
		return create(stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
	}
	// ----------------------------------- unsafe accessors operating on raw addresses
	/** Unsafe version of {@link #UpTan}. */
	public static float nUpTan(long struct) { return memGetFloat(struct + OVRFovPort.UPTAN); }
	/** Unsafe version of {@link #DownTan}. */
	public static float nDownTan(long struct) { return memGetFloat(struct + OVRFovPort.DOWNTAN); }
	/** Unsafe version of {@link #LeftTan}. */
	public static float nLeftTan(long struct) { return memGetFloat(struct + OVRFovPort.LEFTTAN); }
	/** Unsafe version of {@link #RightTan}. */
	public static float nRightTan(long struct) { return memGetFloat(struct + OVRFovPort.RIGHTTAN); }
	/** Unsafe version of {@link #UpTan(float) UpTan}. */
	public static void nUpTan(long struct, float value) { memPutFloat(struct + OVRFovPort.UPTAN, value); }
	/** Unsafe version of {@link #DownTan(float) DownTan}. */
	public static void nDownTan(long struct, float value) { memPutFloat(struct + OVRFovPort.DOWNTAN, value); }
	/** Unsafe version of {@link #LeftTan(float) LeftTan}. */
	public static void nLeftTan(long struct, float value) { memPutFloat(struct + OVRFovPort.LEFTTAN, value); }
	/** Unsafe version of {@link #RightTan(float) RightTan}. */
	public static void nRightTan(long struct, float value) { memPutFloat(struct + OVRFovPort.RIGHTTAN, value); }
	// -----------------------------------
	/** An array of {@link OVRFovPort} structs. */
	public static class Buffer extends StructBuffer<OVRFovPort, Buffer> implements NativeResource {
		/**
		 * Creates a new {@link OVRFovPort.Buffer} instance backed by the specified container.
		 *
		 * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
		 * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
		 * by {@link OVRFovPort#SIZEOF}, and its mark will be undefined.
		 *
		 * <p>The created buffer instance holds a strong reference to the container object.</p>
		 */
		public Buffer(ByteBuffer container) {
			super(container, container.remaining() / SIZEOF);
		}
		Buffer(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
			super(address, container, mark, pos, lim, cap);
		}
		@Override
		protected Buffer self() {
			return this;
		}
		@Override
		protected Buffer newBufferInstance(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
			return new Buffer(address, container, mark, pos, lim, cap);
		}
		@Override
		protected OVRFovPort newInstance(long address) {
			return new OVRFovPort(address, container);
		}
		@Override
		protected int sizeof() {
			return SIZEOF;
		}
		/** Returns the value of the {@code UpTan} field. */
		public float UpTan() { return OVRFovPort.nUpTan(address()); }
		/** Returns the value of the {@code DownTan} field. */
		public float DownTan() { return OVRFovPort.nDownTan(address()); }
		/** Returns the value of the {@code LeftTan} field. */
		public float LeftTan() { return OVRFovPort.nLeftTan(address()); }
		/** Returns the value of the {@code RightTan} field. */
		public float RightTan() { return OVRFovPort.nRightTan(address()); }
		/** Sets the specified value to the {@code UpTan} field. */
		public OVRFovPort.Buffer UpTan(float value) { OVRFovPort.nUpTan(address(), value); return this; }
		/** Sets the specified value to the {@code DownTan} field. */
		public OVRFovPort.Buffer DownTan(float value) { OVRFovPort.nDownTan(address(), value); return this; }
		/** Sets the specified value to the {@code LeftTan} field. */
		public OVRFovPort.Buffer LeftTan(float value) { OVRFovPort.nLeftTan(address(), value); return this; }
		/** Sets the specified value to the {@code RightTan} field. */
		public OVRFovPort.Buffer RightTan(float value) { OVRFovPort.nRightTan(address(), value); return this; }
	}
}
| |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.spi.impl.operationexecutor.slowoperationdetector;
import com.hazelcast.config.Config;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.internal.management.TimedMemberStateFactory;
import com.hazelcast.internal.json.JsonArray;
import com.hazelcast.internal.json.JsonObject;
import com.hazelcast.map.EntryBackupProcessor;
import com.hazelcast.map.EntryProcessor;
import com.hazelcast.spi.Operation;
import com.hazelcast.spi.impl.operationservice.InternalOperationService;
import com.hazelcast.spi.impl.operationservice.impl.OperationServiceImpl;
import com.hazelcast.spi.properties.GroupProperty;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastTestSupport;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static com.hazelcast.instance.TestUtil.getHazelcastInstanceImpl;
import static java.lang.String.format;
import static java.lang.String.valueOf;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Shared test support for slow-operation-detector tests: cluster/map setup helpers, reflective
 * access to the detector's internal logs, and assertion helpers on those logs.
 */
abstract class SlowOperationDetectorAbstractTest extends HazelcastTestSupport {
    private static final String DEFAULT_KEY = "key";
    private static final String DEFAULT_VALUE = "value";
    // Tracks all created SlowEntryProcessors so tests can await their completion.
    private final List<SlowEntryProcessor> entryProcessors = new ArrayList<>();
    /** Creates a single-node cluster with the given slow-operation threshold (in millis). */
    HazelcastInstance getSingleNodeCluster(int slowOperationThresholdMillis) {
        Config config = new Config();
        config.setProperty(GroupProperty.SLOW_OPERATION_DETECTOR_THRESHOLD_MILLIS.getName(),
                valueOf(slowOperationThresholdMillis));
        return createHazelcastInstance(config);
    }
    /** Returns a random-named map pre-populated with the single default entry. */
    static IMap<String, String> getMapWithSingleElement(HazelcastInstance instance) {
        IMap<String, String> map = instance.getMap(randomMapName());
        map.put(DEFAULT_KEY, DEFAULT_VALUE);
        return map;
    }
    static void executeOperation(HazelcastInstance instance, Operation operation) {
        getOperationService(instance).execute(operation);
    }
    static void executeEntryProcessor(IMap<String, String> map, EntryProcessor<String, String> entryProcessor) {
        map.executeOnKey(DEFAULT_KEY, entryProcessor);
    }
    /** Shuts down invocations and the operation executor; a no-op for a {@code null} instance. */
    static void shutdownOperationService(HazelcastInstance instance) {
        if (instance == null) {
            return;
        }
        OperationServiceImpl operationService = (OperationServiceImpl) getOperationService(instance);
        operationService.shutdownInvocations();
        operationService.shutdownOperationExecutor();
    }
    static Collection<SlowOperationLog.Invocation> getInvocations(SlowOperationLog log) {
        // "invocations" is a private field of SlowOperationLog; read reflectively.
        Map<Integer, SlowOperationLog.Invocation> invocationMap = getFieldFromObject(log, "invocations");
        return invocationMap.values();
    }
    static int getDefaultPartitionId(HazelcastInstance instance) {
        return instance.getPartitionService().getPartition(DEFAULT_KEY).getPartitionId();
    }
    static JsonArray getSlowOperationLogsJsonArray(HazelcastInstance instance) {
        return getOperationStats(instance).get("slowOperations").asArray();
    }
    /** Builds the Management Center JSON view of the instance's operation stats. */
    static JsonObject getOperationStats(HazelcastInstance instance) {
        TimedMemberStateFactory timedMemberStateFactory = new TimedMemberStateFactory(getHazelcastInstanceImpl(instance));
        return timedMemberStateFactory.createTimedMemberState().getMemberState().getOperationStats().toJson();
    }
    /**
     * Waits until exactly {@code expected} slow-operation logs exist, then returns a fresh
     * snapshot of them.
     */
    static Collection<SlowOperationLog> getSlowOperationLogsAndAssertNumberOfSlowOperationLogs(final HazelcastInstance instance,
                                                                                              final int expected) {
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                Collection<SlowOperationLog> logs = getSlowOperationLogs(instance);
                assertNumberOfSlowOperationLogs(logs, expected);
            }
        });
        return getSlowOperationLogs(instance);
    }
    static Collection<SlowOperationLog> getSlowOperationLogs(HazelcastInstance instance) {
        InternalOperationService operationService = getOperationService(instance);
        SlowOperationDetector slowOperationDetector = getFieldFromObject(operationService, "slowOperationDetector");
        Map<Integer, SlowOperationLog> slowOperationLogs = getFieldFromObject(slowOperationDetector, "slowOperationLogs");
        return slowOperationLogs.values();
    }
    static void assertNumberOfSlowOperationLogs(Collection<SlowOperationLog> logs, int expected) {
        assertEqualsStringFormat("Expected %d slow operation logs, but was %d.", expected, logs.size());
    }
    static void assertTotalInvocations(SlowOperationLog log, int totalInvocations) {
        assertEqualsStringFormat("Expected %d total invocations, but was %d. Log: " + log.createDTO().toJson(),
                totalInvocations, log.totalInvocations.get());
    }
    static void assertEntryProcessorOperation(SlowOperationLog log) {
        String operation = log.operation;
        assertEqualsStringFormat("Expected operation %s, but was %s",
                "com.hazelcast.map.impl.operation.PartitionWideEntryWithPredicateOperation", operation);
    }
    // The "$" prefix matches the nested-class separator in the fully qualified name.
    static void assertOperationContainsClassName(SlowOperationLog log, String className) {
        String operation = log.operation;
        assertTrue(format("Expected operation to contain '%s'%n%s", className, operation), operation.contains("$" + className));
    }
    static void assertStackTraceContainsClassName(SlowOperationLog log, String className) {
        String stackTrace = log.stackTrace;
        assertTrue(format("Expected stacktrace to contain className '%s'%n%s", className, stackTrace),
                stackTrace.contains("$" + className + "."));
    }
    static void assertStackTraceNotContainsClassName(SlowOperationLog log, String className) {
        String stackTrace = log.stackTrace;
        assertFalse(format("Expected stacktrace to not contain className '%s'%n%s", className, stackTrace),
                stackTrace.contains(className));
    }
    static void assertJSONContainsClassName(JsonObject jsonObject, String className) {
        String stackTrace = jsonObject.get("stackTrace").toString();
        assertTrue(format("JSON for Management Center should contain stackTrace with class name '%s'%n%s", className, stackTrace),
                stackTrace.contains("$" + className + "."));
    }
    static void assertJSONContainsClassNameJustOnce(JsonObject jsonObject1, JsonObject jsonObject2, String className) {
        boolean firstClassFound = jsonObject1.get("stackTrace").toString().contains("$" + className + ".");
        boolean secondClassFound = jsonObject2.get("stackTrace").toString().contains("$" + className + ".");
        // XOR: the class name must appear in exactly one of the two stack traces.
        assertTrue(format("JSON for Management Center should contain stackTrace with class name '%s' exactly once", className),
                firstClassFound ^ secondClassFound);
    }
    static void assertInvocationDurationBetween(SlowOperationLog.Invocation invocation, int min, int max) {
        Integer duration = invocation.createDTO(0).durationMs;
        assertTrue(format("Duration of invocation should be >= %d, but was %d", min, duration), duration >= min);
        assertTrue(format("Duration of invocation should be <= %d, but was %d", max, duration), duration <= max);
    }
    /** Creates a SlowEntryProcessor and registers it for {@link #awaitSlowEntryProcessors()}. */
    SlowEntryProcessor getSlowEntryProcessor(int sleepSeconds) {
        SlowEntryProcessor entryProcessor = new SlowEntryProcessor(sleepSeconds);
        entryProcessors.add(entryProcessor);
        return entryProcessor;
    }
    void awaitSlowEntryProcessors() {
        for (SlowEntryProcessor slowEntryProcessor : entryProcessors) {
            slowEntryProcessor.await();
        }
    }
    @SuppressWarnings("unchecked")
    private static <E> E getFieldFromObject(Object object, String fieldName) {
        try {
            Field field = object.getClass().getDeclaredField(fieldName);
            field.setAccessible(true);
            return (E) field.get(object);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
    /** Entry processor that sleeps for a fixed time to trigger slow-operation detection. */
    static class SlowEntryProcessor extends CountDownLatchHolder implements EntryProcessor<String, String> {
        // NOTE(review): not thread-safe; assumes processors are created from a single test thread.
        static int globalInstanceCounter;
        final int instance = ++globalInstanceCounter;
        final int sleepSeconds;
        SlowEntryProcessor(int sleepSeconds) {
            this.sleepSeconds = sleepSeconds;
        }
        @Override
        public Object process(Map.Entry<String, String> entry) {
            sleepSeconds(sleepSeconds);
            done();
            return null;
        }
        @Override
        public EntryBackupProcessor<String, String> getBackupProcessor() {
            return null;
        }
        @Override
        public String toString() {
            return "SlowEntryProcessor{"
                    + "instance=" + instance
                    + ", sleepSeconds=" + sleepSeconds
                    + '}';
        }
    }
    static class SlowEntryProcessorChild extends SlowEntryProcessor {
        SlowEntryProcessorChild(int sleepSeconds) {
            super(sleepSeconds);
        }
        @Override
        public Object process(Map.Entry<String, String> entry) {
            // not using sleepSeconds() here to have some variants in the stack traces
            try {
                TimeUnit.SECONDS.sleep(sleepSeconds);
            } catch (InterruptedException ignored) {
            }
            done();
            return null;
        }
        @Override
        public String toString() {
            return "SlowEntryProcessorChild{"
                    + "instance=" + instance
                    + ", sleepSeconds=" + sleepSeconds
                    + '}';
        }
    }
    /** Operation that signals completion through a latch so tests can join on it. */
    abstract static class JoinableOperation extends Operation {
        private final CountDownLatch completedLatch = new CountDownLatch(1);
        void done() {
            completedLatch.countDown();
        }
        void join() {
            try {
                completedLatch.await();
            } catch (InterruptedException e) {
                ignore(e);
            }
        }
    }
    /** Small base class providing a one-shot done/await latch. */
    abstract static class CountDownLatchHolder {
        private final CountDownLatch latch = new CountDownLatch(1);
        void done() {
            latch.countDown();
        }
        void await() {
            try {
                latch.await();
            } catch (InterruptedException e) {
                ignore(e);
            }
        }
    }
}
| |
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtoolsdriver.safari;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.MoreObjects;
import com.google.common.base.Verify;
import com.google.common.base.VerifyException;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.iosdevicecontrol.util.FluentLogger;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.Monitor;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.iosdevicecontrol.util.EllipsisFormat;
import com.google.iosdevicecontrol.webinspector.ApplicationConnectedMessage;
import com.google.iosdevicecontrol.webinspector.ApplicationDisconnectedMessage;
import com.google.iosdevicecontrol.webinspector.ApplicationSentDataMessage;
import com.google.iosdevicecontrol.webinspector.ApplicationSentListingMessage;
import com.google.iosdevicecontrol.webinspector.ApplicationUpdatedMessage;
import com.google.iosdevicecontrol.webinspector.ForwardGetListingMessage;
import com.google.iosdevicecontrol.webinspector.ForwardSocketDataMessage;
import com.google.iosdevicecontrol.webinspector.ForwardSocketSetupMessage;
import com.google.iosdevicecontrol.webinspector.InspectorApplication;
import com.google.iosdevicecontrol.webinspector.InspectorDriver;
import com.google.iosdevicecontrol.webinspector.InspectorMessage;
import com.google.iosdevicecontrol.webinspector.InspectorPage;
import com.google.iosdevicecontrol.webinspector.ReportConnectedApplicationListMessage;
import com.google.iosdevicecontrol.webinspector.ReportConnectedDriverListMessage;
import com.google.iosdevicecontrol.webinspector.ReportIdentifierMessage;
import com.google.iosdevicecontrol.webinspector.WebInspector;
import java.io.Closeable;
import java.io.IOException;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BooleanSupplier;
import java.util.function.Consumer;
import java.util.function.Supplier;
import javax.annotation.concurrent.GuardedBy;
import javax.json.JsonObject;
/** The state of the applications available and their page listings. */
final class InspectorMessenger implements Closeable {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();
  /** Used to truncate inspector messages in the log. */
  private static final EllipsisFormat MESSAGE_FORMAT = new EllipsisFormat(2500);
  /** The sender value seems to be an arbitrary UUID, so choose a fixed one here. */
  private static final String SENDER_UUID = UUID.randomUUID().toString();
  private final WebInspector inspector;
  // Identifies this driver connection in all inspector messages.
  private final String connectionId = UUID.randomUUID().toString();
  // Listener for devtools events; set via setEventListener().
  private final AtomicReference<Consumer<JsonObject>> devtoolsListener = new AtomicReference<>();
  private final ScheduledExecutorService executor;
  // Handle on the periodic receiveMessage() polling task.
  private final Future<?> receiveFuture;
  // Guards appIdToListings.
  private final Monitor monitor = new Monitor();
  @GuardedBy("monitor")
  private final BiMap<String, AppListing> appIdToListings = HashBiMap.create();
  @GuardedBy("this")
  private Optional<PageContext> activePageContext = Optional.empty();
InspectorMessenger(WebInspector inspector) {
this(inspector, Executors.newSingleThreadScheduledExecutor());
}
@VisibleForTesting
InspectorMessenger(WebInspector inspector, ScheduledExecutorService executor) {
this.inspector = checkNotNull(inspector);
this.executor = checkNotNull(executor);
receiveFuture = executor.scheduleWithFixedDelay(this::receiveMessage, 0, 50, MILLISECONDS);
}
void setEventListener(Consumer<JsonObject> listener) {
devtoolsListener.set(checkNotNull(listener));
}
synchronized OptionalInt activePageId() {
return activePageContext.isPresent()
? OptionalInt.of(activePageContext.get().pageId)
: OptionalInt.empty();
}
void sendConnect() throws IOException {
sendMessage(ReportIdentifierMessage.builder().connectionId(connectionId));
}
synchronized boolean sendSwitchTo(int pageId) throws IOException {
PageContext activePageContext = checkActivePageContext();
return sendSwitchTo(activePageContext.appId, pageId);
}
synchronized boolean sendSwitchTo(String appId, int pageId) throws IOException {
// Important that a ForwardSocketSetup message is never sent to an already setup socket,
// because this sometimes causes the application to disconnect, at least on iOS 9.
if (activePageContext.isPresent()
&& activePageContext.get().appId.equals(appId)
&& activePageContext.get().pageId == pageId) {
return false;
}
sendMessage(
ForwardSocketSetupMessage.builder()
.applicationId(appId)
.automaticallyPause(false)
.connectionId(connectionId)
.pageId(pageId)
.sender(SENDER_UUID));
activePageContext = Optional.of(new PageContext(appId, pageId));
return true;
}
synchronized void sendListPages() throws IOException {
String activeAppId = checkActivePageContext().appId;
monitor.enter();
try {
appIdToListings.computeIfPresent(
activeAppId, (unused, appListing) -> new AppListing(appListing.app, Optional.empty()));
} finally {
monitor.leave();
}
sendMessage(
ForwardGetListingMessage.builder().applicationId(activeAppId).connectionId(connectionId));
}
synchronized void sendCommand(JsonObject command) throws IOException {
PageContext activePageContext = checkActivePageContext();
checkNotNull(devtoolsListener.get());
sendMessage(
ForwardSocketDataMessage.builder()
.applicationId(activePageContext.appId)
.connectionId(connectionId)
.pageId(activePageContext.pageId)
.sender(SENDER_UUID)
.socketData(command));
}
@VisibleForTesting
@SuppressWarnings("GuardedBy")
Optional<ImmutableSet<AppListing>> getAllAppListings(String hostBundleId) {
Set<AppListing> listings = appIdToListings.values();
ImmutableSet<String> hostAppIds =
listings
.stream()
.filter(appListing -> appListing.app.applicationBundleId().equals(hostBundleId))
.map(appListing -> appListing.app.applicationId())
.collect(ImmutableSet.toImmutableSet());
Verify.verify(hostAppIds.size() <= 1, "multiple matching host apps: %s", hostAppIds);
if (!hostAppIds.isEmpty()) {
String hostAppId = Iterables.getOnlyElement(hostAppIds);
ImmutableSet<AppListing> childListings =
listings
.stream()
.filter(
appListing ->
hostAppId.equals(appListing.app.optionalHostApplicationId().orNull()))
.collect(ImmutableSet.toImmutableSet());
if (!childListings.isEmpty()
&& childListings.stream().allMatch(appListing -> appListing.listing.isPresent())) {
return Optional.of(childListings);
}
}
return Optional.empty();
}
ImmutableList<InspectorPage> awaitPages() throws IOException {
return await(this::getPages);
}
@VisibleForTesting
@SuppressWarnings("GuardedBy")
synchronized Optional<ImmutableList<InspectorPage>> getPages() {
PageContext activePageContext = checkActivePageContext();
AppListing appListing = appIdToListings.get(activePageContext.appId);
return appListing == null ? Optional.empty() : appListing.listing;
}
ImmutableSet<AppListing> awaitAllAppListings(String hostBundleId) throws IOException {
return await(() -> getAllAppListings(hostBundleId));
}
/** Waits for the supplier to return a present value. */
private <T> T await(Supplier<Optional<T>> compute) throws IOException {
AtomicReference<T> result = new AtomicReference<>();
BooleanSupplier condition =
() -> {
Optional<T> value = compute.get();
if (value.isPresent()) {
result.set(value.get());
}
return value.isPresent();
};
try {
monitor.enterWhen(monitor.newGuard(condition));
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IOException(e);
}
try {
return Verify.verifyNotNull(result.get());
} finally {
monitor.leave();
}
}
/** Stops receiving messages from the inspector and closes it. */
@Override
public void close() throws IOException {
try {
// Canceling the future marks the future done and the messenger "closed".
// If it can't be cancelled, it must have terminated prematurely, so raise an exception.
if (!receiveFuture.cancel(false)) {
Futures.getChecked(receiveFuture, IOException.class);
}
} finally {
try {
MoreExecutors.shutdownAndAwaitTermination(executor, 5, SECONDS);
} finally {
inspector.close();
}
}
}
/** A pair of an application and a page listing. */
static final class AppListing {
final InspectorApplication app;
final Optional<ImmutableList<InspectorPage>> listing;
private AppListing(InspectorApplication app, Optional<ImmutableList<InspectorPage>> listing) {
this.app = app;
this.listing = listing;
}
@Override
public String toString() {
return MoreObjects.toStringHelper("AppListing")
.add("app", app)
.add("listing", listing)
.toString();
}
}
private boolean isClosed() {
return receiveFuture.isDone();
}
private void sendMessage(InspectorMessage.Builder builder) throws IOException {
InspectorMessage message = builder.build();
logger.atInfo().log("Message sent: %s", formatMessage(message));
inspector.sendMessage(message);
}
private void receiveMessage() {
try {
// Receive a plist over the socket. On EOF, if the thread is marked interrupted, that means
// the socket was intentionally closed by the #close method; otherwise do a full close now.
Optional<InspectorMessage> message = inspector.receiveMessage();
if (!message.isPresent()) {
if (!isClosed()) {
logger.atSevere().log("Web inspector closed unexpectedly.");
close();
}
return;
}
onMessageReceived(message.get());
} catch (Throwable e) {
logger.atWarning().withCause(e).log();
}
}
private void onMessageReceived(InspectorMessage message) {
logger.atInfo().log("Message received: %s", formatMessage(message));
switch (message.selector()) {
case APPLICATION_CONNECTED:
addApplication(((ApplicationConnectedMessage) message).asApplication());
return;
case APPLICATION_DISCONNECTED:
String disconnectedId = ((ApplicationDisconnectedMessage) message).applicationId();
synchronized (this) {
if (activePageContext.isPresent()
&& disconnectedId.equals(activePageContext.get().appId)) {
activePageContext = Optional.empty();
}
}
monitor.enter();
try {
appIdToListings.remove(disconnectedId);
} finally {
monitor.leave();
}
return;
case APPLICATION_SENT_DATA:
devtoolsListener.get().accept(((ApplicationSentDataMessage) message).messageData());
return;
case APPLICATION_SENT_LISTING:
ApplicationSentListingMessage listingMsg = (ApplicationSentListingMessage) message;
String appId = listingMsg.applicationId();
monitor.enter();
try {
AppListing curListing = appIdToListings.get(appId);
Verify.verifyNotNull(curListing, "received listing for unknown app: %s", appId);
AppListing newListing = new AppListing(curListing.app, Optional.of(listingMsg.listing()));
appIdToListings.put(appId, newListing);
} finally {
monitor.leave();
}
return;
case APPLICATION_UPDATED:
addApplication(((ApplicationUpdatedMessage) message).asApplication());
return;
case REPORT_CONNECTED_APPLICATION_LIST:
ImmutableList<InspectorApplication> apps =
((ReportConnectedApplicationListMessage) message).applicationDictionary();
for (InspectorApplication app : apps) {
addApplication(app);
}
return;
case REPORT_CONNECTED_DRIVER_LIST:
// We've never seen one of these messages before where the driver dictionary was
// populated, nor do we know what it means, so let's be alerted the first time it happens.
ImmutableList<InspectorDriver> drivers =
((ReportConnectedDriverListMessage) message).driverDictionary();
Verify.verify(drivers.isEmpty());
return;
case REPORT_SETUP:
// Intentionally ignore that the connection is setup.
return;
default:
throw new VerifyException("Did not expect to receive message: " + message);
}
}
private void addApplication(InspectorApplication app) {
monitor.enter();
try {
appIdToListings.compute(
app.applicationId(),
(appId, appListing) ->
new AppListing(app, appListing == null ? Optional.empty() : appListing.listing));
} finally {
monitor.leave();
}
}
private synchronized PageContext checkActivePageContext() {
checkState(activePageContext.isPresent());
return activePageContext.get();
}
private static final class PageContext {
private final String appId;
private final int pageId;
private PageContext(String appId, int pageId) {
this.appId = checkNotNull(appId);
this.pageId = pageId;
}
}
private static Object formatMessage(InspectorMessage message) {
return new Object() {
@Override
public String toString() {
return MESSAGE_FORMAT.format(message.toString());
}
};
}
}
| |
package mb.nabl2.terms.matching;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nullable;
import org.metaborg.util.functions.Action2;
import org.metaborg.util.functions.Function0;
import org.metaborg.util.functions.Function1;
import org.metaborg.util.tuple.Tuple2;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import mb.nabl2.terms.IAttachments;
import mb.nabl2.terms.ITerm;
import mb.nabl2.terms.ITermVar;
import mb.nabl2.terms.substitution.IRenaming;
import mb.nabl2.terms.substitution.ISubstitution;
import mb.nabl2.terms.substitution.PersistentSubstitution;
import mb.nabl2.terms.unification.Unifiers;
import mb.nabl2.terms.unification.u.IUnifier;
/**
 * A term pattern that can be matched against {@link ITerm}s modulo a unifier.
 *
 * <p>Subclasses implement {@link #matchTerm}; this base class provides the public matching
 * entry points, conversion of patterns back to terms, and a generality (left-right) ordering
 * on patterns.
 */
public abstract class Pattern implements Serializable {
    private static final long serialVersionUID = 1L;

    private final IAttachments attachments;

    protected Pattern(IAttachments attachments) {
        this.attachments = attachments;
    }

    public IAttachments getAttachments() {
        return attachments;
    }

    /** Returns the variables occurring in this pattern. */
    public abstract Set<ITermVar> getVars();

    /** Returns whether the top of this pattern is a constructed term (not a variable). */
    public abstract boolean isConstructed();

    /** Matches against a ground term (empty unifier); absent result means no match. */
    public Optional<ISubstitution.Immutable> match(ITerm term) {
        return match(term, Unifiers.Immutable.of()).match(t -> t, v -> Optional.empty());
    }

    /**
     * Matches against a term under the given unifier.
     *
     * @return the substitution on success, empty on mismatch, or "not instantiated" with the
     *         unifier variables the match got stuck on
     */
    public MaybeNotInstantiated<Optional<ISubstitution.Immutable>> match(ITerm term, IUnifier.Immutable unifier) {
        final ISubstitution.Transient subst = PersistentSubstitution.Transient.of();
        final List<ITermVar> stuckVars = Lists.newArrayList();
        // Residual equalities are not solved here; just record which vars blocked the match.
        final Eqs eqs = new Eqs() {
            @Override public void add(ITermVar var, ITerm pattern) {
                stuckVars.add(var);
            }

            @Override public void add(ITermVar var, Pattern pattern) {
                stuckVars.add(var);
            }
        };
        if(!matchTerm(term, subst, unifier, eqs)) {
            return MaybeNotInstantiated.ofResult(Optional.empty());
        } else if(!stuckVars.isEmpty()) {
            return MaybeNotInstantiated.ofNotInstantiated(stuckVars);
        } else {
            return MaybeNotInstantiated.ofResult(Optional.of(subst.freeze()));
        }
    }

    /**
     * Match terms against a pattern and generate additional equalities that result from the match.
     *
     * Fresh variables are generated for unmatched variables in the patterns. As a result, the resulting substitution
     * has entries for all the variables in the patterns, and no pattern variables escape in the equalities.
     */
    public Optional<MatchResult> matchWithEqs(ITerm term, IUnifier.Immutable unifier, VarProvider fresh) {
        // substitution from pattern variables to unifier variables
        final ISubstitution.Transient _subst = PersistentSubstitution.Transient.of();
        // equalities between unifier terms
        final List<Tuple2<ITermVar, ITerm>> termEqs = Lists.newArrayList();
        // equalities between unifier variables and patterns
        final List<Tuple2<ITermVar, Pattern>> patternEqs = Lists.newArrayList();
        // match
        final Eqs eqs = new Eqs() {
            @Override public void add(ITermVar var, ITerm term) {
                termEqs.add(Tuple2.of(var, term));
            }

            @Override public void add(ITermVar var, Pattern pattern) {
                patternEqs.add(Tuple2.of(var, pattern));
            }
        };
        if(!matchTerm(term, _subst, unifier, eqs)) {
            return Optional.empty();
        }
        // generate fresh unifier variables for unmatched pattern variables
        final Set<ITermVar> freeVars = Sets.difference(getVars(), _subst.domainSet()).immutableCopy();
        for(ITermVar v : freeVars) {
            _subst.put(v, fresh.freshVar(v));
        }
        final ISubstitution.Immutable subst = _subst.freeze();
        // create equalities between unifier terms from pattern equalities
        final ImmutableSet.Builder<ITermVar> stuckVars = ImmutableSet.builder();
        final ImmutableList.Builder<Tuple2<ITerm, ITerm>> allEqs = ImmutableList.builder();
        for(Tuple2<ITermVar, ITerm> termEq : termEqs) {
            final ITermVar leftVar = termEq._1();
            final ITerm rightTerm = termEq._2();
            stuckVars.add(leftVar);
            allEqs.add(Tuple2.of(leftVar, rightTerm));
        }
        for(Tuple2<ITermVar, Pattern> patternEq : patternEqs) {
            final ITermVar leftVar = patternEq._1();
            // Convert the residual pattern to a term; inner equalities are applied under subst
            // so no pattern variables escape.
            final ITerm rightTerm = patternEq._2().asTerm((v, t) -> {
                allEqs.add(Tuple2.of(subst.apply(v), subst.apply(t)));
            }, (v) -> v.orElseGet(() -> fresh.freshWld()));
            stuckVars.add(leftVar);
            allEqs.add(Tuple2.of(leftVar, subst.apply(rightTerm)));
        }
        return Optional.of(new MatchResult(subst, stuckVars.build(), allEqs.build()));
    }

    protected abstract boolean matchTerm(ITerm term, ISubstitution.Transient subst, IUnifier.Immutable unifier,
            Eqs eqs);

    /** Matches patterns pointwise against terms; fails if the arities differ. */
    protected static boolean matchTerms(final Iterable<Pattern> patterns, final Iterable<ITerm> terms,
            ISubstitution.Transient subst, IUnifier.Immutable unifier, Eqs eqs) {
        final Iterator<Pattern> itPattern = patterns.iterator();
        final Iterator<ITerm> itTerm = terms.iterator();
        while(itPattern.hasNext()) {
            if(!itTerm.hasNext()) {
                return false;
            }
            if(!itPattern.next().matchTerm(itTerm.next(), subst, unifier, eqs)) {
                return false;
            }
        }
        if(itTerm.hasNext()) {
            return false;
        }
        return true;
    }

    /** Applies a renaming to the variables in this pattern. */
    public abstract Pattern apply(IRenaming subst);

    /** Replaces wildcards with fresh variables supplied by {@code fresh}. */
    public abstract Pattern eliminateWld(Function0<ITermVar> fresh);

    /** Converts this pattern to a term plus the equalities induced by non-linear occurrences. */
    public Tuple2<ITerm, List<Tuple2<ITermVar, ITerm>>> asTerm(Function1<Optional<ITermVar>, ITermVar> fresh) {
        final ImmutableList.Builder<Tuple2<ITermVar, ITerm>> eqs = ImmutableList.builder();
        final ITerm term = asTerm((v, t) -> {
            eqs.add(Tuple2.of(v, t));
        }, fresh);
        return Tuple2.of(term, eqs.build());
    }

    protected abstract ITerm asTerm(Action2<ITermVar, ITerm> equalities, Function1<Optional<ITermVar>, ITermVar> fresh);

    /** Sink for residual equalities produced while matching. */
    protected interface Eqs {

        void add(ITermVar var, Pattern pattern);

        void add(ITermVar var, ITerm pattern);

    }

    ///////////////////////////////////////////////////////////////////////////
    // Pattern ordering                                                      //
    ///////////////////////////////////////////////////////////////////////////

    /**
     * Note: this comparator imposes orderings that are inconsistent with equals.
     */
    public static final LeftRightOrder leftRightOrdering = new LeftRightOrder();

    public static class LeftRightOrder {

        /**
         * Compares two patterns for generality.
         *
         * If two patterns are comparable, it return an integer indicating which patterns is more general.
         * <ul>
         * <li>If the first pattern is more specific than the second, c < 0.
         * <li>If the first pattern is more general than the second, c > 0.
         * <li>If both are equally general, c = 0. When patterns are non-linear, patterns may be declared equal even if
         * their not.
         * </ul>
         * When used as an ordering (e.g., using asComparator) patterns are sorted such that more general patterns
         * appear after more specific.
         *
         */
        public Optional<Integer> compare(Pattern p1, Pattern p2) {
            return Optional.ofNullable(compare(p1, p2, new AtomicInteger(), new HashMap<>(), new HashMap<>()));
        }

        // Recursive comparison; pos numbers variable binding positions left-to-right, and
        // vars1/vars2 record where each side's variables were first bound. Returns null when
        // the patterns are incomparable.
        private @Nullable Integer compare(Pattern p1, Pattern p2, AtomicInteger pos, Map<ITermVar, Integer> vars1,
                Map<ITermVar, Integer> vars2) {
            if(p1 instanceof ApplPattern) {
                final ApplPattern appl1 = (ApplPattern) p1;
                if(p2 instanceof ApplPattern) {
                    final ApplPattern appl2 = (ApplPattern) p2;
                    if(!appl1.getOp().equals(appl2.getOp())) {
                        return null;
                    }
                    if(appl1.getArgs().size() != appl2.getArgs().size()) {
                        return null;
                    }
                    // Compare arguments left-to-right until a difference or incomparability.
                    final Iterator<Pattern> it1 = appl1.getArgs().iterator();
                    final Iterator<Pattern> it2 = appl2.getArgs().iterator();
                    Integer c = 0;
                    while(c != null && c == 0 && it1.hasNext()) {
                        c = compare(it1.next(), it2.next(), pos, vars1, vars2);
                    }
                    return c;
                } else if(p2 instanceof PatternVar) {
                    final PatternVar var2 = (PatternVar) p2;
                    if(boundAt(var2, vars2) >= 0) {
                        return 1;
                    } else {
                        bind(var2.getVar(), vars2, pos.getAndIncrement());
                        return -1;
                    }
                } else if(p2 instanceof PatternAs) {
                    final PatternAs as2 = (PatternAs) p2;
                    bind(as2.getVar(), vars2, pos.get());
                    return compare(p1, as2.getPattern(), pos, vars1, vars2);
                } else {
                    return null;
                }
            } else if(p1 instanceof ConsPattern) {
                final ConsPattern cons1 = (ConsPattern) p1;
                if(p2 instanceof ConsPattern) {
                    final ConsPattern cons2 = (ConsPattern) p2;
                    Integer c = 0;
                    c = compare(cons1.getHead(), cons2.getHead(), pos, vars1, vars2);
                    if(c != null && c == 0) {
                        c = compare(cons1.getTail(), cons2.getTail(), pos, vars1, vars2);
                    }
                    return c;
                } else if(p2 instanceof PatternVar) {
                    final PatternVar var2 = (PatternVar) p2;
                    if(boundAt(var2, vars2) >= 0) {
                        return 1;
                    } else {
                        bind(var2.getVar(), vars2, pos.getAndIncrement());
                        return -1;
                    }
                } else if(p2 instanceof PatternAs) {
                    final PatternAs as2 = (PatternAs) p2;
                    bind(as2.getVar(), vars2, pos.get());
                    return compare(p1, as2.getPattern(), pos, vars1, vars2);
                } else {
                    return null;
                }
            } else if(p1 instanceof NilPattern) {
                if(p2 instanceof NilPattern) {
                    return 0;
                } else if(p2 instanceof PatternVar) {
                    final PatternVar var2 = (PatternVar) p2;
                    if(boundAt(var2, vars2) >= 0) {
                        return 1;
                    } else {
                        bind(var2.getVar(), vars2, pos.getAndIncrement());
                        return -1;
                    }
                } else if(p2 instanceof PatternAs) {
                    final PatternAs as2 = (PatternAs) p2;
                    bind(as2.getVar(), vars2, pos.get());
                    return compare(p1, as2.getPattern(), pos, vars1, vars2);
                } else {
                    return null;
                }
            } else if(p1 instanceof StringPattern) {
                final StringPattern string1 = (StringPattern) p1;
                if(p2 instanceof StringPattern) {
                    final StringPattern string2 = (StringPattern) p2;
                    return string1.getValue().equals(string2.getValue()) ? 0 : null;
                } else if(p2 instanceof PatternVar) {
                    final PatternVar var2 = (PatternVar) p2;
                    if(boundAt(var2, vars2) >= 0) {
                        return 1;
                    } else {
                        bind(var2.getVar(), vars2, pos.getAndIncrement());
                        return -1;
                    }
                } else if(p2 instanceof PatternAs) {
                    final PatternAs as2 = (PatternAs) p2;
                    bind(as2.getVar(), vars2, pos.get());
                    return compare(p1, as2.getPattern(), pos, vars1, vars2);
                } else {
                    return null;
                }
            } else if(p1 instanceof IntPattern) {
                final IntPattern integer1 = (IntPattern) p1;
                if(p2 instanceof IntPattern) {
                    final IntPattern integer2 = (IntPattern) p2;
                    return integer1.getValue() == integer2.getValue() ? 0 : null;
                } else if(p2 instanceof PatternVar) {
                    final PatternVar var2 = (PatternVar) p2;
                    if(boundAt(var2, vars2) >= 0) {
                        return 1;
                    } else {
                        bind(var2.getVar(), vars2, pos.getAndIncrement());
                        return -1;
                    }
                } else if(p2 instanceof PatternAs) {
                    final PatternAs as2 = (PatternAs) p2;
                    bind(as2.getVar(), vars2, pos.get());
                    return compare(p1, as2.getPattern(), pos, vars1, vars2);
                } else {
                    return null;
                }
            } else if(p1 instanceof PatternVar) {
                final PatternVar var1 = (PatternVar) p1;
                final int i1 = boundAt(var1, vars1);
                if(p2 instanceof PatternVar) {
                    final PatternVar var2 = (PatternVar) p2;
                    final int i2 = boundAt(var2, vars2);
                    if(i1 < 0 && i2 < 0) { // neither are bound
                        bind(var1.getVar(), vars1, var2.getVar(), vars2, pos.getAndIncrement());
                        return 0;
                    } else if(i1 < 0 && i2 >= 0) { // p2 is bound
                        // NOTE(review): binds var2's variable into vars1 (the p1-side map); if the
                        // intent is to bind p1's variable this should be var1.getVar() — confirm.
                        bind(var2.getVar(), vars1, pos.getAndIncrement());
                        return 1;
                    } else if(i1 >= 0 && i2 < 0) { // p1 is bound
                        bind(var2.getVar(), vars2, pos.getAndIncrement());
                        return -1;
                    } else { // both are bound, the left-most takes precedence
                        return i1 - i2;
                    }
                } else if(p2 instanceof PatternAs) {
                    final PatternAs as2 = (PatternAs) p2;
                    bind(as2.getVar(), vars2, pos.get());
                    return compare(p1, as2.getPattern(), pos, vars1, vars2);
                } else {
                    return 1;
                }
            } else if(p1 instanceof PatternAs) {
                final PatternAs as1 = (PatternAs) p1;
                bind(as1.getVar(), vars1, pos.get()); // FIXME what if this is already bound?
                return compare(as1.getPattern(), p2, pos, vars1, vars2);
            } else {
                return null;
            }
        }

        // Position at which the pattern variable was first bound, or -1 if unbound/wildcard.
        private int boundAt(PatternVar vp, Map<ITermVar, Integer> vars) {
            final @Nullable ITermVar v = vp.getVar();
            if(v == null) {
                return -1;
            } else {
                return vars.getOrDefault(v, -1);
            }
        }

        private void bind(@Nullable ITermVar v1, Map<ITermVar, Integer> vars1, @Nullable ITermVar v2,
                Map<ITermVar, Integer> vars2, int pos) {
            bind(v1, vars1, pos);
            bind(v2, vars2, pos);
        }

        // First binding wins; wildcards (null) are never recorded.
        private void bind(@Nullable ITermVar v, Map<ITermVar, Integer> vars, int pos) {
            if(v != null && !vars.containsKey(v)) {
                vars.put(v, pos);
            }
        }

        /**
         * Return a comparator for patterns.
         *
         * Can be used to order patterns. It cannot not differentiate between incomparable patterns, and equivalent
         * patterns: both return 0.
         *
         * Note: this comparator imposes orderings that are inconsistent with equals.
         */
        public java.util.Comparator<Pattern> asComparator() {
            return new java.util.Comparator<Pattern>() {
                @Override public int compare(Pattern p1, Pattern p2) {
                    return LeftRightOrder.this.compare(p1, p2).orElse(0);
                }
            };
        }

    }

}
| |
package com.eden.orchid.api.generators;
import com.eden.common.util.EdenPair;
import com.eden.orchid.api.OrchidContext;
import com.eden.orchid.api.OrchidService;
import com.eden.orchid.api.indexing.OrchidRootIndex;
import com.eden.orchid.api.options.OptionsExtractor;
import com.eden.orchid.api.render.RenderService;
import com.eden.orchid.api.resources.resource.OrchidResource;
import com.eden.orchid.api.resources.resource.StringResource;
import com.eden.orchid.api.theme.Theme;
import com.eden.orchid.api.theme.pages.OrchidPage;
import com.eden.orchid.api.theme.pages.OrchidReference;
import com.eden.orchid.impl.relations.ThemeRelation;
import com.eden.orchid.testhelpers.OrchidUnitTest;
import com.eden.orchid.utilities.OrchidExtensionsKt;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.stubbing.Answer;
import javax.annotation.Nonnull;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import static com.eden.orchid.api.generators.OrchidGeneratorKt.modelOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.clearInvocations;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public final class GeneratorServiceTest implements OrchidUnitTest {
private OrchidContext context;
private OptionsExtractor extractor;
private Theme theme;
private BuildMetrics buildMetrics;
private GeneratorService underTest;
private GeneratorServiceImpl service;
private OrchidRootIndex internalIndex;
private Set<OrchidGenerator> generators;
private MockGenerator generator1;
private List<OrchidPage> pages1;
private OrchidPage mockPage1;
private OrchidReference mockPage1Reference;
private OrchidResource mockPage1Resource;
private MockGenerator generator2;
private List<OrchidPage> pages2;
private OrchidResource mockFreeableResource;
private OrchidPage mockPage2;
private OrchidReference mockPage2Reference;
private MockGenerator generator3;
private List<OrchidPage> pages3;
@BeforeEach
public void setUp() {
context = mock(OrchidContext.class);
extractor = mock(OptionsExtractor.class);
theme = mock(Theme.class);
buildMetrics = new BuildMetrics(context);
internalIndex = new OrchidRootIndex(context, "internal");
when(context.resolve(OptionsExtractor.class)).thenReturn(extractor);
when(context.findTheme(any())).thenReturn(theme);
when(context.getIndex()).thenReturn(internalIndex);
when(context.includeDrafts()).thenReturn(false);
when(context.getEmbeddedData(anyString(), anyString())).thenReturn(new EdenPair<>("", new HashMap<>()));
generators = new HashSet<>();
mockPage1Reference = new OrchidReference(context, "page1.html");
mockPage1Resource = new StringResource(mockPage1Reference, "");
mockPage1 = spy(new OrchidPage(mockPage1Resource, RenderService.RenderMode.TEMPLATE, "mockPage1", ""));
pages1 = new ArrayList<>();
pages1.add(mockPage1);
generator1 = spy(new MockGenerator("gen1", OrchidGenerator.Stage.COLLECTION, pages1));
generators.add(generator1);
mockPage2Reference = new OrchidReference(context, "page2.html");
mockFreeableResource = spy(new OrchidResource(mockPage2Reference) {
@Nonnull
@Override
public InputStream getContentStream() {
return OrchidExtensionsKt.asInputStream("");
}
});
mockPage2 = spy(new OrchidPage(mockFreeableResource, RenderService.RenderMode.TEMPLATE, "mockPage2", ""));
pages2 = new ArrayList<>();
pages2.add(mockPage2);
generator2 = spy(new MockGenerator("gen2", OrchidGenerator.Stage.CONTENT, pages2));
generators.add(generator2);
pages3 = new ArrayList<>();
generator3 = new MockGenerator("gen3", OrchidGenerator.Stage.WARM_UP, pages3);
generator3 = spy(generator3);
generators.add(generator3);
when(context.resolveSet(OrchidGenerator.class)).thenReturn(generators);
// test the service directly
service = new GeneratorServiceImpl();
service.initialize(context);
// test that the default implementation is identical to the real implementation
underTest = new GeneratorService() {
public void initialize(OrchidContext context) { }
public <T extends OrchidService> T getService(Class<T> serviceClass) { return (T) service; }
};
}
@Test
public void testSetupCorrectly() throws Throwable {
underTest.startIndexing();
underTest.startGeneration();
verify(generator1).extractOptions((OrchidContext) any(), any());
verify(generator1).startIndexing(context);
assertThat(generator1.mockPages.size(), is(1));
assertThat(generator1.mockPages.size(), is(1));
verify(generator2).extractOptions((OrchidContext) any(), any());
verify(generator2).startIndexing(context);
assertThat(generator2.mockPages.size(), is(1));
verify(generator3).extractOptions((OrchidContext) any(), any());
verify(generator3).startIndexing(context);
assertThat(generator3.mockPages, is(notNullValue()));
assertThat(generator3.mockPages.size(), is(0));
}
@Test
public void testFilteringGenerators() throws Throwable {
service.startIndexing();
List<OrchidGenerator> generators;
generators = service.getFilteredGenerators().collect(Collectors.toList());
assertThat(generators, containsInAnyOrder(generator1, generator2, generator3));
service.setDisabled(new String[]{"gen1"});
generators = service.getFilteredGenerators().collect(Collectors.toList());
assertThat(generators, containsInAnyOrder(generator2, generator3));
service.setDisabled(null);
generators = service.getFilteredGenerators().collect(Collectors.toList());
assertThat(generators, containsInAnyOrder(generator1, generator2, generator3));
service.setEnabled(new String[]{"gen1"});
generators = service.getFilteredGenerators().collect(Collectors.toList());
assertThat(generators, containsInAnyOrder(generator1));
service.setDisabled(new String[]{"gen1"});
generators = service.getFilteredGenerators().collect(Collectors.toList());
assertThat(generators.size(), is(0));
}
@Test
public void testGeneratorThemes() throws Throwable {
underTest.startIndexing();
underTest.startGeneration();
verify(context, never()).pushTheme(any());
clearInvocations(context);
ThemeRelation g1Theme = new ThemeRelation(context);
g1Theme.extractOptions(context, Collections.singletonMap("key", "theme1"));
generator1.setTheme(g1Theme);
underTest.startIndexing();
underTest.startGeneration();
clearInvocations(context);
generator1.setTheme(g1Theme);
underTest.startIndexing();
underTest.startGeneration();
clearInvocations(context);
generator3.setTheme(g1Theme);
underTest.startIndexing();
underTest.startGeneration();
clearInvocations(context);
}
@Test
public void testFreeableResourcesFreed() throws Throwable {
doAnswer((Answer) invocation -> {
mockFreeableResource.free();
return null;
}).when(mockPage2).free();
underTest.startIndexing();
underTest.startGeneration();
verify(mockFreeableResource).free();
}
public static class MockGenerator extends OrchidGenerator<OrchidGenerator.Model> {
List<? extends OrchidPage> mockPages;
List<? extends OrchidPage> generatedPages;
public MockGenerator(String key, OrchidGenerator.Stage stage, @Nonnull List<? extends OrchidPage> mockPages) {
super(key, stage);
this.mockPages = mockPages;
}
@Nonnull
@Override
public Model startIndexing(@Nonnull OrchidContext context) {
return modelOf(this, ()->mockPages);
}
@Override
public void startGeneration(@Nonnull OrchidContext context, Model model) {
generatedPages = model.getAllPages();
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.planner.logical;
import java.math.BigDecimal;
import java.util.GregorianCalendar;
import java.util.LinkedList;
import java.util.List;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.expression.ExpressionPosition;
import org.apache.drill.common.expression.FieldReference;
import org.apache.drill.common.expression.FunctionCallFactory;
import org.apache.drill.common.expression.IfExpression;
import org.apache.drill.common.expression.IfExpression.IfCondition;
import org.apache.drill.common.expression.LogicalExpression;
import org.apache.drill.common.expression.NullExpression;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.expression.TypedNullConstant;
import org.apache.drill.common.expression.ValueExpressions;
import org.apache.drill.common.expression.ValueExpressions.QuotedString;
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.planner.StarColumnHelper;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexCorrelVariable;
import org.apache.calcite.rex.RexDynamicParam;
import org.apache.calcite.rex.RexFieldAccess;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexLocalRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexOver;
import org.apache.calcite.rex.RexRangeRef;
import org.apache.calcite.rex.RexVisitorImpl;
import org.apache.calcite.sql.SqlSyntax;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.util.NlsString;
import com.google.common.collect.Lists;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.exec.work.ExecErrorConstants;
/**
 * Utilities for Drill's planner: converts a Calcite {@link RexNode} tree into
 * an equivalent Drill {@link LogicalExpression} tree during logical planning.
 *
 * <p>Conversion is driven by {@link RexToDrill}, a {@link RexVisitorImpl} that
 * handles input references, literals, and operator calls; Rex node kinds with
 * no Drill equivalent are reported as planning errors via
 * {@link UserException#planError()}.
 */
public class DrillOptiq {

  public static final String UNSUPPORTED_REX_NODE_ERROR = "Cannot convert RexNode to equivalent Drill expression. ";

  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillOptiq.class);

  /**
   * Converts a tree of {@link RexNode} operators into a scalar expression in Drill syntax.
   *
   * @param context parse context carrying the planner settings (used e.g. to
   *                check whether the decimal data type is enabled)
   * @param input   relational operator whose row type resolves positional input references
   * @param expr    the Calcite row expression to convert
   * @return the equivalent Drill logical expression
   */
  public static LogicalExpression toDrill(DrillParseContext context, RelNode input, RexNode expr) {
    final RexToDrill visitor = new RexToDrill(context, input);
    return expr.accept(visitor);
  }

  /**
   * Rex visitor producing one Drill {@link LogicalExpression} per visited Calcite
   * row expression. Unsupported node types funnel into {@link #doUnknown(RexNode)},
   * which raises a plan error with {@link #UNSUPPORTED_REX_NODE_ERROR}.
   */
  private static class RexToDrill extends RexVisitorImpl<LogicalExpression> {

    /** Provides the row type used to resolve {@link RexInputRef} indexes to field names. */
    private final RelNode input;

    /** Planner context; consulted for settings such as decimal-type enablement. */
    private final DrillParseContext context;

    RexToDrill(DrillParseContext context, RelNode input) {
      super(true);
      this.context = context;
      this.input = input;
    }

    @Override
    public LogicalExpression visitInputRef(RexInputRef inputRef) {
      // Resolve the positional reference against the input's row type and emit
      // a quoted field reference by name.
      final int index = inputRef.getIndex();
      final RelDataTypeField field = input.getRowType().getFieldList().get(index);
      return FieldReference.getWithQuotedRef(field.getName());
    }

    @Override
    public LogicalExpression visitCall(RexCall call) {
      // Dispatch on the operator's SQL syntax category; each branch rewrites the
      // call into the corresponding Drill function expression.
      final SqlSyntax syntax = call.getOperator().getSyntax();
      switch (syntax) {
      case BINARY:
        logger.debug("Binary");
        final String funcName = call.getOperator().getName().toLowerCase();
        return doFunction(call, funcName);
      case FUNCTION:
      case FUNCTION_ID:
        logger.debug("Function");
        return getDrillFunctionFromOptiqCall(call);
      case POSTFIX:
        logger.debug("Postfix");
        switch (call.getKind()) {
        case IS_NOT_NULL:
        case IS_NOT_TRUE:
        case IS_NOT_FALSE:
        case IS_NULL:
        case IS_TRUE:
        case IS_FALSE:
        case OTHER:
          return FunctionCallFactory.createExpression(call.getOperator().getName().toLowerCase(),
              ExpressionPosition.UNKNOWN, call.getOperands().get(0).accept(this));
        }
        throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")");
      case PREFIX:
        logger.debug("Prefix");
        LogicalExpression arg = call.getOperands().get(0).accept(this);
        switch (call.getKind()) {
        case NOT:
          return FunctionCallFactory.createExpression(call.getOperator().getName().toLowerCase(),
              ExpressionPosition.UNKNOWN, arg);
        case MINUS_PREFIX:
          // Rewrite unary minus as (-1 * operand) so it reuses the BINARY path.
          final RexBuilder builder = input.getCluster().getRexBuilder();
          final List<RexNode> operands = Lists.newArrayList();
          operands.add(builder.makeExactLiteral(new BigDecimal(-1)));
          operands.add(call.getOperands().get(0));
          return visitCall((RexCall) builder.makeCall(
              SqlStdOperatorTable.MULTIPLY,
              operands));
        }
        throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")");
      case SPECIAL:
        logger.debug("Special");
        switch (call.getKind()) {
        case CAST:
          return getDrillCastFunctionFromOptiq(call);
        case LIKE:
        case SIMILAR:
          return getDrillFunctionFromOptiqCall(call);
        case CASE:
          List<LogicalExpression> caseArgs = Lists.newArrayList();
          for (RexNode r : call.getOperands()) {
            caseArgs.add(r.accept(this));
          }
          // Build the IfExpression chain from the innermost else outwards, so
          // process the (condition, value) pairs in reverse order.
          caseArgs = Lists.reverse(caseArgs);
          // number of arguments is always odd, because Optiq adds "null" for
          // the missing else expression at the end
          assert caseArgs.size() % 2 == 1;
          LogicalExpression elseExpression = caseArgs.get(0);
          for (int i = 1; i < caseArgs.size(); i = i + 2) {
            elseExpression = IfExpression.newBuilder()
                .setElse(elseExpression)
                .setIfCondition(new IfCondition(caseArgs.get(i + 1), caseArgs.get(i))).build();
          }
          return elseExpression;
        }
        if (call.getOperator() == SqlStdOperatorTable.ITEM) {
          SchemaPath left = (SchemaPath) call.getOperands().get(0).accept(this);
          // Convert expr of item[*, 'abc'] into column expression 'abc'
          String rootSegName = left.getRootSegment().getPath();
          if (StarColumnHelper.isStarColumn(rootSegName)) {
            rootSegName = rootSegName.substring(0, rootSegName.indexOf("*"));
            final RexLiteral literal = (RexLiteral) call.getOperands().get(1);
            return SchemaPath.getSimplePath(rootSegName + literal.getValue2().toString());
          }
          final RexLiteral literal = (RexLiteral) call.getOperands().get(1);
          switch (literal.getTypeName()) {
          case DECIMAL:
          case INTEGER:
            // Numeric subscript: array element access.
            return left.getChild(((BigDecimal) literal.getValue()).intValue());
          case CHAR:
            // String subscript: map/child field access.
            return left.getChild(literal.getValue2().toString());
          default:
            // fall through
          }
        }
        if (call.getOperator() == SqlStdOperatorTable.DATETIME_PLUS) {
          return doFunction(call, "+");
        }
        // fall through
      default:
        throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")");
      }
    }

    /**
     * Converts the operands of {@code call} and combines them with {@code funcName}.
     * Boolean operators keep their n-ary form; all other operators are stacked into
     * a chain of binary calls, e.g. {@code f(a, b, c)} becomes {@code f(a, f(b, c))}.
     */
    private LogicalExpression doFunction(RexCall call, String funcName) {
      List<LogicalExpression> args = Lists.newArrayList();
      for (RexNode r : call.getOperands()) {
        args.add(r.accept(this));
      }
      if (FunctionCallFactory.isBooleanOperator(funcName)) {
        LogicalExpression func = FunctionCallFactory.createBooleanOperator(funcName, args);
        return func;
      } else {
        // Fold right-to-left: reverse the list, then wrap each remaining argument
        // around the accumulated expression.
        args = Lists.reverse(args);
        LogicalExpression lastArg = args.get(0);
        for (int i = 1; i < args.size(); i++) {
          lastArg = FunctionCallFactory.createExpression(funcName, Lists.newArrayList(args.get(i), lastArg));
        }
        return lastArg;
      }
    }

    /**
     * Reports a Rex node that has no Drill equivalent as a plan error.
     * Always throws; never returns normally.
     */
    private LogicalExpression doUnknown(RexNode o) {
      // raise an error
      throw UserException.planError().message(UNSUPPORTED_REX_NODE_ERROR +
          "RexNode Class: %s, RexNode Digest: %s", o.getClass().getName(), o.toString()).build(logger);
    }

    @Override
    public LogicalExpression visitLocalRef(RexLocalRef localRef) {
      return doUnknown(localRef);
    }

    @Override
    public LogicalExpression visitOver(RexOver over) {
      return doUnknown(over);
    }

    @Override
    public LogicalExpression visitCorrelVariable(RexCorrelVariable correlVariable) {
      return doUnknown(correlVariable);
    }

    @Override
    public LogicalExpression visitDynamicParam(RexDynamicParam dynamicParam) {
      return doUnknown(dynamicParam);
    }

    @Override
    public LogicalExpression visitRangeRef(RexRangeRef rangeRef) {
      return doUnknown(rangeRef);
    }

    @Override
    public LogicalExpression visitFieldAccess(RexFieldAccess fieldAccess) {
      return super.visitFieldAccess(fieldAccess);
    }

    /**
     * Rewrites a Calcite CAST call as a Drill cast function, mapping the Calcite
     * SQL type name to the corresponding Drill {@link MajorType}.
     *
     * @throws UserException if the target type is DECIMAL and decimal support is disabled
     * @throws UnsupportedOperationException for DECIMAL precision outside 0-38
     */
    private LogicalExpression getDrillCastFunctionFromOptiq(RexCall call) {
      LogicalExpression arg = call.getOperands().get(0).accept(this);
      MajorType castType = null;
      switch (call.getType().getSqlTypeName().getName()) {
      case "VARCHAR":
      case "CHAR":
        castType = Types.required(MinorType.VARCHAR).toBuilder().setWidth(call.getType().getPrecision()).build();
        break;
      case "INTEGER": castType = Types.required(MinorType.INT); break;
      case "FLOAT": castType = Types.required(MinorType.FLOAT4); break;
      case "DOUBLE": castType = Types.required(MinorType.FLOAT8); break;
      case "DECIMAL":
        if (context.getPlannerSettings().getOptions().
            getOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY).bool_val == false) {
          throw UserException
              .unsupportedError()
              .message(ExecErrorConstants.DECIMAL_DISABLE_ERR_MSG)
              .build(logger);
        }
        // Pick the narrowest decimal vector type that can hold the requested precision.
        int precision = call.getType().getPrecision();
        int scale = call.getType().getScale();
        if (precision <= 9) {
          castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL9).setPrecision(precision).setScale(scale).build();
        } else if (precision <= 18) {
          castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL18).setPrecision(precision).setScale(scale).build();
        } else if (precision <= 28) {
          // Inject a cast to SPARSE before casting to the dense type.
          castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL28SPARSE).setPrecision(precision).setScale(scale).build();
        } else if (precision <= 38) {
          castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL38SPARSE).setPrecision(precision).setScale(scale).build();
        } else {
          throw new UnsupportedOperationException("Only Decimal types with precision range 0 - 38 is supported");
        }
        break;
      case "INTERVAL_YEAR_MONTH": castType = Types.required(MinorType.INTERVALYEAR); break;
      case "INTERVAL_DAY_TIME": castType = Types.required(MinorType.INTERVALDAY); break;
      case "BOOLEAN": castType = Types.required(MinorType.BIT); break;
      case "ANY": return arg; // Type will be same as argument.
      default: castType = Types.required(MinorType.valueOf(call.getType().getSqlTypeName().getName()));
      }
      return FunctionCallFactory.createCast(castType, ExpressionPosition.UNKNOWN, arg);
    }

    /**
     * Converts a FUNCTION-syntax call, applying Drill-specific rewrites for
     * {@code extract}, {@code trim}, {@code ltrim}/{@code rtrim}/{@code btrim},
     * {@code date_part}, {@code concat}, {@code length}, {@code convert_from}/
     * {@code convert_to} and {@code rpad}/{@code lpad}. Anything else maps
     * directly onto a Drill function call of the same (lower-cased) name.
     */
    private LogicalExpression getDrillFunctionFromOptiqCall(RexCall call) {
      List<LogicalExpression> args = Lists.newArrayList();
      for (RexNode n : call.getOperands()) {
        args.add(n.accept(this));
      }
      int argsSize = args.size();
      String functionName = call.getOperator().getName().toLowerCase();
      // TODO: once we have more function rewrites and a pattern emerges from different rewrites, factor this out in a better fashion
      /* Rewrite extract functions in the following manner
       * extract(year, date '2008-2-23') ---> extractYear(date '2008-2-23')
       */
      if (functionName.equals("extract")) {
        // Assert that the first argument to extract is a QuotedString
        assert args.get(0) instanceof ValueExpressions.QuotedString;
        // Get the unit of time to be extracted
        String timeUnitStr = ((ValueExpressions.QuotedString) args.get(0)).value;
        switch (timeUnitStr) {
        case ("YEAR"):
        case ("MONTH"):
        case ("DAY"):
        case ("HOUR"):
        case ("MINUTE"):
        case ("SECOND"):
          String functionPostfix = timeUnitStr.substring(0, 1).toUpperCase() + timeUnitStr.substring(1).toLowerCase();
          functionName += functionPostfix;
          return FunctionCallFactory.createExpression(functionName, args.subList(1, 2));
        default:
          throw new UnsupportedOperationException("extract function supports the following time units: YEAR, MONTH, DAY, HOUR, MINUTE, SECOND");
        }
      } else if (functionName.equals("trim")) {
        // trim(<flag>, <pad>, <string>) -> {l,r,b}trim(<string>, <pad>)
        String trimFunc = null;
        List<LogicalExpression> trimArgs = Lists.newArrayList();
        assert args.get(0) instanceof ValueExpressions.QuotedString;
        switch (((ValueExpressions.QuotedString) args.get(0)).value.toUpperCase()) {
        case "LEADING":
          trimFunc = "ltrim";
          break;
        case "TRAILING":
          trimFunc = "rtrim";
          break;
        case "BOTH":
          trimFunc = "btrim";
          break;
        default:
          // Fail fast on an unexpected trim flag. The previous code here was
          // "assert 1 == 0;", which is a no-op when assertions are disabled
          // (the JVM default) and would let a null function name flow into
          // createExpression below.
          throw new UnsupportedOperationException(
              "trim function supports the following flags: LEADING, TRAILING, BOTH");
        }
        trimArgs.add(args.get(2));
        trimArgs.add(args.get(1));
        return FunctionCallFactory.createExpression(trimFunc, trimArgs);
      } else if (functionName.equals("ltrim") || functionName.equals("rtrim") || functionName.equals("btrim")) {
        // Single-argument form: pad with a space by default.
        if (argsSize == 1) {
          args.add(ValueExpressions.getChar(" "));
        }
        return FunctionCallFactory.createExpression(functionName, args);
      } else if (functionName.equals("date_part")) {
        // Rewrite DATE_PART functions as extract functions
        // assert that the function has exactly two arguments
        assert argsSize == 2;
        /* Based on the first input to the date_part function we rewrite the function as the
         * appropriate extract function. For example
         * date_part('year', date '2008-2-23') ------> extractYear(date '2008-2-23')
         */
        assert args.get(0) instanceof QuotedString;
        QuotedString extractString = (QuotedString) args.get(0);
        String functionPostfix = extractString.value.substring(0, 1).toUpperCase() + extractString.value.substring(1).toLowerCase();
        return FunctionCallFactory.createExpression("extract" + functionPostfix, args.subList(1, 2));
      } else if (functionName.equals("concat")) {
        if (argsSize == 1) {
          /*
           * We treat concat with one argument as a special case. Since we don't have a function
           * implementation of concat that accepts one argument. We simply add another dummy argument
           * (empty string literal) to the list of arguments.
           */
          List<LogicalExpression> concatArgs = new LinkedList<>(args);
          concatArgs.add(new QuotedString("", ExpressionPosition.UNKNOWN));
          return FunctionCallFactory.createExpression(functionName, concatArgs);
        } else if (argsSize > 2) {
          List<LogicalExpression> concatArgs = Lists.newArrayList();
          /* stack concat functions on top of each other if we have more than two arguments
           * Eg: concat(col1, col2, col3) => concat(concat(col1, col2), col3)
           */
          concatArgs.add(args.get(0));
          concatArgs.add(args.get(1));
          LogicalExpression first = FunctionCallFactory.createExpression(functionName, concatArgs);
          for (int i = 2; i < argsSize; i++) {
            concatArgs = Lists.newArrayList();
            concatArgs.add(first);
            concatArgs.add(args.get(i));
            first = FunctionCallFactory.createExpression(functionName, concatArgs);
          }
          return first;
        }
      } else if (functionName.equals("length")) {
        if (argsSize == 2) {
          // Second argument should always be a literal specifying the encoding format
          assert args.get(1) instanceof ValueExpressions.QuotedString;
          String encodingType = ((ValueExpressions.QuotedString) args.get(1)).value;
          functionName += encodingType.substring(0, 1).toUpperCase() + encodingType.substring(1).toLowerCase();
          return FunctionCallFactory.createExpression(functionName, args.subList(0, 1));
        }
      } else if ((functionName.equals("convert_from") || functionName.equals("convert_to"))
          && args.get(1) instanceof QuotedString) {
        return FunctionCallFactory.createConvert(functionName, ((QuotedString) args.get(1)).value, args.get(0), ExpressionPosition.UNKNOWN);
      } else if ((functionName.equalsIgnoreCase("rpad")) || functionName.equalsIgnoreCase("lpad")) {
        // If we have only two arguments for rpad/lpad append a default QuotedExpression as an argument which will be used to pad the string
        if (argsSize == 2) {
          String spaceFill = " ";
          LogicalExpression fill = ValueExpressions.getChar(spaceFill);
          args.add(fill);
        }
      }
      return FunctionCallFactory.createExpression(functionName, args);
    }

    @Override
    public LogicalExpression visitLiteral(RexLiteral literal) {
      // Map each Calcite literal to the corresponding Drill value expression;
      // a null literal of a known type becomes a typed null constant.
      switch (literal.getType().getSqlTypeName()) {
      case BIGINT:
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.BIGINT);
        }
        long l = (((BigDecimal) literal.getValue()).setScale(0, BigDecimal.ROUND_HALF_UP)).longValue();
        return ValueExpressions.getBigInt(l);
      case BOOLEAN:
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.BIT);
        }
        return ValueExpressions.getBit(((Boolean) literal.getValue()));
      case CHAR:
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.VARCHAR);
        }
        return ValueExpressions.getChar(((NlsString) literal.getValue()).getValue());
      case DOUBLE:
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.FLOAT8);
        }
        double d = ((BigDecimal) literal.getValue()).doubleValue();
        return ValueExpressions.getFloat8(d);
      case FLOAT:
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.FLOAT4);
        }
        float f = ((BigDecimal) literal.getValue()).floatValue();
        return ValueExpressions.getFloat4(f);
      case INTEGER:
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.INT);
        }
        int a = (((BigDecimal) literal.getValue()).setScale(0, BigDecimal.ROUND_HALF_UP)).intValue();
        return ValueExpressions.getInt(a);
      case DECIMAL:
        /* TODO: Enable using Decimal literals once we have more functions implemented for Decimal
         * For now continue using Double instead of decimals
        int precision = ((BigDecimal) literal.getValue()).precision();
        if (precision <= 9) {
            return ValueExpressions.getDecimal9((BigDecimal)literal.getValue());
        } else if (precision <= 18) {
            return ValueExpressions.getDecimal18((BigDecimal)literal.getValue());
        } else if (precision <= 28) {
            return ValueExpressions.getDecimal28((BigDecimal)literal.getValue());
        } else if (precision <= 38) {
            return ValueExpressions.getDecimal38((BigDecimal)literal.getValue());
        } */
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.FLOAT8);
        }
        double dbl = ((BigDecimal) literal.getValue()).doubleValue();
        logger.warn("Converting exact decimal into approximate decimal. Should be fixed once decimal is implemented.");
        return ValueExpressions.getFloat8(dbl);
      case VARCHAR:
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.VARCHAR);
        }
        return ValueExpressions.getChar(((NlsString) literal.getValue()).getValue());
      case SYMBOL:
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.VARCHAR);
        }
        return ValueExpressions.getChar(literal.getValue().toString());
      case DATE:
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.DATE);
        }
        return (ValueExpressions.getDate((GregorianCalendar) literal.getValue()));
      case TIME:
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.TIME);
        }
        return (ValueExpressions.getTime((GregorianCalendar) literal.getValue()));
      case TIMESTAMP:
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.TIMESTAMP);
        }
        return (ValueExpressions.getTimeStamp((GregorianCalendar) literal.getValue()));
      case INTERVAL_YEAR_MONTH:
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.INTERVALYEAR);
        }
        return (ValueExpressions.getIntervalYear(((BigDecimal) (literal.getValue())).intValue()));
      case INTERVAL_DAY_TIME:
        if (isLiteralNull(literal)) {
          return createNullExpr(MinorType.INTERVALDAY);
        }
        return (ValueExpressions.getIntervalDay(((BigDecimal) (literal.getValue())).longValue()));
      case NULL:
        return NullExpression.INSTANCE;
      case ANY:
        if (isLiteralNull(literal)) {
          return NullExpression.INSTANCE;
        }
      default:
        throw new UnsupportedOperationException(String.format("Unable to convert the value of %s and type %s to a Drill constant expression.", literal, literal.getType().getSqlTypeName()));
      }
    }
  }

  /** Creates a typed null constant with an optional (nullable) version of {@code type}. */
  private static TypedNullConstant createNullExpr(MinorType type) {
    return new TypedNullConstant(Types.optional(type));
  }

  /** Returns true if the literal's type name is NULL (i.e. a typeless null literal). */
  public static boolean isLiteralNull(RexLiteral literal) {
    return literal.getTypeName().getName().equals("NULL");
  }
}
| |
/*
* Copyright 2002-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.jdbc.object;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.util.HashMap;
import java.util.Map;
import org.easymock.MockControl;
import org.springframework.core.JdkVersion;
import org.springframework.jdbc.AbstractJdbcTests;
import org.springframework.jdbc.JdbcUpdateAffectedIncorrectNumberOfRowsException;
import org.springframework.jdbc.core.SqlParameter;
import org.springframework.jdbc.support.GeneratedKeyHolder;
import org.springframework.jdbc.support.KeyHolder;
/**
* @author Trevor Cook
* @author Thomas Risberg
* @author Juergen Hoeller
*/
public class SqlUpdateTests extends AbstractJdbcTests {
private static final String UPDATE =
"update seat_status set booking_id = null";
private static final String UPDATE_INT =
"update seat_status set booking_id = null where performance_id = ?";
private static final String UPDATE_INT_INT =
"update seat_status set booking_id = null where performance_id = ? and price_band_id = ?";
private static final String UPDATE_NAMED_PARAMETERS =
"update seat_status set booking_id = null where performance_id = :perfId and price_band_id = :priceId";
private static final String UPDATE_STRING =
"update seat_status set booking_id = null where name = ?";
private static final String UPDATE_OBJECTS =
"update seat_status set booking_id = null where performance_id = ? and price_band_id = ? and name = ? and confirmed = ?";
private static final String INSERT_GENERATE_KEYS =
"insert into show (name) values(?)";
private MockControl ctrlPreparedStatement;
private PreparedStatement mockPreparedStatement;
private MockControl ctrlResultSet;
private ResultSet mockResultSet;
private MockControl ctrlResultSetMetaData;
private ResultSetMetaData mockResultSetMetaData;
protected void setUp() throws Exception {
super.setUp();
ctrlPreparedStatement = MockControl.createControl(PreparedStatement.class);
mockPreparedStatement = (PreparedStatement) ctrlPreparedStatement.getMock();
}
protected void tearDown() throws Exception {
super.tearDown();
if (shouldVerify()) {
ctrlPreparedStatement.verify();
}
}
protected void replay() {
super.replay();
ctrlPreparedStatement.replay();
}
public void testUpdate() throws SQLException {
mockPreparedStatement.executeUpdate();
ctrlPreparedStatement.setReturnValue(1);
mockPreparedStatement.getWarnings();
ctrlPreparedStatement.setReturnValue(null);
mockPreparedStatement.close();
ctrlPreparedStatement.setVoidCallable();
mockConnection.prepareStatement(UPDATE);
ctrlConnection.setReturnValue(mockPreparedStatement);
replay();
Updater pc = new Updater();
int rowsAffected = pc.run();
assertEquals(1, rowsAffected);
}
public void testUpdateInt() throws SQLException {
mockPreparedStatement.setObject(1, new Integer(1), Types.NUMERIC);
ctrlPreparedStatement.setVoidCallable();
mockPreparedStatement.executeUpdate();
ctrlPreparedStatement.setReturnValue(1);
mockPreparedStatement.getWarnings();
ctrlPreparedStatement.setReturnValue(null);
mockPreparedStatement.close();
ctrlPreparedStatement.setVoidCallable();
mockConnection.prepareStatement(UPDATE_INT);
ctrlConnection.setReturnValue(mockPreparedStatement);
replay();
IntUpdater pc = new IntUpdater();
int rowsAffected = pc.run(1);
assertEquals(1, rowsAffected);
}
public void testUpdateIntInt() throws SQLException {
mockPreparedStatement.setObject(1, new Integer(1), Types.NUMERIC);
mockPreparedStatement.setObject(2, new Integer(1), Types.NUMERIC);
ctrlPreparedStatement.setVoidCallable();
mockPreparedStatement.executeUpdate();
ctrlPreparedStatement.setReturnValue(1);
mockPreparedStatement.getWarnings();
ctrlPreparedStatement.setReturnValue(null);
mockPreparedStatement.close();
ctrlPreparedStatement.setVoidCallable();
mockConnection.prepareStatement(UPDATE_INT_INT);
ctrlConnection.setReturnValue(mockPreparedStatement);
replay();
IntIntUpdater pc = new IntIntUpdater();
int rowsAffected = pc.run(1, 1);
assertEquals(1, rowsAffected);
}
public void testNamedParameterUpdateWithUnnamedDeclarations() throws SQLException {
doTestNamedParameterUpdate(false);
}
public void testNamedParameterUpdateWithNamedDeclarations() throws SQLException {
doTestNamedParameterUpdate(true);
}
private void doTestNamedParameterUpdate(final boolean namedDeclarations) throws SQLException {
mockPreparedStatement.setObject(1, new Integer(1), Types.NUMERIC);
mockPreparedStatement.setObject(2, new Integer(1), Types.DECIMAL);
ctrlPreparedStatement.setVoidCallable();
mockPreparedStatement.executeUpdate();
ctrlPreparedStatement.setReturnValue(1);
mockPreparedStatement.getWarnings();
ctrlPreparedStatement.setReturnValue(null);
mockPreparedStatement.close();
ctrlPreparedStatement.setVoidCallable();
mockConnection.prepareStatement(UPDATE_INT_INT);
ctrlConnection.setReturnValue(mockPreparedStatement);
replay();
class NamedParameterUpdater extends SqlUpdate {
public NamedParameterUpdater() {
setSql(UPDATE_NAMED_PARAMETERS);
setDataSource(mockDataSource);
if (namedDeclarations) {
declareParameter(new SqlParameter("priceId", Types.DECIMAL));
declareParameter(new SqlParameter("perfId", Types.NUMERIC));
}
else {
declareParameter(new SqlParameter(Types.NUMERIC));
declareParameter(new SqlParameter(Types.DECIMAL));
}
compile();
}
public int run(int performanceId, int type) {
Map params = new HashMap();
params.put("perfId", new Integer(performanceId));
params.put("priceId", new Integer(type));
return updateByNamedParam(params);
}
}
NamedParameterUpdater pc = new NamedParameterUpdater();
int rowsAffected = pc.run(1, 1);
assertEquals(1, rowsAffected);
}
public void testUpdateString() throws SQLException {
mockPreparedStatement.setString(1, "rod");
ctrlPreparedStatement.setVoidCallable();
mockPreparedStatement.executeUpdate();
ctrlPreparedStatement.setReturnValue(1);
mockPreparedStatement.getWarnings();
ctrlPreparedStatement.setReturnValue(null);
mockPreparedStatement.close();
ctrlPreparedStatement.setVoidCallable();
mockConnection.prepareStatement(UPDATE_STRING);
ctrlConnection.setReturnValue(mockPreparedStatement);
replay();
StringUpdater pc = new StringUpdater();
int rowsAffected = pc.run("rod");
assertEquals(1, rowsAffected);
}
public void testUpdateMixed() throws SQLException {
mockPreparedStatement.setObject(1, new Integer(1), Types.NUMERIC);
mockPreparedStatement.setObject(2, new Integer(1), Types.NUMERIC, 2);
mockPreparedStatement.setString(3, "rod");
mockPreparedStatement.setObject(4, Boolean.TRUE, Types.BOOLEAN);
ctrlPreparedStatement.setVoidCallable();
mockPreparedStatement.executeUpdate();
ctrlPreparedStatement.setReturnValue(1);
mockPreparedStatement.getWarnings();
ctrlPreparedStatement.setReturnValue(null);
mockPreparedStatement.close();
ctrlPreparedStatement.setVoidCallable();
mockConnection.prepareStatement(UPDATE_OBJECTS);
ctrlConnection.setReturnValue(mockPreparedStatement);
replay();
MixedUpdater pc = new MixedUpdater();
int rowsAffected = pc.run(1, 1, "rod", true);
assertEquals(1, rowsAffected);
}
public void testUpdateAndGeneratedKeys() throws SQLException {
ctrlResultSetMetaData = MockControl.createControl(ResultSetMetaData.class);
mockResultSetMetaData = (ResultSetMetaData) ctrlResultSetMetaData.getMock();
mockResultSetMetaData.getColumnCount();
ctrlResultSetMetaData.setReturnValue(1);
mockResultSetMetaData.getColumnLabel(1);
ctrlResultSetMetaData.setReturnValue("1", 2);
ctrlResultSet = MockControl.createControl(ResultSet.class);
mockResultSet = (ResultSet) ctrlResultSet.getMock();
mockResultSet.getMetaData();
ctrlResultSet.setReturnValue(mockResultSetMetaData);
mockResultSet.next();
ctrlResultSet.setReturnValue(true);
mockResultSet.getObject(1);
ctrlResultSet.setReturnValue(new Integer(11));
mockResultSet.next();
ctrlResultSet.setReturnValue(false);
mockResultSet.close();
ctrlResultSet.setVoidCallable();
mockPreparedStatement.setString(1, "rod");
ctrlPreparedStatement.setVoidCallable();
mockPreparedStatement.executeUpdate();
ctrlPreparedStatement.setReturnValue(1);
mockPreparedStatement.getGeneratedKeys();
ctrlPreparedStatement.setReturnValue(mockResultSet);
mockPreparedStatement.getWarnings();
ctrlPreparedStatement.setReturnValue(null);
mockPreparedStatement.close();
ctrlPreparedStatement.setVoidCallable();
mockConnection.prepareStatement(INSERT_GENERATE_KEYS, PreparedStatement.RETURN_GENERATED_KEYS);
ctrlConnection.setReturnValue(mockPreparedStatement);
replay();
ctrlResultSet.replay();
ctrlResultSetMetaData.replay();
GeneratedKeysUpdater pc = new GeneratedKeysUpdater();
KeyHolder generatedKeyHolder = new GeneratedKeyHolder();
int rowsAffected = pc.run("rod", generatedKeyHolder);
assertEquals(1, rowsAffected);
assertEquals(1, generatedKeyHolder.getKeyList().size());
assertEquals(11, generatedKeyHolder.getKey().intValue());
}
public void testUpdateConstructor() throws SQLException {
mockPreparedStatement.setObject(1, new Integer(1), Types.NUMERIC);
mockPreparedStatement.setObject(2, new Integer(1), Types.NUMERIC);
mockPreparedStatement.setString(3, "rod");
mockPreparedStatement.setObject(4, Boolean.TRUE, Types.BOOLEAN);
ctrlPreparedStatement.setVoidCallable();
mockPreparedStatement.executeUpdate();
ctrlPreparedStatement.setReturnValue(1);
mockPreparedStatement.getWarnings();
ctrlPreparedStatement.setReturnValue(null);
mockPreparedStatement.close();
ctrlPreparedStatement.setVoidCallable();
mockConnection.prepareStatement(UPDATE_OBJECTS);
ctrlConnection.setReturnValue(mockPreparedStatement);
replay();
ConstructorUpdater pc = new ConstructorUpdater();
int rowsAffected = pc.run(1, 1, "rod", true);
assertEquals(1, rowsAffected);
}
public void testUnderMaxRows() throws SQLException {
mockPreparedStatement.executeUpdate();
ctrlPreparedStatement.setReturnValue(3);
mockPreparedStatement.getWarnings();
ctrlPreparedStatement.setReturnValue(null);
mockPreparedStatement.close();
ctrlPreparedStatement.setVoidCallable();
mockConnection.prepareStatement(UPDATE);
ctrlConnection.setReturnValue(mockPreparedStatement);
replay();
MaxRowsUpdater pc = new MaxRowsUpdater();
int rowsAffected = pc.run();
assertEquals(3, rowsAffected);
}
public void testMaxRows() throws SQLException {
mockPreparedStatement.executeUpdate();
ctrlPreparedStatement.setReturnValue(5);
mockPreparedStatement.getWarnings();
ctrlPreparedStatement.setReturnValue(null);
mockPreparedStatement.close();
ctrlPreparedStatement.setVoidCallable();
mockConnection.prepareStatement(UPDATE);
ctrlConnection.setReturnValue(mockPreparedStatement);
replay();
MaxRowsUpdater pc = new MaxRowsUpdater();
int rowsAffected = pc.run();
assertEquals(5, rowsAffected);
}
public void testOverMaxRows() throws SQLException {
mockPreparedStatement.executeUpdate();
ctrlPreparedStatement.setReturnValue(8);
mockPreparedStatement.getWarnings();
ctrlPreparedStatement.setReturnValue(null);
mockPreparedStatement.close();
ctrlPreparedStatement.setVoidCallable();
mockConnection.prepareStatement(UPDATE);
ctrlConnection.setReturnValue(mockPreparedStatement);
replay();
MaxRowsUpdater pc = new MaxRowsUpdater();
try {
int rowsAffected = pc.run();
fail("Shouldn't continue when too many rows affected");
}
catch (JdbcUpdateAffectedIncorrectNumberOfRowsException juaicrex) {
// OK
}
}
public void testRequiredRows() throws SQLException {
mockPreparedStatement.executeUpdate();
ctrlPreparedStatement.setReturnValue(3);
mockPreparedStatement.getWarnings();
ctrlPreparedStatement.setReturnValue(null);
mockPreparedStatement.close();
ctrlPreparedStatement.setVoidCallable();
mockConnection.prepareStatement(UPDATE);
ctrlConnection.setReturnValue(mockPreparedStatement);
replay();
RequiredRowsUpdater pc = new RequiredRowsUpdater();
int rowsAffected = pc.run();
assertEquals(3, rowsAffected);
}
public void testNotRequiredRows() throws SQLException {
mockPreparedStatement.executeUpdate();
ctrlPreparedStatement.setReturnValue(2);
mockPreparedStatement.getWarnings();
ctrlPreparedStatement.setReturnValue(null);
mockPreparedStatement.close();
ctrlPreparedStatement.setVoidCallable();
mockConnection.prepareStatement(UPDATE);
ctrlConnection.setReturnValue(mockPreparedStatement);
replay();
RequiredRowsUpdater pc = new RequiredRowsUpdater();
try {
int rowsAffected = pc.run();
fail("Shouldn't continue when too many rows affected");
}
catch (JdbcUpdateAffectedIncorrectNumberOfRowsException juaicrex) {
// OK
}
}
private class Updater extends SqlUpdate {
public Updater() {
setSql(UPDATE);
setDataSource(mockDataSource);
compile();
}
public int run() {
return update();
}
}
private class IntUpdater extends SqlUpdate {
public IntUpdater() {
setSql(UPDATE_INT);
setDataSource(mockDataSource);
declareParameter(new SqlParameter(Types.NUMERIC));
compile();
}
public int run(int performanceId) {
return update(performanceId);
}
}
private class IntIntUpdater extends SqlUpdate {
public IntIntUpdater() {
setSql(UPDATE_INT_INT);
setDataSource(mockDataSource);
declareParameter(new SqlParameter(Types.NUMERIC));
declareParameter(new SqlParameter(Types.NUMERIC));
compile();
}
public int run(int performanceId, int type) {
return update(performanceId, type);
}
}
private class StringUpdater extends SqlUpdate {
public StringUpdater() {
setSql(UPDATE_STRING);
setDataSource(mockDataSource);
declareParameter(new SqlParameter(Types.VARCHAR));
compile();
}
public int run(String name) {
return update(name);
}
}
private class MixedUpdater extends SqlUpdate {
public MixedUpdater() {
setSql(UPDATE_OBJECTS);
setDataSource(mockDataSource);
declareParameter(new SqlParameter(Types.NUMERIC));
declareParameter(new SqlParameter(Types.NUMERIC, 2));
declareParameter(new SqlParameter(Types.VARCHAR));
declareParameter(new SqlParameter(Types.BOOLEAN));
compile();
}
public int run(int performanceId, int type, String name, boolean confirmed) {
Object[] params =
new Object[] {new Integer(performanceId), new Integer(type), name,
new Boolean(confirmed)};
return update(params);
}
}
private class GeneratedKeysUpdater extends SqlUpdate {
public GeneratedKeysUpdater() {
setSql(INSERT_GENERATE_KEYS);
setDataSource(mockDataSource);
declareParameter(new SqlParameter(Types.VARCHAR));
setReturnGeneratedKeys(true);
compile();
}
public int run(String name, KeyHolder generatedKeyHolder) {
Object[] params = new Object[] {name};
return update(params, generatedKeyHolder);
}
}
private class ConstructorUpdater extends SqlUpdate {
    /**
     * Same statement as {@code MixedUpdater} but configured entirely through the
     * {@code SqlUpdate(DataSource, String, int[])} constructor instead of setters.
     */
    public ConstructorUpdater() {
        super(mockDataSource, UPDATE_OBJECTS,
                new int[] {Types.NUMERIC, Types.NUMERIC, Types.VARCHAR, Types.BOOLEAN });
        compile();
    }

    /**
     * Executes the update with all four parameters bound in declaration order.
     * Uses autoboxing instead of the deprecated {@code new Integer(...)} /
     * {@code new Boolean(...)} constructors; the bound values are identical.
     */
    public int run(int performanceId, int type, String name, boolean confirmed) {
        Object[] params = new Object[] {performanceId, type, name, confirmed};
        return update(params);
    }
}
private class MaxRowsUpdater extends SqlUpdate {
    /** Parameterless updater that allows at most five affected rows per execution. */
    public MaxRowsUpdater() {
        setDataSource(mockDataSource);
        setSql(UPDATE);
        setMaxRowsAffected(5);
        compile();
    }

    /** Executes the update with no parameters. */
    public int run() {
        return update();
    }
}
private class RequiredRowsUpdater extends SqlUpdate {
    /** Parameterless updater that requires exactly three rows to be affected. */
    public RequiredRowsUpdater() {
        setDataSource(mockDataSource);
        setSql(UPDATE);
        setRequiredRowsAffected(3);
        compile();
    }

    /** Executes the update with no parameters. */
    public int run() {
        return update();
    }
}
}
| |
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.settings;
import org.xmlpull.v1.XmlPullParserException;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ListFragment;
import android.app.admin.DeviceAdminInfo;
import android.app.admin.DeviceAdminReceiver;
import android.app.admin.DevicePolicyManager;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.os.UserHandle;
import android.os.UserManager;
import android.util.Log;
import android.util.SparseArray;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.CheckBox;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
 * Settings fragment that lists every device administrator for the current user and all of its
 * associated profiles, grouped under one title row per profile. Active admins render with a
 * checked checkbox; tapping a row launches {@link DeviceAdminAdd} for it (or, for a profile
 * owner of another profile, shows an informational dialog instead).
 */
public class DeviceAdminSettings extends ListFragment {
    static final String TAG = "DeviceAdminSettings";

    private DevicePolicyManager mDPM;
    private UserManager mUm;

    /**
     * Internal collection of device admin info objects for all profiles associated with the current
     * user.
     */
    private final SparseArray<ArrayList<DeviceAdminInfo>>
            mAdminsByProfile = new SparseArray<ArrayList<DeviceAdminInfo>>();

    // Package name of the device owner, or null when there is none (or the value is stale).
    private String mDeviceOwnerPkg;

    // Profile-owner component per profile id; a profile without an owner maps to null.
    private SparseArray<ComponentName> mProfileOwnerComponents = new SparseArray<ComponentName>();

    // Re-renders the list whenever device-policy state changes (admin activated/removed),
    // keeping the checkboxes in sync with the actual policy state.
    private final BroadcastReceiver mBroadcastReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            // Refresh the list, if state change has been received. It could be that checkboxes
            // need to be updated
            if (DevicePolicyManager.ACTION_DEVICE_POLICY_MANAGER_STATE_CHANGED.equals(
                    intent.getAction())) {
                updateList();
            }
        }
    };

    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        // System services are fetched here (rather than onCreate) via the attached activity.
        mDPM = (DevicePolicyManager) getActivity().getSystemService(Context.DEVICE_POLICY_SERVICE);
        mUm = (UserManager) getActivity().getSystemService(Context.USER_SERVICE);
        return inflater.inflate(R.layout.device_admin_settings, container, false);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        Utils.forceCustomPadding(getListView(), true /* additive padding */);
    }

    @Override
    public void onResume() {
        super.onResume();
        // Listen for policy-state changes across all users so managed-profile rows refresh too.
        IntentFilter filter = new IntentFilter();
        filter.addAction(DevicePolicyManager.ACTION_DEVICE_POLICY_MANAGER_STATE_CHANGED);
        getActivity().registerReceiverAsUser(
                mBroadcastReceiver, UserHandle.ALL, filter, null, null);
        // Discard a stale device-owner package that is no longer the actual owner.
        mDeviceOwnerPkg = mDPM.getDeviceOwner();
        if (mDeviceOwnerPkg != null && !mDPM.isDeviceOwner(mDeviceOwnerPkg)) {
            mDeviceOwnerPkg = null;
        }
        // Snapshot the profile-owner component for every profile of the current user.
        mProfileOwnerComponents.clear();
        final List<UserHandle> profiles = mUm.getUserProfiles();
        final int profilesSize = profiles.size();
        for (int i = 0; i < profilesSize; ++i) {
            final int profileId = profiles.get(i).getIdentifier();
            mProfileOwnerComponents.put(profileId, mDPM.getProfileOwnerAsUser(profileId));
        }
        updateList();
    }

    @Override
    public void onPause() {
        // Mirror of the registration in onResume().
        getActivity().unregisterReceiver(mBroadcastReceiver);
        super.onPause();
    }

    /**
     * Update the internal collection of available admins for all profiles associated with the
     * current user, then install a fresh adapter over that data.
     */
    void updateList() {
        mAdminsByProfile.clear();
        final List<UserHandle> profiles = mUm.getUserProfiles();
        final int profilesSize = profiles.size();
        for (int i = 0; i < profilesSize; ++i) {
            final int profileId = profiles.get(i).getIdentifier();
            updateAvailableAdminsForProfile(profileId);
        }
        getListView().setAdapter(new PolicyListAdapter());
    }

    @Override
    public void onListItemClick(ListView l, View v, int position, long id) {
        Object o = l.getAdapter().getItem(position);
        if (!(o instanceof DeviceAdminInfo)) {
            // race conditions may cause this
            return;
        }
        DeviceAdminInfo dpi = (DeviceAdminInfo) o;
        final Activity activity = getActivity();
        final int userId = getUserId(dpi);
        if (userId == UserHandle.myUserId() || !isProfileOwner(dpi)) {
            // Open the activate/deactivate screen for this admin, in its own user.
            Intent intent = new Intent();
            intent.setClass(activity, DeviceAdminAdd.class);
            intent.putExtra(DevicePolicyManager.EXTRA_DEVICE_ADMIN, dpi.getComponent());
            activity.startActivityAsUser(intent, new UserHandle(userId));
        } else {
            // A profile owner of another profile cannot be managed here; explain why instead.
            AlertDialog.Builder builder = new AlertDialog.Builder(activity);
            builder.setMessage(getString(R.string.managed_profile_device_admin_info,
                    dpi.loadLabel(activity.getPackageManager())));
            builder.setPositiveButton(android.R.string.ok, null);
            builder.create().show();
        }
    }

    /** View cache for one device-admin row; stored as the row view's tag. */
    static class ViewHolder {
        ImageView icon;
        TextView name;
        CheckBox checkbox;
        TextView description;
    }

    /**
     * Adapter that flattens {@link #mAdminsByProfile} into a single list: for each profile,
     * one String title item followed by that profile's DeviceAdminInfo entries.
     */
    class PolicyListAdapter extends BaseAdapter {
        final LayoutInflater mInflater;

        PolicyListAdapter() {
            mInflater = (LayoutInflater)
                    getActivity().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        }

        @Override
        public boolean hasStableIds() {
            return false;
        }

        @Override
        public int getCount() {
            int adminCount = 0;
            final int profileCount = mAdminsByProfile.size();
            for (int i = 0; i < profileCount; ++i) {
                adminCount += mAdminsByProfile.valueAt(i).size();
            }
            // Add 'profileCount' for title items.
            return adminCount + profileCount;
        }

        /**
         * The item for the given position in the list.
         *
         * @return a String object for title items and a DeviceAdminInfo object for actual device
         * admins.
         */
        @Override
        public Object getItem(int position) {
            if (position < 0) {
                throw new ArrayIndexOutOfBoundsException();
            }
            // The position of the item in the list of admins.
            // We start from the given position and discount the length of the upper lists until we
            // get the one for the right profile
            int adminPosition = position;
            final int n = mAdminsByProfile.size();
            int i = 0;
            for (; i < n; ++i) {
                // The elements in that section including the title item (that's why adding one).
                final int listSize = mAdminsByProfile.valueAt(i).size() + 1;
                if (adminPosition < listSize) {
                    break;
                }
                adminPosition -= listSize;
            }
            if (i == n) {
                throw new ArrayIndexOutOfBoundsException();
            }
            // If adminPosition == 0 the position falls on the section's title item.
            if (adminPosition == 0) {
                Resources res = getActivity().getResources();
                if (mAdminsByProfile.keyAt(i) == UserHandle.myUserId()) {
                    return res.getString(R.string.personal_device_admin_title);
                } else {
                    return res.getString(R.string.managed_device_admin_title);
                }
            } else {
                // Subtracting one for the title.
                return mAdminsByProfile.valueAt(i).get(adminPosition - 1);
            }
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        @Override
        public boolean areAllItemsEnabled() {
            return false;
        }

        /**
         * See {@link #getItemViewType} for the view types.
         */
        @Override
        public int getViewTypeCount() {
            return 2;
        }

        /**
         * Returns 1 for title items and 0 for anything else.
         */
        @Override
        public int getItemViewType(int position) {
            Object o = getItem(position);
            return (o instanceof String) ? 1 : 0;
        }

        @Override
        public boolean isEnabled(int position) {
            Object o = getItem(position);
            return isEnabled(o);
        }

        // A row is clickable unless it is a title, an owner admin of the current user that is
        // active, or an admin that is currently being removed.
        private boolean isEnabled(Object o) {
            if (!(o instanceof DeviceAdminInfo)) {
                // Title item
                return false;
            }
            DeviceAdminInfo info = (DeviceAdminInfo) o;
            if (isActiveAdmin(info) && getUserId(info) == UserHandle.myUserId()
                    && (isDeviceOwner(info) || isProfileOwner(info))) {
                return false;
            }
            // Disable item if admin is being removed
            if (isRemovingAdmin(info)) {
                return false;
            }
            return true;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            Object o = getItem(position);
            if (o instanceof DeviceAdminInfo) {
                if (convertView == null) {
                    convertView = newDeviceAdminView(parent);
                }
                bindView(convertView, (DeviceAdminInfo) o);
            } else {
                if (convertView == null) {
                    convertView = newTitleView(parent);
                }
                final TextView title = (TextView) convertView.findViewById(android.R.id.title);
                title.setText((String)o);
            }
            return convertView;
        }

        // Inflates a device-admin row and caches its child views in a ViewHolder tag.
        private View newDeviceAdminView(ViewGroup parent) {
            View v = mInflater.inflate(R.layout.device_admin_item, parent, false);
            ViewHolder h = new ViewHolder();
            h.icon = (ImageView)v.findViewById(R.id.icon);
            h.name = (TextView)v.findViewById(R.id.name);
            h.checkbox = (CheckBox)v.findViewById(R.id.checkbox);
            h.description = (TextView)v.findViewById(R.id.description);
            v.setTag(h);
            return v;
        }

        // Inflates a title row using the platform's preference-category layout so the
        // headers match other settings screens.
        private View newTitleView(ViewGroup parent) {
            final TypedArray a = mInflater.getContext().obtainStyledAttributes(null,
                    com.android.internal.R.styleable.Preference,
                    com.android.internal.R.attr.preferenceCategoryStyle, 0);
            final int resId = a.getResourceId(com.android.internal.R.styleable.Preference_layout,
                    0);
            return mInflater.inflate(resId, parent, false);
        }

        // Fills a recycled row with the admin's icon, label, active state and description.
        private void bindView(View view, DeviceAdminInfo item) {
            final Activity activity = getActivity();
            ViewHolder vh = (ViewHolder) view.getTag();
            Drawable activityIcon = item.loadIcon(activity.getPackageManager());
            // Badge the icon so admins from managed profiles are visually distinguishable.
            Drawable badgedIcon = activity.getPackageManager().getUserBadgedIcon(
                    activityIcon, new UserHandle(getUserId(item)));
            vh.icon.setImageDrawable(badgedIcon);
            vh.name.setText(item.loadLabel(activity.getPackageManager()));
            vh.checkbox.setChecked(isActiveAdmin(item));
            final boolean enabled = isEnabled(item);
            try {
                vh.description.setText(item.loadDescription(activity.getPackageManager()));
            } catch (Resources.NotFoundException e) {
                // Best effort: an admin without a description string simply shows none.
            }
            vh.checkbox.setEnabled(enabled);
            vh.name.setEnabled(enabled);
            vh.name.setSelected(true);
            vh.description.setEnabled(enabled);
            vh.icon.setEnabled(enabled);
        }
    }

    /** Whether the given admin is the device owner (current user only). */
    private boolean isDeviceOwner(DeviceAdminInfo item) {
        return getUserId(item) == UserHandle.myUserId()
                && item.getPackageName().equals(mDeviceOwnerPkg);
    }

    /** Whether the given admin is the profile owner of the profile it belongs to. */
    private boolean isProfileOwner(DeviceAdminInfo item) {
        ComponentName profileOwner = mProfileOwnerComponents.get(getUserId(item));
        return item.getComponent().equals(profileOwner);
    }

    /** Whether the given admin is currently active in its own profile. */
    private boolean isActiveAdmin(DeviceAdminInfo item) {
        return mDPM.isAdminActiveAsUser(item.getComponent(), getUserId(item));
    }

    /** Whether the given admin is in the process of being removed. */
    private boolean isRemovingAdmin(DeviceAdminInfo item) {
        return mDPM.isRemovingAdmin(item.getComponent(), getUserId(item));
    }

    /**
     * Add device admins to the internal collection that belong to a profile.
     *
     * @param profileId the profile identifier.
     */
    private void updateAvailableAdminsForProfile(final int profileId) {
        // We are adding the union of two sets 'A' and 'B' of device admins to mAvailableAdmins.
        // Set 'A' is the set of active admins for the profile whereas set 'B' is the set of
        // listeners to DeviceAdminReceiver.ACTION_DEVICE_ADMIN_ENABLED for the profile.
        // Add all of set 'A' to mAvailableAdmins.
        List<ComponentName> activeAdminsListForProfile = mDPM.getActiveAdminsAsUser(profileId);
        addActiveAdminsForProfile(activeAdminsListForProfile, profileId);
        // Collect set 'B' and add B-A to mAvailableAdmins.
        addDeviceAdminBroadcastReceiversForProfile(activeAdminsListForProfile, profileId);
    }

    /**
     * Add a profile's device admins that are receivers of
     * {@code DeviceAdminReceiver.ACTION_DEVICE_ADMIN_ENABLED} to the internal collection if they
     * haven't been added yet.
     *
     * @param alreadyAddedComponents the set of active admin component names. Receivers of
     * {@code DeviceAdminReceiver.ACTION_DEVICE_ADMIN_ENABLED} whose component is in this
     * set are not added to the internal collection again.
     * @param profileId the identifier of the profile
     */
    private void addDeviceAdminBroadcastReceiversForProfile(
            Collection<ComponentName> alreadyAddedComponents, final int profileId) {
        final PackageManager pm = getActivity().getPackageManager();
        List<ResolveInfo> enabledForProfile = pm.queryBroadcastReceivers(
                new Intent(DeviceAdminReceiver.ACTION_DEVICE_ADMIN_ENABLED),
                PackageManager.GET_META_DATA | PackageManager.GET_DISABLED_UNTIL_USED_COMPONENTS,
                profileId);
        if (enabledForProfile == null) {
            enabledForProfile = Collections.emptyList();
        }
        final int n = enabledForProfile.size();
        // Append to the list that addActiveAdminsForProfile may already have stored.
        ArrayList<DeviceAdminInfo> deviceAdmins = mAdminsByProfile.get(profileId);
        if (deviceAdmins == null) {
            deviceAdmins = new ArrayList<DeviceAdminInfo>(n);
        }
        for (int i = 0; i < n; ++i) {
            ResolveInfo resolveInfo = enabledForProfile.get(i);
            ComponentName riComponentName =
                    new ComponentName(resolveInfo.activityInfo.packageName,
                            resolveInfo.activityInfo.name);
            if (alreadyAddedComponents == null
                    || !alreadyAddedComponents.contains(riComponentName)) {
                DeviceAdminInfo deviceAdminInfo = createDeviceAdminInfo(resolveInfo);
                // add only visible ones (note: active admins are added regardless of visibility)
                if (deviceAdminInfo != null && deviceAdminInfo.isVisible()) {
                    deviceAdmins.add(deviceAdminInfo);
                }
            }
        }
        if (!deviceAdmins.isEmpty()) {
            mAdminsByProfile.put(profileId, deviceAdmins);
        }
    }

    /**
     * Add a {@link DeviceAdminInfo} object to the internal collection of available admins for all
     * active admin components associated with a profile.
     *
     * @param profileId a profile identifier.
     */
    private void addActiveAdminsForProfile(final List<ComponentName> activeAdmins,
            final int profileId) {
        if (activeAdmins != null) {
            final PackageManager packageManager = getActivity().getPackageManager();
            final int n = activeAdmins.size();
            ArrayList<DeviceAdminInfo> deviceAdmins = new ArrayList<DeviceAdminInfo>(n);
            for (int i = 0; i < n; ++i) {
                ComponentName activeAdmin = activeAdmins.get(i);
                // Resolve the admin's receiver in the given profile to obtain its metadata.
                List<ResolveInfo> resolved = packageManager.queryBroadcastReceivers(
                        new Intent().setComponent(activeAdmin), PackageManager.GET_META_DATA
                                | PackageManager.GET_DISABLED_UNTIL_USED_COMPONENTS, profileId);
                if (resolved != null) {
                    final int resolvedMax = resolved.size();
                    for (int j = 0; j < resolvedMax; ++j) {
                        DeviceAdminInfo deviceAdminInfo = createDeviceAdminInfo(resolved.get(j));
                        if (deviceAdminInfo != null) {
                            deviceAdmins.add(deviceAdminInfo);
                        }
                    }
                }
            }
            if (!deviceAdmins.isEmpty()) {
                mAdminsByProfile.put(profileId, deviceAdmins);
            }
        }
    }

    /**
     * Creates a device admin info object for the resolved intent that points to the component of
     * the device admin.
     *
     * @param resolved resolved intent.
     * @return new {@link DeviceAdminInfo} object or null if there was an error.
     */
    private DeviceAdminInfo createDeviceAdminInfo(ResolveInfo resolved) {
        try {
            return new DeviceAdminInfo(getActivity(), resolved);
        } catch (XmlPullParserException e) {
            Log.w(TAG, "Skipping " + resolved.activityInfo, e);
        } catch (IOException e) {
            Log.w(TAG, "Skipping " + resolved.activityInfo, e);
        }
        return null;
    }

    /**
     * Extracts the user id from a device admin info object.
     * @param adminInfo the device administrator info.
     * @return identifier of the user associated with the device admin.
     */
    private int getUserId(DeviceAdminInfo adminInfo) {
        return UserHandle.getUserId(adminInfo.getActivityInfo().applicationInfo.uid);
    }
}
| |
package org.hisp.dhis.dxf2.events.relationship;
/*
* Copyright (c) 2004-2018, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;
import org.hisp.dhis.common.IllegalQueryException;
import org.hisp.dhis.commons.collection.ListUtils;
import org.hisp.dhis.dbms.DbmsManager;
import org.hisp.dhis.dxf2.common.ImportOptions;
import org.hisp.dhis.dxf2.events.RelationshipParams;
import org.hisp.dhis.dxf2.events.TrackedEntityInstanceParams;
import org.hisp.dhis.dxf2.events.TrackerAccessManager;
import org.hisp.dhis.dxf2.events.enrollment.Enrollment;
import org.hisp.dhis.dxf2.events.enrollment.EnrollmentService;
import org.hisp.dhis.dxf2.events.event.Event;
import org.hisp.dhis.dxf2.events.event.EventService;
import org.hisp.dhis.dxf2.events.trackedentity.Relationship;
import org.hisp.dhis.dxf2.events.trackedentity.TrackedEntityInstanceService;
import org.hisp.dhis.dxf2.importsummary.ImportConflict;
import org.hisp.dhis.dxf2.importsummary.ImportStatus;
import org.hisp.dhis.dxf2.importsummary.ImportSummaries;
import org.hisp.dhis.dxf2.importsummary.ImportSummary;
import org.hisp.dhis.program.ProgramInstance;
import org.hisp.dhis.program.ProgramStageInstance;
import org.hisp.dhis.query.Query;
import org.hisp.dhis.query.QueryService;
import org.hisp.dhis.query.Restrictions;
import org.hisp.dhis.relationship.RelationshipConstraint;
import org.hisp.dhis.relationship.RelationshipEntity;
import org.hisp.dhis.relationship.RelationshipItem;
import org.hisp.dhis.relationship.RelationshipType;
import org.hisp.dhis.schema.SchemaService;
import org.hisp.dhis.system.util.DateUtils;
import org.hisp.dhis.trackedentity.TrackedEntityInstance;
import org.hisp.dhis.user.CurrentUserService;
import org.hisp.dhis.user.User;
import org.hisp.dhis.user.UserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.hisp.dhis.relationship.RelationshipEntity.*;
@Transactional
public abstract class AbstractRelationshipService
implements RelationshipService
{
@Autowired
protected DbmsManager dbmsManager;
@Autowired
private CurrentUserService currentUserService;
@Autowired
private SchemaService schemaService;
@Autowired
private QueryService queryService;
@Autowired
private TrackerAccessManager trackerAccessManager;
@Autowired
private org.hisp.dhis.relationship.RelationshipService relationshipService;
@Autowired
private TrackedEntityInstanceService trackedEntityInstanceService;
@Autowired
private EnrollmentService enrollmentService;
@Autowired
private EventService eventService;
@Autowired
private org.hisp.dhis.trackedentity.TrackedEntityInstanceService teiDaoService;
@Autowired
private UserService userService;
private HashMap<String, RelationshipType> relationshipTypeCache = new HashMap<>();
private HashMap<String, TrackedEntityInstance> trackedEntityInstanceCache = new HashMap<>();
private HashMap<String, ProgramInstance> programInstanceCache = new HashMap<>();
private HashMap<String, ProgramStageInstance> programStageInstanceCache = new HashMap<>();
@Override
public List<Relationship> getRelationshipsByTrackedEntityInstance(
    TrackedEntityInstance tei, boolean skipAccessValidation )
{
    // Fetch the DAO-level relationships of the TEI and convert each into its DTO form.
    User currentUser = currentUserService.getCurrentUser();

    return relationshipService
        .getRelationshipsByTrackedEntityInstance( tei, skipAccessValidation )
        .stream()
        .map( mapDaoToDto( currentUser ) )
        .collect( Collectors.toList() );
}
@Override
public List<Relationship> getRelationshipsByProgramInstance( ProgramInstance pi, boolean skipAccessValidation )
{
    // Fetch the DAO-level relationships of the enrollment and convert each into its DTO form.
    User currentUser = currentUserService.getCurrentUser();

    return relationshipService
        .getRelationshipsByProgramInstance( pi, skipAccessValidation )
        .stream()
        .map( mapDaoToDto( currentUser ) )
        .collect( Collectors.toList() );
}
@Override
public List<Relationship> getRelationshipsByProgramStageInstance( ProgramStageInstance psi,
    boolean skipAccessValidation )
{
    // Fetch the DAO-level relationships of the event and convert each into its DTO form.
    User currentUser = currentUserService.getCurrentUser();

    return relationshipService
        .getRelationshipsByProgramStageInstance( psi, skipAccessValidation )
        .stream()
        .map( mapDaoToDto( currentUser ) )
        .collect( Collectors.toList() );
}
@Override
public ImportSummaries addRelationships( List<Relationship> relationships, ImportOptions importOptions )
{
    // Import in fixed-size batches, clearing the persistence session after each batch
    // to keep memory usage bounded.
    importOptions = updateImportOptions( importOptions );
    ImportSummaries summaries = new ImportSummaries();

    for ( List<Relationship> batch : Lists.partition( relationships, FLUSH_FREQUENCY ) )
    {
        reloadUser( importOptions );
        prepareCaches( batch, importOptions.getUser() );

        for ( Relationship rel : batch )
        {
            summaries.addImportSummary( addRelationship( rel, importOptions ) );
        }

        clearSession();
    }

    return summaries;
}
@Override
public ImportSummary addRelationship( Relationship relationship, ImportOptions importOptions )
{
    ImportSummary summary = new ImportSummary( relationship.getRelationship() );
    importOptions = updateImportOptions( importOptions );

    // Make sure the lookup caches are populated before validating.
    if ( !cacheExists() )
    {
        prepareCaches( Lists.newArrayList( relationship ), importOptions.getUser() );
    }

    // Reject a relationship whose uid already exists.
    if ( relationshipService.relationshipExists( relationship.getRelationship() ) )
    {
        String message = "Relationship " + relationship.getRelationship() +
            " already exists";
        return new ImportSummary( ImportStatus.ERROR, message )
            .setReference( relationship.getRelationship() )
            .incrementIgnored();
    }

    Set<ImportConflict> conflicts = new HashSet<>();
    conflicts.addAll( checkRelationship( relationship, importOptions ) );

    if ( !conflicts.isEmpty() )
    {
        summary.setConflicts( conflicts );
        summary.setStatus( ImportStatus.ERROR );
        summary.getImportCount().incrementIgnored();
        return summary;
    }

    org.hisp.dhis.relationship.Relationship daoRelationship = createDAORelationship(
        relationship, importOptions, summary );

    if ( daoRelationship == null )
    {
        // NOTE(review): presumably createDAORelationship recorded the failure on the
        // summary already — confirm against its implementation.
        return summary;
    }

    // The user must have write access to both sides of the relationship.
    List<String> accessErrors = trackerAccessManager.canWrite( importOptions.getUser(), daoRelationship );

    if ( !accessErrors.isEmpty() )
    {
        return new ImportSummary( ImportStatus.ERROR, accessErrors.toString() )
            .incrementIgnored();
    }

    relationshipService.addRelationship( daoRelationship );

    summary.setReference( daoRelationship.getUid() );
    summary.getImportCount().incrementImported();

    return summary;
}
@Override
public ImportSummaries updateRelationships( List<Relationship> relationships, ImportOptions importOptions )
{
    // Update in fixed-size batches, clearing the persistence session after each batch.
    importOptions = updateImportOptions( importOptions );
    ImportSummaries summaries = new ImportSummaries();

    for ( List<Relationship> batch : Lists.partition( relationships, FLUSH_FREQUENCY ) )
    {
        reloadUser( importOptions );
        prepareCaches( batch, importOptions.getUser() );

        for ( Relationship rel : batch )
        {
            summaries.addImportSummary( updateRelationship( rel, importOptions ) );
        }

        clearSession();
    }

    return summaries;
}
/**
 * Updates an existing relationship: validates the DTO, checks write access and replaces the
 * stored relationship's type and both sides with the newly supplied values.
 *
 * @param relationship the DTO carrying the new state; must reference an existing uid.
 * @param importOptions import options; defaults are applied when null.
 * @return an import summary describing the outcome.
 */
@Override
public ImportSummary updateRelationship( Relationship relationship, ImportOptions importOptions )
{
    ImportSummary importSummary = new ImportSummary( relationship.getRelationship() );
    importOptions = updateImportOptions( importOptions );
    Set<ImportConflict> importConflicts = new HashSet<>();

    // Set up cache if not set already
    if ( !cacheExists() )
    {
        prepareCaches( Lists.newArrayList( relationship ), importOptions.getUser() );
    }

    org.hisp.dhis.relationship.Relationship daoRelationship = relationshipService
        .getRelationship( relationship.getRelationship() );

    importConflicts.addAll( checkRelationship( relationship, importOptions ) );

    if ( daoRelationship == null )
    {
        String message = "Relationship '" + relationship.getRelationship() + "' does not exist";
        importConflicts.add( new ImportConflict( "Relationship", message ) );
        importSummary.setStatus( ImportStatus.ERROR );
        importSummary.getImportCount().incrementIgnored();
        importSummary.setConflicts( importConflicts );
        return importSummary;
    }

    // The user must have write access to both sides of the relationship.
    List<String> errors = trackerAccessManager.canWrite( importOptions.getUser(), daoRelationship );

    if ( !errors.isEmpty() || !importConflicts.isEmpty() )
    {
        importSummary.setStatus( ImportStatus.ERROR );
        importSummary.getImportCount().incrementIgnored();

        if ( !errors.isEmpty() )
        {
            importSummary.setDescription( errors.toString() );
        }

        importSummary.setConflicts( importConflicts );
        return importSummary;
    }

    org.hisp.dhis.relationship.Relationship _relationship = createDAORelationship( relationship, importOptions,
        importSummary );

    // Guard against a failed conversion, mirroring addRelationship(); previously a null
    // result here caused a NullPointerException on the dereferences below.
    if ( _relationship == null )
    {
        return importSummary;
    }

    daoRelationship.setRelationshipType( _relationship.getRelationshipType() );
    daoRelationship.setTo( _relationship.getTo() );
    daoRelationship.setFrom( _relationship.getFrom() );

    relationshipService.updateRelationship( daoRelationship );

    importSummary.setReference( daoRelationship.getUid() );
    importSummary.getImportCount().incrementUpdated();

    return importSummary;
}
@Override
public ImportSummary deleteRelationship( String uid )
{
    // Delegate to the full overload with default (null) import options.
    ImportOptions defaultOptions = null;
    return deleteRelationship( uid, defaultOptions );
}
@Override
public ImportSummaries deleteRelationships( List<Relationship> relationships, ImportOptions importOptions )
{
    importOptions = updateImportOptions( importOptions );
    ImportSummaries summaries = new ImportSummaries();

    int processed = 0;

    for ( Relationship relationship : relationships )
    {
        summaries.addImportSummary( deleteRelationship( relationship.getRelationship(), importOptions ) );

        // Flush every FLUSH_FREQUENCY deletions (note: the counter starts at zero, so the
        // session is also cleared right after the first deletion — preserved as-is).
        if ( processed % FLUSH_FREQUENCY == 0 )
        {
            clearSession();
        }

        processed++;
    }

    return summaries;
}
@Override
public Relationship getRelationshipByUid( String id )
{
    // Resolve the DAO object first; an unknown uid simply yields null.
    org.hisp.dhis.relationship.Relationship dao = relationshipService.getRelationship( id );

    return dao == null ? null : getRelationship( dao, currentUserService.getCurrentUser() );
}
@Override
@Transactional
public Relationship getRelationship( org.hisp.dhis.relationship.Relationship dao, RelationshipParams params,
    User user )
{
    // Enforce read access before exposing any relationship data.
    List<String> accessErrors = trackerAccessManager.canRead( user, dao );

    if ( !accessErrors.isEmpty() )
    {
        throw new IllegalQueryException( accessErrors.toString() );
    }

    Relationship dto = new Relationship();
    dto.setRelationship( dao.getUid() );
    dto.setRelationshipType( dao.getRelationshipType().getUid() );
    dto.setRelationshipName( dao.getRelationshipType().getName() );

    // Each side is reduced to a bare uid when the caller did not request its details.
    dto.setFrom( includeRelationshipItem( dao.getFrom(), !params.isIncludeFrom() ) );
    dto.setTo( includeRelationshipItem( dao.getTo(), !params.isIncludeTo() ) );

    dto.setCreated( DateUtils.getIso8601NoTz( dao.getCreated() ) );
    dto.setLastUpdated( DateUtils.getIso8601NoTz( dao.getLastUpdated() ) );

    return dto;
}
private Relationship getRelationship( org.hisp.dhis.relationship.Relationship dao, User user )
{
    // Convenience overload: include full detail for both sides of the relationship.
    RelationshipParams params = RelationshipParams.TRUE;
    return getRelationship( dao, params, user );
}
private org.hisp.dhis.dxf2.events.trackedentity.RelationshipItem includeRelationshipItem( RelationshipItem dao,
    boolean uidOnly )
{
    // Nested entities are fetched without any extra detail parameters.
    TrackedEntityInstanceParams teiParams = TrackedEntityInstanceParams.FALSE;
    org.hisp.dhis.dxf2.events.trackedentity.RelationshipItem item =
        new org.hisp.dhis.dxf2.events.trackedentity.RelationshipItem();

    // Exactly one of TEI / enrollment / event is expected to be set on the DAO item.
    if ( dao.getTrackedEntityInstance() != null )
    {
        String uid = dao.getTrackedEntityInstance().getUid();
        org.hisp.dhis.dxf2.events.trackedentity.TrackedEntityInstance tei;

        if ( uidOnly )
        {
            // Strip everything but the uid.
            tei = new org.hisp.dhis.dxf2.events.trackedentity.TrackedEntityInstance();
            tei.clear();
            tei.setTrackedEntityInstance( uid );
        }
        else
        {
            tei = trackedEntityInstanceService
                .getTrackedEntityInstance( dao.getTrackedEntityInstance(), teiParams );
        }

        item.setTrackedEntityInstance( tei );
    }
    else if ( dao.getProgramInstance() != null )
    {
        String uid = dao.getProgramInstance().getUid();
        Enrollment enrollment;

        if ( uidOnly )
        {
            // Strip everything but the uid.
            enrollment = new Enrollment();
            enrollment.clear();
            enrollment.setEnrollment( uid );
        }
        else
        {
            enrollment = enrollmentService.getEnrollment( dao.getProgramInstance(), teiParams );
        }

        item.setEnrollment( enrollment );
    }
    else if ( dao.getProgramStageInstance() != null )
    {
        String uid = dao.getProgramStageInstance().getUid();
        Event event;

        if ( uidOnly )
        {
            // Strip everything but the uid.
            event = new Event();
            event.clear();
            event.setEvent( uid );
        }
        else
        {
            event = eventService.getEvent( dao.getProgramStageInstance() );
            // Avoid cyclic nesting of relationships inside relationship items.
            event.setRelationships( null );
        }

        item.setEvent( event );
    }

    return item;
}
/**
 * Deletes the relationship with the given uid, subject to write-access checks.
 *
 * @param uid the relationship uid; a null or empty value is reported as a warning.
 * @param importOptions import options; defaults are applied when null.
 * @return an import summary describing the outcome.
 */
private ImportSummary deleteRelationship( String uid, ImportOptions importOptions )
{
    ImportSummary importSummary = new ImportSummary();
    importOptions = updateImportOptions( importOptions );

    // Null-safe emptiness check: callers pass relationship.getRelationship(), which can be
    // null as well as empty. The previous uid.isEmpty() call threw a NullPointerException
    // for null uids.
    if ( StringUtils.isEmpty( uid ) )
    {
        importSummary.setStatus( ImportStatus.WARNING );
        importSummary.setDescription( "Missing required property 'relationship'" );
        return importSummary.incrementIgnored();
    }

    org.hisp.dhis.relationship.Relationship daoRelationship = relationshipService.getRelationship( uid );

    if ( daoRelationship != null )
    {
        importSummary.setReference( uid );

        // The user must have write access to both sides of the relationship.
        List<String> errors = trackerAccessManager.canWrite( importOptions.getUser(), daoRelationship );

        if ( !errors.isEmpty() )
        {
            importSummary.setDescription( errors.toString() );
            importSummary.setStatus( ImportStatus.ERROR );
            importSummary.getImportCount().incrementIgnored();
            return importSummary;
        }

        relationshipService.deleteRelationship( daoRelationship );

        importSummary.setStatus( ImportStatus.SUCCESS );
        importSummary.setDescription( "Deletion of relationship " + uid + " was successful" );
        return importSummary.incrementDeleted();
    }
    else
    {
        importSummary.setStatus( ImportStatus.WARNING );
        importSummary
            .setDescription( "Relationship " + uid + " cannot be deleted as it is not present in the system" );
        return importSummary.incrementIgnored();
    }
}
/**
 * Checks the relationship for any conflicts, like missing or invalid references.
 *
 * @param relationship the relationship to validate.
 * @param importOptions the current import options.
 * @return the list of conflicts found; empty when the relationship is valid.
 */
private List<ImportConflict> checkRelationship( Relationship relationship, ImportOptions importOptions )
{
    List<ImportConflict> conflicts = new ArrayList<>();

    RelationshipType relationshipType = null;

    if ( StringUtils.isEmpty( relationship.getRelationshipType() ) )
    {
        conflicts
            .add( new ImportConflict( relationship.getRelationship(), "Missing property 'relationshipType'" ) );
    }
    else
    {
        relationshipType = relationshipTypeCache.get( relationship.getRelationshipType() );
    }

    if ( relationship.getFrom() == null || getUidOfRelationshipItem( relationship.getFrom() ).isEmpty() )
    {
        conflicts.add( new ImportConflict( relationship.getRelationship(), "Missing property 'from'" ) );
    }

    if ( relationship.getTo() == null || getUidOfRelationshipItem( relationship.getTo() ).isEmpty() )
    {
        conflicts.add( new ImportConflict( relationship.getRelationship(), "Missing property 'to'" ) );
    }

    // Return before dereferencing 'from'/'to'. The previous version compared
    // relationship.getFrom().equals( relationship.getTo() ) before this early return,
    // which threw a NullPointerException whenever either side was missing.
    if ( !conflicts.isEmpty() )
    {
        return conflicts;
    }

    if ( relationship.getFrom().equals( relationship.getTo() ) )
    {
        conflicts.add( new ImportConflict( relationship.getRelationship(),
            "Self-referencing relationships are not allowed." ) );
        return conflicts;
    }

    if ( relationshipType == null )
    {
        conflicts.add( new ImportConflict( relationship.getRelationship(),
            "relationshipType '" + relationship.getRelationshipType() + "' not found." ) );
        return conflicts;
    }

    // Validate both sides against the constraints declared on the relationship type.
    conflicts.addAll(
        getRelationshipConstraintConflicts( relationshipType.getFromConstraint(), relationship.getFrom(),
            relationship.getRelationship() ) );
    conflicts.addAll( getRelationshipConstraintConflicts( relationshipType.getToConstraint(), relationship.getTo(),
        relationship.getRelationship() ) );

    return conflicts;
}
/**
 * Finds and returns any conflicts between relationship and relationship type
 *
 * @param constraint the constraint to check
 * @param relationshipItem the relationshipItem to check
 * @param relationshipUid the uid of the relationship
 * @return a list of conflicts
 */
private List<ImportConflict> getRelationshipConstraintConflicts( RelationshipConstraint constraint,
    org.hisp.dhis.dxf2.events.trackedentity.RelationshipItem relationshipItem, String relationshipUid )
{
    List<ImportConflict> conflicts = new ArrayList<>();

    String uid = getUidOfRelationshipItem( relationshipItem );
    RelationshipEntity constrainedEntity = constraint.getRelationshipEntity();

    if ( TRACKED_ENTITY_INSTANCE.equals( constrainedEntity ) )
    {
        TrackedEntityInstance tei = trackedEntityInstanceCache.get( uid );

        if ( tei == null )
        {
            conflicts.add( new ImportConflict( relationshipUid,
                "TrackedEntityInstance '" + uid + "' not found." ) );
            return conflicts;
        }

        if ( !tei.getTrackedEntityType().equals( constraint.getTrackedEntityType() ) )
        {
            conflicts.add( new ImportConflict( relationshipUid,
                "TrackedEntityInstance '" + uid + "' has invalid TrackedEntityType." ) );
        }
    }
    else if ( PROGRAM_INSTANCE.equals( constrainedEntity ) )
    {
        ProgramInstance pi = programInstanceCache.get( uid );

        if ( pi == null )
        {
            conflicts.add( new ImportConflict( relationshipUid,
                "ProgramInstance '" + uid + "' not found." ) );
            return conflicts;
        }

        if ( !pi.getProgram().equals( constraint.getProgram() ) )
        {
            conflicts.add( new ImportConflict( relationshipUid,
                "ProgramInstance '" + uid + "' has invalid Program." ) );
        }
    }
    else if ( PROGRAM_STAGE_INSTANCE.equals( constrainedEntity ) )
    {
        ProgramStageInstance psi = programStageInstanceCache.get( uid );

        if ( psi == null )
        {
            conflicts.add( new ImportConflict( relationshipUid,
                "ProgramStageInstance '" + uid + "' not found." ) );
            return conflicts;
        }

        // The program check takes precedence; at most one of the two
        // conflicts below is reported, mirroring the constraint semantics.
        if ( constraint.getProgram() != null
            && !psi.getProgramStage().getProgram().equals( constraint.getProgram() ) )
        {
            conflicts.add( new ImportConflict( relationshipUid,
                "ProgramStageInstance '" + uid + "' has invalid Program." ) );
        }
        else if ( constraint.getProgramStage() != null
            && !psi.getProgramStage().equals( constraint.getProgramStage() ) )
        {
            conflicts.add( new ImportConflict( relationshipUid,
                "ProgramStageInstance '" + uid + "' has invalid ProgramStage." ) );
        }
    }

    return conflicts;
}
/**
 * Extracts the uid of whichever reference is set on the relationship item,
 * checked in order: tracked entity instance, enrollment, event.
 *
 * @param relationshipItem the item to read the uid from
 * @return the uid of the referenced entity, or the empty string if none is set
 */
private String getUidOfRelationshipItem( org.hisp.dhis.dxf2.events.trackedentity.RelationshipItem relationshipItem )
{
    String uid = "";

    if ( relationshipItem.getTrackedEntityInstance() != null )
    {
        uid = relationshipItem.getTrackedEntityInstance().getTrackedEntityInstance();
    }
    else if ( relationshipItem.getEnrollment() != null )
    {
        uid = relationshipItem.getEnrollment().getEnrollment();
    }
    else if ( relationshipItem.getEvent() != null )
    {
        uid = relationshipItem.getEvent().getEvent();
    }

    return uid;
}
/**
 * Builds a persistence-layer relationship from the dxf2 relationship,
 * resolving both sides from the import caches according to the
 * relationship type's constraints.
 *
 * @param relationship the dxf2 relationship to convert
 * @param importOptions the import options (currently unused; kept for interface stability)
 * @param importSummary the summary being built (currently unused; kept for interface stability)
 * @return the populated DAO relationship
 */
protected org.hisp.dhis.relationship.Relationship createDAORelationship( Relationship relationship,
    ImportOptions importOptions, ImportSummary importSummary )
{
    RelationshipType relationshipType = relationshipTypeCache.get( relationship.getRelationshipType() );
    org.hisp.dhis.relationship.Relationship daoRelationship = new org.hisp.dhis.relationship.Relationship();

    daoRelationship.setRelationshipType( relationshipType );

    if ( relationship.getRelationship() != null )
    {
        daoRelationship.setUid( relationship.getRelationship() );
    }

    // The from/to resolution logic is identical apart from the constraint and
    // item used, so it is shared in a single helper.
    daoRelationship.setFrom( createDAORelationshipItem( relationshipType.getFromConstraint(),
        relationship.getFrom() ) );
    daoRelationship.setTo( createDAORelationshipItem( relationshipType.getToConstraint(),
        relationship.getTo() ) );

    return daoRelationship;
}

/**
 * Resolves one side of a relationship into a DAO RelationshipItem, looking
 * up the referenced entity in the cache matching the constraint's entity
 * kind. Returns null when the constraint's entity matches none of the known
 * kinds, mirroring the previous inline behavior.
 *
 * @param constraint the constraint describing which entity kind this side holds
 * @param item the dxf2 relationship item referencing the entity
 * @return the resolved DAO item, or null for an unrecognized entity kind
 */
private RelationshipItem createDAORelationshipItem( RelationshipConstraint constraint,
    org.hisp.dhis.dxf2.events.trackedentity.RelationshipItem item )
{
    RelationshipItem daoItem = null;

    if ( constraint.getRelationshipEntity().equals( TRACKED_ENTITY_INSTANCE ) )
    {
        daoItem = new RelationshipItem();
        daoItem.setTrackedEntityInstance(
            trackedEntityInstanceCache.get( getUidOfRelationshipItem( item ) ) );
    }
    else if ( constraint.getRelationshipEntity().equals( PROGRAM_INSTANCE ) )
    {
        daoItem = new RelationshipItem();
        daoItem.setProgramInstance( programInstanceCache.get( getUidOfRelationshipItem( item ) ) );
    }
    else if ( constraint.getRelationshipEntity().equals( PROGRAM_STAGE_INSTANCE ) )
    {
        daoItem = new RelationshipItem();
        daoItem.setProgramStageInstance(
            programStageInstanceCache.get( getUidOfRelationshipItem( item ) ) );
    }

    return daoItem;
}
/**
 * Indicates whether the import caches have been populated; the relationship
 * type cache (filled by prepareCaches) is used as the marker.
 */
private boolean cacheExists()
{
    boolean cachesArePopulated = !relationshipTypeCache.isEmpty();
    return cachesArePopulated;
}
/**
 * Populates the relationship type, tracked entity instance, program instance
 * and program stage instance caches for the given batch of relationships, so
 * later per-item lookups do not hit the database one by one.
 *
 * @param relationships the relationships about to be imported
 * @param user the importing user; applied to the queries to restrict visibility
 */
private void prepareCaches( List<Relationship> relationships, User user )
{
    // Collects, per entity kind, the uids of all referenced members.
    Map<RelationshipEntity, List<String>> relationshipEntities = new HashMap<>();

    // Group the incoming relationships by their relationship type uid.
    Map<String, List<Relationship>> relationshipTypeMap = relationships.stream()
        .collect( Collectors.groupingBy( Relationship::getRelationshipType ) );

    // Find all the RelationshipTypes first, so we know what the uids refer to
    Query query = Query.from( schemaService.getDynamicSchema( RelationshipType.class ) );
    query.setUser( user );
    query.add( Restrictions.in( "id", relationshipTypeMap.keySet() ) );
    queryService.query( query ).forEach( rt -> relationshipTypeCache.put( rt.getUid(), (RelationshipType) rt ) );

    // Group all uids into their respective RelationshipEntities
    relationshipTypeCache.values().stream().forEach( relationshipType -> {
        // The 'from' and 'to' uids of every relationship of this type.
        List<String> fromUids = relationshipTypeMap.get( relationshipType.getUid() ).stream()
            .map( ( r ) -> getUidOfRelationshipItem( r.getFrom() ) ).collect( Collectors.toList() );
        List<String> toUids = relationshipTypeMap.get( relationshipType.getUid() ).stream()
            .map( ( r ) -> getUidOfRelationshipItem( r.getTo() ) ).collect( Collectors.toList() );

        // Merge existing results with newly found ones.
        relationshipEntities.merge( relationshipType.getFromConstraint().getRelationshipEntity(), fromUids,
            ( old, _new ) -> ListUtils.union( old, _new ) );
        relationshipEntities.merge( relationshipType.getToConstraint().getRelationshipEntity(), toUids,
            ( old, _new ) -> ListUtils.union( old, _new ) );
    } );

    // Find and put all Relationship members in their respective cache
    if ( relationshipEntities.get( TRACKED_ENTITY_INSTANCE ) != null )
    {
        teiDaoService.getTrackedEntityInstancesByUid( relationshipEntities.get( TRACKED_ENTITY_INSTANCE ), user)
            .forEach( tei -> trackedEntityInstanceCache.put( tei.getUid(), tei ) );
    }

    if ( relationshipEntities.get( PROGRAM_INSTANCE ) != null )
    {
        Query piQuery = Query.from( schemaService.getDynamicSchema( ProgramInstance.class ) );
        piQuery.setUser( user );
        piQuery.add( Restrictions.in( "id", relationshipEntities.get( PROGRAM_INSTANCE ) ) );
        queryService.query( piQuery )
            .forEach( pi -> programInstanceCache.put( pi.getUid(), (ProgramInstance) pi ) );
    }

    if ( relationshipEntities.get( PROGRAM_STAGE_INSTANCE ) != null )
    {
        Query psiQuery = Query.from( schemaService.getDynamicSchema( ProgramStageInstance.class ) );
        psiQuery.setUser( user );
        psiQuery.add( Restrictions.in( "id", relationshipEntities.get( PROGRAM_STAGE_INSTANCE ) ) );
        queryService.query( psiQuery )
            .forEach( psi -> programStageInstanceCache.put( psi.getUid(), (ProgramStageInstance) psi ) );
    }
}
/**
 * Clears all import caches and the underlying persistence session, so the
 * next batch starts from a clean state.
 */
private void clearSession()
{
    relationshipTypeCache.clear();
    trackedEntityInstanceCache.clear();
    programInstanceCache.clear();
    programStageInstanceCache.clear();

    // Clear the DBMS session last, after the in-memory caches are reset.
    dbmsManager.clearSession();
}
/**
 * Returns a non-null ImportOptions whose user is resolved, defaulting to the
 * currently authenticated user when none was supplied.
 *
 * @param importOptions the caller-supplied options; may be null
 * @return the same options instance (or a fresh one) with a user set
 */
protected ImportOptions updateImportOptions( ImportOptions importOptions )
{
    ImportOptions options = importOptions == null ? new ImportOptions() : importOptions;

    if ( options.getUser() == null )
    {
        options.setUser( currentUserService.getCurrentUser() );
    }

    return options;
}
/**
 * Re-fetches the user attached to the import options from the user service
 * (presumably to re-attach it to the current session — confirm with callers).
 * No-op when the options or their user are absent.
 *
 * @param importOptions the options whose user should be refreshed; may be null
 */
protected void reloadUser( ImportOptions importOptions )
{
    if ( importOptions != null && importOptions.getUser() != null )
    {
        importOptions.setUser( userService.getUser( importOptions.getUser().getId() ) );
    }
}
/**
 * Returns a mapper converting a persisted relationship into its dxf2
 * representation, resolved for the given user.
 *
 * @param user the user on whose behalf the mapping is performed
 * @return a function from DAO relationship to dxf2 relationship
 */
private Function<org.hisp.dhis.relationship.Relationship, Relationship> mapDaoToDto( User user )
{
    return daoRelationship -> getRelationship( daoRelationship, user );
}
}
| |
/*
* Copyright 2011 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp.regex;
import java.util.Arrays;
/**
* An immutable sparse bitset that deals well where the data is chunky:
* where P(bit[x+1] == bit[x]). E.g. [101,102,103,104,105,1001,1002,1003,1004]
* is chunky.
*
* @author mikesamuel@gmail.com (Mike Samuel)
*/
final class CharRanges {
/**
* A strictly increasing set of bit indices where even members are the
* inclusive starts of ranges, and odd members are the exclusive ends.
* <p>
* E.g., { 1, 5, 6, 10 } represents the set ( 1, 2, 3, 4, 6, 7, 8, 9 ).
*/
private final int[] ranges;
public static final CharRanges EMPTY = new CharRanges(new int[0]);
public static final CharRanges ALL_CODE_UNITS
= new CharRanges(new int[] { 0, 0x10000 });
public static CharRanges inclusive(int start, int end) {
if (start > end) {
throw new IndexOutOfBoundsException(start + " > " + end);
}
return new CharRanges(new int[] { start, end + 1 });
}
/**
* Returns an instance containing all and only the given members.
*/
public static CharRanges withMembers(int... members) {
return new CharRanges(intArrayToRanges(members.clone()));
}
/**
* Returns an instance containing the given ranges.
* @param ranges An even-length ordered sequence of non-overlapping,
* non-contiguous, [inclusive start, exclusive end) ranges.
*/
public static CharRanges withRanges(int... ranges) {
ranges = ranges.clone();
if ((ranges.length & 1) != 0) { throw new IllegalArgumentException(); }
for (int i = 1; i < ranges.length; ++i) {
if (ranges[i] <= ranges[i - 1]) {
throw new IllegalArgumentException(ranges[i] + " > " + ranges[i - 1]);
}
}
return new CharRanges(ranges);
}
private CharRanges(int[] ranges) {
this.ranges = ranges;
}
private static int[] intArrayToRanges(int[] members) {
int nMembers = members.length;
if (nMembers == 0) {
return new int[0];
}
Arrays.sort(members);
// Count the number of runs.
int nRuns = 1;
for (int i = 1; i < nMembers; ++i) {
int current = members[i], last = members[i - 1];
if (current == last) { continue; }
if (current != last + 1) { ++nRuns; }
}
int[] ranges = new int[nRuns * 2];
ranges[0] = members[0];
int k = 0;
for (int i = 1; k + 2 < ranges.length; ++i) {
int current = members[i], last = members[i - 1];
if (current == last) { continue; }
if (current != last + 1) {
ranges[++k] = last + 1; // add 1 to make end exclusive
ranges[++k] = current;
}
}
ranges[++k] = members[nMembers - 1] + 1; // add 1 to make end exclusive
return ranges;
}
public boolean contains(int bit) {
return (Arrays.binarySearch(ranges, bit) & 1) == 0;
// By the contract of Arrays.binarySearch, its result is either the position
// of bit in ranges or it is the bitwise inverse of the position of the
// least element greater than bit.
// Two cases
// case (idx >= 0)
// We ended up exactly on a range boundary.
// Starts are inclusive and ends are both exclusive, so this contains
// bit iff idx is even.
//
// case (idx < 0)
// If the least element greater than bit is an odd element,
// then bit must be greater than a start and less than an end, so
// contained.
//
// If bit is greater than all elements, then idx will be past the end of
// the array, and will be even since ranges.length is even.
//
// Otherwise, bit must be in the space between two runs, so not
// contained.
//
// In all cases, oddness is equivalent to containedness.
// Those two cases lead to
// idx >= 0 ? ((idx & 1) == 0) : ((~idx & 1) == 1)
// But ~n & bit == bit <=> n & bit == 0, so
// idx >= 0 ? ((idx & 1) == 0) : ((~idx & 1) == 1)
// => idx >= 0 ? ((idx & 1) == 0) : ((idx & 1) == 0)
// => (idx & 1) == 0
}
public int minSetBit() {
return ranges.length >= 0 ? ranges[0] : Integer.MIN_VALUE;
}
public boolean isEmpty() {
return ranges.length == 0;
}
public int getNumRanges() { return ranges.length >> 1; }
public int start(int i) { return ranges[i << 1]; }
public int end(int i) { return ranges[(i << 1) | 1]; }
public CharRanges union(CharRanges other) {
// Index of the input ranges
int[] q = this.ranges, r = other.ranges;
// Lengths of the inputs
int m = q.length, n = r.length;
if (m == 0) { return other; }
if (n == 0) { return this; }
// The output array. The length is m+n in the worst case when all the
// ranges in a are disjoint from the ranges in b.
int[] out = new int[m + n];
// Indexes into the various arrays
int i = 0, j = 0, k = 0;
// Since there are three arrays, and indices into them the following
// should never occur in this function:
// (1) q[j] or q[k] -- q is indexed by i
// (2) r[i] or r[k] -- r is indexed by j
// (3) out[i] or out[j] -- out is indexed by k
// (4) i < n or j < m -- index compared to wrong limit
// This loop exits because we always increment at least one of i,j.
while (i < m && j < n) {
// Range starts and ends.
int a0 = q[i], a1 = q[i + 1],
b0 = r[j], b1 = r[j + 1];
if (a1 < b0) { // [a0, a1) ends before [b0, b1) starts
out[k++] = a0;
out[k++] = a1;
i += 2;
} else if (b1 < a0) { // [b0, b1) ends before [a0, a1) starts
out[k++] = b0;
out[k++] = b1;
j += 2;
} else { // ranges overlap
// We need to compute a new range based on the set of ranges that
// transitively overlap.
// AAAAAAAAA AAA
// BBB BBB* BBB
// In the range above, the start comes from one set, and the end from
// another. The range with the asterisk next to it is subsumed entirely
// by a range from the other, and so not all ranges on the input
// contribute a value to the output.
// The last BBB run serves only as a bridge -- it overlaps two
// disjoint ranges in the other one so establishes that they
// transitively overlap.
int start = Math.min(a0, b0);
// Guess at the end, and lookahead to come up with a more complete
// estimate.
int end = Math.max(a1, b1);
i += 2;
j += 2;
while (i < m || j < n) {
if (i < m && q[i] <= end) {
end = Math.max(end, q[i + 1]);
i += 2;
} else if (j < n && r[j] <= end) {
end = Math.max(end, r[j + 1]);
j += 2;
} else {
break;
}
}
out[k++] = start;
out[k++] = end;
}
}
// There may be unprocessed ranges at the end of one of the inputs.
if (i < m) {
System.arraycopy(q, i, out, k, m - i);
k += m - i;
} else if (j < n) {
System.arraycopy(r, j, out, k, n - j);
k += n - j;
}
// We guessed at the output length above. Cut off the tail.
if (k != out.length) {
int[] clipped = Arrays.copyOf(out, k);
out = clipped;
}
return new CharRanges(out);
}
public CharRanges intersection(CharRanges other) {
int[] aRanges = ranges, bRanges = other.ranges;
int aLen = aRanges.length, bLen = bRanges.length;
if (aLen == 0) { return this; }
if (bLen == 0) { return other; }
int aIdx = 0, bIdx = 0;
int[] intersection = new int[Math.min(aLen, bLen)];
int intersectionIdx = 0;
int pos = Math.min(aRanges[0], bRanges[0]);
while (aIdx < aLen && bIdx < bLen) {
if (aRanges[aIdx + 1] <= pos) {
aIdx += 2;
} else if (bRanges[bIdx + 1] <= pos) {
bIdx += 2;
} else {
int start = Math.max(aRanges[aIdx], bRanges[bIdx]);
if (pos < start) { // Advance to start of common block.
pos = start;
} else {
// Now we know that pos is less than the ends of the two ranges and
// greater or equal to the starts of the two ranges.
int end = Math.min(aRanges[aIdx + 1], bRanges[bIdx + 1]);
if (intersectionIdx != 0
&& pos == intersection[intersectionIdx - 1]) {
intersection[intersectionIdx - 1] = end;
} else {
if (intersectionIdx == intersection.length) {
int[] newArr = new int[intersectionIdx * 2];
System.arraycopy(intersection, 0, newArr, 0, intersectionIdx);
intersection = newArr;
}
intersection[intersectionIdx++] = pos;
intersection[intersectionIdx++] = end;
}
pos = end;
}
}
}
if (intersectionIdx != intersection.length) {
int[] newArr = Arrays.copyOf(intersection, intersectionIdx);
intersection = newArr;
}
return new CharRanges(intersection);
}
public CharRanges difference(CharRanges subtrahendRanges) {
// difference = minuend - subtrahend
int[] minuend = this.ranges;
int[] subtrahend = subtrahendRanges.ranges;
int mn = minuend.length, sn = subtrahend.length;
if (mn == 0 || sn == 0) { return this; }
int[] difference = new int[minuend.length];
// Indices into minuend.ranges, subtrahend.ranges, and difference.
int mIdx = 0, sIdx = 0, dIdx = 0;
int pos = minuend[0];
while (mIdx < mn) {
if (pos >= minuend[mIdx + 1]) {
mIdx += 2;
} else if (pos < minuend[mIdx]) {
// Skip gaps in the minuend.
pos = minuend[mIdx];
} else if (sIdx < sn && pos >= subtrahend[sIdx]) {
// Skip over a removed part.
pos = subtrahend[sIdx + 1];
sIdx += 2;
} else {
// Now we know that pos is between [minuend[i], minuend[i + 1])
// and outside [subtrahend[j], subtrahend[j + 1]).
int end = sIdx < sn
? Math.min(minuend[mIdx + 1], subtrahend[sIdx]) : minuend[mIdx + 1];
if (dIdx != 0 && difference[dIdx - 1] == pos) {
difference[dIdx - 1] = pos;
} else {
if (dIdx == difference.length) {
int[] newArr = new int[dIdx * 2];
System.arraycopy(difference, 0, newArr, 0, dIdx);
difference = newArr;
}
difference[dIdx++] = pos;
difference[dIdx++] = end;
}
pos = end;
}
}
if (dIdx != difference.length) {
int[] newArr = Arrays.copyOf(difference, dIdx);
difference = newArr;
}
return new CharRanges(difference);
}
public boolean containsAll(CharRanges sub) {
int[] superRanges = this.ranges;
int[] subRanges = sub.ranges;
int superIdx = 0, subIdx = 0;
int superLen = superRanges.length, subLen = subRanges.length;
while (subIdx < subLen) {
if (superIdx == superLen) {
return false;
}
if (superRanges[superIdx + 1] <= subRanges[subIdx]) {
// Super range ends before subRange starts.
superIdx += 2;
} else if (superRanges[superIdx] > subRanges[subIdx]) {
// Uncontained portion at start of sub range.
return false;
} else if (superRanges[superIdx + 1] >= subRanges[subIdx + 1]) {
// A sub range is completely contained in the super range.
// We know this because of the above condition and we have already
// ruled out that subRanges[subIdx] < superRanges[superIdx].
subIdx += 2;
} else {
// Uncontained portion at end of sub range.
return false;
}
}
return subIdx == subLen;
}
/**
* Shifts the bits matched by the given delta.
* So if this has the bits (a, b, c, ..., z) set then the result has the bits
* ((a - delta), (b - delta), (c - delta), ...., (z - delta)) set.
*
* @throws IndexOutOfBoundsException if shifting by delta would cause an
* overflow or underflow in a 32 bit {@code signed int} range boundary.
* Since the end boundaries of ranges are exclusive, even if there is no
* range containing {@link Integer#MAX_VALUE}, shifting by a delta of 1
* can cause an overflow.
*/
public CharRanges shift(int delta) {
int n = ranges.length;
if (delta == 0 || n == 0) { return this; }
// Test overflow/underflow
if (delta < 0) {
long lmin = ranges[0] + delta;
if (lmin < Integer.MIN_VALUE) { throw new IndexOutOfBoundsException(); }
} else {
long lmax = ranges[n - 1] + delta;
if (lmax > Integer.MAX_VALUE) { throw new IndexOutOfBoundsException(); }
}
// Create a shifted range.
int[] shiftedRanges = new int[n];
for (int i = n; --i >= 0;) {
shiftedRanges[i] = ranges[i] + delta;
}
return new CharRanges(shiftedRanges);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append('[');
for (int i = 0; i < ranges.length; ++i) {
if ((i & 1) != 0 && ranges[i] == ranges[i - 1] + 1) { continue; }
if (i != 0) { sb.append((i & 1) == 0 ? ' ' : '-'); }
sb.append("0x").append(Integer.toString(ranges[i] - (i & 1), 16));
}
sb.append(']');
return sb.toString();
}
@Override
public boolean equals(Object o) {
if (!(o instanceof CharRanges)) { return false; }
return Arrays.equals(this.ranges, ((CharRanges) o).ranges);
}
@Override
public int hashCode() {
int hc = 0;
for (int i = 0, n = Math.min(16, ranges.length); i < n; ++i) {
hc = (hc << 2) + ranges[i];
}
return hc;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.server;
import com.facebook.presto.client.QueryResults;
import com.facebook.presto.execution.AddColumnTask;
import com.facebook.presto.execution.CallTask;
import com.facebook.presto.execution.CommitTask;
import com.facebook.presto.execution.CreateSchemaTask;
import com.facebook.presto.execution.CreateTableTask;
import com.facebook.presto.execution.CreateViewTask;
import com.facebook.presto.execution.DataDefinitionTask;
import com.facebook.presto.execution.DeallocateTask;
import com.facebook.presto.execution.DropColumnTask;
import com.facebook.presto.execution.DropSchemaTask;
import com.facebook.presto.execution.DropTableTask;
import com.facebook.presto.execution.DropViewTask;
import com.facebook.presto.execution.ForQueryExecution;
import com.facebook.presto.execution.GrantTask;
import com.facebook.presto.execution.PrepareTask;
import com.facebook.presto.execution.QueryExecution;
import com.facebook.presto.execution.QueryExecutionMBean;
import com.facebook.presto.execution.QueryIdGenerator;
import com.facebook.presto.execution.QueryInfo;
import com.facebook.presto.execution.QueryManager;
import com.facebook.presto.execution.QueryQueueManager;
import com.facebook.presto.execution.QueryQueueRule;
import com.facebook.presto.execution.QueryQueueRuleFactory;
import com.facebook.presto.execution.RemoteTaskFactory;
import com.facebook.presto.execution.RenameColumnTask;
import com.facebook.presto.execution.RenameSchemaTask;
import com.facebook.presto.execution.RenameTableTask;
import com.facebook.presto.execution.ResetSessionTask;
import com.facebook.presto.execution.RevokeTask;
import com.facebook.presto.execution.RollbackTask;
import com.facebook.presto.execution.SetSessionTask;
import com.facebook.presto.execution.SqlQueryManager;
import com.facebook.presto.execution.SqlQueryQueueManager;
import com.facebook.presto.execution.StartTransactionTask;
import com.facebook.presto.execution.TaskInfo;
import com.facebook.presto.execution.resourceGroups.InternalResourceGroupManager;
import com.facebook.presto.execution.resourceGroups.LegacyResourceGroupConfigurationManagerFactory;
import com.facebook.presto.execution.resourceGroups.ResourceGroupManager;
import com.facebook.presto.execution.scheduler.AllAtOnceExecutionPolicy;
import com.facebook.presto.execution.scheduler.ExecutionPolicy;
import com.facebook.presto.execution.scheduler.PhasedExecutionPolicy;
import com.facebook.presto.execution.scheduler.SplitSchedulerStats;
import com.facebook.presto.memory.ClusterMemoryManager;
import com.facebook.presto.memory.ForMemoryManager;
import com.facebook.presto.operator.ForScheduler;
import com.facebook.presto.server.remotetask.RemoteTaskStats;
import com.facebook.presto.spi.memory.ClusterMemoryPoolManager;
import com.facebook.presto.sql.analyzer.FeaturesConfig;
import com.facebook.presto.sql.analyzer.QueryExplainer;
import com.facebook.presto.sql.tree.AddColumn;
import com.facebook.presto.sql.tree.Call;
import com.facebook.presto.sql.tree.Commit;
import com.facebook.presto.sql.tree.CreateSchema;
import com.facebook.presto.sql.tree.CreateTable;
import com.facebook.presto.sql.tree.CreateTableAsSelect;
import com.facebook.presto.sql.tree.CreateView;
import com.facebook.presto.sql.tree.Deallocate;
import com.facebook.presto.sql.tree.Delete;
import com.facebook.presto.sql.tree.DescribeInput;
import com.facebook.presto.sql.tree.DescribeOutput;
import com.facebook.presto.sql.tree.DropColumn;
import com.facebook.presto.sql.tree.DropSchema;
import com.facebook.presto.sql.tree.DropTable;
import com.facebook.presto.sql.tree.DropView;
import com.facebook.presto.sql.tree.Explain;
import com.facebook.presto.sql.tree.Grant;
import com.facebook.presto.sql.tree.Insert;
import com.facebook.presto.sql.tree.Prepare;
import com.facebook.presto.sql.tree.Query;
import com.facebook.presto.sql.tree.RenameColumn;
import com.facebook.presto.sql.tree.RenameSchema;
import com.facebook.presto.sql.tree.RenameTable;
import com.facebook.presto.sql.tree.ResetSession;
import com.facebook.presto.sql.tree.Revoke;
import com.facebook.presto.sql.tree.Rollback;
import com.facebook.presto.sql.tree.SetSession;
import com.facebook.presto.sql.tree.ShowCatalogs;
import com.facebook.presto.sql.tree.ShowColumns;
import com.facebook.presto.sql.tree.ShowCreate;
import com.facebook.presto.sql.tree.ShowFunctions;
import com.facebook.presto.sql.tree.ShowGrants;
import com.facebook.presto.sql.tree.ShowPartitions;
import com.facebook.presto.sql.tree.ShowSchemas;
import com.facebook.presto.sql.tree.ShowSession;
import com.facebook.presto.sql.tree.ShowStats;
import com.facebook.presto.sql.tree.ShowTables;
import com.facebook.presto.sql.tree.StartTransaction;
import com.facebook.presto.sql.tree.Statement;
import com.facebook.presto.sql.tree.Use;
import com.google.inject.Binder;
import com.google.inject.Scopes;
import com.google.inject.TypeLiteral;
import com.google.inject.multibindings.MapBinder;
import io.airlift.configuration.AbstractConfigurationAwareModule;
import io.airlift.units.Duration;
import javax.annotation.PreDestroy;
import javax.inject.Inject;
import java.util.List;
import java.util.concurrent.ExecutorService;
import static com.facebook.presto.execution.DataDefinitionExecution.DataDefinitionExecutionFactory;
import static com.facebook.presto.execution.QueryExecution.QueryExecutionFactory;
import static com.facebook.presto.execution.SqlQueryExecution.SqlQueryExecutionFactory;
import static com.google.inject.multibindings.MapBinder.newMapBinder;
import static io.airlift.concurrent.Threads.threadsNamed;
import static io.airlift.discovery.client.DiscoveryBinder.discoveryBinder;
import static io.airlift.http.client.HttpClientBinder.httpClientBinder;
import static io.airlift.http.server.HttpServerBinder.httpServerBinder;
import static io.airlift.jaxrs.JaxrsBinder.jaxrsBinder;
import static io.airlift.json.JsonCodecBinder.jsonCodecBinder;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.weakref.jmx.ObjectNames.generatedNameOf;
import static org.weakref.jmx.guice.ExportBinder.newExporter;
public class CoordinatorModule
extends AbstractConfigurationAwareModule
{
@Override
protected void setup(Binder binder)
{
httpServerBinder(binder).bindResource("/", "webapp").withWelcomeFile("index.html");
// presto coordinator announcement
discoveryBinder(binder).bindHttpAnnouncement("presto-coordinator");
// statement resource
jsonCodecBinder(binder).bindJsonCodec(QueryInfo.class);
jsonCodecBinder(binder).bindJsonCodec(TaskInfo.class);
jsonCodecBinder(binder).bindJsonCodec(QueryResults.class);
jaxrsBinder(binder).bind(StatementResource.class);
// query execution visualizer
jaxrsBinder(binder).bind(QueryExecutionResource.class);
// query manager
jaxrsBinder(binder).bind(QueryResource.class);
jaxrsBinder(binder).bind(StageResource.class);
jaxrsBinder(binder).bind(QueryStateInfoResource.class);
jaxrsBinder(binder).bind(ResourceGroupStateInfoResource.class);
binder.bind(QueryIdGenerator.class).in(Scopes.SINGLETON);
binder.bind(QueryManager.class).to(SqlQueryManager.class).in(Scopes.SINGLETON);
binder.bind(SessionSupplier.class).to(QuerySessionSupplier.class).in(Scopes.SINGLETON);
binder.bind(InternalResourceGroupManager.class).in(Scopes.SINGLETON);
newExporter(binder).export(InternalResourceGroupManager.class).withGeneratedName();
binder.bind(ResourceGroupManager.class).to(InternalResourceGroupManager.class);
binder.bind(LegacyResourceGroupConfigurationManagerFactory.class).in(Scopes.SINGLETON);
if (buildConfigObject(FeaturesConfig.class).isResourceGroupsEnabled()) {
binder.bind(QueryQueueManager.class).to(InternalResourceGroupManager.class);
}
else {
binder.bind(QueryQueueManager.class).to(SqlQueryQueueManager.class).in(Scopes.SINGLETON);
binder.bind(new TypeLiteral<List<QueryQueueRule>>() {}).toProvider(QueryQueueRuleFactory.class).in(Scopes.SINGLETON);
}
newExporter(binder).export(QueryManager.class).withGeneratedName();
// cluster memory manager
binder.bind(ClusterMemoryManager.class).in(Scopes.SINGLETON);
binder.bind(ClusterMemoryPoolManager.class).to(ClusterMemoryManager.class).in(Scopes.SINGLETON);
httpClientBinder(binder).bindHttpClient("memoryManager", ForMemoryManager.class)
.withTracing()
.withConfigDefaults(config -> {
config.setIdleTimeout(new Duration(30, SECONDS));
config.setRequestTimeout(new Duration(10, SECONDS));
});
newExporter(binder).export(ClusterMemoryManager.class).withGeneratedName();
// cluster statistics
jaxrsBinder(binder).bind(ClusterStatsResource.class);
// query explainer
binder.bind(QueryExplainer.class).in(Scopes.SINGLETON);
// execution scheduler
binder.bind(RemoteTaskFactory.class).to(HttpRemoteTaskFactory.class).in(Scopes.SINGLETON);
newExporter(binder).export(RemoteTaskFactory.class).withGeneratedName();
binder.bind(RemoteTaskStats.class).in(Scopes.SINGLETON);
newExporter(binder).export(RemoteTaskStats.class).withGeneratedName();
httpClientBinder(binder).bindHttpClient("scheduler", ForScheduler.class)
.withTracing()
.withConfigDefaults(config -> {
config.setIdleTimeout(new Duration(30, SECONDS));
config.setRequestTimeout(new Duration(10, SECONDS));
config.setMaxConnectionsPerServer(250);
});
// query execution
binder.bind(ExecutorService.class).annotatedWith(ForQueryExecution.class)
.toInstance(newCachedThreadPool(threadsNamed("query-execution-%s")));
binder.bind(QueryExecutionMBean.class).in(Scopes.SINGLETON);
newExporter(binder).export(QueryExecutionMBean.class).as(generatedNameOf(QueryExecution.class));
MapBinder<Class<? extends Statement>, QueryExecutionFactory<?>> executionBinder = newMapBinder(binder,
new TypeLiteral<Class<? extends Statement>>() {}, new TypeLiteral<QueryExecutionFactory<?>>() {});
binder.bind(SplitSchedulerStats.class).in(Scopes.SINGLETON);
newExporter(binder).export(SplitSchedulerStats.class).withGeneratedName();
binder.bind(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(Query.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(Explain.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(ShowCreate.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(ShowColumns.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(ShowStats.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(ShowPartitions.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(ShowFunctions.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(ShowTables.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(ShowSchemas.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(ShowCatalogs.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(Use.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(ShowSession.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(ShowGrants.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(CreateTableAsSelect.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(Insert.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(Delete.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(DescribeInput.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
executionBinder.addBinding(DescribeOutput.class).to(SqlQueryExecutionFactory.class).in(Scopes.SINGLETON);
binder.bind(DataDefinitionExecutionFactory.class).in(Scopes.SINGLETON);
bindDataDefinitionTask(binder, executionBinder, CreateSchema.class, CreateSchemaTask.class);
bindDataDefinitionTask(binder, executionBinder, DropSchema.class, DropSchemaTask.class);
bindDataDefinitionTask(binder, executionBinder, RenameSchema.class, RenameSchemaTask.class);
bindDataDefinitionTask(binder, executionBinder, AddColumn.class, AddColumnTask.class);
bindDataDefinitionTask(binder, executionBinder, CreateTable.class, CreateTableTask.class);
bindDataDefinitionTask(binder, executionBinder, RenameTable.class, RenameTableTask.class);
bindDataDefinitionTask(binder, executionBinder, RenameColumn.class, RenameColumnTask.class);
bindDataDefinitionTask(binder, executionBinder, DropColumn.class, DropColumnTask.class);
bindDataDefinitionTask(binder, executionBinder, DropTable.class, DropTableTask.class);
bindDataDefinitionTask(binder, executionBinder, CreateView.class, CreateViewTask.class);
bindDataDefinitionTask(binder, executionBinder, DropView.class, DropViewTask.class);
bindDataDefinitionTask(binder, executionBinder, SetSession.class, SetSessionTask.class);
bindDataDefinitionTask(binder, executionBinder, ResetSession.class, ResetSessionTask.class);
bindDataDefinitionTask(binder, executionBinder, StartTransaction.class, StartTransactionTask.class);
bindDataDefinitionTask(binder, executionBinder, Commit.class, CommitTask.class);
bindDataDefinitionTask(binder, executionBinder, Rollback.class, RollbackTask.class);
bindDataDefinitionTask(binder, executionBinder, Call.class, CallTask.class);
bindDataDefinitionTask(binder, executionBinder, Grant.class, GrantTask.class);
bindDataDefinitionTask(binder, executionBinder, Revoke.class, RevokeTask.class);
bindDataDefinitionTask(binder, executionBinder, Prepare.class, PrepareTask.class);
bindDataDefinitionTask(binder, executionBinder, Deallocate.class, DeallocateTask.class);
MapBinder<String, ExecutionPolicy> executionPolicyBinder = newMapBinder(binder, String.class, ExecutionPolicy.class);
executionPolicyBinder.addBinding("all-at-once").to(AllAtOnceExecutionPolicy.class);
executionPolicyBinder.addBinding("phased").to(PhasedExecutionPolicy.class);
// cleanup
binder.bind(ExecutorCleanup.class).in(Scopes.SINGLETON);
}
private static <T extends Statement> void bindDataDefinitionTask(
Binder binder,
MapBinder<Class<? extends Statement>, QueryExecutionFactory<?>> executionBinder,
Class<T> statement,
Class<? extends DataDefinitionTask<T>> task)
{
MapBinder<Class<? extends Statement>, DataDefinitionTask<?>> taskBinder = newMapBinder(binder,
new TypeLiteral<Class<? extends Statement>>() {}, new TypeLiteral<DataDefinitionTask<?>>() {});
taskBinder.addBinding(statement).to(task).in(Scopes.SINGLETON);
executionBinder.addBinding(statement).to(DataDefinitionExecutionFactory.class).in(Scopes.SINGLETON);
}
public static class ExecutorCleanup
{
private final ExecutorService executor;
@Inject
public ExecutorCleanup(@ForQueryExecution ExecutorService executor)
{
this.executor = requireNonNull(executor, "executor is null");
}
@PreDestroy
public void shutdown()
{
executor.shutdownNow();
}
}
}
| |
package org.jcodec.common;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.jcodec.codecs.vp8.BooleanArithmeticDecoder;
import org.junit.Assert;
import org.junit.Test;
/**
* See for theoretical details: http://www.youtube.com/playlist?list=PLE125425EC837021F
*/
/**
 * Tests for a simple integer arithmetic coder/decoder pair, plus the bit
 * {@link Emitter} used by the coder. Symbol 0 serves as the end-of-message
 * marker in both {@link ArithmeticCoder#encode} and
 * {@link ArithmeticDecoder#decode}.
 */
public class ArithmeticCoderTest {
    // Bit width of the coding interval [0, 2^PRECISSION). (Original spelling kept.)
    private static final int PRECISSION = 8;
    // Per-symbol frequencies shared by the tests; their sum R is the denominator
    // of each symbol's probability. Symbol 0 is the end-of-message marker.
    private static final int[] rs = new int[]{ 10, 25, 11, 15, 10 };
    // Formats a byte array as a Java-style hex initializer, e.g. "{0xD8, 0x44}".
    public static String printArrayAsHex(byte[] b){
        StringBuilder sb = new StringBuilder("{");
        if (b.length > 0){
            sb.append("0x").append(Integer.toHexString(b[0]&0xff).toUpperCase());
            for (int i=1;i<b.length;i++)
                sb.append(", 0x").append(Integer.toHexString(b[i]&0xff).toUpperCase());
        }
        sb.append("}");
        return sb.toString();
    }
    @Test
    public void testPrinting() throws Exception {
        Assert.assertEquals("{0xD8}", printArrayAsHex(new byte[]{(byte)0xD8}));
        Assert.assertEquals("{0xD8, 0x44}", printArrayAsHex(new byte[]{(byte)0xD8, 0x44}));
    }
    // Known-answer tests: fixed symbol sequences must encode to fixed byte strings.
    @Test
    public void testEncoder() throws IOException {
        ArithmeticCoder ac = new ArithmeticCoder(PRECISSION, rs);
        ac.encode(Arrays.asList(new Integer[] { 1, 2, 3, 4, 0 }));
        Assert.assertArrayEquals(new byte[]{0x5C, 0x18}, ac.e.getArray());
        ac.encode(Arrays.asList(new Integer[] { 4, 0 }));
        Assert.assertArrayEquals(new byte[]{(byte)0xDC}, ac.e.getArray());
        ac.encode(Arrays.asList(new Integer[] { 1, 1, 1, 2, 0 }));
        Assert.assertArrayEquals(new byte[]{0x3A, (byte)0x80}, ac.e.getArray());
    }
    // Smoke test: decodes the byte strings produced above and prints the symbols.
    // NOTE(review): no assertions here — output is only inspected manually.
    @Test
    public void testDecoder() throws Exception {
        ArithmeticDecoder ad = new ArithmeticDecoder(PRECISSION, rs);
        ad.decode(new byte[] { 0x5C, 0x18 });
        System.out.println(ad.data);
        ad.decode(new byte[] { (byte) 0xDC });
        System.out.println(ad.data);
        ad.decode(new byte[] { 0x3A, (byte)0x80 });
        System.out.println(ad.data);
    }
    // Round-trip property: every encoded sequence must decode back to itself.
    @Test
    public void testCodingAndDecoding() throws Exception {
        int[] smallRs = new int[]{ 2, 5, 1, 3, 2 };
        ArithmeticCoder ac = new ArithmeticCoder(PRECISSION, smallRs);
        ArithmeticDecoder ad = new ArithmeticDecoder(PRECISSION, smallRs);
        List<Integer> asList = Arrays.asList(new Integer[] { 1, 2, 3, 0 });
        ac.encode(asList);
        ad.decode(ac.e.getArray());
        Assert.assertEquals(asList, ad.data);
        asList = Arrays.asList(new Integer[] { 2, 3, 4, 0 });
        ac.encode(asList);
        ad.decode(ac.e.getArray());
        Assert.assertEquals(asList, ad.data);
        asList = Arrays.asList(new Integer[] { 1, 2, 4, 0 });
        ac.encode(asList);
        ad.decode(ac.e.getArray());
        Assert.assertEquals(asList, ad.data);
        asList = Arrays.asList(new Integer[] { 1, 3, 4, 0 });
        ac.encode(asList);
        ad.decode(ac.e.getArray());
        Assert.assertEquals(asList, ad.data);
        asList = Arrays.asList(new Integer[] { 4, 3, 4, 0 });
        ac.encode(asList);
        ad.decode(ac.e.getArray());
        Assert.assertEquals(asList, ad.data);
        asList = Arrays.asList(new Integer[] { 1, 2, 3, 4, 0 });
        ac.encode(asList);
        ad.decode(ac.e.getArray());
        Assert.assertEquals(asList, ad.data);
    }
    /**
     * Integer (fixed-precision) arithmetic encoder. Maintains a current interval
     * [a, b) inside [0, whole) and narrows it per symbol in proportion to the
     * symbol frequencies, renormalizing and emitting bits as the interval shrinks.
     */
    public static class ArithmeticCoder {
        public final long precission; // number of bits of interval precision
        public final long whole;      // 2^precission, the full interval width
        public final long half;       // whole / 2
        public final long quater;     // whole / 4 ("quarter"; original spelling kept)
        public final int[] r; // per-symbol frequencies; their sum is R
        public final int[] c; // cumulative lower borders, c[0]=0, c[i] = r[0]+...+r[i-1]
        public final int[] d; // cumulative upper borders, d[i] = c[i]+r[i]
        public final int R;   // total frequency (denominator of all probabilities)
        public Emitter e;     // bit sink; a fresh one is created on each encode()
        public ArithmeticCoder(int precission, int[] r){
            this.precission = precission;
            this.whole = (1L << precission);
            this.half = whole >> 1;
            this.quater = whole >> 2;
            this.r = r;
            this.c = new int[r.length];
            this.d = new int[r.length];
            this.d[0] = this.r[0];
            this.c[0] = 0;
            int bigR = r[0];
            // Build cumulative borders c[] and d[] plus the frequency total R.
            for(int i=1;i<r.length;i++){
                for(int k=0;k<i;k++){
                    c[i] += r[k];
                }
                d[i] = c[i]+r[i];
                bigR += r[i];
            }
            this.R = bigR;
        }
        // Returns a bit mask of s one-bits.
        // NOTE(review): "1 << s" is an int shift (count masked to 5 bits), so this
        // misbehaves for s >= 31 — should be "1L << s". Only referenced from
        // commented-out code below, so it is currently harmless.
        public static long sOnes(long s) {
            return (1 << s) - 1;
        }
        // Emits a 0 bit followed by s 1 bits (flushes pending "straddle" bits).
        public void emitZeroAndSOnes(long s) throws IOException {
            // System.out.print(0);
            e.emit(0);
            while (s > 0) {
                // System.out.print(1);
                e.emit(1);
                s--;
            }
        }
        // Emits a 1 bit followed by s 0 bits (flushes pending "straddle" bits).
        private void emitOneAndSZeros(long s) throws IOException {
            // System.out.print(1);
            e.emit(1);
            while (s > 0) {
                // System.out.print(0);
                e.emit(0);
                s--;
            }
        }
        /**
         * Encodes the symbol sequence into {@link #e}. The input is expected to
         * end with symbol 0 (the terminator used by the tests); the resulting
         * bytes are retrieved via {@code e.getArray()}.
         */
        public void encode(List<Integer> input) throws IOException {
            e = new Emitter();
            long a = 0L;
            long b = whole;
            long s = 0; // count of deferred "middle straddle" renormalizations
            for (int index = 0; index < input.size(); index++) {
                // Narrow [a, b) to the sub-interval of the current symbol.
                long omega = b - a;
                b = a + Math.round((omega * d[input.get(index)]) / (R*1.0));
                a = a + Math.round((omega * c[input.get(index)]) / (R*1.0));
                // Renormalize while the interval sits entirely in one half:
                // each step emits one determined bit (plus s deferred opposite bits).
                while (b < half || a > half) {
                    if (b < half) {
                        // emit 0 and s 1's
                        // result = result << (s+1) | sOnes(result);
                        emitZeroAndSOnes(s);
                        s = 0;
                        a = 2*a; // a=2a
                        b = 2*b; // b=2b
                    } else if (a > half) {
                        // emit 1 and s 0's
                        // result = (result<<1 | 0x01)<<s;
                        emitOneAndSZeros(s);
                        s = 0;
                        a = 2*(a - half);
                        b = 2*(b - half);
                    }
                }
                // Interval straddles the midpoint: defer the bit decision (s++)
                // and expand around the middle.
                while (a > quater && b < 3 * quater) {
                    s++;
                    a = 2*(a - quater);
                    b = 2*(b - quater);
                }
            }
            // Termination: emit one final disambiguating bit plus the deferred ones.
            s++;
            if (a <= quater) {
                // emit 0 and s 1's
                // result = result << (s + 1) | sOnes(result);
                emitZeroAndSOnes(s);
            } else {
                // emit 1 and s 0's
                emitOneAndSZeros(s);
                // result = (result << 1 | 0x01) << s;
            }
        }
    }
    /**
     * Integer arithmetic decoder — mirror of {@link ArithmeticCoder}. Tracks the
     * same interval [a, b) plus a window z of the input bits, and emits the symbol
     * whose sub-interval contains z. Decoding stops at symbol 0.
     */
    public static class ArithmeticDecoder {
        public final long precission; // number of bits of interval precision
        public final long whole;      // 2^precission
        public final long half;       // whole / 2
        public final long quater;     // whole / 4
        public final int[] r; // per-symbol frequencies; their sum is R
        public final int[] c; // cumulative lower borders, c[0]=0, c[i] = r[0]+...+r[i-1]
        public final int[] d; // cumulative upper borders, d[i] = c[i]+r[i]
        public final int R;   // total frequency
        public List<Integer> data; // symbols produced by the last decode() call
        // Identical table construction to ArithmeticCoder's constructor.
        public ArithmeticDecoder(int precission, int[] r){
            this.precission = precission;
            this.whole = (1L << precission);
            this.half = whole >> 1;
            this.quater = whole >> 2;
            this.r = r;
            this.c = new int[r.length];
            this.d = new int[r.length];
            this.d[0] = this.r[0];
            this.c[0] = 0;
            int bigR = r[0];
            for(int i=1;i<r.length;i++){
                for(int k=0;k<i;k++){
                    c[i] += r[k];
                }
                d[i] = c[i]+r[i];
                bigR += r[i];
            }
            this.R = bigR;
        }
        /**
         * Decodes symbols from the given bytes into {@link #data}, stopping when
         * the terminator symbol 0 is decoded.
         */
        public void decode(byte[] bs) {
            data = new ArrayList<Integer>();
            long a = 0;
            long b = whole;
            long z = 0; // sliding window of the next `precission` input bits
            long i = 0; // index of the next unread bit in bs
            // Prime z with the first `precission` bits of the input.
            while (i < precission && i < bs.length * 8) {
                if (BooleanArithmeticDecoder.getBitInBytes(bs, (int)i) != 0x00) {
                    z += (1L << (precission - i - 1));
                }
                i++;
            }
            while (true) {
                // Find the symbol whose sub-interval [azero, bzero) contains z.
                // NOTE(review): the symbol count 5 is hard-coded here; presumably
                // it should be r.length (equal to 5 for every array in these tests).
                for (int j = 0; j < 5; j++) {
                    long omega = b - a;
                    long bzero = a + Math.round((omega * d[j]) / (R*1.0));
                    long azero = a + Math.round((omega * c[j]) / (R*1.0));
                    if (azero <= z && z < bzero) {
                        data.add(j);
                        a = azero;
                        b = bzero;
                        if (j == 0) {
                            return; // terminator symbol: decoding complete
                        }
                        break;
                    }
                }
                // Renormalize exactly as the encoder does, shifting the next input
                // bit into z on each expansion.
                while (b < half || a > half) {
                    if (b < half) {
                        a = 2*a;
                        b = 2*b;
                        z = 2*z;
                    } else if (a > half) {
                        a = 2*(a - half);
                        b = 2*(b - half);
                        z = 2*(z - half);
                    }
                    if (i < (bs.length * 8)){
                        if (BooleanArithmeticDecoder.getBitInBytes(bs, (int)i) == 0x01)
                            z++;
                        i++;
                    }
                }
                // Middle-straddle expansion, mirroring the encoder's deferred bits.
                while (a > quater && b < 3 * quater) {
                    a = (a - quater) << 1;
                    b = (b - quater) << 1;
                    z = (z - quater) << 1;
                    if (i < (bs.length * 8) ){
                        if (BooleanArithmeticDecoder.getBitInBytes(bs, (int)i) == 0x01)
                            z++;
                        i++;
                    }
                }
            }
        }
    }
    // Emitter packs bits MSB-first; getArray() zero-pads the trailing partial byte.
    @Test
    public void testEmiter() throws Exception {
        Emitter p = new Emitter();
        Assert.assertArrayEquals(new byte[]{}, p.getArray());
        p.emit(1);p.emit(1);p.emit(0);p.emit(1);p.emit(1);p.emit(0);
        Assert.assertArrayEquals(new byte[]{(byte)0xD8}, p.getArray());
        p = new Emitter();
        // 01000011001
        p.emit(0);p.emit(1);p.emit(0);p.emit(0);p.emit(0);p.emit(0);p.emit(1);p.emit(1);p.emit(0);p.emit(0);p.emit(1);
        Assert.assertArrayEquals(new byte[]{0x43, 0x20}, p.getArray());
    }
    /**
     * Accumulates single bits (MSB-first within each byte) into a byte stream.
     */
    public static class Emitter{
        private int i=0;      // number of bits already placed in `buffer` (0..7)
        private byte buffer=0; // partial byte being filled, MSB-first
        private ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // Appends one bit; only 0 or 1 is accepted.
        public void emit(int b) throws IOException {
            if (b != 1 && b != 0)
                throw new IllegalArgumentException("Only 0's and 1's are accepted");
            buffer |= b<<(7-i);
            i++;
            if (i>7){
                i=0;
                baos.write(new byte[]{buffer});
                buffer=0;
            }
        }
        // Flushes any partial byte (zero-padded on the right) and returns all bytes.
        // NOTE(review): flushing resets the bit position, so emitting more bits
        // after getArray() starts a fresh byte rather than continuing the old one.
        public byte[] getArray() throws IOException {
            if (i!=0){
                i=0;
                baos.write(new byte[]{buffer});
                buffer=0;
            }
            return baos.toByteArray();
        }
    }
}
| |
/**
*
*/
package gov.nih.nci.cabig.caaers.web.security;
import gov.nih.nci.security.UserProvisioningManager;
import gov.nih.nci.security.authorization.domainobjects.Application;
import gov.nih.nci.security.authorization.domainobjects.ApplicationContext;
import gov.nih.nci.security.authorization.domainobjects.FilterClause;
import gov.nih.nci.security.authorization.domainobjects.Group;
import gov.nih.nci.security.authorization.domainobjects.InstanceLevelMappingElement;
import gov.nih.nci.security.authorization.domainobjects.Privilege;
import gov.nih.nci.security.authorization.domainobjects.ProtectionElement;
import gov.nih.nci.security.authorization.domainobjects.ProtectionGroup;
import gov.nih.nci.security.authorization.domainobjects.Role;
import gov.nih.nci.security.authorization.domainobjects.User;
import gov.nih.nci.security.authorization.jaas.AccessPermission;
import gov.nih.nci.security.dao.SearchCriteria;
import gov.nih.nci.security.exceptions.CSDataAccessException;
import gov.nih.nci.security.exceptions.CSException;
import gov.nih.nci.security.exceptions.CSObjectNotFoundException;
import gov.nih.nci.security.exceptions.CSTransactionException;
import java.net.URL;
import java.security.Principal;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import javax.security.auth.Subject;
/**
* @author <a href="mailto:joshua.phillips@semanticbits.com">Joshua Phillips</a>
*
*/
public class MockUserProvisioningManagerAdapter implements UserProvisioningManager {
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#assignGroupRoleToProtectionGroup(java.lang.String,
* java.lang.String, java.lang.String[])
*/
public void assignGroupRoleToProtectionGroup(String arg0, String arg1, String[] arg2)
throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#assignGroupsToUser(java.lang.String,
* java.lang.String[])
*/
public void assignGroupsToUser(String arg0, String[] arg1) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#assignOwners(java.lang.String,
* java.lang.String[])
*/
public void assignOwners(String arg0, String[] arg1) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#assignParentProtectionGroup(java.lang.String,
* java.lang.String)
*/
public void assignParentProtectionGroup(String arg0, String arg1) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#assignPrivilegesToRole(java.lang.String,
* java.lang.String[])
*/
public void assignPrivilegesToRole(String arg0, String[] arg1) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#assignProtectionElements(java.lang.String,
* java.lang.String[])
*/
public void assignProtectionElements(String arg0, String[] arg1) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#assignToProtectionGroups(java.lang.String,
* java.lang.String[])
*/
public void assignToProtectionGroups(String arg0, String[] arg1) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#assignUserRoleToProtectionGroup(java.lang.String,
* java.lang.String[], java.lang.String)
*/
public void assignUserRoleToProtectionGroup(String arg0, String[] arg1, String arg2)
throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#assignUserToGroup(java.lang.String,
* java.lang.String)
*/
public void assignUserToGroup(String arg0, String arg1) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#assignUsersToGroup(java.lang.String,
* java.lang.String[])
*/
public void assignUsersToGroup(String arg0, String[] arg1) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#createApplication(gov.nih.nci.security.authorization.domainobjects.Application)
*/
public void createApplication(Application arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#createGroup(gov.nih.nci.security.authorization.domainobjects.Group)
*/
public void createGroup(Group arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#createPrivilege(gov.nih.nci.security.authorization.domainobjects.Privilege)
*/
public void createPrivilege(Privilege arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#createProtectionGroup(gov.nih.nci.security.authorization.domainobjects.ProtectionGroup)
*/
public void createProtectionGroup(ProtectionGroup arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#createRole(gov.nih.nci.security.authorization.domainobjects.Role)
*/
public void createRole(Role arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#createUser(gov.nih.nci.security.authorization.domainobjects.User)
*/
public void createUser(User arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getApplicationById(java.lang.String)
*/
public Application getApplicationById(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getGroupById(java.lang.String)
*/
public Group getGroupById(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getGroups(java.lang.String)
*/
public Set getGroups(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getObjects(gov.nih.nci.security.dao.SearchCriteria)
*/
public List getObjects(SearchCriteria arg0) {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getOwners(java.lang.String)
*/
public Set getOwners(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getPrivilegeById(java.lang.String)
*/
public Privilege getPrivilegeById(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getPrivileges(java.lang.String)
*/
public Set getPrivileges(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getProtectionElementPrivilegeContextForGroup(java.lang.String)
*/
public Set getProtectionElementPrivilegeContextForGroup(String arg0)
throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getProtectionElementPrivilegeContextForUser(java.lang.String)
*/
public Set getProtectionElementPrivilegeContextForUser(String arg0)
throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getProtectionElements(java.lang.String)
*/
public Set getProtectionElements(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getProtectionGroupById(java.lang.String)
*/
public ProtectionGroup getProtectionGroupById(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getProtectionGroupRoleContextForGroup(java.lang.String)
*/
public Set getProtectionGroupRoleContextForGroup(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getProtectionGroupRoleContextForUser(java.lang.String)
*/
public Set getProtectionGroupRoleContextForUser(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getRoleById(java.lang.String)
*/
public Role getRoleById(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getUserById(java.lang.String)
*/
public User getUserById(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#getUsers(java.lang.String)
*/
public Set getUsers(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#modifyApplication(gov.nih.nci.security.authorization.domainobjects.Application)
*/
public void modifyApplication(Application arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#modifyGroup(gov.nih.nci.security.authorization.domainobjects.Group)
*/
public void modifyGroup(Group arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#modifyPrivilege(gov.nih.nci.security.authorization.domainobjects.Privilege)
*/
public void modifyPrivilege(Privilege arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#modifyProtectionElement(gov.nih.nci.security.authorization.domainobjects.ProtectionElement)
*/
public void modifyProtectionElement(ProtectionElement arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#modifyProtectionGroup(gov.nih.nci.security.authorization.domainobjects.ProtectionGroup)
*/
public void modifyProtectionGroup(ProtectionGroup arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#modifyRole(gov.nih.nci.security.authorization.domainobjects.Role)
*/
public void modifyRole(Role arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#modifyUser(gov.nih.nci.security.authorization.domainobjects.User)
*/
public void modifyUser(User arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#removeApplication(java.lang.String)
*/
public void removeApplication(String arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#removeGroup(java.lang.String)
*/
public void removeGroup(String arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#removeGroupFromProtectionGroup(java.lang.String,
* java.lang.String)
*/
public void removeGroupFromProtectionGroup(String arg0, String arg1)
throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#removeGroupRoleFromProtectionGroup(java.lang.String,
* java.lang.String, java.lang.String[])
*/
public void removeGroupRoleFromProtectionGroup(String arg0, String arg1, String[] arg2)
throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#removePrivilege(java.lang.String)
*/
public void removePrivilege(String arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#removeProtectionElement(java.lang.String)
*/
public void removeProtectionElement(String arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#removeProtectionElementsFromProtectionGroup(java.lang.String,
* java.lang.String[])
*/
public void removeProtectionElementsFromProtectionGroup(String arg0, String[] arg1)
throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#removeProtectionGroup(java.lang.String)
*/
public void removeProtectionGroup(String arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#removeRole(java.lang.String)
*/
public void removeRole(String arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#removeUser(java.lang.String)
*/
public void removeUser(String arg0) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#removeUserFromGroup(java.lang.String,
* java.lang.String)
*/
public void removeUserFromGroup(String arg0, String arg1) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#removeUserFromProtectionGroup(java.lang.String,
* java.lang.String)
*/
public void removeUserFromProtectionGroup(String arg0, String arg1)
throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.UserProvisioningManager#removeUserRoleFromProtectionGroup(java.lang.String,
* java.lang.String, java.lang.String[])
*/
public void removeUserRoleFromProtectionGroup(String arg0, String arg1, String[] arg2)
throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#assignProtectionElement(java.lang.String,
* java.lang.String)
*/
public void assignProtectionElement(String arg0, String arg1) throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#assignProtectionElement(java.lang.String,
* java.lang.String, java.lang.String)
*/
public void assignProtectionElement(String arg0, String arg1, String arg2)
throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#checkOwnership(java.lang.String,
* java.lang.String)
*/
public boolean checkOwnership(String arg0, String arg1) {
// TODO Auto-generated method stub
return false;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#checkPermission(gov.nih.nci.security.authorization.jaas.AccessPermission,
* javax.security.auth.Subject)
*/
public boolean checkPermission(AccessPermission arg0, Subject arg1) throws CSException {
// TODO Auto-generated method stub
return false;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#checkPermission(gov.nih.nci.security.authorization.jaas.AccessPermission,
* java.lang.String)
*/
public boolean checkPermission(AccessPermission arg0, String arg1) throws CSException {
// TODO Auto-generated method stub
return false;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#checkPermission(java.lang.String,
* java.lang.String, java.lang.String)
*/
// NOTE(review): every method in this region is an auto-generated stub of the
// gov.nih.nci.security.AuthorizationManager interface. Permission checks
// unconditionally return false, lookups unconditionally return null, and
// mutating operations are silent no-ops. Do not rely on this class for real
// authorization decisions.
public boolean checkPermission(String arg0, String arg1, String arg2) throws CSException {
// TODO Auto-generated method stub
// Stub: always denies access.
return false;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#checkPermission(java.lang.String,
* java.lang.String, java.lang.String, java.lang.String)
*/
public boolean checkPermission(String arg0, String arg1, String arg2, String arg3)
throws CSException {
// TODO Auto-generated method stub
// Stub: always denies access.
return false;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#checkPermissionForGroup(java.lang.String,
* java.lang.String, java.lang.String)
*/
public boolean checkPermissionForGroup(String arg0, String arg1, String arg2)
throws CSException {
// TODO Auto-generated method stub
// Stub: always denies access.
return false;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#checkPermissionForGroup(java.lang.String,
* java.lang.String, java.lang.String, java.lang.String)
*/
public boolean checkPermissionForGroup(String arg0, String arg1, String arg2, String arg3)
throws CSException {
// TODO Auto-generated method stub
// Stub: always denies access.
return false;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#createProtectionElement(gov.nih.nci.security.authorization.domainobjects.ProtectionElement)
*/
public void createProtectionElement(ProtectionElement arg0) throws CSTransactionException {
// TODO Auto-generated method stub
// Stub: no-op; nothing is persisted.
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#deAssignProtectionElements(java.lang.String,
* java.lang.String)
*/
public void deAssignProtectionElements(String arg0, String arg1) throws CSTransactionException {
// TODO Auto-generated method stub
// Stub: no-op.
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#getAccessibleGroups(java.lang.String,
* java.lang.String)
*/
public List getAccessibleGroups(String arg0, String arg1) throws CSException {
// TODO Auto-generated method stub
// Stub: no data available; always null (not an empty list).
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#getAccessibleGroups(java.lang.String,
* java.lang.String, java.lang.String)
*/
public List getAccessibleGroups(String arg0, String arg1, String arg2) throws CSException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#getApplication(java.lang.String)
*/
public Application getApplication(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#getApplicationContext()
*/
public ApplicationContext getApplicationContext() {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#getPrincipals(java.lang.String)
*/
public Principal[] getPrincipals(String arg0) {
// TODO Auto-generated method stub
// Stub: returns null rather than an empty array — callers must null-check.
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#getPrivilegeMap(java.lang.String,
* java.util.Collection)
*/
public Collection getPrivilegeMap(String arg0, Collection arg1) throws CSException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#getProtectionElement(java.lang.String)
*/
public ProtectionElement getProtectionElement(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#getProtectionElement(java.lang.String,
* java.lang.String)
*/
public ProtectionElement getProtectionElement(String arg0, String arg1) {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#getProtectionElementById(java.lang.String)
*/
public ProtectionElement getProtectionElementById(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#getProtectionGroups()
*/
public List getProtectionGroups() {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#getProtectionGroups(java.lang.String)
*/
public Set getProtectionGroups(String arg0) throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#getUser(java.lang.String)
*/
public User getUser(String arg0) {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#initialize(java.lang.String)
*/
public void initialize(String arg0) {
// TODO Auto-generated method stub
// Stub: no initialization performed.
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#removeOwnerForProtectionElement(java.lang.String,
* java.lang.String[])
*/
// NOTE(review): the remaining AuthorizationManager stubs below are also
// auto-generated no-ops: secure* methods return null instead of a filtered
// result, and setters/removers persist nothing.
public void removeOwnerForProtectionElement(String arg0, String[] arg1)
throws CSTransactionException {
// TODO Auto-generated method stub
// Stub: no-op.
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#removeOwnerForProtectionElement(java.lang.String,
* java.lang.String, java.lang.String)
*/
public void removeOwnerForProtectionElement(String arg0, String arg1, String arg2)
throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#secureCollection(java.lang.String,
* java.util.Collection)
*/
public Collection secureCollection(String arg0, Collection arg1) throws CSException {
// TODO Auto-generated method stub
// Stub: returns null, i.e. the input collection is NOT passed through.
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#secureObject(java.lang.String,
* java.lang.Object)
*/
public Object secureObject(String arg0, Object arg1) throws CSException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#secureUpdate(java.lang.String,
* java.lang.Object, java.lang.Object)
*/
public Object secureUpdate(String arg0, Object arg1, Object arg2) throws CSException {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#setAuditUserInfo(java.lang.String,
* java.lang.String)
*/
public void setAuditUserInfo(String arg0, String arg1) {
// TODO Auto-generated method stub
// Stub: audit information is discarded.
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#setEncryptionEnabled(boolean)
*/
public void setEncryptionEnabled(boolean arg0) {
// TODO Auto-generated method stub
// Stub: flag is ignored.
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#setOwnerForProtectionElement(java.lang.String,
* java.lang.String[])
*/
public void setOwnerForProtectionElement(String arg0, String[] arg1)
throws CSTransactionException {
// TODO Auto-generated method stub
}
/*
* (non-Javadoc)
*
* @see gov.nih.nci.security.AuthorizationManager#setOwnerForProtectionElement(java.lang.String,
* java.lang.String, java.lang.String)
*/
public void setOwnerForProtectionElement(String arg0, String arg1, String arg2)
throws CSTransactionException {
// TODO Auto-generated method stub
}
// NOTE(review): this final batch of AuthorizationManager stubs (no javadoc
// was generated for these) follows the same pattern: void methods are silent
// no-ops, boolean checks always deny, and lookups always return null.
// Stub: association is not persisted.
public void addGroupRoleToProtectionGroup(String protectionGroupId,
String groupId, String[] rolesId) throws CSTransactionException {
// TODO Auto-generated method stub
}
// Stub: no-op.
public void addGroupsToUser(String userId, String[] groupIds)
throws CSTransactionException {
// TODO Auto-generated method stub
}
// Stub: no-op.
public void addOwners(String protectionElementId, String[] userIds)
throws CSTransactionException {
// TODO Auto-generated method stub
}
// Stub: no-op.
public void addPrivilegesToRole(String roleId, String[] privilegeIds)
throws CSTransactionException {
// TODO Auto-generated method stub
}
// Stub: no-op.
public void addProtectionElements(String protectionGroupId,
String[] protectionElementIds) throws CSTransactionException {
// TODO Auto-generated method stub
}
// Stub: no-op.
public void addToProtectionGroups(String protectionElementId,
String[] protectionGroupIds) throws CSTransactionException {
// TODO Auto-generated method stub
}
// Stub: no-op.
public void addUserRoleToProtectionGroup(String userId, String[] rolesId,
String protectionGroupId) throws CSTransactionException {
// TODO Auto-generated method stub
}
// Stub: no-op.
public void addUsersToGroup(String groupId, String[] userIds)
throws CSTransactionException {
// TODO Auto-generated method stub
}
// Stub: attribute-level check always denies access.
public boolean checkPermission(String userName, String objectId,
String attributeName, String attributeValue, String privilegeName)
throws CSException {
// TODO Auto-generated method stub
return false;
}
// Stub: attribute-level group check always denies access.
public boolean checkPermissionForGroup(String groupName, String objectId,
String attributeName, String attributeValue, String privilegeName)
throws CSException {
// TODO Auto-generated method stub
return false;
}
// Stub: filter clause is not persisted.
public void createFilterClause(FilterClause filterClause)
throws CSTransactionException {
// TODO Auto-generated method stub
}
// Stub: mapping element is not persisted.
public void createInstanceLevelMappingElement(
InstanceLevelMappingElement instanceLevelMappingElement)
throws CSTransactionException {
// TODO Auto-generated method stub
}
// Stub: always returns null (no attribute map available).
public List getAttributeMap(String userName, String className,
String privilegeName) {
// TODO Auto-generated method stub
return null;
}
// Stub: always returns null.
public List getAttributeMapForGroup(String groupName, String className,
String privilegeName) {
// TODO Auto-generated method stub
return null;
}
// Stub: always returns null instead of throwing CSObjectNotFoundException.
public FilterClause getFilterClauseById(String filterClauseId)
throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
// Stub: always returns null.
public InstanceLevelMappingElement getInstanceLevelMappingElementById(
String instanceLevelMappingElementId)
throws CSObjectNotFoundException {
// TODO Auto-generated method stub
return null;
}
// Stub: URL-based initialization is ignored.
public void initialize(String applicationContextName, URL url) {
// TODO Auto-generated method stub
}
// Stub: no-op.
public void maintainInstanceTables(String instanceLevelMappingElementId)
throws CSObjectNotFoundException, CSDataAccessException {
// TODO Auto-generated method stub
}
// Stub: no-op.
public void modifyFilterClause(FilterClause filterClause)
throws CSTransactionException {
// TODO Auto-generated method stub
}
// Stub: no-op.
public void modifyInstanceLevelMappingElement(
InstanceLevelMappingElement instanceLevelMappingElement)
throws CSTransactionException {
// TODO Auto-generated method stub
}
// Stub: no-op.
public void refreshInstanceTables(boolean instanceLevelSecurityForUser)
throws CSObjectNotFoundException, CSDataAccessException {
// TODO Auto-generated method stub
}
// Stub: no-op.
public void removeFilterClause(String filterClauseId)
throws CSTransactionException {
// TODO Auto-generated method stub
}
// Stub: no-op.
public void removeInstanceLevelMappingElement(
String instanceLevelMappingElementId) throws CSTransactionException {
// TODO Auto-generated method stub
}
}
| |
/*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.deduplication.hibernate;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import java.util.List;
import org.hisp.dhis.IntegrationTestBase;
import org.hisp.dhis.deduplication.PotentialDuplicateStore;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.trackedentity.TrackedEntityAttribute;
import org.hisp.dhis.trackedentity.TrackedEntityAttributeService;
import org.hisp.dhis.trackedentity.TrackedEntityInstance;
import org.hisp.dhis.trackedentity.TrackedEntityInstanceService;
import org.hisp.dhis.trackedentityattributevalue.TrackedEntityAttributeValueService;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import com.google.common.collect.Lists;
/**
 * Integration tests for
 * {@code PotentialDuplicateStore#moveTrackedEntityAttributeValues(TrackedEntityInstance, TrackedEntityInstance, List)}.
 *
 * <p>Setup creates three tracked entity instances in one org unit:
 * <ul>
 *   <li>{@code original} — attributes A, B, C with value "AttributeA"</li>
 *   <li>{@code duplicate} — attributes A..E with value "AttributeB"</li>
 *   <li>{@code control} — attributes A, B, C with value "AttributeC" (must never change)</li>
 * </ul>
 *
 * <p>Each test moves a different set of attribute uids from {@code duplicate}
 * onto {@code original} and verifies the resulting attribute counts and values.
 * The three tests previously duplicated the whole verification block; it is
 * now factored into {@link #moveAndVerify(List, int, int)}.
 */
@Disabled( "moveAttributes method do not really belong to a store now. We should find a better place for it" )
class PotentialDuplicateStoreTEAVTest extends IntegrationTestBase
{

    @Autowired
    private PotentialDuplicateStore potentialDuplicateStore;

    @Autowired
    private TrackedEntityInstanceService trackedEntityInstanceService;

    @Autowired
    private TrackedEntityAttributeService trackedEntityAttributeService;

    @Autowired
    private TrackedEntityAttributeValueService trackedEntityAttributeValueService;

    @Autowired
    private OrganisationUnitService organisationUnitService;

    private TrackedEntityInstance original;

    private TrackedEntityInstance duplicate;

    // Third instance that is never involved in a move; used to verify moves
    // do not leak onto unrelated instances.
    private TrackedEntityInstance control;

    private TrackedEntityAttribute trackedEntityAttributeA;

    private TrackedEntityAttribute trackedEntityAttributeB;

    private TrackedEntityAttribute trackedEntityAttributeC;

    private TrackedEntityAttribute trackedEntityAttributeD;

    private TrackedEntityAttribute trackedEntityAttributeE;

    @BeforeEach
    void setupTest()
    {
        OrganisationUnit ou = createOrganisationUnit( "OU_A" );
        organisationUnitService.addOrganisationUnit( ou );
        original = createTrackedEntityInstance( ou );
        duplicate = createTrackedEntityInstance( ou );
        control = createTrackedEntityInstance( ou );
        trackedEntityInstanceService.addTrackedEntityInstance( original );
        trackedEntityInstanceService.addTrackedEntityInstance( duplicate );
        trackedEntityInstanceService.addTrackedEntityInstance( control );
        trackedEntityAttributeA = createTrackedEntityAttribute( 'A' );
        trackedEntityAttributeB = createTrackedEntityAttribute( 'B' );
        trackedEntityAttributeC = createTrackedEntityAttribute( 'C' );
        trackedEntityAttributeD = createTrackedEntityAttribute( 'D' );
        trackedEntityAttributeE = createTrackedEntityAttribute( 'E' );
        trackedEntityAttributeService.addTrackedEntityAttribute( trackedEntityAttributeA );
        trackedEntityAttributeService.addTrackedEntityAttribute( trackedEntityAttributeB );
        trackedEntityAttributeService.addTrackedEntityAttribute( trackedEntityAttributeC );
        trackedEntityAttributeService.addTrackedEntityAttribute( trackedEntityAttributeD );
        trackedEntityAttributeService.addTrackedEntityAttribute( trackedEntityAttributeE );
        original.addAttributeValue( createTrackedEntityAttributeValue( 'A', original, trackedEntityAttributeA ) );
        original.addAttributeValue( createTrackedEntityAttributeValue( 'A', original, trackedEntityAttributeB ) );
        original.addAttributeValue( createTrackedEntityAttributeValue( 'A', original, trackedEntityAttributeC ) );
        duplicate.addAttributeValue( createTrackedEntityAttributeValue( 'B', duplicate, trackedEntityAttributeA ) );
        duplicate.addAttributeValue( createTrackedEntityAttributeValue( 'B', duplicate, trackedEntityAttributeB ) );
        duplicate.addAttributeValue( createTrackedEntityAttributeValue( 'B', duplicate, trackedEntityAttributeC ) );
        duplicate.addAttributeValue( createTrackedEntityAttributeValue( 'B', duplicate, trackedEntityAttributeD ) );
        duplicate.addAttributeValue( createTrackedEntityAttributeValue( 'B', duplicate, trackedEntityAttributeE ) );
        control.addAttributeValue( createTrackedEntityAttributeValue( 'C', control, trackedEntityAttributeA ) );
        control.addAttributeValue( createTrackedEntityAttributeValue( 'C', control, trackedEntityAttributeB ) );
        control.addAttributeValue( createTrackedEntityAttributeValue( 'C', control, trackedEntityAttributeC ) );
        original.getTrackedEntityAttributeValues()
            .forEach( trackedEntityAttributeValueService::addTrackedEntityAttributeValue );
        duplicate.getTrackedEntityAttributeValues()
            .forEach( trackedEntityAttributeValueService::addTrackedEntityAttributeValue );
        control.getTrackedEntityAttributeValues()
            .forEach( trackedEntityAttributeValueService::addTrackedEntityAttributeValue );
    }

    @Test
    void moveTrackedEntityAttributeValuesSingleTea()
    {
        // Moving one existing attribute overwrites it on original (still 3)
        // and removes it from duplicate (5 -> 4).
        List<String> teas = Lists.newArrayList( trackedEntityAttributeA.getUid() );
        moveAndVerify( teas, 3, 4 );
    }

    @Test
    void moveTrackedEntityAttributeValuesMultipleTeas()
    {
        // Moving two existing attributes overwrites both on original (still 3)
        // and removes them from duplicate (5 -> 3).
        List<String> teas = Lists.newArrayList( trackedEntityAttributeA.getUid(), trackedEntityAttributeB.getUid() );
        moveAndVerify( teas, 3, 3 );
    }

    @Test
    void moveTrackedEntityAttributeValuesByOverwritingAndCreatingNew()
    {
        // Attribute D does not exist on original, so it is created (3 -> 4);
        // B is overwritten in place. Duplicate loses both (5 -> 3).
        List<String> teas = Lists.newArrayList( trackedEntityAttributeD.getUid(), trackedEntityAttributeB.getUid() );
        moveAndVerify( teas, 4, 3 );
    }

    /**
     * Moves the given attribute uids from {@code duplicate} to {@code original}
     * and verifies the post-move state.
     *
     * @param teas attribute uids to move
     * @param expectedOriginalCount expected attribute value count on original after the move
     * @param expectedDuplicateCount expected attribute value count on duplicate after the move
     */
    private void moveAndVerify( List<String> teas, int expectedOriginalCount, int expectedDuplicateCount )
    {
        transactionTemplate.execute( status -> {
            potentialDuplicateStore.moveTrackedEntityAttributeValues( original, duplicate, teas );
            return null;
        } );
        transactionTemplate.execute( status -> {
            // Clear the session so we get new data from the DB for the next
            // queries.
            dbmsManager.clearSession();
            TrackedEntityInstance _original = trackedEntityInstanceService
                .getTrackedEntityInstance( original.getUid() );
            TrackedEntityInstance _duplicate = trackedEntityInstanceService
                .getTrackedEntityInstance( duplicate.getUid() );
            assertNotNull( _original );
            assertNotNull( _duplicate );
            assertEquals( expectedOriginalCount, _original.getTrackedEntityAttributeValues().size() );
            assertEquals( expectedDuplicateCount, _duplicate.getTrackedEntityAttributeValues().size() );
            // Moved attributes carry the duplicate's value; untouched ones keep
            // the original's value.
            _original.getTrackedEntityAttributeValues().forEach( teav -> {
                if ( teas.contains( teav.getAttribute().getUid() ) )
                {
                    assertEquals( "AttributeB", teav.getValue() );
                }
                else
                {
                    assertEquals( "AttributeA", teav.getValue() );
                }
            } );
            // The uninvolved control instance must be unchanged.
            TrackedEntityInstance _control = trackedEntityInstanceService.getTrackedEntityInstance( control.getUid() );
            assertNotNull( _control );
            assertEquals( 3, _control.getTrackedEntityAttributeValues().size() );
            return null;
        } );
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.benchmark.search.aggregations;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import com.google.common.collect.Maps;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.SizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.node.Node;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.client.Requests.createIndexRequest;
import static org.elasticsearch.client.Requests.getRequest;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.node.NodeBuilder.nodeBuilder;
import static org.elasticsearch.search.aggregations.AggregationBuilders.percentiles;
/**
 * Standalone benchmark comparing the accuracy and speed of the
 * {@code percentiles} aggregation against exact percentiles computed
 * client-side from the raw values.
 *
 * <p>For each {@link Distribution} it indexes {@link #NUM_DOCS} integer
 * documents, stores the exact percentiles in a marker document ("percentiles"),
 * then reports the aggregation's sum of squared errors and query timings.
 */
public class PercentilesAggregationSearchBenchmark {

    private static final int AMPLITUDE = 10000;
    private static final int NUM_DOCS = (int) SizeValue.parseSizeValue("1m").singles();
    private static final int BATCH = 100;
    private static final String CLUSTER_NAME = PercentilesAggregationSearchBenchmark.class.getSimpleName();
    private static final double[] PERCENTILES = new double[] { 0, 0.01, 0.1, 1, 10, 25, 50, 75, 90, 99, 99.9, 99.99, 100};
    private static final int QUERY_WARMUP = 10;
    private static final int QUERY_COUNT = 20;

    // Fixed seed so every run indexes an identical data set.
    private static final Random R = new Random(0);

    // we generate ints to not disadvantage qdigest which only works with integers
    private enum Distribution {
        UNIFORM {
            @Override
            int next() {
                return (int) (R.nextDouble() * AMPLITUDE);
            }
        },
        GAUSS {
            @Override
            int next() {
                // Bug fix: this branch was byte-identical to UNIFORM
                // (R.nextDouble() * AMPLITUDE), so the "gaussian" benchmark
                // actually measured a uniform distribution. Generate a normal
                // distribution centered mid-range and clamp into [0, AMPLITUDE).
                double v = AMPLITUDE / 2.0 + R.nextGaussian() * AMPLITUDE / 8.0;
                return (int) Math.max(0, Math.min(AMPLITUDE - 1, v));
            }
        },
        LOG_NORMAL {
            @Override
            int next() {
                return (int) Math.exp(R.nextDouble() * Math.log(AMPLITUDE));
            }
        };

        /** Index name for this distribution (lower-cased enum name). */
        String indexName() {
            return name().toLowerCase(Locale.ROOT);
        }

        /** Next pseudo-random sample from this distribution. */
        abstract int next();
    }

    /**
     * Exact percentile by linear interpolation over the sorted values.
     *
     * @param percentile requested percentile in [0, 100]
     * @param sortedValues values sorted ascending; must be non-empty
     */
    private static double accuratePercentile(double percentile, int[] sortedValues) {
        final double index = percentile / 100 * (sortedValues.length - 1);
        final int intIndex = (int) index;
        final double delta = index - intIndex;
        if (delta == 0) {
            return sortedValues[intIndex];
        } else {
            return sortedValues[intIndex] * (1 - delta) + sortedValues[intIndex + 1] * delta;
        }
    }

    public static void main(String[] args) throws Exception {
        Settings settings = settingsBuilder()
                .put("index.refresh_interval", "-1")
                .put(SETTING_NUMBER_OF_SHARDS, 100) // to also test performance and accuracy of the reduce phase
                .put(SETTING_NUMBER_OF_REPLICAS, 0)
                .build();
        Node[] nodes = new Node[1];
        for (int i = 0; i < nodes.length; i++) {
            nodes[i] = nodeBuilder().clusterName(CLUSTER_NAME)
                    .settings(settingsBuilder().put(settings).put("name", "node" + i))
                    .node();
        }
        Node clientNode = nodeBuilder()
                .clusterName(CLUSTER_NAME)
                .settings(settingsBuilder().put(settings).put("name", "client")).client(true).node();
        Client client = clientNode.client();
        for (Distribution d : Distribution.values()) {
            try {
                // client.admin().indices().prepareDelete(d.indexName()).execute().actionGet();
                client.admin().indices().create(createIndexRequest(d.indexName()).settings(settings)).actionGet();
            } catch (Exception e) {
                // Index already present: data (and the "percentiles" marker doc)
                // from a previous run is reused.
                System.out.println("Index " + d.indexName() + " already exists, skipping index creation");
                continue;
            }
            final int[] values = new int[NUM_DOCS];
            for (int i = 0; i < NUM_DOCS; ++i) {
                values[i] = d.next();
            }
            System.out.println("Indexing " + NUM_DOCS + " documents into " + d.indexName());
            StopWatch stopWatch = new StopWatch().start();
            for (int i = 0; i < NUM_DOCS; ) {
                BulkRequestBuilder request = client.prepareBulk();
                for (int j = 0; j < BATCH && i < NUM_DOCS; ++j) {
                    request.add(client.prepareIndex(d.indexName(), "values", Integer.toString(i)).setSource("v", values[i]));
                    ++i;
                }
                BulkResponse response = request.execute().actionGet();
                if (response.hasFailures()) {
                    System.err.println("--> failures...");
                    System.err.println(response.buildFailureMessage());
                }
                if ((i % 100000) == 0) {
                    System.out.println("--> Indexed " + i + " took " + stopWatch.stop().lastTaskTime());
                    stopWatch.start();
                }
            }
            // Store the exact percentiles alongside the data so the precision
            // phase can compare against them.
            Arrays.sort(values);
            XContentBuilder builder = JsonXContent.contentBuilder().startObject();
            for (double percentile : PERCENTILES) {
                builder.field(Double.toString(percentile), accuratePercentile(percentile, values));
            }
            client.prepareIndex(d.indexName(), "values", "percentiles").setSource(builder.endObject()).execute().actionGet();
            client.admin().indices().prepareRefresh(d.indexName()).execute().actionGet();
        }
        ClusterHealthResponse clusterHealthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("10m").execute().actionGet();
        if (clusterHealthResponse.isTimedOut()) {
            System.err.println("--> Timed out waiting for cluster health");
        }
        System.out.println("## Precision");
        for (Distribution d : Distribution.values()) {
            System.out.println("#### " + d);
            // NUM_DOCS value docs plus the one "percentiles" marker doc.
            final long count = client.prepareCount(d.indexName()).setQuery(matchAllQuery()).execute().actionGet().getCount();
            if (count != NUM_DOCS + 1) {
                throw new Error("Expected " + NUM_DOCS + " documents, got " + (count - 1));
            }
            Map<String, Object> percentilesUnsorted = client.get(getRequest(d.indexName()).type("values").id("percentiles")).actionGet().getSourceAsMap();
            SortedMap<Double, Double> percentiles = Maps.newTreeMap();
            for (Map.Entry<String, Object> entry : percentilesUnsorted.entrySet()) {
                percentiles.put(Double.parseDouble(entry.getKey()), (Double) entry.getValue());
            }
            System.out.println("Expected percentiles: " + percentiles);
            System.out.println();
            SearchResponse resp = client.prepareSearch(d.indexName()).setSearchType(SearchType.COUNT).addAggregation(percentiles("pcts").field("v").percentiles(PERCENTILES)).execute().actionGet();
            Percentiles pcts = resp.getAggregations().get("pcts");
            Map<Double, Double> asMap = Maps.newLinkedHashMap();
            double sumOfErrorSquares = 0;
            for (Percentile percentile : pcts) {
                asMap.put(percentile.getPercent(), percentile.getValue());
                double error = percentile.getValue() - percentiles.get(percentile.getPercent());
                sumOfErrorSquares += error * error;
            }
            System.out.println("Percentiles: " + asMap);
            System.out.println("Sum of error squares: " + sumOfErrorSquares);
            System.out.println();
        }
        System.out.println("## Performance");
        for (int i = 0; i < 3; ++i) {
            for (Distribution d : Distribution.values()) {
                System.out.println("#### " + d);
                for (int j = 0; j < QUERY_WARMUP; ++j) {
                    client.prepareSearch(d.indexName()).setSearchType(SearchType.COUNT).addAggregation(percentiles("pcts").field("v").percentiles(PERCENTILES)).execute().actionGet();
                }
                long start = System.nanoTime();
                for (int j = 0; j < QUERY_COUNT; ++j) {
                    client.prepareSearch(d.indexName()).setSearchType(SearchType.COUNT).addAggregation(percentiles("pcts").field("v").percentiles(PERCENTILES)).execute().actionGet();
                }
                System.out.println(new TimeValue((System.nanoTime() - start) / QUERY_COUNT, TimeUnit.NANOSECONDS));
            }
        }
    }
}
| |
/*
* Copyright 2002-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.context.event;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.UndeclaredThrowableException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.StringJoiner;
import java.util.concurrent.CompletionStage;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import org.springframework.aop.support.AopUtils;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.PayloadApplicationEvent;
import org.springframework.context.expression.AnnotatedElementKey;
import org.springframework.core.BridgeMethodResolver;
import org.springframework.core.Ordered;
import org.springframework.core.ReactiveAdapter;
import org.springframework.core.ReactiveAdapterRegistry;
import org.springframework.core.ResolvableType;
import org.springframework.core.annotation.AnnotatedElementUtils;
import org.springframework.core.annotation.Order;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.util.concurrent.ListenableFuture;
/**
* {@link GenericApplicationListener} adapter that delegates the processing of
* an event to an {@link EventListener} annotated method.
*
* <p>Delegates to {@link #processEvent(ApplicationEvent)} to give subclasses
* a chance to deviate from the default. Unwraps the content of a
* {@link PayloadApplicationEvent} if necessary to allow a method declaration
* to define any arbitrary event type. If a condition is defined, it is
* evaluated prior to invoking the underlying method.
*
* @author Stephane Nicoll
* @author Juergen Hoeller
* @author Sam Brannen
* @since 4.2
*/
public class ApplicationListenerMethodAdapter implements GenericApplicationListener {
// Whether Reactive Streams is on the classpath — presumably gates reactive
// return-value handling; the consuming code is not visible here (TODO confirm).
private static final boolean reactiveStreamsPresent = ClassUtils.isPresent(
"org.reactivestreams.Publisher", ApplicationListenerMethodAdapter.class.getClassLoader());
protected final Log logger = LogFactory.getLog(getClass());
// Name of the bean declaring the listener method; set in the constructor.
private final String beanName;
// The (bridged) listener method to invoke.
private final Method method;
// Most specific method on the target class; annotations are looked up here.
private final Method targetMethod;
// Cache key built from targetMethod + target class.
private final AnnotatedElementKey methodKey;
// Event types this listener accepts, from @EventListener#classes or the
// single method parameter (see resolveDeclaredEventTypes).
private final List<ResolvableType> declaredEventTypes;
// SpEL condition from @EventListener#condition; null when the annotation is
// absent (empty string when present but unset).
@Nullable
private final String condition;
// Order from a merged @Order annotation, else Ordered.LOWEST_PRECEDENCE.
private final int order;
// Listener id from @EventListener#id; lazily defaulted and cached by
// getListenerId(), hence volatile.
@Nullable
private volatile String listenerId;
@Nullable
private ApplicationContext applicationContext;
@Nullable
private EventExpressionEvaluator evaluator;
/**
 * Construct a new ApplicationListenerMethodAdapter.
 * @param beanName the name of the bean to invoke the listener method on
 * @param targetClass the target class that the method is declared on
 * @param method the listener method to invoke
 */
public ApplicationListenerMethodAdapter(String beanName, Class<?> targetClass, Method method) {
this.beanName = beanName;
// Resolve a possible bridge method first; targetMethod is derived from it below.
this.method = BridgeMethodResolver.findBridgedMethod(method);
// For JDK proxies the method itself is used; otherwise find the most specific
// override on the target class so annotation lookups see the real declaration.
this.targetMethod = (!Proxy.isProxyClass(targetClass) ?
AopUtils.getMostSpecificMethod(method, targetClass) : this.method);
this.methodKey = new AnnotatedElementKey(this.targetMethod, targetClass);
// @EventListener may be absent (e.g. programmatic registration); all derived
// fields below tolerate a null annotation.
EventListener ann = AnnotatedElementUtils.findMergedAnnotation(this.targetMethod, EventListener.class);
this.declaredEventTypes = resolveDeclaredEventTypes(method, ann);
this.condition = (ann != null ? ann.condition() : null);
this.order = resolveOrder(this.targetMethod);
// An empty id means "not specified": leave listenerId null so getListenerId()
// falls back to the default id.
String id = (ann != null ? ann.id() : "");
this.listenerId = (!id.isEmpty() ? id : null);
}
/**
 * Determine the event types this listener method declares.
 * <p>Types listed in {@code @EventListener#classes} win over the method's
 * parameter type; a parameter-less method is only legal when the annotation
 * supplies the types explicitly.
 * @param method the listener method (at most one parameter)
 * @param ann the merged {@code @EventListener} annotation, if any
 * @throws IllegalStateException if the method has more than one parameter,
 * or no parameter and no annotation-declared classes
 */
private static List<ResolvableType> resolveDeclaredEventTypes(Method method, @Nullable EventListener ann) {
int paramCount = method.getParameterCount();
if (paramCount > 1) {
throw new IllegalStateException(
"Maximum one parameter is allowed for event listener method: " + method);
}
Class<?>[] annotatedClasses = (ann != null ? ann.classes() : new Class<?>[0]);
if (annotatedClasses.length > 0) {
List<ResolvableType> eventTypes = new ArrayList<>(annotatedClasses.length);
for (Class<?> annotatedClass : annotatedClasses) {
eventTypes.add(ResolvableType.forClass(annotatedClass));
}
return eventTypes;
}
if (paramCount == 0) {
throw new IllegalStateException(
"Event parameter is mandatory for event listener method: " + method);
}
return Collections.singletonList(ResolvableType.forMethodParameter(method, 0));
}
private static int resolveOrder(Method method) {
Order ann = AnnotatedElementUtils.findMergedAnnotation(method, Order.class);
return (ann != null ? ann.value() : Ordered.LOWEST_PRECEDENCE);
}
/**
* Initialize this instance.
*/
void init(ApplicationContext applicationContext, @Nullable EventExpressionEvaluator evaluator) {
this.applicationContext = applicationContext;
this.evaluator = evaluator;
}
@Override
public void onApplicationEvent(ApplicationEvent event) {
processEvent(event);
}
@Override
public boolean supportsEventType(ResolvableType eventType) {
for (ResolvableType declaredEventType : this.declaredEventTypes) {
if (declaredEventType.isAssignableFrom(eventType)) {
return true;
}
if (PayloadApplicationEvent.class.isAssignableFrom(eventType.toClass())) {
ResolvableType payloadType = eventType.as(PayloadApplicationEvent.class).getGeneric();
if (declaredEventType.isAssignableFrom(payloadType)) {
return true;
}
}
}
return eventType.hasUnresolvableGenerics();
}
@Override
public boolean supportsSourceType(@Nullable Class<?> sourceType) {
return true;
}
@Override
public int getOrder() {
return this.order;
}
@Override
public String getListenerId() {
String id = this.listenerId;
if (id == null) {
id = getDefaultListenerId();
this.listenerId = id;
}
return id;
}
/**
* Determine the default id for the target listener, to be applied in case of
* no {@link EventListener#id() annotation-specified id value}.
* <p>The default implementation builds a method name with parameter types.
* @since 5.3.5
* @see #getListenerId()
*/
protected String getDefaultListenerId() {
Method method = getTargetMethod();
StringJoiner sj = new StringJoiner(",", "(", ")");
for (Class<?> paramType : method.getParameterTypes()) {
sj.add(paramType.getName());
}
return ClassUtils.getQualifiedMethodName(method) + sj.toString();
}
/**
* Process the specified {@link ApplicationEvent}, checking if the condition
* matches and handling a non-null result, if any.
*/
public void processEvent(ApplicationEvent event) {
Object[] args = resolveArguments(event);
if (shouldHandle(event, args)) {
Object result = doInvoke(args);
if (result != null) {
handleResult(result);
}
else {
logger.trace("No result object given - no result to handle");
}
}
}
/**
* Resolve the method arguments to use for the specified {@link ApplicationEvent}.
* <p>These arguments will be used to invoke the method handled by this instance.
* Can return {@code null} to indicate that no suitable arguments could be resolved
* and therefore the method should not be invoked at all for the specified event.
*/
@Nullable
protected Object[] resolveArguments(ApplicationEvent event) {
ResolvableType declaredEventType = getResolvableType(event);
if (declaredEventType == null) {
return null;
}
if (this.method.getParameterCount() == 0) {
return new Object[0];
}
Class<?> declaredEventClass = declaredEventType.toClass();
if (!ApplicationEvent.class.isAssignableFrom(declaredEventClass) &&
event instanceof PayloadApplicationEvent) {
Object payload = ((PayloadApplicationEvent<?>) event).getPayload();
if (declaredEventClass.isInstance(payload)) {
return new Object[] {payload};
}
}
return new Object[] {event};
}
protected void handleResult(Object result) {
if (reactiveStreamsPresent && new ReactiveResultHandler().subscribeToPublisher(result)) {
if (logger.isTraceEnabled()) {
logger.trace("Adapted to reactive result: " + result);
}
}
else if (result instanceof CompletionStage) {
((CompletionStage<?>) result).whenComplete((event, ex) -> {
if (ex != null) {
handleAsyncError(ex);
}
else if (event != null) {
publishEvent(event);
}
});
}
else if (result instanceof ListenableFuture) {
((ListenableFuture<?>) result).addCallback(this::publishEvents, this::handleAsyncError);
}
else {
publishEvents(result);
}
}
private void publishEvents(Object result) {
if (result.getClass().isArray()) {
Object[] events = ObjectUtils.toObjectArray(result);
for (Object event : events) {
publishEvent(event);
}
}
else if (result instanceof Collection<?> events) {
for (Object event : events) {
publishEvent(event);
}
}
else {
publishEvent(result);
}
}
private void publishEvent(@Nullable Object event) {
if (event != null) {
Assert.notNull(this.applicationContext, "ApplicationContext must not be null");
this.applicationContext.publishEvent(event);
}
}
protected void handleAsyncError(Throwable t) {
logger.error("Unexpected error occurred in asynchronous listener", t);
}
private boolean shouldHandle(ApplicationEvent event, @Nullable Object[] args) {
if (args == null) {
return false;
}
String condition = getCondition();
if (StringUtils.hasText(condition)) {
Assert.notNull(this.evaluator, "EventExpressionEvaluator must not be null");
return this.evaluator.condition(
condition, event, this.targetMethod, this.methodKey, args, this.applicationContext);
}
return true;
}
/**
* Invoke the event listener method with the given argument values.
*/
@Nullable
protected Object doInvoke(Object... args) {
Object bean = getTargetBean();
// Detect package-protected NullBean instance through equals(null) check
if (bean.equals(null)) {
return null;
}
ReflectionUtils.makeAccessible(this.method);
try {
return this.method.invoke(bean, args);
}
catch (IllegalArgumentException ex) {
assertTargetBean(this.method, bean, args);
throw new IllegalStateException(getInvocationErrorMessage(bean, ex.getMessage(), args), ex);
}
catch (IllegalAccessException ex) {
throw new IllegalStateException(getInvocationErrorMessage(bean, ex.getMessage(), args), ex);
}
catch (InvocationTargetException ex) {
// Throw underlying exception
Throwable targetException = ex.getTargetException();
if (targetException instanceof RuntimeException) {
throw (RuntimeException) targetException;
}
else {
String msg = getInvocationErrorMessage(bean, "Failed to invoke event listener method", args);
throw new UndeclaredThrowableException(targetException, msg);
}
}
}
/**
* Return the target bean instance to use.
*/
protected Object getTargetBean() {
Assert.notNull(this.applicationContext, "ApplicationContext must no be null");
return this.applicationContext.getBean(this.beanName);
}
/**
* Return the target listener method.
* @since 5.3
*/
protected Method getTargetMethod() {
return this.targetMethod;
}
/**
* Return the condition to use.
* <p>Matches the {@code condition} attribute of the {@link EventListener}
* annotation or any matching attribute on a composed annotation that
* is meta-annotated with {@code @EventListener}.
*/
@Nullable
protected String getCondition() {
return this.condition;
}
/**
* Add additional details such as the bean type and method signature to
* the given error message.
* @param message error message to append the HandlerMethod details to
*/
protected String getDetailedErrorMessage(Object bean, String message) {
StringBuilder sb = new StringBuilder(message).append('\n');
sb.append("HandlerMethod details: \n");
sb.append("Bean [").append(bean.getClass().getName()).append("]\n");
sb.append("Method [").append(this.method.toGenericString()).append("]\n");
return sb.toString();
}
/**
* Assert that the target bean class is an instance of the class where the given
* method is declared. In some cases the actual bean instance at event-
* processing time may be a JDK dynamic proxy (lazy initialization, prototype
* beans, and others). Event listener beans that require proxying should prefer
* class-based proxy mechanisms.
*/
private void assertTargetBean(Method method, Object targetBean, Object[] args) {
Class<?> methodDeclaringClass = method.getDeclaringClass();
Class<?> targetBeanClass = targetBean.getClass();
if (!methodDeclaringClass.isAssignableFrom(targetBeanClass)) {
String msg = "The event listener method class '" + methodDeclaringClass.getName() +
"' is not an instance of the actual bean class '" +
targetBeanClass.getName() + "'. If the bean requires proxying " +
"(e.g. due to @Transactional), please use class-based proxying.";
throw new IllegalStateException(getInvocationErrorMessage(targetBean, msg, args));
}
}
private String getInvocationErrorMessage(Object bean, String message, Object[] resolvedArgs) {
StringBuilder sb = new StringBuilder(getDetailedErrorMessage(bean, message));
sb.append("Resolved arguments: \n");
for (int i = 0; i < resolvedArgs.length; i++) {
sb.append('[').append(i).append("] ");
if (resolvedArgs[i] == null) {
sb.append("[null] \n");
}
else {
sb.append("[type=").append(resolvedArgs[i].getClass().getName()).append("] ");
sb.append("[value=").append(resolvedArgs[i]).append("]\n");
}
}
return sb.toString();
}
@Nullable
private ResolvableType getResolvableType(ApplicationEvent event) {
ResolvableType payloadType = null;
if (event instanceof PayloadApplicationEvent) {
PayloadApplicationEvent<?> payloadEvent = (PayloadApplicationEvent<?>) event;
ResolvableType eventType = payloadEvent.getResolvableType();
if (eventType != null) {
payloadType = eventType.as(PayloadApplicationEvent.class).getGeneric();
}
}
for (ResolvableType declaredEventType : this.declaredEventTypes) {
Class<?> eventClass = declaredEventType.toClass();
if (!ApplicationEvent.class.isAssignableFrom(eventClass) &&
payloadType != null && declaredEventType.isAssignableFrom(payloadType)) {
return declaredEventType;
}
if (eventClass.isInstance(event)) {
return declaredEventType;
}
}
return null;
}
@Override
public String toString() {
return this.method.toGenericString();
}
private class ReactiveResultHandler {
public boolean subscribeToPublisher(Object result) {
ReactiveAdapter adapter = ReactiveAdapterRegistry.getSharedInstance().getAdapter(result.getClass());
if (adapter != null) {
adapter.toPublisher(result).subscribe(new EventPublicationSubscriber());
return true;
}
return false;
}
}
private class EventPublicationSubscriber implements Subscriber<Object> {
@Override
public void onSubscribe(Subscription s) {
s.request(Integer.MAX_VALUE);
}
@Override
public void onNext(Object o) {
publishEvents(o);
}
@Override
public void onError(Throwable t) {
handleAsyncError(t);
}
@Override
public void onComplete() {
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.collections4.properties;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.Writer;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.InvalidPropertiesFormatException;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Function;
/**
* Creates and loads {@link Properties}.
*
* @see Properties
* @since 4.4
*/
public class PropertiesFactory extends AbstractPropertiesFactory<Properties> {

    /**
     * An always-empty, effectively immutable {@link Properties}.
     * <p>
     * Contract visible in the overrides below: queries report emptiness,
     * mutators either no-op ({@code clear}, {@code rehash}) or throw
     * {@link UnsupportedOperationException} after null-checking their
     * arguments, and output methods ({@code list}, {@code store},
     * {@code storeToXML}, {@code save}) delegate to {@code super} so they
     * emit the usual empty-table output. {@code load}/{@code loadFromXML}
     * throw rather than mutate.
     */
    private static class EmptyProperties extends Properties {
        private static final long serialVersionUID = 1L;
        @Override
        public synchronized void clear() {
            // Noop
        }
        // Mutating compute* variants null-check the key first (matching the
        // Map contract) and then always reject the operation.
        @Override
        public synchronized Object compute(final Object key,
                final BiFunction<? super Object, ? super Object, ? extends Object> remappingFunction) {
            Objects.requireNonNull(key);
            throw new UnsupportedOperationException();
        }
        @Override
        public synchronized Object computeIfAbsent(final Object key,
                final Function<? super Object, ? extends Object> mappingFunction) {
            Objects.requireNonNull(key);
            throw new UnsupportedOperationException();
        }
        @Override
        public synchronized Object computeIfPresent(final Object key,
                final BiFunction<? super Object, ? super Object, ? extends Object> remappingFunction) {
            Objects.requireNonNull(key);
            throw new UnsupportedOperationException();
        }
        @Override
        public synchronized boolean contains(final Object value) {
            return false;
        }
        @Override
        public synchronized boolean containsKey(final Object key) {
            return false;
        }
        @Override
        public boolean containsValue(final Object value) {
            return false;
        }
        @Override
        public synchronized Enumeration<Object> elements() {
            return Collections.emptyEnumeration();
        }
        @Override
        public Set<Entry<Object, Object>> entrySet() {
            return Collections.emptySet();
        }
        // Equal to any empty Properties, consistent with hashCode() == 0 below
        // (an empty Hashtable also hashes to 0).
        @Override
        public synchronized boolean equals(final Object o) {
            return (o instanceof Properties) && ((Properties) o).isEmpty();
        }
        @Override
        public synchronized void forEach(final BiConsumer<? super Object, ? super Object> action) {
            Objects.requireNonNull(action);
        }
        @Override
        public synchronized Object get(final Object key) {
            return null;
        }
        @Override
        public synchronized Object getOrDefault(final Object key, final Object defaultValue) {
            return defaultValue;
        }
        @Override
        public String getProperty(final String key) {
            return null;
        }
        @Override
        public String getProperty(final String key, final String defaultValue) {
            return defaultValue;
        }
        @Override
        public synchronized int hashCode() {
            return 0;
        }
        @Override
        public synchronized boolean isEmpty() {
            return true;
        }
        @Override
        public synchronized Enumeration<Object> keys() {
            return Collections.emptyEnumeration();
        }
        @Override
        public Set<Object> keySet() {
            return Collections.emptySet();
        }
        @Override
        public void list(final PrintStream out) {
            // Implement as super
            super.list(out);
        }
        @Override
        public void list(final PrintWriter out) {
            // Implement as super
            super.list(out);
        }
        /**
         * Throws {@link UnsupportedOperationException}.
         * Caller should use try-with-resources statement.
         */
        @SuppressWarnings("resource")
        @Override
        public synchronized void load(final InputStream inStream) throws IOException {
            Objects.requireNonNull(inStream);
            throw new UnsupportedOperationException();
        }
        /**
         * Throws {@link UnsupportedOperationException}.
         * Caller should use try-with-resources statement.
         */
        @SuppressWarnings("resource")
        @Override
        public synchronized void load(final Reader reader) throws IOException {
            Objects.requireNonNull(reader);
            throw new UnsupportedOperationException();
        }
        /**
         * Throws {@link UnsupportedOperationException}.
         * Caller should use try-with-resources statement.
         */
        @SuppressWarnings("resource")
        @Override
        public synchronized void loadFromXML(final InputStream in)
                throws IOException, InvalidPropertiesFormatException {
            Objects.requireNonNull(in);
            throw new UnsupportedOperationException();
        }
        @Override
        public synchronized Object merge(final Object key, final Object value,
                final BiFunction<? super Object, ? super Object, ? extends Object> remappingFunction) {
            Objects.requireNonNull(key);
            Objects.requireNonNull(value);
            throw new UnsupportedOperationException();
        }
        @Override
        public Enumeration<?> propertyNames() {
            return Collections.emptyEnumeration();
        }
        @Override
        public synchronized Object put(final Object key, final Object value) {
            Objects.requireNonNull(key);
            Objects.requireNonNull(value);
            throw new UnsupportedOperationException();
        }
        @Override
        public synchronized void putAll(final Map<? extends Object, ? extends Object> t) {
            Objects.requireNonNull(t);
            throw new UnsupportedOperationException();
        }
        @Override
        public synchronized Object putIfAbsent(final Object key, final Object value) {
            Objects.requireNonNull(key);
            Objects.requireNonNull(value);
            throw new UnsupportedOperationException();
        }
        @Override
        protected void rehash() {
            // Noop
        }
        @Override
        public synchronized Object remove(final Object key) {
            Objects.requireNonNull(key);
            throw new UnsupportedOperationException();
        }
        @Override
        public synchronized boolean remove(final Object key, final Object value) {
            Objects.requireNonNull(key);
            Objects.requireNonNull(value);
            throw new UnsupportedOperationException();
        }
        @Override
        public synchronized Object replace(final Object key, final Object value) {
            Objects.requireNonNull(key);
            Objects.requireNonNull(value);
            throw new UnsupportedOperationException();
        }
        @Override
        public synchronized boolean replace(final Object key, final Object oldValue, final Object newValue) {
            Objects.requireNonNull(key);
            Objects.requireNonNull(oldValue);
            Objects.requireNonNull(newValue);
            throw new UnsupportedOperationException();
        }
        @Override
        public synchronized void replaceAll(
                final BiFunction<? super Object, ? super Object, ? extends Object> function) {
            Objects.requireNonNull(function);
            throw new UnsupportedOperationException();
        }
        // Deprecated in Properties itself; kept delegating for API completeness.
        @SuppressWarnings("deprecation")
        @Override
        public void save(final OutputStream out, final String comments) {
            // Implement as super
            super.save(out, comments);
        }
        @Override
        public synchronized Object setProperty(final String key, final String value) {
            Objects.requireNonNull(key);
            Objects.requireNonNull(value);
            throw new UnsupportedOperationException();
        }
        @Override
        public synchronized int size() {
            return 0;
        }
        @Override
        public void store(final OutputStream out, final String comments) throws IOException {
            // Implement as super
            super.store(out, comments);
        }
        @Override
        public void store(final Writer writer, final String comments) throws IOException {
            // Implement as super
            super.store(writer, comments);
        }
        @Override
        public void storeToXML(final OutputStream os, final String comment) throws IOException {
            // Implement as super
            super.storeToXML(os, comment);
        }
        @Override
        public void storeToXML(final OutputStream os, final String comment, final String encoding) throws IOException {
            // Implement as super
            super.storeToXML(os, comment, encoding);
        }
        @Override
        public Set<String> stringPropertyNames() {
            return Collections.emptySet();
        }
        @Override
        public synchronized String toString() {
            // Implement as super
            return super.toString();
        }
        @Override
        public Collection<Object> values() {
            return Collections.emptyList();
        }
    }
    /**
     * The empty map (immutable). This map is serializable.
     *
     * @since 4.5
     */
    public static final Properties EMPTY_PROPERTIES = new EmptyProperties();
    /**
     * The singleton instance.
     */
    public static final PropertiesFactory INSTANCE = new PropertiesFactory();
    /**
     * Constructs an instance.
     */
    private PropertiesFactory() {
        // There is only one instance.
    }
    /**
     * Subclasses override to provide customized properties instances.
     *
     * @return a new Properties instance.
     */
    @Override
    protected Properties createProperties() {
        return new Properties();
    }
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.server.services.impl;
import org.drools.compiler.kie.builder.impl.InternalKieContainer;
import org.drools.compiler.kie.builder.impl.InternalKieScanner;
import org.kie.api.KieServices;
import org.kie.server.api.marshalling.Marshaller;
import org.kie.server.api.marshalling.MarshallerFactory;
import org.kie.server.api.marshalling.MarshallingFormat;
import org.kie.server.api.model.KieContainerResource;
import org.kie.server.api.model.KieContainerStatus;
import org.kie.server.api.model.KieScannerResource;
import org.kie.server.api.model.KieScannerStatus;
import org.kie.server.api.model.ReleaseId;
import org.kie.server.services.api.KieContainerInstance;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
public class KieContainerInstanceImpl implements KieContainerInstance {
    // REST/remote-facing view of this container; kept in sync via updateReleaseId().
    private KieContainerResource resource;
    private InternalKieContainer kieContainer;
    // Active scanner, or null when none has been created or it was disposed.
    private InternalKieScanner scanner;
    // Lazily-built marshaller per format; guarded by synchronized(marshallers)
    // so creation and disposal cannot interleave.
    private transient Map<MarshallingFormat, Marshaller> marshallers;
    // Services registered by class name; at most one instance per concrete class.
    private transient Map<String, Object> serviceContainer;
    // Extra JAXB/classes handed to marshallers. NOTE(review): plain HashSet —
    // unlike the maps above it is not thread-safe; confirm callers serialize access.
    private transient Set<Class<?>> extraClasses = new HashSet<Class<?>>();
    public KieContainerInstanceImpl(String containerId, KieContainerStatus status) {
        this(containerId, status, null);
    }
    public KieContainerInstanceImpl(String containerId, KieContainerStatus status, InternalKieContainer kieContainer) {
        this(containerId, status, kieContainer, null);
    }
    public KieContainerInstanceImpl(String containerId, KieContainerStatus status, InternalKieContainer kieContainer, ReleaseId releaseId) {
        super();
        this.kieContainer = kieContainer;
        this.resource = new KieContainerResource(containerId, releaseId, status);
        // set the default scanner state to DISPOSED (which is the actual default state)
        // this way we don't need to do null checks all around for the scanner resource
        this.resource.setScanner(new KieScannerResource(KieScannerStatus.DISPOSED));
        this.marshallers = new ConcurrentHashMap<MarshallingFormat, Marshaller>();
        this.serviceContainer = new ConcurrentHashMap<String, Object>();
        updateReleaseId();
    }
    /**
     * Maps the {@link InternalKieScanner.Status} to a scanner status used by KIE Server.
     *
     * @param status {@link InternalKieScanner.Status} to be converted
     *
     * @return {@link KieScannerStatus} which maps to the specified {@link InternalKieScanner.Status}
     */
    public static KieScannerStatus mapScannerStatus(InternalKieScanner.Status status) {
        switch (status) {
            case STARTING:
                return KieScannerStatus.CREATED;
            case RUNNING:
                return KieScannerStatus.STARTED;
            case SCANNING:
            case UPDATING:
                // both transient states are reported as a single SCANNING status
                return KieScannerStatus.SCANNING;
            case STOPPED:
                return KieScannerStatus.STOPPED;
            case SHUTDOWN:
                return KieScannerStatus.DISPOSED;
            default:
                return KieScannerStatus.UNKNOWN;
        }
    }
    public String getContainerId() {
        return resource.getContainerId();
    }
    public void setContainerId(String containerId) {
        this.resource.setContainerId( containerId );
    }
    public InternalKieContainer getKieContainer() {
        return kieContainer;
    }
    public void setKieContainer(InternalKieContainer kieContainer) {
        this.kieContainer = kieContainer;
        // refresh the resource view (and dispose stale marshallers if the releaseId changed)
        updateReleaseId();
    }
    public KieContainerStatus getStatus() {
        return resource.getStatus();
    }
    public void setStatus(KieContainerStatus status) {
        this.resource.setStatus( status );
    }
    @Override
    public KieContainerResource getResource() {
        // re-sync releaseIds from the live container before exposing the resource
        updateReleaseId();
        return resource;
    }
    public void setResource(KieContainerResource resource) {
        this.resource = resource;
    }
    public InternalKieScanner getScanner() {
        return this.scanner;
    }
    /**
     * Creates a new scanner for this container and mirrors its initial
     * status/polling interval into the container resource.
     */
    public void createScanner() {
        this.scanner = (InternalKieScanner) KieServices.Factory.get().newKieScanner(kieContainer);
        // we also need to update the underlying scanner resource to avoid inconsistency
        KieScannerStatus status = KieContainerInstanceImpl.mapScannerStatus(scanner.getStatus());
        long pollingInterval = scanner.getPollingInterval();
        resource.setScanner(new KieScannerResource(status, pollingInterval));
    }
    // Scanner lifecycle methods below require createScanner() to have been
    // called first; each keeps resource.getScanner() in sync with the real state.
    public void startScanner(long pollingInterval) {
        if (this.scanner == null) {
            throw new IllegalStateException("Can not start non-existing (null) scanner!");
        }
        this.scanner.start(pollingInterval);
        this.getResource().setScanner(new KieScannerResource(KieScannerStatus.STARTED, pollingInterval));
    }
    public void scanNow() {
        if (this.scanner == null) {
            throw new IllegalStateException("Can not run (scanNow) non-existing (null) scanner!");
        }
        this.scanner.scanNow();
    }
    public void stopScanner() {
        if (this.scanner == null) {
            throw new IllegalStateException("Can not stop non-existing (null) scanner!");
        }
        this.scanner.stop();
        this.getResource().getScanner().setStatus(KieScannerStatus.STOPPED);
    }
    public void disposeScanner() {
        if (this.scanner == null) {
            throw new IllegalStateException("Can not dispose non-existing (null) scanner!");
        }
        this.scanner.shutdown();
        this.scanner = null;
        this.getResource().setScanner(new KieScannerResource(KieScannerStatus.DISPOSED));
    }
    /**
     * Returns (creating and caching on first use) the marshaller for the given
     * format, built against this container's classloader and extra classes.
     */
    public Marshaller getMarshaller(MarshallingFormat format) {
        // synchronized block (not just the concurrent map) so creation cannot
        // race with disposeMarshallers() clearing the cache
        synchronized ( marshallers ) {
            Marshaller marshaller = marshallers.get( format );
            if ( marshaller == null ) {
                marshaller = MarshallerFactory.getMarshaller( getExtraClasses(), format, this.kieContainer.getClassLoader() );
                this.marshallers.put( format, marshaller );
            }
            return marshaller;
        }
    }
    /** Disposes and evicts all cached marshallers atomically. */
    public void disposeMarshallers() {
        synchronized ( marshallers ) {
            for ( Marshaller marshaller : this.marshallers.values() ) {
                marshaller.dispose();
            }
            this.marshallers.clear();
        }
    }
    @Override
    public void addService(Object service) {
        if (service == null) {
            return;
        }
        // keyed by concrete class name; duplicate registration is an error
        if (serviceContainer.containsKey(service.getClass().getName())) {
            throw new IllegalStateException("Service " + service.getClass().getName() + " already exists");
        }
        serviceContainer.put(service.getClass().getName(), service);
    }
    @Override
    public boolean addExtraClasses(Set<Class<?>> extraJaxbClassList) {
        return this.extraClasses.addAll( extraJaxbClassList );
    }
    @Override
    public void clearExtraClasses() {
        this.extraClasses.clear();
    }
    @Override
    public Set<Class<?>> getExtraClasses() {
        // NOTE(review): exposes the internal mutable set directly — callers can
        // modify it; confirm whether a defensive copy is expected by the API.
        return this.extraClasses;
    }
    @Override
    public <T> T getService(Class<T> serviceType) {
        // unchecked cast: lookup is keyed by serviceType's own class name,
        // so the stored instance is expected to be of that exact type
        return (T) this.serviceContainer.get(serviceType.getName());
    }
    @Override
    public <T> T removeService(Class<T> serviceType) {
        // unchecked cast: same keying invariant as getService
        return (T) this.serviceContainer.remove(serviceType.getName());
    }
    @Override
    public String toString() {
        return resource.toString();
    }
    /**
     * Re-reads the (resolved) releaseIds from the live KieContainer into the
     * resource view; disposes cached marshallers when either id changed.
     */
    protected void updateReleaseId() {
        ReleaseId oldReleaseId = this.resource.getReleaseId();
        ReleaseId oldResolvedReleaseId = this.resource.getResolvedReleaseId();
        if ( kieContainer != null ) {
            this.resource.setReleaseId( new ReleaseId( kieContainer.getContainerReleaseId() ) );
            this.resource.setResolvedReleaseId( new ReleaseId( kieContainer.getReleaseId() ) );
        }
        // marshallers need to be disposed in case the container was updated with different releaseId
        // proper solution is to attach listener directly to the KieScanner and dispose the marshallers,
        // but those listeners are not (yet) available, so this is a temporary hackish "solution"
        if (releaseIdUpdated(oldReleaseId, this.resource.getReleaseId())
                || releaseIdUpdated(oldResolvedReleaseId, this.resource.getResolvedReleaseId())) {
            disposeMarshallers();
        }
    }
    /**
     * Checks whether the releaseId was updated (i.e. the old one is different from the new one).
     *
     * @param oldReleaseId old ReleaseId
     * @param newReleaseId new releaseId
     * @return true if the second (new) releaseId is different and thus was updated; otherwise false
     */
    private boolean releaseIdUpdated(ReleaseId oldReleaseId, ReleaseId newReleaseId) {
        if (oldReleaseId == null && newReleaseId == null) {
            return false;
        }
        if (oldReleaseId == null && newReleaseId != null) {
            return true;
        }
        // now both releaseIds are non-null, so it is safe to call equals()
        return !oldReleaseId.equals(newReleaseId);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.