repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
srottem/indy-sdk | wrappers/java/src/test/java/org/hyperledger/indy/sdk/pool/CreatePoolTest.java | 2146 | package org.hyperledger.indy.sdk.pool;
import org.hyperledger.indy.sdk.IndyIntegrationTest;
import org.hyperledger.indy.sdk.utils.PoolUtils;
import org.junit.Test;
import java.io.File;
import static org.hamcrest.CoreMatchers.isA;
import static org.junit.Assert.assertTrue;
public class CreatePoolTest extends IndyIntegrationTest {

	/**
	 * Creating a pool ledger config with a {@code null} config JSON should succeed,
	 * falling back to the default transaction file name.
	 */
	@Test
	public void testCreatePoolWorksForNullConfig() throws Exception {
		File txnFile = new File("testCreatePoolWorks.txn");
		txnFile.deleteOnExit();
		assertTrue(txnFile.createNewFile());
		PoolUtils.writeTransactions(txnFile);
		Pool.createPoolLedgerConfig("testCreatePoolWorks", null).get();
	}

	/**
	 * Creating a pool ledger config with an explicit config JSON that points at a
	 * genesis transaction file should succeed.
	 */
	@Test
	public void testCreatePoolWorksForConfigJSON() throws Exception {
		File genesisFile = PoolUtils.createGenesisTxnFile("genesis.txn");
		PoolJSONParameters.CreatePoolLedgerConfigJSONParameter configParameter
				= new PoolJSONParameters.CreatePoolLedgerConfigJSONParameter(genesisFile.getAbsolutePath());
		Pool.createPoolLedgerConfig("testCreatePoolWorks", configParameter.toJson()).get();
	}

	/**
	 * An empty pool name is rejected immediately with an {@link IllegalArgumentException}.
	 */
	@Test
	public void testCreatePoolWorksForEmptyName() throws Exception {
		thrown.expect(IllegalArgumentException.class);
		File genesisFile = PoolUtils.createGenesisTxnFile("genesis.txn");
		PoolJSONParameters.CreatePoolLedgerConfigJSONParameter configParameter
				= new PoolJSONParameters.CreatePoolLedgerConfigJSONParameter(genesisFile.getAbsolutePath());
		Pool.createPoolLedgerConfig("", configParameter.toJson()).get();
	}

	/**
	 * Creating the same pool ledger config twice fails; the future completes
	 * exceptionally with a {@code PoolLedgerConfigExistsException} cause.
	 */
	@Test
	public void testCreatePoolWorksForTwice() throws Exception {
		thrown.expectCause(isA(PoolLedgerConfigExistsException.class));
		File genesisFile = PoolUtils.createGenesisTxnFile("genesis.txn");
		PoolJSONParameters.CreatePoolLedgerConfigJSONParameter configParameter
				= new PoolJSONParameters.CreatePoolLedgerConfigJSONParameter(genesisFile.getAbsolutePath());
		Pool.createPoolLedgerConfig("pool1", configParameter.toJson()).get();
		Pool.createPoolLedgerConfig("pool1", configParameter.toJson()).get();
	}
}
| apache-2.0 |
venukb/any23 | any23-core/src/main/java/org/deri/any23/extractor/html/HReviewExtractor.java | 5979 | /*
* Copyright 2008-2010 Digital Enterprise Research Institute (DERI)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deri.any23.extractor.html;
import org.deri.any23.extractor.ExtractionException;
import org.deri.any23.extractor.ExtractionResult;
import org.deri.any23.extractor.ExtractorDescription;
import org.deri.any23.extractor.ExtractorFactory;
import org.deri.any23.extractor.SimpleExtractorFactory;
import org.deri.any23.extractor.TagSoupExtractionResult;
import org.deri.any23.rdf.PopularPrefixes;
import org.deri.any23.vocab.DCTERMS;
import org.deri.any23.vocab.REVIEW;
import org.deri.any23.vocab.VCARD;
import org.openrdf.model.BNode;
import org.openrdf.model.Resource;
import org.openrdf.model.vocabulary.RDF;
import org.w3c.dom.Node;
import java.util.Arrays;
import java.util.List;
import static org.deri.any23.extractor.html.HTMLDocument.TextField;
/**
* Extractor for the <a href="http://microformats.org/wiki/hreview">hReview</a>
* microformat.
*
* @author Gabriele Renzi
*/
public class HReviewExtractor extends EntityBasedMicroformatExtractor {
private static final REVIEW vREVIEW = REVIEW.getInstance();
private static final VCARD vVCARD = VCARD.getInstance();
private static final DCTERMS vDCTERMS = DCTERMS.getInstance();
public final static ExtractorFactory<HReviewExtractor> factory =
SimpleExtractorFactory.create(
"html-mf-hreview",
PopularPrefixes.createSubset("rdf", "vcard", "rev"),
Arrays.asList("text/html;q=0.1", "application/xhtml+xml;q=0.1"),
"example-mf-hreview.html",
HReviewExtractor.class
);
// Returns the extractor factory, which also serves as this extractor's description.
public ExtractorDescription getDescription() {
    return factory;
}
// Root microformat CSS class name that identifies an hReview entity in the HTML.
protected String getBaseClassName() {
    return "hreview";
}
@Override
protected void resetExtractor() {
    // Empty: this extractor keeps no per-document state that needs resetting.
}
/**
 * Extracts one hReview entity rooted at the given DOM node: emits an
 * {@code rdf:type rev:Review} triple for a blank node representing the review,
 * then adds the individual hReview fields found in the fragment.
 *
 * @param node DOM node carrying the "hreview" class
 * @param out  sink for the produced triples; must be a {@link TagSoupExtractionResult}
 * @return always {@code true} (an entity is considered extracted for every root node)
 * @throws ExtractionException if resolving the reviewed item fails
 */
protected boolean extractEntity(Node node, ExtractionResult out) throws ExtractionException {
    BNode rev = getBlankNodeFor(node);
    out.writeTriple(rev, RDF.TYPE, vREVIEW.Review);
    final HTMLDocument fragment = new HTMLDocument(node);
    // Each helper conditionally emits its triple only when the field is present.
    addRating(fragment, rev);
    addSummary(fragment, rev);
    addTime(fragment, rev);
    addType(fragment, rev);
    addDescription(fragment, rev);
    addItem(fragment, rev);
    addReviewer(fragment, rev);
    // Record the review's DOM location so downstream consumers can trace provenance.
    final TagSoupExtractionResult tser = (TagSoupExtractionResult) out;
    tser.addResourceRoot(
            DomUtils.getXPathListForNode(node),
            rev,
            this.getClass()
    );
    return true;
}
// Emits rev:type for the review when the singular "type" field is present.
private void addType(HTMLDocument doc, Resource rev) {
    final TextField type = doc.getSingularTextField("type");
    conditionallyAddStringProperty(type.source(), rev, vREVIEW.type, type.value());
}
// Links the review to a blank node for the first "reviewer" element, if any exists.
private void addReviewer(HTMLDocument doc, Resource rev) {
    final List<Node> reviewers = doc.findAllByClassName("reviewer");
    if (!reviewers.isEmpty()) {
        final Node first = reviewers.get(0);
        addBNodeProperty(first, rev, vREVIEW.reviewer, getBlankNodeFor(first));
    }
}
/**
 * For every "item" element, creates a resource describing the reviewed item and
 * links it to this review via rev:hasReview.
 *
 * NOTE(review): the triple is emitted with the item as subject and the review as
 * object (item rev:hasReview review), while the provenance node passed to
 * addBNodeProperty is the item's DOM node — presumably intentional; confirm
 * against the REVIEW vocabulary's direction for hasReview.
 *
 * @throws ExtractionException if resolving the item's URL fields fails
 */
private void addItem(HTMLDocument root, BNode rev) throws ExtractionException {
    List<Node> nodes = root.findAllByClassName("item");
    for (Node node : nodes) {
        Resource item = findDummy(new HTMLDocument(node));
        addBNodeProperty(
                node,
                item, vREVIEW.hasReview, rev
        );
    }
}
/**
 * Builds a blank-node resource describing a reviewed item from its vCard-like
 * fields: fn (formatted name), url, and any number of photo URLs. URL values are
 * resolved against the base document's URI.
 *
 * @param item HTML fragment rooted at the "item" element
 * @return the blank node populated with the available vCard properties
 * @throws ExtractionException if URI resolution fails
 */
private Resource findDummy(HTMLDocument item) throws ExtractionException {
    Resource blank = getBlankNodeFor(item.getDocument());
    TextField val = item.getSingularTextField("fn");
    conditionallyAddStringProperty(
            val.source(),
            blank, vVCARD.fn, val.value()
    );
    final TextField url = item.getSingularUrlField("url");
    conditionallyAddResourceProperty(blank, vVCARD.url, getHTMLDocument().resolveURI(url.value()));
    // "photo" is plural: an item may carry several photo URLs.
    TextField pics[] = item.getPluralUrlField("photo");
    for (TextField pic : pics) {
        addURIProperty(blank, vVCARD.photo, getHTMLDocument().resolveURI(pic.value()));
    }
    return blank;
}
// Emits rev:rating for the review when the singular "rating" field is present.
private void addRating(HTMLDocument doc, Resource rev) {
    final TextField rating = doc.getSingularTextField("rating");
    conditionallyAddStringProperty(rating.source(), rev, vREVIEW.rating, rating.value());
}
// Emits rev:title from the "summary" field when present.
private void addSummary(HTMLDocument doc, Resource rev) {
    final TextField summary = doc.getSingularTextField("summary");
    conditionallyAddStringProperty(summary.source(), rev, vREVIEW.title, summary.value());
}
// Emits dcterms:date from the "dtreviewed" field when present.
private void addTime(HTMLDocument doc, Resource rev) {
    final TextField reviewed = doc.getSingularTextField("dtreviewed");
    conditionallyAddStringProperty(reviewed.source(), rev, vDCTERMS.date, reviewed.value());
}
// Emits rev:text (the review body) from the "description" field when present.
private void addDescription(HTMLDocument doc, Resource rev) {
    final TextField description = doc.getSingularTextField("description");
    conditionallyAddStringProperty(description.source(), rev, vREVIEW.text, description.value());
}
} | apache-2.0 |
neeph/OpenGTSFull | src/org/opengts/db/CommandPingDispatcher.java | 2659 | // ----------------------------------------------------------------------------
// Copyright 2006-2010, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2009/07/01 Martin D. Flynn
// -Initial release
// ----------------------------------------------------------------------------
package org.opengts.db;
import java.lang.*;
import java.util.*;
import java.io.*;
import java.net.*;
import java.sql.*;
import org.opengts.util.*;
import org.opengts.dbtools.*;
import org.opengts.db.*;
import org.opengts.db.tables.*;
public class CommandPingDispatcher
    implements PingDispatcher
{

    // ------------------------------------------------------------------------

    /**
     * Default constructor.
     **/
    public CommandPingDispatcher()
    {
        super();
    }

    // ------------------------------------------------------------------------

    /**
     *** Returns true if 'ping' is supported for the specified device
     *** @param device The device
     *** @return True if 'ping' is supported for the specified device
     **/
    public boolean isPingSupported(Device device)
    {
        // Delegates entirely to the DC server factory's capability check.
        return DCServerFactory.supportsCommandDispatcher(device);
    }

    /**
     *** Sends a command notification to the specified Device
     *** @param device The device to which the command is to be sent.
     *** @param cmdType The command type
     *** @param cmdName The command name
     *** @param cmdArgs The arguments to the command sent to the device.
     *** @return True if the command was sent successfully.
     **/
    public boolean sendDeviceCommand(Device device, String cmdType, String cmdName, String cmdArgs[])
    {
        // Dispatch the command and report success based on the returned status properties.
        return DCServerFactory.isCommandResultOK(
            DCServerFactory.sendServerCommand(device, cmdType, cmdName, cmdArgs));
    }

    // ------------------------------------------------------------------------

}
| apache-2.0 |
vpavic/spring-boot | spring-boot-project/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/BinderTests.java | 16024 | /*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.context.properties.bind;
import java.beans.PropertyEditorSupport;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import jakarta.validation.Validation;
import org.junit.jupiter.api.Test;
import org.mockito.Answers;
import org.mockito.InOrder;
import org.springframework.boot.context.properties.bind.Bindable.BindRestriction;
import org.springframework.boot.context.properties.bind.validation.ValidationBindHandler;
import org.springframework.boot.context.properties.source.ConfigurationPropertyName;
import org.springframework.boot.context.properties.source.ConfigurationPropertySource;
import org.springframework.boot.context.properties.source.ConfigurationPropertySources;
import org.springframework.boot.context.properties.source.MockConfigurationPropertySource;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.core.convert.ConversionFailedException;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.PropertySource;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.core.io.Resource;
import org.springframework.format.annotation.DateTimeFormat;
import org.springframework.test.context.support.TestPropertySourceUtils;
import org.springframework.validation.Validator;
import org.springframework.validation.annotation.Validated;
import org.springframework.validation.beanvalidation.SpringValidatorAdapter;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
/**
 * Tests for {@link Binder}.
 *
 * @author Phillip Webb
 * @author Madhura Bhave
 */
class BinderTests {

	// Property sources the binder consults, in order; individual tests populate this list.
	private final List<ConfigurationPropertySource> sources = new ArrayList<>();

	private Binder binder = new Binder(this.sources);

	// --- constructor / argument validation ---

	@Test
	void createWhenSourcesIsNullShouldThrowException() {
		assertThatIllegalArgumentException().isThrownBy(() -> new Binder((Iterable<ConfigurationPropertySource>) null))
				.withMessageContaining("Sources must not be null");
	}

	@Test
	void bindWhenNameIsNullShouldThrowException() {
		assertThatIllegalArgumentException().isThrownBy(() -> this.binder.bind((ConfigurationPropertyName) null,
				Bindable.of(String.class), BindHandler.DEFAULT)).withMessageContaining("Name must not be null");
	}

	@Test
	void bindWhenTargetIsNullShouldThrowException() {
		assertThatIllegalArgumentException()
				.isThrownBy(() -> this.binder.bind(ConfigurationPropertyName.of("foo"), null, BindHandler.DEFAULT))
				.withMessageContaining("Target must not be null");
	}

	// --- binding simple values ---

	@Test
	void bindToValueWhenPropertyIsMissingShouldReturnUnbound() {
		this.sources.add(new MockConfigurationPropertySource());
		BindResult<String> result = this.binder.bind("foo", Bindable.of(String.class));
		assertThat(result.isBound()).isFalse();
	}

	@Test
	void bindToValueShouldReturnPropertyValue() {
		this.sources.add(new MockConfigurationPropertySource("foo", 123));
		BindResult<Integer> result = this.binder.bind("foo", Bindable.of(Integer.class));
		assertThat(result.get()).isEqualTo(123);
	}

	@Test
	void bindToValueShouldReturnPropertyValueFromSecondSource() {
		this.sources.add(new MockConfigurationPropertySource("foo", 123));
		this.sources.add(new MockConfigurationPropertySource("bar", 234));
		BindResult<Integer> result = this.binder.bind("bar", Bindable.of(Integer.class));
		assertThat(result.get()).isEqualTo(234);
	}

	@Test
	void bindToValueShouldReturnConvertedPropertyValue() {
		this.sources.add(new MockConfigurationPropertySource("foo", "123"));
		BindResult<Integer> result = this.binder.bind("foo", Bindable.of(Integer.class));
		assertThat(result.get()).isEqualTo(123);
	}

	@Test
	void bindToValueWhenMultipleCandidatesShouldReturnFirst() {
		// Earlier sources win when several sources define the same property.
		this.sources.add(new MockConfigurationPropertySource("foo", 123));
		this.sources.add(new MockConfigurationPropertySource("foo", 234));
		BindResult<Integer> result = this.binder.bind("foo", Bindable.of(Integer.class));
		assertThat(result.get()).isEqualTo(123);
	}

	// --- placeholder resolution ---

	@Test
	void bindToValueWithPlaceholdersShouldResolve() {
		StandardEnvironment environment = new StandardEnvironment();
		TestPropertySourceUtils.addInlinedPropertiesToEnvironment(environment, "bar=23");
		this.sources.add(new MockConfigurationPropertySource("foo", "1${bar}"));
		this.binder = new Binder(this.sources, new PropertySourcesPlaceholdersResolver(environment));
		BindResult<Integer> result = this.binder.bind("foo", Bindable.of(Integer.class));
		assertThat(result.get()).isEqualTo(123);
	}

	@Test
	void bindToValueWithMissingPlaceholderShouldResolveToValueWithPlaceholder() {
		// An unresolvable placeholder is passed through verbatim rather than failing.
		StandardEnvironment environment = new StandardEnvironment();
		this.sources.add(new MockConfigurationPropertySource("foo", "${bar}"));
		this.binder = new Binder(this.sources, new PropertySourcesPlaceholdersResolver(environment));
		BindResult<String> result = this.binder.bind("foo", Bindable.of(String.class));
		assertThat(result.get()).isEqualTo("${bar}");
	}

	@Test
	void bindToValueWithCustomPropertyEditorShouldReturnConvertedValue() {
		this.binder = new Binder(this.sources, null, null,
				(registry) -> registry.registerCustomEditor(JavaBean.class, new JavaBeanPropertyEditor()));
		this.sources.add(new MockConfigurationPropertySource("foo", "123"));
		BindResult<JavaBean> result = this.binder.bind("foo", Bindable.of(JavaBean.class));
		assertThat(result.get().getValue()).isEqualTo("123");
	}

	// --- BindHandler callback ordering ---

	@Test
	void bindToValueShouldTriggerOnSuccess() {
		this.sources.add(new MockConfigurationPropertySource("foo", "1", "line1"));
		BindHandler handler = mock(BindHandler.class, Answers.CALLS_REAL_METHODS);
		Bindable<Integer> target = Bindable.of(Integer.class);
		this.binder.bind("foo", target, handler);
		InOrder ordered = inOrder(handler);
		ordered.verify(handler).onSuccess(eq(ConfigurationPropertyName.of("foo")), eq(target), any(), eq(1));
	}

	@Test
	void bindOrCreateWhenNotBoundShouldTriggerOnCreate() {
		BindHandler handler = mock(BindHandler.class, Answers.CALLS_REAL_METHODS);
		Bindable<JavaBean> target = Bindable.of(JavaBean.class);
		this.binder.bindOrCreate("foo", target, handler);
		InOrder ordered = inOrder(handler);
		ordered.verify(handler).onCreate(eq(ConfigurationPropertyName.of("foo")), eq(target), any(), any());
	}

	// --- binding JavaBeans ---

	@Test
	void bindToJavaBeanShouldReturnPopulatedBean() {
		this.sources.add(new MockConfigurationPropertySource("foo.value", "bar"));
		JavaBean result = this.binder.bind("foo", Bindable.of(JavaBean.class)).get();
		assertThat(result.getValue()).isEqualTo("bar");
	}

	@Test
	void bindToJavaBeanWhenNonIterableShouldReturnPopulatedBean() {
		MockConfigurationPropertySource source = new MockConfigurationPropertySource("foo.value", "bar");
		this.sources.add(source.nonIterable());
		JavaBean result = this.binder.bind("foo", Bindable.of(JavaBean.class)).get();
		assertThat(result.getValue()).isEqualTo("bar");
	}

	@Test
	void bindToJavaBeanWhenHasPropertyWithSameNameShouldStillBind() {
		// gh-10945
		MockConfigurationPropertySource source = new MockConfigurationPropertySource();
		source.put("foo", "boom");
		source.put("foo.value", "bar");
		this.sources.add(source);
		JavaBean result = this.binder.bind("foo", Bindable.of(JavaBean.class)).get();
		assertThat(result.getValue()).isEqualTo("bar");
	}

	@Test
	void bindToJavaBeanShouldTriggerOnSuccess() {
		// onSuccess fires for the nested property first, then for the enclosing bean.
		this.sources.add(new MockConfigurationPropertySource("foo.value", "bar", "line1"));
		BindHandler handler = mock(BindHandler.class, Answers.CALLS_REAL_METHODS);
		Bindable<JavaBean> target = Bindable.of(JavaBean.class);
		this.binder.bind("foo", target, handler);
		InOrder inOrder = inOrder(handler);
		inOrder.verify(handler).onSuccess(eq(ConfigurationPropertyName.of("foo.value")), eq(Bindable.of(String.class)),
				any(), eq("bar"));
		inOrder.verify(handler).onSuccess(eq(ConfigurationPropertyName.of("foo")), eq(target), any(),
				isA(JavaBean.class));
	}

	@Test
	void bindWhenHasCustomDefaultHandlerShouldTriggerOnSuccess() {
		this.sources.add(new MockConfigurationPropertySource("foo.value", "bar", "line1"));
		BindHandler handler = mock(BindHandler.class, Answers.CALLS_REAL_METHODS);
		Binder binder = new Binder(this.sources, null, null, null, handler);
		Bindable<JavaBean> target = Bindable.of(JavaBean.class);
		binder.bind("foo", target);
		InOrder inOrder = inOrder(handler);
		inOrder.verify(handler).onSuccess(eq(ConfigurationPropertyName.of("foo.value")), eq(Bindable.of(String.class)),
				any(), eq("bar"));
		inOrder.verify(handler).onSuccess(eq(ConfigurationPropertyName.of("foo")), eq(target), any(),
				isA(JavaBean.class));
	}

	// --- conversion and validation ---

	@Test
	void bindWhenHasMalformedDateShouldThrowException() {
		this.sources.add(new MockConfigurationPropertySource("foo", "2014-04-01T01:30:00.000-05:00"));
		assertThatExceptionOfType(BindException.class)
				.isThrownBy(() -> this.binder.bind("foo", Bindable.of(LocalDate.class)))
				.withCauseInstanceOf(ConversionFailedException.class);
	}

	@Test
	void bindWhenHasAnnotationsShouldChangeConvertedValue() {
		this.sources.add(new MockConfigurationPropertySource("foo", "2014-04-01T01:30:00.000-05:00"));
		DateTimeFormat annotation = AnnotationUtils.synthesizeAnnotation(
				Collections.singletonMap("iso", DateTimeFormat.ISO.DATE_TIME), DateTimeFormat.class, null);
		LocalDate result = this.binder.bind("foo", Bindable.of(LocalDate.class).withAnnotations(annotation)).get();
		assertThat(result.toString()).isEqualTo("2014-04-01");
	}

	@Test
	void bindToValidatedBeanWithResourceAndNonEnumerablePropertySource() {
		ConfigurationPropertySources.from(new PropertySource<String>("test") {
			@Override
			public Object getProperty(String name) {
				return null;
			}
		}).forEach(this.sources::add);
		Validator validator = new SpringValidatorAdapter(
				Validation.byDefaultProvider().configure().buildValidatorFactory().getValidator());
		this.binder.bind("foo", Bindable.of(ResourceBean.class), new ValidationBindHandler(validator));
	}

	// --- edge cases: cycles, generics, empty names ---

	@Test
	void bindToBeanWithCycle() {
		MockConfigurationPropertySource source = new MockConfigurationPropertySource();
		this.sources.add(source.nonIterable());
		Bindable<CycleBean1> target = Bindable.of(CycleBean1.class);
		this.binder.bind("foo", target);
	}

	@Test
	@SuppressWarnings("rawtypes")
	void bindToBeanWithUnresolvableGenerics() {
		MockConfigurationPropertySource source = new MockConfigurationPropertySource();
		source.put("foo.bar", "hello");
		this.sources.add(source);
		Bindable<GenericBean> target = Bindable.of(GenericBean.class);
		this.binder.bind("foo", target);
	}

	@Test
	void bindWithEmptyPrefixShouldIgnorePropertiesWithEmptyName() {
		Map<String, Object> source = new HashMap<>();
		source.put("value", "hello");
		source.put("", "bar");
		Iterable<ConfigurationPropertySource> propertySources = ConfigurationPropertySources
				.from(new MapPropertySource("test", source));
		propertySources.forEach(this.sources::add);
		Bindable<JavaBean> target = Bindable.of(JavaBean.class);
		JavaBean result = this.binder.bind("", target).get();
		assertThat(result.getValue()).isEqualTo("hello");
	}

	// --- bindOrCreate semantics ---

	@Test
	void bindOrCreateWhenBindSuccessfulShouldReturnBoundValue() {
		this.sources.add(new MockConfigurationPropertySource("foo.value", "bar"));
		JavaBean result = this.binder.bindOrCreate("foo", Bindable.of(JavaBean.class));
		assertThat(result.getValue()).isEqualTo("bar");
		assertThat(result.getItems()).isEmpty();
	}

	@Test
	void bindOrCreateWhenUnboundShouldReturnCreatedValue() {
		JavaBean value = this.binder.bindOrCreate("foo", Bindable.of(JavaBean.class));
		assertThat(value).isNotNull();
		assertThat(value).isInstanceOf(JavaBean.class);
	}

	@Test
	void bindToJavaBeanWhenHandlerOnStartReturnsNullShouldReturnUnbound() { // gh-18129
		this.sources.add(new MockConfigurationPropertySource("foo.value", "bar"));
		BindResult<JavaBean> result = this.binder.bind("foo", Bindable.of(JavaBean.class), new BindHandler() {
			@Override
			public <T> Bindable<T> onStart(ConfigurationPropertyName name, Bindable<T> target, BindContext context) {
				return null;
			}
		});
		assertThat(result.isBound()).isFalse();
	}

	// --- constructor vs setter binding ---

	@Test
	void bindToJavaBeanWithPublicConstructor() {
		Bindable<JavaBeanWithPublicConstructor> bindable = Bindable.of(JavaBeanWithPublicConstructor.class);
		JavaBeanWithPublicConstructor result = bindToJavaBeanWithPublicConstructor(bindable);
		assertThat(result.getValue()).isEqualTo("constructor");
	}

	@Test
	void bindToJavaBeanWithPublicConstructorWhenHasBindRestriction() {
		Bindable<JavaBeanWithPublicConstructor> bindable = Bindable.of(JavaBeanWithPublicConstructor.class)
				.withBindRestrictions(BindRestriction.NO_DIRECT_PROPERTY);
		JavaBeanWithPublicConstructor result = bindToJavaBeanWithPublicConstructor(bindable);
		assertThat(result.getValue()).isEqualTo("setter");
	}

	// Shared driver: supplies both a direct-property value and a setter value so each
	// test can assert which binding path won.
	private JavaBeanWithPublicConstructor bindToJavaBeanWithPublicConstructor(
			Bindable<JavaBeanWithPublicConstructor> bindable) {
		MockConfigurationPropertySource source = new MockConfigurationPropertySource();
		source.put("foo", "constructor");
		source.put("foo.value", "setter");
		this.sources.add(source);
		return this.binder.bindOrCreate("foo", bindable);
	}

	// --- fixture classes ---

	// Simple mutable bean bound via setters.
	static class JavaBean {

		private String value;

		private List<String> items = Collections.emptyList();

		String getValue() {
			return this.value;
		}

		void setValue(String value) {
			this.value = value;
		}

		List<String> getItems() {
			return this.items;
		}

	}

	static class NestedJavaBean {

		private DefaultValuesBean valuesBean = new DefaultValuesBean();

		DefaultValuesBean getValuesBean() {
			return this.valuesBean;
		}

		void setValuesBean(DefaultValuesBean valuesBean) {
			this.valuesBean = valuesBean;
		}

	}

	static class DefaultValuesBean {

		private String value = "hello";

		private List<String> items = Collections.emptyList();

		String getValue() {
			return this.value;
		}

		void setValue(String value) {
			this.value = value;
		}

		List<String> getItems() {
			return this.items;
		}

	}

	public enum ExampleEnum {

		FOO_BAR, BAR_BAZ, BAZ_BOO

	}

	@Validated
	static class ResourceBean {

		private Resource resource;

		Resource getResource() {
			return this.resource;
		}

		void setResource(Resource resource) {
			this.resource = resource;
		}

	}

	// CycleBean1/CycleBean2 form a reference cycle used by bindToBeanWithCycle.
	static class CycleBean1 {

		private CycleBean2 two;

		CycleBean2 getTwo() {
			return this.two;
		}

		void setTwo(CycleBean2 two) {
			this.two = two;
		}

	}

	static class CycleBean2 {

		private CycleBean1 one;

		CycleBean1 getOne() {
			return this.one;
		}

		void setOne(CycleBean1 one) {
			this.one = one;
		}

	}

	static class GenericBean<T> {

		private T bar;

		T getBar() {
			return this.bar;
		}

		void setBar(T bar) {
			this.bar = bar;
		}

	}

	// PropertyEditor that wraps the raw text into a JavaBean, used by the
	// custom-property-editor test.
	static class JavaBeanPropertyEditor extends PropertyEditorSupport {

		@Override
		public void setAsText(String text) {
			JavaBean value = new JavaBean();
			value.setValue(text);
			setValue(value);
		}

	}

}
| apache-2.0 |
filiphr/camunda-bpmn-model | src/main/java/org/camunda/bpm/model/bpmn/instance/Event.java | 903 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.model.bpmn.instance;
import org.camunda.bpm.model.bpmn.instance.bpmndi.BpmnShape;
import java.util.Collection;
/**
 * The BPMN event element.
 *
 * @author Sebastian Menski
 *
 */
public interface Event extends FlowNode, InteractionNode {

	// The BPMN property elements owned by this event.
	Collection<Property> getProperties();

	// The BPMN DI shape that renders this event in a diagram.
	BpmnShape getDiagramElement();

}
| apache-2.0 |
kareez/ideal-car | src/main/java/org/mhshams/domain/Model.java | 684 | package org.mhshams.domain;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
/**
 * The car model. Persisted in table {@code t_models}; equality and string form are
 * based on the inherited id plus the model name.
 */
@EqualsAndHashCode(of = {"id", "model"})
@ToString(of = {"id", "model"})
@Entity(name = "t_models")
public class Model extends AbstractEntity {

    private static final long serialVersionUID = 3L;

    // Model name; required column c_model.
    @Getter
    @Column(name = "c_model", nullable = false)
    private String model;

    // Owning manufacturer; required foreign key c_make_id.
    @Getter
    @ManyToOne(optional = false)
    @JoinColumn(name = "c_make_id", nullable = false)
    private Make make;
}
| apache-2.0 |
ThreatCentral/elderberry | src/main/java/com/hpe/elderberry/Taxii11Template.java | 13445 | /*
* (c) Copyright 2015 Hewlett Packard Enterprise Development LP Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and limitations under the
* License.
*/
package com.hpe.elderberry;
import org.apache.commons.logging.Log;
import org.mitre.taxii.messages.xml11.CollectionInformationRequest;
import org.mitre.taxii.messages.xml11.CollectionInformationResponse;
import org.mitre.taxii.messages.xml11.CollectionRecordType;
import org.mitre.taxii.messages.xml11.DiscoveryRequest;
import org.mitre.taxii.messages.xml11.DiscoveryResponse;
import org.mitre.taxii.messages.xml11.PollRequest;
import org.mitre.taxii.messages.xml11.PollResponse;
import org.mitre.taxii.messages.xml11.ServiceInstanceType;
import org.mitre.taxii.messages.xml11.ServiceTypeEnum;
import org.mitre.taxii.messages.xml11.StatusMessage;
import org.springframework.beans.TypeMismatchException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.ResponseEntity;
import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.datatype.XMLGregorianCalendar;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Collection;
import java.util.Date;
import java.util.GregorianCalendar;
import static java.lang.System.currentTimeMillis;
import static java.util.Collections.singletonList;
import static javax.xml.datatype.DatatypeFactory.newInstance;
import static org.apache.commons.logging.LogFactory.getLog;
import static org.mitre.taxii.Versions.VID_TAXII_HTTPS_10;
import static org.mitre.taxii.Versions.VID_TAXII_HTTP_10;
import static org.mitre.taxii.Versions.VID_TAXII_SERVICES_11;
import static org.mitre.taxii.Versions.VID_TAXII_XML_11;
import static org.springframework.http.HttpStatus.OK;
import static org.springframework.http.MediaType.APPLICATION_XML;
import static org.springframework.util.StringUtils.collectionToCommaDelimitedString;
import static org.springframework.util.StringUtils.isEmpty;
/**
* <p>Taxii11Template is a convenient way to connect spring to a TAXII 1.1 server. This template allows you to easily
* connect with a <a href="http://taxii.mitre.org/specifications/version1.1/">TAXII 1.1</a> server. This template uses
* the <a href="https://github.com/TAXIIProject/java-taxii">TAXII-java</a> project for its JAXB implementation of the
* XML messages.</p>
* <br>
* example:<br>
* <pre>
* {@code
*
* <bean name="taxiiConnection" class="TaxiiConnection"
* p:discoveryUrl="http://hailataxii.com/taxii-discovery-service"
* p:useProxy="true"
* />
*
* <bean name="taxiiTemplate" class="Taxii11Template"
* p:taxiiConnection-ref="taxiiConnection"
* />
* }
* </pre>
*/
@SuppressWarnings("unused")
public class Taxii11Template {
private Log log = getLog(getClass());
private TaxiiConnection conn;
/**
 * the {@link TaxiiConnection} to use; injected by Spring and required before any
 * request method is called
 *
 * @param conn a valid, non-null {@link TaxiiConnection}
 */
@Autowired
@Required
public void setTaxiiConnection(TaxiiConnection conn) {
    this.conn = conn;
}
/**
 * Performs a TAXII 1.1 discovery request against the configured discovery URL.
 *
 * @return the {@code DiscoveryResponse}, or null if there was an error connecting to the discovery service
 */
public DiscoveryResponse discover() {
    DiscoveryRequest request = new DiscoveryRequest().withMessageId(generateMessageId());
    ResponseEntity<DiscoveryResponse> response = conn.getRestTemplate()
            .postForEntity(conn.getDiscoveryUrl(), wrapRequest(request), DiscoveryResponse.class);
    return respond(response);
}
/**
 * Convenience lookup that returns the first service of the requested type.
 *
 * @param services a collection of {@code ServiceInstanceType}, typically from {@code discovery.getServiceInstances()}
 * @param type     the service type to locate in the collection
 * @return the first matching {@code ServiceInstanceType}, or null when none matches
 */
public ServiceInstanceType findService(Collection<ServiceInstanceType> services, ServiceTypeEnum type) {
    ServiceInstanceType match = null;
    for (ServiceInstanceType candidate : services) {
        if (candidate.getServiceType().equals(type)) {
            match = candidate;
            break;
        }
    }
    return match;
}
/**
 * Convenience lookup that returns the first TAXII collection with the given name.
 *
 * @param collections a collection of {@code CollectionRecordType}, typically from {@code cm.getCollections()}
 * @param name        the name of the collection to locate
 * @return the first matching {@code CollectionRecordType}, or null when none matches
 */
public CollectionRecordType findCollection(Collection<CollectionRecordType> collections, String name) {
    CollectionRecordType match = null;
    for (CollectionRecordType candidate : collections) {
        if (candidate.getCollectionName().equals(name)) {
            match = candidate;
            break;
        }
    }
    return match;
}
/**
* runs a TAXII 1.1 collection management request (a.k.a collection information)
*
* @param service the Collection Management (information) <code>ServiceInstanceType</code> as returned from {@link #discover()}
* @return a <code>CollectionInformationResponse</code> or null when there was an error retrieving the information
* @throws MalformedURLException when the service URL is incorrect
* @throws URISyntaxException when the service URL is incorrect
*/
public CollectionInformationResponse collectionInformation(ServiceInstanceType service) throws MalformedURLException, URISyntaxException {
return collectionInformation(service.getAddress());
}
/**
* runs a TAXII 1.1 collection management request (a.k.a collection information)
*
* @param url the collection management service URL
* @return a <code>CollectionInformationResponse</code> or null when there was an error retrieving the information
* @throws MalformedURLException when the service URL is incorrect
* @throws URISyntaxException when the service URL is incorrect
*/
public CollectionInformationResponse collectionInformation(String url) throws MalformedURLException, URISyntaxException {
return collectionInformation(new URL(url));
}
/**
* runs a TAXII 1.1 collection management request (a.k.a collection information)
*
* @param url the collection management service URL
* @return a <code>CollectionInformationResponse</code> or null when there was an error retrieving the information
* @throws URISyntaxException when the service URL cannot be converted into a URI
*/
public CollectionInformationResponse collectionInformation(URL url) throws URISyntaxException {
ResponseEntity<CollectionInformationResponse> response = conn.getRestTemplate().postForEntity(url.toURI(),
wrapRequest(new CollectionInformationRequest().withMessageId(generateMessageId())), CollectionInformationResponse.class);
return respond(response);
}
/**
* polls a TAXII 1.1 service
*
* @param collection the collection record to poll
* @return a poll response
* @throws URISyntaxException when the service URL cannot be converted into a URI
* @throws MalformedURLException when the collection record has an incorrect address
*/
public PollResponse poll(CollectionRecordType collection) throws URISyntaxException, MalformedURLException {
return poll(collection, "", yesterday(), new Date());
}
/**
* polls a TAXII 1.1 poll service
*
* @param collection the collection record to poll
* @param subscriptionId an optional subscription ID. Some services require it, even if they ignore it (like hail a
* taxii)
* @param exclusiveBegin begin time to poll
* @param inclusiveEnd end time to poll
* @return a poll response
* @throws URISyntaxException when the collection record URL cannot be converted to a URI
* @throws MalformedURLException when the collection record has an incorrect address
*/
public PollResponse poll(CollectionRecordType collection, String subscriptionId, Date exclusiveBegin, Date inclusiveEnd) throws URISyntaxException, MalformedURLException {
return poll(new URL(collection.getPollingServices().get(0).getAddress()), collection.getCollectionName(),
subscriptionId, exclusiveBegin, inclusiveEnd);
}
/**
* polls a TAXII 1.1 service
*
* @param pollUrl poll service URL
* @param collectionName collection name to poll
* @param subscriptionId an optional subscription ID. Some service require it, even if they ignore it (like hail a taxii)
* @param exclusiveBegin begin time to poll
* @param inclusiveEnd end time to poll
* @return a poll response
* @throws URISyntaxException when the collection record URL cannot be converted to a URI
*/
public PollResponse poll(URL pollUrl, String collectionName, String subscriptionId, Date exclusiveBegin, Date inclusiveEnd) throws URISyntaxException {
PollRequest pollRequest;
try {
pollRequest = new PollRequest()
.withMessageId(generateMessageId())
.withCollectionName(collectionName)
.withExclusiveBeginTimestamp(toXmlGregorianCalendar(exclusiveBegin))
.withInclusiveEndTimestamp(toXmlGregorianCalendar(inclusiveEnd))
.withSubscriptionID(subscriptionId);
} catch (DatatypeConfigurationException e) {
log.error("error converting dates: " + e.getMessage(), e);
return null;
}
try {
// poll
ResponseEntity<PollResponse> response = conn.getRestTemplate().postForEntity(pollUrl.toURI(),
wrapRequest(pollRequest), PollResponse.class);
return respond(response);
} catch (TypeMismatchException e) {
// if we're here this means that the poll request returned an error in the form of a StatusMessage, let's
// poll again and log the message
log.error("poll request failed, response contained a status message instead of a poll response, " +
"requesting again to retrieve the status message", e);
ResponseEntity<StatusMessage> response = conn.getRestTemplate().postForEntity(pollUrl.toURI(),
wrapRequest(pollRequest), StatusMessage.class);
log.error("error polling, status: "+statusMessageSafelyToString(response.getBody()));
return null;
}
}
private String statusMessageSafelyToString(StatusMessage msg) {
StringBuilder sb = new StringBuilder();
if(!isEmpty(msg.getMessageId())) {
sb.append("message ID: ").append(msg.getMessageId()).append("\n");
}
if(!isEmpty(msg.getInResponseTo())) {
sb.append("in response to: ").append(msg.getInResponseTo()).append("\n");
}
if(!isEmpty(msg.getStatusType())) {
sb.append("status type: ").append(msg.getStatusType()).append("\n");
}
if(!isEmpty(msg.getMessage())) {
sb.append("message: ").append(msg.getMessage()).append("\n");
}
if(msg.getStatusDetail()!=null && msg.getStatusDetail().getDetails()!=null) {
sb.append("details: ").append(collectionToCommaDelimitedString(msg.getStatusDetail().getDetails()));
}
return sb.toString();
}
private <T> T respond(ResponseEntity<T> response) {
if (response.getStatusCode() == OK) {
return response.getBody();
}
log.error("error in TAXII request: " + response.getStatusCode());
return null;
}
private Date yesterday() {
return new Date(currentTimeMillis() - 86400000);
}
private XMLGregorianCalendar toXmlGregorianCalendar(Date date) throws DatatypeConfigurationException {
GregorianCalendar c = new GregorianCalendar();
c.setTime(date);
return newInstance().newXMLGregorianCalendar(c);
}
private <T> HttpEntity<T> wrapRequest(T body) {
HttpHeaders headers = new HttpHeaders();
headers.setContentType(APPLICATION_XML);
headers.setAccept(singletonList(APPLICATION_XML));
headers.add("X-TAXII-Services", VID_TAXII_SERVICES_11);
headers.add("X-TAXII-Content-Type", VID_TAXII_XML_11);
String binding = conn.getDiscoveryUrl().getScheme().endsWith("s") ? VID_TAXII_HTTPS_10 : VID_TAXII_HTTP_10;
headers.add("X-TAXII-Protocol", binding);
return new HttpEntity<>(body, headers);
}
private String generateMessageId() {
return String.valueOf(currentTimeMillis() / 100000);
}
}
| apache-2.0 |
mehdi149/OF_COMPILER_0.1 | gen-src/main/java/org/projectfloodlight/openflow/protocol/ver14/OFBsnGenericCommandVer14.java | 13664 | // Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import com.google.common.collect.ImmutableList;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * OpenFlow 1.4 wire representation of the Big Switch Networks "generic command"
 * experimenter message (experimenter 0x5c16c7, subtype 0x47): an xid, a 64-byte
 * fixed-length name and a list of BSN TLVs. Immutable; built via the nested builders,
 * serialized/deserialized by the nested Reader/Writer.
 */
class OFBsnGenericCommandVer14 implements OFBsnGenericCommand {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnGenericCommandVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    final static int MINIMUM_LENGTH = 80;

    private final static long DEFAULT_XID = 0x0L;
    private final static String DEFAULT_NAME = "";
    private final static List<OFBsnTlv> DEFAULT_TLVS = ImmutableList.<OFBsnTlv>of();

    // OF message fields
    private final long xid;
    private final String name;
    private final List<OFBsnTlv> tlvs;
    //
    // Immutable default instance
    final static OFBsnGenericCommandVer14 DEFAULT = new OFBsnGenericCommandVer14(
        DEFAULT_XID, DEFAULT_NAME, DEFAULT_TLVS
    );

    // package private constructor - used by readers, builders, and factory
    OFBsnGenericCommandVer14(long xid, String name, List<OFBsnTlv> tlvs) {
        if(name == null) {
            throw new NullPointerException("OFBsnGenericCommandVer14: property name cannot be null");
        }
        if(tlvs == null) {
            throw new NullPointerException("OFBsnGenericCommandVer14: property tlvs cannot be null");
        }
        this.xid = xid;
        this.name = name;
        this.tlvs = tlvs;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    @Override
    public OFType getType() {
        return OFType.EXPERIMENTER;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }

    @Override
    public long getSubtype() {
        return 0x47L;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public List<OFBsnTlv> getTlvs() {
        return tlvs;
    }

    public OFBsnGenericCommand.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /** builder that falls back to an existing message's values for unset properties */
    static class BuilderWithParent implements OFBsnGenericCommand.Builder {
        final OFBsnGenericCommandVer14 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean nameSet;
        private String name;
        private boolean tlvsSet;
        private List<OFBsnTlv> tlvs;

        BuilderWithParent(OFBsnGenericCommandVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFType getType() {
            return OFType.EXPERIMENTER;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnGenericCommand.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x47L;
        }

        @Override
        public String getName() {
            return name;
        }

        @Override
        public OFBsnGenericCommand.Builder setName(String name) {
            this.name = name;
            this.nameSet = true;
            return this;
        }

        @Override
        public List<OFBsnTlv> getTlvs() {
            return tlvs;
        }

        @Override
        public OFBsnGenericCommand.Builder setTlvs(List<OFBsnTlv> tlvs) {
            this.tlvs = tlvs;
            this.tlvsSet = true;
            return this;
        }

        @Override
        public OFBsnGenericCommand build() {
            long xid = this.xidSet ? this.xid : parentMessage.xid;
            String name = this.nameSet ? this.name : parentMessage.name;
            if(name == null)
                throw new NullPointerException("Property name must not be null");
            List<OFBsnTlv> tlvs = this.tlvsSet ? this.tlvs : parentMessage.tlvs;
            if(tlvs == null)
                throw new NullPointerException("Property tlvs must not be null");
            //
            return new OFBsnGenericCommandVer14(
                    xid,
                    name,
                    tlvs
                );
        }
    }

    /** builder that falls back to the class defaults for unset properties */
    static class Builder implements OFBsnGenericCommand.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean nameSet;
        private String name;
        private boolean tlvsSet;
        private List<OFBsnTlv> tlvs;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFType getType() {
            return OFType.EXPERIMENTER;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnGenericCommand.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x47L;
        }

        @Override
        public String getName() {
            return name;
        }

        @Override
        public OFBsnGenericCommand.Builder setName(String name) {
            this.name = name;
            this.nameSet = true;
            return this;
        }

        @Override
        public List<OFBsnTlv> getTlvs() {
            return tlvs;
        }

        @Override
        public OFBsnGenericCommand.Builder setTlvs(List<OFBsnTlv> tlvs) {
            this.tlvs = tlvs;
            this.tlvsSet = true;
            return this;
        }

        //
        @Override
        public OFBsnGenericCommand build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            String name = this.nameSet ? this.name : DEFAULT_NAME;
            if(name == null)
                throw new NullPointerException("Property name must not be null");
            List<OFBsnTlv> tlvs = this.tlvsSet ? this.tlvs : DEFAULT_TLVS;
            if(tlvs == null)
                throw new NullPointerException("Property tlvs must not be null");
            return new OFBsnGenericCommandVer14(
                    xid,
                    name,
                    tlvs
                );
        }
    }

    final static Reader READER = new Reader();
    static class Reader implements OFMessageReader<OFBsnGenericCommand> {
        @Override
        public OFBsnGenericCommand readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 5
            byte version = bb.readByte();
            if(version != (byte) 0x5)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_14(5), got="+version);
            // fixed value property type == 4
            byte type = bb.readByte();
            if(type != (byte) 0x4)
                throw new OFParseError("Wrong type: Expected=OFType.EXPERIMENTER(4), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property experimenter == 0x5c16c7L
            int experimenter = bb.readInt();
            if(experimenter != 0x5c16c7)
                throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
            // fixed value property subtype == 0x47L
            int subtype = bb.readInt();
            if(subtype != 0x47)
                throw new OFParseError("Wrong subtype: Expected=0x47L(0x47L), got="+subtype);
            String name = ChannelUtils.readFixedLengthString(bb, 64);
            List<OFBsnTlv> tlvs = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFBsnTlvVer14.READER);
            OFBsnGenericCommandVer14 bsnGenericCommandVer14 = new OFBsnGenericCommandVer14(
                    xid,
                    name,
                    tlvs
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnGenericCommandVer14);
            return bsnGenericCommandVer14;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnGenericCommandVer14Funnel FUNNEL = new OFBsnGenericCommandVer14Funnel();
    static class OFBsnGenericCommandVer14Funnel implements Funnel<OFBsnGenericCommandVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnGenericCommandVer14 message, PrimitiveSink sink) {
            // fixed value property version = 5
            sink.putByte((byte) 0x5);
            // fixed value property type = 4
            sink.putByte((byte) 0x4);
            // FIXME: skip funnel of length
            sink.putLong(message.xid);
            // fixed value property experimenter = 0x5c16c7L
            sink.putInt(0x5c16c7);
            // fixed value property subtype = 0x47L
            sink.putInt(0x47);
            sink.putUnencodedChars(message.name);
            FunnelUtils.putList(message.tlvs, sink);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    static class Writer implements OFMessageWriter<OFBsnGenericCommandVer14> {
        @Override
        public void write(ByteBuf bb, OFBsnGenericCommandVer14 message) {
            int startIndex = bb.writerIndex();
            // fixed value property version = 5
            bb.writeByte((byte) 0x5);
            // fixed value property type = 4
            bb.writeByte((byte) 0x4);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            bb.writeInt(U32.t(message.xid));
            // fixed value property experimenter = 0x5c16c7L
            bb.writeInt(0x5c16c7);
            // fixed value property subtype = 0x47L
            bb.writeInt(0x47);
            ChannelUtils.writeFixedLengthString(bb, message.name, 64);
            ChannelUtils.writeList(bb, message.tlvs);
            // update length field
            int length = bb.writerIndex() - startIndex;
            bb.setShort(lengthIndex, length);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnGenericCommandVer14(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("name=").append(name);
        b.append(", ");
        b.append("tlvs=").append(tlvs);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnGenericCommandVer14 other = (OFBsnGenericCommandVer14) obj;

        if( xid != other.xid)
            return false;
        if (name == null) {
            if (other.name != null)
                return false;
        } else if (!name.equals(other.name))
            return false;
        if (tlvs == null) {
            if (other.tlvs != null)
                return false;
        } else if (!tlvs.equals(other.tlvs))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // fix: the generated code computed "prime * (int)(xid ^ (xid >>> 32))", dropping
        // the running result from the first term; use the standard prime * result + term
        // form so this class matches the hash scheme of the other generated classes
        result = prime * result + (int) (xid ^ (xid >>> 32));
        result = prime * result + ((name == null) ? 0 : name.hashCode());
        result = prime * result + ((tlvs == null) ? 0 : tlvs.hashCode());
        return result;
    }

}
| apache-2.0 |
lsmall/flowable-engine | modules/flowable-bpmn-converter/src/test/java/org/flowable/editor/language/xml/SubProcessConverterTest.java | 4520 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.editor.language.xml;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import org.flowable.bpmn.model.FlowableListener;
import org.flowable.bpmn.model.BoundaryEvent;
import org.flowable.bpmn.model.BpmnModel;
import org.flowable.bpmn.model.FlowElement;
import org.flowable.bpmn.model.ImplementationType;
import org.flowable.bpmn.model.StartEvent;
import org.flowable.bpmn.model.SubProcess;
import org.flowable.bpmn.model.TimerEventDefinition;
import org.flowable.bpmn.model.UserTask;
import org.junit.Test;
public class SubProcessConverterTest extends AbstractConverterTest {

    @Test
    public void convertXMLToModel() throws Exception {
        // parse the XML resource and check the resulting model
        validateModel(readXMLFile());
    }

    @Test
    public void convertModelToXML() throws Exception {
        // round-trip: parse, export back to XML, parse again, then validate
        validateModel(exportAndReadXMLFile(readXMLFile()));
    }

    @Override
    protected String getResource() {
        return "subprocessmodel.bpmn";
    }

    /**
     * Looks up a flow element by id on the main process and asserts that it exists,
     * has the expected type and carries the expected id.
     */
    private <T extends FlowElement> T requireElement(BpmnModel model, String id, Class<T> expectedType) {
        FlowElement element = model.getMainProcess().getFlowElement(id);
        assertNotNull(element);
        assertTrue(expectedType.isInstance(element));
        assertEquals(id, element.getId());
        return expectedType.cast(element);
    }

    /** Asserts a class-delegate execution listener firing on the "start" event. */
    private void verifyStartClassListener(FlowableListener listener, String expectedClass) {
        assertEquals(expectedClass, listener.getImplementation());
        assertEquals(ImplementationType.IMPLEMENTATION_TYPE_CLASS, listener.getImplementationType());
        assertEquals("start", listener.getEvent());
    }

    private void validateModel(BpmnModel model) {
        // start event
        requireElement(model, "start1", StartEvent.class);

        // user task: candidates and form properties
        UserTask task = requireElement(model, "userTask1", UserTask.class);
        assertEquals(1, task.getCandidateUsers().size());
        assertEquals(1, task.getCandidateGroups().size());
        assertEquals(2, task.getFormProperties().size());

        // embedded sub process: sequential multi-instance, contents, listener
        SubProcess embedded = requireElement(model, "subprocess1", SubProcess.class);
        assertTrue(embedded.getLoopCharacteristics().isSequential());
        assertEquals("10", embedded.getLoopCharacteristics().getLoopCardinality());
        assertEquals("${assignee == \"\"}", embedded.getLoopCharacteristics().getCompletionCondition());
        assertEquals(5, embedded.getFlowElements().size());
        assertEquals(1, embedded.getExecutionListeners().size());
        verifyStartClassListener(embedded.getExecutionListeners().get(0), "SubProcessTestClass");

        // timer boundary event attached to the sub process
        BoundaryEvent timerBoundary = requireElement(model, "boundaryEvent1", BoundaryEvent.class);
        assertNotNull(timerBoundary.getAttachedToRef());
        assertEquals("subprocess1", timerBoundary.getAttachedToRef().getId());
        assertEquals(1, timerBoundary.getEventDefinitions().size());
        assertTrue(timerBoundary.getEventDefinitions().get(0) instanceof TimerEventDefinition);

        // process-level execution listener
        assertEquals(1, model.getMainProcess().getExecutionListeners().size());
        verifyStartClassListener(model.getMainProcess().getExecutionListeners().get(0), "TestClass");
    }
}
| apache-2.0 |
OrienteerBAP/wicket-orientdb | wicket-orientdb/src/main/java/ru/ydn/wicket/wicketorientdb/OrientDbWebApplication.java | 6526 | package ru.ydn.wicket.wicketorientdb;
import java.util.function.Supplier;
import org.apache.wicket.Application;
import org.apache.wicket.ConverterLocator;
import org.apache.wicket.IApplicationListener;
import org.apache.wicket.IConverterLocator;
import org.apache.wicket.authroles.authentication.AuthenticatedWebApplication;
import org.apache.wicket.core.util.lang.PropertyResolver;
import org.apache.wicket.protocol.http.WebApplication;
import org.apache.wicket.request.IExceptionMapper;
import org.apache.wicket.util.string.Strings;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.metadata.security.ODatabaseSecurityResources;
import com.orientechnologies.orient.core.metadata.security.ORule.ResourceGeneric;
import com.orientechnologies.orient.core.metadata.security.OSecurityUser;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.server.OServer;
import ru.ydn.wicket.wicketorientdb.converter.HexConverter;
import ru.ydn.wicket.wicketorientdb.converter.ODocumentConverter;
import ru.ydn.wicket.wicketorientdb.converter.OIdentifiableConverter;
import ru.ydn.wicket.wicketorientdb.rest.OrientDBHttpAPIResource;
import ru.ydn.wicket.wicketorientdb.security.IResourceCheckingStrategy;
import ru.ydn.wicket.wicketorientdb.security.OSecurityHelper;
import ru.ydn.wicket.wicketorientdb.security.WicketOrientDbAuthorizationStrategy;
import ru.ydn.wicket.wicketorientdb.service.ODatabaseHooksInstallListener;
import ru.ydn.wicket.wicketorientdb.utils.FixFormEncTypeListener;
import ru.ydn.wicket.wicketorientdb.utils.FlexyMetaDataKey;
import ru.ydn.wicket.wicketorientdb.utils.ODocumentPropertyLocator;
/**
 * {@link WebApplication} realization for applications on top of OrientDB.
 *
 * <p>Wires the OrientDB lifecycle into the Wicket application lifecycle: database
 * factories and hooks are registered in {@link #init()}, the embedded Orient engine is
 * started/stopped with the application, and security checks are delegated to the
 * current session's effective OrientDB user.</p>
 */
public abstract class OrientDbWebApplication extends AuthenticatedWebApplication implements IResourceCheckingStrategy {
    // settings are mutable in place; see getOrientDbSettings()
    private IOrientDbSettings orientDbSettings = new OrientDbSettings();
    // embedded OServer, when one is attached via setServer()
    private OServer server;
    private Supplier<IExceptionMapper> exceptionMapperProvider = () -> new OrientDefaultExceptionMapper();

    @Override
    protected Class<? extends OrientDbWebSession> getWebSessionClass()
    {
        return OrientDbWebSession.class;
    }

    /**
     * @return settings for the application
     */
    public IOrientDbSettings getOrientDbSettings()
    {
        return orientDbSettings;
    }

    /**
     * Explicit set of settings for the application. Doesn't recommended to use this method. Consider to use getOrientDBSettings().setXXX()
     * @param orientDbSettings whole {@link IOrientDbSettings} to be set
     */
    public void setOrientDbSettings(IOrientDbSettings orientDbSettings)
    {
        this.orientDbSettings=orientDbSettings;
    }

    /** @return the {@link OrientDbWebApplication} bound to the current thread */
    public static OrientDbWebApplication get()
    {
        return (OrientDbWebApplication) WebApplication.get();
    }

    /** @return an OrientDbWebApplication located via {@link #lookupApplication(Class)}, or null */
    public static OrientDbWebApplication lookupApplication()
    {
        return lookupApplication(OrientDbWebApplication.class);
    }

    /**
     * Finds an application of the given type: first the one bound to the current thread,
     * otherwise scans all registered application keys.
     *
     * @param appClass required application type
     * @return the matching application or null when none is registered
     */
    protected static <T extends OrientDbWebApplication> T lookupApplication(Class<T> appClass)
    {
        Application app = Application.exists()?Application.get():null;
        if(app!=null && appClass.isInstance(app)) return (T)app;
        else
        {
            for(String appKey: Application.getApplicationKeys())
            {
                app = Application.get(appKey);
                if(appClass.isInstance(app)) return (T)app;
            }
        }
        return null;
    }

    @Override
    protected void init() {
        super.init();
        OSecurityHelper.init(); //Make sure that FEATURE was loaded
        // per-thread database acquisition + hook installation for new databases
        Orient.instance().registerThreadDatabaseFactory(new DefaultODatabaseThreadLocalFactory(this));
        Orient.instance().addDbLifecycleListener(new ODatabaseHooksInstallListener(this));
        // one transaction per request cycle
        getRequestCycleListeners().add(newTransactionRequestCycleListener());
        // this application acts as its own IResourceCheckingStrategy (see checkResource below)
        getSecuritySettings().setAuthorizationStrategy(new WicketOrientDbAuthorizationStrategy(this, this));
        // tie the Orient engine lifetime to the Wicket application lifetime;
        // the default shutdown hook is removed so shutdown happens exactly once, here
        getApplicationListeners().add(new IApplicationListener() {

            @Override
            public void onAfterInitialized(Application application) {
                Orient.instance().startup();
                Orient.instance().removeShutdownHook();
            }

            @Override
            public void onBeforeDestroyed(Application application) {
                Orient.instance().shutdown();
            }
        });
        getAjaxRequestTargetListeners().add(new FixFormEncTypeListener());
        //workaround to support changing system users passwords in web interface
        getOrientDbSettings().addORecordHooks(OUserCatchPasswordHook.class);
        PropertyResolver.setLocator(this, new ODocumentPropertyLocator(new PropertyResolver.CachingPropertyLocator(new PropertyResolver.DefaultPropertyLocator())));
    }

    /** factory method: subclasses may return a customized transaction listener */
    protected TransactionRequestCycleListener newTransactionRequestCycleListener()
    {
        return new TransactionRequestCycleListener();
    }

    @Override
    protected IConverterLocator newConverterLocator()
    {
        // register OrientDB-aware converters on top of Wicket's defaults
        ConverterLocator locator = new ConverterLocator();
        locator.set(OIdentifiable.class, new OIdentifiableConverter<OIdentifiable>());
        locator.set(ODocument.class, new ODocumentConverter());
        locator.set(byte[].class, new HexConverter());
        return locator;
    }

    /** @return the embedded {@link OServer}, or null when none was attached */
    public OServer getServer() {
        return server;
    }

    public void setServer(OServer server) {
        this.server = server;
    }

    /** mounts the OrientDB REST API under this application */
    protected void mountOrientDbRestApi()
    {
        OrientDBHttpAPIResource.mountOrientDbRestApi(this);
    }

    /** @return the OrientDB version taken from the Orient package manifest */
    public String getOrientDBVersion()
    {
        return Orient.class.getPackage().getImplementationVersion();
    }

    /**
     * Checks whether the session's effective user may perform the operation on the
     * resource. If the exact (resource, specific) pair is not allowed, walks up the
     * dot-separated path of {@code specific}, checking each ancestor with a
     * {@code .*} (ALL) suffix — so a grant on a parent resource covers its children.
     */
    @Override
    public boolean checkResource(ResourceGeneric resource, String specific, int iOperation) {
        OSecurityUser user = OrientDbWebSession.get().getEffectiveUser();
        if(Strings.isEmpty(specific)) specific = null;
        if(user.checkIfAllowed(resource, specific, iOperation)!=null) return true;
        // NOTE(review): relies on Strings.beforeLastPathComponent tolerating a null
        // input (returning an empty string) for the specific==null case — confirm
        while(!Strings.isEmpty(specific=Strings.beforeLastPathComponent(specific, '.')))
        {
            if(user.checkIfAllowed(resource, specific+"."+ODatabaseSecurityResources.ALL, iOperation)!=null) return true;
        }
        return false;
    }

    @Override
    public Supplier<IExceptionMapper> getExceptionMapperProvider() {
        return exceptionMapperProvider;
    }

    /**
     * @param exceptionMapperProvider provider to use; null restores Wicket's default
     */
    protected void setExceptionMapperProvider(Supplier<IExceptionMapper> exceptionMapperProvider) {
        this.exceptionMapperProvider = exceptionMapperProvider!=null?exceptionMapperProvider:super.getExceptionMapperProvider();
    }

    /** reads application-scoped metadata stored under an arbitrary key */
    public final synchronized <K, V> V getMetaData(final K key)
    {
        return FlexyMetaDataKey.get(this, key);
    }

    /** stores application-scoped metadata under an arbitrary key; returns this for chaining */
    public final synchronized <K, V> OrientDbWebApplication setMetaData(final K key, final V value)
    {
        FlexyMetaDataKey.set(this, key, value);
        return this;
    }

}
| apache-2.0 |
collection-json/collection-json.java | src/main/java/net/hamnaberg/json/Value.java | 972 | /*
* Copyright 2012 Erlend Hamnaberg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.hamnaberg.json;
import com.fasterxml.jackson.databind.JsonNode;
import java.util.Optional;
public interface Value {
    /** @return true when this value wraps a JSON boolean */
    boolean isBoolean();

    /** @return true when this value wraps a JSON string */
    boolean isString();

    /** @return true when this value wraps a JSON number */
    boolean isNumeric();

    /** @return true when this value represents JSON null */
    boolean isNull();

    /** @return the value as a string */
    String asString();

    /** @return the value as a boolean */
    boolean asBoolean();

    /** @return the value as a number */
    Number asNumber();

    /** @return the underlying Jackson representation of this value */
    JsonNode asJson();

    /** shared constant for "no value present" */
    Optional<Value> NONE = Optional.empty();
}
| apache-2.0 |
royclarkson/spring-boot | spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/elasticsearch/rest/RestClientBuilderCustomizer.java | 1314 | /*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.elasticsearch.rest;
import org.elasticsearch.client.RestClientBuilder;
/**
 * Callback interface that can be implemented by beans wishing to further customize the
 * {@link org.elasticsearch.client.RestClient} via a {@link RestClientBuilder} whilst
 * retaining default auto-configuration.
 *
 * <p>This type is a deprecated alias kept for binary/source compatibility: it declares
 * no members of its own and simply extends the relocated interface, so existing
 * implementations keep working while callers migrate to the new package.</p>
 *
 * @author Brian Clozel
 * @since 2.1.0
 * @deprecated as of 2.3.1 in favor of
 * {@link org.springframework.boot.autoconfigure.elasticsearch.RestClientBuilderCustomizer}
 */
@FunctionalInterface
@Deprecated
public interface RestClientBuilderCustomizer
        extends org.springframework.boot.autoconfigure.elasticsearch.RestClientBuilderCustomizer {

}
| apache-2.0 |
apache/uima-uimaj | uimaj-tools/src/main/java/org/apache/uima/tools/jcasgen/GUI.java | 12369 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.tools.jcasgen;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowEvent;
import java.io.IOException;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.ScrollPaneConstants;
import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
import org.apache.uima.tools.images.Images;
import org.apache.uima.tools.util.gui.AboutDialog;
/**
* The Class GUI.
*/
public class GUI extends JFrame {
/** The Constant serialVersionUID. */
private static final long serialVersionUID = 1L;
/** The about dialog. */
private AboutDialog aboutDialog;
/** The Constant NL. */
final static String NL = System.getProperties().getProperty("line.separator");
/** The GUI. */
static GUI theGUI;
/** The jg. */
final Jg jg;
/**
* The Class G.
*/
class G extends JPanel implements ActionListener {
/** The Constant serialVersionUID. */
private static final long serialVersionUID = 1L;
/** The gui. */
final GUI gui;
/** The lb label 6. */
JLabel lbLabel6;
/** The lb label 1. */
JLabel lbLabel1;
/** The bt input file browse. */
JButton btInputFileBrowse;
/** The bt out dir browse. */
JButton btOutDirBrowse;
/** The bt gen xml browse. */
JButton btGenXmlBrowse;
/** The lb label 0. */
JLabel lbLabel0;
/** The lb label 9. */
JLabel lbLabel9;
/** The tf input file name. */
JTextArea tfInputFileName;
/** The tf out dir name. */
JTextArea tfOutDirName;
/** The tf gen XM lname. */
JTextArea tfGenXMLname;
/** The lb label 11. */
JLabel lbLabel11;
/** The lb label 12. */
JLabel lbLabel12;
/** The lb label 13. */
JLabel lbLabel13;
/** The bt go. */
JButton btGo;
/** The ta status. */
JTextArea taStatus;
/** The area scroll pane. */
JScrollPane areaScrollPane;
/** The lb label 15. */
JLabel lbLabel15;
/** The lb label 16. */
JLabel lbLabel16;
/** The lb label 17. */
JLabel lbLabel17;
/** The lb label 10. */
JLabel lbLabel10;
/** The lb result. */
JLabel lbResult;
/**
* Instantiates a new g.
*
* @param gui
* the gui
*/
public G(GUI gui) {
this.gui = gui;
GridBagLayout gbG = new GridBagLayout();
setLayout(gbG);
GridBagConstraints gbcG = new GridBagConstraints();
// global, reused values
gbcG.insets = new Insets(4, 4, 4, 4);
gbcG.gridwidth = 1;
gbcG.gridheight = 1;
gbcG.weightx = 0;
gbcG.weighty = 0;
gbcG.fill = GridBagConstraints.NONE;
gbcG.anchor = GridBagConstraints.CENTER;
// layout:
// 0 1 2
// 0 labels i/o text area buttons
// 1 title old
// 2 inFile txt browse
// 3 out dir txt browse
// 4 gen xml txt
// 5 status
// 6 go-button txt-status result
lbLabel17 = new JLabel("Welcome to the JCasGen tool. You can drag corners to resize.");
lbLabel17.setRequestFocusEnabled(false);
lbLabel17.setToolTipText(null);
lbLabel17.setVerifyInputWhenFocusTarget(false);
gbcG.gridx = 1;
gbcG.gridy = 0;
gbG.setConstraints(lbLabel17, gbcG);
add(lbLabel17);
lbLabel0 = new JLabel("Input File:");
gbcG.gridx = 0;
gbcG.gridy = 2;
gbG.setConstraints(lbLabel0, gbcG);
add(lbLabel0);
lbLabel1 = new JLabel("Output Directory:");
gbcG.gridy = 3;
gbG.setConstraints(lbLabel1, gbcG);
add(lbLabel1);
btInputFileBrowse = new JButton("Browse");
btInputFileBrowse.addActionListener(this);
gbcG.gridx = 3;
gbcG.gridy = 2;
gbcG.anchor = GridBagConstraints.WEST;
gbG.setConstraints(btInputFileBrowse, gbcG);
add(btInputFileBrowse);
btOutDirBrowse = new JButton("Browse");
btOutDirBrowse.addActionListener(this);
gbcG.gridy = 3;
gbG.setConstraints(btOutDirBrowse, gbcG);
add(btOutDirBrowse);
tfInputFileName = new JTextArea();
gbcG.gridx = 1;
gbcG.gridy = 2;
gbcG.fill = GridBagConstraints.BOTH;
gbcG.weightx = 1;
gbcG.weighty = 1;
gbcG.anchor = GridBagConstraints.WEST;
gbG.setConstraints(tfInputFileName, gbcG);
add(tfInputFileName);
tfOutDirName = new JTextArea();
gbcG.gridy = 3;
gbG.setConstraints(tfOutDirName, gbcG);
add(tfOutDirName);
lbLabel16 = new JLabel("Status");
gbcG.gridy = 5;
gbcG.fill = GridBagConstraints.NONE;
gbcG.weightx = 1;
gbcG.weighty = 0;
gbcG.anchor = GridBagConstraints.CENTER;
gbG.setConstraints(lbLabel16, gbcG);
add(lbLabel16);
taStatus = new JTextArea();
gbcG.gridy = 6;
gbcG.weighty = 5; // most weight goes here
gbcG.fill = GridBagConstraints.BOTH;
gbcG.anchor = GridBagConstraints.NORTHWEST;
areaScrollPane = new JScrollPane(taStatus);
areaScrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
gbG.setConstraints(areaScrollPane, gbcG);
add(areaScrollPane);
btGo = new JButton("Go");
btGo.addActionListener(this);
gbcG.gridx = 0;
gbcG.gridy = 6;
gbcG.fill = GridBagConstraints.NONE;
gbcG.weightx = 0; // was 1
gbcG.weighty = 0;
gbcG.anchor = GridBagConstraints.CENTER;
gbG.setConstraints(btGo, gbcG);
add(btGo);
lbResult = new JLabel(" ");
gbcG.gridx = 3;
gbcG.gridy = 6;
gbcG.fill = GridBagConstraints.BOTH;
gbcG.anchor = GridBagConstraints.NORTH;
gbG.setConstraints(lbResult, gbcG);
add(lbResult);
}
/*
* (non-Javadoc)
*
* @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
*/
@Override
public void actionPerformed(ActionEvent e) {
if (e.getSource() == btInputFileBrowse) {
browseFile(tfInputFileName); // Action for btInputFileBrowse
}
if (e.getSource() == btOutDirBrowse) {
browseDir(tfOutDirName); // Action for btOutDirBrowse
}
if (e.getSource() == btGenXmlBrowse) {
browseFile(tfGenXMLname); // Action for btGenXmlBrowse
}
if (e.getSource() == btGo) {
lbResult.setText("Working");
go(); // Action for btGo
}
}
/**
* Browse file.
*
* @param f
* the f
*/
void browseFile(JTextArea f) {
String startingDir = f.getText();
if (startingDir.length() == 0) {
// default to user.dir
startingDir = System.getProperty("user.dir");
}
JFileChooser c = new JFileChooser(startingDir);
int returnVal = c.showOpenDialog(this);
if (returnVal == JFileChooser.APPROVE_OPTION) {
try {
f.setText(c.getSelectedFile().getCanonicalPath());
Prefs.set(gui);
} catch (Exception e) { // do nothing
}
}
}
/**
* Browse dir.
*
* @param f
* the f
*/
void browseDir(JTextArea f) {
String startingDir = f.getText();
if (startingDir.length() == 0) {
// default to user.dir
startingDir = System.getProperty("user.dir");
}
JFileChooser c = new JFileChooser(startingDir);
c.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
int returnVal = c.showOpenDialog(gui);
if (returnVal == JFileChooser.APPROVE_OPTION) {
try {
f.setText(c.getSelectedFile().getCanonicalPath());
Prefs.set(gui);
} catch (Exception e) { // do nothing
}
}
}
/**
* Go.
*/
void go() {
final String outDirName = tfOutDirName.getText();
final String inFileName = tfInputFileName.getText();
Runnable r = new Runnable() {
@Override
public void run() {
jg.main0(new String[] { "-jcasgeninput", inFileName, "-jcasgenoutput", outDirName },
jg.merger, new GuiProgressMonitor(), new GuiErrorImpl());
}
};
new Thread(r).start();
}
/**
* Show in status.
*
* @param message
* the message
* @return the string
*/
String showInStatus(String message) {
taStatus.setText(taStatus.getText() + message + NL);
areaScrollPane.getVerticalScrollBar()
.setValue(areaScrollPane.getVerticalScrollBar().getMaximum());
gui.repaint();
return message;
}
}
/** The pn G. */
G pnG;
/**
* The main method.
*
* @param args
* the arguments
*/
// for testing only
public static void main(String args[]) {
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (ClassNotFoundException e) { // do nothing
} catch (InstantiationException e) { // do nothing
} catch (IllegalAccessException e) { // do nothing
} catch (UnsupportedLookAndFeelException e) { // do nothing
}
theGUI = new GUI(null);
}
/**
* Instantiates a new gui.
*
* @param jg
* the jg
*/
public GUI(Jg jg) {
super("JCasGen");
theGUI = this;
this.jg = jg;
pnG = new G(this);
setDefaultCloseOperation(EXIT_ON_CLOSE);
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (Exception e) {
// I don't think this should ever happen, but if it does just print error and continue
// with defalt look and feel
System.err.println("Could not set look and feel: " + e.getMessage());
}
// Set frame icon image
try {
this.setIconImage(Images.getImage(Images.MICROSCOPE));
} catch (IOException e) {
System.err.println("Image could not be loaded: " + e.getMessage());
}
this.getContentPane().setBackground(Color.WHITE);
this.getContentPane().setLayout(new BorderLayout());
JLabel banner = new JLabel(Images.getImageIcon(Images.BANNER));
this.getContentPane().add(banner, BorderLayout.NORTH);
this.getContentPane().add(pnG, BorderLayout.CENTER);
aboutDialog = new AboutDialog(this, "About JCasGen");
setJMenuBar(createMenuBar());
pack();
// show();
}
/**
* Creates the menu bar.
*
* @return the j menu bar
*/
private JMenuBar createMenuBar() {
JMenuBar menuBar = new JMenuBar();
JMenu fileMenu = new JMenu("File");
JMenuItem exitMenuItem = new JMenuItem("Exit");
exitMenuItem.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
GUI.this.processWindowEvent(new WindowEvent(GUI.this, WindowEvent.WINDOW_CLOSING));
}
});
fileMenu.add(exitMenuItem);
JMenu helpMenu = new JMenu("Help");
JMenuItem aboutMenuItem = new JMenuItem("About");
aboutMenuItem.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
aboutDialog.setVisible(true);
}
});
helpMenu.add(aboutMenuItem);
menuBar.add(fileMenu);
menuBar.add(helpMenu);
return menuBar;
}
}
| apache-2.0 |
EvilMcJerkface/crate | server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/Decision.java | 11710 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing.allocation.decider;
import javax.annotation.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
/**
* This abstract class defining basic {@link Decision} used during shard
* allocation process.
*
* @see AllocationDecider
*/
public abstract class Decision implements ToXContent, Writeable {
public static final Decision ALWAYS = new Single(Type.YES);
public static final Decision YES = new Single(Type.YES);
public static final Decision NO = new Single(Type.NO);
public static final Decision THROTTLE = new Single(Type.THROTTLE);
/**
* Creates a simple decision
* @param type {@link Type} of the decision
* @param label label for the Decider that produced this decision
* @param explanation explanation of the decision
* @param explanationParams additional parameters for the decision
* @return new {@link Decision} instance
*/
public static Decision single(Type type, @Nullable String label, @Nullable String explanation, @Nullable Object... explanationParams) {
return new Single(type, label, explanation, explanationParams);
}
public static Decision readFrom(StreamInput in) throws IOException {
// Determine whether to read a Single or Multi Decision
if (in.readBoolean()) {
Multi result = new Multi();
int decisionCount = in.readVInt();
for (int i = 0; i < decisionCount; i++) {
Decision s = readFrom(in);
result.decisions.add(s);
}
return result;
} else {
Single result = new Single();
result.type = Type.readFrom(in);
result.label = in.readOptionalString();
result.explanationString = in.readOptionalString();
return result;
}
}
/**
* This enumeration defines the
* possible types of decisions
*/
public enum Type implements Writeable {
YES(1),
THROTTLE(2),
NO(0);
private final int id;
Type(int id) {
this.id = id;
}
public static Type resolve(String s) {
return Type.valueOf(s.toUpperCase(Locale.ROOT));
}
public static Type readFrom(StreamInput in) throws IOException {
int i = in.readVInt();
switch (i) {
case 0:
return NO;
case 1:
return YES;
case 2:
return THROTTLE;
default:
throw new IllegalArgumentException("No Type for integer [" + i + "]");
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(id);
}
public boolean higherThan(Type other) {
if (this == NO) {
return false;
} else if (other == NO) {
return true;
} else if (other == THROTTLE && this == YES) {
return true;
}
return false;
}
}
/**
* Get the {@link Type} of this decision
* @return {@link Type} of this decision
*/
public abstract Type type();
/**
* Get the description label for this decision.
*/
@Nullable
public abstract String label();
/**
* Get the explanation for this decision.
*/
@Nullable
public abstract String getExplanation();
/**
* Return the list of all decisions that make up this decision
*/
public abstract List<Decision> getDecisions();
/**
* Simple class representing a single decision
*/
public static class Single extends Decision implements ToXContentObject {
private Type type;
private String label;
private String explanation;
private String explanationString;
private Object[] explanationParams;
public Single() {
}
/**
* Creates a new {@link Single} decision of a given type
* @param type {@link Type} of the decision
*/
public Single(Type type) {
this(type, null, null, (Object[]) null);
}
/**
* Creates a new {@link Single} decision of a given type
*
* @param type {@link Type} of the decision
* @param explanation An explanation of this {@link Decision}
* @param explanationParams A set of additional parameters
*/
public Single(Type type, @Nullable String label, @Nullable String explanation, @Nullable Object... explanationParams) {
this.type = type;
this.label = label;
this.explanation = explanation;
this.explanationParams = explanationParams;
}
@Override
public Type type() {
return this.type;
}
@Override
@Nullable
public String label() {
return this.label;
}
@Override
public List<Decision> getDecisions() {
return Collections.singletonList(this);
}
/**
* Returns the explanation string, fully formatted. Only formats the string once.
*/
@Override
@Nullable
public String getExplanation() {
if (explanationString == null && explanation != null) {
explanationString = String.format(Locale.ROOT, explanation, explanationParams);
}
return this.explanationString;
}
@Override
public boolean equals(Object object) {
if (this == object) {
return true;
}
if (object == null || getClass() != object.getClass()) {
return false;
}
Decision.Single s = (Decision.Single) object;
return this.type == s.type &&
Objects.equals(label, s.label) &&
Objects.equals(getExplanation(), s.getExplanation());
}
@Override
public int hashCode() {
int result = type.hashCode();
result = 31 * result + (label == null ? 0 : label.hashCode());
String explanationStr = getExplanation();
result = 31 * result + (explanationStr == null ? 0 : explanationStr.hashCode());
return result;
}
@Override
public String toString() {
if (explanationString != null || explanation != null) {
return type + "(" + getExplanation() + ")";
}
return type + "()";
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("decider", label);
builder.field("decision", type);
String explanation = getExplanation();
builder.field("explanation", explanation != null ? explanation : "none");
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(false); // flag specifying its a single decision
type.writeTo(out);
out.writeOptionalString(label);
// Flatten explanation on serialization, so that explanationParams
// do not need to be serialized
out.writeOptionalString(getExplanation());
}
}
/**
* Simple class representing a list of decisions
*/
public static class Multi extends Decision implements ToXContentFragment {
private final List<Decision> decisions = new ArrayList<>();
/**
* Add a decision to this {@link Multi}decision instance
* @param decision {@link Decision} to add
* @return {@link Multi}decision instance with the given decision added
*/
public Multi add(Decision decision) {
decisions.add(decision);
return this;
}
@Override
public Type type() {
Type ret = Type.YES;
for (int i = 0; i < decisions.size(); i++) {
Type type = decisions.get(i).type();
if (type == Type.NO) {
return type;
} else if (type == Type.THROTTLE) {
ret = type;
}
}
return ret;
}
@Override
@Nullable
public String label() {
// Multi decisions have no labels
return null;
}
@Override
@Nullable
public String getExplanation() {
throw new UnsupportedOperationException("multi-level decisions do not have an explanation");
}
@Override
public List<Decision> getDecisions() {
return Collections.unmodifiableList(this.decisions);
}
@Override
public boolean equals(final Object object) {
if (this == object) {
return true;
}
if (object == null || getClass() != object.getClass()) {
return false;
}
final Decision.Multi m = (Decision.Multi) object;
return this.decisions.equals(m.decisions);
}
@Override
public int hashCode() {
return 31 * decisions.hashCode();
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
for (Decision decision : decisions) {
sb.append("[").append(decision.toString()).append("]");
}
return sb.toString();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
for (Decision d : decisions) {
d.toXContent(builder, params);
}
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(true); // flag indicating it is a multi decision
out.writeVInt(getDecisions().size());
for (Decision d : getDecisions()) {
d.writeTo(out);
}
}
}
}
| apache-2.0 |
nmdp-bioinformatics/service-hmlFhirConverter | src/main/java/org/nmdp/hmlfhirconverter/config/SwaggerConfig.java | 2377 | package org.nmdp.hmlfhirconverter.config;
/**
* Created by Andrew S. Brown, Ph.D., <abrown3@nmdp.org>, on 1/20/17.
* <p>
* service-hmlFhirConverter
* Copyright (c) 2012-2017 National Marrow Donor Program (NMDP)
* <p>
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; either version 3 of the License, or (at
* your option) any later version.
* <p>
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; with out even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
* <p>
* You should have received a copy of the GNU Lesser General Public License
* along with this library; if not, write to the Free Software Foundation,
* Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
* <p>
* > http://www.fsf.org/licensing/licenses/lgpl.html
* > http://www.opensource.org/licenses/lgpl-license.php
*/
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.service.Contact;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
@Configuration
@EnableSwagger2
public class SwaggerConfig {
@Bean
public Docket api() {
return new Docket(DocumentationType.SWAGGER_2)
.select()
.apis(RequestHandlerSelectors.basePackage("org.nmdp.hmlfhirconverter"))
.paths(PathSelectors.ant("/v1/**"))
.build()
.apiInfo(getApiInfo());
}
private ApiInfo getApiInfo() {
return new ApiInfo("Hml to Fhir Conversion Service",
"Service to support web-hmlFhirAngularClient",
"0.0.1",
null,
new Contact("Andrew S. Brown, Ph.D.", null, "abrown3@nmdp.org"),
"Copyright NMDP",
"CC Non-commercial Non-Redistributable");
}
}
| apache-2.0 |
rosogon/SeaCloudsPlatform | monitor/monitoring-dam-generator-core/src/main/java/eu/seaclouds/monitor/monitoringdamgenerator/adpparsing/YAMLMonitorParser.java | 14015 | package eu.seaclouds.monitor.monitoringdamgenerator.adpparsing;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;
import eu.seaclouds.monitor.monitoringdamgenerator.DeploymentType;
@SuppressWarnings("unchecked")
public class YAMLMonitorParser {
private static Logger logger = LoggerFactory
.getLogger(YAMLMonitorParser.class);
public static final String COMPUTE_NODE_PREFIX = "seaclouds.nodes.Compute";
public static final String PLATFORM_NODE_PREFIX = "seaclouds.nodes.Platform";
public static final String QOS_REQUIREMENT_POLICY = "QoSRequirements";
public static final String RESPONSE_TIME_REQUIREMENT = "response_time";
public static final String AVAILABILITY_REQUIREMENT = "availability";
public static final String COST_REQUIREMENT = "cost";
public static final String WORKLOAD_REQUIREMENT = "workload";
public static final String TOPOLOGY_KEY = "topology_template";
public static final String NODE_TEMPLATES_KEY = "node_templates";
public static final String NODE_TYPES_KEY = "node_types";
public static final String GROUPS_KEY = "groups";
public static final String MEMBERS_KEY = "members";
public static final String POLICIES_KEY = "policies";
public static final String LANGUAGE_KEY = "language";
public static final String PORT_KEY = "port";
public static final String PROPERTIES_KEY = "properties";
public static final String HOST_KEY = "host";
public static final String REQUIREMENTS_KEY = "requirements";
public static final String TYPE_KEY = "type";
public static final String RAW_TYPE_KEY = "derived_from";
public List<Module> getModuleRelevantInfoFromAdp(String adp)
throws AdpParsingException {
logger.info("Parsing the Abstract Deployment Model.");
Yaml yamlApp = new Yaml();
Map<String, Object> appMap = (Map<String, Object>) yamlApp.load(adp);
return getModuleRelevantInfoFromAdp(appMap);
}
private List<Module> getModuleRelevantInfoFromAdp(Map<String, Object> adp)
throws AdpParsingException {
List<Module> toReturn = new ArrayList<Module>();
Map<String, Host> hosts = new HashMap<String, Host>();
Module tempModule;
Host tempHost;
Map<String, Object> groups = getGroupsFromAdp(adp);
Map<String, Object> nodeTemplates = getNodeTemplatesFromAdp(adp);
Map<String, Object> nodeTypes = getNodeTypesFromAdp(adp);
for (String nodeTemplate : nodeTemplates.keySet()) {
String type = getNodeTemplateTypeName((Map<String, Object>) nodeTemplates
.get(nodeTemplate));
if (type.startsWith(COMPUTE_NODE_PREFIX)) {
tempHost = new Host();
tempHost.setHostName(nodeTemplate);
tempHost.setDeploymentType(DeploymentType.IaaS);
hosts.put(tempHost.getHostName(), tempHost);
} else if (type.startsWith(PLATFORM_NODE_PREFIX)) {
tempHost = new Host();
tempHost.setHostName(nodeTemplate);
tempHost.setDeploymentType(DeploymentType.PaaS);
hosts.put(tempHost.getHostName(), tempHost);
}
}
for (String nodeTemplate : nodeTemplates.keySet()) {
String type = getNodeTemplateTypeName((Map<String, Object>) nodeTemplates
.get(nodeTemplate));
if (!(type.startsWith(COMPUTE_NODE_PREFIX) || type
.startsWith(PLATFORM_NODE_PREFIX))){
tempModule = new Module();
tempModule.setModuleName(nodeTemplate);
Map<String, Object> nodeType = (Map<String, Object>) nodeTypes.get(type);
String rawType = (String) nodeType.get(RAW_TYPE_KEY);
tempModule.setType(rawType);
if (getNodeTemplateLanguage((Map<String, Object>) nodeTemplates
.get(nodeTemplate)) != null) {
tempModule.setLanguage(getNodeTemplateLanguage((Map<String, Object>) nodeTemplates
.get(nodeTemplate)));
}
if (getNodeTemplatePort((Map<String, Object>) nodeTemplates
.get(nodeTemplate)) != null) {
tempModule.setPort(getNodeTemplatePort((Map<String, Object>) nodeTemplates
.get(nodeTemplate)));
}
if (getNodeTemplateHost((Map<String, Object>) nodeTemplates
.get(nodeTemplate)) != null) {
tempModule
.setHost(hosts
.get(getNodeTemplateHost((Map<String, Object>) nodeTemplates
.get(nodeTemplate))));
}
for (String key : groups.keySet()) {
for (String member : getGroupMembers((Map<String, Object>) groups
.get(key))) {
if (member.equals(tempModule.getModuleName())) {
setQosRequirements(
(Map<String, Object>) getQosRequirementsFromGroup((Map<String, Object>) groups
.get(key)), tempModule);
}
}
}
toReturn.add(tempModule);
}
}
return toReturn;
}
private Map<String, Object> getQosRequirementsFromGroup(
Map<String, Object> group) throws AdpParsingException {
try {
List<Map<String, Object>> policies = (List<Map<String, Object>>) group
.get(POLICIES_KEY);
for (Map<String, Object> policy : policies) {
for (String key : policy.keySet()) {
if (key.equals(QOS_REQUIREMENT_POLICY)) {
return (Map<String, Object>) policy
.get(QOS_REQUIREMENT_POLICY);
}
}
}
return null;
} catch (NullPointerException e) {
logger.error("The parser was not able to retrieve the 'QoSRequirements' of one of the group in the current ADP.");
throw new AdpParsingException(
"The parser was not able to retrieve the 'QoSRequirements' of one of the group in the current ADP.");
}
}
private void setQosRequirements(Map<String, Object> qosRequirements,
Module module) throws AdpParsingException {
if (qosRequirements != null) {
for (String requirement : qosRequirements.keySet()) {
if (requirement.equals(RESPONSE_TIME_REQUIREMENT)) {
Map<String, Object> condition = (Map<String, Object>) qosRequirements
.get(requirement);
if (condition.keySet().size() > 1) {
throw new AdpParsingException(
"Error parsing the ADP: found more than 1 condition for a qos requirment.");
} else {
for (String key : condition.keySet()) {
module.setResponseTimeMillis(Double
.parseDouble(condition.get(key).toString()
.split(" ")[0]));
}
}
} else if (requirement.equals(AVAILABILITY_REQUIREMENT)) {
Map<String, Object> condition = (Map<String, Object>) qosRequirements
.get(requirement);
if (condition.keySet().size() > 1) {
throw new AdpParsingException(
"Error parsing the ADP: found more than 1 condition for a qos requirment of module.");
} else {
for (String key : condition.keySet()) {
module.setAvailability(Double.parseDouble(condition
.get(key).toString().split(" ")[0]));
}
}
}
}
}
}
private String getNodeTemplateTypeName(Map<String, Object> nodeTemplate)
throws AdpParsingException {
try {
return (String) nodeTemplate.get(TYPE_KEY);
} catch (NullPointerException e) {
logger.error("The parser was not able to retrieve the 'type' of one of the node templates in the current ADP.");
throw new AdpParsingException(
"The parser was not able to retrieve the 'type' of one of the node templates in the current ADP.");
}
}
private String getNodeTemplateLanguage(Map<String, Object> nodeTemplate)
throws AdpParsingException {
try {
Map<String, Object> properties = (Map<String, Object>) nodeTemplate
.get(PROPERTIES_KEY);
return (String) properties.get(LANGUAGE_KEY);
} catch (NullPointerException e) {
logger.error("The parser was not able to retrieve the 'language' property from one of the node templates in the current ADP.");
throw new AdpParsingException(
"The parser was not able to retrieve the 'language' property from one of the node templates in the current ADP.");
}
}
private String getNodeTemplatePort(Map<String, Object> nodeTemplate) throws AdpParsingException{
try {
Map<String, Object> properties = (Map<String, Object>) nodeTemplate
.get(PROPERTIES_KEY);
return (String) properties.get(PORT_KEY);
} catch (NullPointerException e) {
logger.error("The parser was not able to retrieve the 'port' property from one of the node templates in the current ADP.");
throw new AdpParsingException(
"The parser was not able to retrieve the 'port' property from one of the node templates in the current ADP.");
}
}
private String getNodeTemplateHost(Map<String, Object> nodeTemplate)
throws AdpParsingException {
try {
List<Map<String, Object>> requirements = (List<Map<String, Object>>) nodeTemplate
.get(REQUIREMENTS_KEY);
for (Map<String, Object> requirement : requirements) {
for (String key : requirement.keySet()) {
if (key.equals(HOST_KEY)) {
return (String) requirement.get(key);
}
}
}
throw new NullPointerException();
} catch (NullPointerException e) {
logger.error("The parser was not able to retrieve the 'host' requirement from one of the node templates in the current ADP.");
throw new AdpParsingException(
"The parser was not able to retrieve the 'host' requirement from one of the node templates in the current ADP.");
}
}
private Map<String, Object> getGroupsFromAdp(Map<String, Object> appMap)
throws AdpParsingException {
try {
return (Map<String, Object>) appMap.get(GROUPS_KEY);
} catch (NullPointerException E) {
logger.error("The parser was not able to retrieve the 'groups' from the current ADP.");
throw new AdpParsingException(
"The parser was not able to retrieve the 'groups' from the current ADP.");
}
}
private Map<String, Object> getNodeTemplatesFromAdp(Map<String, Object> adp)
throws AdpParsingException {
try {
Map<String, Object> topology = (Map<String, Object>) adp
.get(TOPOLOGY_KEY);
return (Map<String, Object>) topology.get(NODE_TEMPLATES_KEY);
} catch (NullPointerException E) {
logger.error("The parser was not able to retrieve the 'node_templates' from the current ADP.");
throw new AdpParsingException(
"The parser was not able to retrieve the 'node_templates' from the current ADP.");
}
}
private Map<String, Object> getNodeTypesFromAdp(Map<String, Object> adp)
throws AdpParsingException {
try {
return (Map<String, Object>) adp.get(NODE_TYPES_KEY);
} catch (NullPointerException E) {
logger.error("The parser was not able to retrieve the 'node_types' from the current ADP.");
throw new AdpParsingException(
"The parser was not able to retrieve the 'node_types' from the current ADP.");
}
}
private List<String> getGroupMembers(Map<String, Object> group)
throws AdpParsingException {
try {
for (String key : group.keySet()) {
if (key.equals(MEMBERS_KEY)) {
return coerceStringList((ArrayList)group.get(key));
}
}
throw new NullPointerException();
} catch (NullPointerException e) {
logger.error("The parser was not able to retrieve the 'members' field from one of the 'group' in the current ADP.");
throw new AdpParsingException(
"The parser was not able to retrieve the 'members' field from one of the 'group' in the current ADP.");
}
}
private List<String> coerceStringList(List list){
List<String> result = new ArrayList<>();
if(list!=null){
for(Object item: list){
result.add((String)item);
}
}
return result;
}
}
| apache-2.0 |
jmptrader/Strata | modules/product/src/test/java/com/opengamma/strata/product/index/type/RelativeIborFutureTemplateTest.java | 3758 | /**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.product.index.type;
import static com.opengamma.strata.basics.date.DateSequences.QUARTERLY_IMM;
import static com.opengamma.strata.basics.index.IborIndices.USD_LIBOR_3M;
import static com.opengamma.strata.basics.index.IborIndices.USD_LIBOR_6M;
import static com.opengamma.strata.collect.TestHelper.assertSerialization;
import static com.opengamma.strata.collect.TestHelper.coverBeanEquals;
import static com.opengamma.strata.collect.TestHelper.coverImmutableBean;
import static org.testng.Assert.assertEquals;
import java.time.LocalDate;
import java.time.Period;
import org.testng.annotations.Test;
import com.opengamma.strata.basics.ReferenceData;
import com.opengamma.strata.product.SecurityId;
import com.opengamma.strata.product.index.IborFutureTrade;
/**
* Tests {@link RelativeIborFutureTemplate}.
*/
@Test
public class RelativeIborFutureTemplateTest {

  private static final ReferenceData REF_DATA = ReferenceData.standard();
  private static final IborFutureConvention CONVENTION = ImmutableIborFutureConvention.of(USD_LIBOR_3M, QUARTERLY_IMM);
  private static final IborFutureConvention CONVENTION2 = ImmutableIborFutureConvention.of(USD_LIBOR_6M, QUARTERLY_IMM);
  private static final Period MIN_PERIOD = Period.ofMonths(2);
  private static final int NUMBER = 2;

  //-------------------------------------------------------------------------
  public void test_of() {
    RelativeIborFutureTemplate template = RelativeIborFutureTemplate.of(MIN_PERIOD, NUMBER, CONVENTION);
    // the template must echo back every piece of its definition
    assertEquals(template.getMinimumPeriod(), MIN_PERIOD);
    assertEquals(template.getSequenceNumber(), NUMBER);
    assertEquals(template.getConvention(), CONVENTION);
    assertEquals(template.getIndex(), CONVENTION.getIndex());
  }

  //-------------------------------------------------------------------------
  public void test_createTrade() {
    IborFutureTemplate template = IborFutureTemplate.of(MIN_PERIOD, NUMBER, CONVENTION);
    LocalDate tradeDate = LocalDate.of(2015, 10, 20);
    double tradeQuantity = 3;
    double tradePrice = 0.99;
    double tradeNotional = 100.0;
    SecurityId securityId = SecurityId.of("OG-Future", "GBP-LIBOR-3M-Jun16");
    // a trade built through the template must match one built directly from the convention
    IborFutureTrade actual = template.createTrade(tradeDate, securityId, tradeQuantity, tradeNotional, tradePrice, REF_DATA);
    IborFutureTrade expected = CONVENTION.createTrade(tradeDate, securityId, MIN_PERIOD, NUMBER, tradeQuantity, tradeNotional, tradePrice, REF_DATA);
    assertEquals(actual, expected);
  }

  public void test_calculateReferenceDateFromTradeDate() {
    IborFutureTemplate template = IborFutureTemplate.of(MIN_PERIOD, NUMBER, CONVENTION);
    // 2nd quarterly IMM date at least 2 months after the trade date:
    // 1st candidate is March 2016, so the 2nd is June 2016
    LocalDate tradeDate = LocalDate.of(2015, 10, 20);
    LocalDate expectedImm = LocalDate.of(2016, 6, 15);
    assertEquals(template.calculateReferenceDateFromTradeDate(tradeDate, REF_DATA), expectedImm);
  }

  public void test_approximateMaturity() {
    IborFutureTemplate template = IborFutureTemplate.of(MIN_PERIOD, NUMBER, CONVENTION);
    // approximate maturity of the 2nd future is about half a year out
    assertEquals(template.approximateMaturity(LocalDate.of(2015, 10, 20)), 0.5d, 0.1d);
  }

  //-------------------------------------------------------------------------
  public void coverage() {
    RelativeIborFutureTemplate base = RelativeIborFutureTemplate.of(MIN_PERIOD, NUMBER, CONVENTION);
    coverImmutableBean(base);
    RelativeIborFutureTemplate other = RelativeIborFutureTemplate.of(Period.ofMonths(3), NUMBER + 1, CONVENTION2);
    coverBeanEquals(base, other);
  }

  public void test_serialization() {
    RelativeIborFutureTemplate template = RelativeIborFutureTemplate.of(MIN_PERIOD, NUMBER, CONVENTION);
    assertSerialization(template);
  }

}
| apache-2.0 |
fastcat-co/fastcatsearch | server/src/main/java/org/fastcatsearch/ir/dictionary/ReloadableDictionary.java | 152 | package org.fastcatsearch.ir.dictionary;
/**
 * A dictionary whose contents can be refreshed at runtime without
 * recreating the dictionary instance.
 */
public interface ReloadableDictionary {

    /**
     * Reloads this dictionary from the supplied source object.
     *
     * @param object implementation-specific reload source
     * @throws IllegalArgumentException if the source object is not usable
     */
    void reload(Object object) throws IllegalArgumentException;
}
| apache-2.0 |
structurizr/java | structurizr-client/test/unit/com/structurizr/encryption/MockEncryptionStrategy.java | 400 | package com.structurizr.encryption;
/**
 * Trivial {@link EncryptionStrategy} for tests: "encrypts" by reversing the
 * text, so {@code decrypt(encrypt(s))} round-trips back to {@code s}.
 */
class MockEncryptionStrategy extends EncryptionStrategy {

    @Override
    public String encrypt(String plaintext) throws Exception {
        return reverse(plaintext);
    }

    @Override
    public String decrypt(String ciphertext) throws Exception {
        return reverse(ciphertext);
    }

    // Shared by both directions; reversing twice restores the original.
    private static String reverse(String text) {
        return new StringBuilder(text).reverse().toString();
    }

}
| apache-2.0 |
apache/jackrabbit-oak | oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongodProcess.java | 4322 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.document.mongo;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import com.mongodb.ServerAddress;
import org.apache.commons.io.FileUtils;
import de.flapdoodle.embed.mongo.MongodStarter;
import de.flapdoodle.embed.mongo.config.ImmutableMongoCmdOptions;
import de.flapdoodle.embed.mongo.config.ImmutableMongodConfig;
import de.flapdoodle.embed.mongo.config.MongodConfig;
import de.flapdoodle.embed.mongo.config.Net;
import de.flapdoodle.embed.mongo.config.Storage;
import de.flapdoodle.embed.mongo.distribution.Feature;
import de.flapdoodle.embed.mongo.distribution.Versions;
import de.flapdoodle.embed.process.io.directories.Directory;
import de.flapdoodle.embed.process.io.directories.FixedPath;
import de.flapdoodle.embed.process.io.file.Files;
import de.flapdoodle.embed.process.runtime.IStopable;
/**
* Helper class for starting/stopping a mongod process.
*/
public class MongodProcess {

    // Fixed mongod version used for every embedded test instance.
    private static final String VERSION = "4.2.16";

    // All database files live below target/tmp so they stay inside the build dir.
    private static final Directory TMP_DIR = join(new FixedPath("target"), new FixedPath("tmp"));

    // Handle of the running process; null while stopped. Guarded by 'this'.
    private IStopable process;

    private final MongodStarter starter;
    private final MongodConfig config;

    /**
     * Prepares (but does not start) a mongod process description.
     *
     * @param starter the flapdoodle starter used to launch the process
     * @param rsName replica set name, or null for a standalone instance
     * @param port TCP port the instance will bind to on loopback
     * @throws IOException if the data directory cannot be prepared
     */
    MongodProcess(MongodStarter starter, String rsName, int port)
            throws IOException {
        this.starter = starter;
        this.config = createConfiguration(rsName, port);
    }

    /**
     * Starts the mongod process.
     *
     * @throws IOException if the process cannot be prepared or launched
     * @throws IllegalStateException if it is already running
     */
    public synchronized void start() throws IOException {
        if (process != null) {
            throw new IllegalStateException("Already started");
        }
        process = starter.prepare(config).start();
    }

    /**
     * Stops the mongod process.
     *
     * @throws IllegalStateException if it is not currently running
     */
    public synchronized void stop() {
        if (process == null) {
            throw new IllegalStateException("Already stopped");
        }
        process.stop();
        // Clearing the handle marks the instance as stopped (see isStopped()).
        process = null;
    }

    public synchronized boolean isStopped() {
        return process == null;
    }

    /** Returns the host/port this instance is (or will be) listening on. */
    public ServerAddress getAddress() {
        return new ServerAddress(config.net().getBindIp(), config.net().getPort());
    }

    // Composes two Directory objects into one resolving to left/right on disk.
    static Directory join(final Directory left, final Directory right) {
        return new Directory() {
            @Override
            public boolean isGenerated() {
                return left.isGenerated() || right.isGenerated();
            }

            @Override
            public File asFile() {
                return Files.fileOf(left.asFile(), right.asFile());
            }
        };
    }

    // Builds the mongod configuration: loopback-only binding, optional replica
    // set storage, and journaling enabled (useNoJournal = false).
    private MongodConfig createConfiguration(String rsName, int p)
            throws IOException {
        return ImmutableMongodConfig.builder()
                .version(Versions.withFeatures(() -> VERSION, Feature.NO_HTTP_INTERFACE_ARG))
                .net(new Net(InetAddress.getLoopbackAddress().getHostAddress(), p, false))
                .replication(newStorage(p, rsName))
                // enable journal
                .cmdOptions(ImmutableMongoCmdOptions.builder()
                        .useNoPrealloc(false)
                        .useNoJournal(false)
                        .useSmallFiles(false)
                        .build())
                .build();
    }

    // Creates a fresh per-port data directory (wiping any leftover from a
    // previous run) and sizes the oplog only when a replica set is requested.
    private Storage newStorage(int port, String rs) throws IOException {
        File dbPath = new File(TMP_DIR.asFile(), "mongod-" + port);
        if (dbPath.exists()) {
            FileUtils.deleteDirectory(dbPath);
        }
        int oplogSize = rs != null ? 512 : 0;
        return new Storage(dbPath.getAbsolutePath(), rs, oplogSize);
    }
}
| apache-2.0 |
mgillian/WebproxyPortlet | src/test/java/org/jasig/portlet/proxy/service/web/MultiRequestHttpClientServiceImplTest.java | 3763 | /**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portlet.proxy.service.web;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.verify;
import javax.portlet.PortletPreferences;
import javax.portlet.PortletRequest;
import javax.portlet.PortletSession;
import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
public class MultiRequestHttpClientServiceImplTest {

    // Preference keys mirrored from the implementation's timeout configuration.
    private static final String HTTP_CLIENT_CONNECTION_TIMEOUT = "httpClientConnectionTimeout";
    private static final String HTTP_CLIENT_SOCKET_TIMEOUT = "httpClientSocketTimeout";
    // Defaults (milliseconds) the service falls back to when no preference is set.
    private static final int DEFAULT_HTTP_CLIENT_CONNECTION_TIMEOUT = 10000;
    private static final int DEFAULT_HTTP_CLIENT_SOCKET_TIMEOUT = 10000;

    @Mock PortletRequest request;
    @Mock PortletPreferences preferences;
    @Mock PortletSession session;
    @Mock DefaultHttpClient client;

    MultiRequestHttpClientServiceImpl service;

    @Before
    public void setUp() {
        MockitoAnnotations.initMocks(this);
        service = new MultiRequestHttpClientServiceImpl();
        // Every code path under test reads preferences and the session off the request.
        when(request.getPreferences()).thenReturn(preferences);
        when(request.getPortletSession()).thenReturn(session);
        // Stub the timeout preferences to their defaults so client creation succeeds.
        when(preferences.getValue(HTTP_CLIENT_CONNECTION_TIMEOUT, String.valueOf(DEFAULT_HTTP_CLIENT_CONNECTION_TIMEOUT))).thenReturn(String.valueOf(DEFAULT_HTTP_CLIENT_CONNECTION_TIMEOUT));
        when(preferences.getValue(HTTP_CLIENT_SOCKET_TIMEOUT, String.valueOf(DEFAULT_HTTP_CLIENT_SOCKET_TIMEOUT))).thenReturn(String.valueOf(DEFAULT_HTTP_CLIENT_SOCKET_TIMEOUT));
    }

    // Shared-session key configured + client already cached at APPLICATION scope
    // => the cached instance is returned as-is.
    @Test
    public void testGetSharedClient() {
        when(preferences.getValue(MultiRequestHttpClientServiceImpl.SHARED_SESSION_KEY, null)).thenReturn("sharedSession");
        when(session.getAttribute("sharedSession", PortletSession.APPLICATION_SCOPE)).thenReturn(client);
        HttpClient response = service.getHttpClient(request);
        assertSame(client, response);
    }

    // No shared-session key + client cached at PORTLET scope => cached instance returned.
    @Test
    public void testGetUnsharedClient() {
        when(session.getAttribute(MultiRequestHttpClientServiceImpl.CLIENT_SESSION_KEY, PortletSession.PORTLET_SCOPE)).thenReturn(client);
        HttpClient response = service.getHttpClient(request);
        assertSame(client, response);
    }

    // Shared-session key configured but nothing cached => a new client is created
    // and stored under that key at APPLICATION scope.
    @Test
    public void testCreateSharedClient() {
        when(preferences.getValue(MultiRequestHttpClientServiceImpl.SHARED_SESSION_KEY, null)).thenReturn("sharedSession");
        HttpClient response = service.getHttpClient(request);
        assertNotNull(response);
        verify(session).setAttribute("sharedSession", response, PortletSession.APPLICATION_SCOPE);
    }

    // No shared-session key and nothing cached => a new client is created and
    // stored under the private key at PORTLET scope.
    @Test
    public void testCreateUnsharedClient() {
        HttpClient response = service.getHttpClient(request);
        assertNotNull(response);
        verify(session).setAttribute(MultiRequestHttpClientServiceImpl.CLIENT_SESSION_KEY, response, PortletSession.PORTLET_SCOPE);
    }
}
| apache-2.0 |
rLadia/AttacknidPatch | decompiled_src/Procyon/org/anddev/andengine/extension/physics/box2d/PhysicsConnector.java | 3998 | package org.anddev.andengine.extension.physics.box2d;
import org.anddev.andengine.engine.handler.*;
import org.anddev.andengine.extension.physics.box2d.util.constants.*;
import com.badlogic.gdx.physics.box2d.*;
import org.anddev.andengine.entity.shape.*;
import org.anddev.andengine.util.*;
import com.badlogic.gdx.math.*;
/**
 * Bridges a Box2D {@link Body} and an AndEngine {@link Shape}: on every engine
 * update the shape's position, rotation and velocities are copied from the
 * physics body, with each copy step individually toggleable.
 */
public class PhysicsConnector implements IUpdateHandler, PhysicsConstants
{
    protected final Body mBody;
    protected final float mPixelToMeterRatio;
    protected final Shape mShape;
    protected final float mShapeHalfBaseHeight;
    protected final float mShapeHalfBaseWidth;
    protected boolean mUpdateAngularVelocity;
    protected boolean mUpdateLinearVelocity;
    protected boolean mUpdatePosition;
    protected boolean mUpdateRotation;

    /** Connects shape and body with all updates enabled and the default ratio. */
    public PhysicsConnector(Shape shape, Body body) {
        this(shape, body, true, true, true, true);
    }

    /** Connects shape and body with all updates enabled and a custom ratio. */
    public PhysicsConnector(Shape shape, Body body, float pixelToMeterRatio) {
        this(shape, body, true, true, true, true, pixelToMeterRatio);
    }

    /** Connects shape and body with the default pixel-to-meter ratio of 32. */
    public PhysicsConnector(Shape shape, Body body, boolean updatePosition, boolean updateRotation,
            boolean updateLinearVelocity, boolean updateAngularVelocity) {
        this(shape, body, updatePosition, updateRotation, updateLinearVelocity, updateAngularVelocity, 32.0f);
    }

    /**
     * Full constructor.
     *
     * @param pixelToMeterRatio scale between Box2D meters and screen pixels
     */
    public PhysicsConnector(Shape shape, Body body, boolean updatePosition, boolean updateRotation,
            boolean updateLinearVelocity, boolean updateAngularVelocity, float pixelToMeterRatio) {
        this.mShape = shape;
        this.mBody = body;
        this.mUpdatePosition = updatePosition;
        this.mUpdateRotation = updateRotation;
        this.mUpdateLinearVelocity = updateLinearVelocity;
        this.mUpdateAngularVelocity = updateAngularVelocity;
        this.mPixelToMeterRatio = pixelToMeterRatio;
        // Cached half-extents: the body position is the center, the shape
        // position is the top-left corner.
        this.mShapeHalfBaseWidth = 0.5f * shape.getBaseWidth();
        this.mShapeHalfBaseHeight = 0.5f * shape.getBaseHeight();
    }

    public Body getBody() {
        return this.mBody;
    }

    public Shape getShape() {
        return this.mShape;
    }

    public boolean isUpdateAngularVelocity() {
        return this.mUpdateAngularVelocity;
    }

    public boolean isUpdateLinearVelocity() {
        return this.mUpdateLinearVelocity;
    }

    public boolean isUpdatePosition() {
        return this.mUpdatePosition;
    }

    public boolean isUpdateRotation() {
        return this.mUpdateRotation;
    }

    /** Copies the enabled body properties onto the shape once per engine tick. */
    @Override
    public void onUpdate(float pSecondsElapsed) {
        Shape shape = this.mShape;
        Body body = this.mBody;

        if (this.mUpdatePosition) {
            Vector2 position = body.getPosition();
            float ratio = this.mPixelToMeterRatio;
            // Convert center-of-mass meters to top-left-corner pixels.
            shape.setPosition(ratio * position.x - this.mShapeHalfBaseWidth,
                    ratio * position.y - this.mShapeHalfBaseHeight);
        }

        if (this.mUpdateRotation) {
            shape.setRotation(MathUtils.radToDeg(body.getAngle()));
        }

        if (this.mUpdateLinearVelocity) {
            Vector2 linearVelocity = body.getLinearVelocity();
            shape.setVelocity(linearVelocity.x, linearVelocity.y);
        }

        if (this.mUpdateAngularVelocity) {
            shape.setAngularVelocity(body.getAngularVelocity());
        }
    }

    @Override
    public void reset() {
        // Nothing to reset: all state is driven by the body each update.
    }

    public void setUpdateAngularVelocity(boolean updateAngularVelocity) {
        this.mUpdateAngularVelocity = updateAngularVelocity;
    }

    public void setUpdateLinearVelocity(boolean updateLinearVelocity) {
        this.mUpdateLinearVelocity = updateLinearVelocity;
    }

    public void setUpdatePosition(boolean updatePosition) {
        this.mUpdatePosition = updatePosition;
    }

    public void setUpdateRotation(boolean updateRotation) {
        this.mUpdateRotation = updateRotation;
    }
}
| apache-2.0 |
rzel/xruby | src/com/xruby/runtime/lang/ObjectSpace.java | 1775 | /**
* Copyright 2005-2007 Xue Yong Zhi
* Distributed under the Apache License
*/
package com.xruby.runtime.lang;
import java.util.WeakHashMap;
import com.xruby.runtime.builtin.ObjectFactory;
import com.xruby.runtime.builtin.RubyFixnum;
import com.xruby.runtime.lang.annotation.RubyLevelMethod;
import com.xruby.runtime.lang.annotation.RubyLevelModule;
//we are using map as list here.
@RubyLevelModule(name="ObjectSpace")
public class ObjectSpace {
    // Used as a weak *set*: keys are the tracked objects, the mapped value is
    // always null. Entries vanish once a RubyValue becomes only weakly
    // reachable, so registration here does not prevent garbage collection.
    private static WeakHashMap<RubyValue, RubyValue> map_= new WeakHashMap<RubyValue, RubyValue>();

    // Registers a newly created Ruby object so each_object can enumerate it.
    public static void add(RubyValue v) {
        map_.put(v, null);
    }

    // ObjectSpace.each_object { |obj| ... } - yields every tracked live object
    // and returns the number of objects visited.
    @RubyLevelMethod(name="each_object", singleton=true)
    public static RubyFixnum rubyEachObject(RubyValue receiver, RubyBlock block) {
        int n = ObjectSpace.each_object(receiver, block);
        return ObjectFactory.createFixnum(n);
    }

    // ObjectSpace.each_object(Module) { |obj| ... } - restricts iteration to
    // objects that are kind_of? the given module/class.
    @RubyLevelMethod(name="each_object", singleton=true)
    public static RubyFixnum rubyEachObject(RubyValue receiver, RubyValue arg, RubyBlock block) {
        int n = ObjectSpace.each_object(receiver, (RubyModule)arg, block);
        return ObjectFactory.createFixnum(n);
    }

    // Yields each tracked object to the block; returns the visit count.
    // NOTE(review): iterating a WeakHashMap's keySet while invoking arbitrary
    // Ruby blocks can see entries disappear (GC) or be added mid-iteration -
    // confirm a ConcurrentModificationException here is acceptable.
    private static int each_object(RubyValue receiver, RubyBlock block) {
        int i = 0;
        for (RubyValue v : map_.keySet()) {
            block.invoke(receiver, v);
            ++i;
        }
        return i;
    }

    // Filtered variant: only yields values that are kind_of? module m.
    private static int each_object(RubyValue receiver, RubyModule m, RubyBlock block) {
        int i = 0;
        for (RubyValue v : map_.keySet()) {
            if (RubyAPI.isKindOf(m, v)) {
                block.invoke(receiver, v);
                ++i;
            }
        }
        return i;
    }
}
| apache-2.0 |
deeplearning4j/deeplearning4j | datavec/datavec-api/src/main/java/org/datavec/api/transform/ops/AggregatorImpls.java | 21724 | /*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.datavec.api.transform.ops;
import com.clearspring.analytics.stream.cardinality.CardinalityMergeException;
import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus;
import lombok.Getter;
import lombok.NoArgsConstructor;
import org.datavec.api.writable.DoubleWritable;
import org.datavec.api.writable.LongWritable;
import org.datavec.api.writable.UnsafeWritableInjector;
import org.datavec.api.writable.Writable;
/**
* Created by huitseeker on 4/28/17.
*/
public class AggregatorImpls {
/**
 * Keeps the first non-null element it sees; combine() is left-favoring,
 * i.e. this accumulator's value wins over the other's.
 */
public static class AggregableFirst<T> implements IAggregableReduceOp<T, Writable> {

    // First non-null element accepted; never replaced afterwards.
    private T elem = null;

    @Override
    public void accept(T element) {
        if (elem == null)
            elem = element;
    }

    @Override
    public <W extends IAggregableReduceOp<T, Writable>> void combine(W accu) {
        // Fix: the original tested "accu instanceof IAggregableReduceOp", which
        // is true for every non-null argument (accu is statically typed as one),
        // so incompatible operators were silently accepted instead of rejected.
        // Check the concrete type, as every sibling operator in this file does.
        if (!(accu instanceof AggregableFirst))
            throw new UnsupportedOperationException("Tried to combine() incompatible " + accu.getClass().getName()
                    + " operator where " + this.getClass().getName() + " expected");
        // Left-favoring: nothing to merge from the other side.
        // NOTE(review): if this side never saw an element, the other side's
        // first element is discarded - confirm combines happen in encounter order.
    }

    @Override
    public Writable get() {
        return UnsafeWritableInjector.inject(elem);
    }
}
/**
 * Keeps the last non-null element it sees; combine() is right-favoring,
 * so the other accumulator's value (which saw the "later" elements)
 * overrides this one's.
 */
public static class AggregableLast<T> implements IAggregableReduceOp<T, Writable> {

    // Most recent non-null element accepted locally.
    private T elem = null;
    // Value taken from a combined (right-hand) accumulator; wins over elem.
    private Writable override = null;

    @Override
    public void accept(T element) {
        if (element != null)
            elem = element;
    }

    @Override
    public <W extends IAggregableReduceOp<T, Writable>> void combine(W accu) {
        // Guard clause instead of if/else: reject foreign operator types first.
        if (!(accu instanceof AggregableLast))
            throw new UnsupportedOperationException("Tried to combine() incompatible " + accu.getClass().getName()
                    + " operator where " + this.getClass().getName() + " expected");
        // right-favoring for last
        override = accu.get();
    }

    @Override
    public Writable get() {
        return (override != null) ? override : UnsafeWritableInjector.inject(elem);
    }
}
/**
 * Sums numeric elements, widening the result type as needed
 * (Integer -> Long -> Float -> Double) based on the operands seen.
 */
public static class AggregableSum<T extends Number> implements IAggregableReduceOp<T, Writable> {
    // Running sum; null until the first element is accepted.
    @Getter
    private Number sum;
    // First element seen; only used as a runtime type witness for the sum.
    @Getter
    private T initialElement; // this value is ignored and jut serves as a subtype indicator

    // Adds two numbers, promoting to the wider of the two operand types.
    private static <U extends Number> Number addNumbers(U a, U b) {
        if (a instanceof Double || b instanceof Double) {
            return new Double(a.doubleValue() + b.doubleValue());
        } else if (a instanceof Float || b instanceof Float) {
            return new Float(a.floatValue() + b.floatValue());
        } else if (a instanceof Long || b instanceof Long) {
            return new Long(a.longValue() + b.longValue());
        } else {
            return new Integer(a.intValue() + b.intValue());
        }
    }

    @Override
    public void accept(T element) {
        if (sum == null) {
            sum = element;
            initialElement = element;
        } else {
            // NOTE(review): elements whose runtime type is not assignable from
            // the first element's type are silently dropped - confirm intended.
            if (initialElement.getClass().isAssignableFrom(element.getClass()))
                sum = addNumbers(sum, element);
        }
    }

    @Override
    public <W extends IAggregableReduceOp<T, Writable>> void combine(W accu) {
        if (accu instanceof AggregableSum) {
            AggregableSum<T> accumulator = (AggregableSum<T>) accu;
            // the type of this now becomes that of the union of initialelement
            if (accumulator.getInitialElement().getClass().isAssignableFrom(initialElement.getClass()))
                initialElement = accumulator.initialElement;
            // NOTE(review): if either accumulator is empty (sum == null) this
            // will throw NullPointerException inside addNumbers - verify callers
            // never combine empty partitions.
            sum = addNumbers(sum, accumulator.getSum());
        } else
            throw new UnsupportedOperationException("Tried to combine() incompatible " + accu.getClass().getName()
                    + " operator where " + this.getClass().getName() + " expected");
    }

    @Override
    public Writable get() {
        return UnsafeWritableInjector.inject(sum);
    }
}
/**
 * Multiplies numeric elements, widening the result type as needed
 * (Integer -> Long -> Float -> Double) based on the operands seen.
 */
public static class AggregableProd<T extends Number> implements IAggregableReduceOp<T, Writable> {
    // Running product; null until the first element is accepted.
    @Getter
    private Number prod;
    // First element seen; only used as a runtime type witness for the product.
    @Getter
    private T initialElement; // this value is ignored and jut serves as a subtype indicator

    // Multiplies two numbers, promoting to the wider of the two operand types.
    private static <U extends Number> Number multiplyNumbers(U a, U b) {
        if (a instanceof Double || b instanceof Double) {
            return new Double(a.doubleValue() * b.doubleValue());
        } else if (a instanceof Float || b instanceof Float) {
            return new Float(a.floatValue() * b.floatValue());
        } else if (a instanceof Long || b instanceof Long) {
            return new Long(a.longValue() * b.longValue());
        } else {
            return new Integer(a.intValue() * b.intValue());
        }
    }

    @Override
    public void accept(T element) {
        if (prod == null) {
            prod = element;
            initialElement = element;
        } else {
            if (initialElement.getClass().isAssignableFrom(element.getClass()))
                prod = multiplyNumbers(prod, element);
        }
    }

    @Override
    public <W extends IAggregableReduceOp<T, Writable>> void combine(W accu) {
        // Fix: this was a copy-paste of AggregableSum.combine - it required the
        // other operator to be an AggregableSum and merged its getSum(), so
        // combining two AggregableProd instances always threw, and combining a
        // Prod with a Sum multiplied the product by the sum.
        if (accu instanceof AggregableProd) {
            AggregableProd<T> accumulator = (AggregableProd<T>) accu;
            // the type of this now becomes that of the union of initialelement
            if (accumulator.getInitialElement().getClass().isAssignableFrom(initialElement.getClass()))
                initialElement = accumulator.initialElement;
            prod = multiplyNumbers(prod, accumulator.getProd());
        } else
            throw new UnsupportedOperationException("Tried to combine() incompatible " + accu.getClass().getName()
                    + " operator where " + this.getClass().getName() + " expected");
    }

    @Override
    public Writable get() {
        return UnsafeWritableInjector.inject(prod);
    }
}
/**
 * Tracks the maximum element seen (null until the first accept).
 */
public static class AggregableMax<T extends Number & Comparable<T>> implements IAggregableReduceOp<T, Writable> {

    @Getter
    private T max = null;

    @Override
    public void accept(T element) {
        if (max == null || max.compareTo(element) < 0)
            max = element;
    }

    @Override
    public <W extends IAggregableReduceOp<T, Writable>> void combine(W accu) {
        // Fix: verify the operator type *before* casting. The original cast
        // unconditionally when max was null, so combining with a foreign
        // operator could throw ClassCastException instead of the intended
        // UnsupportedOperationException; it also NPE'd when the other side's
        // max was null (empty accumulator) and ours was not.
        if (!(accu instanceof AggregableMax))
            throw new UnsupportedOperationException("Tried to combine() incompatible " + accu.getClass().getName()
                    + " operator where " + this.getClass().getName() + " expected");
        T otherMax = ((AggregableMax<T>) accu).getMax();
        if (otherMax != null && (max == null || max.compareTo(otherMax) < 0))
            max = otherMax;
    }

    @Override
    public Writable get() {
        return UnsafeWritableInjector.inject(max);
    }
}
/**
 * Tracks the minimum element seen (null until the first accept).
 */
public static class AggregableMin<T extends Number & Comparable<T>> implements IAggregableReduceOp<T, Writable> {

    @Getter
    private T min = null;

    @Override
    public void accept(T element) {
        if (min == null || min.compareTo(element) > 0)
            min = element;
    }

    @Override
    public <W extends IAggregableReduceOp<T, Writable>> void combine(W accu) {
        // Fix: verify the operator type *before* casting (mirrors AggregableMax).
        // The original cast unconditionally when min was null, so a foreign
        // operator could throw ClassCastException instead of the intended
        // UnsupportedOperationException, and an empty other side caused an NPE.
        if (!(accu instanceof AggregableMin))
            throw new UnsupportedOperationException("Tried to combine() incompatible " + accu.getClass().getName()
                    + " operator where " + this.getClass().getName() + " expected");
        T otherMin = ((AggregableMin<T>) accu).getMin();
        if (otherMin != null && (min == null || min.compareTo(otherMin) > 0))
            min = otherMin;
    }

    @Override
    public Writable get() {
        return UnsafeWritableInjector.inject(min);
    }
}
/**
 * Tracks the minimum and maximum elements seen; get() returns max - min
 * in the element's own numeric type.
 */
public static class AggregableRange<T extends Number & Comparable<T>> implements IAggregableReduceOp<T, Writable> {

    @Getter
    private T min = null;
    @Getter
    private T max = null;

    @Override
    public void accept(T element) {
        if (min == null || min.compareTo(element) > 0)
            min = element;
        if (max == null || max.compareTo(element) < 0)
            max = element;
    }

    @Override
    public <W extends IAggregableReduceOp<T, Writable>> void combine(W accu) {
        // Fix: verify the operator type *before* casting. The original only
        // checked instanceof after already assigning via an unguarded cast, so
        // a foreign operator with a null local min/max triggered a
        // ClassCastException instead of the intended UnsupportedOperationException.
        if (!(accu instanceof AggregableRange))
            throw new UnsupportedOperationException("Tried to combine() incompatible " + accu.getClass().getName()
                    + " operator where " + this.getClass().getName() + " expected");
        AggregableRange<T> other = (AggregableRange<T>) accu;
        if (other.getMax() != null && (max == null || max.compareTo(other.getMax()) < 0))
            max = other.getMax();
        if (other.getMin() != null && (min == null || min.compareTo(other.getMin()) > 0))
            min = other.getMin();
    }

    @Override
    public Writable get() {
        // NOTE(review): Short is not handled here and falls through to the
        // IllegalArgumentException - confirm short columns cannot reach Range.
        if (min.getClass() == Long.class)
            return UnsafeWritableInjector.inject(max.longValue() - min.longValue());
        else if (min.getClass() == Integer.class)
            return UnsafeWritableInjector.inject(max.intValue() - min.intValue());
        else if (min.getClass() == Float.class)
            return UnsafeWritableInjector.inject(max.floatValue() - min.floatValue());
        else if (min.getClass() == Double.class)
            return UnsafeWritableInjector.inject(max.doubleValue() - min.doubleValue());
        else if (min.getClass() == Byte.class)
            return UnsafeWritableInjector.inject(max.byteValue() - min.byteValue());
        else
            throw new IllegalArgumentException(
                    "Wrong type for Aggregable Range operation " + min.getClass().getName());
    }
}
/**
 * Counts the number of elements accepted; element values are ignored.
 */
public static class AggregableCount<T> implements IAggregableReduceOp<T, Writable> {

    // Running element count.
    private Long count = 0L;

    @Override
    public void accept(T element) {
        count = count + 1L;
    }

    @Override
    public <W extends IAggregableReduceOp<T, Writable>> void combine(W accu) {
        // Guard clause instead of if/else: reject foreign operator types first.
        if (!(accu instanceof AggregableCount))
            throw new UnsupportedOperationException("Tried to combine() incompatible " + accu.getClass().getName()
                    + " operator where " + this.getClass().getName() + " expected");
        count += accu.get().toLong();
    }

    @Override
    public Writable get() {
        return new LongWritable(count);
    }
}
/**
 * Computes the arithmetic mean incrementally using Welford's update, which
 * avoids overflow and loss of precision from summing then dividing.
 */
public static class AggregableMean<T extends Number> implements IAggregableReduceOp<T, Writable> {

    // Number of elements accepted so far.
    @Getter
    private Long count = 0L;
    // Running mean of all accepted elements.
    private Double mean = 0D;

    public void accept(T n) {
        // See Knuth TAOCP vol 2, 3rd edition, page 232
        if (count == 0) {
            count = 1L;
            mean = n.doubleValue();
        } else {
            count = count + 1;
            mean = mean + (n.doubleValue() - mean) / count;
        }
    }

    // Merges another mean accumulator via the count-weighted average.
    public <U extends IAggregableReduceOp<T, Writable>> void combine(U acc) {
        if (acc instanceof AggregableMean) {
            Long cnt = ((AggregableMean<T>) acc).getCount();
            Long newCount = count + cnt;
            // NOTE(review): if both sides are empty, newCount is 0 and this
            // divides by zero (NaN) - confirm empty partitions never combine.
            mean = (mean * count + (acc.get().toDouble() * cnt)) / newCount;
            count = newCount;
        } else
            throw new UnsupportedOperationException("Tried to combine() incompatible " + acc.getClass().getName()
                    + " operator where " + this.getClass().getName() + " expected");
    }

    public Writable get() {
        return new DoubleWritable(mean);
    }
}
/**
* This class offers an aggregable reduce operation for the unbiased standard deviation, i.e. the estimator
* of the square root of the arithmetic mean of squared differences to the mean, corrected with Bessel's correction.
*
* See <a href="https://en.wikipedia.org/wiki/Unbiased_estimation_of_standard_deviation">https://en.wikipedia.org/wiki/Unbiased_estimation_of_standard_deviation</a>
* This is computed with Welford's method for increased numerical stability & aggregability.
*/
public static class AggregableStdDev<T extends Number> implements IAggregableReduceOp<T, Writable> {

    // Number of elements accepted so far.
    @Getter
    private Long count = 0L;
    // Running mean (Welford).
    @Getter
    private Double mean = 0D;
    // Welford's M2: sum of squared deviations from the running mean.
    @Getter
    private Double variation = 0D;

    public void accept(T n) {
        if (count == 0) {
            count = 1L;
            mean = n.doubleValue();
            variation = 0D;
        } else {
            Long newCount = count + 1;
            Double newMean = mean + (n.doubleValue() - mean) / newCount;
            Double newVariation = variation + (n.doubleValue() - mean) * (n.doubleValue() - newMean);
            count = newCount;
            mean = newMean;
            variation = newVariation;
        }
    }

    public <U extends IAggregableReduceOp<T, Writable>> void combine(U acc) {
        if (this.getClass().isAssignableFrom(acc.getClass())) {
            AggregableStdDev<T> accu = (AggregableStdDev<T>) acc;

            Long totalCount = count + accu.getCount();
            Double totalMean = (accu.getMean() * accu.getCount() + mean * count) / totalCount;
            // Fix: the original computed a total variation, then discarded it via
            // the self-assignment "variation = variation;", so combine() silently
            // dropped the other accumulator's spread. The discarded formula was
            // also incorrect (it ignored the difference between the two means).
            // Use the standard pairwise merge (Chan et al.):
            //   M2_total = M2_a + M2_b + delta^2 * n_a * n_b / n_total
            double delta = accu.getMean() - mean;
            Double totalVariation =
                    variation + accu.getVariation() + delta * delta * count * accu.getCount() / totalCount;
            count = totalCount;
            mean = totalMean;
            variation = totalVariation;
        } else
            throw new UnsupportedOperationException("Tried to combine() incompatible " + acc.getClass().getName()
                    + " operator where " + this.getClass().getName() + " expected");
    }

    public Writable get() {
        // Sample (Bessel-corrected) standard deviation: sqrt(M2 / (n - 1)).
        return new DoubleWritable(Math.sqrt(variation / (count - 1)));
    }
}
/**
* This class offers an aggregable reduce operation for the biased standard deviation, i.e. the estimator
* of the square root of the arithmetic mean of squared differences to the mean.
*
* See <a href="https://en.wikipedia.org/wiki/Unbiased_estimation_of_standard_deviation">https://en.wikipedia.org/wiki/Unbiased_estimation_of_standard_deviation</a>
* This is computed with Welford's method for increased numerical stability & aggregability.
*/
public static class AggregableUncorrectedStdDev<T extends Number> extends AggregableStdDev<T> {

    @Override
    public Writable get() {
        // Population (biased) form: divide by n instead of Bessel's n - 1.
        return new DoubleWritable(Math.sqrt(this.getVariation() / this.getCount()));
    }
}
/**
* This class offers an aggregable reduce operation for the unbiased variance, i.e. the estimator
* of the arithmetic mean of squared differences to the mean, corrected with Bessel's correction.
*
* See <a href="https://en.wikipedia.org/wiki/Unbiased_estimation_of_standard_deviation">https://en.wikipedia.org/wiki/Unbiased_estimation_of_standard_deviation</a>
* This is computed with Welford's method for increased numerical stability & aggregability.
*/
public static class AggregableVariance<T extends Number> implements IAggregableReduceOp<T, Writable> {

    // Number of elements accepted so far.
    @Getter
    private Long count = 0L;
    // Running mean (Welford).
    @Getter
    private Double mean = 0D;
    // Welford's M2: sum of squared deviations from the running mean.
    @Getter
    private Double variation = 0D;

    public void accept(T n) {
        if (count == 0) {
            count = 1L;
            mean = n.doubleValue();
            variation = 0D;
        } else {
            Long newCount = count + 1;
            Double newMean = mean + (n.doubleValue() - mean) / newCount;
            Double newVariation = variation + (n.doubleValue() - mean) * (n.doubleValue() - newMean);
            count = newCount;
            mean = newMean;
            variation = newVariation;
        }
    }

    public <U extends IAggregableReduceOp<T, Writable>> void combine(U acc) {
        if (this.getClass().isAssignableFrom(acc.getClass())) {
            AggregableVariance<T> accu = (AggregableVariance<T>) acc;

            Long totalCount = count + accu.getCount();
            Double totalMean = (accu.getMean() * accu.getCount() + mean * count) / totalCount;
            // Fix: the original computed a total variation, then discarded it via
            // the self-assignment "variation = variation;", so combine() silently
            // dropped the other accumulator's spread. The discarded formula was
            // also incorrect (it ignored the difference between the two means).
            // Use the standard pairwise merge (Chan et al.):
            //   M2_total = M2_a + M2_b + delta^2 * n_a * n_b / n_total
            double delta = accu.getMean() - mean;
            Double totalVariation =
                    variation + accu.getVariation() + delta * delta * count * accu.getCount() / totalCount;
            count = totalCount;
            mean = totalMean;
            variation = totalVariation;
        } else
            throw new UnsupportedOperationException("Tried to combine() incompatible " + acc.getClass().getName()
                    + " operator where " + this.getClass().getName() + " expected");
    }

    public Writable get() {
        // Sample (Bessel-corrected) variance: M2 / (n - 1).
        return new DoubleWritable(variation / (count - 1));
    }
}
/**
* This class offers an aggregable reduce operation for the population variance, i.e. the uncorrected estimator
* of the arithmetic mean of squared differences to the mean.
*
* See <a href="https://en.wikipedia.org/wiki/Variance#Population_variance_and_sample_variance">https://en.wikipedia.org/wiki/Variance#Population_variance_and_sample_variance</a>
* This is computed with Welford's method for increased numerical stability & aggregability.
*/
public static class AggregablePopulationVariance<T extends Number> extends AggregableVariance<T> {
@Override
public Writable get() {
return new DoubleWritable(this.getVariation() / this.getCount());
}
}
/**
*
* This distinct count is based on streamlib's implementation of "HyperLogLog in Practice:
* Algorithmic Engineering of a State of The Art Cardinality Estimation Algorithm", available
* <a href="http://dx.doi.org/10.1145/2452376.2452456">here</a>.
*
* The relative accuracy is approximately `1.054 / sqrt(2^p)`. Setting
* a nonzero `sp > p` in HyperLogLogPlus(p, sp) would trigger sparse
* representation of registers, which may reduce the memory consumption
* and increase accuracy when the cardinality is small.
* @param <T>
*/
@NoArgsConstructor
public static class AggregableCountUnique<T> implements IAggregableReduceOp<T, Writable> {
private float p = 0.05f;
@Getter
private HyperLogLogPlus hll = new HyperLogLogPlus((int) Math.ceil(2.0 * Math.log(1.054 / p) / Math.log(2)), 0);
public AggregableCountUnique(float precision) {
this.p = precision;
}
@Override
public void accept(T element) {
hll.offer(element);
}
@Override
public <U extends IAggregableReduceOp<T, Writable>> void combine(U acc) {
if (acc instanceof AggregableCountUnique) {
try {
hll.addAll(((AggregableCountUnique<T>) acc).getHll());
} catch (CardinalityMergeException e) {
throw new RuntimeException(e);
}
} else
throw new UnsupportedOperationException("Tried to combine() incompatible " + acc.getClass().getName()
+ " operator where " + this.getClass().getName() + " expected");
}
@Override
public Writable get() {
return new LongWritable(hll.cardinality());
}
}
}
| apache-2.0 |
jk1/intellij-community | platform/projectModel-api/src/com/intellij/openapi/project/ProjectManagerListener.java | 1527 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.project;
import org.jetbrains.annotations.NotNull;
import java.util.EventListener;
/**
 * Listener for Project lifecycle events (open/close) delivered by the
 * ProjectManager. All methods have empty/neutral defaults so implementors only
 * override what they need.
 */
public interface ProjectManagerListener extends EventListener {
  // Shared empty array, useful to avoid allocating new zero-length arrays.
  ProjectManagerListener[] EMPTY_ARRAY = new ProjectManagerListener[0];

  /**
   * Invoked on project open.
   *
   * @param project opening project
   */
  default void projectOpened(Project project) {
  }

  /**
   * Asks whether the project may be closed; the default implementation always agrees.
   *
   * @deprecated Please use {@link VetoableProjectManagerListener}
   */
  @Deprecated
  default boolean canCloseProject(Project project) {
    return true;
  }

  /**
   * Invoked on project close.
   *
   * @param project closing project
   */
  default void projectClosed(Project project) {
  }

  /**
   * Invoked on project close before any closing activities
   *
   * @param project the project being closed
   */
  default void projectClosing(Project project) {
  }

  // NOTE(review): by its name this is fired before the project is saved during
  // close — presumably earlier than projectClosing(); verify against the caller.
  default void projectClosingBeforeSave(@NotNull Project project) {
  }
}
| apache-2.0 |
biospi/seamass-windeps | src/hdf5-1.10.0-patch1/java/src/hdf/hdf5lib/callbacks/H5D_iterate_t.java | 1308 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the files COPYING and Copyright.html. COPYING can be found at the root *
* of the source code distribution tree; Copyright.html can be found at the *
* root level of an installed copy of the electronic HDF5 document set and *
* is linked from the top-level documents page. It can also be found at *
* http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
package hdf.hdf5lib.callbacks;
// Marker interface for user data passed to the H5D iterate callbacks. It
// deliberately declares no members of its own.
public interface H5D_iterate_t {
    /**
     * Any derived interface must define exactly one public variable holding the
     * iteration data, for example:
     * {@code public ArrayList iterdata = new ArrayList();}
     */
}
| apache-2.0 |
dropwizard/dropwizard | dropwizard-util/src/main/java/io/dropwizard/util/Throwables.java | 1439 | package io.dropwizard.util;
/**
* @since 2.0
*
* @deprecated consider using Apache commons-lang3 ExceptionUtils instead
*/
@Deprecated
public final class Throwables {
private Throwables() {
}
/**
* Returns the innermost cause of {@code throwable}. The first throwable in a chain provides
* context from when the error or exception was initially detected. Example usage:
*
* <pre>
* assertEquals("Unable to assign a customer id", Throwables.getRootCause(e).getMessage());
* </pre>
*
* @throws IllegalArgumentException if there is a loop in the causal chain
*/
public static Throwable getRootCause(Throwable throwable) {
// Keep a second pointer that slowly walks the causal chain. If the fast pointer ever catches
// the slower pointer, then there's a loop.
Throwable slowPointer = throwable;
boolean advanceSlowPointer = false;
Throwable cause;
while ((cause = throwable.getCause()) != null) {
throwable = cause;
if (throwable == slowPointer) {
throw new IllegalArgumentException("Loop in causal chain detected.", throwable);
}
if (advanceSlowPointer) {
slowPointer = slowPointer.getCause();
}
advanceSlowPointer = !advanceSlowPointer; // only advance every other iteration
}
return throwable;
}
}
| apache-2.0 |
djechelon/spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/NamespaceHttpOpenIDLoginTests.java | 13109 | /*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.config.annotation.web.configurers;
import java.util.Arrays;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.openid4java.consumer.ConsumerManager;
import org.openid4java.discovery.DiscoveryInformation;
import org.openid4java.discovery.yadis.YadisResolver;
import org.openid4java.message.AuthRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.authentication.AuthenticationDetailsSource;
import org.springframework.security.authentication.AuthenticationServiceException;
import org.springframework.security.config.annotation.ObjectPostProcessor;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.test.SpringTestContext;
import org.springframework.security.config.test.SpringTestContextExtension;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.userdetails.AuthenticationUserDetailsService;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.openid.OpenIDAttribute;
import org.springframework.security.openid.OpenIDAuthenticationFilter;
import org.springframework.security.openid.OpenIDAuthenticationStatus;
import org.springframework.security.openid.OpenIDAuthenticationToken;
import org.springframework.security.openid.OpenIDConsumer;
import org.springframework.security.provisioning.InMemoryUserDetailsManager;
import org.springframework.security.web.authentication.SavedRequestAwareAuthenticationSuccessHandler;
import org.springframework.security.web.authentication.SimpleUrlAuthenticationFailureHandler;
import org.springframework.security.web.authentication.WebAuthenticationDetailsSource;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.request.MockHttpServletRequestBuilder;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
 * Tests to verify that all the functionality of <openid-login> attributes is
 * present
 *
 * @author Rob Winch
 * @author Josh Cummings
 */
@ExtendWith(SpringTestContextExtension.class)
public class NamespaceHttpOpenIDLoginTests {

    // Per-test Spring context; each test registers its own @Configuration and autowires.
    public final SpringTestContext spring = new SpringTestContext(this);

    @Autowired
    MockMvc mvc;

    // Default configuration: unauthenticated requests redirect to the generated
    // /login page, and a failed OpenID login redirects to /login?error.
    @Test
    public void openidLoginWhenUsingDefaultsThenMatchesNamespace() throws Exception {
        this.spring.register(OpenIDLoginConfig.class).autowire();
        this.mvc.perform(get("/")).andExpect(redirectedUrl("http://localhost/login"));
        this.mvc.perform(post("/login/openid").with(csrf())).andExpect(redirectedUrl("/login?error"));
    }

    // Verifies that the attribute-exchange DSL is translated into the
    // fetch-attribute list stored in the HTTP session during OpenID discovery.
    @Test
    public void openidLoginWhenAttributeExchangeConfiguredThenFetchAttributesMatchAttributeList() throws Exception {
        // Stub the consumer manager so association/authentication never contacts a real provider.
        OpenIDLoginAttributeExchangeConfig.CONSUMER_MANAGER = mock(ConsumerManager.class);
        AuthRequest mockAuthRequest = mock(AuthRequest.class);
        DiscoveryInformation mockDiscoveryInformation = mock(DiscoveryInformation.class);
        given(mockAuthRequest.getDestinationUrl(anyBoolean())).willReturn("mockUrl");
        given(OpenIDLoginAttributeExchangeConfig.CONSUMER_MANAGER.associate(any()))
                .willReturn(mockDiscoveryInformation);
        given(OpenIDLoginAttributeExchangeConfig.CONSUMER_MANAGER.authenticate(any(DiscoveryInformation.class), any(),
                any())).willReturn(mockAuthRequest);
        this.spring.register(OpenIDLoginAttributeExchangeConfig.class).autowire();
        // A local web server stands in for the provider's Yadis/XRDS discovery endpoints.
        try (MockWebServer server = new MockWebServer()) {
            String endpoint = server.url("/").toString();
            server.enqueue(new MockResponse().addHeader(YadisResolver.YADIS_XRDS_LOCATION, endpoint));
            server.enqueue(new MockResponse()
                    .setBody(String.format("<XRDS><XRD><Service><URI>%s</URI></Service></XRD></XRDS>", endpoint)));
            MvcResult mvcResult = this.mvc.perform(get("/login/openid")
                    .param(OpenIDAuthenticationFilter.DEFAULT_CLAIMED_IDENTITY_FIELD, "https://www.google.com/1"))
                    .andExpect(status().isFound()).andReturn();
            Object attributeObject = mvcResult.getRequest().getSession()
                    .getAttribute("SPRING_SECURITY_OPEN_ID_ATTRIBUTES_FETCH_LIST");
            assertThat(attributeObject).isInstanceOf(List.class);
            List<OpenIDAttribute> attributeList = (List<OpenIDAttribute>) attributeObject;
            // Each attribute configured below (name/type/required) must be present in the fetch list.
            assertThat(attributeList.stream().anyMatch((attribute) -> "firstname".equals(attribute.getName())
                    && "https://axschema.org/namePerson/first".equals(attribute.getType()) && attribute.isRequired()))
                    .isTrue();
            assertThat(attributeList.stream().anyMatch((attribute) -> "lastname".equals(attribute.getName())
                    && "https://axschema.org/namePerson/last".equals(attribute.getType()) && attribute.isRequired()))
                    .isTrue();
            assertThat(attributeList.stream().anyMatch((attribute) -> "email".equals(attribute.getName())
                    && "https://axschema.org/contact/email".equals(attribute.getType()) && attribute.isRequired()))
                    .isTrue();
        }
    }

    // Custom login page, processing URL and failure URL must be honored the same
    // way the corresponding XML namespace attributes configure them.
    @Test
    public void openidLoginWhenUsingCustomEndpointsThenMatchesNamespace() throws Exception {
        this.spring.register(OpenIDLoginCustomConfig.class).autowire();
        this.mvc.perform(get("/")).andExpect(redirectedUrl("http://localhost/authentication/login"));
        this.mvc.perform(post("/authentication/login/process").with(csrf()))
                .andExpect(redirectedUrl("/authentication/login?failed"));
    }

    // Custom success/failure handlers, details source and user-details service must
    // all be invoked, matching the namespace *-ref attributes.
    @Test
    public void openidLoginWithCustomHandlersThenBehaviorMatchesNamespace() throws Exception {
        OpenIDAuthenticationToken token = new OpenIDAuthenticationToken(OpenIDAuthenticationStatus.SUCCESS,
                "identityUrl", "message", Arrays.asList(new OpenIDAttribute("name", "type")));
        OpenIDLoginCustomRefsConfig.AUDS = mock(AuthenticationUserDetailsService.class);
        User user = new User("user", "password", AuthorityUtils.createAuthorityList("ROLE_USER"));
        given(OpenIDLoginCustomRefsConfig.AUDS.loadUserDetails(any(Authentication.class))).willReturn(user);
        OpenIDLoginCustomRefsConfig.ADS = spy(new WebAuthenticationDetailsSource());
        OpenIDLoginCustomRefsConfig.CONSUMER = mock(OpenIDConsumer.class);
        this.spring.register(OpenIDLoginCustomRefsConfig.class, UserDetailsServiceConfig.class).autowire();
        // First pass: the consumer fails, so the custom failure handler's URL must be used.
        given(OpenIDLoginCustomRefsConfig.CONSUMER.endConsumption(any(HttpServletRequest.class)))
                .willThrow(new AuthenticationServiceException("boom"));
        // @formatter:off
        MockHttpServletRequestBuilder login = post("/login/openid")
                .with(csrf())
                .param("openid.identity", "identity");
        // @formatter:on
        this.mvc.perform(login).andExpect(redirectedUrl("/custom/failure"));
        // Second pass: the consumer succeeds, so the custom success handler's URL must be used.
        reset(OpenIDLoginCustomRefsConfig.CONSUMER);
        given(OpenIDLoginCustomRefsConfig.CONSUMER.endConsumption(any(HttpServletRequest.class))).willReturn(token);
        this.mvc.perform(login).andExpect(redirectedUrl("/custom/targetUrl"));
        verify(OpenIDLoginCustomRefsConfig.AUDS).loadUserDetails(any(Authentication.class));
        verify(OpenIDLoginCustomRefsConfig.ADS).buildDetails(any(Object.class));
    }

    @Configuration
    @EnableWebSecurity
    static class OpenIDLoginConfig extends WebSecurityConfigurerAdapter {

        @Override
        protected void configure(HttpSecurity http) throws Exception {
            // @formatter:off
            http
                .authorizeRequests()
                    .anyRequest().hasRole("USER")
                    .and()
                .openidLogin()
                    .permitAll();
            // @formatter:on
        }

    }

    @Configuration
    @EnableWebSecurity
    static class OpenIDLoginAttributeExchangeConfig extends WebSecurityConfigurerAdapter {

        // Replaced with a mock by the test before the context is wired.
        static ConsumerManager CONSUMER_MANAGER;

        @Override
        protected void configure(HttpSecurity http) throws Exception {
            // @formatter:off
            http
                .authorizeRequests()
                    .anyRequest().hasRole("USER")
                    .and()
                .openidLogin()
                    .consumerManager(CONSUMER_MANAGER)
                    .attributeExchange("https://www.google.com/.*") // attribute-exchange@identifier-match
                        .attribute("email") // openid-attribute@name
                            .type("https://axschema.org/contact/email") // openid-attribute@type
                            .required(true) // openid-attribute@required
                            .count(1) // openid-attribute@count
                            .and()
                        .attribute("firstname")
                            .type("https://axschema.org/namePerson/first")
                            .required(true)
                            .and()
                        .attribute("lastname")
                            .type("https://axschema.org/namePerson/last")
                            .required(true)
                            .and()
                        .and()
                    .attributeExchange(".*yahoo.com.*")
                        .attribute("email")
                            .type("https://schema.openid.net/contact/email")
                            .required(true)
                            .and()
                        .attribute("fullname")
                            .type("https://axschema.org/namePerson")
                            .required(true)
                            .and()
                        .and()
                    .permitAll();
            // @formatter:on
        }

    }

    @Configuration
    @EnableWebSecurity
    static class OpenIDLoginCustomConfig extends WebSecurityConfigurerAdapter {

        @Override
        protected void configure(HttpSecurity http) throws Exception {
            boolean alwaysUseDefaultSuccess = true;
            // @formatter:off
            http
                .authorizeRequests()
                    .anyRequest().hasRole("USER")
                    .and()
                .openidLogin()
                    .permitAll()
                    .loginPage("/authentication/login") // openid-login@login-page
                    .failureUrl("/authentication/login?failed") // openid-login@authentication-failure-url
                    .loginProcessingUrl("/authentication/login/process") // openid-login@login-processing-url
                    .defaultSuccessUrl("/default", alwaysUseDefaultSuccess); // openid-login@default-target-url / openid-login@always-use-default-target
            // @formatter:on
        }

    }

    @Configuration
    @EnableWebSecurity
    static class OpenIDLoginCustomRefsConfig extends WebSecurityConfigurerAdapter {

        // Mocks/spies installed by the test before the context is wired.
        static AuthenticationUserDetailsService AUDS;

        static AuthenticationDetailsSource ADS;

        static OpenIDConsumer CONSUMER;

        @Override
        protected void configure(HttpSecurity http) throws Exception {
            SavedRequestAwareAuthenticationSuccessHandler handler = new SavedRequestAwareAuthenticationSuccessHandler();
            handler.setDefaultTargetUrl("/custom/targetUrl");
            // @formatter:off
            http
                .authorizeRequests()
                    .anyRequest().hasRole("USER")
                    .and()
                .openidLogin()
                    // if using UserDetailsService wrap with new UserDetailsByNameServiceWrapper<OpenIDAuthenticationToken>()
                    .authenticationUserDetailsService(AUDS) // openid-login@user-service-ref
                    .failureHandler(new SimpleUrlAuthenticationFailureHandler("/custom/failure")) // openid-login@authentication-failure-handler-ref
                    .successHandler(handler) // openid-login@authentication-success-handler-ref
                    .authenticationDetailsSource(ADS) // openid-login@authentication-details-source-ref
                    .withObjectPostProcessor(new ObjectPostProcessor<OpenIDAuthenticationFilter>() {
                        @Override
                        public <O extends OpenIDAuthenticationFilter> O postProcess(O filter) {
                            // inject the (mock) OpenID consumer into the filter built by the DSL
                            filter.setConsumer(CONSUMER);
                            return filter;
                        }
                    });
            // @formatter:on
        }

    }

    @Configuration
    static class UserDetailsServiceConfig {

        // In-memory user store backing the custom-refs configuration above.
        @Bean
        UserDetailsService userDetailsService() {
            return new InMemoryUserDetailsManager(
                    User.withDefaultPasswordEncoder().username("user").password("password").roles("USER").build());
        }

    }

}
| apache-2.0 |
Dhandapani/gluster-ovirt | frontend/webadmin/modules/uicommonweb/src/main/java/org/ovirt/engine/ui/uicommonweb/models/storage/StorageModelBehavior.java | 1799 | package org.ovirt.engine.ui.uicommonweb.models.storage;
import org.ovirt.engine.core.common.businessentities.storage_pool;
import org.ovirt.engine.ui.uicommonweb.Linq;
import org.ovirt.engine.ui.uicommonweb.models.Model;
@SuppressWarnings("unused")
public abstract class StorageModelBehavior {
    // Storage model this behavior is bound to.
    private StorageModel storageModel;

    /** @return the storage model bound to this behavior */
    public StorageModel getModel() {
        return storageModel;
    }

    /** Binds the storage model this behavior operates on. */
    public void setModel(StorageModel value) {
        storageModel = value;
    }

    /**
     * Filters candidate data centers; the base implementation performs no
     * filtering and hands the list back unchanged.
     */
    public java.util.List<storage_pool> FilterDataCenter(java.util.List<storage_pool> source) {
        return source;
    }

    /** Hook for subclasses to recompute item availability; no-op by default. */
    public void UpdateItemsAvailability() {
    }

    /**
     * Rebuilds the available-storage-items list, keeping only models whose
     * IsSelectable flag is set.
     */
    public void FilterUnSelectableModels() {
        java.util.ArrayList<Object> selectable = new java.util.ArrayList<Object>();
        for (IStorageModel candidate : Linq.<IStorageModel> Cast(getModel().getItems())) {
            if (((Model) candidate).getIsSelectable()) {
                selectable.add(candidate);
            }
        }
        getModel().getAvailableStorageItems().setItems(selectable);
    }

    /**
     * Records that one storage model finished updating; once every model has
     * reported in, filters out unselectable ones and selects the first item.
     */
    public void OnStorageModelUpdated(IStorageModel model) {
        StorageModel owner = getModel();
        // The list is used for checking update completion.
        owner.UpdatedStorageModels.add(model);
        int expected = Linq.<IStorageModel> Cast(owner.getItems()).size();
        if (owner.UpdatedStorageModels.size() == expected) {
            FilterUnSelectableModels();
            owner.UpdatedStorageModels.clear();
            owner.ChooseFirstItem();
        }
    }
}
| apache-2.0 |
apache/axis1-java | axis-rt-core/src/test/java/test/message/TestSOAPFault.java | 3083 | /*
* Copyright 2002-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package test.message;
import junit.framework.TestCase;
import javax.xml.soap.Detail;
import javax.xml.soap.MessageFactory;
import javax.xml.soap.SOAPBody;
import javax.xml.soap.SOAPElement;
import javax.xml.soap.SOAPFault;
import javax.xml.soap.SOAPMessage;
import org.apache.axis.AxisFault;
import org.w3c.dom.Element;
import java.io.InputStream;
import java.util.Iterator;
/**
* @author steve.johnson@riskmetrics.com (Steve Johnson)
* @author Davanum Srinivas (dims@yahoo.com)
* @author Andreas Veithen
*
* @version $Revision$
*/
public class TestSOAPFault extends TestCase {
/**
* Regression test for AXIS-1008.
*
* @throws Exception
*/
public void testAxis1008() throws Exception {
InputStream in = TestSOAPFault.class.getResourceAsStream("AXIS-1008.xml");
try {
MessageFactory msgFactory = MessageFactory.newInstance();
SOAPMessage msg = msgFactory.createMessage(null, in);
//now attempt to access the fault
if (msg.getSOAPPart().getEnvelope().getBody().hasFault()) {
SOAPFault fault =
msg.getSOAPPart().getEnvelope().getBody().getFault();
System.out.println("Fault: " + fault.getFaultString());
}
} finally {
in.close();
}
}
/**
* Regression test for AXIS-2705. The issue occurs when a SOAP fault has a detail element
* containing text (and not elements). Note that such a SOAP fault violates the SOAP spec, but
* Axis should nevertheless be able to process it.
*
* @throws Exception
*/
public void testAxis2705() throws Exception {
InputStream in = TestSOAPFault.class.getResourceAsStream("AXIS-2705.xml");
try {
MessageFactory msgFactory = MessageFactory.newInstance();
SOAPMessage msg = msgFactory.createMessage(null, in);
SOAPBody body = msg.getSOAPPart().getEnvelope().getBody();
assertTrue(body.hasFault());
SOAPFault fault = body.getFault();
AxisFault axisFault = ((org.apache.axis.message.SOAPFault)fault).getFault();
Element[] details = axisFault.getFaultDetails();
assertEquals(1, details.length);
Element detailElement = details[0];
assertEquals("text", detailElement.getTagName());
} finally {
in.close();
}
}
} | apache-2.0 |
newbiet/zstack | search/src/main/java/org/zstack/query/MysqlQueryBuilderImpl3.java | 71601 | package org.zstack.query;
import org.apache.commons.lang.StringUtils;
import org.objenesis.Objenesis;
import org.objenesis.ObjenesisStd;
import org.objenesis.instantiator.ObjectInstantiator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.core.componentloader.PluginRegistry;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.header.Component;
import org.zstack.header.apimediator.ApiMessageInterceptionException;
import org.zstack.header.apimediator.GlobalApiMessageInterceptor;
import org.zstack.header.configuration.PythonApiBindingWriter;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.message.APIMessage;
import org.zstack.header.query.*;
import org.zstack.header.rest.APINoSee;
import org.zstack.header.search.Inventory;
import org.zstack.header.search.Parent;
import org.zstack.header.search.TypeField;
import org.zstack.utils.*;
import org.zstack.utils.function.Function;
import org.zstack.utils.gson.JSONObjectUtil;
import org.zstack.utils.logging.CLogger;
import javax.persistence.*;
import javax.persistence.metamodel.StaticMetamodel;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.Timestamp;
import java.util.*;
import java.util.Map.Entry;
import static org.zstack.utils.StringDSL.s;
/**
*/
public class MysqlQueryBuilderImpl3 implements Component, QueryBuilder, GlobalApiMessageInterceptor, PythonApiBindingWriter {
    private static final CLogger logger = Utils.getLogger(MysqlQueryBuilderImpl3.class);

    // Facade for database access.
    @Autowired
    private DatabaseFacade dbf;
    // Factory for structured error codes returned to API callers.
    @Autowired
    private ErrorFacade errf;
    // Registry used to discover query-related extension points.
    @Autowired
    private PluginRegistry pluginRgty;
    // Instantiates inventory objects without invoking their constructors
    // (see EntityInfo.objectInstantiator).
    private Objenesis objenesis = new ObjenesisStd();
    // Reserved pseudo condition names — presumably for tag-based query
    // conditions; verify against the code that consumes them.
    private static final String USER_TAG = "__userTag__";
    private static final String SYSTEM_TAG = "__systemTag__";
@Override
public List<Class> getMessageClassToIntercept() {
List<Class> clz = new ArrayList<Class>();
clz.add(APIQueryMessage.class);
return clz;
}
    /**
     * @return FRONT — this interceptor is placed at the front of the global
     *         API-message interceptor chain, so the alias rewriting in
     *         intercept() happens before later interceptors see the conditions.
     */
    @Override
    public InterceptorPosition getPosition() {
        return InterceptorPosition.FRONT;
    }
@Override
public APIMessage intercept(APIMessage msg) throws ApiMessageInterceptionException {
List<ExpandedQueryAliasInfo> infos = aliasInfos.get(msg.getClass());
if (infos != null) {
APIQueryMessage qmsg = (APIQueryMessage) msg;
for (QueryCondition qcond : qmsg.getConditions()) {
for (ExpandedQueryAliasInfo info : infos) {
if (qcond.getName().startsWith(String.format("%s.",info.alias))) {
qcond.setName(qcond.getName().replaceFirst(info.alias, info.expandField));
}
}
}
}
return msg;
}
private class ExpandedQueryAliasInfo {
Class queryMessageClass;
Class inventoryClassDefiningThisAlias;
Class inventoryClass;
String expandField;
String alias;
boolean isFromAnnotation;
void check() {
String[] slices = expandField.split("\\.");
String firstExpand = slices[0];
if (isFromAnnotation) {
ExpandedQueries eqs = (ExpandedQueries) inventoryClassDefiningThisAlias.getAnnotation(ExpandedQueries.class);
DebugUtils.Assert(eqs != null, String.format("inventory[%s] having annotation[ExpandedQueryAliases] must also have annotation[ExpandedQueries]",
inventoryClassDefiningThisAlias.getName()));
for (ExpandedQuery at : eqs.value()) {
if (at.expandedField().equals(firstExpand)) {
return;
}
}
throw new CloudRuntimeException(String.format("inventory[%s] has an expanded query alias[%s], but it doesn't have an expand query that has expandedField[%s]",
inventoryClassDefiningThisAlias.getName(), alias, firstExpand));
} else {
List<ExpandedQueryStruct> expds = expandedQueryStructs.get(inventoryClassDefiningThisAlias);
for (ExpandedQueryStruct s : expds) {
if (s.getExpandedField().equals(firstExpand)) {
return;
}
}
throw new CloudRuntimeException(String.format("inventory[%s] has an expanded query alias[%s](added by AddExpandedQueryExtensionPoint], but the extension doesn't declare any expanded query having expandedField[%s]",
inventoryClassDefiningThisAlias.getClass(), alias, firstExpand));
}
}
}
    /**
     * Per-inventory-class metadata cache: links an @Inventory class to its JPA
     * entity class, metamodel class, primary key, type field, expanded queries,
     * aliases and valueOf conversion methods. Built once per class by
     * buildEntityInfo()/populateEntityInfo().
     */
    private class EntityInfo {
        EntityInfo parent;                      // parent inventory's info, if @Inventory declares a parent
        List<EntityInfo> children = new ArrayList<EntityInfo>();
        Inventory inventoryAnnotation;          // the @Inventory annotation of inventoryClass
        Class entityClass;                      // the JPA VO class mapped by the inventory
        Class jpaMetaClass;                     // the @StaticMetamodel class for entityClass (FooVO_)
        Class inventoryClass;
        String primaryKey;                      // name of the @Id field on entityClass
        Field entityPrimaryKeyField;
        Field inventoryPrimaryKeyField;         // may be null if the inventory doesn't expose the PK
        Field inventoryTypeField;               // @TypeField on the inventory, if any
        Field entityTypeField;
        Map<String, ExpandedQueryStruct> expandedQueries = new HashMap<String, ExpandedQueryStruct>();
        // child EntityInfo keyed by the @Parent type string
        Map<String, EntityInfo> flatTypeEntityMap = new HashMap<String, EntityInfo>();
        Method inventoryValueOf;                // static Inventory.valueOf(VO)
        Method inventoryCollectionValueOf;      // static Inventory.valueOf(Collection) (name configurable)
        ObjectInstantiator objectInstantiator;  // constructor-bypassing instantiator for the inventory
        Map<String, Field> allFieldsMap = new HashMap<String, Field>();
        Map<String, ExpandedQueryAliasInfo> aliases = new HashMap<String, ExpandedQueryAliasInfo>();
        // fields neither @Unqueryable nor @Queryable — directly queryable columns
        List<String> premitiveFieldNames = new ArrayList<String>();

        EntityInfo(Class invClass) throws NoSuchMethodException {
            // Resolve the mapped VO class and validate it really is a JPA entity.
            inventoryAnnotation = (Inventory) invClass.getAnnotation(Inventory.class);
            entityClass = inventoryAnnotation.mappingVOClass();
            if (!entityClass.isAnnotationPresent(Entity.class)) {
                throw new CloudRuntimeException(String.format("class[%s] is not annotated by @Entity, but it's stated as entity class by @Inventory of %s",
                        entityClass.getName(), invClass.getName()));
            }
            // The static metamodel class (FooVO_) must have been scanned by populateEntityInfo().
            jpaMetaClass = metaModelClasses.get(entityClass);
            if (jpaMetaClass == null) {
                throw new CloudRuntimeException(String.format("cannot find JPA meta model class for entity class[%s], the meta model class is expected as %s",
                        entityClass.getName(), entityClass.getName() + "_"));
            }
            inventoryClass = invClass;
            // Locate the @Id primary key on the entity and its mirror on the inventory.
            entityPrimaryKeyField = FieldUtils.getAnnotatedField(Id.class, entityClass);
            primaryKey = entityPrimaryKeyField.getName();
            entityPrimaryKeyField.setAccessible(true);
            inventoryPrimaryKeyField = FieldUtils.getField(primaryKey, inventoryClass);
            if (inventoryPrimaryKeyField != null) {
                inventoryPrimaryKeyField.setAccessible(true);
            }
            // Optional @TypeField: discriminates concrete child inventories (see selectInventoryClass).
            inventoryTypeField = FieldUtils.getAnnotatedFieldOfThisClass(TypeField.class, invClass);
            if (inventoryTypeField != null) {
                inventoryTypeField.setAccessible(true);
                entityTypeField = FieldUtils.getField(inventoryTypeField.getName(), entityClass);
                DebugUtils.Assert(entityTypeField!=null, String.format("the type field[%s] of inventory class[%s] is not on entity class[%s]", inventoryTypeField.getName(), inventoryClass.getName(), entityClass.getName()));
                entityTypeField.setAccessible(true);
            }
            // Collection conversion method defaults to "valueOf" when not customized.
            String methodName = inventoryAnnotation.collectionValueOfMethod();
            if (methodName.equals("")) {
                methodName = "valueOf";
            }
            inventoryCollectionValueOf = invClass.getMethod(methodName, Collection.class);
            inventoryValueOf = invClass.getMethod("valueOf", entityClass);
            // Collect expanded queries registered via extension point...
            List<ExpandedQueryStruct> structs = expandedQueryStructs.get(inventoryClass);
            if (structs != null) {
                for (ExpandedQueryStruct s : structs) {
                    s.check();
                    this.expandedQueries.put(s.getExpandedField(), s);
                }
            }
            // ...and those declared directly with the @ExpandedQueries annotation.
            ExpandedQueries expandedQueries = (ExpandedQueries) invClass.getAnnotation(ExpandedQueries.class);
            if (expandedQueries != null) {
                for (ExpandedQuery e : expandedQueries.value()) {
                    ExpandedQueryStruct s = ExpandedQueryStruct.fromExpandedQueryAnnotation(inventoryClass, e);
                    s.check();
                    this.expandedQueries.put(s.getExpandedField(), s);
                }
            }
            objectInstantiator = objenesis.getInstantiatorOf(inventoryClass);
            // Index every inventory field; those without @Unqueryable/@Queryable are
            // treated as plain (primitive) queryable fields.
            List<Field> allFields = FieldUtils.getAllFields(inventoryClass);
            for (Field f : allFields) {
                f.setAccessible(true);
                allFieldsMap.put(f.getName(), f);
                if (!f.isAnnotationPresent(Unqueryable.class) && !f.isAnnotationPresent(Queryable.class)) {
                    premitiveFieldNames.add(f.getName());
                }
            }
            // Pick up any aliases that were defined for this inventory class.
            for (List<ExpandedQueryAliasInfo> aliasList : aliasInfos.values()) {
                for (ExpandedQueryAliasInfo struct : aliasList) {
                    if (struct.inventoryClassDefiningThisAlias == inventoryClass) {
                        aliases.put(struct.alias, struct);
                    }
                }
            }
        }

        // Indexes child entity infos by their @Parent type string; must run after
        // populateEntityInfo() has linked parents and children.
        void buildFlatTypeEntityMap() {
            for (EntityInfo e : children) {
                Parent pat = e.inventoryAnnotation.parent()[0];
                flatTypeEntityMap.put(pat.type(), e);
            }
        }

        // Reads the @Id field value from an entity (VO) instance via reflection.
        Object getPrimaryKeyValue(Object vo) {
            try {
                return entityPrimaryKeyField.get(vo);
            } catch (IllegalAccessException e) {
                throw new CloudRuntimeException(e);
            }
        }

        /**
         * Picks the concrete inventory class for a query: if the message carries an
         * EQ condition on the @TypeField matching a child's @Parent type, narrow to
         * that child inventory; otherwise use this inventory class.
         */
        Class selectInventoryClass(final APIQueryMessage msg) {
            if (inventoryTypeField == null) {
                return inventoryClass;
            }
            QueryCondition typeCondition = null;
            for (QueryCondition cond : msg.getConditions()) {
                if (QueryOp.EQ.equals(cond.getOp()) && inventoryTypeField.getName().equals(cond.getName())) {
                    typeCondition = cond;
                    break;
                }
            }
            if (typeCondition != null) {
                EntityInfo child = flatTypeEntityMap.get(typeCondition.getValue());
                if (child != null) {
                    return child.inventoryClass;
                }
            }
            return inventoryClass;
        }

        // Registers additional aliases for this inventory (last one wins per alias name).
        void addQueryAliases(List<ExpandedQueryAliasInfo> aliases) {
            for (ExpandedQueryAliasInfo info : aliases) {
                this.aliases.put(info.alias, info);
            }
        }
    }
    // Base descriptor for a sub-query: the inventory class being joined against.
    private class SubQueryInfo {
        Class joinInventoryClass;
    }
    // Sub-query produced by an expanded query declaration (annotation or extension point).
    private class ExpandedSubQuery extends SubQueryInfo {
        ExpandedQueryStruct struct;
    }
    // Sub-query derived from a @Queryable field declared directly on the inventory.
    private class InherentSubQuery extends SubQueryInfo {
        Queryable at;           // the @Queryable annotation driving this sub-query
        Field parentField;      // the inventory field carrying the annotation
    }
    // Cache of per-inventory metadata, filled lazily by buildEntityInfo().
    private Map<Class, EntityInfo> entityInfos = new HashMap<Class, EntityInfo>();
    // JPA entity class -> its @StaticMetamodel class (FooVO -> FooVO_).
    private Map<Class, Class> metaModelClasses = new HashMap<Class, Class>();
    // Condition names excluded from normal field resolution.
    private List<String> escapeConditionNames = new ArrayList<String>();
    // Extensions contributing extra sub-query handling.
    private List<MysqlQuerySubQueryExtension> subQueryExtensions = new ArrayList<MysqlQuerySubQueryExtension>();
    // Expanded queries registered per inventory class (via extension points).
    private Map<Class, List<ExpandedQueryStruct>> expandedQueryStructs = new HashMap<Class, List<ExpandedQueryStruct>>();
    // Extensions that append extra conditions, keyed by message class.
    private Map<Class, List<AddExtraConditionToQueryExtensionPoint>> extraConditionsExts = new HashMap<Class, List<AddExtraConditionToQueryExtensionPoint>>();
    // Expanded-query aliases keyed by query message class (consumed by intercept()).
    private Map<Class, List<ExpandedQueryAliasInfo>> aliasInfos = new HashMap<Class, List<ExpandedQueryAliasInfo>>();
    // Inventory class -> its API query message class.
    private Map<Class, Class> inventoryQueryMessageMap = new HashMap<Class, Class>();
// Memoized factory: returns the cached EntityInfo for the inventory class,
// building and caching a new one on first request.
private EntityInfo buildEntityInfo(Class invClass) throws NoSuchMethodException {
    EntityInfo cached = entityInfos.get(invClass);
    if (cached == null) {
        cached = new EntityInfo(invClass);
        entityInfos.put(invClass, cached);
    }
    return cached;
}
// Scans the classpath for JPA static metamodel classes and @Inventory
// classes, builds an EntityInfo per inventory class, wires up the
// parent/child inventory hierarchy, then lets every entity index its
// children by type string.
private void populateEntityInfo() throws NoSuchMethodException {
    // record each entity's generated JPA static metamodel class
    for (Class metaClz : BeanUtils.scanClass("org.zstack", StaticMetamodel.class)) {
        StaticMetamodel metaAt = (StaticMetamodel) metaClz.getAnnotation(StaticMetamodel.class);
        metaModelClasses.put(metaAt.value(), metaClz);
    }
    for (Class invClz : BeanUtils.scanClass("org.zstack", Inventory.class)) {
        EntityInfo entityInfo = buildEntityInfo(invClz);
        if (entityInfo.inventoryAnnotation.parent().length == 0) {
            continue;
        }
        // link this inventory to its declared parent inventory
        Parent parentAt = entityInfo.inventoryAnnotation.parent()[0];
        Class parentInvClz = parentAt.inventoryClass();
        DebugUtils.Assert(parentInvClz.isAnnotationPresent(Inventory.class), String.format("inventory[%s]'s parent inventory class[%s] is not annotated by @Inventory", entityInfo.inventoryClass.getName(), parentInvClz.getName()));
        EntityInfo parentInfo = buildEntityInfo(parentInvClz);
        entityInfo.parent = parentInfo;
        parentInfo.children.add(entityInfo);
    }
    for (EntityInfo entityInfo : entityInfos.values()) {
        entityInfo.buildFlatTypeEntityMap();
    }
}
/**
 * A single normalized query condition (attribute, operator, value) bound to an
 * inventory class. It renders itself as a JPQL fragment ({@link #toJpql()})
 * and converts its raw string value into the entity field's Java type
 * ({@link #normalizeValue()}).
 */
private class MetaCondition {
    // attribute name as the API caller sees it (a field of the inventory class)
    String attr;
    // comparison operator, one of QueryOp's string forms
    String op;
    // raw string value from the API message; converted lazily by normalizeValue()
    String value;
    Class inventoryClass;
    // JPQL named-parameter name; assigned in toJpql() and cleared by formatSql()
    // for operators (IS_NULL/NOT_NULL) that bind no parameter
    String attrValueName;
    // true for conditions injected by extensions; such conditions bypass the
    // inventory-field visibility checks
    boolean skipInventoryCheck;
    // per-query index so several conditions on the same attribute get distinct
    // parameter names
    int index;
    private Field entityField;

    QueryCondition toQueryCondtion() {
        QueryCondition qcond = new QueryCondition();
        qcond.setName(attr);
        qcond.setOp(op);
        qcond.setValue(value);
        return qcond;
    }

    // Type used for value conversion: the element type for collection fields;
    // Map-typed fields cannot be queried at all.
    private Class getEntityFieldType() {
        if (Collection.class.isAssignableFrom(entityField.getType())) {
            return FieldUtils.getGenericType(entityField);
        } else if (Map.class.isAssignableFrom(entityField.getType())) {
            throw new CloudRuntimeException(String.format("query cannot support Map type. %s.%s",
                    entityField.getDeclaringClass(), entityField.getName()));
        } else {
            return entityField.getType();
        }
    }

    // Converts one string token to the entity field's type; any conversion
    // failure is wrapped in a CloudRuntimeException carrying the bad value.
    private Object doNormalizeValue(String value) {
        try {
            Class entityType = getEntityFieldType();
            if (Timestamp.class.isAssignableFrom(entityType)) {
                return Timestamp.valueOf(value);
            } else if (Enum.class.isAssignableFrom(entityType)) {
                Method valueOf = entityType.getMethod("valueOf", String.class);
                // valueOf is static, so the receiver is ignored; pass null for clarity
                return valueOf.invoke(null, value);
            } else if (Boolean.class.isAssignableFrom(entityType) || Boolean.TYPE.isAssignableFrom(entityType)) {
                return Boolean.valueOf(value);
            } else {
                return TypeUtils.stringToValue(value, entityType);
            }
        } catch (Exception e) {
            throw new CloudRuntimeException(String.format("failed to parse value[%s]", value), e);
        }
    }

    // Produces the typed value bound to the JPQL named parameter, or null for
    // operators that take no value. IN/NOT_IN values are comma separated.
    Object normalizeValue() {
        if (QueryOp.IS_NULL.equals(op) || QueryOp.NOT_NULL.equals(op)) {
            return null;
        }
        if (QueryOp.IN.equals(op) || QueryOp.NOT_IN.equals(op)) {
            List<Object> ret = new ArrayList<Object>();
            for (String it : value.split(",")) {
                ret.add(doNormalizeValue(it.trim()));
            }
            if (ret.isEmpty()) {
                // the query value is like ",,,,": String.split() drops trailing
                // empty tokens; in this case, compliment an empty string so the
                // IN clause stays syntactically valid
                ret.add("");
            }
            return ret;
        } else {
            return doNormalizeValue(value);
        }
    }

    // Renders "entity.attr <op> :param" (or the IN / IS NULL variants).
    // Side effect: nulls attrValueName for parameterless operators so
    // setQueryValue() knows not to bind anything.
    private String formatSql(String entityName, String attr, String op) {
        StringBuilder sb = new StringBuilder();
        sb.append(String.format("%s.%s", entityName, attr));
        if (QueryOp.IN.equals(op)) {
            sb.append(String.format(" in (:%s)", attrValueName));
        } else if (QueryOp.NOT_IN.equals(op)) {
            sb.append(String.format(" not in (:%s)", attrValueName));
        } else if (QueryOp.IS_NULL.equals(op)) {
            sb.append(" is null");
            attrValueName = null;
        } else if (QueryOp.NOT_NULL.equals(op)) {
            sb.append(" is not null");
            attrValueName = null;
        } else {
            sb.append(String.format(" %s :%s", op, attrValueName));
        }
        return sb.toString();
    }

    // Renders this condition as a JPQL fragment. A plain field becomes a direct
    // comparison on the entity; a @Queryable field becomes a correlated
    // sub-select through the mapping inventory's VO class.
    String toJpql() {
        String entityName = inventoryClass.getSimpleName().toLowerCase();
        attrValueName = entityName + "_" + attr + "_" + "value" + index;
        Field inventoryField = FieldUtils.getField(attr, inventoryClass);
        if (!skipInventoryCheck) {
            if (inventoryField == null || inventoryField.isAnnotationPresent(APINoSee.class)) {
                throw new OperationFailureException(errf.instantiateErrorCode(SysErrors.INVALID_ARGUMENT_ERROR,
                        String.format("condition name[%s] is invalid, no such field on inventory class[%s]",
                                attr, inventoryClass.getName())));
            }
            if (inventoryField.isAnnotationPresent(Unqueryable.class)) {
                throw new OperationFailureException(errf.instantiateErrorCode(SysErrors.INVALID_ARGUMENT_ERROR,
                        String.format("condition name[%s] is invalid, field[%s] of inventory[%s] is annotated as @Unqueryable field",
                                attr, attr, inventoryClass.getName())));
            }
        }
        // bug fix: when skipInventoryCheck is true (extension-injected condition)
        // the attribute may not exist on the inventory class at all; the previous
        // unconditional getAnnotation() call threw a NullPointerException here
        Queryable at = inventoryField == null ? null : inventoryField.getAnnotation(Queryable.class);
        EntityInfo info = entityInfos.get(inventoryClass);
        if (at == null) {
            // plain field: validate it against the JPA static metamodel and the
            // entity class, then render a direct comparison
            Field metaField = FieldUtils.getField(attr, info.jpaMetaClass);
            if (metaField == null) {
                throw new OperationFailureException(errf.stringToInvalidArgumentError(String.format("entity meta class[%s] has no field[%s]",
                        info.jpaMetaClass.getName(), attr)));
            }
            entityField = FieldUtils.getField(attr, info.entityClass);
            DebugUtils.Assert(entityField != null, String.format("mismatching between inventory[%s] and entity[%s], field[%s] is not present on entity",
                    inventoryClass.getName(), info.entityClass.getName(), attr));
            return formatSql(entityName, attr, op);
        } else {
            // @Queryable field: validate the JoinColumn wiring and render a
            // sub-select over the mapping inventory's VO class
            entityField = inventoryField;
            JoinColumn jc = at.joinColumn();
            String refName = jc.referencedColumnName();
            DebugUtils.Assert(!"".equals(refName), String.format("referencedColumnName of JoinColumn of field[%s] on inventory class[%s] cannot be empty string",
                    inventoryField.getName(), inventoryClass.getName()));
            String foreignKey = jc.name();
            DebugUtils.Assert(!"".equals(foreignKey), String.format("name of JoinColumn of field[%s] on inventory class[%s] cannot be empty string",
                    inventoryField.getName(), inventoryClass.getName()));
            Class mappingInvClass = at.mappingClass();
            Inventory mappingInvAt = (Inventory) mappingInvClass.getAnnotation(Inventory.class);
            DebugUtils.Assert(mappingInvAt != null, String.format("Mapping inventory class[%s] of inventory class[%s] is not annotated by @Inventory", mappingInvClass.getName(), inventoryClass.getName()));
            Class foreignVOClass = mappingInvAt.mappingVOClass();
            DebugUtils.Assert(FieldUtils.hasField(refName, foreignVOClass), String.format("referencedColumnName of JoinColumn of field[%s] on inventory class[%s] is invalid, class[%s] doesn't have field[%s]",
                    inventoryField.getName(), inventoryClass.getName(), foreignVOClass.getName(), refName));
            DebugUtils.Assert(FieldUtils.hasField(foreignKey, foreignVOClass), String.format("name of JoinColumn of field[%s] on inventory class[%s] is invalid, class[%s] doesn't have field[%s]",
                    inventoryField.getName(), inventoryClass.getName(), foreignVOClass.getName(), foreignKey));
            Map<String, String> var = new HashMap<String, String>();
            var.put("entity", entityName);
            var.put("primaryKey", info.primaryKey);
            var.put("subEntity", foreignVOClass.getSimpleName().toLowerCase());
            var.put("foreignKey", foreignKey);
            var.put("foreignVO", foreignVOClass.getSimpleName());
            if (QueryOp.NOT_IN.equals(op)) {
                // NOT_IN needs special handle: rewrite as "primary key NOT IN
                // (sub-select using IN)" so the inner condition stays positive
                op = QueryOp.IN.toString();
                var.put("condition", formatSql(foreignVOClass.getSimpleName().toLowerCase(), refName, op));
                return s("{entity}.{primaryKey} not in (select {subEntity}.{foreignKey} from {foreignVO} {subEntity} where {condition})").formatByMap(var);
            } else {
                var.put("condition", formatSql(foreignVOClass.getSimpleName().toLowerCase(), refName, op));
                return s("{entity}.{primaryKey} in (select {subEntity}.{foreignKey} from {foreignVO} {subEntity} where {condition})").formatByMap(var);
            }
        }
    }
}
// One node of the query tree assembled by QueryContext: the conditions that
// target a single entity class, plus child QueryObjects that render as nested
// sub-selects. The root node (parent == null) renders the final select.
private class QueryObject {
    EntityInfo info;
    List<MetaCondition> conditions = new ArrayList<MetaCondition>();
    // non-null only for sub queries; the root query has no parent
    QueryObject parent;
    List<QueryObject> children = new ArrayList<QueryObject>();
    // how this sub query joins to its parent; null on the root
    SubQueryInfo subQueryInfo;
    APIQueryMessage msg;
    // NOTE: we hard code tag specific logic here because we think current query model is not sustainable,
    // it worth nothing to waste effort on making this as extension point; we will switch the entire
    // query framework to ANTLR based DSL in next version.
    class TagSqlBuilder {
        // tag operators rendered as "primaryKey in (tag sub-select)"
        List<String> IN_CONDITIONS;
        // negated operators rendered as "primaryKey not in (tag sub-select)";
        // the inner comparison is flipped back to positive by reverseOpIfNeed()
        List<String> NOT_IN_CONDITIONS;
        {
            IN_CONDITIONS = CollectionDSL.list(
                    QueryOp.EQ.toString(),
                    QueryOp.GT.toString(),
                    QueryOp.GT_AND_EQ.toString(),
                    QueryOp.LT.toString(),
                    QueryOp.LT_AND_EQ.toString(),
                    QueryOp.IN.toString(),
                    QueryOp.LIKE.toString(),
                    QueryOp.NOT_NULL.toString()
            );
            NOT_IN_CONDITIONS = CollectionDSL.list(
                    QueryOp.NOT_EQ.toString(),
                    QueryOp.NOT_IN.toString(),
                    QueryOp.IS_NULL.toString(),
                    QueryOp.NOT_LIKE.toString()
            );
        }
        // Maps a negated operator to its positive counterpart; the negation is
        // instead expressed by chooseOp() as "not in" on the outer select.
        private String reverseOpIfNeed(QueryCondition cond) {
            if (QueryOp.NOT_EQ.equals(cond.getOp())) {
                return QueryOp.EQ.toString();
            } else if (QueryOp.NOT_IN.equals(cond.getOp())) {
                return QueryOp.IN.toString();
            } else if (QueryOp.IS_NULL.equals(cond.getOp())) {
                return QueryOp.NOT_NULL.toString();
            } else if (QueryOp.NOT_LIKE.equals(cond.getOp())) {
                return QueryOp.LIKE.toString();
            } else {
                return cond.getOp();
            }
        }
        // Renders "field op value" with values inlined as quoted literals.
        // NOTE(review): cond.getValue() is concatenated into the JPQL string
        // without escaping — looks injectable if tag values may contain quotes;
        // confirm upstream validation of tag condition values.
        private String buildCondition(String field, QueryCondition cond) {
            if (QueryOp.IN.equals(cond.getOp()) || QueryOp.NOT_IN.equals(cond.getOp())) {
                String[] values = cond.getValue().split(",");
                List<String> vals = new ArrayList<String>();
                for (String val : values) {
                    vals.add(String.format("'%s'", val));
                }
                return String.format("%s %s (%s)", field, reverseOpIfNeed(cond), StringUtils.join(vals, ","));
            } else if (QueryOp.IS_NULL.equals(cond.getOp()) || QueryOp.NOT_NULL.equals(cond.getOp())) {
                return String.format("%s %s", field, reverseOpIfNeed(cond));
            } else {
                return String.format("%s %s '%s'", field, reverseOpIfNeed(cond), cond.getValue());
            }
        }
        // Picks the outer operator ("in"/"not in") based on which bucket the
        // condition's operator falls into.
        private String chooseOp (QueryCondition cond) {
            if (IN_CONDITIONS.contains(cond.getOp())) {
                return "in";
            }
            if (NOT_IN_CONDITIONS.contains(cond.getOp())) {
                return "not in";
            }
            throw new CloudRuntimeException(String.format("invalid comparison operator[%s]; %s", cond.getOp(), JSONObjectUtil.toJsonString(cond)));
        }
        // Collects the quoted simple names of the entity class and all its
        // superclasses (up to Object) — tags may be attached at any level of
        // the entity inheritance chain.
        private List<String> getAllResourceTypesForTag() {
            List<String> types = new ArrayList<String>();
            Class c = info.entityClass;
            while (c != Object.class) {
                types.add(String.format("'%s'", c.getSimpleName()));
                c = c.getSuperclass();
            }
            return types;
        }
        // Renders all user/system tag conditions as sub-selects over
        // UserTagVO/SystemTagVO joined with "and"; returns null when the
        // filtered condition list yields nothing.
        String toJpql() {
            List<String> resultQuery = new ArrayList<String>();
            List<String> rtypes = getAllResourceTypesForTag();
            String primaryKey = info.primaryKey;
            String invname = info.inventoryClass.getSimpleName().toLowerCase();
            // keep only the tag conditions; others are handled by MetaCondition
            List<QueryCondition> conds = CollectionUtils.transformToList(conditions, new Function<QueryCondition, MetaCondition>() {
                @Override
                public QueryCondition call(MetaCondition arg) {
                    return USER_TAG.equals(arg.attr) || SYSTEM_TAG.equals(arg.attr) ? arg.toQueryCondtion() : null;
                }
            });
            String typeString = StringUtils.join(rtypes, ",");
            for (QueryCondition cond : conds) {
                if (cond.getName().equals(USER_TAG)) {
                    List<String> condStrs = new ArrayList<String>();
                    condStrs.add(buildCondition("user.tag", cond));
                    condStrs.add(String.format("user.resourceType in (%s)", typeString));
                    resultQuery.add(String.format("%s.%s %s (select user.resourceUuid from UserTagVO user where %s)",
                            invname, primaryKey, chooseOp(cond), StringUtils.join(condStrs, " and ")));
                } else if (cond.getName().equals(SYSTEM_TAG)) {
                    List<String> condStrs = new ArrayList<String>();
                    condStrs.add(buildCondition("sys.tag", cond));
                    condStrs.add(String.format("sys.resourceType in (%s)", typeString));
                    resultQuery.add(String.format("%s.%s %s (select sys.resourceUuid from SystemTagVO sys where %s)",
                            invname, primaryKey, chooseOp(cond), StringUtils.join(condStrs, " and ")));
                }
            }
            if (resultQuery.isEmpty()) {
                return null;
            } else {
                return StringUtils.join(resultQuery, " and ");
            }
        }
    }
    // Renders this node (and its children, recursively) as JPQL. The root
    // produces a full select/count statement; a sub query produces a
    // "primaryKey in (sub-select ...)" fragment for its parent's where clause.
    String toJpql(boolean isCount) {
        List<String> where = new ArrayList<String>();
        boolean hasTag = false;
        int index = 0;
        for (MetaCondition it : conditions) {
            // tag conditions are deferred to TagSqlBuilder below
            if (USER_TAG.equals(it.attr) || SYSTEM_TAG.equals(it.attr)) {
                hasTag = true;
                continue;
            }
            // use an index to differentiate multiple conditions with the same name
            it.index = index ++;
            where.add(it.toJpql());
        }
        //conditions = tmpConditions;
        if (hasTag) {
            where.add(new TagSqlBuilder().toJpql());
        }
        for (QueryObject it : children) {
            where.add(it.toJpql(false));
        }
        if (parent != null) {
            // this is a sub query
            if (subQueryInfo instanceof InherentSubQuery) {
                // join via the @Queryable field's JoinColumn on the parent
                InherentSubQuery isub = (InherentSubQuery) subQueryInfo;
                JoinColumn jc = isub.at.joinColumn();
                String foreignKey = jc.name();
                DebugUtils.Assert(!"".equals(foreignKey), String.format("name of JoinColumn of field[%s] on inventory class[%s] cannot be empty string",
                        isub.parentField.getName(), parent.info.inventoryClass.getName()));
                Class foreignVOClass = info.entityClass;
                DebugUtils.Assert(FieldUtils.hasField(foreignKey, foreignVOClass), String.format("name of JoinColumn of field[%s] on inventory class[%s] is invalid, class[%s] doesn't have field[%s]",
                        isub.parentField.getName(), parent.info.inventoryClass.getName(), foreignVOClass.getName(), foreignKey));
                Map<String, String> var = new HashMap<String, String>();
                var.put("entity", parent.info.inventoryClass.getSimpleName().toLowerCase());
                var.put("primaryKey", parent.info.primaryKey);
                var.put("subEntity", info.inventoryClass.getSimpleName().toLowerCase());
                var.put("foreignKey", foreignKey);
                var.put("foreignVO", foreignVOClass.getSimpleName());
                if (where.isEmpty()) {
                    return s("{entity}.{primaryKey} in (select {subEntity}.{foreignKey} from {foreignVO} {subEntity})").formatByMap(var);
                } else {
                    var.put("condition", StringUtils.join(where, " and ").trim());
                    return s("{entity}.{primaryKey} in (select {subEntity}.{foreignKey} from {foreignVO} {subEntity} where {condition})").formatByMap(var);
                }
            } else if (subQueryInfo instanceof ExpandedSubQuery) {
                // join via an expanded-query definition (foreign key on the
                // parent matches the expanded key on the joined VO)
                ExpandedSubQuery esub = (ExpandedSubQuery) subQueryInfo;
                Inventory joinAt = (Inventory) esub.struct.getInventoryClass().getAnnotation(Inventory.class);
                Class joinVO = joinAt.mappingVOClass();
                Map<String, String> var = new HashMap<String, String>();
                var.put("entity", parent.info.inventoryClass.getSimpleName().toLowerCase());
                var.put("foreignKey", esub.struct.getForeignKey());
                var.put("expandedEntity", esub.struct.getInventoryClass().getSimpleName().toLowerCase());
                var.put("expandedVO", joinVO.getSimpleName());
                var.put("expandedKey", esub.struct.getExpandedInventoryKey());
                if (where.isEmpty()) {
                    return s("{entity}.{foreignKey} in (select {expandedEntity}.{expandedKey} from {expandedVO} {expandedEntity})").formatByMap(var);
                } else {
                    var.put("condition", StringUtils.join(where, " and ").trim());
                    return s("{entity}.{foreignKey} in (select {expandedEntity}.{expandedKey} from {expandedVO} {expandedEntity} where {condition})").formatByMap(var);
                }
            }
            throw new CloudRuntimeException("cannot be here");
        } else {
            // this is root query
            for (MysqlQuerySubQueryExtension ext : subQueryExtensions) {
                String sub = ext.makeSubquery(msg, info.inventoryClass);
                if (sub != null) {
                    where.add(sub);
                }
            }
            String entityName = info.inventoryClass.getSimpleName().toLowerCase();
            String entity = info.entityClass.getSimpleName();
            String condition = StringUtils.join(where, " and ").trim();
            if (isCount) {
                if (where.isEmpty()) {
                    return String.format("select count(%s) from %s %s", entityName, entity, entityName);
                } else {
                    return String.format("select count(%s) from %s %s where %s", entityName, entity, entityName, condition);
                }
            } else {
                String ret = null;
                String selector = null;
                if (msg.isFieldQuery()) {
                    // select only the requested fields (projection query)
                    List<String> ss = new ArrayList<String>();
                    for (String f : msg.getFields()) {
                        ss.add(String.format("%s.%s", entityName, f));
                    }
                    selector = StringUtils.join(ss, ",");
                } else {
                    selector = entityName;
                }
                if (where.isEmpty()) {
                    ret = String.format("select %s from %s %s", selector, entity, entityName);
                } else {
                    ret = String.format("select %s from %s %s where %s", selector, entity, entityName, condition);
                }
                if (msg.getSortBy() != null) {
                    // sortBy is validated against the entity class, not the inventory
                    if (!FieldUtils.hasField(msg.getSortBy(), info.entityClass)) {
                        throw new IllegalArgumentException(String.format("illegal sortBy[%s], entity[%s] doesn't have this field", msg.getSortBy(), info.entityClass.getName()));
                    }
                    ret = String.format("%s order by %s.%s %s", ret, entityName, msg.getSortBy(), msg.getSortDirection().toUpperCase());
                }
                return ret;
            }
        }
    }
}
// Per-request state machine: turns an APIQueryMessage into a tree of
// QueryObjects, renders it to JPQL, executes it, and converts the resulting
// VOs (or field tuples) back into inventory objects.
private class QueryContext {
    private APIQueryMessage msg;
    private Class inventoryClass;
    // root of the query tree, built by build()
    private QueryObject root;
    // one QueryObject per entity class; conditions targeting the same joined
    // entity are merged into a single sub-select ("and" semantics)
    private Map<Class, QueryObject> tmpMap = new HashMap<Class, QueryObject>();
    // Wraps an API condition into a MetaCondition bound to the given entity.
    private MetaCondition buildCondition(QueryCondition qcond, EntityInfo info) {
        MetaCondition mcond = new MetaCondition();
        mcond.attr = qcond.getName();
        mcond.op = qcond.getOp();
        mcond.inventoryClass = info.inventoryClass;
        mcond.value = qcond.getValue();
        return mcond;
    }
    // Handles a dotted condition name ("field.sub.attr"): resolves the first
    // segment to an expanded query, an alias (recursing after substitution),
    // or a @Queryable field; then recurses on the remainder.
    private void buildSubQuery(QueryCondition qcond, QueryObject parent) {
        String[] slices = qcond.getName().split("\\.");
        String currentFieldName = slices[0];
        Class parentInvClass = parent.info.inventoryClass;
        DebugUtils.Assert(parentInvClass != null, String.format("parent inventory class cannot be null. Parent entity class[%s]", parent.info.entityClass.getName()));
        SubQueryInfo subQueryInfo = null;
        ExpandedQueryStruct expandedQuery = parent.info.expandedQueries.get(currentFieldName);
        if (expandedQuery == null) {
            // try finding alias
            ExpandedQueryAliasInfo alias = parent.info.aliases.get(currentFieldName);
            if (alias != null) {
                // rewrite the condition name with the alias expanded, then retry
                // NOTE(review): replaceFirst treats the alias as a regex —
                // confirm alias names are plain identifiers
                QueryCondition ncond = new QueryCondition();
                ncond.setName(qcond.getName().replaceFirst(alias.alias, alias.expandField));
                ncond.setOp(qcond.getOp());
                ncond.setValue(qcond.getValue());
                buildSubQuery(ncond, parent);
                return;
            }
        }
        if (expandedQuery != null) {
            // an expanded query
            ExpandedSubQuery esub = new ExpandedSubQuery();
            esub.struct = expandedQuery;
            esub.joinInventoryClass = expandedQuery.getInventoryClass();
            subQueryInfo = esub;
        } else {
            // fall back to a @Queryable field declared on the parent inventory
            Field currentField = FieldUtils.getField(currentFieldName, parentInvClass);
            DebugUtils.Assert(currentField != null, String.format("cannot find field[%s] on class[%s], wrong subquery name[%s]",
                    currentFieldName, parentInvClass.getName(), qcond.getName()));
            InherentSubQuery isub = new InherentSubQuery();
            Queryable at = currentField.getAnnotation(Queryable.class);
            DebugUtils.Assert(at != null, String.format("nested query field[%s] on inventory[%s] must be annotated as @Queryable", currentFieldName, parentInvClass.getName()));
            isub.at = at;
            isub.joinInventoryClass = at.mappingClass();
            isub.parentField = currentField;
            DebugUtils.Assert(isub.joinInventoryClass.isAnnotationPresent(Inventory.class),
                    String.format("field[%s] of inventory[%s] can only be type of Collection whose generic type is inventory class or a object whose type is inventory class. Current class is %s which is not annotated by @Inventory",
                            currentField.getName(), parentInvClass.getName(), isub.joinInventoryClass.getName())
            );
            subQueryInfo = isub;
        }
        // reuse the QueryObject for this joined entity if one already exists
        EntityInfo info = entityInfos.get(subQueryInfo.joinInventoryClass);
        QueryObject qobj = tmpMap.get(info.entityClass);
        if (qobj == null) {
            qobj = new QueryObject();
            qobj.info = info;
            qobj.parent = parent;
            qobj.subQueryInfo = subQueryInfo;
            qobj.msg = msg;
            parent.children.add(qobj);
            tmpMap.put(info.entityClass, qobj);
        }
        // strip the first segment and recurse (or attach as a leaf condition)
        slices = Arrays.copyOfRange(slices, 1, slices.length);
        String subFieldName = StringUtils.join(slices, ".");
        QueryCondition ncond = new QueryCondition();
        ncond.setName(subFieldName);
        ncond.setOp(qcond.getOp());
        ncond.setValue(qcond.getValue());
        if (!subFieldName.contains(".")) {
            qobj.conditions.add(buildCondition(ncond, info));
        } else {
            buildSubQuery(ncond, qobj);
        }
    }
    // Attaches a non-dotted condition to the QueryObject of its entity class,
    // creating the QueryObject on demand.
    private void buildMetaCondition(QueryCondition qcond, EntityInfo info, boolean skipInventoryCheck) {
        QueryObject qobj = tmpMap.get(info.entityClass);
        if (qobj == null) {
            qobj = new QueryObject();
            qobj.info = info;
            tmpMap.put(info.entityClass, qobj);
        }
        MetaCondition mcond = buildCondition(qcond, info);
        mcond.skipInventoryCheck = skipInventoryCheck;
        qobj.conditions.add(mcond);
    }
    private void buildMetaCondition(QueryCondition qcond, EntityInfo info) {
        buildMetaCondition(qcond, info, false);
    }
    // Builds the query tree from the message's conditions (plus any
    // extension-injected extra conditions) and renders it to JPQL.
    private String build(boolean isCount) {
        root = new QueryObject();
        root.msg = msg;
        root.info = entityInfos.get(inventoryClass);
        DebugUtils.Assert(root.info != null, String.format("class[%s] is not annotated by @Inventory", inventoryClass.getName()));
        tmpMap.put(root.info.entityClass, root);
        for (QueryCondition qcond : msg.getConditions()) {
            // conditions claimed by sub-query extensions are skipped here
            if (escapeConditionNames.contains(qcond.getName())) {
                continue;
            }
            if (!qcond.getName().contains(".")) {
                buildMetaCondition(qcond, root.info);
            } else {
                buildSubQuery(qcond, root);
            }
        }
        // extra conditions are injected with skipInventoryCheck == true because
        // they may reference fields not exposed on the inventory
        List<AddExtraConditionToQueryExtensionPoint> exts = extraConditionsExts.get(msg.getClass());
        if (exts != null) {
            for (AddExtraConditionToQueryExtensionPoint ext : exts) {
                try {
                    for (QueryCondition cond : ext.getExtraQueryConditionForMessage(msg)) {
                        buildMetaCondition(cond, root.info, true);
                    }
                } catch (Throwable t) {
                    // extension failures are deliberately best-effort
                    logger.warn(String.format("unhandled exception when calling %s", ext.getClass().getName()), t);
                }
            }
        }
        return root.toJpql(isCount);
    }
    // Binds every condition's normalized value to its JPQL named parameter,
    // recursing into child queries. Tag and parameterless conditions bind nothing.
    private void setQueryValue(Query q, QueryObject qobj) {
        for (MetaCondition mcond : qobj.conditions) {
            if (USER_TAG.equals(mcond.attr) || SYSTEM_TAG.equals(mcond.attr)) {
                continue;
            }
            Object val = mcond.normalizeValue();
            if (val != null) {
                q.setParameter(mcond.attrValueName, val);
            }
        }
        for (QueryObject child : qobj.children) {
            setQueryValue(q, child);
        }
    }
    // Converts the queried VOs to inventory objects. When the inventory has
    // child inventories, VOs whose type maps to a child are re-queried so they
    // can be marshalled into the child inventory class, preserving order.
    public List convertVOsToInventories(final List vos) {
        try {
            if (vos.isEmpty()) {
                return new ArrayList();
            }
            if (root.info.children.isEmpty()) {
                // no inheritance: a single static valueOf-collection call suffices
                return (List) root.info.inventoryCollectionValueOf.invoke(inventoryClass, vos);
            }
            // LinkedHashMap keeps the original result order while child rows
            // are resolved out-of-band
            final LinkedHashMap flatMap = new LinkedHashMap();
            final List primaryKeysNeedResolve = new ArrayList();
            for (Object vo : vos) {
                String type = (String) root.info.entityTypeField.get(vo);
                Object priKey = root.info.getPrimaryKeyValue(vo);
                if (!root.info.flatTypeEntityMap.containsKey(type)) {
                    flatMap.put(priKey, root.info.inventoryValueOf.invoke(inventoryClass, vo));
                } else {
                    // placeholder; filled in by SubInventoryResolver below
                    flatMap.put(priKey, null);
                    primaryKeysNeedResolve.add(priKey);
                }
            }
            if (primaryKeysNeedResolve.isEmpty()) {
                return (List) root.info.inventoryCollectionValueOf.invoke(inventoryClass, vos);
            }
            // the inventory has child inventory inheriting it, we have to find out all child inventory and
            // reload them from DB and keep them in order.
            class SubInventoryResolver {
                class SQL {
                    String sql;
                    EntityInfo entityInfo;
                }
                List<SQL> subInventoryQuerySQL = new ArrayList<SQL>();
                List resolve() throws InvocationTargetException, IllegalAccessException {
                    buildSubInventoryQuerySQL(root.info.children);
                    querySubInventory();
                    List result = new ArrayList(flatMap.values().size());
                    result.addAll(flatMap.values());
                    return result;
                }
                // NOTE(review): @Transactional on a method of a method-local
                // class is unlikely to be honored by proxy-based AOP — confirm
                // the intended transaction boundary
                @Transactional(readOnly = true)
                private void querySubInventory() throws InvocationTargetException, IllegalAccessException {
                    for (SQL sql : subInventoryQuerySQL) {
                        if (primaryKeysNeedResolve.isEmpty()) {
                            break;
                        }
                        TypedQuery<Tuple> q = dbf.getEntityManager().createQuery(sql.sql, Tuple.class);
                        q.setParameter("ids", primaryKeysNeedResolve);
                        List<Tuple> res = q.getResultList();
                        for (Tuple t : res) {
                            Object priKey = t.get(0);
                            Object vo = t.get(1);
                            // fill the placeholder slot, keeping original order
                            flatMap.put(priKey, sql.entityInfo.inventoryValueOf.invoke(sql.entityInfo.inventoryClass, vo));
                            primaryKeysNeedResolve.remove(priKey);
                        }
                    }
                }
                private void buildSubInventoryQuerySQL(List<EntityInfo> infos) {
                    // child queries execute first
                    for (EntityInfo info : infos) {
                        if (!info.children.isEmpty()) {
                            buildSubInventoryQuerySQL(info.children);
                        }
                    }
                    for (EntityInfo info : infos) {
                        SQL sql = new SQL();
                        sql.entityInfo = info;
                        sql.sql = String.format("select e.%s, e from %s e where e.%s in (:ids)", info.primaryKey, info.entityClass.getSimpleName(), info.primaryKey);
                        subInventoryQuerySQL.add(sql);
                    }
                }
            }
            return new SubInventoryResolver().resolve();
        } catch (Exception e) {
            throw new CloudRuntimeException(e);
        }
    }
    // Rejects field-query requests that name non-primitive inventory fields.
    private void validateFields() {
        EntityInfo info = entityInfos.get(inventoryClass);
        for (String f : msg.getFields()) {
            if (!info.premitiveFieldNames.contains(f)) {
                throw new OperationFailureException(errf.stringToInvalidArgumentError(
                        String.format("field[%s] is not a primitive of the inventory %s; you cannot specify it in the parameter 'fields';" +
                                "valid fields are %s", f, info.inventoryClass.getSimpleName(), info.premitiveFieldNames)
                ));
            }
        }
    }
    // Turns field-query tuples into partially populated inventory instances
    // (instantiated without a constructor via Objenesis).
    private List convertFieldsTOPartialInventories(List fieldTuple) {
        if (fieldTuple.isEmpty()) {
            return new ArrayList();
        }
        EntityInfo info = entityInfos.get(inventoryClass);
        List ret = new ArrayList(fieldTuple.size());
        for (Object t : fieldTuple) {
            Tuple tuple = (Tuple) t;
            Object inv = info.objectInstantiator.newInstance();
            // tuple columns are in the same order as msg.getFields()
            for (int i = 0; i < msg.getFields().size(); i++) {
                String fname = msg.getFields().get(i);
                Object value = tuple.get(i);
                Field f = info.allFieldsMap.get(fname);
                try {
                    if (value != null && String.class.isAssignableFrom(f.getType())) {
                        // coerce non-string DB values (e.g. enums) into strings
                        value = value.toString();
                    }
                    f.set(inv, value);
                } catch (IllegalAccessException e) {
                    throw new CloudRuntimeException(e);
                }
            }
            ret.add(inv);
        }
        return ret;
    }
    // Executes the query and returns inventories (or partial inventories for
    // field queries), honoring limit/start paging.
    @Transactional(readOnly = true)
    List query() {
        if (msg.isFieldQuery()) {
            validateFields();
        }
        String jpql = build(false);
        Query q = msg.isFieldQuery() ? dbf.getEntityManager().createQuery(jpql, Tuple.class) : dbf.getEntityManager().createQuery(jpql);
        if (logger.isTraceEnabled()) {
            org.hibernate.Query hq = q.unwrap(org.hibernate.Query.class);
            logger.trace(hq.getQueryString());
        }
        setQueryValue(q, root);
        if (msg.getLimit() != null) {
            q.setMaxResults(msg.getLimit());
        }
        if (msg.getStart() != null) {
            q.setFirstResult(msg.getStart());
        }
        List vos = q.getResultList();
        if (msg.isFieldQuery()) {
            return convertFieldsTOPartialInventories(vos);
        } else {
            return convertVOsToInventories(vos);
        }
    }
    // Executes the count variant of the query.
    @Transactional(readOnly = true)
    long count() {
        String jpql = build(true);
        Query q = dbf.getEntityManager().createQuery(jpql);
        if (logger.isTraceEnabled()) {
            org.hibernate.Query hq = q.unwrap(org.hibernate.Query.class);
            logger.trace(hq.getQueryString());
        }
        setQueryValue(q, root);
        return (Long) q.getSingleResult();
    }
}
// Collects query-related plugin extensions: sub-query builders (and the
// condition names they own), expanded queries and aliases contributed
// programmatically, and per-message extra-condition extensions.
private void populateExtensions() {
    subQueryExtensions.addAll(pluginRgty.getExtensionList(MysqlQuerySubQueryExtension.class));
    for (MysqlQuerySubQueryExtension subExt : subQueryExtensions) {
        if (subExt.getEscapeConditionNames() != null) {
            escapeConditionNames.addAll(subExt.getEscapeConditionNames());
        }
    }
    for (AddExpandedQueryExtensionPoint ext : pluginRgty.getExtensionList(AddExpandedQueryExtensionPoint.class)) {
        List<ExpandedQueryStruct> structs = ext.getExpandedQueryStructs();
        if (structs != null) {
            for (ExpandedQueryStruct struct : structs) {
                // group structs by the inventory class they expand
                List<ExpandedQueryStruct> grouped = expandedQueryStructs.get(struct.getInventoryClassToExpand());
                if (grouped == null) {
                    grouped = new ArrayList<ExpandedQueryStruct>();
                    expandedQueryStructs.put(struct.getInventoryClassToExpand(), grouped);
                }
                grouped.add(struct);
            }
        }
        List<ExpandedQueryAliasStruct> aliasStructs = ext.getExpandedQueryAliasesStructs();
        if (aliasStructs != null) {
            for (ExpandedQueryAliasStruct as : aliasStructs) {
                ExpandedQueryAliasInfo aliasInfo = new ExpandedQueryAliasInfo();
                aliasInfo.isFromAnnotation = false;
                aliasInfo.inventoryClassDefiningThisAlias = as.getInventoryClass();
                // the alias only makes sense if some @AutoQuery message targets the inventory
                aliasInfo.queryMessageClass = inventoryQueryMessageMap.get(aliasInfo.inventoryClassDefiningThisAlias);
                DebugUtils.Assert(aliasInfo.queryMessageClass != null, String.format("AddExpandedQueryExtensionPoint[%s] defines an expanded query alias[%s], but no query message declares inventory class[%s] to which the alias maps",
                        ext.getClass().getName(), as.getAlias(), as.getInventoryClass()));
                aliasInfo.expandField = as.getExpandedField();
                aliasInfo.alias = as.getAlias();
                List<ExpandedQueryAliasInfo> perMessage = aliasInfos.get(aliasInfo.queryMessageClass);
                if (perMessage == null) {
                    perMessage = new ArrayList<ExpandedQueryAliasInfo>();
                    aliasInfos.put(aliasInfo.queryMessageClass, perMessage);
                }
                perMessage.add(aliasInfo);
            }
        }
    }
    for (AddExtraConditionToQueryExtensionPoint condExt : pluginRgty.getExtensionList(AddExtraConditionToQueryExtensionPoint.class)) {
        for (Class msgClz : condExt.getMessageClassesForAddExtraConditionToQueryExtensionPoint()) {
            List<AddExtraConditionToQueryExtensionPoint> perMessage = extraConditionsExts.get(msgClz);
            if (perMessage == null) {
                perMessage = new ArrayList<AddExtraConditionToQueryExtensionPoint>();
                extraConditionsExts.put(msgClz, perMessage);
            }
            perMessage.add(condExt);
        }
    }
}
// Scans @Inventory classes for @ExpandedQueryAliases annotations and
// registers one ExpandedQueryAliasInfo per alias, keyed by the @AutoQuery
// message class that targets the inventory.
private void buildExpandedQueryAliasInfo() {
    List<Class> invClasses = BeanUtils.scanClass("org.zstack", Inventory.class);
    for (Class invClass : invClasses) {
        ExpandedQueryAliases aliases = (ExpandedQueryAliases) invClass.getAnnotation(ExpandedQueryAliases.class);
        if (aliases == null) {
            continue;
        }
        for (ExpandedQueryAlias alias : aliases.value()) {
            ExpandedQueryAliasInfo info = new ExpandedQueryAliasInfo();
            info.alias = alias.alias();
            info.expandField = alias.expandedField();
            info.queryMessageClass = inventoryQueryMessageMap.get(invClass);
            info.inventoryClassDefiningThisAlias = invClass;
            info.isFromAnnotation = true;
            if (info.queryMessageClass == null) {
                // fixed the previously garbled error message ("but not query
                // message declare this inventory class in AutoQuery annotation")
                throw new CloudRuntimeException(String.format("inventory[%s] declares an expanded query alias, but no query message declares this inventory class in its @AutoQuery annotation",
                        invClass.getName()));
            }
            List<ExpandedQueryAliasInfo> lst = aliasInfos.get(info.queryMessageClass);
            if (lst == null) {
                lst = new ArrayList<ExpandedQueryAliasInfo>();
                aliasInfos.put(info.queryMessageClass, lst);
            }
            lst.add(info);
        }
    }
}
@Override
public boolean start() {
try {
// map every @AutoQuery message class to the inventory class it queries;
// this map must exist before aliases are built below
List<Class> queryMessageClasses = BeanUtils.scanClassByType("org.zstack", APIQueryMessage.class);
for (Class msgClass : queryMessageClasses) {
AutoQuery at = (AutoQuery) msgClass.getAnnotation(AutoQuery.class);
if (at == null) {
logger.warn(String.format("query message[%s] doesn't have AutoQuery annotation, expanded query alias would not take effect", msgClass.getName()));
continue;
}
inventoryQueryMessageMap.put(at.inventoryClass(), msgClass);
}
// NOTE: don't change the order
populateExtensions();
buildExpandedQueryAliasInfo();
populateEntityInfo();
completeAliasInfo();
inheritExpandedQueryAndAliases();
removeSuppressedExpandedQuery();
} catch (Exception e) {
throw new CloudRuntimeException(e);
}
return true;
}
// Drops expanded queries that another expanded query explicitly suppresses
// (via getSuppressedInventoryClass()), e.g. when an inherited expanded query
// is overridden by a more specific one.
private void removeSuppressedExpandedQuery() {
for (EntityInfo info : entityInfos.values()) {
// iterate over a snapshot so removing from info.expandedQueries below
// cannot throw ConcurrentModificationException
Map<String, ExpandedQueryStruct> ess = new HashMap<String, ExpandedQueryStruct>();
ess.putAll(info.expandedQueries);
for (Entry<String, ExpandedQueryStruct> e : ess.entrySet()) {
if (e.getValue().getSuppressedInventoryClass() != null) {
// find the expanded query whose inventory class is being suppressed
ExpandedQueryStruct toSuppressed = null;
for (ExpandedQueryStruct s : info.expandedQueries.values()) {
if (s.getInventoryClass() == e.getValue().getSuppressedInventoryClass()) {
toSuppressed = s;
break;
}
}
DebugUtils.Assert(toSuppressed!=null, String.format("ExpandedQuery[%s] of %s is going to suppress a undefined ExpandedQuery that has inventory class[%s]",
e.getValue().getExpandedField(), info.inventoryClass, e.getValue().getSuppressedInventoryClass()));
info.expandedQueries.remove(toSuppressed.getExpandedField());
logger.debug(String.format("ExpandedQuery[%s] of %s suppresses ExpandedQuery[%s]",
e.getValue().getExpandedField(), info.inventoryClass, toSuppressed.getExpandedField()));
}
}
}
}
// Resolves each alias's dotted expandField path to the inventory class it
// ultimately points at, so alias.inventoryClass is usable at query time.
private void completeAliasInfo() {
class ExpandedQueryAliasInfoCompletion {
ExpandedQueryAliasInfo alias;
ExpandedQueryAliasInfoCompletion(ExpandedQueryAliasInfo alias) {
this.alias = alias;
}
// Walks the dotted path segment by segment; each segment is either an
// expanded query or another alias (resolved recursively). After each
// segment the walk continues from the segment's target entity.
private ExpandedQueryStruct findTargetExpandedQueryStruct(String[] expandedFields, EntityInfo entityInfo) {
ExpandedQueryStruct struct = null;
for (String exf : expandedFields) {
struct = entityInfo.expandedQueries.get(exf);
if (struct == null) {
ExpandedQueryAliasInfo exalias = entityInfo.aliases.get(exf);
DebugUtils.Assert(exalias!=null, String.format("cannot find expanded query or alias[%s] on %s", exf, entityInfo.inventoryClass));
struct = findTargetExpandedQueryStruct(exalias.expandField.split("\\."), entityInfo);
}
entityInfo = entityInfos.get(struct.getInventoryClass());
}
return struct;
}
void complete() {
EntityInfo entityInfo = entityInfos.get(alias.inventoryClassDefiningThisAlias);
String[] expandedFields = alias.expandField.split("\\.");
DebugUtils.Assert(expandedFields.length!=0, String.format("alias[%s] defined in %s is invalid", alias.expandField, alias.inventoryClassDefiningThisAlias));
ExpandedQueryStruct struct = findTargetExpandedQueryStruct(expandedFields, entityInfo);
alias.inventoryClass = struct.getInventoryClass();
alias.check();
}
}
for (List<ExpandedQueryAliasInfo> infos : aliasInfos.values()) {
for (ExpandedQueryAliasInfo alias : infos) {
new ExpandedQueryAliasInfoCompletion(alias).complete();
}
}
}
/**
 * Copies expanded queries and aliases from an ancestor entity onto {@code current},
 * then recurses up the ancestor chain so the whole inheritance hierarchy is merged.
 *
 * @param current  the entity receiving the inherited definitions
 * @param ancestor the ancestor being inherited from; recursion terminates at null
 */
private void inheritExpandedQueryAndAliases(EntityInfo current, EntityInfo ancestor) {
if (ancestor == null) {
return;
}
if (!ancestor.aliases.isEmpty()) {
Class msgClz = inventoryQueryMessageMap.get(current.inventoryClass);
List<ExpandedQueryAliasInfo> aliases = aliasInfos.get(msgClz);
if (aliases == null) {
// NOTE(review): this freshly created list is passed to addQueryAliases but never
// put back into aliasInfos — presumably only the EntityInfo copy matters; confirm.
aliases = new ArrayList<ExpandedQueryAliasInfo>();
}
aliases.addAll(ancestor.aliases.values());
current.addQueryAliases(aliases);
}
if (!ancestor.expandedQueries.isEmpty()) {
// Ancestor entries can overwrite same-named ones already on current (Map.putAll).
current.expandedQueries.putAll(ancestor.expandedQueries);
}
// Continue with the ancestor's own parent until the root of the hierarchy.
inheritExpandedQueryAndAliases(current, ancestor.parent);
}
/**
 * Merges every entity's inherited expanded queries and aliases by walking
 * each entity's ancestor chain starting from its direct parent.
 */
private void inheritExpandedQueryAndAliases() {
    entityInfos.values().forEach(entity -> inheritExpandedQueryAndAliases(entity, entity.parent));
}
@Override
public boolean stop() {
// No resources to release; all state built in start() lives for the process lifetime.
return true;
}
/**
 * Executes a query API message against the given inventory class.
 * The effective inventory class may differ from the requested one (see selectInventoryClass).
 *
 * @return the list of inventory objects matching the query conditions
 */
@Override
public <T> List<T> query(APIQueryMessage msg, Class<T> inventoryClass) {
QueryContext context = new QueryContext();
context.msg = msg;
context.inventoryClass = selectInventoryClass(msg, inventoryClass);
return context.query();
}
// Delegates to the entity metadata to pick the concrete inventory class for this
// message (e.g. a subclass of the requested class, depending on the message).
private Class selectInventoryClass(APIQueryMessage msg, Class inventoryClass) {
EntityInfo info = entityInfos.get(inventoryClass);
return info.selectInventoryClass(msg);
}
/**
 * Counts the records matching a query API message; mirrors {@link #query} but
 * returns only the row count instead of materializing inventories.
 */
@Override
public long count(APIQueryMessage msg, Class inventoryClass) {
QueryContext context = new QueryContext();
context.msg = msg;
context.inventoryClass = selectInventoryClass(msg, inventoryClass);
return context.count();
}
/**
 * Builds, for every query message class, the flattened list of dotted field names that
 * can be queried (e.g. "vmInstance.uuid"), recursing through expanded queries and
 * @Queryable collections. Cycles are broken with a visited-path stack, expanded-field
 * prefixes are rewritten to their declared aliases, and the result is returned sorted
 * by message name and field name.
 *
 * @return map of query message class name to its sorted queryable field names
 */
@Override
public Map<String, List<String>> populateQueryableFields() {
//throw new CloudRuntimeException("it's impossible enumerate all combinations");
Map<String, List<String>> ret = new HashMap<String, List<String>>();
// Local helper that accumulates queryable field names for one inventory class.
class QueryableBuilder {
Class inventoryClass;
List<String> queryableFields = new ArrayList<String>();
EntityInfo info;
// Inventory classes currently on the recursion path; prevents infinite expansion
// when two inventories expand into each other.
Stack<Class> visitedPath = new Stack<Class>();
QueryableBuilder() {
}
QueryableBuilder(Stack<Class> path) {
visitedPath = path;
}
List<String> build() {
info = entityInfos.get(inventoryClass);
if (!visitedPath.contains(inventoryClass)) {
visitedPath.push(inventoryClass);
buildExpandedQueryableFields();
buildInherentQueryableFields();
normalizeToAliases();
visitedPath.pop();
}
return queryableFields;
}
// Rewrites "expandField.rest" to "alias.rest" when the field is behind a declared alias.
private String normalizeToAlias(String fieldName) {
for (ExpandedQueryAliasInfo alias : info.aliases.values()) {
if (fieldName.startsWith(String.format("%s.", alias.expandField))) {
return fieldName.replaceFirst(alias.expandField, alias.alias);
}
}
return fieldName;
}
// Applies normalizeToAlias to every collected field, de-duplicating via a Set.
private void normalizeToAliases() {
Set<String> set = new HashSet<String>();
for (String ret : queryableFields) {
set.add(normalizeToAlias(ret));
}
queryableFields.clear();
queryableFields.addAll(set);
}
// Recurses into each expanded query's target inventory and prefixes its fields
// with the expanded field name.
private void buildExpandedQueryableFields() {
for (ExpandedQueryStruct struct : info.expandedQueries.values()) {
//QueryableBuilder nbuilder = new QueryableBuilder(inherentPath, expandedPath);
QueryableBuilder nbuilder = new QueryableBuilder(visitedPath);
nbuilder.inventoryClass = struct.getInventoryClass();
List<String> expandedFields = nbuilder.build();
for (String ef : expandedFields) {
String ff = String.format("%s.%s", struct.getExpandedField(), ef);
//logger.debug(ff);
queryableFields.add(ff);
}
}
}
// Collects the inventory's own fields: primitives directly, @Queryable collections
// either as primitives or by recursing into the nested inventory class.
private void buildInherentQueryableFields() {
for (Field field : info.allFieldsMap.values()) {
if (field.isAnnotationPresent(APINoSee.class)) {
continue;
}
if (field.isAnnotationPresent(Unqueryable.class)) {
continue;
}
if (TypeUtils.isZstackBeanPrimitive(field.getType())) {
queryableFields.add(field.getName());
continue;
}
// Map-typed fields cannot be expressed as dotted query paths.
if (Map.class.isAssignableFrom(field.getType())) {
logger.warn(String.format("%s.%s is Map type, not support", field.getDeclaringClass(), field.getName()));
continue;
}
if (Collection.class.isAssignableFrom(field.getType()) && field.isAnnotationPresent(Queryable.class)) {
FieldUtils.CollectionGenericType gtype = (FieldUtils.CollectionGenericType) FieldUtils.inferGenericTypeOnMapOrCollectionField(field);
if (!gtype.isInferred()) {
throw new CloudRuntimeException(String.format("unable infer generic type of %s.%s", field.getDeclaringClass(), field.getName()));
}
if (gtype.getNestedGenericValue() != null) {
throw new CloudRuntimeException(String.format("%s.%s is nested Collection, not support", field.getDeclaringClass(), field.getName()));
}
if (TypeUtils.isZstackBeanPrimitive(gtype.getValueType())) {
queryableFields.add(field.getName());
continue;
}
Class nestedInventory = gtype.getValueType();
if (!nestedInventory.isAnnotationPresent(Inventory.class)) {
throw new CloudRuntimeException(String.format("field[%s] on inventory class[%s] is collection type with @Queryable, but its generic type[%s] is not an inventory class",
field.getName(), inventoryClass.getName(), nestedInventory.getName()));
}
QueryableBuilder nbuilder = new QueryableBuilder(visitedPath);
nbuilder.inventoryClass = nestedInventory;
List<String> nestedFields = nbuilder.build();
for (String nf : nestedFields) {
queryableFields.add(String.format("%s.%s", field.getName(), nf));
}
}
}
}
}
for (Map.Entry<Class, Class> e : inventoryQueryMessageMap.entrySet()) {
QueryableBuilder builder = new QueryableBuilder();
builder.inventoryClass = e.getKey();
List<String> queryables = builder.build();
ret.put(e.getValue().getName(), queryables);
}
LinkedHashMap<String, List<String>> orderedRet = new LinkedHashMap<String, List<String>>();
// order
List<String> keys = new ArrayList<String>();
keys.addAll(ret.keySet());
Collections.sort(keys);
for (String k : keys) {
List<String> lst = ret.get(k);
Collections.sort(lst);
orderedRet.put(k, lst);
}
return orderedRet;
}
/**
 * Generates Python query-object class definitions (one per inventory class) plus a
 * queryMessageInventoryMap dict mapping query message names to those classes, appending
 * the generated source to {@code sb}. Used to produce the Python test/CLI bindings.
 */
@Override
public void writePython(final StringBuilder sb) {
    class PythonQueryObjectWriter {
        private String makeClassName(Class clazz) {
            return String.format("QueryObject%s", clazz.getSimpleName());
        }

        void write() {
            // Emit each inventory's query object exactly once even if several
            // EntityInfo entries share an inventory class.
            Set<String> objectNameHavingWritten = new HashSet<String>();
            for (EntityInfo info : entityInfos.values()) {
                if (objectNameHavingWritten.contains(info.inventoryClass.getName())) {
                    continue;
                }
                write(info);
                objectNameHavingWritten.add(info.inventoryClass.getName());
            }
            // Map each query message's simple name to its generated query-object class.
            sb.append(String.format("\n\nqueryMessageInventoryMap = {"));
            for (Map.Entry<Class, Class> e : inventoryQueryMessageMap.entrySet()) {
                sb.append(String.format("\n%s '%s' : %s,", StringUtils.repeat(" ", 4), e.getValue().getSimpleName(), makeClassName(e.getKey())));
            }
            sb.append("\n}\n");
        }

        private void write(EntityInfo info) {
            sb.append(String.format("\nclass %s(object):", makeClassName(info.inventoryClass)));
            List<String> primitiveFields = new ArrayList<String>();
            List<String> expandedFields = new ArrayList<String>();
            Map<String, Class> nestedAndExpandedFields = new HashMap<String, Class>();
            for (Field f : info.allFieldsMap.values()) {
                if (f.isAnnotationPresent(Unqueryable.class)) {
                    continue;
                }
                if (f.isAnnotationPresent(APINoSee.class)) {
                    continue;
                }
                if (Collection.class.isAssignableFrom(f.getType())) {
                    // Collections of inventory types become expanded fields; collections
                    // of primitives are silently skipped here (handled elsewhere).
                    Class invClass = FieldUtils.getGenericType(f);
                    if (!TypeUtils.isZstackBeanPrimitive(invClass)) {
                        if (invClass.isAnnotationPresent(Inventory.class)) {
                            expandedFields.add(String.format("'%s'", f.getName()));
                            nestedAndExpandedFields.put(f.getName(), invClass);
                        }
                    }
                } else {
                    primitiveFields.add(String.format("'%s'", f.getName()));
                }
            }
            // Tag pseudo-fields are queryable on every inventory.
            primitiveFields.add("'__userTag__'");
            primitiveFields.add("'__systemTag__'");
            sb.append(String.format("\n%s PRIMITIVE_FIELDS = [%s]", StringUtils.repeat(" ", 4), StringUtils.join(primitiveFields, ",")));
            for (ExpandedQueryStruct s : info.expandedQueries.values()) {
                if (s.isHidden()) {
                    continue;
                }
                expandedFields.add(String.format("'%s'", s.getExpandedField()));
                nestedAndExpandedFields.put(s.getExpandedField(), s.getInventoryClass());
            }
            for (ExpandedQueryAliasInfo i : info.aliases.values()) {
                expandedFields.add(String.format("'%s'", i.alias));
                nestedAndExpandedFields.put(i.alias, i.inventoryClass);
            }
            sb.append(String.format("\n%s EXPANDED_FIELDS = [%s]", StringUtils.repeat(" ", 4), StringUtils.join(expandedFields, ",")));
            sb.append(String.format("\n%s QUERY_OBJECT_MAP = {", StringUtils.repeat(" ", 4)));
            for (Map.Entry<String, Class> e : nestedAndExpandedFields.entrySet()) {
                sb.append(String.format("\n%s'%s' : '%s',", StringUtils.repeat(" ", 8), e.getKey(), makeClassName(e.getValue())));
            }
            // FIX: was repeat(" ", 5) — every other generated line indents by 4 spaces;
            // the stray 5-space closing brace was inconsistent in the emitted Python.
            sb.append(String.format("\n%s}\n", StringUtils.repeat(" ", 4)));
        }
    }
    new PythonQueryObjectWriter().write();
}
}
| apache-2.0 |
kishoreg/helix-actors | recipes/rsync-replicated-file-system/src/main/java/org/apache/helix/filestore/ChangeLogGenerator.java | 3792 | package org.apache.helix.filestore;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.nio.charset.Charset;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.helix.filestore.FileSystemWatchService.ChangeType;
/**
 * Appends file-system change events to an on-disk transaction log ("log.N" files),
 * assigning each record a monotonically increasing transaction id composed of a
 * generation (high 32 bits) and a sequence number (low 32 bits).
 *
 * <p>Thread-safety: {@link #appendChange} serializes all writes through {@code lock}.
 */
public class ChangeLogGenerator implements FileChangeWatcher {
    // Serializes sequence-number increments and stream writes across watcher callbacks.
    Lock lock;
    private long currentSeq;
    private long currentGen;
    // Records written to the current file; used to trigger rollover every 10000 entries.
    private int entriesLogged;
    private DataOutputStream out;
    private final String directory;

    public ChangeLogGenerator(String directory, long startGen, long startSeq) throws Exception {
        this.directory = directory;
        lock = new ReentrantLock();
        currentSeq = startSeq;
        currentGen = startGen;
        setLogFile();
    }

    /**
     * Opens (in append mode) the "log.N" file with the highest N found in the directory,
     * or "log.1" when none exists yet.
     *
     * <p>NOTE(review): this is also invoked from {@link #write} after 10000 entries to
     * roll the log over, but because it reuses the highest existing index it reopens the
     * same file rather than starting log.(N+1) — confirm whether max+1 was intended there.
     */
    private void setLogFile() throws Exception {
        File file = new File(directory);
        String[] list = file.list();
        if (list == null) {
            list = new String[] {};
        }
        int max = 1;
        for (String name : list) {
            String[] split = name.split("\\.");
            if (split.length == 2) {
                try {
                    int index = Integer.parseInt(split[1]);
                    if (index > max) {
                        max = index;
                    }
                } catch (NumberFormatException e) {
                    System.err.println("Invalid transaction log file found:" + name);
                }
            }
        }
        String transLogFile = directory + "/" + "log." + (max);
        System.out.println("Current file name:" + transLogFile);
        // Append mode: restarting the generator continues the existing highest-index log.
        out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(transLogFile, true)));
    }

    @Override
    public void onEntryAdded(String path) {
        appendChange(path, FileSystemWatchService.ChangeType.CREATE);
    }

    @Override
    public void onEntryDeleted(String path) {
        appendChange(path, FileSystemWatchService.ChangeType.DELETE);
    }

    @Override
    public void onEntryModified(String path) {
        appendChange(path, FileSystemWatchService.ChangeType.MODIFY);
    }

    /**
     * Records a single change event with a fresh transaction id.
     *
     * @return true on success (or for directories, which are not logged); false if the
     *         record could not be written
     */
    public boolean appendChange(String path, ChangeType type) {
        // Directories themselves are never logged. FIX: check BEFORE acquiring the lock —
        // the original called lock.lock() first and returned from this branch without
        // unlocking, permanently leaking the ReentrantLock on the first directory event.
        if (new File(path).isDirectory()) {
            return true;
        }
        lock.lock();
        try {
            ChangeRecord record = new ChangeRecord();
            record.file = path;
            record.timestamp = System.currentTimeMillis();
            currentSeq++;
            // txid layout: generation in the high 32 bits, sequence in the low 32 bits.
            long txnId = (((long) currentGen) << 32) + ((long) currentSeq);
            record.txid = txnId;
            record.type = (short) type.ordinal();
            write(record);
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        } finally {
            lock.unlock();
        }
        return true;
    }

    // Serializes one record (txid, type, timestamp, path) and flushes; rolls the log
    // file after every 10000 entries. Caller must hold {@code lock}.
    private void write(ChangeRecord record) throws Exception {
        out.writeLong(record.txid);
        out.writeShort(record.type);
        out.writeLong(record.timestamp);
        out.writeUTF(record.file);
        out.flush();
        entriesLogged++;
        if (entriesLogged == 10000) {
            entriesLogged = 0;
            out.close();
            setLogFile();
        }
    }
}
| apache-2.0 |
permazen/permazen | permazen-kv-cockroach/src/main/java/io/permazen/kv/cockroach/package-info.java | 383 |
/*
* Copyright (C) 2015 Archie L. Cobbs. All rights reserved.
*/
/**
* {@link io.permazen.kv.KVDatabase} implementation based on CockroachDB.
*
* <p>
* This uses the PostgreSQL JDBC driver to connect to CockroachDB.
*
* @see io.permazen.kv.cockroach.CockroachKVDatabase
* @see <a href="https://www.cockroachlabs.com/">CockroachDB</a>
*/
package io.permazen.kv.cockroach;
| apache-2.0 |
Phaneendra-Huawei/demo | providers/openflow/flow/src/main/java/org/onosproject/provider/of/flow/impl/FlowStatsCollector.java | 3364 | /*
* Copyright 2014-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.provider.of.flow.impl;
import org.onlab.util.SharedExecutors;
import org.onosproject.openflow.controller.OpenFlowSwitch;
import org.onosproject.openflow.controller.RoleState;
import org.projectfloodlight.openflow.protocol.OFFlowStatsRequest;
import org.projectfloodlight.openflow.types.OFPort;
import org.projectfloodlight.openflow.types.TableId;
import org.slf4j.Logger;
import java.util.Timer;
import java.util.TimerTask;
import static org.slf4j.LoggerFactory.getLogger;
/**
* Collects flow statistics for the specified switch.
*/
/**
 * Collects flow statistics for the specified switch by periodically sending an
 * OFFlowStatsRequest while this node is MASTER for the switch.
 */
class FlowStatsCollector {

    private final Logger log = getLogger(getClass());

    // Milliseconds per second; poll intervals are configured in seconds.
    public static final int SECONDS = 1000;

    private final OpenFlowSwitch sw;
    private Timer timer;
    private TimerTask task;

    private int pollInterval;

    /**
     * Creates a new collector for the given switch and poll frequency.
     *
     * @param timer timer to use for scheduling
     * @param sw switch to pull
     * @param pollInterval poll frequency in seconds
     */
    FlowStatsCollector(Timer timer, OpenFlowSwitch sw, int pollInterval) {
        this.timer = timer;
        this.sw = sw;
        this.pollInterval = pollInterval;
    }

    /**
     * Adjusts poll frequency, rescheduling the collection task if it is running.
     *
     * @param pollInterval poll frequency in seconds
     */
    synchronized void adjustPollInterval(int pollInterval) {
        this.pollInterval = pollInterval;
        // FIX: guard against NPE when called before start(); the new interval is simply
        // recorded and will be used when collection starts.
        if (task != null) {
            task.cancel();
            task = new InternalTimerTask();
            // FIX: second argument previously used the literal 1000 instead of the
            // SECONDS constant used everywhere else (same value, now consistent).
            // NOTE(review): this schedules on the constructor-supplied timer while
            // start() uses SharedExecutors.getTimer() — confirm both refer to the
            // same shared timer instance.
            timer.scheduleAtFixedRate(task, pollInterval * SECONDS, pollInterval * SECONDS);
        }
    }

    // Sends a wildcard flow-stats request for all tables when this node is MASTER.
    private class InternalTimerTask extends TimerTask {
        @Override
        public void run() {
            if (sw.getRole() == RoleState.MASTER) {
                log.trace("Collecting stats for {}", sw.getStringId());
                OFFlowStatsRequest request = sw.factory().buildFlowStatsRequest()
                        .setMatch(sw.factory().matchWildcardAll())
                        .setTableId(TableId.ALL)
                        .setOutPort(OFPort.NO_MASK)
                        .build();
                sw.sendMsg(request);
            }
        }
    }

    /** Starts periodic collection: first poll after one second, then every pollInterval. */
    public synchronized void start() {
        // Initially start polling quickly. Then drop down to configured value
        log.debug("Starting Stats collection thread for {}", sw.getStringId());
        task = new InternalTimerTask();
        SharedExecutors.getTimer().scheduleAtFixedRate(task, 1 * SECONDS,
                                                       pollInterval * SECONDS);
    }

    /** Stops periodic collection; safe to call even if start() was never invoked. */
    public synchronized void stop() {
        log.debug("Stopping Stats collection thread for {}", sw.getStringId());
        // FIX: guard against NPE when stop() is called before start().
        if (task != null) {
            task.cancel();
            task = null;
        }
    }
}
| apache-2.0 |
fanatic-mobile-developer-for-android/A-week-to-develop-android-app-plan | AndroidQuickStartProject/app/src/main/java/com/devilwwj/app/adapters/AdapterBase.java | 2319 | package com.devilwwj.app.adapters;
import android.content.Context;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import java.util.ArrayList;
import java.util.List;
/**
* @ClassName: AdapterBase
* @Description: 通用Adapter
* @author wwj_748
* @date 2015年6月17日 下午4:03:39
* @param <T>
*/
/**
 * @ClassName: AdapterBase
 * @Description: Generic base adapter: holds the data list and delegates per-row view
 *               binding to subclasses via {@link #covert} (sic — "convert"; the name is
 *               kept for compatibility with existing subclasses).
 * @author wwj_748
 * @param <T> the item type backing each row
 */
public abstract class AdapterBase<T> extends BaseAdapter {
    protected final Context mContext;
    protected List<T> mData;
    // One layout resource per view type; length drives getViewTypeCount().
    protected final int[] mLayoutResArrays;

    public AdapterBase(Context context, int[] layoutResArray) {
        this(context, layoutResArray, null);
    }

    /**
     * @param context host context
     * @param layoutResArray row layout resources, one per view type
     * @param data initial data; null is replaced with an empty list
     */
    public AdapterBase(Context context, int[] layoutResArray, List<T> data) {
        this.mData = data == null ? new ArrayList<T>() : data;
        this.mContext = context;
        this.mLayoutResArrays = layoutResArray;
    }

    /** Replaces the backing data and refreshes the views. */
    public void setData(ArrayList<T> data) {
        this.mData = data;
        this.notifyDataSetChanged();
    }

    /** Appends all items and refreshes the views; a null argument is a no-op refresh. */
    public void addData(ArrayList<T> data) {
        if (data != null) {
            this.mData.addAll(data);
        }
        this.notifyDataSetChanged();
    }

    /** Appends one item and refreshes the views. */
    public void addData(T data) {
        this.mData.add(data);
        this.notifyDataSetChanged();
    }

    public ArrayList<T> getAllData() {
        return (ArrayList<T>) this.mData;
    }

    @Override
    public int getCount() {
        if (this.mData == null) {
            return 0;
        }
        return this.mData.size();
    }

    /**
     * @return the item at {@code position}, or null when the position is out of range
     */
    @Override
    public T getItem(int position) {
        // FIX: the original checked 'position > mData.size()', which let position ==
        // size() (and negatives) through to mData.get() and threw
        // IndexOutOfBoundsException instead of returning null as documented.
        if (this.mData == null || position < 0 || position >= this.mData.size()) {
            return null;
        }
        return mData.get(position);
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public int getViewTypeCount() {
        return this.mLayoutResArrays.length;
    }

    /**
     * You should always override this method, to return the
     * correct view type for every cell.
     */
    public int getItemViewType(int position) {
        return 0;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        final ViewHolderHelper helper = getAdapterHelper(position, convertView, parent);
        T item = getItem(position);
        covert(helper, item);
        return helper.getView();
    }

    /** Binds one item's data onto the row's views. (sic — "convert"; kept for compat.) */
    protected abstract void covert(ViewHolderHelper helper, T item);

    /** Obtains (or recycles) the view-holder wrapper for the given row. */
    protected abstract ViewHolderHelper getAdapterHelper(int position, View convertView, ViewGroup parent);
}
| apache-2.0 |
boundlessgeo/sqlite-jdbc | src/main/java/org/sqlite/Function.java | 9143 | /*
* Copyright (c) 2007 David Crawshaw <david@zentus.com>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package org.sqlite;
import java.sql.Connection;
import java.sql.SQLException;
/** Provides an interface for creating SQLite user-defined functions.
*
* <p>A subclass of <tt>org.sqlite.Function</tt> can be registered with
* <tt>Function.create()</tt> and called by the name it was given. All
* functions must implement <tt>xFunc()</tt>, which is called when SQLite
* runs the custom function.</p>
*
* Eg.
*
* <pre>
* Class.forName("org.sqlite.JDBC");
* Connection conn = DriverManager.getConnection("jdbc:sqlite:");
*
* Function.create(conn, "myFunc", new Function() {
* protected void xFunc() {
* System.out.println("myFunc called!");
* }
* });
*
* conn.createStatement().execute("select myFunc();");
* </pre>
*
* <p>Arguments passed to a custom function can be accessed using the
* <tt>protected</tt> functions provided. <tt>args()</tt> returns
* the number of arguments passed, while
* <tt>value_<type>(int)</tt> returns the value of the specific
* argument. Similarly a function can return a value using the
* <tt>result(<type>)</tt> function.</p>
*
* <p>Aggregate functions are not yet supported, but coming soon.</p>
*
*/
/** Provides an interface for creating SQLite user-defined functions.
 *
 * <p>A subclass of <tt>org.sqlite.Function</tt> can be registered with
 * <tt>Function.create()</tt> and called by the name it was given. All
 * functions must implement <tt>xFunc()</tt>, which is called when SQLite
 * runs the custom function.</p>
 *
 * Eg.
 *
 * <pre>
 *      Class.forName("org.sqlite.JDBC");
 *      Connection conn = DriverManager.getConnection("jdbc:sqlite:");
 *
 *      Function.create(conn, "myFunc", new Function() {
 *          protected void xFunc() {
 *              System.out.println("myFunc called!");
 *          }
 *      });
 *
 *      conn.createStatement().execute("select myFunc();");
 * </pre>
 *
 * <p>Arguments passed to a custom function can be accessed using the
 * <tt>protected</tt> functions provided. <tt>args()</tt> returns
 * the number of arguments passed, while
 * <tt>value_&lt;type&gt;(int)</tt> returns the value of the specific
 * argument. Similarly a function can return a value using the
 * <tt>result(&lt;type&gt;)</tt> function.</p>
 */
public abstract class Function
{
    private Conn conn;
    private DB db;

    // Native handles, set by the JNI layer while a call is in flight.
    long context = 0;   // pointer sqlite3_context*
    long value = 0;     // pointer sqlite3_value**
    int args = 0;       // argument count of the current invocation

    /**
     * Registers a given function with the connection.
     * @param conn The connection.
     * @param name The name of the function.
     * @param f The function to register.
     */
    public static final void create(Connection conn, String name, Function f)
            throws SQLException {
        // instanceof is null-safe, so the explicit null check the original carried
        // was redundant.
        if (!(conn instanceof Conn)) {
            throw new SQLException("connection must be to an SQLite db");
        }
        if (conn.isClosed()) {
            throw new SQLException("connection closed");
        }
        // Validate the name BEFORE touching f, so an invalid name does not leave
        // the Function half-initialized with a dangling connection reference.
        if (name == null || name.length() > 255) {
            throw new SQLException("invalid function name: '" + name + "'");
        }

        f.conn = (Conn) conn;
        f.db = f.conn.db();

        if (f.db.create_function(name, f) != Codes.SQLITE_OK) {
            throw new SQLException("error creating function");
        }
    }

    /**
     * Removes a named function from the given connection.
     * @param conn The connection to remove the function from.
     * @param name The name of the function.
     * @throws SQLException
     */
    public static final void destroy(Connection conn, String name)
            throws SQLException {
        if (!(conn instanceof Conn)) {
            throw new SQLException("connection must be to an SQLite db");
        }
        ((Conn) conn).db().destroy_function(name);
    }

    /**
     * Called by SQLite as a custom function. Should access arguments
     * through <tt>value_*(int)</tt>, return results with
     * <tt>result(*)</tt> and throw errors with <tt>error(String)</tt>.
     */
    protected abstract void xFunc() throws SQLException;

    /**
     * Returns the number of arguments passed to the function.
     * Can only be called from <tt>xFunc()</tt>.
     */
    protected synchronized final int args() throws SQLException {
        checkContext();
        return args;
    }

    /**
     * Called by <tt>xFunc</tt> to return a value.
     * @param value
     */
    protected synchronized final void result(byte[] value) throws SQLException {
        checkContext();
        db.result_blob(context, value);
    }

    /**
     * Called by <tt>xFunc</tt> to return a value.
     * @param value
     */
    protected synchronized final void result(double value) throws SQLException {
        checkContext();
        db.result_double(context, value);
    }

    /**
     * Called by <tt>xFunc</tt> to return a value.
     * @param value
     */
    protected synchronized final void result(int value) throws SQLException {
        checkContext();
        db.result_int(context, value);
    }

    /**
     * Called by <tt>xFunc</tt> to return a value.
     * @param value
     */
    protected synchronized final void result(long value) throws SQLException {
        checkContext();
        db.result_long(context, value);
    }

    /**
     * Called by <tt>xFunc</tt> to return a NULL value.
     */
    protected synchronized final void result() throws SQLException {
        checkContext();
        db.result_null(context);
    }

    /**
     * Called by <tt>xFunc</tt> to return a value.
     * @param value
     */
    protected synchronized final void result(String value) throws SQLException {
        checkContext();
        db.result_text(context, value);
    }

    /**
     * Called by <tt>xFunc</tt> to throw an error.
     * @param err
     */
    protected synchronized final void error(String err) throws SQLException {
        checkContext();
        db.result_error(context, err);
    }

    /**
     * Called by <tt>xFunc</tt> to access the value of an argument.
     * @param arg
     */
    protected synchronized final int value_bytes(int arg) throws SQLException {
        checkValue(arg);
        return db.value_bytes(this, arg);
    }

    /**
     * Called by <tt>xFunc</tt> to access the value of an argument.
     * @param arg
     */
    protected synchronized final String value_text(int arg) throws SQLException {
        checkValue(arg);
        return db.value_text(this, arg);
    }

    /**
     * Called by <tt>xFunc</tt> to access the value of an argument.
     * @param arg
     */
    protected synchronized final byte[] value_blob(int arg) throws SQLException {
        checkValue(arg);
        return db.value_blob(this, arg);
    }

    /**
     * Called by <tt>xFunc</tt> to access the value of an argument.
     * @param arg
     */
    protected synchronized final double value_double(int arg) throws SQLException {
        checkValue(arg);
        return db.value_double(this, arg);
    }

    /**
     * Called by <tt>xFunc</tt> to access the value of an argument.
     * @param arg
     */
    protected synchronized final int value_int(int arg) throws SQLException {
        checkValue(arg);
        return db.value_int(this, arg);
    }

    /**
     * Called by <tt>xFunc</tt> to access the value of an argument.
     * @param arg
     */
    protected synchronized final long value_long(int arg) throws SQLException {
        checkValue(arg);
        return db.value_long(this, arg);
    }

    /**
     * Called by <tt>xFunc</tt> to access the type of an argument.
     * @param arg
     */
    protected synchronized final int value_type(int arg) throws SQLException {
        checkValue(arg);
        return db.value_type(this, arg);
    }

    /**
     * Ensures a native sqlite3_context is available, i.e. we are inside xFunc().
     * @throws SQLException
     */
    private void checkContext() throws SQLException {
        if (conn == null || conn.db() == null || context == 0) {
            throw new SQLException("no context, not allowed to read value");
        }
    }

    /**
     * Ensures argument values are accessible and {@code arg} is within range.
     * @param arg
     * @throws SQLException
     */
    private void checkValue(int arg) throws SQLException {
        if (conn == null || conn.db() == null || value == 0) {
            throw new SQLException("not in value access state");
        }
        if (arg >= args) {
            // FIX: message typo — was "out bounds".
            throw new SQLException("arg " + arg + " out of bounds [0," + args + ")");
        }
    }

    /**
     * Provides an interface for creating SQLite user-defined aggregate functions.
     * @see Function
     */
    public static abstract class Aggregate
            extends Function
            implements Cloneable
    {
        /**
         * @see org.sqlite.Function#xFunc()
         */
        protected final void xFunc() {}

        /**
         * Defines the abstract aggregate callback function
         * @throws SQLException
         * @see <a href="http://www.sqlite.org/c3ref/aggregate_context.html">http://www.sqlite.org/c3ref/aggregate_context.html</a>
         */
        protected abstract void xStep() throws SQLException;

        /**
         * Defines the abstract aggregate callback function
         * @throws SQLException
         * @see <a href="http://www.sqlite.org/c3ref/aggregate_context.html">http://www.sqlite.org/c3ref/aggregate_context.html</a>
         */
        protected abstract void xFinal() throws SQLException;

        /**
         * @see java.lang.Object#clone()
         */
        public Object clone() throws CloneNotSupportedException {
            return super.clone();
        }
    }
}
| apache-2.0 |
porcelli-forks/jbpm-form-modeler | jbpm-form-modeler-panels/jbpm-form-modeler-editor/jbpm-form-modeler-editor-backend/src/main/java/org/jbpm/formModeler/panels/modeler/backend/indexing/model/terms/valueterms/ValueDataHolderTypeIndexTerm.java | 1438 | /*
* Copyright 2014 JBoss, by Red Hat, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.formModeler.panels.modeler.backend.indexing.model.terms.valueterms;
import org.jboss.errai.common.client.api.annotations.Portable;
import org.jbpm.formModeler.panels.modeler.backend.indexing.model.terms.DataHolderTypeIndexTerm;
import org.kie.workbench.common.services.refactoring.model.index.terms.valueterms.ValueIndexTerm;
import org.uberfire.commons.validation.PortablePreconditions;
@Portable
public class ValueDataHolderTypeIndexTerm extends DataHolderTypeIndexTerm implements ValueIndexTerm {
private String type;
public ValueDataHolderTypeIndexTerm() {
//Errai marshalling
}
public ValueDataHolderTypeIndexTerm(String type) {
this.type = PortablePreconditions.checkNotNull("type", type);
}
@Override
public String getValue() {
return type;
}
}
| apache-2.0 |
diorcety/intellij-community | platform/lang-impl/src/com/intellij/codeInsight/daemon/impl/DaemonListeners.java | 28298 | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.impl;
import com.intellij.ProjectTopics;
import com.intellij.codeHighlighting.Pass;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzerSettings;
import com.intellij.codeInsight.hint.TooltipController;
import com.intellij.ide.IdeTooltipManager;
import com.intellij.ide.PowerSaveMode;
import com.intellij.ide.todo.TodoConfiguration;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.ActionManagerEx;
import com.intellij.openapi.actionSystem.ex.AnActionListener;
import com.intellij.openapi.application.*;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.command.CommandAdapter;
import com.intellij.openapi.command.CommandEvent;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.undo.UndoManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.actionSystem.DocCommandGroupId;
import com.intellij.openapi.editor.colors.EditorColorsListener;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.event.*;
import com.intellij.openapi.editor.ex.EditorEventMulticasterEx;
import com.intellij.openapi.editor.ex.EditorMarkupModel;
import com.intellij.openapi.editor.impl.EditorImpl;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectUtil;
import com.intellij.openapi.roots.ModuleRootAdapter;
import com.intellij.openapi.roots.ModuleRootEvent;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.UserDataHolderEx;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.VcsDirtyScopeManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileAdapter;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.VirtualFilePropertyEvent;
import com.intellij.openapi.wm.StatusBar;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.openapi.wm.impl.status.TogglePopupHintsPanel;
import com.intellij.packageDependencies.DependencyValidationManager;
import com.intellij.profile.Profile;
import com.intellij.profile.ProfileChangeAdapter;
import com.intellij.profile.codeInspection.InspectionProfileManager;
import com.intellij.profile.codeInspection.InspectionProjectProfileManager;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiDocumentManagerImpl;
import com.intellij.psi.impl.PsiManagerEx;
import com.intellij.psi.search.scope.packageSet.NamedScopeManager;
import com.intellij.util.messages.MessageBus;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.UIUtil;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.Collections;
import java.util.List;
/**
* @author cdr
*/
/**
 * Installs all project-level listeners that drive the highlighting daemon:
 * document/caret/editor events, VCS and file-status changes, profile and
 * color-scheme changes, dumb-mode and power-save transitions. Each event is
 * translated into either {@link #stopDaemon} (restart soon) or
 * {@link #stopDaemonAndRestartAllFiles} (full re-highlight).
 *
 * @author cdr
 */
public class DaemonListeners implements Disposable {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.daemon.impl.DaemonListeners");

  private final Project myProject;
  private final DaemonCodeAnalyzerImpl myDaemonCodeAnalyzer;
  @NotNull private final PsiDocumentManager myPsiDocumentManager;
  private final FileEditorManager myFileEditorManager;
  private final UndoManager myUndoManager;
  private final ProjectLevelVcsManager myProjectLevelVcsManager;
  private final VcsDirtyScopeManager myVcsDirtyScopeManager;
  private final FileStatusManager myFileStatusManager;
  @NotNull private final ActionManager myActionManager;
  private final TooltipController myTooltipController;

  // Set when the Esc action fires; checked in commandFinished() to re-highlight.
  private boolean myEscPressed;
  // Volatile: written on command start, read by canChangeFileSilently() from any thread.
  private volatile boolean cutOperationJustHappened;
  private final DaemonCodeAnalyzer.DaemonListener myDaemonEventPublisher;

  // Guards against double initialization of the listeners for one project.
  private static final Key<Boolean> DAEMON_INITIALIZED = Key.create("DAEMON_INITIALIZED");

  public static DaemonListeners getInstance(Project project) {
    return project.getComponent(DaemonListeners.class);
  }

  public DaemonListeners(@NotNull final Project project,
                         @NotNull DaemonCodeAnalyzerImpl daemonCodeAnalyzer,
                         @NotNull final EditorTracker editorTracker,
                         @NotNull EditorFactory editorFactory,
                         @NotNull PsiDocumentManager psiDocumentManager,
                         @NotNull CommandProcessor commandProcessor,
                         @NotNull EditorColorsManager editorColorsManager,
                         @NotNull final Application application,
                         @NotNull InspectionProfileManager inspectionProfileManager,
                         @NotNull InspectionProjectProfileManager inspectionProjectProfileManager,
                         @NotNull TodoConfiguration todoConfiguration,
                         @NotNull ActionManagerEx actionManagerEx,
                         @NotNull VirtualFileManager virtualFileManager,
                         @SuppressWarnings("UnusedParameters") // for dependency order
                         @NotNull final NamedScopeManager namedScopeManager,
                         @SuppressWarnings("UnusedParameters") // for dependency order
                         @NotNull final DependencyValidationManager dependencyValidationManager,
                         @NotNull final FileDocumentManager fileDocumentManager,
                         @NotNull final PsiManager psiManager,
                         @NotNull final FileEditorManager fileEditorManager,
                         @NotNull TooltipController tooltipController,
                         @NotNull UndoManager undoManager,
                         @NotNull ProjectLevelVcsManager projectLevelVcsManager,
                         @NotNull VcsDirtyScopeManager vcsDirtyScopeManager,
                         @NotNull FileStatusManager fileStatusManager) {
    Disposer.register(project, this);
    myProject = project;
    myDaemonCodeAnalyzer = daemonCodeAnalyzer;
    myPsiDocumentManager = psiDocumentManager;
    myFileEditorManager = fileEditorManager;
    myUndoManager = undoManager;
    myProjectLevelVcsManager = projectLevelVcsManager;
    myVcsDirtyScopeManager = vcsDirtyScopeManager;
    myFileStatusManager = fileStatusManager;
    myActionManager = actionManagerEx;
    myTooltipController = tooltipController;

    // Atomically flip the "initialized" marker; fails loudly on a second construction.
    boolean replaced = ((UserDataHolderEx)myProject).replace(DAEMON_INITIALIZED, null, Boolean.TRUE);
    LOG.assertTrue(replaced, "Daemon listeners already initialized for the project "+myProject);

    MessageBus messageBus = myProject.getMessageBus();
    myDaemonEventPublisher = messageBus.syncPublisher(DaemonCodeAnalyzer.DAEMON_EVENT_TOPIC);
    final MessageBusConnection connection = messageBus.connect();
    // The default (template) project never runs the daemon: nothing to wire up.
    if (project.isDefault()) return;

    EditorEventMulticaster eventMulticaster = editorFactory.getEventMulticaster();
    eventMulticaster.addDocumentListener(new DocumentAdapter() {
      // clearing highlighters before changing document because change can damage editor highlighters drastically, so we'll clear more than necessary
      @Override
      public void beforeDocumentChange(final DocumentEvent e) {
        if (isUnderIgnoredAction(null)) return;
        Document document = e.getDocument();
        VirtualFile virtualFile = fileDocumentManager.getFile(document);
        Project project = virtualFile == null ? null : ProjectUtil.guessProjectForFile(virtualFile);
        if (!worthBothering(document, project)) {
          return; //no need to stop daemon if something happened in the console
        }
        stopDaemon(true, "Document change");
        UpdateHighlightersUtil.updateHighlightersByTyping(myProject, e);
      }
    }, this);

    eventMulticaster.addCaretListener(new CaretAdapter() {
      @Override
      public void caretPositionChanged(CaretEvent e) {
        final Editor editor = e.getEditor();
        if (!editor.getComponent().isShowing() && !application.isUnitTestMode() ||
            !worthBothering(editor.getDocument(), editor.getProject())) {
          return; //no need to stop daemon if something happened in the console
        }
        // Hide the stale intention hint once the caret moves away (async, EDT).
        if (!application.isUnitTestMode()) {
          ApplicationManager.getApplication().invokeLater(new Runnable() {
            @Override
            public void run() {
              if (!editor.getComponent().isShowing() || myProject.isDisposed()) {
                return;
              }
              myDaemonCodeAnalyzer.hideLastIntentionHint();
            }
          }, ModalityState.current());
        }
      }
    }, this);

    eventMulticaster.addEditorMouseMotionListener(new MyEditorMouseMotionListener(), this);
    eventMulticaster.addEditorMouseListener(new MyEditorMouseListener(myTooltipController), this);

    EditorTrackerListener editorTrackerListener = new EditorTrackerListener() {
      // Remember the last set to skip redundant restarts when nothing changed.
      private List<Editor> myActiveEditors = Collections.emptyList();
      @Override
      public void activeEditorsChanged(@NotNull List<Editor> editors) {
        List<Editor> activeEditors = editorTracker.getActiveEditors();
        if (myActiveEditors.equals(activeEditors)) {
          return;
        }
        myActiveEditors = activeEditors;
        stopDaemon(true, "Active editor change");  // do not stop daemon if idea loses/gains focus
        if (ApplicationManager.getApplication().isDispatchThread() && LaterInvocator.isInModalContext()) {
          // editor appear in modal context, re-enable the daemon
          myDaemonCodeAnalyzer.setUpdateByTimerEnabled(true);
        }
        for (Editor editor : activeEditors) {
          repaintErrorStripeRenderer(editor, myProject);
        }
      }
    };
    editorTracker.addEditorTrackerListener(editorTrackerListener, this);

    EditorFactoryListener editorFactoryListener = new EditorFactoryListener() {
      @Override
      public void editorCreated(@NotNull EditorFactoryEvent event) {
        Editor editor = event.getEditor();
        Document document = editor.getDocument();
        Project editorProject = editor.getProject();
        // worthBothering() checks for getCachedPsiFile, so call getPsiFile here
        PsiFile file = editorProject == null ? null : PsiDocumentManager.getInstance(editorProject).getPsiFile(document);
        if (!editor.getComponent().isShowing() || !worthBothering(document, editorProject)) {
          LOG.debug("Not worth: " + file);
          return;
        }
        repaintErrorStripeRenderer(editor, myProject);
      }

      @Override
      public void editorReleased(@NotNull EditorFactoryEvent event) {
        // mem leak after closing last editor otherwise
        UIUtil.invokeLaterIfNeeded(new Runnable() {
          @Override
          public void run() {
            myDaemonCodeAnalyzer.hideLastIntentionHint();
          }
        });
      }
    };
    editorFactory.addEditorFactoryListener(editorFactoryListener, this);

    // PSI tree changes feed the file-status map through a dedicated handler.
    PsiDocumentManagerImpl documentManager = (PsiDocumentManagerImpl)psiDocumentManager;
    PsiChangeHandler changeHandler = new PsiChangeHandler(myProject, documentManager, editorFactory, connection,
                                                         daemonCodeAnalyzer.getFileStatusMap());
    Disposer.register(this, changeHandler);
    psiManager.addPsiTreeChangeListener(changeHandler, changeHandler);

    connection.subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootAdapter() {
      @Override
      public void rootsChanged(ModuleRootEvent event) {
        stopDaemonAndRestartAllFiles("Project roots changed");
      }
    });

    connection.subscribe(DumbService.DUMB_MODE, new DumbService.DumbModeListener() {
      @Override
      public void enteredDumbMode() {
        stopDaemonAndRestartAllFiles("Dumb mode started");
      }

      @Override
      public void exitDumbMode() {
        stopDaemonAndRestartAllFiles("Dumb mode finished");
      }
    });

    connection.subscribe(PowerSaveMode.TOPIC, new PowerSaveMode.Listener() {
      @Override
      public void powerSaveStateChanged() {
        stopDaemon(true, "Power save mode change");
      }
    });

    editorColorsManager.addEditorColorsListener(new EditorColorsListener() {
      @Override
      public void globalSchemeChange(EditorColorsScheme scheme) {
        stopDaemonAndRestartAllFiles("Global color scheme changed");
      }
    }, this);

    commandProcessor.addCommandListener(new MyCommandListener(), this);
    application.addApplicationListener(new MyApplicationListener(), this);
    editorColorsManager.addEditorColorsListener(new MyEditorColorsListener(), this);
    inspectionProfileManager.addProfileChangeListener(new MyProfileChangeListener(), this);
    inspectionProjectProfileManager.addProfilesListener(new MyProfileChangeListener(), this);
    todoConfiguration.addPropertyChangeListener(new MyTodoListener(), this);
    todoConfiguration.colorSettingsChanged();
    actionManagerEx.addAnActionListener(new MyAnActionListener(), this);

    virtualFileManager.addVirtualFileListener(new VirtualFileAdapter() {
      @Override
      public void propertyChanged(@NotNull VirtualFilePropertyEvent event) {
        String propertyName = event.getPropertyName();
        if (VirtualFile.PROP_NAME.equals(propertyName)) {
          stopDaemonAndRestartAllFiles("Virtual file name changed");
          VirtualFile virtualFile = event.getFile();
          PsiFile psiFile = !virtualFile.isValid() ? null : ((PsiManagerEx)psiManager).getFileManager().getCachedPsiFile(virtualFile);
          if (psiFile != null && !myDaemonCodeAnalyzer.isHighlightingAvailable(psiFile)) {
            Document document = fileDocumentManager.getCachedDocument(virtualFile);
            if (document != null) {
              // highlight markers no more
              //todo clear all highlights regardless the pass id
              // Here color scheme required for TextEditorFields, as far as I understand this
              // code related to standard file editors, which always use Global color scheme,
              // thus we can pass null here.
              final EditorColorsScheme editorColorScheme = null;
              UpdateHighlightersUtil.setHighlightersToEditor(myProject, document, 0, document.getTextLength(),
                                                             Collections.<HighlightInfo>emptyList(),
                                                             editorColorScheme,
                                                             Pass.UPDATE_ALL);
            }
          }
        }
        if (!propertyName.equals(PsiTreeChangeEvent.PROP_WRITABLE)) {
          stopDaemon(true, "Virtual file property change");
        }
      }
    }, this);

    ((EditorEventMulticasterEx)eventMulticaster).addErrorStripeListener(new ErrorStripeHandler(myProject), this);

    ModalityStateListener modalityStateListener = new ModalityStateListener() {
      @Override
      public void beforeModalityStateChanged(boolean entering) {
        // before showing dialog we are in non-modal context yet, and before closing dialog we are still in modal context
        boolean inModalContext = LaterInvocator.isInModalContext();
        stopDaemon(inModalContext, "Modality change");
        myDaemonCodeAnalyzer.setUpdateByTimerEnabled(inModalContext);
      }
    };
    LaterInvocator.addModalityStateListener(modalityStateListener, this);

    messageBus.connect().subscribe(SeverityRegistrar.SEVERITIES_CHANGED_TOPIC, new Runnable() {
      @Override
      public void run() {
        stopDaemonAndRestartAllFiles("Severities changed");
      }
    });

    if (RefResolveService.ENABLED) {
      RefResolveService resolveService = RefResolveService.getInstance(project);
      resolveService.addListener(this, new RefResolveService.Listener() {
        @Override
        public void allFilesResolved() {
          stopDaemon(true, "RefResolveService is up to date");
        }
      });
    }
  }

  /**
   * True when the document event originates from an action whose changes the
   * daemon deliberately ignores (IgnoreDocumentRunnable, directly or as the
   * currently running write action).
   */
  static boolean isUnderIgnoredAction(@Nullable Object action) {
    return action instanceof DocumentRunnable.IgnoreDocumentRunnable ||
           action == DocumentRunnable.IgnoreDocumentRunnable.class ||
           ApplicationManager.getApplication().hasWriteAction(DocumentRunnable.IgnoreDocumentRunnable.class);
  }

  /**
   * Decides whether an event on {@code document} should affect this project's
   * daemon: the document must belong to this project (or none) and be backed
   * by a cached, original PSI file. Console/alien documents return false.
   */
  private boolean worthBothering(final Document document, Project project) {
    if (document == null) return true;
    if (project != null && project != myProject) return false;
    // cached is essential here since we do not want to create PSI file in alien project
    PsiFile psiFile = myPsiDocumentManager.getCachedPsiFile(document);
    return psiFile != null && psiFile.getOriginalFile() == psiFile;
  }

  @Override
  public void dispose() {
    stopDaemonAndRestartAllFiles("Project closed");
    // Flip the marker back so the assertion catches double-dispose/never-initialized.
    boolean replaced = ((UserDataHolderEx)myProject).replace(DAEMON_INITIALIZED, Boolean.TRUE, Boolean.FALSE);
    LOG.assertTrue(replaced, "Daemon listeners already disposed for the project "+myProject);
  }

  /**
   * True when an automated action (e.g. an intention) may modify {@code file}
   * without asking the user: not right after a Cut, only for files inside the
   * project, and only when VCS reports the file as already changed or an undo
   * stack exists for it.
   */
  public static boolean canChangeFileSilently(@NotNull PsiFileSystemItem file) {
    Project project = file.getProject();
    DaemonListeners listeners = getInstance(project);
    if (listeners == null) return true;
    if (listeners.cutOperationJustHappened) return false;
    VirtualFile virtualFile = file.getVirtualFile();
    if (virtualFile == null) return false;
    if (file instanceof PsiCodeFragment) return true;
    if (!ModuleUtilCore.projectContainsFile(project, virtualFile, false)) return false;
    Result vcs = listeners.vcsThinksItChanged(virtualFile);
    if (vcs == Result.CHANGED) return true;
    if (vcs == Result.UNCHANGED) return false;
    return listeners.canUndo(virtualFile);
  }

  // True when any editor currently showing the file has an undo stack for it.
  private boolean canUndo(@NotNull VirtualFile virtualFile) {
    for (FileEditor editor : myFileEditorManager.getEditors(virtualFile)) {
      if (myUndoManager.isUndoAvailable(editor)) return true;
    }
    return false;
  }

  // Tri-state VCS answer used by canChangeFileSilently().
  private enum Result {
    CHANGED, UNCHANGED, NOT_SURE
  }

  /**
   * Queries VCS status of the file: CHANGED when modified/added, UNCHANGED
   * when clean, NOT_SURE when there is no VCS, status is unknown, or the
   * dirty-scope manager is still processing the file.
   */
  private Result vcsThinksItChanged(VirtualFile virtualFile) {
    AbstractVcs activeVcs = myProjectLevelVcsManager.getVcsFor(virtualFile);
    if (activeVcs == null) return Result.NOT_SURE;

    FilePath path = VcsUtil.getFilePath(virtualFile);
    boolean vcsIsThinking = !myVcsDirtyScopeManager.whatFilesDirty(Collections.singletonList(path)).isEmpty();
    if (vcsIsThinking) return Result.NOT_SURE; // do not modify file which is in the process of updating

    FileStatus status = myFileStatusManager.getStatus(virtualFile);
    if (status == FileStatus.UNKNOWN) return Result.NOT_SURE;
    return status == FileStatus.MODIFIED || status == FileStatus.ADDED ? Result.CHANGED : Result.UNCHANGED;
  }

  // Pauses the daemon across write actions; only restarts it if it was running.
  private class MyApplicationListener extends ApplicationAdapter {
    private boolean myDaemonWasRunning;

    @Override
    public void beforeWriteActionStart(Object action) {
      myDaemonWasRunning = myDaemonCodeAnalyzer.isRunning();
      if (!myDaemonWasRunning) return; // we'll restart in writeActionFinished()
      stopDaemon(true, "Write action start");
    }

    @Override
    public void writeActionFinished(Object action) {
      stopDaemon(true, "Write action finish");
    }
  }

  // Tracks command boundaries: cancels highlighting on command start and
  // restarts it on finish where needed (notably after Esc).
  private class MyCommandListener extends CommandAdapter {
    private final Object myCutActionName = myActionManager.getAction(IdeActions.ACTION_EDITOR_CUT).getTemplatePresentation().getText();

    @Override
    public void commandStarted(CommandEvent event) {
      Document affectedDocument = extractDocumentFromCommand(event);
      if (isUnderIgnoredAction(null)) return;
      if (!worthBothering(affectedDocument, event.getProject())) return;

      cutOperationJustHappened = myCutActionName.equals(event.getCommandName());
      if (!myDaemonCodeAnalyzer.isRunning()) return;
      if (LOG.isDebugEnabled()) {
        LOG.debug("cancelling code highlighting by command:" + event.getCommand());
      }
      stopDaemon(false, "Command start");
    }

    // The document may come directly from the event or be encoded in the command group id.
    @Nullable
    private Document extractDocumentFromCommand(CommandEvent event) {
      Document affectedDocument = event.getDocument();
      if (affectedDocument != null) return affectedDocument;
      Object id = event.getCommandGroupId();

      if (id instanceof Document) {
        affectedDocument = (Document)id;
      }
      else if (id instanceof DocCommandGroupId) {
        affectedDocument = ((DocCommandGroupId)id).getDocument();
      }
      return affectedDocument;
    }

    @Override
    public void commandFinished(CommandEvent event) {
      Document affectedDocument = extractDocumentFromCommand(event);
      if (isUnderIgnoredAction(null)) return;
      if (!worthBothering(affectedDocument, event.getProject())) return;

      if (myEscPressed) {
        myEscPressed = false;
        if (affectedDocument != null) {
          // prevent Esc key to leave the document in the not-highlighted state
          if (!myDaemonCodeAnalyzer.getFileStatusMap().allDirtyScopesAreNull(affectedDocument)) {
            stopDaemon(true, "Command finish");
          }
        }
      }
      else if (!myDaemonCodeAnalyzer.isRunning()) {
        stopDaemon(true, "Command finish");
      }
    }
  }

  // Re-reads todo colors and restarts everything when the color scheme changes.
  private class MyEditorColorsListener implements EditorColorsListener {
    @Override
    public void globalSchemeChange(EditorColorsScheme scheme) {
      TodoConfiguration.getInstance().colorSettingsChanged();
      stopDaemonAndRestartAllFiles("Editor color scheme changed");
    }
  }

  // Restarts highlighting when the set of todo patterns changes.
  private class MyTodoListener implements PropertyChangeListener {
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
      if (TodoConfiguration.PROP_TODO_PATTERNS.equals(evt.getPropertyName())) {
        stopDaemonAndRestartAllFiles("Todo patterns changed");
      }
    }
  }

  // Inspection profile lifecycle: any profile change forces a full restart.
  private class MyProfileChangeListener extends ProfileChangeAdapter {
    @Override
    public void profileChanged(Profile profile) {
      stopDaemonAndRestartAllFiles("Profile changed");
    }

    @Override
    public void profileActivated(Profile oldProfile, Profile profile) {
      stopDaemonAndRestartAllFiles("Profile activated");
    }

    @Override
    public void profilesInitialized() {
      inspectionProfilesInitialized();
    }
  }

  // Status-bar widget showing the import-popup toggle; created lazily once profiles load.
  private TogglePopupHintsPanel myTogglePopupHintsPanel;

  private void inspectionProfilesInitialized() {
    UIUtil.invokeLaterIfNeeded(new Runnable() {
      @Override
      public void run() {
        if (myProject.isDisposed()) return;
        StatusBar statusBar = WindowManager.getInstance().getStatusBar(myProject);
        myTogglePopupHintsPanel = new TogglePopupHintsPanel(myProject);
        statusBar.addWidget(myTogglePopupHintsPanel, myProject);
        updateStatusBar();

        stopDaemonAndRestartAllFiles("Inspection profiles activated");
      }
    });
  }

  public void updateStatusBar() {
    if (myTogglePopupHintsPanel != null) myTogglePopupHintsPanel.updateStatus();
  }

  // Records Esc presses and stops the daemon on typing outside consoles.
  private class MyAnActionListener extends AnActionListener.Adapter {
    private final AnAction escapeAction = myActionManager.getAction(IdeActions.ACTION_EDITOR_ESCAPE);

    @Override
    public void beforeActionPerformed(AnAction action, DataContext dataContext, AnActionEvent event) {
      myEscPressed = action == escapeAction;
    }

    @Override
    public void beforeEditorTyping(char c, DataContext dataContext) {
      Editor editor = CommonDataKeys.EDITOR.getData(dataContext);
      //no need to stop daemon if something happened in the console
      if (editor != null && !worthBothering(editor.getDocument(), editor.getProject())) {
        return;
      }
      stopDaemon(true, "Editor typing");
    }
  }

  // Cancels daemon tooltips when the mouse leaves the editor.
  private static class MyEditorMouseListener extends EditorMouseAdapter {
    @NotNull
    private final TooltipController myTooltipController;

    public MyEditorMouseListener(@NotNull TooltipController tooltipController) {
      myTooltipController = tooltipController;
    }

    @Override
    public void mouseExited(EditorMouseEvent e) {
      if (!myTooltipController.shouldSurvive(e.getMouseEvent())) {
        DaemonTooltipUtil.cancelTooltips();
      }
    }
  }

  // Shows/hides highlight-info tooltips as the mouse moves over the editing area.
  private class MyEditorMouseMotionListener implements EditorMouseMotionListener {
    @Override
    public void mouseMoved(EditorMouseEvent e) {
      Editor editor = e.getEditor();
      if (myProject != editor.getProject()) return;
      if (editor.getComponent().getClientProperty(EditorImpl.IGNORE_MOUSE_TRACKING) != null) return;

      boolean shown = false;
      try {
        // There is a possible case that cursor is located at soft wrap-introduced virtual space (that is mapped to offset
        // of the document symbol just after soft wrap). We don't want to show any tooltips for it then.
        VisualPosition visual = editor.xyToVisualPosition(e.getMouseEvent().getPoint());
        if (editor.getSoftWrapModel().isInsideOrBeforeSoftWrap(visual)) {
          return;
        }

        LogicalPosition logical = editor.visualToLogicalPosition(visual);
        if (e.getArea() == EditorMouseEventArea.EDITING_AREA && !UIUtil.isControlKeyDown(e.getMouseEvent())) {
          int offset = editor.logicalPositionToOffset(logical);
          if (editor.offsetToLogicalPosition(offset).column != logical.column) return; // we are in virtual space
          HighlightInfo info = myDaemonCodeAnalyzer.findHighlightByOffset(editor.getDocument(), offset, false);
          if (info == null || info.getDescription() == null) return;
          if (IdeTooltipManager.getInstance().hasCurrent()) return;
          DaemonTooltipUtil.showInfoTooltip(info, editor, offset);
          shown = true;
        }
      }
      finally {
        if (!shown && !myTooltipController.shouldSurvive(e.getMouseEvent())) {
          DaemonTooltipUtil.cancelTooltips();
        }
      }
    }

    @Override
    public void mouseDragged(EditorMouseEvent e) {
      myTooltipController.cancelTooltips();
    }
  }

  // Cancels the current highlighting pass; restarts it soon when toRestartAlarm is true.
  private void stopDaemon(boolean toRestartAlarm, @NonNls @NotNull String reason) {
    myDaemonEventPublisher.daemonCancelEventOccurred(reason);
    myDaemonCodeAnalyzer.stopProcess(toRestartAlarm, reason);
  }

  // Cancels the current pass and schedules re-highlighting of every file.
  private void stopDaemonAndRestartAllFiles(@NotNull String reason) {
    myDaemonEventPublisher.daemonCancelEventOccurred(reason);
    myDaemonCodeAnalyzer.restart();
  }

  /**
   * (Re)installs the error-stripe infrastructure on the editor's markup model.
   * Must run on the EDT; a no-op until the project is initialized.
   */
  static void repaintErrorStripeRenderer(@NotNull Editor editor, @NotNull Project project) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    if (!project.isInitialized()) return;
    final Document document = editor.getDocument();
    final PsiFile psiFile = PsiDocumentManager.getInstance(project).getPsiFile(document);
    final EditorMarkupModel markup = (EditorMarkupModel)editor.getMarkupModel();
    markup.setErrorPanelPopupHandler(new DaemonEditorPopup(psiFile));
    markup.setErrorStripTooltipRendererProvider(new DaemonTooltipRendererProvider(project));
    markup.setMinMarkHeight(DaemonCodeAnalyzerSettings.getInstance().ERROR_STRIPE_MARK_MIN_HEIGHT);
    TrafficLightRenderer.setOrRefreshErrorStripeRenderer(markup, project, document, psiFile);
  }
}
| apache-2.0 |
integrated/jakarta-slide-server | src/webdav/server/org/apache/slide/webdav/util/resourcekind/BaselineImpl.java | 2727 | /*
* $Header: /var/chroot/cvs/cvs/factsheetDesigner/extern/jakarta-slide-server-src-2.1-iPlus Edit/src/webdav/server/org/apache/slide/webdav/util/resourcekind/BaselineImpl.java,v 1.2 2006-01-22 22:55:21 peter-cvs Exp $
* $Revision: 1.2 $
* $Date: 2006-01-22 22:55:21 $
*
* ====================================================================
*
* Copyright 1999-2002 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.slide.webdav.util.resourcekind;
import java.util.Set;
/**
 * Resource kind describing a DeltaV baseline resource: contributes the
 * baseline-specific live properties on top of the generic resource kind.
 * Obtained via {@link #getInstance()}; a single shared instance is used.
 */
public class BaselineImpl extends AbstractResourceKind implements Baseline {

    /** Shared instance; read and written only inside the synchronized getInstance(). */
    protected static ResourceKind singleton = null;

    /**
     * Factory method returning the shared instance.
     * Synchronized: the original unsynchronized lazy check was not
     * thread-safe (two threads could each create an instance, and an
     * instance could be published unsafely).
     */
    public static synchronized ResourceKind getInstance() {
        if (singleton == null) {
            singleton = new BaselineImpl();
        }
        return singleton;
    }

    /**
     * Protected constructor; use {@link #getInstance()}.
     */
    protected BaselineImpl() {
    }

    /**
     * Get the set of live properties supported by this resource kind:
     * the inherited ones plus, when the baseline feature is enabled,
     * DAV:baseline-collection and DAV:subbaseline-set.
     *
     * @param excludedFeatures array of F_* constants (no filtering if null or empty)
     * @return set of property names
     * @see org.apache.slide.webdav.util.WebdavConstants
     * @see org.apache.slide.webdav.util.DeltavConstants
     * @see org.apache.slide.webdav.util.AclConstants
     * @see org.apache.slide.webdav.util.DaslConstants
     */
    public Set getSupportedLiveProperties( String[] excludedFeatures ) {
        Set s = super.getSupportedLiveProperties( excludedFeatures );
        if( isSupportedFeature(F_BASELINE, excludedFeatures) ) {
            s.add( P_BASELINE_COLLECTION );
            s.add( P_SUBBASELINE_SET );
        }
        return s;
    }

    /**
     * Get the set of methods supported by this resource kind
     * (currently just the inherited set).
     */
    public Set getSupportedMethods() {
        Set s = super.getSupportedMethods();
        return s;
    }

    /**
     * Get the set of reports supported by this resource kind
     * (currently just the inherited set).
     */
    public Set getSupportedReports() {
        Set s = super.getSupportedReports();
        return s;
    }

    /**
     * @return the WebDAV name of this resource kind, "baseline"
     */
    public String toString() {
        return "baseline";
    }
}
| apache-2.0 |
andrewmains12/hbase | hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java | 22499 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.procedure2.store.wal;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.LinkedTransferQueue;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker;
import org.apache.hadoop.hbase.procedure2.util.ByteSlot;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureWALHeader;
/**
* WAL implementation of the ProcedureStore.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class WALProcedureStore implements ProcedureStore {
private static final Log LOG = LogFactory.getLog(WALProcedureStore.class);
  /**
   * Hook invoked before reusing an existing WAL file: implementations must
   * recover (take over) the lease on {@code path} from any previous writer,
   * blocking until it is safe to read or append.
   */
  public interface LeaseRecovery {
    void recoverFileLease(FileSystem fs, Path path) throws IOException;
  }
private static final int MAX_RETRIES_BEFORE_ABORT = 3;
private static final String SYNC_WAIT_MSEC_CONF_KEY = "hbase.procedure.store.wal.sync.wait.msec";
private static final int DEFAULT_SYNC_WAIT_MSEC = 100;
private static final String USE_HSYNC_CONF_KEY = "hbase.procedure.store.wal.use.hsync";
private static final boolean DEFAULT_USE_HSYNC = true;
private static final String ROLL_THRESHOLD_CONF_KEY = "hbase.procedure.store.wal.roll.threshold";
private static final long DEFAULT_ROLL_THRESHOLD = 32 * 1024 * 1024; // 32M
private final CopyOnWriteArrayList<ProcedureStoreListener> listeners =
new CopyOnWriteArrayList<ProcedureStoreListener>();
private final LinkedList<ProcedureWALFile> logs = new LinkedList<ProcedureWALFile>();
private final ProcedureStoreTracker storeTracker = new ProcedureStoreTracker();
private final AtomicBoolean running = new AtomicBoolean(false);
private final ReentrantLock lock = new ReentrantLock();
private final Condition waitCond = lock.newCondition();
private final Condition slotCond = lock.newCondition();
private final Condition syncCond = lock.newCondition();
private final LeaseRecovery leaseRecovery;
private final Configuration conf;
private final FileSystem fs;
private final Path logDir;
private AtomicBoolean inSync = new AtomicBoolean(false);
private LinkedTransferQueue<ByteSlot> slotsCache = null;
private Set<ProcedureWALFile> corruptedLogs = null;
private AtomicLong totalSynced = new AtomicLong(0);
private FSDataOutputStream stream = null;
private long lastRollTs = 0;
private long flushLogId = 0;
private int slotIndex = 0;
private Thread syncThread;
private ByteSlot[] slots;
private long rollThreshold;
private boolean useHsync;
private int syncWaitMsec;
public WALProcedureStore(final Configuration conf, final FileSystem fs, final Path logDir,
final LeaseRecovery leaseRecovery) {
this.fs = fs;
this.conf = conf;
this.logDir = logDir;
this.leaseRecovery = leaseRecovery;
}
@Override
public void start(int numSlots) throws IOException {
if (running.getAndSet(true)) {
return;
}
// Init buffer slots
slots = new ByteSlot[numSlots];
slotsCache = new LinkedTransferQueue();
while (slotsCache.size() < numSlots) {
slotsCache.offer(new ByteSlot());
}
// Tunings
rollThreshold = conf.getLong(ROLL_THRESHOLD_CONF_KEY, DEFAULT_ROLL_THRESHOLD);
syncWaitMsec = conf.getInt(SYNC_WAIT_MSEC_CONF_KEY, DEFAULT_SYNC_WAIT_MSEC);
useHsync = conf.getBoolean(USE_HSYNC_CONF_KEY, DEFAULT_USE_HSYNC);
// Init sync thread
syncThread = new Thread("WALProcedureStoreSyncThread") {
@Override
public void run() {
while (running.get()) {
try {
syncLoop();
} catch (IOException e) {
LOG.error("got an exception from the sync-loop", e);
sendAbortProcessSignal();
}
}
}
};
syncThread.start();
}
  /**
   * Stops the store: wakes the sync thread, joins it (unless {@code abort}),
   * closes the output stream and every tracked log file.
   * A no-op if the store is already stopped.
   *
   * @param abort when true, do not wait for the sync thread to terminate
   */
  @Override
  public void stop(boolean abort) {
    if (!running.getAndSet(false)) {
      return;
    }

    LOG.info("Stopping the WAL Procedure Store");
    // Nudge the sync thread out of its wait so it can observe running == false;
    // silently skip if the lock is currently held by a writer.
    if (lock.tryLock()) {
      try {
        waitCond.signalAll();
      } finally {
        lock.unlock();
      }
    }

    if (!abort) {
      try {
        syncThread.join();
      } catch (InterruptedException e) {
        // Preserve the interrupt status for the caller.
        Thread.currentThread().interrupt();
      }
    }

    // Close the writer
    closeStream();

    // Close the old logs
    // they should be already closed, this is just in case the load fails
    // and we call start() and then stop()
    for (ProcedureWALFile log: logs) {
      log.close();
    }
    logs.clear();
  }
@Override
public boolean isRunning() {
return running.get();
}
@Override
public int getNumThreads() {
return slots == null ? 0 : slots.length;
}
  /** @return the tracker recording which procedures live in which log */
  public ProcedureStoreTracker getStoreTracker() {
    return storeTracker;
  }
@Override
public void registerListener(ProcedureStoreListener listener) {
this.listeners.add(listener);
}
@Override
public boolean unregisterListener(ProcedureStoreListener listener) {
return this.listeners.remove(listener);
}
  /**
   * Acquires exclusive ownership of the WAL directory. Repeatedly scans the
   * existing logs, computes the next log id, and tries to roll a fresh log;
   * if another process creates a log concurrently (detected either by the
   * roll failing or by a higher max id appearing afterwards), the attempt is
   * retried until this store wins the race or the store is stopped.
   *
   * @throws IOException if listing, lease recovery or log creation fails
   */
  @Override
  public void recoverLease() throws IOException {
    LOG.info("Starting WAL Procedure Store lease recovery");
    FileStatus[] oldLogs = getLogFiles();
    while (running.get()) {
      // Get Log-MaxID and recover lease on old logs
      flushLogId = initOldLogs(oldLogs) + 1;

      // Create new state-log
      if (!rollWriter(flushLogId)) {
        // someone else has already created this log
        LOG.debug("someone else has already created log " + flushLogId);
        continue;
      }

      // We have the lease on the log
      oldLogs = getLogFiles();
      if (getMaxLogId(oldLogs) > flushLogId) {
        // Someone else created new logs
        LOG.debug("someone else created new logs. expected maxLogId < " + flushLogId);
        // Our freshly rolled log lost the race: drop it and retry.
        logs.getLast().removeFile();
        continue;
      }

      LOG.info("lease acquired flushLogId=" + flushLogId);
      break;
    }
  }
  /**
   * Replays the old logs (everything except the current one, which is skipped)
   * and returns an iterator over the recovered procedures, or null when there
   * is nothing to replay. Logs the loader reports as replaceable are removed
   * afterwards; corrupted logs are remembered in {@code corruptedLogs}.
   *
   * @return iterator over recovered procedures, or null if only the current log exists
   * @throws IOException if reading a log fails
   * @throws RuntimeException if called before {@link #recoverLease()}
   */
  @Override
  public Iterator<Procedure> load() throws IOException {
    if (logs.isEmpty()) {
      throw new RuntimeException("recoverLease() must be called before loading data");
    }

    // Nothing to do, If we have only the current log.
    if (logs.size() == 1) {
      LOG.debug("No state logs to replay");
      return null;
    }

    // Load the old logs
    final ArrayList<ProcedureWALFile> toRemove = new ArrayList<ProcedureWALFile>();
    // Iterate newest-to-oldest so newer entries win during replay.
    Iterator<ProcedureWALFile> it = logs.descendingIterator();
    it.next(); // Skip the current log
    try {
      return ProcedureWALFormat.load(it, storeTracker, new ProcedureWALFormat.Loader() {
        @Override
        public void removeLog(ProcedureWALFile log) {
          // Deferred: deletion happens in the finally block below.
          toRemove.add(log);
        }

        @Override
        public void markCorruptedWAL(ProcedureWALFile log, IOException e) {
          if (corruptedLogs == null) {
            corruptedLogs = new HashSet<ProcedureWALFile>();
          }
          corruptedLogs.add(log);
          // TODO: sideline corrupted log
        }
      });
    } finally {
      if (!toRemove.isEmpty()) {
        for (ProcedureWALFile log: toRemove) {
          removeLogFile(log);
        }
      }
    }
  }
@Override
public void insert(final Procedure proc, final Procedure[] subprocs) {
  // Persist a new procedure (and any sub-procedures) to the WAL, blocking
  // until the write has been synced, then record it in the store tracker.
  if (LOG.isTraceEnabled()) {
    LOG.trace("insert " + proc + " subproc=" + Arrays.toString(subprocs));
  }
  ByteSlot slot = acquireSlot();
  long logId = -1;
  try {
    // Serialize the insert
    if (subprocs != null) {
      ProcedureWALFormat.writeInsert(slot, proc, subprocs);
    } else {
      // A procedure inserted without subprocs must be a root procedure.
      assert !proc.hasParent();
      ProcedureWALFormat.writeInsert(slot, proc);
    }
    // Push the transaction data and wait until it is persisted
    logId = pushData(slot);
  } catch (IOException e) {
    // We are not able to serialize the procedure.
    // this is a code error, and we are not able to go on.
    LOG.fatal("Unable to serialize one of the procedure: proc=" + proc +
        " subprocs=" + Arrays.toString(subprocs), e);
    throw new RuntimeException(e);
  } finally {
    // Return the buffer to the pool regardless of outcome.
    releaseSlot(slot);
  }
  // Update the store tracker
  synchronized (storeTracker) {
    storeTracker.insert(proc, subprocs);
  }
}
@Override
public void update(final Procedure proc) {
  // Persist a state update for an existing procedure, then update the
  // tracker. If this write made every tracked entry up-to-date in the
  // current log, the older logs become obsolete and are removed.
  if (LOG.isTraceEnabled()) {
    LOG.trace("update " + proc);
  }
  ByteSlot slot = acquireSlot();
  long logId = -1;
  try {
    // Serialize the update
    ProcedureWALFormat.writeUpdate(slot, proc);
    // Push the transaction data and wait until it is persisted
    logId = pushData(slot);
  } catch (IOException e) {
    // We are not able to serialize the procedure.
    // this is a code error, and we are not able to go on.
    LOG.fatal("Unable to serialize the procedure: " + proc, e);
    throw new RuntimeException(e);
  } finally {
    releaseSlot(slot);
  }
  // Update the store tracker
  boolean removeOldLogs = false;
  synchronized (storeTracker) {
    storeTracker.update(proc);
    // Only consider cleanup if the write landed in the current log.
    if (logId == flushLogId) {
      removeOldLogs = storeTracker.isUpdated();
    }
  }
  if (removeOldLogs) {
    // Everything live is in log 'logId'; drop all logs before it.
    removeAllLogs(logId - 1);
  }
}
@Override
public void delete(final long procId) {
  // Persist the deletion of a procedure. When the tracker becomes empty and
  // enough bytes were synced, roll to a fresh log and drop the old ones.
  if (LOG.isTraceEnabled()) {
    LOG.trace("delete " + procId);
  }
  ByteSlot slot = acquireSlot();
  long logId = -1;
  try {
    // Serialize the delete
    ProcedureWALFormat.writeDelete(slot, procId);
    // Push the transaction data and wait until it is persisted
    logId = pushData(slot);
  } catch (IOException e) {
    // We are not able to serialize the procedure.
    // this is a code error, and we are not able to go on.
    LOG.fatal("Unable to serialize the procedure: " + procId, e);
    throw new RuntimeException(e);
  } finally {
    releaseSlot(slot);
  }
  boolean removeOldLogs = false;
  synchronized (storeTracker) {
    storeTracker.delete(procId);
    if (logId == flushLogId) {
      // No live procedures left and the current log grew past the roll
      // threshold: start a new (empty) log before discarding old ones.
      if (storeTracker.isEmpty() && totalSynced.get() > rollThreshold) {
        removeOldLogs = rollWriterOrDie(logId + 1);
      }
    }
  }
  if (removeOldLogs) {
    removeAllLogs(logId);
  }
}
private ByteSlot acquireSlot() {
  // Reuse a pooled buffer when one is available; otherwise allocate fresh.
  final ByteSlot cached = slotsCache.poll();
  if (cached != null) {
    return cached;
  }
  return new ByteSlot();
}
private void releaseSlot(final ByteSlot slot) {
  // Clear the buffer's content before returning it to the pool so the next
  // acquireSlot() starts from a clean state.
  slot.reset();
  slotsCache.offer(slot);
}
private long pushData(final ByteSlot slot) {
  // Hand a serialized transaction to the sync thread and block until it has
  // been flushed to disk. Returns the id of the log the data went into, or
  // -1 if the wait was interrupted (which also triggers an abort signal).
  assert isRunning() && !logs.isEmpty() : "recoverLease() must be called before inserting data";
  long logId = -1;
  lock.lock();
  try {
    // Wait for the sync to be completed
    while (true) {
      if (inSync.get()) {
        // A sync is in flight: wait for it to finish before enqueueing.
        syncCond.await();
      } else if (slotIndex == slots.length) {
        // Slot array is full: wake the sync thread and wait for room.
        slotCond.signal();
        syncCond.await();
      } else {
        break;
      }
    }
    slots[slotIndex++] = slot;
    logId = flushLogId;
    // Notify that there is new data
    if (slotIndex == 1) {
      waitCond.signal();
    }
    // Notify that the slots are full
    if (slotIndex == slots.length) {
      waitCond.signal();
      slotCond.signal();
    }
    // Block until the sync thread flushes our slot and signals completion.
    syncCond.await();
  } catch (InterruptedException e) {
    // Preserve interrupt status and abort the process; logId stays -1.
    Thread.currentThread().interrupt();
    sendAbortProcessSignal();
  } finally {
    lock.unlock();
  }
  return logId;
}
private void syncLoop() throws IOException {
  // Background sync thread: batch pending slots, flush them to the WAL, and
  // wake all producers blocked in pushData(). Runs until the store stops.
  inSync.set(false);
  while (running.get()) {
    lock.lock();
    try {
      // Wait until new data is available
      if (slotIndex == 0) {
        if (LOG.isTraceEnabled()) {
          float rollTsSec = (System.currentTimeMillis() - lastRollTs) / 1000.0f;
          LOG.trace(String.format("Waiting for data. flushed=%s (%s/sec)",
              StringUtils.humanSize(totalSynced.get()),
              StringUtils.humanSize(totalSynced.get() / rollTsSec)));
        }
        waitCond.await();
        if (slotIndex == 0) {
          // no data.. probably a stop()
          continue;
        }
      }
      // Wait SYNC_WAIT_MSEC or the signal of "slots full" before flushing
      long syncWaitSt = System.currentTimeMillis();
      if (slotIndex != slots.length) {
        slotCond.await(syncWaitMsec, TimeUnit.MILLISECONDS);
      }
      long syncWaitMs = System.currentTimeMillis() - syncWaitSt;
      if (LOG.isTraceEnabled() && (syncWaitMs > 10 || slotIndex < slots.length)) {
        float rollSec = (System.currentTimeMillis() - lastRollTs) / 1000.0f;
        LOG.trace("sync wait " + StringUtils.humanTimeDiff(syncWaitMs) +
            " slotIndex=" + slotIndex +
            " totalSynced=" + StringUtils.humanSize(totalSynced.get()) +
            " " + StringUtils.humanSize(totalSynced.get() / rollSec) + "/sec");
      }
      // inSync blocks new producers from enqueueing while we flush.
      inSync.set(true);
      totalSynced.addAndGet(syncSlots());
      slotIndex = 0;
      inSync.set(false);
      // Release every producer waiting for its data to be persisted.
      syncCond.signalAll();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      sendAbortProcessSignal();
    } finally {
      lock.unlock();
    }
  }
}
private long syncSlots() {
  // Flush the currently queued slots, retrying on failure. After
  // MAX_RETRIES_BEFORE_ABORT consecutive failures an abort signal is sent,
  // but the loop keeps retrying as long as the store is running.
  long synced = 0;
  int attempts = 0;
  do {
    try {
      synced = syncSlots(stream, slots, 0, slotIndex);
      break;
    } catch (Throwable t) {
      attempts++;
      if (attempts == MAX_RETRIES_BEFORE_ABORT) {
        LOG.error("sync slot failed, abort.", t);
        sendAbortProcessSignal();
      }
    }
  } while (running.get());
  return synced;
}
protected long syncSlots(FSDataOutputStream stream, ByteSlot[] slots, int offset, int count)
    throws IOException {
  // Write 'count' slots starting at 'offset' to the stream, then make the
  // data durable (hsync) or visible to readers (hflush) per configuration.
  long totalSynced = 0;
  for (int i = offset; i < offset + count; ++i) {
    final ByteSlot data = slots[i];
    data.writeTo(stream);
    totalSynced += data.size();
  }
  if (useHsync) {
    stream.hsync();
  } else {
    stream.hflush();
  }
  if (LOG.isTraceEnabled()) {
    LOG.trace("Sync slots=" + count + '/' + slots.length +
        " flushed=" + StringUtils.humanSize(totalSynced));
  }
  return totalSynced;
}
private void sendAbortProcessSignal() {
  // Tell every registered listener that the store hit an unrecoverable
  // condition. Iterating an empty collection is a no-op, so no guard needed.
  for (ProcedureStoreListener listener : this.listeners) {
    listener.abortProcess();
  }
}
private boolean rollWriterOrDie(final long logId) {
  // Attempt a log roll; on I/O failure signal an abort instead of throwing.
  boolean rolled = false;
  try {
    rolled = rollWriter(logId);
  } catch (IOException e) {
    LOG.warn("Unable to roll the log", e);
    sendAbortProcessSignal();
  }
  return rolled;
}
private boolean rollWriter(final long logId) throws IOException {
  // Start a new state-log with the given id and atomically swap it in as the
  // current write target. Returns false if another writer already created a
  // log with this id (used by recoverLease() to detect lost races).
  ProcedureWALHeader header = ProcedureWALHeader.newBuilder()
      .setVersion(ProcedureWALFormat.HEADER_VERSION)
      .setType(ProcedureWALFormat.LOG_TYPE_STREAM)
      .setMinProcId(storeTracker.getMinProcId())
      .setLogId(logId)
      .build();
  FSDataOutputStream newStream = null;
  Path newLogFile = null;
  long startPos = -1;
  // Phase 1 (no lock): create the file and write the header. Creation with
  // overwrite=false is the mutual-exclusion point between competing writers.
  try {
    newLogFile = getLogFilePath(logId);
    newStream = fs.create(newLogFile, false);
    ProcedureWALFormat.writeHeader(newStream, header);
    startPos = newStream.getPos();
  } catch (FileAlreadyExistsException e) {
    LOG.error("Log file with id=" + logId + " already exists", e);
    return false;
  }
  // Phase 2 (under lock): close the old stream and install the new one.
  lock.lock();
  try {
    closeStream();
    synchronized (storeTracker) {
      storeTracker.resetUpdates();
    }
    stream = newStream;
    flushLogId = logId;
    totalSynced.set(0);
    lastRollTs = System.currentTimeMillis();
    logs.add(new ProcedureWALFile(fs, newLogFile, header, startPos));
  } finally {
    lock.unlock();
  }
  LOG.info("Roll new state log: " + logId);
  return true;
}
private void closeStream() {
  // Close the current log stream, first appending the tracker trailer so the
  // log can be loaded quickly later. Failures are logged, never propagated,
  // and 'stream' is always nulled out so a half-closed stream is not reused.
  try {
    if (stream != null) {
      try {
        ProcedureWALFormat.writeTrailer(stream, storeTracker);
      } catch (IOException e) {
        // A missing trailer only costs a slower load; not fatal.
        LOG.warn("Unable to write the trailer: " + e.getMessage());
      }
      stream.close();
    }
  } catch (IOException e) {
    LOG.error("Unable to close the stream", e);
  } finally {
    stream = null;
  }
}
/**
 * Removes every tracked state log whose id is less than or equal to
 * {@code lastLogId}; logs with a greater id are kept. Fixes the
 * "less then" typo in the log message.
 */
private void removeAllLogs(long lastLogId) {
  LOG.info("Remove all state logs with ID less than " + lastLogId);
  while (!logs.isEmpty()) {
    // Logs are kept sorted, so stop at the first one newer than lastLogId.
    ProcedureWALFile log = logs.getFirst();
    if (lastLogId < log.getLogId()) {
      break;
    }
    removeLogFile(log);
  }
}
private boolean removeLogFile(final ProcedureWALFile log) {
  // Delete the log's backing file and drop it from the tracked list.
  // Returns false (leaving the entry tracked) if the deletion fails.
  try {
    LOG.debug("remove log: " + log);
    log.removeFile();
    logs.remove(log);
    return true;
  } catch (IOException e) {
    LOG.error("unable to remove log " + log, e);
    return false;
  }
}
public Set<ProcedureWALFile> getCorruptedLogs() {
  // May be null when no corruption was detected during load().
  return this.corruptedLogs;
}
// ==========================================================================
// FileSystem Log Files helpers
// ==========================================================================
public Path getLogDir() {
  // Directory holding the state-log files.
  return logDir;
}
public FileSystem getFileSystem() {
  // FileSystem instance used for all log file operations.
  return fs;
}
protected Path getLogFilePath(final long logId) throws IOException {
  // Log files are named "state-<20-digit zero-padded id>.log" under logDir.
  final String fileName = String.format("state-%020d.log", logId);
  return new Path(logDir, fileName);
}
/**
 * Extracts the numeric log id from a file name of the form
 * {@code state-%020d.log} (see {@link #getLogFilePath(long)}).
 *
 * <p>{@link Long#parseLong(String)} already accepts leading zeros, so the
 * previous manual zero-stripping loop was unnecessary — and it was broken
 * for an all-zero id ("state-000...000.log"), where stripping every digit
 * left an empty substring and threw {@link NumberFormatException}.
 *
 * @param name the log file name
 * @return the parsed log id
 */
private static long getLogIdFromName(final String name) {
  int end = name.lastIndexOf(".log");
  int start = name.lastIndexOf('-') + 1;
  return Long.parseLong(name.substring(start, end));
}
private FileStatus[] getLogFiles() throws IOException {
  // Accept only files named like our state logs: "state-*.log".
  final PathFilter stateLogFilter = new PathFilter() {
    @Override
    public boolean accept(Path path) {
      final String fileName = path.getName();
      return fileName.startsWith("state-") && fileName.endsWith(".log");
    }
  };
  try {
    return fs.listStatus(logDir, stateLogFilter);
  } catch (FileNotFoundException e) {
    // Directory not created yet: report "no logs" (null) rather than fail.
    LOG.warn("log directory not found: " + e.getMessage());
    return null;
  }
}
private long getMaxLogId(final FileStatus[] logFiles) {
  // Highest log id found among the given files; 0 when none exist.
  long maxLogId = 0;
  if (logFiles != null) {
    for (final FileStatus logFile : logFiles) {
      final long logId = getLogIdFromName(logFile.getPath().getName());
      if (logId > maxLogId) {
        maxLogId = logId;
      }
    }
  }
  return maxLogId;
}
/**
 * Recovers the lease on each existing log file, opens the valid ones into
 * {@code this.logs} (sorted), and seeds the store tracker from the newest.
 *
 * @return Max-LogID of the specified log file set
 */
private long initOldLogs(final FileStatus[] logFiles) throws IOException {
  this.logs.clear();
  long maxLogId = 0;
  if (logFiles != null && logFiles.length > 0) {
    for (int i = 0; i < logFiles.length; ++i) {
      final Path logPath = logFiles[i].getPath();
      // Ensure any previous writer's lease on this file is released.
      leaseRecovery.recoverFileLease(fs, logPath);
      maxLogId = Math.max(maxLogId, getLogIdFromName(logPath.getName()));
      // initOldLog() returns null for empty/invalid logs (and deletes them).
      ProcedureWALFile log = initOldLog(logFiles[i]);
      if (log != null) {
        this.logs.add(log);
      }
    }
    Collections.sort(this.logs);
    initTrackerFromOldLogs();
  }
  return maxLogId;
}
private void initTrackerFromOldLogs() {
  // TODO: Load the most recent tracker available
  // Seed the in-memory tracker from the newest log's stored tracker; on
  // failure, fall back to an empty tracker marked as partial.
  if (logs.isEmpty()) {
    return;
  }
  final ProcedureWALFile newestLog = logs.getLast();
  try {
    newestLog.readTracker(storeTracker);
  } catch (IOException e) {
    LOG.warn("Unable to read tracker for " + newestLog + " - " + e.getMessage());
    // try the next one...
    storeTracker.clear();
    storeTracker.setPartialFlag(true);
  }
}
private ProcedureWALFile initOldLog(final FileStatus logFile) throws IOException {
  // Open an existing log for replay. Returns null (after deleting the file)
  // for empty, uninitialized, or unfinished-compacted logs; throws only on
  // genuine read errors.
  ProcedureWALFile log = new ProcedureWALFile(fs, logFile);
  if (logFile.getLen() == 0) {
    // Zero-length file: the writer died before the header was flushed.
    LOG.warn("Remove uninitialized log " + logFile);
    log.removeFile();
    return null;
  }
  LOG.debug("opening state-log: " + logFile);
  try {
    log.open();
  } catch (ProcedureWALFormat.InvalidWALDataException e) {
    // Header present but invalid: treat like an uninitialized log.
    LOG.warn("Remove uninitialized log " + logFile, e);
    log.removeFile();
    return null;
  } catch (IOException e) {
    String msg = "Unable to read state log: " + logFile;
    LOG.error(msg, e);
    throw new IOException(msg, e);
  }
  if (log.isCompacted()) {
    try {
      log.readTrailer();
    } catch (IOException e) {
      // unfinished compacted log throw it away
      LOG.warn("Unfinished compacted log " + logFile, e);
      log.removeFile();
      return null;
    }
  }
  return log;
}
}
| apache-2.0 |
tyagihas/DataflowJavaSDK | sdk/src/main/java/com/google/cloud/dataflow/sdk/runners/worker/WindmillSink.java | 7465 | /*******************************************************************************
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package com.google.cloud.dataflow.sdk.runners.worker;
import static com.google.cloud.dataflow.sdk.util.Structs.getString;
import static com.google.cloud.dataflow.sdk.util.ValueWithRecordId.ValueWithRecordIdCoder;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.coders.KvCoder;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.runners.worker.windmill.Windmill;
import com.google.cloud.dataflow.sdk.transforms.windowing.BoundedWindow;
import com.google.cloud.dataflow.sdk.transforms.windowing.PaneInfo;
import com.google.cloud.dataflow.sdk.transforms.windowing.PaneInfo.PaneInfoCoder;
import com.google.cloud.dataflow.sdk.util.CloudObject;
import com.google.cloud.dataflow.sdk.util.ExecutionContext;
import com.google.cloud.dataflow.sdk.util.ValueWithRecordId;
import com.google.cloud.dataflow.sdk.util.WindowedValue;
import com.google.cloud.dataflow.sdk.util.WindowedValue.FullWindowedValueCoder;
import com.google.cloud.dataflow.sdk.util.common.CounterSet;
import com.google.cloud.dataflow.sdk.util.common.worker.Sink;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.protobuf.ByteString;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
 * A {@link Sink} that forwards {@link WindowedValue}s to Windmill as keyed
 * message bundles on a named destination stream.
 *
 * @param <T> element type carried by each windowed value
 */
class WindmillSink<T> extends Sink<WindowedValue<T>> {
  private WindmillStreamWriter writer;
  // Coder for the element payload (the T inside the WindowedValue).
  private final Coder<T> valueCoder;
  // Coder for the collection of windows attached to each value.
  private final Coder<Collection<? extends BoundedWindow>> windowsCoder;
  private StreamingModeExecutionContext context;

  WindmillSink(String destinationName,
      Coder<WindowedValue<T>> coder,
      StreamingModeExecutionContext context) {
    this.writer = new WindmillStreamWriter(destinationName);
    // The cast splits the full coder into its value and windows components.
    FullWindowedValueCoder<T> inputCoder = (FullWindowedValueCoder<T>) coder;
    this.valueCoder = inputCoder.getValueCoder();
    this.windowsCoder = inputCoder.getWindowsCoder();
    this.context = context;
  }

  /**
   * Encodes pane info (NESTED context) followed by the windows (OUTER
   * context) into one metadata byte string. The decode helpers below must
   * read in exactly this order.
   */
  public static ByteString encodeMetadata(
      Coder<Collection<? extends BoundedWindow>> windowsCoder,
      Collection<? extends BoundedWindow> windows,
      PaneInfo pane) throws IOException {
    ByteString.Output stream = ByteString.newOutput();
    PaneInfoCoder.INSTANCE.encode(pane, stream, Coder.Context.NESTED);
    windowsCoder.encode(windows, stream, Coder.Context.OUTER);
    return stream.toByteString();
  }

  /** Decodes only the leading {@link PaneInfo} from encoded metadata. */
  public static PaneInfo decodeMetadataPane(ByteString metadata) throws IOException {
    InputStream inStream = metadata.newInput();
    return PaneInfoCoder.INSTANCE.decode(inStream, Coder.Context.NESTED);
  }

  /** Decodes the windows from encoded metadata, skipping the pane prefix. */
  public static Collection<? extends BoundedWindow> decodeMetadataWindows(
      Coder<Collection<? extends BoundedWindow>> windowsCoder,
      ByteString metadata) throws IOException {
    InputStream inStream = metadata.newInput();
    // Consume (and discard) the pane info so the stream is positioned at
    // the windows section.
    PaneInfoCoder.INSTANCE.decode(inStream, Coder.Context.NESTED);
    return windowsCoder.decode(inStream, Coder.Context.OUTER);
  }

  /** Factory used by the worker harness; reads "stream_id" from the spec. */
  public static <T> WindmillSink<T> create(PipelineOptions options,
      CloudObject spec,
      Coder<WindowedValue<T>> coder,
      ExecutionContext context,
      CounterSet.AddCounterMutator addCounterMutator)
      throws Exception {
    return new WindmillSink<>(getString(spec, "stream_id"), coder,
        (StreamingModeExecutionContext) context);
  }

  @Override
  public SinkWriter<WindowedValue<T>> writer() {
    return writer;
  }

  /** Buffers values per key and flushes them as one bundle on close(). */
  class WindmillStreamWriter implements SinkWriter<WindowedValue<T>> {
    // Per-key bundle builders, accumulated across add() calls.
    private Map<ByteString, Windmill.KeyedMessageBundle.Builder> productionMap;
    private final String destinationName;

    private WindmillStreamWriter(String destinationName) {
      this.destinationName = destinationName;
      productionMap = new HashMap<ByteString, Windmill.KeyedMessageBundle.Builder>();
    }

    /** Encodes an object with the given coder in the OUTER context. */
    private <T> ByteString encode(Coder<T> coder, T object) throws IOException {
      ByteString.Output stream = ByteString.newOutput();
      coder.encode(object, stream, Coder.Context.OUTER);
      return stream.toByteString();
    }

    /**
     * Adds one windowed value to the per-key bundle being built.
     *
     * @return the number of encoded bytes added (key + value + metadata + id)
     */
    @Override
    public long add(WindowedValue<T> data) throws IOException {
      ByteString key, value;
      ByteString id = ByteString.EMPTY;
      ByteString metadata = encodeMetadata(windowsCoder, data.getWindows(), data.getPane());
      if (valueCoder instanceof KvCoder) {
        // KV elements: the KV key becomes the Windmill key.
        KvCoder kvCoder = (KvCoder) valueCoder;
        KV kv = (KV) data.getValue();
        key = encode(kvCoder.getKeyCoder(), kv.getKey());
        Coder valueCoder = kvCoder.getValueCoder();
        // If ids are explicitly provided, use that instead of the windmill-generated id.
        // This is used when reading an UnboundedSource to deduplicate records.
        if (valueCoder instanceof ValueWithRecordIdCoder) {
          ValueWithRecordId valueAndId = (ValueWithRecordId) kv.getValue();
          value =
              encode(((ValueWithRecordIdCoder) valueCoder).getValueCoder(), valueAndId.getValue());
          id = ByteString.copyFrom(valueAndId.getId());
        } else {
          value = encode(valueCoder, kv.getValue());
        }
      } else {
        // Non-KV elements: reuse the key of the record being processed.
        key = context.getSerializedKey();
        value = encode(valueCoder, data.getValue());
      }
      Windmill.KeyedMessageBundle.Builder keyedOutput = productionMap.get(key);
      if (keyedOutput == null) {
        keyedOutput = Windmill.KeyedMessageBundle.newBuilder().setKey(key);
        productionMap.put(key, keyedOutput);
      }
      // Windmill timestamps are in microseconds.
      long timestampMicros = TimeUnit.MILLISECONDS.toMicros(data.getTimestamp().getMillis());
      Windmill.Message.Builder builder = Windmill.Message.newBuilder()
          .setTimestamp(timestampMicros)
          .setData(value)
          .setMetadata(metadata);
      keyedOutput.addMessages(builder.build());
      keyedOutput.addMessagesIds(id);
      return key.size() + value.size() + metadata.size() + id.size();
    }

    /** Flushes all buffered per-key bundles into the context's output. */
    @Override
    public void close() throws IOException {
      Windmill.OutputMessageBundle.Builder outputBuilder =
          Windmill.OutputMessageBundle.newBuilder().setDestinationStreamId(destinationName);
      for (Windmill.KeyedMessageBundle.Builder keyedOutput : productionMap.values()) {
        outputBuilder.addBundles(keyedOutput.build());
      }
      // Only emit a bundle if something was actually buffered.
      if (outputBuilder.getBundlesCount() > 0) {
        context.getOutputBuilder().addOutputMessages(outputBuilder.build());
      }
      productionMap.clear();
    }
  }

  @Override
  public boolean supportsRestart() {
    return true;
  }
}
| apache-2.0 |
thr0w/dyuproject | modules/web/src/main/java/com/dyuproject/web/CookieSession.java | 3324 | //========================================================================
//Copyright 2007-2008 David Yu dyuproject@gmail.com
//------------------------------------------------------------------------
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//========================================================================
package com.dyuproject.web;
import java.io.Serializable;
import java.util.Map;
import java.util.Set;
import org.mortbay.util.ajax.JSON;
import org.mortbay.util.ajax.JSON.Output;
/**
* Session attribues being stored on the client cookie.
*
* @author David Yu
* @created May 19, 2008
*/
/**
 * Session attribues being stored on the client cookie.
 *
 * <p>Serialized to/from JSON via {@link JSON.Convertible} using the short
 * keys "a" (attributes), "u" (time updated) and "i" (client IP). These keys
 * and the field layout are part of the wire format — do not rename.
 *
 * @author David Yu
 * @created May 19, 2008
 */
public final class CookieSession implements Serializable, JSON.Convertible
{

    private static final long serialVersionUID = 2009100634L;

    public static final String ATTR_NAME = "cs";

    // NOTE(review): left null by the public no-arg constructor until
    // fromJSON() runs; setAttribute() on a freshly new'ed instance would
    // NPE — confirm the no-arg ctor is only used by the JSON deserializer.
    private Map<String,Object> _attributes;
    // Millis timestamp of the last persist; 0 until first persisted.
    private long _timeUpdated = 0;
    private String _ip;
    // Not serialized: whether this session has been written to the cookie.
    private transient boolean _persisted = false;

    public CookieSession()
    {

    }

    CookieSession(Map<String,Object> attributes)
    {
        _attributes = attributes;
    }

    /**
     *
     * @param name
     * @param value can be any object/pojo.
     */
    public void setAttribute(String name, Object value)
    {
        _attributes.put(name, value);
    }

    public Object getAttribute(String name)
    {
        return _attributes.get(name);
    }

    public Set<String> getAttributeNames()
    {
        return _attributes.keySet();
    }

    public Map<String,Object> getAttributes()
    {
        return _attributes;
    }

    /** @return true if an attribute with the given name was removed. */
    public boolean removeAttribute(String name)
    {
        return _attributes.remove(name)!=null;
    }

    public Map<String,Object> getAttrs()
    {
        return _attributes;
    }

    public long getTimeUpdated()
    {
        return _timeUpdated;
    }

    // Called when the session is written to the client cookie.
    void markPersisted()
    {
        _persisted = true;
        _timeUpdated = System.currentTimeMillis();
    }

    public boolean isPersisted()
    {
        return _persisted;
    }

    public String getIP()
    {
        return _ip;
    }

    void setIP(String ip)
    {
        _ip = ip;
    }

    /** Restores state from a decoded JSON map (keys "a", "u", "i"). */
    @SuppressWarnings("unchecked")
    public void fromJSON(Map map)
    {
        _attributes = (Map<String,Object>)map.get("a");
        _timeUpdated = ((Number)map.get("u")).longValue();
        _ip = (String)map.get("i");
    }

    /** Writes state as JSON; null attributes/ip are simply omitted. */
    public void toJSON(Output out)
    {
        //out.addClass(getClass());
        if(_attributes!=null)
            out.add("a", _attributes);
        out.add("u", _timeUpdated);
        if(_ip!=null)
            out.add("i", _ip);
    }

}
| apache-2.0 |
DomenicPuzio/incubator-metron | metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/GrokParser.java | 8151 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.parsers;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import oi.thekraken.grok.api.Grok;
import oi.thekraken.grok.api.Match;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.metron.common.Constants;
import org.apache.metron.parsers.interfaces.MessageParser;
import org.json.simple.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
/**
 * A Metron {@link MessageParser} that parses raw telemetry lines with a Grok
 * expression loaded from HDFS or the classpath, producing JSON messages.
 *
 * <p>Configuration keys (see {@link #configure(Map)}): grokPath,
 * patternLabel, timestampField, timeFields, dateFormat, timeZone.
 */
public class GrokParser implements MessageParser<JSONObject>, Serializable {

  protected static final Logger LOG = LoggerFactory.getLogger(GrokParser.class);

  // Compiled lazily/re-created per instance; transient because Grok itself
  // is not serializable.
  protected transient Grok grok;
  // Location of the parser-specific grok pattern file (HDFS or classpath).
  protected String grokPath;
  // Name of the top-level pattern inside the pattern file to apply.
  protected String patternLabel;
  // Fields whose string values should be converted to epoch millis.
  protected List<String> timeFields = new ArrayList<>();
  // Field whose value becomes the message's canonical "timestamp".
  protected String timestampField;
  // NOTE(review): SimpleDateFormat is not thread-safe; confirm each parser
  // instance is confined to a single thread before sharing this field.
  protected SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S z");
  protected String patternsCommonDir = "/patterns/common";

  /** Reads parser settings from the sensor's parserConfig map. */
  @Override
  public void configure(Map<String, Object> parserConfig) {
    this.grokPath = (String) parserConfig.get("grokPath");
    this.patternLabel = (String) parserConfig.get("patternLabel");
    this.timestampField = (String) parserConfig.get("timestampField");
    List<String> timeFieldsParam = (List<String>) parserConfig.get("timeFields");
    if (timeFieldsParam != null) {
      this.timeFields = timeFieldsParam;
    }
    String dateFormatParam = (String) parserConfig.get("dateFormat");
    if (dateFormatParam != null) {
      this.dateFormat = new SimpleDateFormat(dateFormatParam);
    }
    String timeZoneParam = (String) parserConfig.get("timeZone");
    if (timeZoneParam != null) {
      dateFormat.setTimeZone(TimeZone.getTimeZone(timeZoneParam));
      LOG.debug("Grok Parser using provided TimeZone: {}", timeZoneParam);
    } else {
      // Default to UTC when no zone is configured.
      dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
      LOG.debug("Grok Parser using default TimeZone (UTC)");
    }
  }

  /**
   * Opens the named resource from HDFS if it exists there, otherwise falls
   * back to the classpath. May return null if found in neither place.
   */
  public InputStream openInputStream(String streamName) throws IOException {
    FileSystem fs = FileSystem.get(new Configuration());
    Path path = new Path(streamName);
    if(fs.exists(path)) {
      return fs.open(path);
    } else {
      return getClass().getResourceAsStream(streamName);
    }
  }

  /**
   * Loads the common and parser-specific pattern files and compiles the
   * configured pattern label. Wraps any failure in a RuntimeException.
   */
  @Override
  public void init() {
    grok = new Grok();
    try {
      InputStream commonInputStream = openInputStream(patternsCommonDir);
      if (LOG.isDebugEnabled()) {
        LOG.debug("Grok parser loading common patterns from: " + patternsCommonDir);
      }
      if (commonInputStream == null) {
        throw new RuntimeException(
            "Unable to initialize grok parser: Unable to load " + patternsCommonDir + " from either classpath or HDFS");
      }
      grok.addPatternFromReader(new InputStreamReader(commonInputStream));
      if (LOG.isDebugEnabled()) {
        LOG.debug("Loading parser-specific patterns from: " + grokPath);
      }
      InputStream patterInputStream = openInputStream(grokPath);
      if (patterInputStream == null) {
        throw new RuntimeException("Grok parser unable to initialize grok parser: Unable to load " + grokPath
            + " from either classpath or HDFS");
      }
      grok.addPatternFromReader(new InputStreamReader(patterInputStream));
      if (LOG.isDebugEnabled()) {
        LOG.debug("Grok parser set the following grok expression: " + grok.getNamedRegexCollectionById(patternLabel));
      }
      String grokPattern = "%{" + patternLabel + "}";
      grok.compile(grokPattern);
      if (LOG.isDebugEnabled()) {
        LOG.debug("Compiled grok pattern" + grokPattern);
      }
    } catch (Throwable e) {
      LOG.error(e.getMessage(), e);
      throw new RuntimeException("Grok parser Error: " + e.getMessage(), e);
    }
  }

  /**
   * Parses one raw message into a single-element list of JSON objects.
   * Adds "original_string", converts configured time fields to epoch, and
   * sets the canonical "timestamp" field when configured.
   */
  @SuppressWarnings("unchecked")
  @Override
  public List<JSONObject> parse(byte[] rawMessage) {
    // Lazily (re)initialize after deserialization, since 'grok' is transient.
    if (grok == null) {
      init();
    }
    List<JSONObject> messages = new ArrayList<>();
    String originalMessage = null;
    try {
      originalMessage = new String(rawMessage, "UTF-8");
      if (LOG.isDebugEnabled()) {
        LOG.debug("Grok parser parsing message: " + originalMessage);
      }
      Match gm = grok.match(originalMessage);
      gm.captures();
      JSONObject message = new JSONObject();
      message.putAll(gm.toMap());
      // An empty capture map means the pattern did not match at all.
      if (message.size() == 0)
        throw new RuntimeException("Grok statement produced a null message. Original message was: "
            + originalMessage + " and the parsed message was: " + message + " . Check the pattern at: "
            + grokPath);
      message.put("original_string", originalMessage);
      for (String timeField : timeFields) {
        String fieldValue = (String) message.get(timeField);
        if (fieldValue != null) {
          message.put(timeField, toEpoch(fieldValue));
        }
      }
      if (timestampField != null) {
        message.put(Constants.Fields.TIMESTAMP.getName(), formatTimestamp(message.get(timestampField)));
      }
      // Grok adds the pattern label itself as a capture; drop it.
      message.remove(patternLabel);
      postParse(message);
      messages.add(message);
      if (LOG.isDebugEnabled()) {
        LOG.debug("Grok parser parsed message: " + message);
      }
    } catch (Exception e) {
      LOG.error(e.getMessage(), e);
      throw new IllegalStateException("Grok parser Error: " + e.getMessage() + " on " + originalMessage , e);
    }
    return messages;
  }

  /** A message is valid iff it carries a positive Long "timestamp". */
  @Override
  public boolean validate(JSONObject message) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Grok parser validating message: " + message);
    }
    Object timestampObject = message.get(Constants.Fields.TIMESTAMP.getName());
    if (timestampObject instanceof Long) {
      Long timestamp = (Long) timestampObject;
      if (timestamp > 0) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Grok parser validated message: " + message);
        }
        return true;
      }
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("Grok parser did not validate message: " + message);
    }
    return false;
  }

  /** Hook for subclasses to post-process the parsed message. No-op here. */
  protected void postParse(JSONObject message) {}

  /** Converts a datetime string to epoch millis using the configured format. */
  protected long toEpoch(String datetime) throws ParseException {
    LOG.debug("Grok parser converting timestamp to epoch: {}", datetime);
    LOG.debug("Grok parser's DateFormat has TimeZone: {}", dateFormat.getTimeZone());
    Date date = dateFormat.parse(datetime);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Grok parser converted timestamp to epoch: " + date);
    }
    return date.getTime();
  }

  /**
   * Normalizes the timestamp field to a long: numbers pass through; strings
   * have '.' separators stripped and are parsed as a long.
   */
  protected long formatTimestamp(Object value) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Grok parser formatting timestamp" + value);
    }
    if (value == null) {
      throw new RuntimeException(patternLabel + " pattern does not include field " + timestampField);
    }
    if (value instanceof Number) {
      return ((Number) value).longValue();
    } else {
      // e.g. "1467011728.007" -> "1467011728007"
      return Long.parseLong(Joiner.on("").join(Splitter.on('.').split(value + "")));
    }
  }

}
| apache-2.0 |
mehdi149/OF_COMPILER_0.1 | gen-src/main/java/org/projectfloodlight/openflow/protocol/bsntlv/OFBsnTlvUdpDst.java | 1817 | // Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_interface.java
// Do not modify
package org.projectfloodlight.openflow.protocol.bsntlv;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import io.netty.buffer.ByteBuf;
/**
 * OpenFlow BSN TLV carrying a UDP destination port value.
 *
 * <p>NOTE(review): this file is generated by LoxiGen ("Do not modify");
 * only comments are added here — regenerate rather than hand-edit code.
 */
public interface OFBsnTlvUdpDst extends OFObject, OFBsnTlv {
    /** TLV type code. */
    int getType();
    /** UDP destination port value carried by this TLV. */
    int getValue();
    OFVersion getVersion();
    /** Serializes this TLV into the given buffer. */
    void writeTo(ByteBuf channelBuffer);
    Builder createBuilder();
    /** Builder for immutable {@link OFBsnTlvUdpDst} instances. */
    public interface Builder extends OFBsnTlv.Builder {
        OFBsnTlvUdpDst build();
        int getType();
        int getValue();
        Builder setValue(int value);
        OFVersion getVersion();
    }
}
| apache-2.0 |
massakam/pulsar | pulsar-functions/instance/src/main/java/org/apache/pulsar/functions/instance/stats/SinkStatsManager.java | 14279 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.functions.instance.stats;
import com.google.common.collect.EvictingQueue;
import io.prometheus.client.Counter;
import io.prometheus.client.Gauge;
import java.util.Arrays;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import lombok.Getter;
import org.apache.pulsar.common.util.RateLimiter;
import org.apache.pulsar.functions.proto.InstanceCommunication;
/**
 * Prometheus stats manager for a Pulsar sink instance.
 *
 * <p>Tracks cumulative and 1-minute-windowed counts of records received and
 * written, system/sink exception totals, the timestamp of the last invocation,
 * and per-exception gauges (rate-limited so exception bursts do not flood
 * Prometheus). The most recent exceptions are also retained for
 * function-status reporting.
 *
 * <p>This class performs no internal synchronization; reported values may
 * therefore lag slightly under concurrent updates, which is acceptable for
 * metrics reporting.
 */
public class SinkStatsManager extends ComponentStatsManager {

    public static final String PULSAR_SINK_METRICS_PREFIX = "pulsar_sink_";

    /** Declare metric names. **/
    public static final String SYSTEM_EXCEPTIONS_TOTAL = "system_exceptions_total";
    public static final String SINK_EXCEPTIONS_TOTAL = "sink_exceptions_total";
    public static final String LAST_INVOCATION = "last_invocation";
    public static final String RECEIVED_TOTAL = "received_total";
    public static final String WRITTEN_TOTAL = "written_total";
    public static final String SYSTEM_EXCEPTIONS_TOTAL_1min = "system_exceptions_total_1min";
    public static final String SINK_EXCEPTIONS_TOTAL_1min = "sink_exceptions_total_1min";
    public static final String RECEIVED_TOTAL_1min = "received_total_1min";
    public static final String WRITTEN_TOTAL_1min = "written_total_1min";

    /** Number of recent exceptions retained for function-status reporting. */
    private static final int MAX_RETAINED_EXCEPTIONS = 10;

    /** At most this many exception gauge samples are published per minute. */
    private static final int EXCEPTION_REPORT_PERMITS_PER_MINUTE = 5;

    /** Declare Prometheus stats. **/
    private final Counter statTotalRecordsReceived;
    private final Counter statTotalSysExceptions;
    private final Counter statTotalSinkExceptions;
    private final Counter statTotalWritten;
    private final Gauge statLastInvocation;

    // windowed metrics, cleared whenever reset() is invoked
    private final Counter statTotalRecordsReceived1min;
    private final Counter statTotalSysExceptions1min;
    private final Counter statTotalSinkExceptions1min;
    private final Counter statTotalWritten1min;

    // exception gauges, labeled with the exception message
    final Gauge sysExceptions;
    final Gauge sinkExceptions;

    // As an optimization, cache the label children so the hot paths avoid a
    // labels() lookup on every event.
    private final Counter.Child statTotalRecordsReceivedChild;
    private final Counter.Child statTotalSysExceptionsChild;
    private final Counter.Child statTotalSinkExceptionsChild;
    private final Counter.Child statTotalWrittenChild;
    private final Gauge.Child statLastInvocationChild;

    // Children of the windowed counters; re-resolved after every reset().
    private Counter.Child statTotalRecordsReceivedChild1min;
    private Counter.Child statTotalSysExceptionsChild1min;
    private Counter.Child statTotalSinkExceptionsChild1min;
    private Counter.Child statTotalWrittenChild1min;

    @Getter
    private EvictingQueue<InstanceCommunication.FunctionStatus.ExceptionInformation> latestSystemExceptions =
            EvictingQueue.create(MAX_RETAINED_EXCEPTIONS);
    @Getter
    private EvictingQueue<InstanceCommunication.FunctionStatus.ExceptionInformation> latestSinkExceptions =
            EvictingQueue.create(MAX_RETAINED_EXCEPTIONS);

    // Throttle how often individual exceptions are reported to Prometheus.
    private final RateLimiter sysExceptionRateLimiter;
    private final RateLimiter sinkExceptionRateLimiter;

    public SinkStatsManager(FunctionCollectorRegistry collectorRegistry, String[] metricsLabels,
                            ScheduledExecutorService scheduledExecutorService) {
        super(collectorRegistry, metricsLabels, scheduledExecutorService);

        statTotalRecordsReceived = registerCounter(collectorRegistry,
                PULSAR_SINK_METRICS_PREFIX + RECEIVED_TOTAL,
                "Total number of records sink has received from Pulsar topic(s).");
        statTotalRecordsReceivedChild = statTotalRecordsReceived.labels(metricsLabels);

        statTotalSysExceptions = registerCounter(collectorRegistry,
                PULSAR_SINK_METRICS_PREFIX + SYSTEM_EXCEPTIONS_TOTAL,
                "Total number of system exceptions.");
        statTotalSysExceptionsChild = statTotalSysExceptions.labels(metricsLabels);

        statTotalSinkExceptions = registerCounter(collectorRegistry,
                PULSAR_SINK_METRICS_PREFIX + SINK_EXCEPTIONS_TOTAL,
                "Total number of sink exceptions.");
        statTotalSinkExceptionsChild = statTotalSinkExceptions.labels(metricsLabels);

        statTotalWritten = registerCounter(collectorRegistry,
                PULSAR_SINK_METRICS_PREFIX + WRITTEN_TOTAL,
                "Total number of records processed by sink.");
        statTotalWrittenChild = statTotalWritten.labels(metricsLabels);

        statLastInvocation = collectorRegistry.registerIfNotExist(
                PULSAR_SINK_METRICS_PREFIX + LAST_INVOCATION,
                Gauge.build()
                        .name(PULSAR_SINK_METRICS_PREFIX + LAST_INVOCATION)
                        .help("The timestamp of the last invocation of the sink.")
                        .labelNames(METRICS_LABEL_NAMES)
                        .create());
        statLastInvocationChild = statLastInvocation.labels(metricsLabels);

        statTotalRecordsReceived1min = registerCounter(collectorRegistry,
                PULSAR_SINK_METRICS_PREFIX + RECEIVED_TOTAL_1min,
                "Total number of messages sink has received from Pulsar topic(s) in the last 1 minute.");
        statTotalRecordsReceivedChild1min = statTotalRecordsReceived1min.labels(metricsLabels);

        statTotalSysExceptions1min = registerCounter(collectorRegistry,
                PULSAR_SINK_METRICS_PREFIX + SYSTEM_EXCEPTIONS_TOTAL_1min,
                "Total number of system exceptions in the last 1 minute.");
        statTotalSysExceptionsChild1min = statTotalSysExceptions1min.labels(metricsLabels);

        statTotalSinkExceptions1min = registerCounter(collectorRegistry,
                PULSAR_SINK_METRICS_PREFIX + SINK_EXCEPTIONS_TOTAL_1min,
                "Total number of sink exceptions in the last 1 minute.");
        statTotalSinkExceptionsChild1min = statTotalSinkExceptions1min.labels(metricsLabels);

        // Note: fixed help-text typo ("by sink the last" -> "by sink in the last").
        statTotalWritten1min = registerCounter(collectorRegistry,
                PULSAR_SINK_METRICS_PREFIX + WRITTEN_TOTAL_1min,
                "Total number of records processed by sink in the last 1 minute.");
        statTotalWrittenChild1min = statTotalWritten1min.labels(metricsLabels);

        sysExceptions = registerExceptionGauge(collectorRegistry,
                PULSAR_SINK_METRICS_PREFIX + "system_exception",
                "Exception from system code.");
        sinkExceptions = registerExceptionGauge(collectorRegistry,
                PULSAR_SINK_METRICS_PREFIX + "sink_exception",
                "Exception from sink.");

        sysExceptionRateLimiter = createExceptionRateLimiter(scheduledExecutorService);
        sinkExceptionRateLimiter = createExceptionRateLimiter(scheduledExecutorService);
    }

    /** Registers (or fetches the existing) counter keyed by the standard metric labels. */
    private Counter registerCounter(FunctionCollectorRegistry collectorRegistry, String metricName, String help) {
        return collectorRegistry.registerIfNotExist(
                metricName,
                Counter.build()
                        .name(metricName)
                        .help(help)
                        .labelNames(METRICS_LABEL_NAMES)
                        .create());
    }

    /** Registers (or fetches the existing) gauge keyed by the exception metric labels. */
    private Gauge registerExceptionGauge(FunctionCollectorRegistry collectorRegistry, String metricName, String help) {
        return collectorRegistry.registerIfNotExist(
                metricName,
                Gauge.build()
                        .name(metricName)
                        .labelNames(EXCEPTION_METRICS_LABEL_NAMES)
                        .help(help)
                        .create());
    }

    /** Builds the limiter that throttles exception reporting to Prometheus. */
    private RateLimiter createExceptionRateLimiter(ScheduledExecutorService scheduledExecutorService) {
        return RateLimiter.builder()
                .scheduledExecutorService(scheduledExecutorService)
                .permits(EXCEPTION_REPORT_PERMITS_PER_MINUTE)
                .rateTime(1)
                .timeUnit(TimeUnit.MINUTES)
                .build();
    }

    /** Clears the windowed (_1min) counters and re-resolves their label children. */
    @Override
    public void reset() {
        statTotalRecordsReceived1min.clear();
        statTotalRecordsReceivedChild1min = statTotalRecordsReceived1min.labels(metricsLabels);

        statTotalSysExceptions1min.clear();
        statTotalSysExceptionsChild1min = statTotalSysExceptions1min.labels(metricsLabels);

        statTotalSinkExceptions1min.clear();
        statTotalSinkExceptionsChild1min = statTotalSinkExceptions1min.labels(metricsLabels);

        statTotalWritten1min.clear();
        statTotalWrittenChild1min = statTotalWritten1min.labels(metricsLabels);
    }

    @Override
    public void incrTotalReceived() {
        statTotalRecordsReceivedChild.inc();
        statTotalRecordsReceivedChild1min.inc();
    }

    @Override
    public void incrTotalProcessedSuccessfully() {
        statTotalWrittenChild.inc();
        statTotalWrittenChild1min.inc();
    }

    /**
     * Records a system exception: bumps the cumulative and windowed counters,
     * retains the exception for status reporting, and (subject to rate
     * limiting) publishes it as a labeled gauge sample.
     */
    @Override
    public void incrSysExceptions(Throwable ex) {
        statTotalSysExceptionsChild.inc();
        statTotalSysExceptionsChild1min.inc();
        long ts = System.currentTimeMillis();
        InstanceCommunication.FunctionStatus.ExceptionInformation info = getExceptionInfo(ex, ts);
        latestSystemExceptions.add(info);
        // report exception through prometheus
        if (sysExceptionRateLimiter.tryAcquire()) {
            sysExceptions.labels(getExceptionMetricsLabels(ex)).set(1.0);
        }
    }

    // User and source exceptions are delegated to the system-exception path.
    @Override
    public void incrUserExceptions(Throwable ex) {
        incrSysExceptions(ex);
    }

    @Override
    public void incrSourceExceptions(Throwable ex) {
        incrSysExceptions(ex);
    }

    /**
     * Records a sink exception, mirroring {@link #incrSysExceptions(Throwable)}
     * but against the sink-specific metrics.
     */
    @Override
    public void incrSinkExceptions(Throwable ex) {
        statTotalSinkExceptionsChild.inc();
        statTotalSinkExceptionsChild1min.inc();
        long ts = System.currentTimeMillis();
        InstanceCommunication.FunctionStatus.ExceptionInformation info = getExceptionInfo(ex, ts);
        latestSinkExceptions.add(info);
        // report exception through prometheus
        if (sinkExceptionRateLimiter.tryAcquire()) {
            sinkExceptions.labels(getExceptionMetricsLabels(ex)).set(1.0);
        }
    }

    /** Appends the exception message (or "" when absent) to the standard metric labels. */
    private String[] getExceptionMetricsLabels(Throwable ex) {
        String[] exceptionMetricsLabels = Arrays.copyOf(metricsLabels, metricsLabels.length + 1);
        exceptionMetricsLabels[exceptionMetricsLabels.length - 1] = ex.getMessage() != null ? ex.getMessage() : "";
        return exceptionMetricsLabels;
    }

    @Override
    public void setLastInvocation(long ts) {
        statLastInvocationChild.set(ts);
    }

    // Per-record processing latency is not tracked by this manager.
    @Override
    public void processTimeStart() {
        //no-op
    }

    @Override
    public void processTimeEnd() {
        //no-op
    }

    @Override
    public double getTotalProcessedSuccessfully() {
        return statTotalWrittenChild.get();
    }

    @Override
    public double getTotalRecordsReceived() {
        return statTotalRecordsReceivedChild.get();
    }

    @Override
    public double getTotalSysExceptions() {
        return statTotalSysExceptionsChild.get();
    }

    // User exceptions are folded into system exceptions (see incrUserExceptions).
    @Override
    public double getTotalUserExceptions() {
        return 0;
    }

    @Override
    public double getLastInvocation() {
        return statLastInvocationChild.get();
    }

    // Latency is not tracked (see processTimeStart/processTimeEnd).
    @Override
    public double getAvgProcessLatency() {
        return 0;
    }

    @Override
    public double getTotalProcessedSuccessfully1min() {
        return statTotalWrittenChild1min.get();
    }

    @Override
    public double getTotalRecordsReceived1min() {
        return statTotalRecordsReceivedChild1min.get();
    }

    @Override
    public double getTotalSysExceptions1min() {
        return statTotalSysExceptionsChild1min.get();
    }

    @Override
    public double getTotalUserExceptions1min() {
        return 0;
    }

    @Override
    public double getAvgProcessLatency1min() {
        return 0;
    }

    @Override
    public EvictingQueue<InstanceCommunication.FunctionStatus.ExceptionInformation> getLatestUserExceptions() {
        return emptyQueue;
    }

    @Override
    public EvictingQueue<InstanceCommunication.FunctionStatus.ExceptionInformation> getLatestSystemExceptions() {
        return latestSystemExceptions;
    }

    @Override
    public EvictingQueue<InstanceCommunication.FunctionStatus.ExceptionInformation> getLatestSourceExceptions() {
        return emptyQueue;
    }

    @Override
    public EvictingQueue<InstanceCommunication.FunctionStatus.ExceptionInformation> getLatestSinkExceptions() {
        return latestSinkExceptions;
    }
}
| apache-2.0 |
tduehr/cas | support/cas-server-support-consent-core/src/test/java/org/apereo/cas/consent/InMemoryConsentRepositoryTests.java | 1104 | package org.apereo.cas.consent;
import org.apereo.cas.CipherExecutor;
import org.apereo.cas.services.RegisteredServiceTestUtils;
import org.apereo.cas.util.CollectionUtils;
import lombok.val;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* This is {@link InMemoryConsentRepositoryTests}.
*
* @author Misagh Moayyed
* @since 5.3.0
*/
public class InMemoryConsentRepositoryTests {

    /**
     * Builds a consent decision, stores it in an in-memory repository,
     * and verifies that it can subsequently be found and deleted.
     */
    @Test
    public void verifyConsentDecisionStored() {
        val builder = new DefaultConsentDecisionBuilder(CipherExecutor.noOpOfSerializableToString());
        val regSvc = RegisteredServiceTestUtils.getRegisteredService("test");
        val svc = RegisteredServiceTestUtils.getService();
        val decision = builder.build(svc,
            regSvc, "casuser",
            CollectionUtils.wrap("attribute", "value"));
        val repo = new InMemoryConsentRepository();

        // Storing should succeed and leave exactly one decision behind.
        // assertEquals (rather than assertTrue on a comparison) reports the
        // actual size on failure.
        assertTrue(repo.storeConsentDecision(decision));
        assertEquals(1, repo.getConsentDecisions().size());

        // The stored decision should be deletable by id and owning user.
        assertTrue(repo.deleteConsentDecision(decision.getId(), "casuser"));
    }
}
| apache-2.0 |
yanzhijun/jclouds-aliyun | providers/glesys/src/main/java/org/jclouds/glesys/options/ListIpOptions.java | 3032 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.glesys.options;
import org.jclouds.http.options.BaseHttpRequestOptions;
public class ListIpOptions extends BaseHttpRequestOptions {

   /**
    * Static factory methods, each creating a fresh {@link ListIpOptions}
    * with a single filter applied.
    */
   public static class Builder {

      /**
       * @see org.jclouds.glesys.options.ListIpOptions#used
       */
      public static ListIpOptions used(boolean used) {
         return new ListIpOptions().used(used);
      }

      /**
       * @see org.jclouds.glesys.options.ListIpOptions#serverId
       */
      public static ListIpOptions serverId(String serverId) {
         return new ListIpOptions().serverId(serverId);
      }

      /**
       * @see org.jclouds.glesys.options.ListIpOptions#ipVersion
       */
      public static ListIpOptions ipVersion(int ipVersion) {
         return new ListIpOptions().ipVersion(ipVersion);
      }

      /**
       * @see org.jclouds.glesys.options.ListIpOptions#datacenter
       */
      public static ListIpOptions datacenter(String datacenter) {
         return new ListIpOptions().datacenter(datacenter);
      }

      /**
       * @see org.jclouds.glesys.options.ListIpOptions#platform
       */
      public static ListIpOptions platform(String platform) {
         return new ListIpOptions().platform(platform);
      }
   }

   /**
    * Retrieve only IPs that are in use
    */
   public ListIpOptions used(boolean used) {
      return filter("used", Boolean.toString(used));
   }

   /**
    * Retrieve only IP assigned to the specified server
    */
   public ListIpOptions serverId(String serverId) {
      return filter("serverid", serverId);
   }

   /**
    * Retrieve only IPs of the requested version
    */
   public ListIpOptions ipVersion(int ipVersion) {
      return filter("ipversion", Integer.toString(ipVersion));
   }

   /**
    * Retrieve only IPs served in the specified datacenter
    */
   public ListIpOptions datacenter(String datacenter) {
      return filter("datacenter", datacenter);
   }

   /**
    * Retrieve only IPs served on the specified platform
    */
   public ListIpOptions platform(String platform) {
      return filter("platform", platform);
   }

   /**
    * Records a single form parameter and returns this instance for chaining.
    */
   private ListIpOptions filter(String name, String value) {
      formParameters.put(name, value);
      return this;
   }
}
| apache-2.0 |
yanzhijun/jclouds-aliyun | apis/cloudstack/src/test/java/org/jclouds/cloudstack/parse/ListLoadBalancerRulesResponseTest.java | 2269 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.cloudstack.parse;
import java.util.Set;
import org.jclouds.cloudstack.domain.LoadBalancerRule;
import org.jclouds.json.BaseSetParserTest;
import org.jclouds.json.config.GsonModule;
import org.jclouds.rest.annotations.SelectJson;
import org.testng.annotations.Test;
import org.testng.collections.Sets;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Guice;
import com.google.inject.Injector;
@Test(groups = "unit")
public class ListLoadBalancerRulesResponseTest extends BaseSetParserTest<LoadBalancerRule> {

   /**
    * Supplies a Guice injector whose Gson is configured with the
    * ISO-8601 date adapter, matching the timestamp format in the
    * canned CloudStack response.
    */
   @Override
   protected Injector injector() {
      return Guice.createInjector(new GsonModule() {
         @Override
         protected void configure() {
            // Bind the ISO-8601 adapter before applying the default module config.
            bind(DateAdapter.class).to(Iso8601DateAdapter.class);
            super.configure();
         }
      });
   }

   /** Classpath resource holding the canned listLoadBalancerRules JSON response. */
   @Override
   public String resource() {
      return "/listloadbalancerrulesresponse.json";
   }

   /**
    * The single rule expected after parsing the "loadbalancerrule" element
    * of the canned JSON response.
    */
   @Override
   @SelectJson("loadbalancerrule")
   public Set<LoadBalancerRule> expected() {
      return ImmutableSet.<LoadBalancerRule> of(LoadBalancerRule.builder()
         .id("93").account("admin").algorithm(LoadBalancerRule.Algorithm.ROUNDROBIN)
         .description("null").domain("ROOT").domainId("1").name("Ranny").privatePort(80)
         .publicIP("10.27.27.59").publicIPId("10").publicPort(80).state(LoadBalancerRule.State.ADD)
         .CIDRs(Sets.<String>newHashSet()).zoneId(null)
         .build());
   }
}
| apache-2.0 |
barnyard/pi | freepastry/src/org/mpisws/p2p/transport/priority/PriorityTransportLayer.java | 4987 | /*******************************************************************************
"FreePastry" Peer-to-Peer Application Development Substrate
Copyright 2002-2007, Rice University. Copyright 2006-2007, Max Planck Institute
for Software Systems. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of Rice University (RICE), Max Planck Institute for Software
Systems (MPI-SWS) nor the names of its contributors may be used to endorse or
promote products derived from this software without specific prior written
permission.
This software is provided by RICE, MPI-SWS and the contributors on an "as is"
basis, without any representations or warranties of any kind, express or implied
including, but not limited to, representations or warranties of
non-infringement, merchantability or fitness for a particular purpose. In no
event shall RICE, MPI-SWS or contributors be liable for any direct, indirect,
incidental, special, exemplary, or consequential damages (including, but not
limited to, procurement of substitute goods or services; loss of use, data, or
profits; or business interruption) however caused and on any theory of
liability, whether in contract, strict liability, or tort (including negligence
or otherwise) arising in any way out of the use of this software, even if
advised of the possibility of such damage.
*******************************************************************************/
package org.mpisws.p2p.transport.priority;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.mpisws.p2p.transport.TransportLayer;
import org.mpisws.p2p.transport.TransportLayerCallback;
import org.mpisws.p2p.transport.TransportLayerListener;
import rice.Continuation;
import rice.p2p.util.tuples.Tuple3;
/**
 * Does 3 things:
 * a) Sends messages on a Socket (depending on the options).
 * b) Prioritizes messages into queues.
 * c) calls sendFailed if there is a liveness change
 *
 * @author Jeff Hoye
 */
public interface PriorityTransportLayer<Identifier> extends TransportLayer<Identifier, ByteBuffer> {

  /** Message-option key whose value selects one of the priority constants below. */
  public static final String OPTION_PRIORITY = "OPTION_PRIORITY";

  // different priority levels; a lower numeric value means a higher priority
  public static final byte MAX_PRIORITY = -15;
  public static final byte HIGH_PRIORITY = -10;
  public static final byte MEDIUM_HIGH_PRIORITY = -5;
  public static final byte MEDIUM_PRIORITY = 0;
  public static final byte MEDIUM_LOW_PRIORITY = 5;
  public static final byte LOW_PRIORITY = 10;
  public static final byte LOWEST_PRIORITY = 15;
  public static final byte DEFAULT_PRIORITY = MEDIUM_PRIORITY;

  // possible return values of connectionStatus()
  public static final int STATUS_NOT_CONNECTED = 0;
  public static final int STATUS_CONNECTING = 1;
  public static final int STATUS_CONNECTED = 2;

  public void addTransportLayerListener(TransportLayerListener<Identifier> listener);

  public void removeTransportLayerListener(TransportLayerListener<Identifier> listener);

  public void addPriorityTransportLayerListener(PriorityTransportLayerListener<Identifier> listener);

  public void removePriorityTransportLayerListener(PriorityTransportLayerListener<Identifier> listener);

  /**
   * Returns the state of the primary connection to the identifier.
   *
   * @param i the remote endpoint
   * @return STATUS_NOT_CONNECTED, STATUS_CONNECTING, STATUS_CONNECTED
   */
  public int connectionStatus(Identifier i);

  /**
   * Returns the options on the primary connection.
   *
   * @param i the remote endpoint
   * @return the options of the primary connection to {@code i}
   */
  public Map<String, Object> connectionOptions(Identifier i);

  /**
   * usually used with bytesPending() or queueLength()
   *
   * @return any Identifier with messages to be sent
   */
  public Collection<Identifier> nodesWithPendingMessages();

  /**
   * Returns the number of messages pending to be sent
   *
   * @param i the remote endpoint
   * @return the number of queued messages for {@code i}
   */
  public int queueLength(Identifier i);

  /**
   * The number of bytes to be sent to the identifier
   *
   * @param i the remote endpoint
   * @return the number of queued bytes for {@code i}
   */
  public long bytesPending(Identifier i);

  /**
   * The messages queued to be sent to the identifier
   *
   * @param i the remote endpoint
   * @return the pending messages for {@code i}
   */
  public List<MessageInfo> getPendingMessages(Identifier i);

  /**
   * open a primary connection
   *
   * @param i the remote endpoint
   * @param options options for the new connection
   */
  public void openPrimaryConnection(Identifier i, Map<String, Object> options);

  public void addPrimarySocketListener(PrimarySocketListener<Identifier> listener);

  public void removePrimarySocketListener(PrimarySocketListener<Identifier> listener);
}
| apache-2.0 |
drinkjava2/jSQLBox | core/src/main/java/com/github/drinkjava2/jdialects/annotation/jpa/Index.java | 1916 | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by
* applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
* OF ANY KIND, either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package com.github.drinkjava2.jdialects.annotation.jpa;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
/**
 * Used in schema generation to specify creation of an index.
 * <p>
 * Note that it is not necessary to specify an index for a primary key,
 * as the primary key index will be created automatically.
 *
 * <p>
 * The syntax of the <code>columnList</code> element is a
 * <code>column_list</code>, as follows:
 *
 * <pre>
 *    column::= index_column [,index_column]*
 *    index_column::= column_name [ASC | DESC]
 * </pre>
 *
 * <p> If <code>ASC</code> or <code>DESC</code> is not specified,
 * <code>ASC</code> (ascending order) is assumed.
 *
 * @see Table
 * @see SecondaryTable
 * @see CollectionTable
 * @see JoinTable
 *
 * @since Java Persistence 2.1
 *
 */
@Target({})
@Retention(RUNTIME)
public @interface Index {

    /**
     * (Optional) The name of the index; defaults to a provider-generated name.
     */
    String name() default "";

    /**
     * (Required) The names of the columns to be included in the index,
     * in order.
     */
    String columnList();

    /**
     * (Optional) Whether the index is unique.
     */
    boolean unique() default false;
}
gurbuzali/hazelcast-jet | examples/protobuf/src/main/java/com/hazelcast/jet/examples/protobuf/ProtobufSerializerHookAdapter.java | 2296 | /*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.jet.examples.protobuf;
import com.hazelcast.jet.Jet;
import com.hazelcast.jet.JetInstance;
import com.hazelcast.jet.pipeline.Pipeline;
import com.hazelcast.jet.pipeline.Sinks;
import com.hazelcast.jet.pipeline.test.TestSources;
import com.hazelcast.jet.protobuf.Messages.Product;
import com.hazelcast.jet.protobuf.ProtobufSerializerHook;
/**
* Demonstrates the usage of Protobuf serializer hook adapter.
* <p>
* {@link ProductSerializerHook} is discovered & registered via
* 'META-INF/services/com.hazelcast.SerializerHook' and then used to
* serialize local {@link com.hazelcast.collection.IList} items.
*/
public class ProtobufSerializerHookAdapter {

    private static final String LIST_NAME = "products";

    private JetInstance jet;

    public static void main(String[] args) {
        new ProtobufSerializerHookAdapter().go();
    }

    /**
     * Bootstraps a Jet instance, runs the demo job to completion, and always
     * shuts the cluster down afterwards.
     */
    private void go() {
        try {
            jet = Jet.bootstrappedInstance();
            jet.newJob(buildPipeline()).join();
        } finally {
            Jet.shutdownAll();
        }
    }

    /**
     * Maps each item name to a protobuf {@link Product} message and writes it
     * to an IList, exercising the registered protobuf serializer hook.
     */
    private static Pipeline buildPipeline() {
        Pipeline pipeline = Pipeline.create();
        pipeline.readFrom(TestSources.items("jam", "marmalade"))
                .map(itemName -> Product.newBuilder().setName(itemName).build())
                .writeTo(Sinks.list(LIST_NAME));
        return pipeline;
    }

    /**
     * Serializer hook for {@link Product}, discovered through the
     * 'META-INF/services/com.hazelcast.SerializerHook' service file.
     */
    @SuppressWarnings("unused")
    private static class ProductSerializerHook extends ProtobufSerializerHook<Product> {

        private static final int TYPE_ID = 17;

        ProductSerializerHook() {
            super(Product.class, TYPE_ID);
        }
    }
}
| apache-2.0 |
azureplus/flex-blazeds | modules/remoting/src/flex/management/runtime/messaging/services/remoting/RemotingDestinationControl.java | 4987 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package flex.management.runtime.messaging.services.remoting;
import java.io.IOException;
import flex.management.BaseControl;
import flex.management.runtime.AdminConsoleTypes;
import flex.management.runtime.messaging.DestinationControl;
import flex.messaging.services.remoting.RemotingDestination;
/**
 * The <code>RemotingDestinationControl</code> class is the MBean implementation for
 * monitoring and managing a <code>RemotingDestination</code> at runtime.
 *
 * This class performs no internal synchronization, so the statistics it tracks may differ slightly from
 * the true values but they don't warrant the cost of full synchronization.
 *
 * @author shodgson
 */
public class RemotingDestinationControl extends DestinationControl implements
        RemotingDestinationControlMBean
{
    private static final String TYPE = "RemotingDestination";

    /**
     * Constructs a new <code>RemotingDestinationControl</code> instance.
     *
     * @param destination The <code>RemotingDestination</code> managed by this MBean.
     * @param parent The parent MBean in the management hierarchy.
     */
    public RemotingDestinationControl(RemotingDestination destination, BaseControl parent)
    {
        super(destination, parent);
    }

    // Invocation statistics; deliberately unsynchronized (see class comment).
    private int invocationSuccessCount = 0;
    private int invocationFaultCount = 0;
    private int totalProcessingTimeMillis = 0;
    private int averageProcessingTimeMillis = 0;

    /** {@inheritDoc} */
    @Override
    public String getType()
    {
        return TYPE;
    }

    /** {@inheritDoc} */
    public Integer getInvocationSuccessCount() throws IOException
    {
        return Integer.valueOf(invocationSuccessCount);
    }

    /**
     * Increments the invocation success count by one.
     *
     * @param processingTimeMillis The processing duration of the invocation success.
     */
    public void incrementInvocationSuccessCount(int processingTimeMillis)
    {
        recordInvocation(processingTimeMillis, true);
    }

    /** {@inheritDoc} */
    public Integer getInvocationFaultCount() throws IOException
    {
        return Integer.valueOf(invocationFaultCount);
    }

    /**
     * Increments the invocation fault count by one.
     *
     * @param processingTimeMillis The processing duration of the invocation fault.
     */
    public void incrementInvocationFaultCount(int processingTimeMillis)
    {
        recordInvocation(processingTimeMillis, false);
    }

    /** {@inheritDoc} */
    public Integer getAverageInvocationProcessingTimeMillis() throws IOException
    {
        return Integer.valueOf(averageProcessingTimeMillis);
    }

    /**
     * Callback used to register properties for display in the admin application.
     */
    @Override
    protected void onRegistrationComplete()
    {
        String name = this.getObjectName().getCanonicalName();
        String[] pollablePerInterval = { "InvocationSuccessCount", "InvocationFaultCount",
                                         "AverageInvocationProcessingTimeMillis" };
        getRegistrar().registerObjects(
                new int[] {AdminConsoleTypes.DESTINATION_POLLABLE, AdminConsoleTypes.GRAPH_BY_POLL_INTERVAL},
                name, pollablePerInterval);
    }

    /**
     * Shared bookkeeping for successful and faulted invocations: increments
     * the appropriate counter, accumulates the processing time, and recomputes
     * the running average. Any failure while updating (e.g. arithmetic errors
     * after counter overflow) resets all statistics, matching the original
     * per-method behavior.
     *
     * @param processingTimeMillis The processing duration of the invocation.
     * @param success Whether the invocation completed successfully.
     */
    private void recordInvocation(int processingTimeMillis, boolean success)
    {
        try
        {
            if (success)
            {
                invocationSuccessCount++;
            }
            else
            {
                invocationFaultCount++;
            }
            totalProcessingTimeMillis += processingTimeMillis;
            averageProcessingTimeMillis = totalProcessingTimeMillis / (invocationSuccessCount + invocationFaultCount);
        }
        catch (Exception needsReset)
        {
            reset();
        }
    }

    /**
     * Helper method to reset state in the case of errors updating statistics.
     */
    private void reset()
    {
        invocationSuccessCount = 0;
        invocationFaultCount = 0;
        totalProcessingTimeMillis = 0;
        averageProcessingTimeMillis = 0;
    }
}
| apache-2.0 |
apache/axis2-java | modules/addressing/src/org/apache/axis2/handlers/addressing/AddressingOutHandler.java | 30419 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.axis2.handlers.addressing;
import org.apache.axiom.om.OMAttribute;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMNamespace;
import org.apache.axiom.soap.SOAPEnvelope;
import org.apache.axiom.soap.SOAPFactory;
import org.apache.axiom.soap.SOAPFault;
import org.apache.axiom.soap.SOAPHeader;
import org.apache.axiom.soap.SOAPHeaderBlock;
import org.apache.axiom.util.UIDGenerator;
import org.apache.axis2.AxisFault;
import org.apache.axis2.addressing.AddressingConstants;
import org.apache.axis2.addressing.AddressingFaultsHelper;
import org.apache.axis2.addressing.EndpointReference;
import org.apache.axis2.addressing.EndpointReferenceHelper;
import org.apache.axis2.addressing.RelatesTo;
import org.apache.axis2.addressing.i18n.AddressingMessages;
import org.apache.axis2.client.Options;
import org.apache.axis2.context.MessageContext;
import org.apache.axis2.description.AxisEndpoint;
import org.apache.axis2.description.AxisService;
import org.apache.axis2.description.Parameter;
import org.apache.axis2.handlers.AbstractHandler;
import org.apache.axis2.handlers.AbstractTemplatedHandler;
import org.apache.axis2.util.JavaUtils;
import org.apache.axis2.util.LoggingControl;
import org.apache.axis2.util.Utils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import javax.xml.namespace.QName;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
public class AddressingOutHandler extends AbstractTemplatedHandler implements AddressingConstants {
    private static final Log log = LogFactory.getLog(AddressingOutHandler.class);

    // Module name used when resolving module-scoped parameters via
    // MessageContext#getModuleParameter; there is no API to determine it
    // automatically, so it is hard-coded here.
    private static final String MODULE_NAME = "addressing";
public boolean shouldInvoke(MessageContext msgContext) throws AxisFault {
Parameter param = null;
boolean disableAddressing = false;
Object o = msgContext.getProperty(DISABLE_ADDRESSING_FOR_OUT_MESSAGES);
if (o == null || !(o instanceof Boolean)) {
//determine whether outbound addressing has been disabled or not.
// Get default value from module.xml or axis2.xml files
param = msgContext.getModuleParameter(DISABLE_ADDRESSING_FOR_OUT_MESSAGES, MODULE_NAME, handlerDesc);
disableAddressing =
msgContext.isPropertyTrue(DISABLE_ADDRESSING_FOR_OUT_MESSAGES,
JavaUtils.isTrueExplicitly(Utils.getParameterValue(param)));
} else {
disableAddressing = (Boolean) o;
}
if (disableAddressing) {
if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
log.trace(msgContext.getLogIDString() +
" Addressing is disabled. Not adding WS-Addressing headers.");
}
return false;
}
return true;
}
public InvocationResponse doInvoke(MessageContext msgContext) throws AxisFault {
// Determine the addressin namespace in effect.
Object addressingVersionFromCurrentMsgCtxt = msgContext.getProperty(WS_ADDRESSING_VERSION);
if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
log.trace("Addressing version string from messageContext=" +
addressingVersionFromCurrentMsgCtxt);
}
boolean isSubmissionNamespace =
Submission.WSA_NAMESPACE.equals(addressingVersionFromCurrentMsgCtxt);
// Determine whether to include optional addressing headers in the output.
// Get default value from module.xml or axis2.xml files
Parameter param = msgContext.getModuleParameter(
INCLUDE_OPTIONAL_HEADERS, MODULE_NAME, handlerDesc);
boolean includeOptionalHeaders =
msgContext.isPropertyTrue(INCLUDE_OPTIONAL_HEADERS,
JavaUtils.isTrueExplicitly(Utils.getParameterValue(param)));
if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
log.debug("includeOptionalHeaders=" + includeOptionalHeaders);
}
// Determine if a MustUnderstand attribute will be added to all headers in the
// addressing namespace.
boolean addMustUnderstandAttribute =
msgContext.isPropertyTrue(ADD_MUST_UNDERSTAND_TO_ADDRESSING_HEADERS);
// what if there are addressing headers already in the message. Do you replace that or not?
// Lets have a parameter to control that. The default behavior is you won't replace addressing
// headers if there are any (this was the case so far).
boolean replaceHeaders = msgContext.isPropertyTrue(REPLACE_ADDRESSING_HEADERS);
// Allow the user to specify the role these WS-Addressing headers should be targetted at.
String role = (String) msgContext.getProperty(SOAP_ROLE_FOR_ADDRESSING_HEADERS);
WSAHeaderWriter writer = new WSAHeaderWriter(msgContext, isSubmissionNamespace,
addMustUnderstandAttribute, replaceHeaders,
includeOptionalHeaders, role);
writer.writeHeaders();
return InvocationResponse.CONTINUE;
}
private class WSAHeaderWriter {
private MessageContext messageContext;
private SOAPEnvelope envelope;
private SOAPHeader header;
private SOAPFactory factory;
private Options messageContextOptions;
private OMNamespace addressingNamespaceObject;
private String addressingNamespace;
private String addressingRole;
private boolean isFinalAddressingNamespace;
private boolean addMustUnderstandAttribute;
private boolean replaceHeaders; // determines whether we replace the existing headers or not, if they present
private boolean includeOptionalHeaders;
private ArrayList existingWSAHeaders = null;
public WSAHeaderWriter(MessageContext mc, boolean isSubmissionNamespace, boolean addMU,
boolean replace, boolean includeOptional, String role) {
if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
log.debug("WSAHeaderWriter: isFinal=" + !isSubmissionNamespace + " addMU=" + addMU +
" replace=" + replace + " includeOptional=" + includeOptional+" role="+role);
}
messageContext = mc;
envelope = mc.getEnvelope();
factory = (SOAPFactory)envelope.getOMFactory();
messageContextOptions = messageContext.getOptions();
addressingNamespace =
isSubmissionNamespace ? Submission.WSA_NAMESPACE : Final.WSA_NAMESPACE;
header = envelope.getHeader();
// if there is no soap header in the envelope being processed, add one.
if (header == null) {
header = factory.createSOAPHeader(envelope);
}else{
ArrayList addressingHeaders = header.getHeaderBlocksWithNSURI(addressingNamespace);
if(addressingHeaders!=null && !addressingHeaders.isEmpty()){
existingWSAHeaders = new ArrayList(addressingHeaders.size());
for(Iterator iter=addressingHeaders.iterator();iter.hasNext();){
SOAPHeaderBlock oe = (SOAPHeaderBlock)iter.next();
if(addressingRole == null || addressingRole.length() ==0 || addressingRole.equals(oe.getRole())){
existingWSAHeaders.add(oe.getLocalName());
}
}
}
if(addressingHeaders != null && addressingHeaders.size() ==0){
addressingHeaders = null;
}
}
isFinalAddressingNamespace = !isSubmissionNamespace;
addMustUnderstandAttribute = addMU;
replaceHeaders = replace;
includeOptionalHeaders = includeOptional;
addressingRole = role;
if(!isFinalAddressingNamespace && mc.getTo() == null){
mc.setTo(new EndpointReference(AddressingConstants.Submission.WSA_ANONYMOUS_URL));
}
}
        /**
         * Writes the full set of WS-Addressing headers into the SOAP header.
         * The addressing namespace is declared once on the soap:Header element and the
         * individual process* methods then emit their header blocks in that namespace.
         * The mustUnderstand pass runs last so it covers every header written above it.
         *
         * @throws AxisFault if a mandatory header (e.g. wsa:Action) cannot be produced
         */
        public void writeHeaders() throws AxisFault {
            // by this time, we definitely have some addressing information to be sent. This is because,
            // we have tested at the start of this whether messageInformationHeaders are null or not.
            // So rather than declaring addressing namespace in each and every addressing header, lets
            // define that in the Header itself.
            addressingNamespaceObject = header.declareNamespace(addressingNamespace, WSA_DEFAULT_PREFIX);

            // processing WSA To
            processToEPR();
            // processing WSA replyTo
            processReplyTo();
            // processing WSA From
            processFromEPR();
            // processing WSA FaultTo
            processFaultToEPR();
            // processing WSA MessageID
            processMessageID();
            // processing WSA Action
            processWSAAction();
            // processing WSA RelatesTo
            processRelatesTo();
            // process fault headers, if present
            processFaultsInfoIfPresent();
            // process mustUnderstand attribute, if required.
            processMustUnderstandProperty();
        }
private void processMessageID() {
String messageID = messageContextOptions.getMessageId();
//Check whether we want to force a message id to be sent.
if (messageID == null && includeOptionalHeaders) {
messageID = UIDGenerator.generateURNString();
messageContextOptions.setMessageId(messageID);
}
if (messageID != null && !isAddressingHeaderAlreadyAvailable(WSA_MESSAGE_ID, false))
{//optional
ArrayList attributes = (ArrayList)messageContext.getLocalProperty(
AddressingConstants.MESSAGEID_ATTRIBUTES);
createSOAPHeaderBlock(messageID, WSA_MESSAGE_ID, attributes);
}
}
        /**
         * Resolves the wsa:Action value for this message and writes the wsa:Action header.
         *
         * <p>Resolution order: the action already on the options; otherwise the output action
         * declared on the AxisOperation. If the options carry a fault action from the "other"
         * addressing namespace, it is translated to the fault action of the namespace in
         * effect. If no action can be determined and outbound validation has not been
         * disabled, an AxisFault is thrown — wsa:Action is mandatory.</p>
         *
         * @throws AxisFault if no action is available and outbound validation is enabled
         */
        private void processWSAAction() throws AxisFault {
            String action = messageContextOptions.getAction();

            if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
                log.trace(messageContext.getLogIDString() +
                        " processWSAAction: action from messageContext: " + action);
            }
            if (action == null || action.length()==0) {
                // No action on the options: fall back to the operation's declared output action.
                if (messageContext.getAxisOperation() != null) {
                    action = messageContext.getAxisOperation().getOutputAction();
                    if(action!=null){
                        // Set this action back to obviate possible action mismatch problems
                        messageContext.setWSAAction(action);
                    }
                    if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
                        log.trace(messageContext.getLogIDString() +
                                " processWSAAction: action from AxisOperation: " + action);
                    }
                }
            }else{
                // Use the correct fault action for the selected namespace
                if(isFinalAddressingNamespace){
                    // Translate a Submission-namespace fault action to its Final equivalent.
                    if(Submission.WSA_FAULT_ACTION.equals(action)){
                        action = Final.WSA_FAULT_ACTION;
                        messageContextOptions.setAction(action);
                    }
                }else{
                    // Translate either Final fault action to the single Submission fault action.
                    if(Final.WSA_FAULT_ACTION.equals(action)){
                        action = Submission.WSA_FAULT_ACTION;
                        messageContextOptions.setAction(action);
                    }else if(Final.WSA_SOAP_FAULT_ACTION.equals(action)){
                        action = Submission.WSA_FAULT_ACTION;
                        messageContextOptions.setAction(action);
                    }
                }
            }

            // If we need to add a wsa:Action header
            if (!isAddressingHeaderAlreadyAvailable(WSA_ACTION, false)) {
                if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
                    log.trace(messageContext.getLogIDString() +
                            " processWSAAction: No existing wsa:Action header found");
                }
                // If we don't have an action to add,
                if (action == null || action.length()==0) {
                    if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
                        log.trace(messageContext.getLogIDString() +
                                " processWSAAction: No action to add to header");
                    }
                    // Fault unless validation has been explicitly turned off
                    if (!messageContext.isPropertyTrue(
                            AddressingConstants.DISABLE_OUTBOUND_ADDRESSING_VALIDATION))
                    {
                        throw new AxisFault(AddressingMessages.getMessage("outboundNoAction"));
                    }
                } else {
                    if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
                        log.trace(messageContext.getLogIDString() +
                                " processWSAAction: Adding action to header: " + action);
                    }
                    // Otherwise just add the header
                    ArrayList attributes = (ArrayList)messageContext.getLocalProperty(
                            AddressingConstants.ACTION_ATTRIBUTES);
                    createSOAPHeaderBlock(action, WSA_ACTION, attributes);
                }
            }
        }
        /**
         * Adds WS-Addressing fault detail information when this message carries an
         * addressing fault. For SOAP 1.1 with the Final namespace the detail travels as a
         * wsa:FaultDetail header; for SOAP 1.2 it is added to the detail element of the
         * fault already present in the SOAP body. No-op when there is no detail to add.
         */
        private void processFaultsInfoIfPresent() {
            OMElement detailElement = AddressingFaultsHelper
                    .getDetailElementForAddressingFault(messageContext, addressingNamespaceObject);
            if (detailElement != null) {
                //The difference between SOAP 1.1 and SOAP 1.2 fault messages is explained in the WS-Addressing Specs.
                if (isFinalAddressingNamespace && messageContext.isSOAP11()) {
                    // Add detail as a wsa:FaultDetail header
                    if (!isAddressingHeaderAlreadyAvailable(Final.FAULT_HEADER_DETAIL, false)) {
                        SOAPHeaderBlock faultDetail = header.addHeaderBlock(
                                Final.FAULT_HEADER_DETAIL, addressingNamespaceObject);
                        faultDetail.addChild((OMElement)factory.importInformationItem(detailElement));
                    }
                } else if (!messageContext.isSOAP11()) {
                    // Add detail to the Fault in the SOAP Body
                    SOAPFault fault = envelope.getBody().getFault();
                    if (fault != null && fault.getDetail() != null) {
                        fault.getDetail().addDetailEntry(
                                (OMElement)factory.importInformationItem(detailElement));
                    }
                }
            }
        }
private void processRelatesTo() {
if (!isAddressingHeaderAlreadyAvailable(WSA_RELATES_TO, true)) {
RelatesTo[] relatesTo = messageContextOptions.getRelationships();
if (relatesTo != null) {
for (int i = 0, length = relatesTo.length; i < length; i++) {
OMElement relatesToHeader = createSOAPHeaderBlock(relatesTo[i].getValue(),
WSA_RELATES_TO, relatesTo[i].getExtensibilityAttributes());
String relationshipType = relatesTo[i].getRelationshipType();
if (relatesToHeader != null) {
if(!includeOptionalHeaders){
if (Final.WSA_DEFAULT_RELATIONSHIP_TYPE.equals(relationshipType) ||
Submission.WSA_DEFAULT_RELATIONSHIP_TYPE
.equals(relationshipType)) {
relationshipType = null; //Omit the attribute.
}
}
if(relationshipType != null){
relatesToHeader.addAttribute(WSA_RELATES_TO_RELATIONSHIP_TYPE,
relationshipType,
null);
}
}
}
}
}
}
private void processFaultToEPR() throws AxisFault {
EndpointReference epr = messageContextOptions.getFaultTo();
String headerName = AddressingConstants.WSA_FAULT_TO;
//Omit the header if the epr is null.
if (epr != null && !isAddressingHeaderAlreadyAvailable(headerName, false)) {
addToSOAPHeader(epr, headerName);
}
}
private void processFromEPR() throws AxisFault {
EndpointReference epr = messageContextOptions.getFrom();
String headerName = AddressingConstants.WSA_FROM;
//Omit the header if the epr is null.
if (epr != null && !isAddressingHeaderAlreadyAvailable(headerName, false)) {
addToSOAPHeader(epr, headerName);
}
}
private void processReplyTo() throws AxisFault {
EndpointReference epr = messageContextOptions.getReplyTo();
String headerName = AddressingConstants.WSA_REPLY_TO;
//Don't check epr for null here as addToSOAPHeader() will provide an appropriate default.
//This default is especially useful for client side outbound processing.
if (!isAddressingHeaderAlreadyAvailable(headerName, false)) {
addToSOAPHeader(epr, headerName);
}
}
private void processToEPR() throws AxisFault {
EndpointReference epr = messageContextOptions.getTo();
if (epr != null && !isAddressingHeaderAlreadyAvailable(WSA_TO, false)) {
try {
processToEPRReferenceInformation(epr.getAllReferenceParameters());
}
catch (Exception e) {
throw new AxisFault(AddressingMessages.getMessage("referenceParameterError"), e);
}
String address = epr.getAddress();
if (address != null && address.length()!=0) {
if (!includeOptionalHeaders && isFinalAddressingNamespace &&
epr.isWSAddressingAnonymous())
{
return; //Omit the header.
}
createSOAPHeaderBlock(address, WSA_TO, epr.getAddressAttributes());
}
}
}
private OMElement createSOAPHeaderBlock(String value, String headerName, ArrayList<OMAttribute> attributes) {
if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
log.trace("createSOAPHeaderBlock: value=" + value + " headerName=" + headerName);
}
if (value != null && value.length()!=0) {
SOAPHeaderBlock soapHeaderBlock =
header.addHeaderBlock(headerName, addressingNamespaceObject);
soapHeaderBlock.addChild(factory.createOMText(value));
if (attributes != null && !attributes.isEmpty()) {
Iterator<OMAttribute> attrIterator = attributes.iterator();
while (attrIterator.hasNext()) {
soapHeaderBlock.addAttribute(
(OMAttribute)soapHeaderBlock.getOMFactory().importInformationItem(attrIterator.next()));
}
}
addRoleToHeader(soapHeaderBlock);
return soapHeaderBlock;
}
return null;
}
        /**
         * Serialises an EndpointReference (wsa:From / wsa:ReplyTo / wsa:FaultTo) into the
         * SOAP header.
         *
         * <p>Omission rules, as implemented below: a null or anonymous wsa:ReplyTo in the
         * Final namespace is omitted unless optional headers were requested; an EPR with
         * the "none" address is omitted when the Submission namespace is in effect. A null
         * EPR that is not omitted defaults to the namespace's anonymous address.</p>
         *
         * @param epr        the endpoint reference to write; may be null (see above)
         * @param headerName the local name of the header to create
         * @throws AxisFault if the EPR cannot be converted to an OM element
         */
        private void addToSOAPHeader(EndpointReference epr, String headerName) throws AxisFault {
            String prefix = addressingNamespaceObject.getPrefix();
            // The anonymous URI differs between the Final and Submission namespaces.
            String anonymous = isFinalAddressingNamespace ?
                    Final.WSA_ANONYMOUS_URL : Submission.WSA_ANONYMOUS_URL;

            if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
                log.trace("addToSOAPHeader: epr=" + epr + " headerName=" + headerName);
            }

            if (epr == null) {
                // No EPR set: an optional Final-namespace ReplyTo is simply dropped,
                // anything else defaults to the anonymous address.
                if (!includeOptionalHeaders && isFinalAddressingNamespace &&
                        AddressingConstants.WSA_REPLY_TO.equals(headerName)) {
                    return; //Omit the header.
                } else {
                    epr = new EndpointReference(anonymous);
                }
            }
            else if (!isFinalAddressingNamespace && epr.hasNoneAddress()) {
                return; //Omit the header.
            }
            else if (epr.isWSAddressingAnonymous())
            {
                // Anonymous EPR: same omission rule as the null case; otherwise normalise
                // the address to the anonymous URI of the namespace in effect.
                if (!includeOptionalHeaders && isFinalAddressingNamespace &&
                        AddressingConstants.WSA_REPLY_TO.equals(headerName)) {
                    return; //Omit the header.
                } else {
                    epr.setAddress(anonymous);
                }
            }

            OMElement soapHeaderBlock = EndpointReferenceHelper.toOM(factory,
                    epr,
                    new QName(addressingNamespace,
                            headerName, prefix),
                    addressingNamespace);
            addRoleToHeader((SOAPHeaderBlock) soapHeaderBlock);
            header.addChild(soapHeaderBlock);
        }
/**
* This will add reference parameters and/or reference properties in to the message
*
* @param referenceInformation a Map from QName -> OMElement
* @param parent is the element to which the referenceparameters should be
* attached
*/
private void processToEPRReferenceInformation(Map referenceInformation) throws Exception {
if (referenceInformation != null) {
if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
log.trace("processToEPRReferenceInformation: " + referenceInformation);
}
Iterator iterator = referenceInformation.values().iterator();
while (iterator.hasNext()) {
OMElement omElement = (OMElement)iterator.next();
SOAPHeaderBlock newElement = factory.createSOAPHeaderBlock(omElement);
if (isFinalAddressingNamespace) {
newElement.addAttribute(Final.WSA_IS_REFERENCE_PARAMETER_ATTRIBUTE,
Final.WSA_TYPE_ATTRIBUTE_VALUE,
addressingNamespaceObject);
}
addRoleToHeader(newElement);
header.addChild(newElement);
}
}
// Now add reference parameters we found in the WSDL (if any)
AxisService service = messageContext.getAxisService();
if(service != null){
AxisEndpoint endpoint = service.getEndpoint(service.getEndpointName());
if(endpoint != null){
ArrayList referenceparameters = (ArrayList) endpoint.getParameterValue(REFERENCE_PARAMETER_PARAMETER);
if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
log.trace("processToEPRReferenceInformation: Reference Parameters from WSDL:" + referenceparameters);
}
if(referenceparameters!=null){
Iterator iterator = referenceparameters.iterator();
while (iterator.hasNext()) {
OMElement omElement = (OMElement)iterator.next();
// Only add the reference parameter from the WSDL if it does not already exist.
// This allows clients to override the values before invoking the service.
if (referenceInformation == null || !referenceInformation.containsKey(omElement.getQName())) {
SOAPHeaderBlock newElement = factory.createSOAPHeaderBlock(omElement);
if (isFinalAddressingNamespace) {
newElement.addAttribute(Final.WSA_IS_REFERENCE_PARAMETER_ATTRIBUTE,
Final.WSA_TYPE_ATTRIBUTE_VALUE,
addressingNamespaceObject);
}
addRoleToHeader(newElement);
header.addChild(newElement);
} else {
if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
log.trace("processToEPRReferenceInformation: Reference parameter already exists so ignoring value from WSDL: " + omElement);
}
}
}
}
}
}
if (log.isTraceEnabled()) {
log.trace("processToEPRReferenceInformation: Header after adding reference parameters: "+header);
}
}
        /**
         * This will check for the existence of message information headers already in the message.
         * If there are already headers, then replacing them or not depends on the replaceHeaders
         * property.
         *
         * @param name - Name of the message information header
         * @param multipleHeaders - determines whether to search for multiple headers, or not.
         * @return false - if one can add new headers (always the case if multipleHeaders is true),
         *         true - if new headers can't be added.
         */
        private boolean isAddressingHeaderAlreadyAvailable(String name, boolean multipleHeaders) {
            boolean status = false;

            if (multipleHeaders) {
                // Multi-valued headers (wsa:RelatesTo): optionally clear the existing ones,
                // and always report false so the caller may append new values.
                if (replaceHeaders) {
                    QName qname = new QName(addressingNamespace, name, WSA_DEFAULT_PREFIX);
                    Iterator<OMElement> iterator = header.getChildrenWithName(qname);
                    while (iterator.hasNext()) {
                        iterator.next();
                        iterator.remove();
                    }
                }
            } else {
                // Single-valued headers: either remove the existing one (replaceHeaders=true,
                // so the caller will write a replacement) or report its presence so the
                // caller skips writing a duplicate.
                boolean exists = didAddressingHeaderExist(name);
                if (exists && replaceHeaders) {
                    QName qname = new QName(addressingNamespace, name, WSA_DEFAULT_PREFIX);
                    OMElement addressingHeader = header.getFirstChildWithName(qname);
                    if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
                        log.trace("isAddressingHeaderAlreadyAvailable: Removing existing header:" +
                                addressingHeader.getLocalName());
                    }
                    addressingHeader.detach();
                } else {
                    status = exists;
                }
            }

            if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
                log.trace("isAddressingHeaderAlreadyAvailable: name=" + name + " status=" + status);
            }
            return status;
        }
private boolean didAddressingHeaderExist(String headerName){
if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
log.trace("didAddressingHeaderExist: headerName=" + headerName);
}
boolean result = false;
if(existingWSAHeaders != null){
result = existingWSAHeaders.contains(headerName);
if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
log.trace("didAddressingHeaderExist: existingWSAHeaders=" + existingWSAHeaders+" result="+result);
}
}
return result;
}
/**
* Sets a mustUnderstand attribute on all headers that are found with the appropriate
* addressing namespace.
*/
private void processMustUnderstandProperty() {
if (addMustUnderstandAttribute) {
List headers = header.getHeaderBlocksWithNSURI(addressingNamespace);
for (int i = 0, size = headers.size(); i < size; i++) {
SOAPHeaderBlock soapHeaderBlock = (SOAPHeaderBlock)headers.get(i);
soapHeaderBlock.setMustUnderstand(true);
if (LoggingControl.debugLoggingAllowed && log.isTraceEnabled()) {
log.trace(
"processMustUnderstandProperty: Setting mustUnderstand=true on: " +
soapHeaderBlock.getLocalName());
}
}
}
}
private void addRoleToHeader(SOAPHeaderBlock header){
if(addressingRole == null || addressingRole.length()==0){
return;
}
header.setRole(addressingRole);
}
}
}
| apache-2.0 |
ccaballe/crossdata | crossdata-driver/src/test/java/com/stratio/crossdata/driver/querybuilder/QueryBuilderJavaTest.java | 4171 | /*
* Licensed to STRATIO (C) under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. The STRATIO (C) licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.stratio.crossdata.driver.querybuilder;
import static org.testng.Assert.assertEquals;
import java.util.ArrayList;
import org.testng.annotations.Test;
/**
* Test for the Java QueryBuilder.
*/
public class QueryBuilderJavaTest {

    // Each test builds a query through the fluent QueryBuilder API and asserts that its
    // string form matches the expected SQL-like text exactly (including the trailing ';').

    /** SELECT * over a single table. */
    @Test
    public void selectFrom(){
        String expected = "SELECT * FROM table;";
        Select s = QueryBuilder.selectAll().from("table");
        assertEquals(s.toString(), expected, "Query does not match");
    }

    /** Explicit column projection via varargs. */
    @Test
    public void selectFrom2Columns(){
        String expected = "SELECT col1, col2 FROM table;";
        Select s = QueryBuilder.select("col1", "col2").from("table");
        assertEquals(s.toString(), expected, "Query does not match");
    }

    /** Projection built incrementally through a Selection, including a column alias. */
    @Test
    public void selectSelection(){
        String expected = "SELECT col1, col2, col3 AS alias3 FROM table;";
        Selection selection = new Selection("col1").and("col2").and("col3", "alias3");
        Select s = QueryBuilder.select(selection).from("table");
        assertEquals(s.toString(), expected, "Query does not match");
    }

    /** Single WHERE clause. */
    @Test
    public void selectFromWhere(){
        String expected = "SELECT * FROM table WHERE id = 42;";
        Query s = QueryBuilder.selectAll().from("table").where("id = 42");
        assertEquals(s.toString(), expected, "Query does not match");
    }

    /** Two WHERE predicates joined with AND. */
    @Test
    public void selectFromWhere2(){
        String expected = "SELECT * FROM table WHERE id = 42 AND name = 'crossdata';";
        Query s = QueryBuilder.selectAll().from("table").where("id = 42").and("name = 'crossdata'");
        assertEquals(s.toString(), expected, "Query does not match");
    }

    /** Streaming WITH WINDOW clause combined with WHERE. */
    @Test
    public void selectWindowTime(){
        String expected = "SELECT * FROM table WITH WINDOW 1 min WHERE id = 42;";
        Query s = QueryBuilder.selectAll().from("table").withWindow("1 min").where("id = 42");
        assertEquals(s.toString(), expected, "Query does not match");
    }

    /** join() serialises as INNER JOIN. */
    @Test
    public void selectJoin(){
        String expected = "SELECT * FROM table1 "
                + "INNER JOIN table2 ON id1 = id2 "
                + "WHERE name = 'crossdata';";
        Query s = QueryBuilder.selectAll().from("table1")
                .join("table2").on("id1 = id2")
                .where("name = 'crossdata'");
        assertEquals(s.toString(), expected, "Query does not match");
    }

    /** innerJoin() produces the same text as join(). */
    @Test
    public void selectInnerJoin(){
        String expected = "SELECT * FROM table1 "
                + "INNER JOIN table2 ON id1 = id2 "
                + "WHERE name = 'crossdata';";
        Query s = QueryBuilder.selectAll().from("table1")
                .innerJoin("table2").on("id1 = id2")
                .where("name = 'crossdata'");
        assertEquals(s.toString(), expected, "Query does not match");
    }

    /**
     * Full pipeline: projection, join, filter, ordering and grouping.
     * NOTE(review): the builder emits ORDER BY before GROUP BY — this pins the builder's
     * own serialisation order, not standard SQL clause order.
     */
    @Test
    public void selectComplex(){
        String expected = "SELECT col1, col2 FROM table1 "
                + "INNER JOIN table2 ON id1 = id2 "
                + "WHERE col1 = 'value1' "
                + "ORDER BY col3 "
                + "GROUP BY col4;";
        Query s = QueryBuilder.select("col1", "col2")
                .from("table1")
                .join("table2").on("id1 = id2")
                .where("col1 = 'value1'")
                .orderBy("col3")
                .groupBy("col4");
        assertEquals(s.toString(), expected, "Query does not match");
    }
}
| apache-2.0 |
podnov/queryinfo | jpa/src/main/java/com/evanzeimet/queryinfo/jpa/result/QueryInfoResultConverter.java | 981 | package com.evanzeimet.queryinfo.jpa.result;
/*
* #%L
* queryinfo-jpa
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2015 Evan Zeimet
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.List;
import com.evanzeimet.queryinfo.QueryInfoException;
/**
 * Converts the rows produced by a JPA criteria query into the result type exposed to
 * query-info callers.
 *
 * @param <CriteriaQueryResult> the row type produced by the criteria query
 * @param <QueryInfoResult> the type each row is converted to
 */
public interface QueryInfoResultConverter<CriteriaQueryResult, QueryInfoResult> {

	/**
	 * Converts the given criteria-query rows.
	 *
	 * @param criteriaQueryResults the rows returned by the criteria query
	 * @return the converted results
	 * @throws QueryInfoException if a row cannot be converted
	 */
	List<QueryInfoResult> convert(List<CriteriaQueryResult> criteriaQueryResults) throws QueryInfoException;
}
| apache-2.0 |
SharplEr/ignite | modules/core/src/test/java/org/apache/ignite/testsuites/IgniteKernalSelfTestSuite.java | 10164 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.testsuites;
import java.util.Set;
import junit.framework.TestSuite;
import org.apache.ignite.internal.ComputeJobCancelWithServiceSelfTest;
import org.apache.ignite.internal.GridCommunicationSelfTest;
import org.apache.ignite.internal.GridDiscoveryEventSelfTest;
import org.apache.ignite.internal.GridDiscoverySelfTest;
import org.apache.ignite.internal.GridFailedInputParametersSelfTest;
import org.apache.ignite.internal.GridHomePathSelfTest;
import org.apache.ignite.internal.GridKernalConcurrentAccessStopSelfTest;
import org.apache.ignite.internal.GridListenActorSelfTest;
import org.apache.ignite.internal.GridLocalEventListenerSelfTest;
import org.apache.ignite.internal.GridNodeFilterSelfTest;
import org.apache.ignite.internal.GridNodeLocalSelfTest;
import org.apache.ignite.internal.GridNodeVisorAttributesSelfTest;
import org.apache.ignite.internal.GridRuntimeExceptionSelfTest;
import org.apache.ignite.internal.GridSameVmStartupSelfTest;
import org.apache.ignite.internal.GridSpiExceptionSelfTest;
import org.apache.ignite.internal.GridVersionSelfTest;
import org.apache.ignite.internal.IgniteConcurrentEntryProcessorAccessStopTest;
import org.apache.ignite.internal.IgniteConnectionConcurrentReserveAndRemoveTest;
import org.apache.ignite.internal.IgniteUpdateNotifierPerClusterSettingSelfTest;
import org.apache.ignite.internal.managers.GridManagerStopSelfTest;
import org.apache.ignite.internal.managers.communication.GridCommunicationSendMessageSelfTest;
import org.apache.ignite.internal.managers.deployment.GridDeploymentManagerStopSelfTest;
import org.apache.ignite.internal.managers.discovery.GridDiscoveryManagerAliveCacheSelfTest;
import org.apache.ignite.internal.managers.discovery.GridDiscoveryManagerAttributesSelfTest;
import org.apache.ignite.internal.managers.discovery.IgniteTopologyPrintFormatSelfTest;
import org.apache.ignite.internal.managers.events.GridEventStorageManagerSelfTest;
import org.apache.ignite.internal.processors.cluster.GridAddressResolverSelfTest;
import org.apache.ignite.internal.processors.cluster.GridUpdateNotifierSelfTest;
import org.apache.ignite.internal.processors.port.GridPortProcessorSelfTest;
import org.apache.ignite.internal.processors.service.GridServiceClientNodeTest;
import org.apache.ignite.internal.processors.service.GridServiceContinuousQueryRedeployTest;
import org.apache.ignite.internal.processors.service.GridServiceDeploymentCompoundFutureSelfTest;
import org.apache.ignite.internal.processors.service.GridServicePackagePrivateSelfTest;
import org.apache.ignite.internal.processors.service.GridServiceProcessorBatchDeploySelfTest;
import org.apache.ignite.internal.processors.service.GridServiceProcessorMultiNodeConfigSelfTest;
import org.apache.ignite.internal.processors.service.GridServiceProcessorMultiNodeSelfTest;
import org.apache.ignite.internal.processors.service.GridServiceProcessorProxySelfTest;
import org.apache.ignite.internal.processors.service.GridServiceProcessorSingleNodeSelfTest;
import org.apache.ignite.internal.processors.service.GridServiceProcessorStopSelfTest;
import org.apache.ignite.internal.processors.service.GridServiceProxyClientReconnectSelfTest;
import org.apache.ignite.internal.processors.service.GridServiceProxyNodeStopSelfTest;
import org.apache.ignite.internal.processors.service.GridServiceReassignmentSelfTest;
import org.apache.ignite.internal.processors.service.GridServiceSerializationSelfTest;
import org.apache.ignite.internal.processors.service.IgniteServiceDeployment2ClassLoadersDefaultMarshallerTest;
import org.apache.ignite.internal.processors.service.IgniteServiceDeployment2ClassLoadersJdkMarshallerTest;
import org.apache.ignite.internal.processors.service.IgniteServiceDeploymentClassLoadingDefaultMarshallerTest;
import org.apache.ignite.internal.processors.service.IgniteServiceDeploymentClassLoadingJdkMarshallerTest;
import org.apache.ignite.internal.processors.service.IgniteServiceDynamicCachesSelfTest;
import org.apache.ignite.internal.processors.service.IgniteServiceProxyTimeoutInitializedTest;
import org.apache.ignite.internal.processors.service.IgniteServiceReassignmentTest;
import org.apache.ignite.internal.processors.service.ServiceDeploymentOnActivationTest;
import org.apache.ignite.internal.processors.service.ServiceDeploymentOutsideBaselineTest;
import org.apache.ignite.internal.processors.service.ServicePredicateAccessCacheTest;
import org.apache.ignite.internal.util.GridStartupWithUndefinedIgniteHomeSelfTest;
import org.apache.ignite.services.ServiceThreadPoolSelfTest;
import org.apache.ignite.spi.communication.GridCacheMessageSelfTest;
import org.apache.ignite.testframework.GridTestUtils;
/**
* Kernal self test suite.
*/
public class IgniteKernalSelfTestSuite extends TestSuite {
    /**
     * Builds the kernal test suite with no tests excluded.
     *
     * @return Kernal test suite.
     * @throws Exception If failed.
     */
    public static TestSuite suite() throws Exception {
        return suite(null);
    }

    /**
     * Builds the kernal test suite.
     *
     * NOTE(review): only {@code GridVersionSelfTest} is registered via
     * {@code GridTestUtils.addTestIfNeeded} and therefore honours {@code ignoredTests};
     * every other test is added unconditionally — confirm whether that is intentional.
     *
     * @param ignoredTests Tests don't include in the execution.
     * @return Test suite.
     * @throws Exception Thrown in case of the failure.
     */
    public static TestSuite suite(Set<Class> ignoredTests) throws Exception {
        TestSuite suite = new TestSuite("Ignite Kernal Test Suite");

        // Kernal basics: startup, SPI/runtime failures, node attributes.
        suite.addTestSuite(GridSameVmStartupSelfTest.class);
        suite.addTestSuite(GridSpiExceptionSelfTest.class);
        suite.addTestSuite(GridRuntimeExceptionSelfTest.class);
        suite.addTestSuite(GridFailedInputParametersSelfTest.class);
        suite.addTestSuite(GridNodeFilterSelfTest.class);
        suite.addTestSuite(GridNodeVisorAttributesSelfTest.class);

        // Discovery, communication and manager lifecycle.
        suite.addTestSuite(GridDiscoverySelfTest.class);
        suite.addTestSuite(GridCommunicationSelfTest.class);
        suite.addTestSuite(GridEventStorageManagerSelfTest.class);
        suite.addTestSuite(GridCommunicationSendMessageSelfTest.class);
        suite.addTestSuite(GridCacheMessageSelfTest.class);
        suite.addTestSuite(GridDeploymentManagerStopSelfTest.class);
        suite.addTestSuite(GridManagerStopSelfTest.class);
        suite.addTestSuite(GridDiscoveryManagerAttributesSelfTest.RegularDiscovery.class);
        suite.addTestSuite(GridDiscoveryManagerAttributesSelfTest.ClientDiscovery.class);
        suite.addTestSuite(GridDiscoveryManagerAliveCacheSelfTest.class);
        suite.addTestSuite(GridDiscoveryEventSelfTest.class);
        suite.addTestSuite(GridPortProcessorSelfTest.class);

        // Home path and environment handling.
        suite.addTestSuite(GridHomePathSelfTest.class);
        suite.addTestSuite(GridStartupWithUndefinedIgniteHomeSelfTest.class);
        GridTestUtils.addTestIfNeeded(suite, GridVersionSelfTest.class, ignoredTests);

        // Kernal misc: actors, node-local storage, concurrent access, update notifier.
        suite.addTestSuite(GridListenActorSelfTest.class);
        suite.addTestSuite(GridNodeLocalSelfTest.class);
        suite.addTestSuite(GridKernalConcurrentAccessStopSelfTest.class);
        suite.addTestSuite(IgniteConcurrentEntryProcessorAccessStopTest.class);
        suite.addTestSuite(GridUpdateNotifierSelfTest.class);
        suite.addTestSuite(GridAddressResolverSelfTest.class);
        suite.addTestSuite(IgniteUpdateNotifierPerClusterSettingSelfTest.class);
        suite.addTestSuite(GridLocalEventListenerSelfTest.class);
        suite.addTestSuite(IgniteTopologyPrintFormatSelfTest.class);
        suite.addTestSuite(ComputeJobCancelWithServiceSelfTest.class);
        suite.addTestSuite(IgniteConnectionConcurrentReserveAndRemoveTest.class);

        // Managed Services.
        suite.addTestSuite(GridServiceProcessorSingleNodeSelfTest.class);
        suite.addTestSuite(GridServiceProcessorMultiNodeSelfTest.class);
        suite.addTestSuite(GridServiceProcessorMultiNodeConfigSelfTest.class);
        suite.addTestSuite(GridServiceProcessorProxySelfTest.class);
        suite.addTestSuite(GridServiceReassignmentSelfTest.class);
        suite.addTestSuite(GridServiceClientNodeTest.class);
        suite.addTestSuite(GridServiceProcessorStopSelfTest.class);
        suite.addTestSuite(ServicePredicateAccessCacheTest.class);
        suite.addTestSuite(GridServicePackagePrivateSelfTest.class);
        suite.addTestSuite(GridServiceSerializationSelfTest.class);
        suite.addTestSuite(GridServiceProxyNodeStopSelfTest.class);
        suite.addTestSuite(GridServiceProxyClientReconnectSelfTest.class);
        suite.addTestSuite(IgniteServiceReassignmentTest.class);
        suite.addTestSuite(IgniteServiceProxyTimeoutInitializedTest.class);
        suite.addTestSuite(IgniteServiceDynamicCachesSelfTest.class);
        suite.addTestSuite(GridServiceContinuousQueryRedeployTest.class);
        suite.addTestSuite(ServiceThreadPoolSelfTest.class);
        suite.addTestSuite(GridServiceProcessorBatchDeploySelfTest.class);
        suite.addTestSuite(GridServiceDeploymentCompoundFutureSelfTest.class);
        suite.addTestSuite(ServiceDeploymentOnActivationTest.class);
        suite.addTestSuite(ServiceDeploymentOutsideBaselineTest.class);
        suite.addTestSuite(IgniteServiceDeploymentClassLoadingDefaultMarshallerTest.class);
        suite.addTestSuite(IgniteServiceDeploymentClassLoadingJdkMarshallerTest.class);
        suite.addTestSuite(IgniteServiceDeployment2ClassLoadersDefaultMarshallerTest.class);
        suite.addTestSuite(IgniteServiceDeployment2ClassLoadersJdkMarshallerTest.class);

        return suite;
    }
}
| apache-2.0 |
tbroyer/gwt-maven-archetypes | modular-webapp/src/test/resources/projects/basic-webapp/reference/basic-webapp-shared/src/main/java/it/pkg/FieldVerifier.java | 1492 | package it.pkg;
/**
* <p>
* FieldVerifier validates that the name the user enters is valid.
* </p>
* <p>
* This class is in the <code>shared</code> project because we use it in both
* the client code and on the server. On the client, we verify that the name is
* valid before sending an RPC request so the user doesn't have to wait for a
* network round trip to get feedback. On the server, we verify that the name is
* correct to ensure that the input is correct regardless of where the RPC
* originates.
* </p>
* <p>
* When creating a class that is used on both the client and the server, be sure
* that all code is translatable and does not use native JavaScript. Code that
* is not translatable (such as code that interacts with a database or the file
* system) cannot be compiled into client side JavaScript. Code that uses native
* JavaScript (such as Widgets) cannot be run on the server.
* </p>
*/
public class FieldVerifier {

  /**
   * Verifies that the specified name is valid for our service: the name must
   * be non-null and longer than three characters.
   *
   * In this example, we only require that the name is at least four
   * characters. In your application, you can use more complex checks to ensure
   * that usernames, passwords, email addresses, URLs, and other fields have the
   * proper syntax.
   *
   * @param name the name to validate
   * @return true if valid, false if invalid
   */
  public static boolean isValidName(String name) {
    // Null-safe: a missing name can never be valid.
    return name != null && name.length() > 3;
  }
}
| apache-2.0 |
apache/tomcat | java/org/apache/catalina/core/StandardThreadExecutor.java | 8970 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.core;
import java.util.concurrent.TimeUnit;
import org.apache.catalina.Executor;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.LifecycleState;
import org.apache.catalina.util.LifecycleMBeanBase;
import org.apache.tomcat.util.res.StringManager;
import org.apache.tomcat.util.threads.ResizableExecutor;
import org.apache.tomcat.util.threads.TaskQueue;
import org.apache.tomcat.util.threads.TaskThreadFactory;
import org.apache.tomcat.util.threads.ThreadPoolExecutor;
/**
 * Standard lifecycle-managed, JMX-exposed Catalina {@link Executor}
 * implementation backed by a Tomcat {@link ThreadPoolExecutor}. The pool is
 * created in {@link #startInternal()} and torn down in
 * {@link #stopInternal()}; setters for pool-related properties propagate the
 * new value to the live executor when one exists.
 */
public class StandardThreadExecutor extends LifecycleMBeanBase
        implements Executor, ResizableExecutor {

    protected static final StringManager sm = StringManager.getManager(StandardThreadExecutor.class);

    // ---------------------------------------------- Properties

    /**
     * Default thread priority
     */
    protected int threadPriority = Thread.NORM_PRIORITY;

    /**
     * Run threads in daemon or non-daemon state
     */
    protected boolean daemon = true;

    /**
     * Default name prefix for the thread name
     */
    protected String namePrefix = "tomcat-exec-";

    /**
     * max number of threads
     */
    protected int maxThreads = 200;

    /**
     * min number of threads (kept as the executor's core pool size)
     */
    protected int minSpareThreads = 25;

    /**
     * idle time in milliseconds before a surplus thread is terminated
     */
    protected int maxIdleTime = 60000;

    /**
     * The executor we use for this component. Non-null only between
     * startInternal() and stopInternal().
     */
    protected ThreadPoolExecutor executor = null;

    /**
     * the name of this thread pool
     */
    protected String name;

    /**
     * prestart threads? If true, all core threads are started eagerly when
     * the component starts.
     */
    protected boolean prestartminSpareThreads = false;

    /**
     * The maximum number of elements that can queue up before we reject them
     */
    protected int maxQueueSize = Integer.MAX_VALUE;

    /**
     * After a context is stopped, threads in the pool are renewed. To avoid
     * renewing all threads at the same time, this delay is observed between 2
     * threads being renewed.
     */
    protected long threadRenewalDelay =
        org.apache.tomcat.util.threads.Constants.DEFAULT_THREAD_RENEWAL_DELAY;

    // Work queue backing the executor; recreated on every start.
    private TaskQueue taskqueue = null;

    // ---------------------------------------------- Constructors

    public StandardThreadExecutor() {
        //empty constructor for the digester
    }

    // ---------------------------------------------- Public Methods

    @Override
    protected void initInternal() throws LifecycleException {
        super.initInternal();
    }

    /**
     * Start the component and implement the requirements
     * of {@link org.apache.catalina.util.LifecycleBase#startInternal()}.
     *
     * @exception LifecycleException if this component detects a fatal error
     *  that prevents this component from being used
     */
    @Override
    protected void startInternal() throws LifecycleException {
        // Build the queue and factory first, then the executor itself.
        taskqueue = new TaskQueue(maxQueueSize);
        TaskThreadFactory tf = new TaskThreadFactory(namePrefix,daemon,getThreadPriority());
        executor = new ThreadPoolExecutor(getMinSpareThreads(), getMaxThreads(), maxIdleTime, TimeUnit.MILLISECONDS,taskqueue, tf);
        executor.setThreadRenewalDelay(threadRenewalDelay);
        if (prestartminSpareThreads) {
            executor.prestartAllCoreThreads();
        }
        // The queue needs a back-reference to the executor it feeds; set it
        // last, once the executor is fully configured.
        taskqueue.setParent(executor);
        setState(LifecycleState.STARTING);
    }

    /**
     * Stop the component and implement the requirements
     * of {@link org.apache.catalina.util.LifecycleBase#stopInternal()}.
     *
     * @exception LifecycleException if this component detects a fatal error
     *  that needs to be reported
     */
    @Override
    protected void stopInternal() throws LifecycleException {
        setState(LifecycleState.STOPPING);
        if (executor != null) {
            // Interrupts running tasks and discards queued ones.
            executor.shutdownNow();
        }
        executor = null;
        taskqueue = null;
    }

    @Override
    protected void destroyInternal() throws LifecycleException {
        super.destroyInternal();
    }

    @Override
    public void execute(Runnable command) {
        if (executor != null) {
            // Note any RejectedExecutionException due to the use of TaskQueue
            // will be handled by the o.a.t.u.threads.ThreadPoolExecutor
            executor.execute(command);
        } else {
            // Component not started (or already stopped).
            throw new IllegalStateException(sm.getString("standardThreadExecutor.notStarted"));
        }
    }

    /** Notifies the pool that a context stopped so its threads can be renewed. */
    public void contextStopping() {
        if (executor != null) {
            executor.contextStopping();
        }
    }

    public int getThreadPriority() {
        return threadPriority;
    }

    public boolean isDaemon() {
        return daemon;
    }

    public String getNamePrefix() {
        return namePrefix;
    }

    public int getMaxIdleTime() {
        return maxIdleTime;
    }

    @Override
    public int getMaxThreads() {
        return maxThreads;
    }

    public int getMinSpareThreads() {
        return minSpareThreads;
    }

    @Override
    public String getName() {
        return name;
    }

    public boolean isPrestartminSpareThreads() {
        return prestartminSpareThreads;
    }

    public void setThreadPriority(int threadPriority) {
        this.threadPriority = threadPriority;
    }

    public void setDaemon(boolean daemon) {
        this.daemon = daemon;
    }

    public void setNamePrefix(String namePrefix) {
        this.namePrefix = namePrefix;
    }

    // The setters below also update the live executor, if any, so changes
    // made through JMX take effect without a restart.

    public void setMaxIdleTime(int maxIdleTime) {
        this.maxIdleTime = maxIdleTime;
        if (executor != null) {
            executor.setKeepAliveTime(maxIdleTime, TimeUnit.MILLISECONDS);
        }
    }

    public void setMaxThreads(int maxThreads) {
        this.maxThreads = maxThreads;
        if (executor != null) {
            executor.setMaximumPoolSize(maxThreads);
        }
    }

    public void setMinSpareThreads(int minSpareThreads) {
        this.minSpareThreads = minSpareThreads;
        if (executor != null) {
            executor.setCorePoolSize(minSpareThreads);
        }
    }

    public void setPrestartminSpareThreads(boolean prestartminSpareThreads) {
        this.prestartminSpareThreads = prestartminSpareThreads;
    }

    public void setName(String name) {
        this.name = name;
    }

    // NOTE: unlike the setters above, a queue-size change only takes effect
    // on the next start, since the TaskQueue is created in startInternal().
    public void setMaxQueueSize(int size) {
        this.maxQueueSize = size;
    }

    public int getMaxQueueSize() {
        return maxQueueSize;
    }

    public long getThreadRenewalDelay() {
        return threadRenewalDelay;
    }

    public void setThreadRenewalDelay(long threadRenewalDelay) {
        this.threadRenewalDelay = threadRenewalDelay;
        if (executor != null) {
            executor.setThreadRenewalDelay(threadRenewalDelay);
        }
    }

    // Statistics from the thread pool. All report a neutral value when the
    // component is not started (executor == null).

    @Override
    public int getActiveCount() {
        return (executor != null) ? executor.getActiveCount() : 0;
    }

    public long getCompletedTaskCount() {
        return (executor != null) ? executor.getCompletedTaskCount() : 0;
    }

    public int getCorePoolSize() {
        return (executor != null) ? executor.getCorePoolSize() : 0;
    }

    public int getLargestPoolSize() {
        return (executor != null) ? executor.getLargestPoolSize() : 0;
    }

    @Override
    public int getPoolSize() {
        return (executor != null) ? executor.getPoolSize() : 0;
    }

    public int getQueueSize() {
        // -1 (not 0) signals "no executor", distinguishing it from an empty queue.
        return (executor != null) ? executor.getQueue().size() : -1;
    }

    @Override
    public boolean resizePool(int corePoolSize, int maximumPoolSize) {
        if (executor == null) {
            return false;
        }
        executor.setCorePoolSize(corePoolSize);
        executor.setMaximumPoolSize(maximumPoolSize);
        return true;
    }

    // Queue capacity cannot be changed at runtime; see setMaxQueueSize().
    @Override
    public boolean resizeQueue(int capacity) {
        return false;
    }

    @Override
    protected String getDomainInternal() {
        // No way to navigate to Engine. Needs to have domain set.
        return null;
    }

    @Override
    protected String getObjectNameKeyProperties() {
        return "type=Executor,name=" + getName();
    }
}
| apache-2.0 |
vimukthi-git/OpenGTS_2.4.9 | src/org/opengts/war/report/ReportException.java | 1492 | // ----------------------------------------------------------------------------
// Copyright 2007-2013, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2007/03/11 Martin D. Flynn
// -Initial release
// ----------------------------------------------------------------------------
package org.opengts.war.report;
import java.util.*;
import java.io.*;
/**
 * Exception type raised while assembling or rendering a report. A thin
 * wrapper around {@link Exception} so report failures can be caught as a
 * distinct type.
 */
public class ReportException
    extends Exception
{

    // ------------------------------------------------------------------------

    /**
     * Creates a ReportException carrying only a detail message.
     * @param message description of the failure
     */
    public ReportException(String message)
    {
        super(message);
    }

    /**
     * Creates a ReportException that wraps an underlying cause.
     * @param message description of the failure
     * @param cause   the exception that triggered this one
     */
    public ReportException(String message, Throwable cause)
    {
        super(message, cause);
    }

    // ------------------------------------------------------------------------

}
| apache-2.0 |
apache/incubator-shardingsphere | shardingsphere-features/shardingsphere-db-discovery/shardingsphere-db-discovery-core/src/main/java/org/apache/shardingsphere/dbdiscovery/constant/DatabaseDiscoveryOrder.java | 1286 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.dbdiscovery.constant;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
/**
* Database discovery order.
*/
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public final class DatabaseDiscoveryOrder {

    /**
     * Ordering constant for the standard database discovery rule
     * (presumably consumed by the SPI ordering mechanism — confirm against
     * the ordered-SPI registry).
     */
    public static final int ORDER = 30;

    /**
     * Ordering constant for the algorithm-provider variant; always sorts
     * immediately after {@link #ORDER}.
     */
    public static final int ALGORITHM_PROVIDER_ORDER = ORDER + 1;
}
| apache-2.0 |
alibaba/jstorm | jstorm-core/src/main/java/storm/trident/topology/ITridentBatchBolt.java | 1362 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package storm.trident.topology;
import backtype.storm.coordination.BatchOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.IComponent;
import backtype.storm.tuple.Tuple;
import java.util.Map;
public interface ITridentBatchBolt extends IComponent {
    /** Called once, before any tuples are processed, with the topology config and output collector. */
    void prepare(Map conf, TopologyContext context, BatchOutputCollector collector);

    /** Processes one tuple belonging to the batch described by {@code batchInfo}. */
    void execute(BatchInfo batchInfo, Tuple tuple);

    /** Signals that all tuples of the given batch have been delivered. */
    void finishBatch(BatchInfo batchInfo);

    /**
     * Creates the state object associated with one batch; presumably handed
     * back via {@code BatchInfo} on subsequent calls — confirm against the
     * coordinating caller.
     */
    Object initBatchState(String batchGroup, Object batchId);

    /** Called when the bolt is being shut down; release any held resources here. */
    void cleanup();
}
| apache-2.0 |
xloye/tddl5 | tddl-executor/src/main/java/com/taobao/tddl/executor/function/scalar/datatime/Timestamp.java | 2012 | package com.taobao.tddl.executor.function.scalar.datatime;
import java.util.Calendar;
import com.taobao.tddl.executor.common.ExecutionContext;
import com.taobao.tddl.executor.function.ScalarFunction;
import com.taobao.tddl.executor.utils.ExecUtils;
import com.taobao.tddl.optimizer.core.datatype.DataType;
/**
* With a single argument, this function returns the date or datetime expression
* expr as a datetime value. With two arguments, it adds the time expression
* expr2 to the date or datetime expression expr1 and returns the result as a
* datetime value.
*
* <pre>
* mysql> SELECT TIMESTAMP('2003-12-31');
* -> '2003-12-31 00:00:00'
* mysql> SELECT TIMESTAMP('2003-12-31 12:00:00','12:00:00');
* -> '2004-01-01 00:00:00'
* </pre>
*
* @author jianghang 2014-4-16 下午11:16:06
* @since 5.0.7
*/
public class Timestamp extends ScalarFunction {

    /**
     * Evaluates the TIMESTAMP() function.
     *
     * With one argument, returns that date/datetime expression converted to a
     * timestamp. With two arguments, adds the time expression (argument 2) to
     * the base date/datetime (argument 1) and returns the resulting timestamp.
     * Any NULL argument makes the whole result NULL.
     */
    @SuppressWarnings("deprecation")
    @Override
    public Object compute(Object[] args, ExecutionContext ec) {
        // NULL in, NULL out.
        for (Object arg : args) {
            if (ExecUtils.isNull(arg)) {
                return null;
            }
        }

        java.sql.Timestamp base = DataType.TimestampType.convertFrom(args[0]);
        if (args.length < 2) {
            // Single-argument form: just the converted value itself.
            return base;
        }

        // Two-argument form: shift the base value by the time expression's
        // hour/minute/second components.
        java.sql.Time offset = DataType.TimeType.convertFrom(args[1]);
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(base);
        calendar.add(Calendar.HOUR_OF_DAY, offset.getHours());
        calendar.add(Calendar.MINUTE, offset.getMinutes());
        calendar.add(Calendar.SECOND, offset.getSeconds());
        return getReturnType().convertFrom(calendar.getTime());
    }

    /** TIMESTAMP() always yields a timestamp value. */
    @Override
    public DataType getReturnType() {
        return DataType.TimestampType;
    }

    /** SQL name(s) this scalar function is registered under. */
    @Override
    public String[] getFunctionNames() {
        return new String[] { "TIMESTAMP" };
    }
}
| apache-2.0 |
janzoner/cw-omnibus | Introspection/SAWMonitorTile/app/src/main/java/com/commonsware/android/sawmonitor/MainActivity.java | 1338 | /***
Copyright (c) 2016 CommonsWare, LLC
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy
of the License at http://www.apache.org/licenses/LICENSE-2.0. Unless required
by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
OF ANY KIND, either express or implied. See the License for the specific
language governing permissions and limitations under the License.
From _The Busy Coder's Guide to Android Development_
https://commonsware.com/Android
*/
package com.commonsware.android.sawmonitor;
import android.app.Activity;
import android.app.NotificationManager;
import android.content.Context;
import android.os.Bundle;
public class MainActivity extends Activity {
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    // The user has opened the UI, so dismiss the notification identified by
    // PackageReceiver.NOTIFY_ID (presumably raised by PackageReceiver when a
    // flagged package was detected).
    NotificationManager mgr=
      (NotificationManager)getSystemService(NOTIFICATION_SERVICE);

    mgr.cancel(PackageReceiver.NOTIFY_ID);

    // Attach the settings fragment only on first creation; after a
    // configuration change the framework restores the existing fragment.
    if (getFragmentManager().findFragmentById(android.R.id.content)==null) {
      getFragmentManager().beginTransaction()
                          .add(android.R.id.content,
                               new SettingsFragment()).commit();
    }
  }
} | apache-2.0 |
riftsaw/riftsaw-ode | bpel-compiler/src/main/java/org/apache/ode/bpel/compiler/DefaultResourceFinder.java | 5182 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.bpel.compiler;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URI;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Basic implementation of the {@link ResourceFinder} interface. Resolves
* URIs relative to a base URI specified at the time of construction.
*
* @author Maciej Szefler - m s z e f l e r @ g m a i l . c o m
*
*/
public class DefaultResourceFinder implements ResourceFinder {
    private static final Log __log = LogFactory.getLog(DefaultResourceFinder.class);

    // Base directory used to relativize incoming URIs.
    private File _relativeDir;
    // Base directory under which relativized URIs are re-anchored.
    private File _absoluteDir;

    /**
     * Default constructor: resolve relative URIs against current working directory.
     */
    public DefaultResourceFinder() {
        _absoluteDir = new File("");
        _relativeDir = _absoluteDir;
    }

    /**
     * Constructor: resolve relative URIs against specified directory.
     * @param relativeDir base path for relative URLs.
     * @param absoluteDir base path for absolute URLs.
     */
    public DefaultResourceFinder(File relativeDir, File absoluteDir) {
        checkDir("relativeDir", relativeDir);
        checkDir("absoluteDir", absoluteDir);
        _relativeDir = relativeDir;
        _absoluteDir = absoluteDir;
    }

    // Validates that a constructor argument is a non-null, existing directory.
    private void checkDir(String arg, File dir) {
        if (dir == null) {
            throw new IllegalArgumentException("Argument '"+arg+"' is null");
        }
        if (!dir.exists()) {
            throw new IllegalArgumentException("Directory does not exist: " + dir);
        }
    }

    /**
     * Opens a resource, trying the filesystem first and falling back to the
     * thread context classloader (classpath lookup). Returns null when the
     * resource cannot be found by either route.
     */
    public InputStream openResource(URI uri) throws MalformedURLException, IOException {
        uri = relativize(uri);
        InputStream r = openFileResource(uri);
        if (r != null) {
            return r;
        }
        if (__log.isDebugEnabled()) {
            __log.debug("trying classpath resource for " + uri);
        }
        r = Thread.currentThread().getContextClassLoader().getResourceAsStream(uri.getPath());
        if (r != null) {
            return r;
        } else {
            if (__log.isDebugEnabled()) {
                __log.debug("classpath resource not found " + uri);
            }
            return null;
        }
    }

    // Filesystem half of openResource(); returns null on any miss so the
    // caller can fall back to the classpath.
    private InputStream openFileResource(URI uri) throws MalformedURLException, IOException {
        URI absolute = _absoluteDir.toURI();
        if (__log.isDebugEnabled()) {
            __log.debug("openResource: uri="+uri+" relativeDir="+_relativeDir+" absoluteDir="+_absoluteDir);
        }
        // Absolute file: URIs are opened directly, bypassing both base dirs.
        if (uri.isAbsolute() && uri.getScheme().equals("file")) {
            try {
                return uri.toURL().openStream();
            } catch (Exception except) {
                __log.debug("openResource: unable to open file URL " + uri + "; " + except.toString());
                return null;
            }
        }
        // Note that if we get an absolute URI, the relativize operation will simply
        // return the absolute URI.
        URI relative = _relativeDir.toURI().relativize(uri);
        // Still absolute after relativizing means the URI lies outside
        // _relativeDir; only urn: URIs are tolerated at this point.
        if (relative.isAbsolute() && !(relative.getScheme().equals("urn"))) {
            __log.fatal("openResource: invalid scheme (should be urn:) " + uri);
            return null;
        }
        File f = new File(absolute.getPath(), relative.getPath());
        if (f.exists()) {
            return new FileInputStream(f);
        } else {
            if (__log.isDebugEnabled()) {
                __log.debug("fileNotFound: " + f);
            }
            return null;
        }
    }

    /** @return the absolute base directory as a file: URI. */
    public URI getBaseResourceURI() {
        return _absoluteDir.toURI();
    }

    // Strips the _absoluteDir prefix from absolute URIs; relative URIs pass
    // through unchanged.
    private URI relativize(URI u) {
        if (u.isAbsolute()) {
            return _absoluteDir.toURI().relativize(u);
        } else return u;
    }

    /**
     * Resolves {@code child} against {@code parent} (both first relativized
     * against the absolute base), then re-anchors the result under the
     * absolute base directory.
     */
    public URI resolve(URI parent, URI child) {
        parent = relativize(parent);
        child = relativize(child);
        URI result = parent.resolve(child);
        URI result2 = _absoluteDir.toURI().resolve(result);
        if (__log.isDebugEnabled()) {
            __log.debug("resolving URI: parent " + parent + " child " + child + " result " + result + " resultAbsolute:" + result2);
        }
        return result2;
    }
}
| apache-2.0 |
jenkinsci/commons-fileupload | src/test/java/org/apache/commons/fileupload/MultipartStreamTest.java | 2849 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.fileupload;
import static org.junit.Assert.assertNotNull;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import org.junit.Test;
/**
* Unit tests {@link org.apache.commons.fileupload.MultipartStream}.
*
* @version $Id$
*/
public class MultipartStreamTest {

    private static final String BOUNDARY_TEXT = "myboundary";

    // All three tests feed the same tiny payload through a ByteArrayInputStream
    // and only verify that construction succeeds (or fails) as expected.

    @Test
    public void testThreeParamConstructor() throws Exception {
        final byte[] payload = "foobar".getBytes();
        InputStream in = new ByteArrayInputStream(payload);
        byte[] boundary = BOUNDARY_TEXT.getBytes();
        // Smallest legal buffer: boundary, its prefix, plus one extra byte.
        int bufSize =
            boundary.length + MultipartStream.BOUNDARY_PREFIX.length + 1;
        MultipartStream stream = new MultipartStream(
                in,
                boundary,
                bufSize,
                new MultipartStream.ProgressNotifier(null, payload.length));
        assertNotNull(stream);
    }

    @SuppressWarnings("unused")
    @Test(expected=IllegalArgumentException.class)
    public void testSmallBuffer() throws Exception {
        final byte[] payload = "foobar".getBytes();
        InputStream in = new ByteArrayInputStream(payload);
        // A one-byte buffer cannot hold the boundary, so construction must throw.
        new MultipartStream(
                in,
                BOUNDARY_TEXT.getBytes(),
                1,
                new MultipartStream.ProgressNotifier(null, payload.length));
    }

    @Test
    public void testTwoParamConstructor() throws Exception {
        final byte[] payload = "foobar".getBytes();
        InputStream in = new ByteArrayInputStream(payload);
        MultipartStream stream = new MultipartStream(
                in,
                BOUNDARY_TEXT.getBytes(),
                new MultipartStream.ProgressNotifier(null, payload.length));
        assertNotNull(stream);
    }
}
| apache-2.0 |
IllusionRom-deprecated/android_platform_tools_idea | java/java-impl/src/com/intellij/codeInsight/completion/methodChains/completion/lookup/WeightableChainLookupElement.java | 816 | package com.intellij.codeInsight.completion.methodChains.completion.lookup;
import com.intellij.codeInsight.completion.methodChains.search.ChainRelevance;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementDecorator;
import com.intellij.codeInsight.lookup.LookupElementPresentation;
import org.jetbrains.annotations.NotNull;
/**
* @author Dmitry Batkovich
*/
public final class WeightableChainLookupElement extends LookupElementDecorator<LookupElement> {
    // Relevance score attached to this chain completion item (presumably
    // consumed by a completion sorter/weigher — confirm against callers).
    private final ChainRelevance myChainRelevance;

    /**
     * Wraps an existing lookup element with a chain-relevance score.
     *
     * @param delegate  the lookup element being decorated
     * @param relevance the computed relevance of the method chain
     */
    public WeightableChainLookupElement(final @NotNull LookupElement delegate, final ChainRelevance relevance) {
        super(delegate);
        myChainRelevance = relevance;
    }

    /** @return the relevance score supplied at construction time. */
    public ChainRelevance getChainRelevance() {
        return myChainRelevance;
    }
}
| apache-2.0 |
tanhaichao/leopard-data | leopard-memdb/src/test/java/io/leopard/data4j/memdb/rsync/QueueListenerTest.java | 1179 | package io.leopard.data4j.memdb.rsync;
import io.leopard.data4j.memdb.MemdbRsyncQueue;
import io.leopard.data4j.memdb.QueueBean;
import io.leopard.data4j.memdb.QueueListener;
import io.leopard.data4j.memdb.SerializeImpl;
import org.junit.Test;
import org.mockito.Mockito;
/**
 * Smoke tests for {@link QueueListener}: each callback is invoked once and
 * the test passes as long as no exception escapes. No interactions on the
 * mocked queue are verified.
 */
public class QueueListenerTest {

	// Mocked rsync queue; the listener under test delegates to it.
	MemdbRsyncQueue memdbRsyncQueue = Mockito.mock(MemdbRsyncQueue.class);

	// Listener constructed with sender id "sender".
	private final QueueListener listener = new QueueListener(memdbRsyncQueue, "sender");

	@Test
	public void onSubscribe() {
		listener.onSubscribe("channel", 1);
	}

	@Test
	public void onMessage() {
		// Exercises both paths: a bean with no sender, and one whose sender
		// matches the listener's own id ("sender") — presumably skipped as a
		// self-originated message; confirm against QueueListener.
		QueueBean bean = new QueueBean();
		bean.setKey("key");
		this.listener.onMessage("channel", SerializeImpl.getInstance().serialize(bean));
		bean.setSender("sender");
		this.listener.onMessage("channel", SerializeImpl.getInstance().serialize(bean));
	}

	@Test
	public void onUnsubscribe() {
		listener.onUnsubscribe("channel", 1);
	}

	@Test
	public void onPSubscribe() {
		listener.onPSubscribe("pattern", 1);
	}

	@Test
	public void onPUnsubscribe() {
		listener.onPUnsubscribe("pattern", 1);
	}

	@Test
	public void onPMessage() {
		listener.onPMessage("pattern", "channel", "message");
	}
} | apache-2.0 |
ground-context/ground | modules/common/app/edu/berkeley/ground/common/dao/core/NodeDao.java | 1238 | /**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.berkeley.ground.common.dao.core;
import edu.berkeley.ground.common.dao.version.ItemDao;
import edu.berkeley.ground.common.exception.GroundException;
import edu.berkeley.ground.common.model.core.Node;
import java.util.List;
import java.util.Map;
public interface NodeDao extends ItemDao<Node> {
@Override
default Class<Node> getType() {
return Node.class;
}
@Override
Node retrieveFromDatabase(String sourceKey) throws GroundException;
@Override
Node retrieveFromDatabase(long id) throws GroundException;
List<Long> getLeaves(String sourceKey) throws GroundException;
Map<Long, Long> getHistory(String sourceKye) throws GroundException;
}
| apache-2.0 |
dCache/jglobus-1.8 | src/org/globus/ftp/dc/GridFTPDataChannel.java | 1499 | /*
* Copyright 1999-2006 University of Chicago
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.globus.ftp.dc;
import org.globus.ftp.GridFTPSession;
import org.globus.ftp.Session;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/*
GridFTPDataChannel, unlike SimpleDataChannel, does not own the associated socket and does not destroy it when the transfer completes. It is the facade who is responsible for socket lifetime management. This approach allows for data channel reuse.
**/
public class GridFTPDataChannel extends SimpleDataChannel {
private static Log logger =
LogFactory.getLog(GridFTPDataChannel.class.getName());
// utility alias to session
protected GridFTPSession gSession;
public GridFTPDataChannel(Session session, SocketBox socketBox) {
super(session, socketBox);
gSession = (GridFTPSession) session;
transferThreadFactory = new GridFTPTransferThreadFactory();
}
// todo: reimplement close()?
}
| apache-2.0 |
studanshu/datacollector | container/src/test/java/com/streamsets/datacollector/el/TestMathEL.java | 4301 | /**
* Copyright 2016 StreamSets Inc.
*
* Licensed under the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.el;
import com.streamsets.pipeline.lib.el.MathEL;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
*/
public class TestMathEL {
ELEvaluator eval;
ELVariables variables;
@Before
public void setUpELs() {
    // Fresh evaluator (exposing only the math:* functions) and an empty
    // variable scope before every test.
    eval = new ELEvaluator("test", MathEL.class);
    variables = new ELVariables();
}
@Test
public void testStringCasting() throws Exception {
    // String-typed arguments should be coerced to numbers before the
    // math function is applied, whether alone or mixed with numeric args.
    Assert.assertEquals(1.0, eval.eval(variables, "${math:abs(\"1.0\")}", Double.class), 0.1);
    Assert.assertEquals(1.0, eval.eval(variables, "${math:abs(\"-1\")}", Double.class), 0.1);
    Assert.assertEquals(1.0, eval.eval(variables, "${math:max(1, \"-1\")}", Double.class), 0.1);
    Assert.assertEquals(1.0, eval.eval(variables, "${math:max(\"-1.0\", 1.0)}", Double.class), 0.1);
}
@Test
public void testAbs() throws Exception {
Assert.assertEquals(1.0, eval.eval(variables, "${math:abs(1)}", Double.class), 0.1);
Assert.assertEquals(1.0, eval.eval(variables, "${math:abs(-1)}", Double.class), 0.1);
Assert.assertEquals(1.1, eval.eval(variables, "${math:abs(1.1)}", Double.class), 0.1);
Assert.assertEquals(1.1, eval.eval(variables, "${math:abs(-1.1)}", Double.class), 0.1);
}
@Test
public void testCeil() throws Exception {
Assert.assertEquals(1.0, eval.eval(variables, "${math:ceil(0.999)}", Double.class), 0.1);
Assert.assertEquals(1.0, eval.eval(variables, "${math:ceil(0.0009)}", Double.class), 0.1);
Assert.assertEquals(0.0, eval.eval(variables, "${math:ceil(-0.999)}", Double.class), 0.1);
Assert.assertEquals(0.0, eval.eval(variables, "${math:ceil(-0.0009)}", Double.class), 0.1);
}
@Test
public void testFloor() throws Exception {
Assert.assertEquals(0.0, eval.eval(variables, "${math:floor(0.999)}", Double.class), 0.1);
Assert.assertEquals(0.0, eval.eval(variables, "${math:floor(0.0009)}", Double.class), 0.1);
Assert.assertEquals(-1.0, eval.eval(variables, "${math:floor(-0.999)}", Double.class), 0.1);
Assert.assertEquals(-1.0, eval.eval(variables, "${math:floor(-0.0009)}", Double.class), 0.1);
}
@Test
public void testMax() throws Exception {
Assert.assertEquals(1.0, eval.eval(variables, "${math:max(1, -1)}", Double.class), 0.1);
Assert.assertEquals(1.0, eval.eval(variables, "${math:max(-1, 1)}", Double.class), 0.1);
Assert.assertEquals(1.0, eval.eval(variables, "${math:max(1.0, -1.0)}", Double.class), 0.1);
Assert.assertEquals(1.0, eval.eval(variables, "${math:max(-1.0, 1.0)}", Double.class), 0.1);
}
@Test
public void testMin() throws Exception {
Assert.assertEquals(-1.0, eval.eval(variables, "${math:min(1, -1)}", Double.class), 0.1);
Assert.assertEquals(-1.0, eval.eval(variables, "${math:min(-1, 1)}", Double.class), 0.1);
Assert.assertEquals(-1.0, eval.eval(variables, "${math:min(1.0, -1.0)}", Double.class), 0.1);
Assert.assertEquals(-1.0, eval.eval(variables, "${math:min(-1.0, 1.0)}", Double.class), 0.1);
}
@Test
public void testRound() throws Exception {
Assert.assertEquals(1.0, eval.eval(variables, "${math:round(0.999)}", Double.class), 0.1);
Assert.assertEquals(0.0, eval.eval(variables, "${math:round(0.0009)}", Double.class), 0.1);
Assert.assertEquals(-1.0, eval.eval(variables, "${math:round(-0.999)}", Double.class), 0.1);
Assert.assertEquals(0.0, eval.eval(variables, "${math:round(-0.0009)}", Double.class), 0.1);
}
}
| apache-2.0 |
linkedin/ambry | ambry-store/src/main/java/com/github/ambry/store/DiskManager.java | 23944 | /*
* Copyright 2016 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.store;
import com.github.ambry.account.AccountService;
import com.github.ambry.clustermap.DiskId;
import com.github.ambry.clustermap.PartitionId;
import com.github.ambry.clustermap.ReplicaId;
import com.github.ambry.clustermap.ReplicaStatusDelegate;
import com.github.ambry.config.DiskManagerConfig;
import com.github.ambry.config.StoreConfig;
import com.github.ambry.utils.Throttler;
import com.github.ambry.utils.Time;
import com.github.ambry.utils.Utils;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.github.ambry.store.StorageManager.*;
/**
* Manages all the stores on a disk.
*/
class DiskManager {
private final ConcurrentHashMap<PartitionId, BlobStore> stores = new ConcurrentHashMap<>();
private final ConcurrentHashMap<PartitionId, ReplicaId> partitionToReplicaMap = new ConcurrentHashMap<>();
private final ReadWriteLock rwLock = new ReentrantReadWriteLock();
private final DiskId disk;
private final StorageManagerMetrics metrics;
private final Time time;
private final DiskIOScheduler diskIOScheduler;
private final ScheduledExecutorService longLivedTaskScheduler;
private final DiskSpaceAllocator diskSpaceAllocator;
private final CompactionManager compactionManager;
private final Set<String> stoppedReplicas;
private final List<ReplicaStatusDelegate> replicaStatusDelegates;
private final Set<String> expectedDirs = new HashSet<>();
private final StoreConfig storeConfig;
private final ScheduledExecutorService scheduler;
private final StoreMetrics storeMainMetrics;
private final StoreMetrics storeUnderCompactionMetrics;
private final StoreKeyFactory keyFactory;
private final MessageStoreRecovery recovery;
private final MessageStoreHardDelete hardDelete;
private final List<String> unexpectedDirs = new ArrayList<>();
private final AccountService accountService;
private boolean running = false;
private static final Logger logger = LoggerFactory.getLogger(DiskManager.class);
/**
* Constructs a {@link DiskManager}
* @param disk representation of the disk.
* @param replicas all the replicas on this disk.
* @param storeConfig the settings for store configuration.
* @param diskManagerConfig the settings for disk manager configuration.
* @param scheduler the {@link ScheduledExecutorService} for executing background tasks.
* @param metrics the {@link StorageManagerMetrics} instance to use.
* @param storeMainMetrics the {@link StoreMetrics} object used for store-related metrics.
* @param storeUnderCompactionMetrics the {@link StoreMetrics} object used by stores created for compaction.
* @param keyFactory the {@link StoreKeyFactory} for parsing store keys.
* @param recovery the {@link MessageStoreRecovery} instance to use.
* @param hardDelete the {@link MessageStoreHardDelete} instance to use.
* @param replicaStatusDelegates a list of {@link ReplicaStatusDelegate} representing replica status agent for each
* cluster current node has participated in.
* @param stoppedReplicas a set of replicas that have been stopped (which should be skipped during startup).
* @param time the {@link Time} instance to use.
* @param accountService the {@link AccountService} instance to use.
*/
DiskManager(DiskId disk, List<ReplicaId> replicas, StoreConfig storeConfig, DiskManagerConfig diskManagerConfig,
ScheduledExecutorService scheduler, StorageManagerMetrics metrics, StoreMetrics storeMainMetrics,
StoreMetrics storeUnderCompactionMetrics, StoreKeyFactory keyFactory, MessageStoreRecovery recovery,
MessageStoreHardDelete hardDelete, List<ReplicaStatusDelegate> replicaStatusDelegates,
Set<String> stoppedReplicas, Time time, AccountService accountService) {
this.disk = disk;
this.storeConfig = storeConfig;
this.scheduler = scheduler;
this.metrics = metrics;
this.storeMainMetrics = storeMainMetrics;
this.storeUnderCompactionMetrics = storeUnderCompactionMetrics;
this.keyFactory = keyFactory;
this.recovery = recovery;
this.hardDelete = hardDelete;
this.accountService = accountService;
this.time = time;
diskIOScheduler = new DiskIOScheduler(getThrottlers(storeConfig, time));
longLivedTaskScheduler = Utils.newScheduler(1, true);
File reserveFileDir = new File(disk.getMountPath(), diskManagerConfig.diskManagerReserveFileDirName);
diskSpaceAllocator = new DiskSpaceAllocator(diskManagerConfig.diskManagerEnableSegmentPooling, reserveFileDir,
diskManagerConfig.diskManagerRequiredSwapSegmentsPerSize, metrics);
this.replicaStatusDelegates = replicaStatusDelegates;
this.stoppedReplicas = stoppedReplicas;
expectedDirs.add(reserveFileDir.getAbsolutePath());
for (ReplicaId replica : replicas) {
if (disk.equals(replica.getDiskId())) {
DiskMetrics diskMetrics = new DiskMetrics(storeMainMetrics.getRegistry(), disk.getMountPath(),
storeConfig.storeDiskIoReservoirTimeWindowMs);
BlobStore store =
new BlobStore(replica, storeConfig, scheduler, longLivedTaskScheduler, diskIOScheduler, diskSpaceAllocator,
storeMainMetrics, storeUnderCompactionMetrics, keyFactory, recovery, hardDelete, replicaStatusDelegates,
time, accountService, diskMetrics);
stores.put(replica.getPartitionId(), store);
partitionToReplicaMap.put(replica.getPartitionId(), replica);
expectedDirs.add(replica.getReplicaPath());
}
}
compactionManager = new CompactionManager(disk.getMountPath(), storeConfig, stores.values(), metrics, time);
}
/**
* Starts all the stores on this disk.
* @throws InterruptedException
*/
void start() throws InterruptedException {
long startTimeMs = time.milliseconds();
final AtomicInteger numStoreFailures = new AtomicInteger(0);
rwLock.readLock().lock();
try {
checkMountPathAccessible();
List<Thread> startupThreads = new ArrayList<>();
for (final Map.Entry<PartitionId, BlobStore> partitionAndStore : stores.entrySet()) {
if (stoppedReplicas.contains(partitionAndStore.getKey().toPathString())) {
logger.info("Skip the store {} because it is on the stopped list", partitionAndStore.getKey());
continue;
}
Thread thread = Utils.newThread("store-startup-" + partitionAndStore.getKey(), () -> {
try {
partitionAndStore.getValue().start();
} catch (Exception e) {
numStoreFailures.incrementAndGet();
logger.error("Exception while starting store for the {}", partitionAndStore.getKey(), e);
}
}, false);
thread.start();
startupThreads.add(thread);
}
for (Thread startupThread : startupThreads) {
startupThread.join();
}
if (numStoreFailures.get() > 0) {
logger.error("Could not start {} out of {} stores on the disk {}", numStoreFailures.get(), stores.size(), disk);
}
// DiskSpaceAllocator startup. This happens after BlobStore startup because it needs disk space requirements
// from each store.
List<DiskSpaceRequirements> requirementsList = new ArrayList<>();
for (BlobStore blobStore : stores.values()) {
if (blobStore.isStarted()) {
DiskSpaceRequirements requirements = blobStore.getDiskSpaceRequirements();
if (requirements != null) {
requirementsList.add(requirements);
}
}
}
diskSpaceAllocator.initializePool(requirementsList);
compactionManager.enable();
reportUnrecognizedDirs();
running = true;
} catch (StoreException e) {
logger.error("Error while starting the DiskManager for {} ; no stores will be accessible on this disk.",
disk.getMountPath(), e);
} finally {
if (!running) {
metrics.totalStoreStartFailures.inc(stores.size());
metrics.diskDownCount.inc();
} else {
metrics.totalStoreStartFailures.inc(numStoreFailures.get());
}
metrics.diskStartTimeMs.update(time.milliseconds() - startTimeMs);
rwLock.readLock().unlock();
}
}
/**
* Shuts down all the stores on this disk.
* @throws InterruptedException
*/
void shutdown() throws InterruptedException {
long startTimeMs = time.milliseconds();
rwLock.readLock().lock();
try {
running = false;
compactionManager.disable();
diskIOScheduler.disable();
final AtomicInteger numFailures = new AtomicInteger(0);
List<Thread> shutdownThreads = new ArrayList<>();
for (final Map.Entry<PartitionId, BlobStore> partitionAndStore : stores.entrySet()) {
if (!partitionAndStore.getValue().isStarted()) {
continue;
}
Thread thread = Utils.newThread("store-shutdown-" + partitionAndStore.getKey(), () -> {
try {
partitionAndStore.getValue().shutdown();
} catch (Exception e) {
numFailures.incrementAndGet();
metrics.totalStoreShutdownFailures.inc();
logger.error("Exception while shutting down store {} on disk {}", partitionAndStore.getKey(), disk, e);
}
}, false);
thread.start();
shutdownThreads.add(thread);
}
for (Thread shutdownThread : shutdownThreads) {
shutdownThread.join();
}
if (numFailures.get() > 0) {
logger.error("Could not shutdown {} out of {} stores on the disk {}", numFailures.get(), stores.size(), disk);
}
compactionManager.awaitTermination();
longLivedTaskScheduler.shutdown();
if (!longLivedTaskScheduler.awaitTermination(30, TimeUnit.SECONDS)) {
logger.error("Could not terminate long live tasks after DiskManager shutdown");
}
} finally {
rwLock.readLock().unlock();
metrics.diskShutdownTimeMs.update(time.milliseconds() - startTimeMs);
}
}
/**
* @param id the {@link PartitionId} to find the store for.
* @param skipStateCheck whether to skip checking state of store.
* @return the associated {@link Store}, or {@code null} if the partition is not on this disk, or the store is not
* started.
*/
Store getStore(PartitionId id, boolean skipStateCheck) {
BlobStore storeToReturn;
rwLock.readLock().lock();
try {
BlobStore store = stores.get(id);
storeToReturn = (running && store != null && (store.isStarted() || skipStateCheck)) ? store : null;
} finally {
rwLock.readLock().unlock();
}
return storeToReturn;
}
/**
* @return {@code true} if the compaction thread is running. {@code false} otherwise.
*/
boolean isCompactionExecutorRunning() {
return compactionManager.isCompactionExecutorRunning();
}
/**
* @return the {@link DiskId} that is managed by this {@link DiskManager}.
*/
DiskId getDisk() {
return disk;
}
/**
* Schedules the {@link PartitionId} {@code id} for compaction next.
* @param id the {@link PartitionId} of the {@link BlobStore} to compact.
* @return {@code true} if the scheduling was successful. {@code false} if not.
*/
boolean scheduleNextForCompaction(PartitionId id) {
BlobStore store = (BlobStore) getStore(id, false);
return store != null && compactionManager.scheduleNextForCompaction(store);
}
/**
* Enable or disable compaction on the {@link PartitionId} {@code id}.
* @param id the {@link PartitionId} of the {@link BlobStore} on which compaction is disabled or enabled.
* @param enabled whether to enable ({@code true}) or disable.
* @return {@code true} if disabling was successful. {@code false} if not.
*/
boolean controlCompactionForBlobStore(PartitionId id, boolean enabled) {
rwLock.readLock().lock();
boolean succeed = false;
try {
BlobStore store = stores.get(id);
if (store != null) {
compactionManager.controlCompactionForBlobStore(store, enabled);
succeed = true;
}
} finally {
rwLock.readLock().unlock();
}
return succeed;
}
/**
* Add a new BlobStore with given {@link ReplicaId}.
* @param replica the {@link ReplicaId} of the {@link Store} which would be added.
* @return {@code true} if adding store was successful. {@code false} if not.
*/
boolean addBlobStore(ReplicaId replica) {
rwLock.writeLock().lock();
boolean succeed = false;
try {
if (!running) {
logger.error("Failed to add {} because disk manager is not running", replica.getPartitionId());
} else {
// Clean up existing dir associated with this replica to add. Here we re-create a new store because we don't
// know the state of files in old directory. (The old directory was created last time when adding this replica
// but failed at some point before updating InstanceConfig)
File storeDir = new File(replica.getReplicaPath());
if (storeDir.exists()) {
logger.info("Deleting previous store directory associated with {}", replica);
try {
Utils.deleteFileOrDirectory(storeDir);
} catch (Exception e) {
throw new IOException("Couldn't delete store directory " + replica.getReplicaPath(), e);
}
logger.info("Old store directory is deleted for {}", replica);
}
BlobStore store =
new BlobStore(replica, storeConfig, scheduler, longLivedTaskScheduler, diskIOScheduler, diskSpaceAllocator,
storeMainMetrics, storeUnderCompactionMetrics, keyFactory, recovery, hardDelete, replicaStatusDelegates,
time, accountService, null);
store.start();
// collect store segment requirements and add into DiskSpaceAllocator
List<DiskSpaceRequirements> storeRequirements = Collections.singletonList(store.getDiskSpaceRequirements());
diskSpaceAllocator.addRequiredSegments(diskSpaceAllocator.getOverallRequirements(storeRequirements), false);
// add store into CompactionManager
compactionManager.addBlobStore(store);
// add new created store into in-memory data structures.
stores.put(replica.getPartitionId(), store);
partitionToReplicaMap.put(replica.getPartitionId(), replica);
// create a bootstrap-in-progress file to distinguish it from regular stores (the file will be checked during
// BOOTSTRAP -> STANDBY transition)
createBootstrapFileIfAbsent(replica);
logger.info("New store is successfully added into DiskManager.");
succeed = true;
}
} catch (Exception e) {
logger.error("Failed to start new added store {} or add requirements to disk allocator",
replica.getPartitionId());
} finally {
rwLock.writeLock().unlock();
}
return succeed;
}
/**
* Start the BlobStore with given {@link PartitionId} {@code id}.
* @param id the {@link PartitionId} of the {@link BlobStore} which should be started.
* @return {@code true} if start store was successful. {@code false} if not.
*/
boolean startBlobStore(PartitionId id) {
rwLock.readLock().lock();
boolean succeed = false;
try {
BlobStore store = stores.get(id);
if (store == null || !running) {
logger.error("Failed to start store because {} is not found or DiskManager is not running.", id);
} else if (store.isStarted()) {
succeed = true;
} else {
store.start();
succeed = true;
}
} catch (Exception e) {
logger.error("Exception while starting store {} on disk {}", id, disk, e);
} finally {
rwLock.readLock().unlock();
}
return succeed;
}
/**
* Shutdown the BlobStore with given {@link PartitionId} {@code id}.
* @param id the {@link PartitionId} of the {@link BlobStore} which should be shutdown.
* @return {@code true} if shutdown store was successful. {@code false} if not.
*/
boolean shutdownBlobStore(PartitionId id) {
rwLock.readLock().lock();
boolean succeed = false;
try {
BlobStore store = stores.get(id);
if (store == null || !running) {
logger.error("Failed to shut down store because {} is not found or DiskManager is not running", id);
} else if (!store.isStarted()) {
succeed = true;
} else {
store.shutdown();
succeed = true;
}
} catch (Exception e) {
logger.error("Exception while shutting down store {} on disk {}", id, disk, e);
} finally {
rwLock.readLock().unlock();
}
return succeed;
}
/**
* Given partition id, remove the corresponding blob store in disk manager
* @param id the {@link PartitionId} of the {@link BlobStore} which should be removed.
* @return {@code true} if store removal was successful. {@code false} if not.
*/
boolean removeBlobStore(PartitionId id) {
rwLock.writeLock().lock();
boolean succeed = false;
try {
BlobStore store = stores.get(id);
if (store == null) {
logger.error("Store {} is not found in disk manager", id);
} else if (!running || store.isStarted()) {
logger.error("Removing store {} failed. Disk running = {}, store running = {}", id, running, store.isStarted());
} else if (!compactionManager.removeBlobStore(store)) {
logger.error("Fail to remove store {} from compaction manager.", id);
} else {
stores.remove(id);
stoppedReplicas.remove(id.toPathString());
partitionToReplicaMap.remove(id);
logger.info("Store {} is successfully removed from disk manager", id);
succeed = true;
}
} finally {
rwLock.writeLock().unlock();
}
return succeed;
}
/**
* Set the BlobStore stopped state with given {@link PartitionId} {@code id}.
* @param partitionIds a list of {@link PartitionId} of the {@link BlobStore} whose stopped state should be set.
* @param markStop whether to mark BlobStore as stopped ({@code true}) or started.
* @return a list of {@link PartitionId} whose stopped state fails to be updated.
*/
List<PartitionId> setBlobStoreStoppedState(List<PartitionId> partitionIds, boolean markStop) {
Set<PartitionId> failToUpdateStores = new HashSet<>();
List<ReplicaId> replicasToUpdate = new ArrayList<>();
rwLock.readLock().lock();
try {
for (PartitionId id : partitionIds) {
BlobStore store = stores.get(id);
if (store == null) {
// no need to check if the store is started because this method could be called after store is successfully shutdown.
logger.error("store is not found on this disk when trying to update stoppedReplicas list");
failToUpdateStores.add(id);
} else {
replicasToUpdate.add(partitionToReplicaMap.get(id));
}
}
} finally {
rwLock.readLock().unlock();
}
boolean updated = true;
if (replicaStatusDelegates != null && !replicaStatusDelegates.isEmpty()) {
logger.trace("Setting replica stopped state via ReplicaStatusDelegate on replica {}",
Arrays.toString(replicasToUpdate.toArray()));
for (ReplicaStatusDelegate replicaStatusDelegate : replicaStatusDelegates) {
updated &= markStop ? replicaStatusDelegate.markStopped(replicasToUpdate)
: replicaStatusDelegate.unmarkStopped(replicasToUpdate);
}
} else {
logger.warn("The ReplicaStatusDelegate is not instantiated");
updated = false;
}
if (!updated) {
// either mark/unmark operation fails or ReplicaStatusDelegate is not instantiated.
failToUpdateStores.addAll(partitionIds);
}
return new ArrayList<>(failToUpdateStores);
}
/**
* Gets all the throttlers that the {@link DiskIOScheduler} will be constructed with.
* @param config the {@link StoreConfig} with configuration values.
* @param time the {@link Time} instance to use in the throttlers
* @return the throttlers that the {@link DiskIOScheduler} will be constructed with.
*/
private Map<String, Throttler> getThrottlers(StoreConfig config, Time time) {
Map<String, Throttler> throttlers = new HashMap<>();
// compaction ops
Throttler compactionOpsThrottler =
new Throttler(config.storeCompactionOperationsBytesPerSec, config.storeCompactionThrottlerCheckIntervalMs, true,
time);
throttlers.put(BlobStoreCompactor.COMPACTION_CLEANUP_JOB_NAME, compactionOpsThrottler);
// hard delete ops
Throttler hardDeleteOpsThrottler = new Throttler(config.storeHardDeleteOperationsBytesPerSec, -1, true, time);
throttlers.put(HardDeleter.HARD_DELETE_CLEANUP_JOB_NAME, hardDeleteOpsThrottler);
// stats
Throttler statsIndexScanThrottler = new Throttler(config.storeStatsIndexEntriesPerSecond, 1000, true, time);
throttlers.put(BlobStoreStats.IO_SCHEDULER_JOB_TYPE, statsIndexScanThrottler);
return throttlers;
}
/**
* @throws StoreException if the disk's mount path is inaccessible.
*/
private void checkMountPathAccessible() throws StoreException {
File mountPath = new File(disk.getMountPath());
if (!mountPath.exists()) {
metrics.diskMountPathFailures.inc();
throw new StoreException("Mount path does not exist: " + mountPath + " ; cannot start stores on this disk",
StoreErrorCodes.Initialization_Error);
}
}
/**
* Check if all stores on this disk are down.
* @return {@code true} if all stores are down. {@code false} at least one store is up.
*/
boolean areAllStoresDown() {
rwLock.readLock().lock();
boolean storesAllDown = true;
try {
for (BlobStore store : stores.values()) {
if (store.isStarted()) {
storesAllDown = false;
break;
}
}
} finally {
rwLock.readLock().unlock();
}
return storesAllDown;
}
/**
* @return unexpected directories on this disk.
*/
List<String> getUnexpectedDirs() {
return unexpectedDirs;
}
/**
* Reports any unrecognized directories on disk
* store dir and return swap segment to reserve pool if needed.
*/
private void reportUnrecognizedDirs() {
File[] dirs = new File(disk.getMountPath()).listFiles(File::isDirectory);
if (dirs == null) {
metrics.diskMountPathFailures.inc();
logger.warn("Could not list the directories in {}", disk.getMountPath());
} else {
for (File dir : dirs) {
String absPath = dir.getAbsolutePath();
if (!expectedDirs.contains(absPath)) {
unexpectedDirs.add(absPath);
}
}
if (unexpectedDirs.size() > 0) {
logger.warn("Encountered unexpected dirs in {} : {}", disk.getMountPath(), unexpectedDirs);
metrics.unexpectedDirsOnDisk.inc(unexpectedDirs.size());
}
}
}
}
| apache-2.0 |
icloudkit/example | net.cloudkit.enterprises/src/main/java/net/cloudkit/experiment/infrastructure/support/messaging/Waiter.java | 109 | package net.cloudkit.experiment.infrastructure.support.messaging;
/**
 * Waiter.
 *
 * <p>Intentionally empty placeholder class in the messaging support package; it
 * declares no state or behavior. NOTE(review): presumably reserved for a future
 * wait/notify-style messaging helper — confirm intent before extending or removing.
 */
public class Waiter {
}
| apache-2.0 |
janstey/fuse | tooling/tooling-activemq-facade/src/main/java/org/fusesource/fabric/activemq/facade/TopicViewFacade.java | 1092 | /*
* Copyright 2010 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.fusesource.fabric.activemq.facade;
import org.apache.activemq.broker.jmx.QueueViewMBean;
import org.apache.activemq.broker.jmx.TopicViewMBean;
/**
 * A facade over ActiveMQ's {@link TopicViewMBean} that additionally exposes a stable
 * identifier for the underlying broker resource.
 *
 * @author <a href="http://hiramchirino.com">Hiram Chirino</a>
 */
public interface TopicViewFacade extends TopicViewMBean {
  /**
   * @return a unique id for this resource, typically a JMX ObjectName
   * @throws Exception if the id cannot be determined (e.g. the remote broker is unreachable)
   */
  String getId() throws Exception;
}
| apache-2.0 |
google/binnavi | src/main/java/com/google/security/zynamics/binnavi/Gui/Debug/RemoteBrowser/FileBrowser/CRemoteFile.java | 3239 | // Copyright 2011-2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.security.zynamics.binnavi.Gui.Debug.RemoteBrowser.FileBrowser;
import java.io.File;
import java.io.FileFilter;
import java.io.FilenameFilter;
/**
* Simulates remote files when displayed in the file selection dialog.
*/
/**
 * Simulates remote files when displayed in the file selection dialog.
 *
 * <p>Instances are purely descriptive: they always report that they exist, are never
 * writable or renamable, and report zero size/modification time. Listing children is
 * unsupported. The serialized field names are kept stable for compatibility.
 */
public final class CRemoteFile extends File {
  /**
   * Used for serialization.
   */
  private static final long serialVersionUID = -1913040405965661147L;

  /**
   * Path to the file on the remote system.
   */
  private final String m_pathname;

  /**
   * Flag that indicates whether the remote file is a directory.
   */
  private final boolean m_isDirectory;

  /**
   * Creates a new remote file object.
   *
   * @param pathname Path to the file on the remote system.
   * @param isDirectory Flag that indicates whether the remote file is a directory.
   */
  public CRemoteFile(final String pathname, final boolean isDirectory) {
    super(pathname);
    m_pathname = pathname;
    m_isDirectory = isDirectory;
  }

  @Override
  public boolean canExecute() {
    throw new IllegalStateException("IE01127: Not yet implemented");
  }

  @Override
  public boolean canRead() { // NO_UCD
    throw new IllegalStateException("IE01128: Not yet implemented");
  }

  @Override
  public boolean canWrite() { // NO_UCD
    // Disables the option to rename files from the dialog
    return false;
  }

  @Override
  public boolean exists() {
    // Remote entries shown in the dialog are always treated as present.
    return true;
  }

  @Override
  public File getAbsoluteFile() {
    return new CRemoteFile(m_pathname, m_isDirectory);
  }

  @Override
  public String getAbsolutePath() {
    return m_pathname;
  }

  @Override
  public File getCanonicalFile() {
    return new CRemoteFile(m_pathname, m_isDirectory);
  }

  @Override
  public File getParentFile() {
    // Parents of remote paths are always presented as directories.
    final String parent = getParent();
    return parent == null ? null : new CRemoteFile(parent, true);
  }

  @Override
  public String getPath() {
    return m_pathname;
  }

  @Override
  public boolean isDirectory() {
    return m_isDirectory;
  }

  @Override
  public long lastModified() { // NO_UCD
    return 0;
  }

  @Override
  public long length() {
    return 0;
  }

  @Override
  public File[] listFiles() {
    throw new IllegalStateException("IE01129: Not yet implemented");
  }

  @Override
  public File[] listFiles(final FileFilter filter) {
    throw new IllegalStateException("IE01130: Not yet implemented");
  }

  @Override
  public File[] listFiles(final FilenameFilter filter) {
    throw new IllegalStateException("IE01131: Not yet implemented");
  }

  @Override
  public boolean renameTo(final File dest) { // NO_UCD
    return false;
  }
}
| apache-2.0 |
raidenovski/webapp | src/main/java/ai/elimu/dao/jpa/ContributorDaoJpa.java | 2174 | package ai.elimu.dao.jpa;
import java.util.Calendar;
import java.util.List;
import javax.persistence.NoResultException;
import ai.elimu.dao.ContributorDao;
import org.springframework.dao.DataAccessException;
import ai.elimu.model.Contributor;
/**
 * JPA implementation of {@link ContributorDao}. Uses typed JPQL queries so results
 * are checked at compile time instead of relying on unchecked casts.
 */
public class ContributorDaoJpa extends GenericDaoJpa<Contributor> implements ContributorDao {

  /**
   * Looks up a contributor by e-mail address.
   *
   * @param email the e-mail address to search for
   * @return the matching {@link Contributor}, or {@code null} if none exists
   */
  @Override
  public Contributor read(String email) throws DataAccessException {
    try {
      // Typed query: no unchecked cast of the single result is needed.
      return em.createQuery(
          "SELECT c " +
          "FROM Contributor c " +
          "WHERE c.email = :email", Contributor.class)
          .setParameter("email", email)
          .getSingleResult();
    } catch (NoResultException e) {
      // An absent contributor is an expected outcome, so log and return null.
      logger.warn("Contributor with e-mail \"" + email + "\" was not found");
      return null;
    }
  }

  /**
   * Looks up a contributor by the id assigned by the GitHub OAuth provider.
   *
   * @param id the GitHub provider id
   * @return the matching {@link Contributor}, or {@code null} if none exists
   */
  @Override
  public Contributor readByProviderIdGitHub(String id) throws DataAccessException {
    try {
      return em.createQuery(
          "SELECT c " +
          "FROM Contributor c " +
          "WHERE c.providerIdGitHub = :id", Contributor.class)
          .setParameter("id", id)
          .getSingleResult();
    } catch (NoResultException e) {
      logger.warn("Contributor with GitHub id \"" + id + "\" was not found");
      return null;
    }
  }

  /**
   * @return all contributors, newest registration first
   */
  @Override
  public List<Contributor> readAllOrderedDesc() throws DataAccessException {
    return em.createQuery(
        "SELECT c " +
        "FROM Contributor c " +
        "ORDER BY c.registrationTime DESC", Contributor.class)
        .getResultList();
  }

  /**
   * @param calendarFrom inclusive lower bound on registration time
   * @param calendarTo exclusive upper bound on registration time
   * @return contributors registered in {@code [calendarFrom, calendarTo)}, newest first
   */
  @Override
  public List<Contributor> readAll(Calendar calendarFrom, Calendar calendarTo) throws DataAccessException {
    return em.createQuery(
        "SELECT c " +
        "FROM Contributor c " +
        "WHERE c.registrationTime >= :calendarFrom " +
        "AND c.registrationTime < :calendarTo " +
        "ORDER BY c.registrationTime DESC", Contributor.class)
        .setParameter("calendarFrom", calendarFrom)
        .setParameter("calendarTo", calendarTo)
        .getResultList();
  }
}
| apache-2.0 |
retomerz/intellij-community | platform/lang-impl/src/com/intellij/codeInsight/generation/CommentByBlockCommentHandler.java | 28836 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.generation;
import com.intellij.codeInsight.CodeInsightUtilBase;
import com.intellij.codeInsight.CommentUtil;
import com.intellij.codeInsight.actions.MultiCaretCodeInsightActionHandler;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.ide.highlighter.custom.CustomFileTypeLexer;
import com.intellij.lang.Commenter;
import com.intellij.lang.CustomUncommenter;
import com.intellij.lang.Language;
import com.intellij.lang.LanguageCommenters;
import com.intellij.lexer.Lexer;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.impl.AbstractFileType;
import com.intellij.openapi.fileTypes.impl.CustomSyntaxTableFileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Couple;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.psi.codeStyle.Indent;
import com.intellij.psi.templateLanguages.MultipleLangCommentProvider;
import com.intellij.psi.templateLanguages.OuterLanguageElement;
import com.intellij.psi.templateLanguages.TemplateLanguageFileViewProvider;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtilBase;
import com.intellij.util.containers.IntArrayList;
import com.intellij.util.text.CharArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
public class CommentByBlockCommentHandler extends MultiCaretCodeInsightActionHandler {
private Project myProject;
private Editor myEditor;
private Caret myCaret;
private @NotNull PsiFile myFile;
private Document myDocument;
private CommenterDataHolder mySelfManagedCommenterData;
/**
 * Entry point for the "Comment with Block Comment" action on a single caret.
 * Decides between three outcomes: uncomment an existing block comment that
 * overlaps the caret/selection, comment the current selection, or insert an
 * empty prefix/suffix pair at the caret.
 */
@Override
public void invoke(@NotNull Project project, @NotNull Editor editor, @NotNull Caret caret, @NotNull PsiFile file) {
  // Abort if the editor/file cannot be made writable.
  if (!CodeInsightUtilBase.prepareEditorForWrite(editor)) return;
  myProject = project;
  myEditor = editor;
  myCaret = caret;
  myFile = file;
  myDocument = editor.getDocument();
  if (!FileDocumentManager.getInstance().requestWriting(myDocument, project)) {
    return;
  }
  FeatureUsageTracker.getInstance().triggerFeatureUsed("codeassists.comment.block");
  final Commenter commenter = findCommenter(myFile, myEditor, caret);
  if (commenter == null) return;
  final String prefix;
  final String suffix;
  if (commenter instanceof SelfManagingCommenter) {
    // Self-managing commenters carry their own state object and supply
    // position-dependent prefix/suffix strings.
    final SelfManagingCommenter selfManagingCommenter = (SelfManagingCommenter)commenter;
    mySelfManagedCommenterData = selfManagingCommenter.createBlockCommentingState(
      caret.getSelectionStart(),
      caret.getSelectionEnd(),
      myDocument,
      myFile
    );
    if (mySelfManagedCommenterData == null) {
      mySelfManagedCommenterData = SelfManagingCommenter.EMPTY_STATE;
    }
    prefix = selfManagingCommenter.getBlockCommentPrefix(
      caret.getSelectionStart(),
      myDocument,
      mySelfManagedCommenterData
    );
    suffix = selfManagingCommenter.getBlockCommentSuffix(
      caret.getSelectionEnd(),
      myDocument,
      mySelfManagedCommenterData
    );
  }
  else {
    prefix = commenter.getBlockCommentPrefix();
    suffix = commenter.getBlockCommentSuffix();
  }
  // A language without block-comment markers cannot be block-commented.
  if (prefix == null || suffix == null) return;
  TextRange commentedRange = findCommentedRange(commenter);
  if (commentedRange != null) {
    final int commentStart = commentedRange.getStartOffset();
    final int commentEnd = commentedRange.getEndOffset();
    int selectionStart = commentStart;
    int selectionEnd = commentEnd;
    if (myCaret.hasSelection()) {
      selectionStart = myCaret.getSelectionStart();
      selectionEnd = myCaret.getSelectionEnd();
    }
    // If neither comment boundary falls inside the selection, the selection is
    // independent of the existing comment: comment the selection. Otherwise
    // the selection overlaps the comment, so uncomment it.
    if ((commentStart < selectionStart || commentStart >= selectionEnd) && (commentEnd <= selectionStart || commentEnd > selectionEnd)) {
      commentRange(selectionStart, selectionEnd, prefix, suffix, commenter);
    }
    else {
      uncommentRange(commentedRange, trim(prefix), trim(suffix), commenter);
    }
  }
  else {
    if (myCaret.hasSelection()) {
      int selectionStart = myCaret.getSelectionStart();
      int selectionEnd = myCaret.getSelectionEnd();
      if (commenter instanceof IndentedCommenter) {
        final Boolean value = ((IndentedCommenter)commenter).forceIndentedLineComment();
        if (value != null && value == Boolean.TRUE) {
          // Expand the selection to whole lines for indented commenting.
          selectionStart = myDocument.getLineStartOffset(myDocument.getLineNumber(selectionStart));
          selectionEnd = myDocument.getLineEndOffset(myDocument.getLineNumber(selectionEnd));
        }
      }
      commentRange(selectionStart, selectionEnd, prefix, suffix, commenter);
    }
    else {
      // No selection and no existing comment: insert an empty comment pair
      // and place the caret between prefix and suffix.
      EditorUtil.fillVirtualSpaceUntilCaret(editor);
      int caretOffset = myCaret.getOffset();
      if (commenter instanceof IndentedCommenter) {
        final Boolean value = ((IndentedCommenter)commenter).forceIndentedLineComment();
        if (value != null && value == Boolean.TRUE) {
          // Comment the whole caret line instead of inserting an empty pair.
          final int lineNumber = myDocument.getLineNumber(caretOffset);
          final int start = myDocument.getLineStartOffset(lineNumber);
          final int end = myDocument.getLineEndOffset(lineNumber);
          commentRange(start, end, prefix, suffix, commenter);
          return;
        }
      }
      myDocument.insertString(caretOffset, prefix + suffix);
      myCaret.moveToOffset(caretOffset + prefix.length());
    }
  }
}
/** Null-tolerant {@link String#trim()}: returns {@code null} for {@code null} input. */
@Nullable
private static String trim(String s) {
  if (s == null) {
    return null;
  }
  return s.trim();
}
/**
 * Returns {@code true} if the current selection contains nothing but
 * whitespace and comments, i.e. it is safe to treat the selection as an
 * already-commented region. A caret without a selection trivially passes.
 */
private boolean testSelectionForNonComments() {
  if (!myCaret.hasSelection()) {
    return true;
  }
  // Selection end is exclusive, hence the -1 when building the range.
  TextRange range
    = new TextRange(myCaret.getSelectionStart(), myCaret.getSelectionEnd() - 1);
  // Walk the sibling chain starting at the selection start and inspect every
  // element that intersects the selection.
  for (PsiElement element = myFile.findElementAt(range.getStartOffset()); element != null && range.intersects(element.getTextRange());
       element = element.getNextSibling()) {
    if (element instanceof OuterLanguageElement) {
      // Template-language fragments are checked via their injected PSI.
      if (!isInjectedWhiteSpace(range, (OuterLanguageElement)element)) {
        return false;
      }
    }
    else {
      if (!isWhiteSpaceOrComment(element, range)) {
        return false;
      }
    }
  }
  return true;
}
/**
 * Returns {@code true} if every PSI element injected into the given
 * outer-language (template) element is whitespace or comment within
 * {@code range}.
 */
private boolean isInjectedWhiteSpace(@NotNull TextRange range, @NotNull OuterLanguageElement element) {
  PsiElement psi = element.getContainingFile().getViewProvider().getPsi(element.getLanguage());
  // NOTE(review): 'psi' is used only as a null guard below — presumably to
  // bail out when the view provider has no PSI for this language; confirm.
  if (psi == null) {
    return false;
  }
  List<PsiElement> injectedElements = PsiTreeUtil.getInjectedElements(element);
  for (PsiElement el : injectedElements) {
    if (!isWhiteSpaceOrComment(el, range)) {
      return false;
    }
  }
  return true;
}
/**
 * Returns {@code true} if the part of {@code element} covered by
 * {@code range} is whitespace, a comment, or blank text.
 */
private boolean isWhiteSpaceOrComment(@NotNull PsiElement element, @NotNull TextRange range) {
  final TextRange textRange = element.getTextRange();
  TextRange intersection = range.intersection(textRange);
  if (intersection == null) {
    return false;
  }
  // Re-base the intersection into coordinates relative to the element's own
  // text, clamped to [0, element length].
  intersection = TextRange.create(Math.max(intersection.getStartOffset() - textRange.getStartOffset(), 0),
                                  Math.min(intersection.getEndOffset() - textRange.getStartOffset(), textRange.getLength()));
  // Either the element itself is whitespace/comment, or the covered substring
  // is blank.
  return isWhiteSpaceOrComment(element) ||
         intersection.substring(element.getText()).trim().length() == 0;
}
/** Returns {@code true} if the element is whitespace or lies inside a comment. */
private static boolean isWhiteSpaceOrComment(PsiElement element) {
  if (element instanceof PsiWhiteSpace) {
    return true;
  }
  return PsiTreeUtil.getParentOfType(element, PsiComment.class, false) != null;
}
/**
 * Locates an existing block comment related to the caret/selection, or
 * returns {@code null} if there is none. Handles four cases: custom syntax
 * table file types (via lexer), {@link CustomUncommenter},
 * {@link SelfManagingCommenter}, and plain commenters (via text matching plus
 * a PSI comment lookup at the caret).
 */
@Nullable
private TextRange findCommentedRange(final Commenter commenter) {
  final CharSequence text = myDocument.getCharsSequence();
  final FileType fileType = myFile.getFileType();
  if (fileType instanceof CustomSyntaxTableFileType) {
    // Custom file types: lex from the nearest preceding prefix and accept the
    // token only if it is a multi-line comment that reaches the caret.
    Lexer lexer = new CustomFileTypeLexer(((CustomSyntaxTableFileType)fileType).getSyntaxTable());
    final int caretOffset = myCaret.getOffset();
    int commentStart = CharArrayUtil.lastIndexOf(text, commenter.getBlockCommentPrefix(), caretOffset);
    if (commentStart == -1) return null;
    lexer.start(text, commentStart, text.length());
    if (lexer.getTokenType() == CustomHighlighterTokenType.MULTI_LINE_COMMENT && lexer.getTokenEnd() >= caretOffset) {
      return new TextRange(commentStart, lexer.getTokenEnd());
    }
    return null;
  }
  final String prefix;
  final String suffix;
  // Custom uncommenter is able to find commented block inside of selected text
  final String selectedText = myCaret.getSelectedText();
  if ((commenter instanceof CustomUncommenter) && selectedText != null) {
    final TextRange commentedRange = ((CustomUncommenter)commenter).findMaximumCommentedRange(selectedText);
    if (commentedRange == null) {
      return null;
    }
    // Uncommenter returns range relative to text start, so we need to shift it to make abosolute.
    return commentedRange.shiftRight(myCaret.getSelectionStart());
  }
  if (commenter instanceof SelfManagingCommenter) {
    SelfManagingCommenter selfManagingCommenter = (SelfManagingCommenter)commenter;
    prefix = selfManagingCommenter.getBlockCommentPrefix(
      myCaret.getSelectionStart(),
      myDocument,
      mySelfManagedCommenterData
    );
    suffix = selfManagingCommenter.getBlockCommentSuffix(
      myCaret.getSelectionEnd(),
      myDocument,
      mySelfManagedCommenterData
    );
  }
  else {
    prefix = trim(commenter.getBlockCommentPrefix());
    suffix = trim(commenter.getBlockCommentSuffix());
  }
  if (prefix == null || suffix == null) return null;
  TextRange commentedRange;
  if (commenter instanceof SelfManagingCommenter) {
    commentedRange = ((SelfManagingCommenter)commenter).getBlockCommentRange(
      myCaret.getSelectionStart(),
      myCaret.getSelectionEnd(),
      myDocument,
      mySelfManagedCommenterData
    );
  }
  else {
    // Only treat the selection as a comment if it holds nothing but
    // whitespace/comments.
    if (!testSelectionForNonComments()) {
      return null;
    }
    commentedRange = getSelectedComments(text, prefix, suffix);
  }
  if (commentedRange == null) {
    // Fall back to a PSI comment element at the caret position.
    PsiElement comment = findCommentAtCaret();
    if (comment != null) {
      String commentText = comment.getText();
      if (commentText.startsWith(prefix) && commentText.endsWith(suffix)) {
        commentedRange = comment.getTextRange();
      }
    }
  }
  return commentedRange;
}
/**
 * If the selection — after trimming surrounding whitespace — starts with
 * {@code prefix} and ends with {@code suffix}, returns that trimmed range;
 * otherwise {@code null}.
 */
@Nullable
private TextRange getSelectedComments(CharSequence text, String prefix, String suffix) {
  TextRange commentedRange = null;
  if (myCaret.hasSelection()) {
    int selectionStart = myCaret.getSelectionStart();
    selectionStart = CharArrayUtil.shiftForward(text, selectionStart, " \t\n");
    int selectionEnd = myCaret.getSelectionEnd() - 1;
    selectionEnd = CharArrayUtil.shiftBackward(text, selectionEnd, " \t\n") + 1;
    // The trimmed selection must be long enough to hold both markers.
    if (selectionEnd - selectionStart >= prefix.length() + suffix.length() &&
        CharArrayUtil.regionMatches(text, selectionStart, prefix) &&
        CharArrayUtil.regionMatches(text, selectionEnd - suffix.length(), suffix)) {
      commentedRange = new TextRange(selectionStart, selectionEnd);
    }
  }
  return commentedRange;
}
/**
 * Resolves the {@link Commenter} for the file: abstract (custom) file types
 * carry their own commenter; otherwise it is derived from the language at the
 * caret position.
 */
@Nullable
private static Commenter findCommenter(PsiFile file, Editor editor, Caret caret) {
  final FileType fileType = file.getFileType();
  if (fileType instanceof AbstractFileType) {
    return ((AbstractFileType)fileType).getCommenter();
  }
  Language lang = PsiUtilBase.getLanguageInEditor(caret, file.getProject());
  return getCommenter(file, editor, lang, lang);
}
/**
 * Chooses the commenter for a (possibly multi-language) file. Extension
 * points get first pick; then the file language is preferred over the line
 * language when the latter has no commenter or is the file's base language;
 * template data languages are mapped back to the view provider's base
 * language.
 */
@Nullable
public static Commenter getCommenter(final PsiFile file, final Editor editor,
                                     final Language lineStartLanguage, final Language lineEndLanguage) {
  final FileViewProvider viewProvider = file.getViewProvider();
  // Extension point may override commenter selection entirely.
  for (MultipleLangCommentProvider provider : MultipleLangCommentProvider.EP_NAME.getExtensions()) {
    if (provider.canProcess(file, viewProvider)) {
      return provider.getLineCommenter(file, editor, lineStartLanguage, lineEndLanguage);
    }
  }
  final Language fileLanguage = file.getLanguage();
  Language lang = lineStartLanguage == null || LanguageCommenters.INSTANCE.forLanguage(lineStartLanguage) == null ||
                  fileLanguage.getBaseLanguage() == lineStartLanguage // file language is a more specific dialect of the line language
                  ? fileLanguage
                  : lineStartLanguage;
  if (viewProvider instanceof TemplateLanguageFileViewProvider &&
      lang == ((TemplateLanguageFileViewProvider)viewProvider).getTemplateDataLanguage()) {
    lang = viewProvider.getBaseLanguage();
  }
  return LanguageCommenters.INSTANCE.forLanguage(lang);
}
/**
 * Finds the PSI comment element at (or immediately around) the caret offset,
 * or {@code null}. With a selection, the comment must lie entirely inside it.
 */
@Nullable
private PsiElement findCommentAtCaret() {
  int offset = myCaret.getOffset();
  TextRange range = new TextRange(myCaret.getSelectionStart(), myCaret.getSelectionEnd());
  // Nudge the probe offset off the selection boundaries so that a caret
  // sitting at either edge still hits the element inside.
  if (offset == range.getEndOffset()) {
    offset--;
  }
  if (offset <= range.getStartOffset()) {
    offset++;
  }
  PsiElement elt = myFile.getViewProvider().findElementAt(offset);
  if (elt == null) return null;
  PsiElement comment = PsiTreeUtil.getParentOfType(elt, PsiComment.class, false);
  if (comment == null || myCaret.hasSelection() && !range.contains(comment.getTextRange())) {
    return null;
  }
  return comment;
}
/**
 * Wraps [startOffset, endOffset) in a block comment. When the range covers
 * whole lines, the prefix/suffix are placed on their own lines, indented to
 * the minimum indent of the range (unless BLOCK_COMMENT_AT_FIRST_COLUMN is
 * set); otherwise they are inserted inline. Afterwards the selection covers
 * the commented region and the caret is adjusted.
 */
public void commentRange(int startOffset, int endOffset, String commentPrefix, String commentSuffix, Commenter commenter) {
  final CharSequence chars = myDocument.getCharsSequence();
  LogicalPosition caretPosition = myCaret.getLogicalPosition();
  // Line-mode commenting applies only when the range starts at a line start
  // and ends at a line end (or the document end).
  if (startOffset == 0 || chars.charAt(startOffset - 1) == '\n') {
    if (endOffset == myDocument.getTextLength() || endOffset > 0 && chars.charAt(endOffset - 1) == '\n') {
      CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(myProject);
      CommonCodeStyleSettings settings = CodeStyleSettingsManager.getSettings(myProject).getCommonSettings(myFile.getLanguage());
      String space;
      if (!settings.BLOCK_COMMENT_AT_FIRST_COLUMN) {
        final FileType fileType = myFile.getFileType();
        int line1 = myEditor.offsetToLogicalPosition(startOffset).line;
        int line2 = myEditor.offsetToLogicalPosition(endOffset - 1).line;
        // Indent the comment markers to the shallowest line of the range.
        Indent minIndent = CommentUtil.getMinLineIndent(myProject, myDocument, line1, line2, fileType);
        if (minIndent == null) {
          minIndent = codeStyleManager.zeroIndent();
        }
        space = codeStyleManager.fillIndent(minIndent, fileType);
      }
      else {
        space = "";
      }
      // Build "indent + prefix + \n" and "indent + suffix + \n" insertions.
      final StringBuilder nestingPrefix = new StringBuilder(space).append(commentPrefix);
      if (!commentPrefix.endsWith("\n")) {
        nestingPrefix.append("\n");
      }
      final StringBuilder nestingSuffix = new StringBuilder(space);
      nestingSuffix.append(commentSuffix.startsWith("\n") ? commentSuffix.substring(1) : commentSuffix);
      nestingSuffix.append("\n");
      TextRange range =
        insertNestedComments(startOffset, endOffset, nestingPrefix.toString(), nestingSuffix.toString(), commenter);
      myCaret.setSelection(range.getStartOffset(), range.getEndOffset());
      // The inserted prefix line pushes the caret one line down.
      LogicalPosition pos = new LogicalPosition(caretPosition.line + 1, caretPosition.column);
      myCaret.moveToLogicalPosition(pos);
      return;
    }
  }
  // Inline mode: insert markers directly around the range.
  TextRange range = insertNestedComments(startOffset, endOffset, commentPrefix, commentSuffix, commenter);
  myCaret.setSelection(range.getStartOffset(), range.getEndOffset());
  LogicalPosition pos = new LogicalPosition(caretPosition.line, caretPosition.column + commentPrefix.length());
  myCaret.moveToLogicalPosition(pos);
}
/**
 * Handles one nested comment marker found at {@code offset} and returns the
 * resulting change in document length.
 * <p>
 * If {@code commented} is non-null the marker is replaced by its "commented"
 * form; otherwise the comment is closed/re-opened by inserting
 * {@code toInsert} (skipped entirely when the marker sits exactly on the
 * selection boundary, where splitting is unnecessary).
 */
private int doBoundCommentingAndGetShift(int offset,
                                         String commented,
                                         int skipLength,
                                         String toInsert,
                                         boolean skipBrace,
                                         TextRange selection) {
  // Marker at the very edge of the commented region needs no splitting.
  if (commented == null && (offset == selection.getStartOffset() || offset + (skipBrace ? skipLength : 0) == selection.getEndOffset())) {
    return 0;
  }
  if (commented == null) {
    myDocument.insertString(offset + (skipBrace ? skipLength : 0), toInsert);
    return toInsert.length();
  }
  else {
    myDocument.replaceString(offset, offset + skipLength, commented);
    return commented.length() - skipLength;
  }
}
/**
 * Inserts the block comment markers around [startOffset, endOffset) while
 * neutralizing any comment markers already inside the range, so the result
 * stays a valid (non-prematurely-terminated) block comment. Nested markers
 * are either rewritten to their "commented" forms (when the commenter
 * provides them) or the comment is split at each nested marker. Returns the
 * final commented range.
 */
private TextRange insertNestedComments(int startOffset,
                                       int endOffset,
                                       String commentPrefix,
                                       String commentSuffix,
                                       Commenter commenter) {
  if (commenter instanceof SelfManagingCommenter) {
    // Self-managing commenters do all of this themselves.
    final SelfManagingCommenter selfManagingCommenter = (SelfManagingCommenter)commenter;
    return selfManagingCommenter.insertBlockComment(
      startOffset,
      endOffset,
      myDocument,
      mySelfManagedCommenterData
    );
  }
  String normalizedPrefix = commentPrefix.trim();
  String normalizedSuffix = commentSuffix.trim();
  IntArrayList nestedCommentPrefixes = new IntArrayList();
  IntArrayList nestedCommentSuffixes = new IntArrayList();
  String commentedPrefix = commenter.getCommentedBlockCommentPrefix();
  String commentedSuffix = commenter.getCommentedBlockCommentSuffix();
  CharSequence chars = myDocument.getCharsSequence();
  // Collect offsets of all prefix/suffix markers already inside the range.
  for (int i = startOffset; i < endOffset; ++i) {
    if (CharArrayUtil.regionMatches(chars, i, normalizedPrefix)) {
      nestedCommentPrefixes.add(i);
    }
    else {
      if (CharArrayUtil.regionMatches(chars, i, normalizedSuffix)) {
        nestedCommentSuffixes.add(i);
      }
    }
  }
  int shift = 0;
  // Append the closing suffix unless the range already ends with one and no
  // "commented" form exists to rewrite it.
  if (!(commentedSuffix == null &&
        !nestedCommentSuffixes.isEmpty() &&
        nestedCommentSuffixes.get(nestedCommentSuffixes.size() - 1) + commentSuffix.length() == endOffset)) {
    myDocument.insertString(endOffset, commentSuffix);
    shift += commentSuffix.length();
  }
  // process nested comments in back order
  int i = nestedCommentPrefixes.size() - 1;
  int j = nestedCommentSuffixes.size() - 1;
  final TextRange selection = new TextRange(startOffset, endOffset);
  // Merge-walk both offset lists from the end so earlier edits never shift
  // offsets that are still pending.
  while (i >= 0 && j >= 0) {
    final int prefixIndex = nestedCommentPrefixes.get(i);
    final int suffixIndex = nestedCommentSuffixes.get(j);
    if (prefixIndex > suffixIndex) {
      shift += doBoundCommentingAndGetShift(prefixIndex, commentedPrefix, normalizedPrefix.length(), commentSuffix, false, selection);
      --i;
    }
    else {
      //if (insertPos < myDocument.getTextLength() && Character.isWhitespace(myDocument.getCharsSequence().charAt(insertPos))) {
      //  insertPos = suffixIndex + commentSuffix.length();
      //}
      shift += doBoundCommentingAndGetShift(suffixIndex, commentedSuffix, normalizedSuffix.length(), commentPrefix, true, selection);
      --j;
    }
  }
  while (i >= 0) {
    final int prefixIndex = nestedCommentPrefixes.get(i);
    shift += doBoundCommentingAndGetShift(prefixIndex, commentedPrefix, normalizedPrefix.length(), commentSuffix, false, selection);
    --i;
  }
  while (j >= 0) {
    final int suffixIndex = nestedCommentSuffixes.get(j);
    shift += doBoundCommentingAndGetShift(suffixIndex, commentedSuffix, normalizedSuffix.length(), commentPrefix, true, selection);
    --j;
  }
  // Prepend the opening prefix unless the range already starts with one and
  // no "commented" form exists.
  if (!(commentedPrefix == null && !nestedCommentPrefixes.isEmpty() && nestedCommentPrefixes.get(0) == startOffset)) {
    myDocument.insertString(startOffset, commentPrefix);
    shift += commentPrefix.length();
  }
  // Track the final range with a marker so escaping can adjust it.
  RangeMarker marker = myDocument.createRangeMarker(startOffset, endOffset + shift);
  try {
    return processDocument(myDocument, marker, commenter, true);
  }
  finally {
    marker.dispose();
  }
}
/**
 * Applies the commenter's escaping (or unescaping, per {@code escape}) to the
 * marked region, then returns the region's final text range.
 */
static TextRange processDocument(Document document, RangeMarker marker, Commenter commenter, boolean escape) {
  if (commenter instanceof EscapingCommenter) {
    final EscapingCommenter escapingCommenter = (EscapingCommenter)commenter;
    if (escape) {
      escapingCommenter.escape(document, marker);
    }
    else {
      escapingCommenter.unescape(document, marker);
    }
  }
  return TextRange.create(marker.getStartOffset(), marker.getEndOffset());
}
/**
 * Index of the next occurrence of {@code pattern} at or after
 * {@code position}, or {@code text.length()} when there is none (so callers
 * can use it directly as an exclusive scan bound).
 */
private static int getNearest(String text, String pattern, int position) {
  final int index = text.indexOf(pattern, position);
  if (index >= 0) {
    return index;
  }
  return text.length();
}
/**
 * Restores nested comments inside an uncommented region: scans {@code range}
 * for "commented" prefix/suffix marker forms, tracks nesting depth, and
 * replaces the outermost commented markers with real block comment markers.
 */
static void commentNestedComments(@NotNull Document document, TextRange range, Commenter commenter) {
  final int offset = range.getStartOffset();
  final IntArrayList toReplaceWithComments = new IntArrayList();
  final IntArrayList prefixes = new IntArrayList();
  final String text = document.getCharsSequence().subSequence(range.getStartOffset(), range.getEndOffset()).toString();
  final String commentedPrefix = commenter.getCommentedBlockCommentPrefix();
  final String commentedSuffix = commenter.getCommentedBlockCommentSuffix();
  final String commentPrefix = commenter.getBlockCommentPrefix();
  final String commentSuffix = commenter.getBlockCommentSuffix();
  int nearestSuffix = getNearest(text, commentedSuffix, 0);
  int nearestPrefix = getNearest(text, commentedPrefix, 0);
  int level = 0;
  int lastSuffix = -1;
  // Walk prefix/suffix occurrences in text order, maintaining nesting level.
  for (int i = Math.min(nearestPrefix, nearestSuffix); i < text.length(); i = Math.min(nearestPrefix, nearestSuffix)) {
    if (i > nearestPrefix) {
      nearestPrefix = getNearest(text, commentedPrefix, i);
      continue;
    }
    if (i > nearestSuffix) {
      nearestSuffix = getNearest(text, commentedSuffix, i);
      continue;
    }
    if (i == nearestPrefix) {
      if (level <= 0) {
        // A new outermost comment starts; the previous dangling suffix (if
        // any) also gets replaced.
        if (lastSuffix != -1) {
          toReplaceWithComments.add(lastSuffix);
        }
        level = 1;
        lastSuffix = -1;
        toReplaceWithComments.add(i);
        prefixes.add(i);
      }
      else {
        level++;
      }
      nearestPrefix = getNearest(text, commentedPrefix, nearestPrefix + 1);
    }
    else {
      lastSuffix = i;
      level--;
      nearestSuffix = getNearest(text, commentedSuffix, nearestSuffix + 1);
    }
  }
  if (lastSuffix != -1) {
    toReplaceWithComments.add(lastSuffix);
  }
  // Replace from the end so earlier offsets stay valid; positions recorded in
  // 'prefixes' get the prefix marker, the rest get the suffix marker.
  int prefixIndex = prefixes.size() - 1;
  for (int i = toReplaceWithComments.size() - 1; i >= 0; i--) {
    int position = toReplaceWithComments.get(i);
    if (prefixIndex >= 0 && position == prefixes.get(prefixIndex)) {
      prefixIndex--;
      document.replaceString(offset + position, offset + position + commentedPrefix.length(), commentPrefix);
    }
    else {
      document.replaceString(offset + position, offset + position + commentedSuffix.length(), commentSuffix);
    }
  }
}
/**
 * Expands a deletion range so that removing a comment marker that sits alone
 * on its line (only whitespace around it) also removes the whole line,
 * including the trailing line break (\n, \r, or \r\n).
 */
private TextRange expandRange(int delOffset1, int delOffset2) {
  CharSequence chars = myDocument.getCharsSequence();
  int offset1 = CharArrayUtil.shiftBackward(chars, delOffset1 - 1, " \t");
  // Only whitespace precedes the marker on its line...
  if (offset1 < 0 || chars.charAt(offset1) == '\n' || chars.charAt(offset1) == '\r') {
    int offset2 = CharArrayUtil.shiftForward(chars, delOffset2, " \t");
    // ...and only whitespace follows it: swallow the whole line.
    if (offset2 == myDocument.getTextLength() || chars.charAt(offset2) == '\r' || chars.charAt(offset2) == '\n') {
      delOffset1 = offset1 + 1;
      if (offset2 < myDocument.getTextLength()) {
        delOffset2 = offset2 + 1;
        // Consume both characters of a \r\n pair.
        if (chars.charAt(offset2) == '\r' && offset2 + 1 < myDocument.getTextLength() && chars.charAt(offset2 + 1) == '\n') {
          delOffset2++;
        }
      }
    }
  }
  return new TextRange(delOffset1, delOffset2);
}
/**
 * Given the range of one block comment, returns the deletion ranges for its
 * opening and closing markers (each expanded to whole lines where possible).
 * If the comment does not properly end with the suffix, the closing range is
 * empty.
 */
private Couple<TextRange> findCommentBlock(TextRange range, String commentPrefix, String commentSuffix) {
  CharSequence chars = myDocument.getCharsSequence();
  int startOffset = range.getStartOffset();
  boolean endsProperly = CharArrayUtil.regionMatches(chars, range.getEndOffset() - commentSuffix.length(), commentSuffix);
  TextRange start = expandRange(startOffset, startOffset + commentPrefix.length());
  TextRange end;
  if (endsProperly) {
    end = expandRange(range.getEndOffset() - commentSuffix.length(), range.getEndOffset());
  }
  else {
    // Unterminated comment: nothing to delete at the end.
    end = new TextRange(range.getEndOffset(), range.getEndOffset());
  }
  return Couple.of(start, end);
}
/**
 * Removes block comment markers from {@code range}: collects marker-pair
 * ranges (either from a {@link CustomUncommenter} or by scanning the text),
 * deletes them back-to-front, re-activates any nested commented markers, and
 * finally unescapes the region.
 */
public void uncommentRange(TextRange range, String commentPrefix, String commentSuffix, Commenter commenter) {
  if (commenter instanceof SelfManagingCommenter) {
    final SelfManagingCommenter selfManagingCommenter = (SelfManagingCommenter)commenter;
    selfManagingCommenter.uncommentBlockComment(
      range.getStartOffset(),
      range.getEndOffset(),
      myDocument,
      mySelfManagedCommenterData
    );
    return;
  }
  String text = myDocument.getCharsSequence().subSequence(range.getStartOffset(), range.getEndOffset()).toString();
  int startOffset = range.getStartOffset();
  //boolean endsProperly = CharArrayUtil.regionMatches(chars, range.getEndOffset() - commentSuffix.length(), commentSuffix);
  List<Couple<TextRange>> ranges = new ArrayList<Couple<TextRange>>();
  if (commenter instanceof CustomUncommenter) {
    /*
      In case of custom uncommenter, we need to ask it for list of [commentOpen-start,commentOpen-end], [commentClose-start,commentClose-end]
      and shift if according to current offset
     */
    CustomUncommenter customUncommenter = (CustomUncommenter)commenter;
    for (Couple<TextRange> coupleFromCommenter : customUncommenter.getCommentRangesToDelete(text)) {
      TextRange openComment = coupleFromCommenter.first.shiftRight(startOffset);
      TextRange closeComment = coupleFromCommenter.second.shiftRight(startOffset);
      ranges.add(Couple.of(openComment, closeComment));
    }
  }
  else {
    // If commenter is not custom, we need to get this list by our selves
    int position = 0;
    while (true) {
      int start = getNearest(text, commentPrefix, position);
      if (start == text.length()) {
        break;
      }
      position = start;
      int end = getNearest(text, commentSuffix, position + commentPrefix.length()) + commentSuffix.length();
      position = end;
      Couple<TextRange> pair =
        findCommentBlock(new TextRange(start + startOffset, end + startOffset), commentPrefix, commentSuffix);
      ranges.add(pair);
    }
  }
  RangeMarker marker = myDocument.createRangeMarker(range);
  try {
    // Delete back-to-front so earlier offsets remain valid; within a pair the
    // opening marker is deleted first, then the closing marker shifted by the
    // just-deleted length.
    for (int i = ranges.size() - 1; i >= 0; i--) {
      Couple<TextRange> toDelete = ranges.get(i);
      myDocument.deleteString(toDelete.first.getStartOffset(), toDelete.first.getEndOffset());
      int shift = toDelete.first.getEndOffset() - toDelete.first.getStartOffset();
      myDocument.deleteString(toDelete.second.getStartOffset() - shift, toDelete.second.getEndOffset() - shift);
      if (commenter.getCommentedBlockCommentPrefix() != null) {
        // Turn "commented" nested markers back into real ones.
        commentNestedComments(myDocument, new TextRange(toDelete.first.getEndOffset() - shift, toDelete.second.getStartOffset() - shift),
                              commenter);
      }
    }
    processDocument(myDocument, marker, commenter, false);
  }
  finally {
    marker.dispose();
  }
}
}
| apache-2.0 |
internetisalie/lua-for-idea | src/main/java/com/sylvanaar/idea/Lua/editor/inspections/unassignedVariable/UnassignedVariableAccessInspection.java | 4219 | /*
* Copyright 2011 Jon S Akhtar (Sylvanaar)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sylvanaar.idea.Lua.editor.inspections.unassignedVariable;
import com.intellij.codeHighlighting.HighlightDisplayLevel;
import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.codeInspection.ProblemsHolder;
import com.intellij.codeInspection.ex.*;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.PsiFile;
import com.sylvanaar.idea.Lua.editor.inspections.AbstractInspection;
import com.sylvanaar.idea.Lua.lang.psi.LuaControlFlowOwner;
import com.sylvanaar.idea.Lua.lang.psi.LuaPsiFile;
import com.sylvanaar.idea.Lua.lang.psi.LuaReferenceElement;
import com.sylvanaar.idea.Lua.lang.psi.controlFlow.ControlFlowUtil;
import com.sylvanaar.idea.Lua.lang.psi.controlFlow.Instruction;
import com.sylvanaar.idea.Lua.lang.psi.controlFlow.ReadWriteVariableInstruction;
import com.sylvanaar.idea.Lua.lang.psi.symbols.LuaGlobal;
import com.sylvanaar.idea.Lua.lang.psi.visitor.LuaElementVisitor;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
/**
* @author ven
*/
/**
 * Inspection that warns about Lua globals that are read before any assignment
 * is visible, based on the file's control-flow graph.
 *
 * @author ven
 */
public class UnassignedVariableAccessInspection extends AbstractInspection implements UnfairLocalInspectionTool {
  @Nls
  @NotNull
  @Override
  public String getDisplayName() {
    return "Variable not assigned";
  }

  @Override
  public String getStaticDescription() {
    return "Variable is read from without being assigned to.";
  }

  @NotNull
  @Override
  public String getGroupDisplayName() {
    return PROBABLE_BUGS;
  }

  @NotNull
  @Override
  public HighlightDisplayLevel getDefaultLevel() {
    return HighlightDisplayLevel.WARNING;
  }

  @Override
  public boolean isEnabledByDefault() {
    return true;
  }

  /**
   * Builds a visitor that runs the check once per file (not per block).
   */
  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull final ProblemsHolder holder, boolean isOnTheFly) {
    return new LuaElementVisitor() {
      //            @Override
      //            public void visitBlock(LuaBlock e) {
      //                super.visitBlock(e);
      //
      //                check(e, holder);
      //            }

      @Override
      public void visitFile(PsiFile file) {
        super.visitFile(file);

        // Only Lua files carry the control flow this inspection needs.
        if (! (file instanceof LuaPsiFile))
          return;

        check((LuaControlFlowOwner) file, holder);
      }
    };
  }

  /**
   * Reports each read of an unresolved global that has no prior write on any
   * control-flow path into it.
   */
  protected void check(LuaControlFlowOwner owner, ProblemsHolder problemsHolder) {
    try {
      Instruction[] flow = owner.getControlFlow();
      if (flow == null) return;
      ReadWriteVariableInstruction[] reads = ControlFlowUtil.getReadsWithoutPriorWrites(flow);

      for (ReadWriteVariableInstruction read : reads) {
        PsiElement element = read.getElement();
        if (element instanceof LuaReferenceElement) {
          // Only globals that resolve to nothing are flagged; locals and
          // resolvable globals are considered assigned somewhere.
          if (((LuaReferenceElement) element).getElement() instanceof LuaGlobal)
            if (((LuaReferenceElement) element).multiResolve(false).length == 0) {
              if (element.getTextLength() > 0)
                problemsHolder.registerProblem(element, "Unassigned variable usage",
                                               ProblemHighlightType.GENERIC_ERROR_OR_WARNING);
            }
        }
      }
    } catch (Exception ignored) {
      // Best-effort: a failure while computing control flow must never break
      // the inspection pass, so the exception is intentionally swallowed.
    }
  }
}
| apache-2.0 |
LucidDB/luciddb | luciddb/src/com/lucidera/luciddb/mbean/server/PingServer.java | 2131 | /*
// Licensed to DynamoBI Corporation (DynamoBI) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. DynamoBI licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
*/
package com.lucidera.luciddb.mbean.server;
import java.sql.*;
import java.util.*;
import com.lucidera.luciddb.mbean.*;
import com.lucidera.luciddb.mbean.resource.*;
import org.eigenbase.util.*;
/**
* MBean for getting the status of LucidDb by running a simple SQL query
*
* @author Sunny Choi
* @version $Id$
*/
/**
 * MBean for getting the status of LucidDb by running a simple SQL query.
 *
 * @author Sunny Choi
 * @version $Id$
 */
public class PingServer implements PingServerMBean
{
    // Reused JDBC connection; MBeanUtil.getConnection refreshes it on each
    // status check (the previous one is closed after every check).
    Connection conn = null;

    // Failure details from the most recent status check; null on success.
    String info = null;

    // Status constants are part of the public surface; now final so they
    // cannot be reassigned.
    public static final String STATUS_ALIVE = "ALIVE";
    public static final String STATUS_DEAD = "DEAD";

    /**
     * Executes the validation query, closing the statement and result set
     * afterwards (the previous implementation leaked both).
     *
     * @throws Exception if the connection cannot be obtained or the query fails
     */
    private void runValidationQuery() throws Exception
    {
        conn = MBeanUtil.getConnection(conn);
        String sql = MBeanQueryObject.get().ValidationQuery.str();
        Statement stmt = conn.createStatement();
        try {
            ResultSet rs = stmt.executeQuery(sql);
            // Successfully executing the query is all the check requires.
            rs.close();
        } finally {
            stmt.close();
        }
    }

    /**
     * Runs the validation query and reports server liveness.
     *
     * @return {@link #STATUS_ALIVE} if the query succeeds, otherwise
     * {@link #STATUS_DEAD} (with details available via {@link #getInfo})
     */
    public String getCurrentStatus() throws Exception
    {
        try {
            runValidationQuery();
            info = null;
            return STATUS_ALIVE;
        } catch (Throwable ex) {
            info = ex.getMessage();
            return STATUS_DEAD;
        } finally {
            try {
                conn.close();
            } catch (Exception ex) {
                // do nothing: connection may be null or already closed
            }
        }
    }

    /**
     * @return failure details from the last status check, or null if it passed
     */
    public String getInfo()
    {
        return info;
    }
}
// End PingServer.java
| apache-2.0 |
massakam/pulsar | pulsar-broker/src/test/java/org/apache/pulsar/client/impl/KeyStoreTlsProducerConsumerTestWithoutAuthTest.java | 11002 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.client.impl;
import static org.mockito.Mockito.spy;
import com.google.common.collect.Sets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import lombok.extern.slf4j.Slf4j;
import org.apache.pulsar.client.admin.PulsarAdmin;
import org.apache.pulsar.client.api.ClientBuilder;
import org.apache.pulsar.client.api.Consumer;
import org.apache.pulsar.client.api.Message;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.ProducerConsumerBase;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.SubscriptionType;
import org.apache.pulsar.client.impl.auth.AuthenticationKeyStoreTls;
import org.apache.pulsar.common.policies.data.ClusterData;
import org.apache.pulsar.common.policies.data.TenantInfoImpl;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
// TLS test without authentication and authorization based on KeyStore type config.
@Slf4j
@Test(groups = "broker-impl")
public class KeyStoreTlsProducerConsumerTestWithoutAuthTest extends ProducerConsumerBase {
    // Broker-side keystore/truststore material (JKS). Paths are relative to the module root.
    protected final String BROKER_KEYSTORE_FILE_PATH =
            "./src/test/resources/authentication/keystoretls/broker.keystore.jks";
    protected final String BROKER_TRUSTSTORE_FILE_PATH =
            "./src/test/resources/authentication/keystoretls/broker.truststore.jks";
    protected final String BROKER_KEYSTORE_PW = "111111";
    protected final String BROKER_TRUSTSTORE_PW = "111111";
    // Client-side keystore/truststore material used for mutual-TLS handshakes.
    protected final String CLIENT_KEYSTORE_FILE_PATH =
            "./src/test/resources/authentication/keystoretls/client.keystore.jks";
    protected final String CLIENT_TRUSTSTORE_FILE_PATH =
            "./src/test/resources/authentication/keystoretls/client.truststore.jks";
    protected final String CLIENT_KEYSTORE_PW = "111111";
    protected final String CLIENT_TRUSTSTORE_PW = "111111";
    protected final String KEYSTORE_TYPE = "JKS";
    private final String clusterName = "use";
    // Enabled TLS protocol versions; populated in internalSetUpForBroker().
    Set<String> tlsProtocols = Sets.newConcurrentHashSet();

    @BeforeMethod
    @Override
    protected void setup() throws Exception {
        // TLS configuration for Broker
        internalSetUpForBroker();
        // Start Broker
        super.init();
    }

    @AfterMethod(alwaysRun = true)
    @Override
    protected void cleanup() throws Exception {
        super.internalCleanup();
    }

    /**
     * Configures the broker for KeyStore-based TLS on both the binary and web service
     * ports, requiring a trusted client certificate on connect.
     */
    protected void internalSetUpForBroker() {
        conf.setBrokerServicePortTls(Optional.of(0));
        conf.setWebServicePortTls(Optional.of(0));
        conf.setTlsEnabledWithKeyStore(true);
        conf.setTlsKeyStoreType(KEYSTORE_TYPE);
        conf.setTlsKeyStore(BROKER_KEYSTORE_FILE_PATH);
        conf.setTlsKeyStorePassword(BROKER_KEYSTORE_PW);
        conf.setTlsTrustStoreType(KEYSTORE_TYPE);
        // The broker trusts the client's truststore so that client certs validate.
        conf.setTlsTrustStore(CLIENT_TRUSTSTORE_FILE_PATH);
        conf.setTlsTrustStorePassword(CLIENT_TRUSTSTORE_PW);
        conf.setClusterName(clusterName);
        conf.setTlsRequireTrustedClientCertOnConnect(true);
        tlsProtocols.add("TLSv1.3");
        tlsProtocols.add("TLSv1.2");
        conf.setTlsProtocols(tlsProtocols);
        conf.setNumExecutorThreadPoolSize(5);
    }

    /**
     * Recreates {@code pulsarClient} pointing at {@code lookupUrl} with KeyStore TLS enabled.
     *
     * @param addCertificates whether to attach the client keystore as authentication
     *                        (i.e. whether the client presents a certificate)
     * @param lookupUrl       broker binary or web service TLS URL
     */
    protected void internalSetUpForClient(boolean addCertificates, String lookupUrl) throws Exception {
        if (pulsarClient != null) {
            pulsarClient.close();
        }
        ClientBuilder clientBuilder = PulsarClient.builder().serviceUrl(lookupUrl)
                .enableTls(true)
                .useKeyStoreTls(true)
                .tlsTrustStorePath(BROKER_TRUSTSTORE_FILE_PATH)
                .tlsTrustStorePassword(BROKER_TRUSTSTORE_PW)
                .allowTlsInsecureConnection(false)
                .operationTimeout(1000, TimeUnit.MILLISECONDS);
        if (addCertificates) {
            Map<String, String> authParams = new HashMap<>();
            authParams.put(AuthenticationKeyStoreTls.KEYSTORE_TYPE, KEYSTORE_TYPE);
            authParams.put(AuthenticationKeyStoreTls.KEYSTORE_PATH, CLIENT_KEYSTORE_FILE_PATH);
            authParams.put(AuthenticationKeyStoreTls.KEYSTORE_PW, CLIENT_KEYSTORE_PW);
            clientBuilder.authentication(AuthenticationKeyStoreTls.class.getName(), authParams);
        }
        replacePulsarClient(clientBuilder);
    }

    /**
     * Creates the cluster, tenant and namespace used by the tests through a TLS admin
     * client that authenticates with the client keystore.
     */
    protected void internalSetUpForNamespace() throws Exception {
        Map<String, String> authParams = new HashMap<>();
        authParams.put(AuthenticationKeyStoreTls.KEYSTORE_PATH, CLIENT_KEYSTORE_FILE_PATH);
        authParams.put(AuthenticationKeyStoreTls.KEYSTORE_PW, CLIENT_KEYSTORE_PW);
        if (admin != null) {
            admin.close();
        }
        admin = spy(PulsarAdmin.builder().serviceHttpUrl(brokerUrlTls.toString())
                .useKeyStoreTls(true)
                .tlsTrustStorePath(BROKER_TRUSTSTORE_FILE_PATH)
                .tlsTrustStorePassword(BROKER_TRUSTSTORE_PW)
                .allowTlsInsecureConnection(true)
                .authentication(AuthenticationKeyStoreTls.class.getName(), authParams).build());
        admin.clusters().createCluster(clusterName, ClusterData.builder()
                .serviceUrl(brokerUrl.toString())
                .serviceUrlTls(brokerUrlTls.toString())
                .brokerServiceUrl(pulsar.getBrokerServiceUrl())
                .brokerServiceUrlTls(pulsar.getBrokerServiceUrlTls())
                .build());
        admin.tenants().createTenant("my-property",
                new TenantInfoImpl(Sets.newHashSet("appid1", "appid2"), Sets.newHashSet("use")));
        admin.namespaces().createNamespace("my-property/my-ns");
    }

    /**
     * verifies that messages whose size is larger than 2^14 bytes (max size of single TLS chunk) can be
     * produced/consumed
     *
     * @throws Exception
     */
    @Test(timeOut = 30000)
    public void testTlsLargeSizeMessage() throws Exception {
        log.info("-- Starting {} test --", methodName);
        final int MESSAGE_SIZE = 16 * 1024 + 1;
        log.info("-- message size -- {}", MESSAGE_SIZE);
        String topicName = "persistent://my-property/use/my-ns/testTlsLargeSizeMessage"
                + System.currentTimeMillis();
        internalSetUpForClient(true, pulsar.getBrokerServiceUrlTls());
        internalSetUpForNamespace();
        Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName)
                .subscriptionName("my-subscriber-name").subscribe();
        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName)
                .create();
        for (int i = 0; i < 10; i++) {
            byte[] message = new byte[MESSAGE_SIZE];
            Arrays.fill(message, (byte) i);
            producer.send(message);
        }
        Message<byte[]> msg = null;
        for (int i = 0; i < 10; i++) {
            msg = consumer.receive(5, TimeUnit.SECONDS);
            byte[] expected = new byte[MESSAGE_SIZE];
            Arrays.fill(expected, (byte) i);
            // TestNG's Assert takes (actual, expected) in that order.
            Assert.assertEquals(msg.getData(), expected);
        }
        // Acknowledge the consumption of all messages at once
        consumer.acknowledgeCumulative(msg);
        consumer.close();
        producer.close();
        log.info("-- Exiting {} test --", methodName);
    }

    /**
     * Verifies that the broker rejects a binary-protocol TLS connection when the client
     * sends no certificate, and accepts it once the certificate is attached.
     */
    @Test(timeOut = 300000)
    public void testTlsClientAuthOverBinaryProtocol() throws Exception {
        log.info("-- Starting {} test --", methodName);
        String topicName = "persistent://my-property/use/my-ns/testTlsClientAuthOverBinaryProtocol"
                + System.currentTimeMillis();
        internalSetUpForNamespace();
        // Test 1 - Using TLS on binary protocol without sending certs - expect failure
        internalSetUpForClient(false, pulsar.getBrokerServiceUrlTls());
        try {
            pulsarClient.newConsumer().topic(topicName)
                    .subscriptionName("my-subscriber-name").subscriptionType(SubscriptionType.Exclusive).subscribe();
            Assert.fail("Server should have failed the TLS handshake since client didn't send a certificate.");
        } catch (Exception ex) {
            // expected: the handshake is rejected without a trusted client certificate
        }
        // Test 2 - Using TLS on binary protocol - sending certs
        internalSetUpForClient(true, pulsar.getBrokerServiceUrlTls());
        try {
            Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName)
                    .subscriptionName("my-subscriber-name").subscriptionType(SubscriptionType.Exclusive).subscribe();
            consumer.close();
        } catch (Exception ex) {
            Assert.fail("Should not fail since certs are sent.");
        }
    }

    /**
     * Verifies that the broker rejects an HTTPS lookup when the client sends no
     * certificate, and accepts it once the certificate is attached.
     */
    @Test(timeOut = 30000)
    public void testTlsClientAuthOverHTTPProtocol() throws Exception {
        log.info("-- Starting {} test --", methodName);
        String topicName = "persistent://my-property/use/my-ns/testTlsClientAuthOverHTTPProtocol"
                + System.currentTimeMillis();
        internalSetUpForNamespace();
        // Test 1 - Using TLS on https without sending certs - expect failure
        internalSetUpForClient(false, pulsar.getWebServiceAddressTls());
        try {
            pulsarClient.newConsumer().topic(topicName)
                    .subscriptionName("my-subscriber-name").subscriptionType(SubscriptionType.Exclusive).subscribe();
            Assert.fail("Server should have failed the TLS handshake since client didn't send a certificate.");
        } catch (Exception ex) {
            // expected: the handshake is rejected without a trusted client certificate
        }
        // Test 2 - Using TLS on https - sending certs
        internalSetUpForClient(true, pulsar.getWebServiceAddressTls());
        try {
            Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName)
                    .subscriptionName("my-subscriber-name").subscriptionType(SubscriptionType.Exclusive).subscribe();
            consumer.close();
        } catch (Exception ex) {
            Assert.fail("Should not fail since certs are sent.");
        }
    }
}
| apache-2.0 |
superbderrick/ExoPlayer | library/core/src/test/java/com/google/android/exoplayer2/offline/DefaultDownloaderFactoryTest.java | 1917 | /*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.offline;
import static com.google.common.truth.Truth.assertThat;
import android.net.Uri;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.upstream.DummyDataSource;
import com.google.android.exoplayer2.upstream.cache.Cache;
import java.util.Collections;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
/** Unit tests for {@link DefaultDownloaderFactory}. */
@RunWith(AndroidJUnit4.class)
public final class DefaultDownloaderFactoryTest {

  @Test
  public void createProgressiveDownloader() throws Exception {
    // Factory backed by a mocked cache and a no-op data source; neither is exercised here.
    DownloaderConstructorHelper helper =
        new DownloaderConstructorHelper(Mockito.mock(Cache.class), DummyDataSource.FACTORY);
    DownloaderFactory downloaderFactory = new DefaultDownloaderFactory(helper);

    DownloadRequest request =
        new DownloadRequest(
            "id",
            DownloadRequest.TYPE_PROGRESSIVE,
            Uri.parse("https://www.test.com/download"),
            /* streamKeys= */ Collections.emptyList(),
            /* customCacheKey= */ null,
            /* data= */ null);

    // A progressive request must map to a ProgressiveDownloader instance.
    assertThat(downloaderFactory.createDownloader(request)).isInstanceOf(ProgressiveDownloader.class);
  }
}
| apache-2.0 |
sijie/incubator-distributedlog | distributedlog-core-twitter/src/main/java/org/apache/distributedlog/subscription/package-info.java | 921 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* {@link com.twitter.util.Future} based subscription API.
*/
package org.apache.distributedlog.subscription; | apache-2.0 |
ekumenlabs/AndroidStreamingClient | efflux/src/main/java/com/biasedbit/efflux/packet/AbstractReportPacket.java | 2911 | /*
* Copyright 2010 Bruno de Carvalho
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.biasedbit.efflux.packet;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* @author <a:mailto="bruno.carvalho@wit-software.com" />Bruno de Carvalho</a>
*/
/**
 * Base class for RTCP sender/receiver report packets: holds the sender SSRC and
 * up to 31 reception report blocks (the RC field in the RTCP header is 5 bits wide).
 */
public abstract class AbstractReportPacket extends ControlPacket {

    // internal vars --------------------------------------------------------------------------------------------------

    protected long senderSsrc;
    // Lazily created; null until the first block is added or a list is set.
    protected List<ReceptionReport> receptionReports;

    // constructors ---------------------------------------------------------------------------------------------------

    protected AbstractReportPacket(Type type) {
        super(type);
    }

    // public methods -------------------------------------------------------------------------------------------------

    /**
     * Appends a reception report block.
     *
     * @return {@code true} if the block was added, {@code false} if the 31-block limit was reached
     */
    public boolean addReceptionReportBlock(ReceptionReport block) {
        if (this.receptionReports == null) {
            this.receptionReports = new ArrayList<ReceptionReport>();
            return this.receptionReports.add(block);
        }
        // 5 bits is the limit
        return (this.receptionReports.size() < 31) && this.receptionReports.add(block);
    }

    /** Returns the number of reception report blocks (the RC header field value). */
    public byte getReceptionReportCount() {
        if (this.receptionReports == null) {
            return 0;
        }
        return (byte) this.receptionReports.size();
    }

    // getters & setters ----------------------------------------------------------------------------------------------

    public long getSenderSsrc() {
        return senderSsrc;
    }

    public void setSenderSsrc(long senderSsrc) {
        // SSRC is an unsigned 32-bit field carried in a Java long.
        if ((senderSsrc < 0) || (senderSsrc > 0xffffffffL)) {
            throw new IllegalArgumentException("Valid range for SSRC is [0;0xffffffff]");
        }
        this.senderSsrc = senderSsrc;
    }

    /**
     * Returns an unmodifiable view of the reception reports, or {@code null} if none were set.
     */
    public List<ReceptionReport> getReceptionReports() {
        if (this.receptionReports == null) {
            return null;
        }
        return Collections.unmodifiableList(this.receptionReports);
    }

    public void setReceptionReports(List<ReceptionReport> receptionReports) {
        // Was ">= 31", which wrongly rejected a 31-element list even though
        // addReceptionReportBlock() allows up to 31 blocks (5-bit RC field).
        if (receptionReports.size() > 31) {
            throw new IllegalArgumentException("At most 31 report blocks can be sent in a *ReportPacket");
        }
        this.receptionReports = receptionReports;
    }
}
| apache-2.0 |
sdgdsffdsfff/zeus | log_analysis/src/main/java/com/ctrip/zeus/logstats/parser/state/extended/RequestUriState.java | 2475 | package com.ctrip.zeus.logstats.parser.state.extended;
import com.ctrip.zeus.logstats.parser.state.Action;
import com.ctrip.zeus.logstats.parser.state.LogStatsState;
import com.ctrip.zeus.logstats.parser.state.LogStatsStateMachine;
import com.ctrip.zeus.logstats.parser.state.StateMachineContext;
/**
* Created by zhoumy on 2016/6/7.
*/
/**
 * Log-parsing state that consumes the request-URI token from an access-log line.
 * Characters are copied from the machine context's source buffer until the given
 * separator string is found; the consumed text is stored under {@code name}.
 */
public class RequestUriState implements LogStatsState {
    private final String name;
    private final Action action;
    // Successor state in the linked state chain; set externally via setNext().
    private LogStatsState next;

    public RequestUriState(String name) {
        this.name = name;
        this.action = new RequestUriAction();
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public LogStatsStateMachine getSubMachine() {
        // This state has no nested machine.
        return null;
    }

    @Override
    public Action getAction() {
        return action;
    }

    @Override
    public void setNext(LogStatsState next) {
        this.next = next;
    }

    @Override
    public LogStatsState getNext() {
        return next;
    }

    @Override
    public boolean runSubMachine() {
        return false;
    }

    /**
     * Scans forward from the context's current index, accumulating characters
     * until the full separator sequence is matched.
     */
    private class RequestUriAction implements Action {
        @Override
        public void execute(StateMachineContext ctxt, String separator) {
            StringBuilder sb = new StringBuilder();
            // Default to a single space, matching the no-arg execute() overload.
            if (separator == null){
                separator = " ";
            }
            char[] schars = separator.toCharArray();
            char[] source = ctxt.getSource();
            char c;
            for (int i = ctxt.getCurrentIndex(); i < source.length; i++) {
                c = source[i];
                // Candidate match: first separator char seen and enough room left
                // for the whole separator.
                if (c == schars[0] && i + schars.length <= source.length){
                    boolean isEnd = true;
                    for (int j = 1 ; j < schars.length ; j++){
                        if (schars[j] != source[i+j]){
                            isEnd =false;
                            break;
                        }
                    }
                    if (isEnd){
                        // Advance the context to the separator position and
                        // record the accumulated URI under this state's name.
                        ctxt.proceed(i - ctxt.getCurrentIndex());
                        ctxt.addResult(name, sb.toString());
                        return;
                    }
                }
                sb.append(c);
            }
            // NOTE(review): if the separator never occurs, the accumulated text is
            // silently discarded and the context is not advanced — presumably the
            // caller treats that as a parse failure; confirm before relying on it.
        }

        @Override
        public void execute(StateMachineContext ctxt) {
            execute(ctxt," ");
        }
    }
}
| apache-2.0 |
sdrieling/osw-web | src/org/onesocialweb/gwt/client/ui/menu/MenuCommand.java | 806 | /*
* Copyright 2010 Vodafone Group Services Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.onesocialweb.gwt.client.ui.menu;
import com.google.gwt.user.client.Command;
/**
 * A {@link Command} that also carries a human-readable label, allowing it to be
 * rendered as a menu entry.
 */
public interface MenuCommand extends Command {

	/**
	 * Returns the text displayed for this command in a menu.
	 */
	String getLabel();
}
| apache-2.0 |
zimmermatt/flink | flink-runtime/src/test/java/org/apache/flink/runtime/blob/BlobCacheDeleteTest.java | 12594 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.blob;
import org.apache.flink.api.common.JobID;
import org.apache.flink.configuration.BlobServerOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.OperatingSystem;
import org.apache.flink.util.TestLogger;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import static org.apache.flink.runtime.blob.BlobCachePutTest.verifyDeletedEventually;
import static org.apache.flink.runtime.blob.BlobKey.BlobType.TRANSIENT_BLOB;
import static org.apache.flink.runtime.blob.BlobServerDeleteTest.delete;
import static org.apache.flink.runtime.blob.BlobServerGetTest.verifyDeleted;
import static org.apache.flink.runtime.blob.BlobServerPutTest.put;
import static org.apache.flink.runtime.blob.BlobServerPutTest.verifyContents;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
/**
* Tests how DELETE requests behave.
*/
/**
 * Tests how DELETE requests behave.
 */
public class BlobCacheDeleteTest extends TestLogger {

	private final Random rnd = new Random();

	// Fresh storage directories per test; cleaned up automatically by JUnit.
	@Rule
	public TemporaryFolder temporaryFolder = new TemporaryFolder();

	// Five combinations of (jobId1, jobId2): null/non-null and equal/distinct ids.
	@Test
	public void testDeleteTransient1() throws IOException {
		testDelete(null, new JobID());
	}

	@Test
	public void testDeleteTransient2() throws IOException {
		testDelete(new JobID(), null);
	}

	@Test
	public void testDeleteTransient3() throws IOException {
		testDelete(null, null);
	}

	@Test
	public void testDeleteTransient4() throws IOException {
		testDelete(new JobID(), new JobID());
	}

	@Test
	public void testDeleteTransient5() throws IOException {
		JobID jobId = new JobID();
		testDelete(jobId, jobId);
	}

	/**
	 * Uploads a (different) byte array for each of the given jobs and verifies that deleting one of
	 * them (via the {@link BlobCacheService}) does not influence the other.
	 *
	 * @param jobId1
	 * 		first job id
	 * @param jobId2
	 * 		second job id
	 */
	private void testDelete(@Nullable JobID jobId1, @Nullable JobID jobId2)
			throws IOException {
		final Configuration config = new Configuration();
		config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath());

		try (
			BlobServer server = new BlobServer(config, new VoidBlobStore());
			BlobCacheService cache = new BlobCacheService(new InetSocketAddress("localhost", server.getPort()),
				config, new VoidBlobStore())) {

			server.start();

			byte[] data = new byte[2000000];
			rnd.nextBytes(data);
			// second payload differs from the first in a single bit
			byte[] data2 = Arrays.copyOf(data, data.length);
			data2[0] ^= 1;

			// put first BLOB
			TransientBlobKey key1 = (TransientBlobKey) put(server, jobId1, data, TRANSIENT_BLOB);
			assertNotNull(key1);

			// put two more BLOBs (same key, other key) for another job ID
			TransientBlobKey key2a = (TransientBlobKey) put(server, jobId2, data, TRANSIENT_BLOB);
			assertNotNull(key2a);
			BlobKeyTest.verifyKeyDifferentHashEquals(key1, key2a);
			TransientBlobKey key2b = (TransientBlobKey) put(server, jobId2, data2, TRANSIENT_BLOB);
			assertNotNull(key2b);
			BlobKeyTest.verifyKeyDifferentHashDifferent(key1, key2b);

			// issue a DELETE request
			assertTrue(delete(cache, jobId1, key1));

			// delete only works on local cache!
			assertTrue(server.getStorageLocation(jobId1, key1).exists());
			// delete on server so that the cache cannot re-download
			assertTrue(server.deleteInternal(jobId1, key1));
			verifyDeleted(cache, jobId1, key1);
			// deleting one BLOB should not affect another BLOB with a different key
			// (and keys are always different now)
			verifyContents(server, jobId2, key2a, data);
			verifyContents(server, jobId2, key2b, data2);

			// delete first file of second job
			assertTrue(delete(cache, jobId2, key2a));
			// delete only works on local cache
			assertTrue(server.getStorageLocation(jobId2, key2a).exists());
			// delete on server so that the cache cannot re-download
			assertTrue(server.deleteInternal(jobId2, key2a));
			verifyDeleted(cache, jobId2, key2a);
			verifyContents(server, jobId2, key2b, data2);

			// delete second file of second job
			assertTrue(delete(cache, jobId2, key2b));
			// delete only works on local cache
			assertTrue(server.getStorageLocation(jobId2, key2b).exists());
			// delete on server so that the cache cannot re-download
			assertTrue(server.deleteInternal(jobId2, key2b));
			verifyDeleted(cache, jobId2, key2b);
		}
	}

	@Test
	public void testDeleteTransientAlreadyDeletedNoJob() throws IOException {
		testDeleteTransientAlreadyDeleted(null);
	}

	@Test
	public void testDeleteTransientAlreadyDeletedForJob() throws IOException {
		testDeleteTransientAlreadyDeleted(new JobID());
	}

	/**
	 * Uploads a byte array for the given job and verifies that deleting it (via the {@link
	 * BlobCacheService}) does not fail independent of whether the file exists.
	 *
	 * @param jobId
	 * 		job id
	 */
	private void testDeleteTransientAlreadyDeleted(@Nullable final JobID jobId) throws IOException {
		final Configuration config = new Configuration();
		config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath());

		try (
			BlobServer server = new BlobServer(config, new VoidBlobStore());
			BlobCacheService cache = new BlobCacheService(new InetSocketAddress("localhost", server.getPort()),
				config, new VoidBlobStore())) {

			server.start();

			byte[] data = new byte[2000000];
			rnd.nextBytes(data);

			// put BLOB
			TransientBlobKey key = (TransientBlobKey) put(server, jobId, data, TRANSIENT_BLOB);
			assertNotNull(key);

			// remove the backing file out-of-band to simulate a prior deletion
			File blobFile = server.getStorageLocation(jobId, key);
			assertTrue(blobFile.delete());

			// DELETE operation should not fail if file is already deleted
			assertTrue(delete(cache, jobId, key));
			verifyDeleted(cache, jobId, key);

			// one more delete call that should not fail
			assertTrue(delete(cache, jobId, key));
			verifyDeleted(cache, jobId, key);
		}
	}

	@Test
	public void testDeleteTransientLocalFailsNoJob() throws IOException, InterruptedException {
		testDeleteTransientLocalFails(null);
	}

	@Test
	public void testDeleteTransientLocalFailsForJob() throws IOException, InterruptedException {
		testDeleteTransientLocalFails(new JobID());
	}

	/**
	 * Uploads a byte array for the given job and verifies that a delete operation (via the {@link
	 * BlobCacheService}) does not fail even if the file is not deletable locally, e.g. via
	 * restricting the permissions.
	 *
	 * @param jobId
	 * 		job id
	 */
	private void testDeleteTransientLocalFails(@Nullable final JobID jobId)
			throws IOException, InterruptedException {
		assumeTrue(!OperatingSystem.isWindows()); //setWritable doesn't work on Windows.

		final Configuration config = new Configuration();
		config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath());

		File blobFile = null;
		File directory = null;
		try (
			BlobServer server = new BlobServer(config, new VoidBlobStore());
			BlobCacheService cache = new BlobCacheService(new InetSocketAddress("localhost", server.getPort()),
				config, new VoidBlobStore())) {

			server.start();

			try {
				byte[] data = new byte[2000000];
				rnd.nextBytes(data);

				// put BLOB
				TransientBlobKey key = (TransientBlobKey) put(server, jobId, data, TRANSIENT_BLOB);
				assertNotNull(key);

				// access from cache once to have it available there
				verifyContents(cache, jobId, key, data);

				// make the cached file and its directory read-only so the local delete fails
				blobFile = cache.getTransientBlobService().getStorageLocation(jobId, key);
				directory = blobFile.getParentFile();

				assertTrue(blobFile.setWritable(false, false));
				assertTrue(directory.setWritable(false, false));

				// issue a DELETE request
				assertFalse(delete(cache, jobId, key));

				// the file should still be there on the cache
				verifyContents(cache, jobId, key, data);
				// the server should have started the delete call after the cache accessed the (transient!) BLOB
				verifyDeletedEventually(server, jobId, key);
			} finally {
				// restore permissions so TemporaryFolder can clean up
				if (blobFile != null && directory != null) {
					//noinspection ResultOfMethodCallIgnored
					blobFile.setWritable(true, false);
					//noinspection ResultOfMethodCallIgnored
					directory.setWritable(true, false);
				}
			}
		}
	}

	@Test
	public void testConcurrentDeleteOperationsNoJobTransient()
			throws IOException, ExecutionException, InterruptedException {
		testConcurrentDeleteOperations(null);
	}

	@Test
	public void testConcurrentDeleteOperationsForJobTransient()
			throws IOException, ExecutionException, InterruptedException {
		testConcurrentDeleteOperations(new JobID());
	}

	/**
	 * [FLINK-6020] Tests that concurrent delete operations don't interfere with each other.
	 *
	 * <p>Note: This test checks that there cannot be two threads which have checked whether a given
	 * blob file exist and then one of them fails deleting it. Without the introduced lock, this
	 * situation should rarely happen and make this test fail. Thus, if this test should become
	 * "unstable", then the delete atomicity is most likely broken.
	 * @param jobId
	 * 		job ID to use (or <tt>null</tt> if job-unrelated)
	 *
	 */
	private void testConcurrentDeleteOperations(@Nullable final JobID jobId)
			throws IOException, InterruptedException, ExecutionException {
		final Configuration config = new Configuration();
		config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath());

		final int concurrentDeleteOperations = 3;
		final ExecutorService executor = Executors.newFixedThreadPool(concurrentDeleteOperations);

		final List<CompletableFuture<Void>> deleteFutures = new ArrayList<>(concurrentDeleteOperations);

		final byte[] data = {1, 2, 3};

		try (
			BlobServer server = new BlobServer(config, new VoidBlobStore());
			BlobCacheService cache = new BlobCacheService(new InetSocketAddress("localhost", server.getPort()),
				config, new VoidBlobStore())) {

			server.start();

			final TransientBlobKey blobKey =
				(TransientBlobKey) put(server, jobId, data, TRANSIENT_BLOB);

			assertTrue(server.getStorageLocation(jobId, blobKey).exists());

			// launch several deletes of the same key in parallel
			for (int i = 0; i < concurrentDeleteOperations; i++) {
				CompletableFuture<Void> deleteFuture = CompletableFuture
					.supplyAsync(
						() -> {
							try {
								assertTrue(delete(cache, jobId, blobKey));
								assertFalse(cache.getTransientBlobService().getStorageLocation(jobId, blobKey).exists());
								// delete only works on local cache!
								assertTrue(server.getStorageLocation(jobId, blobKey).exists());
								return null;
							} catch (IOException e) {
								throw new CompletionException(new FlinkException(
									"Could not upload blob.", e));
							}
						}, executor);
				deleteFutures.add(deleteFuture);
			}

			CompletableFuture<Void> waitFuture = FutureUtils.waitForAll(deleteFutures);

			// make sure all delete operation have completed successfully
			// in case of no lock, one of the delete operations should eventually fail
			waitFuture.get();

			// delete only works on local cache!
			assertTrue(server.getStorageLocation(jobId, blobKey).exists());
		} finally {
			executor.shutdownNow();
		}
	}
}
| apache-2.0 |
maduhu/mifos-head | cashflow/src/main/java/org/mifos/platform/cashflow/ui/model/CashFlowValidator.java | 7455 | /*
* Copyright (c) 2005-2010 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.platform.cashflow.ui.model;
import org.apache.commons.lang.StringUtils;
import org.mifos.platform.cashflow.CashFlowConstants;
import org.springframework.binding.message.MessageBuilder;
import org.springframework.binding.message.MessageContext;
import org.springframework.binding.message.MessageResolver;
import org.springframework.binding.validation.ValidationContext;
import java.math.BigDecimal;
import java.math.RoundingMode;
import static java.text.MessageFormat.format;
public class CashFlowValidator {
public void validateCaptureCashFlow(CashFlowForm cashFlow, ValidationContext context) {
MessageContext messageContext = context.getMessageContext();
for (MonthlyCashFlowForm monthlyCashFlowForm : cashFlow.getMonthlyCashFlows()) {
validateExpense(messageContext, monthlyCashFlowForm);
validateRevenue(messageContext, monthlyCashFlowForm);
validateNotes(messageContext, monthlyCashFlowForm);
}
validateTotalCapitalAndLiability(cashFlow, messageContext);
validateIndebtednessRatio(cashFlow, messageContext);
setTotalsOnCashFlowForm(cashFlow, messageContext);
}
private void setTotalsOnCashFlowForm(CashFlowForm cashFlowForm, MessageContext messageContext) {
if (!messageContext.hasErrorMessages()) {
BigDecimal totalExpenses = BigDecimal.ZERO, totalRevenues = BigDecimal.ZERO;
for (MonthlyCashFlowForm monthlyCashFlowForm : cashFlowForm.getMonthlyCashFlows()) {
totalExpenses = totalExpenses.add(monthlyCashFlowForm.getExpense());
totalRevenues = totalRevenues.add(monthlyCashFlowForm.getRevenue());
}
cashFlowForm.setTotalExpenses(totalExpenses);
cashFlowForm.setTotalRevenues(totalRevenues);
}
}
private void validateTotalCapitalAndLiability(CashFlowForm cashFlow, MessageContext messageContext) {
if (cashFlow.isCaptureCapitalLiabilityInfo()) {
validateTotalCapital(messageContext, cashFlow.getTotalCapital());
validateTotalLiability(messageContext, cashFlow.getTotalLiability());
}
}
private void validateIndebtednessRatio(CashFlowForm cashFlowForm, MessageContext messageContext) {
if (cashFlowForm.shouldForValidateIndebtednessRate()) {
Double indebtednessRatio = cashFlowForm.getIndebtednessRatio();
BigDecimal loanAmount = cashFlowForm.getLoanAmount();
BigDecimal totalCapital = cashFlowForm.getTotalCapital();
BigDecimal totalLiability = cashFlowForm.getTotalLiability();
Double calculatedIndebtednessRatio = totalLiability.add(loanAmount).multiply(CashFlowConstants.HUNDRED).
divide(totalCapital,2,RoundingMode.HALF_EVEN).doubleValue();
if (calculatedIndebtednessRatio >= indebtednessRatio) {
String message = format("Indebtedness rate of the client is {0} which should be lesser than the allowable value of {1}",
calculatedIndebtednessRatio, indebtednessRatio);
constructErrorMessage(CashFlowConstants.INDEBTEDNESS_RATIO_MORE_THAN_ALLOWED, message, messageContext, calculatedIndebtednessRatio, indebtednessRatio);
}
}
}
private void validateTotalCapital(MessageContext messageContext, BigDecimal totalCapital) {
if (isNull(totalCapital)) {
String message = format("Total Capital should not be empty");
constructErrorMessage(CashFlowConstants.TOTAL_CAPITAL_SHOULD_NOT_BE_EMPTY, message, messageContext);
return;
}
if ((totalCapital.doubleValue() <= 0)) {
String message = format("Total Capital needs to be a value greater than zero");
constructErrorMessage(CashFlowConstants.TOTAL_CAPITAL_SHOULD_BE_GREATER_THAN_ZERO, message, messageContext);
}
}
private void validateTotalLiability(MessageContext messageContext, BigDecimal totalLiability) {
if (isNull(totalLiability)) {
String message = format("Total Liability should not be empty");
constructErrorMessage(CashFlowConstants.TOTAL_LIABILITY_SHOULD_NOT_BE_EMPTY, message, messageContext);
return;
}
if (totalLiability.doubleValue() < 0) {
String message = format("Total Liability needs to be non negative");
constructErrorMessage(CashFlowConstants.TOTAL_LIABILITY_SHOULD_BE_NON_NEGATIVE, message, messageContext);
}
}
private void validateExpense(MessageContext messageContext, MonthlyCashFlowForm monthlyCashFlowForm) {
if (isNull(monthlyCashFlowForm.getExpense())) {
String message = format("Please specify expense for {0} {1}.", monthlyCashFlowForm.getMonthInLocale(),
Integer.toString(monthlyCashFlowForm.getYear()));
constructErrorMessage(CashFlowConstants.EMPTY_EXPENSE, message, messageContext,
monthlyCashFlowForm.getMonthInLocale(), Integer.toString(monthlyCashFlowForm.getYear()));
}
}
/**
 * Adds an {@code EMPTY_REVENUE} error for the given month when no revenue
 * value has been entered on the form.
 *
 * @param messageContext      context that collects the validation messages
 * @param monthlyCashFlowForm the form row being validated
 */
private void validateRevenue(MessageContext messageContext, MonthlyCashFlowForm monthlyCashFlowForm) {
    if (!isNull(monthlyCashFlowForm.getRevenue())) {
        return;
    }
    String month = monthlyCashFlowForm.getMonthInLocale();
    String year = Integer.toString(monthlyCashFlowForm.getYear());
    String message = format("Please specify revenue for {0} {1}.", month, year);
    constructErrorMessage(CashFlowConstants.EMPTY_REVENUE, message, messageContext, month, year);
}
/**
 * Validates that the free-text notes, when present, do not exceed the
 * maximum allowed length.
 *
 * @param messageContext      context that collects the validation messages
 * @param monthlyCashFlowForm the form row being validated
 */
private void validateNotes(MessageContext messageContext, MonthlyCashFlowForm monthlyCashFlowForm) {
    // Maximum allowed length of the notes field; keep in sync with the message text below.
    final int maxNotesLength = 300;
    String notes = monthlyCashFlowForm.getNotes();
    // isEmpty also guards against null, so the length() call below cannot NPE.
    if (!StringUtils.isEmpty(notes) && notes.length() > maxNotesLength) {
        String message = format("Notes should be less than 300 characters for {0} {1}.", monthlyCashFlowForm.getMonthInLocale(),
                Integer.toString(monthlyCashFlowForm.getYear()));
        // NOTE(review): CashFlowConstants.EMPTY_NOTES is used here for a "too long" violation —
        // the code name looks wrong for this case; confirm whether a dedicated constant should exist.
        constructErrorMessage(CashFlowConstants.EMPTY_NOTES, message, messageContext,
                monthlyCashFlowForm.getMonthInLocale(), Integer.toString(monthlyCashFlowForm.getYear()));
    }
}
/**
 * Null-check helper used by the field validators above.
 *
 * @param value the value to test
 * @return true when {@code value} is null
 */
private boolean isNull(BigDecimal value) {
    return value == null;
}
/**
 * Builds an error-severity message with the given code, default text and
 * message arguments, and records it in the context.
 *
 * @param code    error code resolved against the message bundle
 * @param message fallback text used when the code cannot be resolved
 * @param context context that collects the validation messages
 * @param args    arguments substituted into the resolved message
 */
void constructErrorMessage(String code, String message, MessageContext context, Object... args) {
    MessageResolver resolver = new MessageBuilder()
            .error()
            .code(code)
            .defaultText(message)
            .args(args)
            .build();
    context.addMessage(resolver);
}
}
| apache-2.0 |
Fabryprog/camel | core/camel-core/src/test/java/org/apache/camel/processor/TryCatchContinueToRouteTest.java | 2538 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.junit.Test;
public class TryCatchContinueToRouteTest extends ContextTestSupport {

    /**
     * Verifies that after an exception is handled by doTry/doCatch in route b,
     * processing continues to route c, and that the EXCEPTION_CAUGHT exchange
     * property set in the catch block is still visible at mock:c.
     */
    @Test
    public void testTryCatchContinueToRoute() throws Exception {
        getMockEndpoint("mock:a").expectedBodiesReceived("Hello World");
        getMockEndpoint("mock:b").expectedBodiesReceived("Hello World");
        // The catch endpoint must see both the original body and the caught exception.
        getMockEndpoint("mock:catch").expectedBodiesReceived("Hello World");
        getMockEndpoint("mock:catch").message(0).exchangeProperty(Exchange.EXCEPTION_CAUGHT).isInstanceOf(IllegalArgumentException.class);
        // Route c runs after route b completes; the caught-exception property survives.
        getMockEndpoint("mock:c").expectedBodiesReceived("Hello World");
        getMockEndpoint("mock:c").message(0).exchangeProperty(Exchange.EXCEPTION_CAUGHT).isInstanceOf(IllegalArgumentException.class);

        template.sendBody("direct:a", "Hello World");

        assertMockEndpointsSatisfied();
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // a -> b (throws + catches) -> c
                from("direct:a")
                    .to("mock:a")
                    .to("direct:b")
                    .to("direct:c");

                // The thrown IllegalArgumentException is swallowed by the catch block.
                from("direct:b")
                    .doTry()
                        .to("mock:b")
                        .throwException(new IllegalArgumentException("Damn"))
                    .doCatch(Exception.class)
                        .to("mock:catch")
                    .end();

                from("direct:c")
                    .to("mock:c");
            }
        };
    }
}
| apache-2.0 |
feesa/easyrec-parent | easyrec-plugin-container/src/main/java/org/easyrec/model/plugin/archive/ArchivePseudoGenerator.java | 3008 | /*
* Copyright 2011 Research Studios Austria Forschungsgesellschaft mBH
*
* This file is part of easyrec.
*
* easyrec is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* easyrec is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with easyrec. If not, see <http://www.gnu.org/licenses/>.
*/
package org.easyrec.model.plugin.archive;
import org.easyrec.plugin.model.PluginId;
import org.easyrec.plugin.support.GeneratorPluginSupport;
import org.easyrec.store.dao.core.ArchiveDAO;
import java.util.Date;
/**
* @author pmarschik
*/
public class ArchivePseudoGenerator
        extends GeneratorPluginSupport<ArchivePseudoConfiguration, ArchivePseudoStatistics> {

    public static final String DISPLAY_NAME = "Archiving";
    public static final PluginId ID = new PluginId("http://www.easyrec.org/internal/Archive", "1.00");
    public static final int ASSOCTYPE = 0;

    /** Milliseconds in one day; converts the configured retention (in days) into a cutoff date. */
    private static final long MILLIS_PER_DAY = 24L * 60L * 60L * 1000L;

    private ArchiveDAO archiveDAO;

    /** Injected by the container; gives access to the action/archive tables. */
    public void setArchiveDAO(ArchiveDAO archiveDAO) {
        this.archiveDAO = archiveDAO;
    }

    public ArchivePseudoGenerator() {
        super(DISPLAY_NAME, ID.getUri(), ID.getVersion(), ArchivePseudoConfiguration.class,
                ArchivePseudoStatistics.class);
    }

    // NOTE(review): no caller of init() is visible in this file — presumably invoked
    // reflectively (e.g. as a bean init-method); confirm before removing.
    private void init() {
        install(false);
        initialize();
    }

    /**
     * Moves all actions older than the configured number of days into the current
     * archive table, rolling over to a new archive table when the current one is full.
     * Records the cutoff date and the number of archived actions in {@code stats}.
     */
    @Override
    protected void doExecute(ExecutionControl executionControl, ArchivePseudoStatistics stats) throws Exception {
        String actualArchiveTableName = archiveDAO.getActualArchiveTableName();

        // Cutoff: anything strictly older than "days" days is archived.
        Date refDate = new Date(System.currentTimeMillis() - (getConfiguration().getDays() * MILLIS_PER_DAY));
        logger.info("Cutoff date: " + refDate);

        // NOTE(review): the unboxing comparison below throws NPE if the DAO ever
        // returns null — confirm the DAO contract guarantees a non-null count.
        Integer numberOfActionsToArchive =
                archiveDAO.getNumberOfActionsToArchive(getConfiguration().getTenantId(), refDate);
        logger.info("Number of actions to archive:" + numberOfActionsToArchive);

        if (numberOfActionsToArchive > 0) {
            if (archiveDAO.isArchiveFull(actualArchiveTableName, numberOfActionsToArchive)) {
                // generate a new archive table and continue writing there
                actualArchiveTableName = archiveDAO.generateNewArchive(actualArchiveTableName);
            }
            // move actions to archive
            archiveDAO.moveActions(actualArchiveTableName, getConfiguration().getTenantId(), refDate);
        }

        stats.setReferenceDate(refDate);
        stats.setNumberOfArchivedActions(numberOfActionsToArchive);
    }

    @Override
    public String getPluginDescription() {
        return "easyrec internal";
    }
}
| apache-2.0 |
apache/derby | java/org.apache.derby.engine/org/apache/derby/impl/sql/execute/RevokeRoleConstantAction.java | 9593 | /*
Derby - Class org.apache.derby.impl.sql.execute.RevokeRoleConstantAction
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.sql.execute;
import java.util.Iterator;
import java.util.List;
import org.apache.derby.shared.common.error.StandardException;
import org.apache.derby.iapi.sql.Activation;
import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;
import org.apache.derby.iapi.sql.depend.DependencyManager;
import org.apache.derby.iapi.sql.conn.Authorizer;
import org.apache.derby.iapi.sql.dictionary.RoleGrantDescriptor;
import org.apache.derby.iapi.sql.dictionary.DataDictionary;
import org.apache.derby.iapi.sql.dictionary.RoleClosureIterator;
import org.apache.derby.iapi.store.access.TransactionController;
import org.apache.derby.shared.common.reference.SQLState;
import org.apache.derby.shared.common.sanity.SanityManager;
/**
* This class performs actions that are ALWAYS performed for a
* REVOKE role statement at execution time.
*
*/
class RevokeRoleConstantAction extends DDLConstantAction {

    // NOTE(review): raw List — presumably List<String> for both fields; confirm callers
    // before adding generics.
    private List roleNames;
    private List grantees;
    // WITH ADMIN OPTION is not implemented yet, so this is a compile-time constant false.
    private final boolean withAdminOption = false; // not impl.

    // CONSTRUCTORS
    /**
     * Make the ConstantAction for a REVOKE role statement.
     * When executed, will revoke the given roles from the given grantees.
     *
     * @param roleNames List of the name of the role names being revoked
     * @param grantees List of the authorization ids granted to role
     */
    public RevokeRoleConstantAction(List roleNames, List grantees) {
        this.roleNames = roleNames;
        this.grantees = grantees;
    }

    // INTERFACE METHODS

    /**
     * This is the guts of the Execution-time logic for REVOKE role.
     * For every (role, grantee) pair: verifies the role exists, verifies the
     * current user may revoke it (database owner only, until WITH ADMIN OPTION
     * is implemented), invalidates dependents of every role in the grantee
     * closure, and drops the grant descriptor.
     *
     * @see org.apache.derby.iapi.sql.execute.ConstantAction#executeConstantAction
     */
    public void executeConstantAction(Activation activation)
            throws StandardException {

        LanguageConnectionContext lcc =
            activation.getLanguageConnectionContext();
        DataDictionary dd = lcc.getDataDictionary();
        TransactionController tc = lcc.getTransactionExecute();
        final String grantor = lcc.getCurrentUserId(activation);

        // Switch the data dictionary to write mode for the DDL below.
        dd.startWriting(lcc);

        for (Iterator rIter = roleNames.iterator(); rIter.hasNext();) {
            String role = (String)rIter.next();

            // PUBLIC can never be used as a role name.
            if (role.equals(Authorizer.PUBLIC_AUTHORIZATION_ID)) {
                throw StandardException.
                    newException(SQLState.AUTH_PUBLIC_ILLEGAL_AUTHORIZATION_ID);
            }

            for (Iterator gIter = grantees.iterator(); gIter.hasNext();) {
                String grantee = (String)gIter.next();

                // check that role exists
                RoleGrantDescriptor rdDef =
                    dd.getRoleDefinitionDescriptor(role);

                if (rdDef == null) {
                    throw StandardException.
                        newException(SQLState.ROLE_INVALID_SPECIFICATION, role);
                }

                // Check that role is granted to us (or PUBLIC) with
                // WITH ADMIN option so we can grant (and hence
                // revoke) it. For database owner, a role definition
                // always fulfills this requirement. If we implement
                // granting with WITH ADMIN option later, we need to
                // look for a grant to us or to PUBLIC which has WITH
                // ADMIN. The role definition descriptor will not
                // suffice in that case, so we need something like:
                //
                // rd = dd.findRoleGrantWithAdminToRoleOrPublic(grantor)
                // if (rd != null) {
                //   :
                if (grantor.equals(lcc.getDataDictionary().
                                       getAuthorizationDatabaseOwner())) {
                    // All ok, we are database owner
                    if (SanityManager.DEBUG) {
                        SanityManager.ASSERT(
                            rdDef.getGrantee().equals(grantor),
                            "expected database owner in role grant descriptor");
                        SanityManager.ASSERT(
                            rdDef.isWithAdminOption(),
                            "expected role definition to have ADMIN OPTION");
                    }
                } else {
                    throw StandardException.newException
                        (SQLState.AUTH_ROLE_DBO_ONLY, "REVOKE role");
                }

                RoleGrantDescriptor rd =
                    dd.getRoleGrantDescriptor(role, grantee, grantor);

                if (rd != null && withAdminOption) {
                    // NOTE: Never called yet, withAdminOption not yet
                    // implemented.

                    if (SanityManager.DEBUG) {
                        SanityManager.NOTREACHED();
                    }

                    // revoke only the ADMIN OPTION from grantee
                    //
                    if (rd.isWithAdminOption()) {
                        // Invalidate and remove old descriptor and add a new
                        // one without admin option.
                        //
                        // RoleClosureIterator rci =
                        //     dd.createRoleClosureIterator
                        //     (activation.getTransactionController(),
                        //      role, false);
                        //
                        // String r;
                        // while ((r = rci.next()) != null) {
                        //     rdDef = dd.getRoleDefinitionDescriptor(r);
                        //
                        //     dd.getDependencyManager().invalidateFor
                        //         (rdDef, DependencyManager.REVOKE_ROLE, lcc);
                        // }
                        //
                        // rd.drop(lcc);
                        // rd.setWithAdminOption(false);
                        // dd.addDescriptor(rd,
                        //                  null,  // parent
                        //                  DataDictionary.SYSROLES_CATALOG_NUM,
                        //                  false, // no duplicatesAllowed
                        //                  tc);
                    } else {
                        activation.addWarning
                            (StandardException.newWarning
                             (SQLState.LANG_WITH_ADMIN_OPTION_NOT_REVOKED,
                              role, grantee));
                    }
                } else if (rd != null) {
                    // Normal revoke of role from grantee.
                    //
                    // When a role is revoked, for every role in its grantee
                    // closure, we call the REVOKE_ROLE action. It is used to
                    // invalidate dependent objects (constraints, triggers and
                    // views). Note that until DERBY-1632 is fixed, we risk
                    // dropping objects not really dependent on this role, but
                    // one some other role just because it inherits from this
                    // one. See also DropRoleConstantAction.
                    RoleClosureIterator rci =
                        dd.createRoleClosureIterator
                        (activation.getTransactionController(),
                         role, false);

                    String r;
                    while ((r = rci.next()) != null) {
                        rdDef = dd.getRoleDefinitionDescriptor(r);
                        dd.getDependencyManager().invalidateFor
                            (rdDef, DependencyManager.REVOKE_ROLE, lcc);
                    }

                    rd.drop(lcc);
                } else {
                    // No such grant exists: warn instead of failing.
                    activation.addWarning
                        (StandardException.newWarning
                         (SQLState.LANG_ROLE_NOT_REVOKED, role, grantee));
                }
            }
        }
    }

    // OBJECT SHADOWS

    // NOTE(review): StringBuffer could be StringBuilder here (no shared state),
    // but left as-is in this comments-only pass.
    public String toString()
    {
        // Do not put this under SanityManager.DEBUG - it is needed for
        // error reporting.
        StringBuffer sb1 = new StringBuffer();
        for (Iterator it = roleNames.iterator(); it.hasNext();) {
            if( sb1.length() > 0) {
                sb1.append( ", ");
            }
            sb1.append( it.next().toString());
        }

        StringBuffer sb2 = new StringBuffer();
        for (Iterator it = grantees.iterator(); it.hasNext();) {
            if( sb2.length() > 0) {
                sb2.append( ", ");
            }
            sb2.append( it.next().toString());
        }
        return ("REVOKE " +
                sb1.toString() +
                " FROM: " +
                sb2.toString() +
                "\n");
    }
}
| apache-2.0 |
rpudil/midpoint | model/model-impl/src/main/java/com/evolveum/midpoint/model/impl/expr/SequentialValueExpressionEvaluator.java | 4390 | /*
* Copyright (c) 2015 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.model.impl.expr;
import com.evolveum.midpoint.model.common.expression.ExpressionEvaluationContext;
import com.evolveum.midpoint.model.common.expression.ExpressionEvaluator;
import com.evolveum.midpoint.model.common.expression.ExpressionUtil;
import com.evolveum.midpoint.model.impl.lens.LensContext;
import com.evolveum.midpoint.prism.Item;
import com.evolveum.midpoint.prism.ItemDefinition;
import com.evolveum.midpoint.prism.PrismContext;
import com.evolveum.midpoint.prism.PrismProperty;
import com.evolveum.midpoint.prism.PrismPropertyValue;
import com.evolveum.midpoint.prism.PrismValue;
import com.evolveum.midpoint.prism.crypto.Protector;
import com.evolveum.midpoint.prism.delta.ItemDelta;
import com.evolveum.midpoint.prism.delta.PrismValueDeltaSetTriple;
import com.evolveum.midpoint.repo.api.RepositoryService;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.util.exception.ExpressionEvaluationException;
import com.evolveum.midpoint.util.exception.ObjectNotFoundException;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.FocusType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SequentialValueExpressionEvaluatorType;
/**
* @author semancik
*
*/
public class SequentialValueExpressionEvaluator<V extends PrismValue, D extends ItemDefinition> implements ExpressionEvaluator<V,D> {

    private SequentialValueExpressionEvaluatorType sequentialValueEvaluatorType;
    private D outputDefinition;
    private Protector protector;
    RepositoryService repositoryService;
    private PrismContext prismContext;

    SequentialValueExpressionEvaluator(SequentialValueExpressionEvaluatorType sequentialValueEvaluatorType,
            D outputDefinition, Protector protector, RepositoryService repositoryService, PrismContext prismContext) {
        this.sequentialValueEvaluatorType = sequentialValueEvaluatorType;
        this.outputDefinition = outputDefinition;
        this.protector = protector;
        this.repositoryService = repositoryService;
        this.prismContext = prismContext;
    }

    /**
     * Produces the next value of the configured sequence, converted to the
     * output item definition, wrapped in a delta-set triple.
     *
     * @throws UnsupportedOperationException if the output definition is not a property
     */
    @Override
    public PrismValueDeltaSetTriple<V> evaluate(ExpressionEvaluationContext params) throws SchemaException,
            ExpressionEvaluationException, ObjectNotFoundException {

        long counter = getSequenceCounter(sequentialValueEvaluatorType.getSequenceRef().getOid(), repositoryService, params.getResult());

        Object value = ExpressionUtil.convertToOutputValue(counter, outputDefinition, protector);

        Item<V,D> output = outputDefinition.instantiate();
        if (output instanceof PrismProperty) {
            PrismPropertyValue<Object> pValue = new PrismPropertyValue<Object>(value);
            ((PrismProperty<Object>)output).add(pValue);
        } else {
            throw new UnsupportedOperationException("Can only generate values of property, not "+output.getClass());
        }

        return ItemDelta.toDeltaSetTriple(output, null);
    }

    /**
     * Returns the sequence counter for the given sequence OID. The value is
     * cached in the lens context so a single projection wave reuses one
     * counter instead of advancing the sequence repeatedly.
     */
    public static long getSequenceCounter(String sequenceOid, RepositoryService repositoryService, OperationResult result) throws ObjectNotFoundException, SchemaException {
        LensContext<? extends FocusType> ctx = ModelExpressionThreadLocalHolder.getLensContext();
        if (ctx == null) {
            throw new IllegalStateException("No lens context");
        }

        Long counter = ctx.getSequenceCounter(sequenceOid);
        if (counter == null) {
            counter = repositoryService.advanceSequence(sequenceOid, result);
            ctx.setSequenceCounter(sequenceOid, counter);
        }

        return counter;
    }

    /* (non-Javadoc)
     * @see com.evolveum.midpoint.common.expression.ExpressionEvaluator#shortDebugDump()
     */
    @Override
    public String shortDebugDump() {
        // Fixed typo: was "squentialValue".
        return "sequentialValue: "+sequentialValueEvaluatorType.getSequenceRef().getOid();
    }
}
| apache-2.0 |
DarwinSPL/DarwinSPL | plugins/de.darwinspl.feature.graphical.base/src/de/darwinspl/feature/graphical/base/util/DwFeatureUtil.java | 1171 | package de.darwinspl.feature.graphical.base.util;
import java.util.Date;
import eu.hyvar.evolution.util.HyEvolutionUtil;
import eu.hyvar.feature.HyFeature;
import eu.hyvar.feature.HyGroupComposition;
import eu.hyvar.feature.HyGroupType;
import eu.hyvar.feature.HyGroupTypeEnum;
public class DwFeatureUtil {

    /**
     * Checks if feature has no modifier, i.e. is child of an "or" or "alternative"
     * group at the given date.
     *
     * @param feature the feature whose group membership is inspected
     * @param date    the point in time at which the temporal model is evaluated
     * @return true if feature is child of a non-AND group at that date, false otherwise
     */
    public static boolean isWithoutModifier(HyFeature feature, Date date) {
        HyGroupComposition composition = HyEvolutionUtil.getValidTemporalElement(feature.getGroupMembership(), date);
        if (composition == null) {
            return false;
        }

        HyGroupType type = HyEvolutionUtil.getValidTemporalElement(composition.getCompositionOf().getTypes(), date);
        // Guard against a missing group type at this date (previously an NPE);
        // treat it like the AND case and report "with modifier".
        if (type == null || type.getType() == HyGroupTypeEnum.AND) {
            return false;
        }

        return true;
    }

    /**
     * Inverse of {@link #isWithoutModifier(HyFeature, Date)}.
     */
    public static boolean isWithModifier(HyFeature feature, Date date) {
        return !isWithoutModifier(feature, date);
    }
}
| apache-2.0 |
abrowning80/solutions-geoevent-java | solutions-geoevent/processors/bearing-processor/src/main/java/com/esri/geoevent/solutions/processor/bearing/BearingProcessorService.java | 992 | package com.esri.geoevent.solutions.processor.bearing;
import com.esri.ges.core.component.ComponentException;
import com.esri.ges.core.property.PropertyException;
import com.esri.ges.manager.geoeventdefinition.GeoEventDefinitionManager;
import com.esri.ges.messaging.Messaging;
import com.esri.ges.processor.GeoEventProcessor;
import com.esri.ges.processor.GeoEventProcessorServiceBase;
/**
 * GeoEvent processor service that creates {@link BearingProcessor} instances,
 * wiring each one with the messaging facility and the GeoEvent definition
 * manager injected into this service.
 */
public class BearingProcessorService extends GeoEventProcessorServiceBase {

    // Injected collaborators handed to every processor created by this service.
    Messaging messaging;
    GeoEventDefinitionManager manager;

    public BearingProcessorService() throws PropertyException {
        definition = new BearingProcessorDefinition();
    }

    /** Creates a new processor and wires it with the injected collaborators. */
    @Override
    public GeoEventProcessor create() throws ComponentException {
        final BearingProcessor processor = new BearingProcessor(definition);
        processor.setMessaging(messaging);
        processor.setGDManager(manager);
        return processor;
    }

    /** Injection point for the messaging facility. */
    public void setMessaging(Messaging m) {
        messaging = m;
    }

    /** Injection point for the GeoEvent definition manager. */
    public void setManager(GeoEventDefinitionManager m) {
        manager = m;
    }
}
| apache-2.0 |
yurloc/uberfire | uberfire-workbench/uberfire-workbench-client/src/test/java/org/uberfire/client/workbench/panels/impl/SimpleWorkbenchPanelPresenterTest.java | 857 | package org.uberfire.client.workbench.panels.impl;
import com.google.gwtmockito.GwtMock;
import com.google.gwtmockito.GwtMockitoTestRunner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.uberfire.client.workbench.PanelManager;
import static org.junit.Assert.*;
@RunWith(GwtMockitoTestRunner.class)
public class SimpleWorkbenchPanelPresenterTest {
private SimpleWorkbenchPanelView view;
@GwtMock
private PanelManager panelManager;
private SimpleWorkbenchPanelPresenter presenter;
@Before
public void setup() {
view = new SimpleWorkbenchPanelView();
presenter = new SimpleWorkbenchPanelPresenter( view, panelManager, null,null);
}
@Test
public void init() {
presenter.init();
assertEquals( presenter, view.getPresenter() );
}
}
| apache-2.0 |
cripsy-lamp/prom-plugin | trunk/src-Contexts/org/processmining/contexts/uitopia/packagemanager/PMPackageListModel.java | 2195 | package org.processmining.contexts.uitopia.packagemanager;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import javax.swing.AbstractListModel;
/**
 * Swing list model over a fixed collection of packages, supporting an
 * optional favorites-only filter and sorting by package or author name.
 */
public class PMPackageListModel extends AbstractListModel {

    private static final long serialVersionUID = 4483191311607087069L;

    /** Orders packages alphabetically by package name. */
    private static final Comparator<PMPackage> BY_PACKAGE_NAME = new Comparator<PMPackage>() {
        public int compare(PMPackage left, PMPackage right) {
            return left.getPackageName().compareTo(right.getPackageName());
        }
    };

    /** Orders packages alphabetically by author name. */
    private static final Comparator<PMPackage> BY_AUTHOR_NAME = new Comparator<PMPackage>() {
        public int compare(PMPackage left, PMPackage right) {
            return left.getAuthorName().compareTo(right.getAuthorName());
        }
    };

    /** The complete set of packages handed to the model; never modified here. */
    private final List<? extends PMPackage> fullList;
    /** The currently visible (filtered and sorted) packages. */
    private List<? extends PMPackage> filteredList;
    /** When true, only favorite packages are visible. */
    private boolean filterFavorites;
    /** The active sort order for the visible list. */
    private Comparator<PMPackage> comparator;

    public PMPackageListModel(List<? extends PMPackage> packages) {
        fullList = packages;
        filteredList = new ArrayList<PMPackage>(packages);
        filterFavorites = false;
        comparator = BY_PACKAGE_NAME;
    }

    /** Sorts the visible list by package name and refreshes the view. */
    public void sortByPackageName() {
        comparator = BY_PACKAGE_NAME;
        updateList();
    }

    /** Sorts the visible list by author name and refreshes the view. */
    public void sortByAuthorName() {
        comparator = BY_AUTHOR_NAME;
        updateList();
    }

    /** Switches the favorites-only filter on or off and refreshes the view. */
    public void setFilterFavorites(boolean isFiltered) {
        filterFavorites = isFiltered;
        updateList();
    }

    /** Rebuilds the visible list from scratch: copy, filter, sort, then notify listeners. */
    private void updateList() {
        List<PMPackage> visible = new ArrayList<PMPackage>(fullList);
        if (filterFavorites) {
            List<PMPackage> favoritesOnly = new ArrayList<PMPackage>();
            for (PMPackage candidate : visible) {
                if (candidate.isFavorite()) {
                    favoritesOnly.add(candidate);
                }
            }
            visible = favoritesOnly;
        }
        Collections.sort(visible, comparator);
        filteredList = visible;
        fireContentsChanged(this, 0, getSize() - 1);
    }

    /** @see javax.swing.ListModel#getElementAt(int) */
    public Object getElementAt(int index) {
        return filteredList.get(index);
    }

    /** @see javax.swing.ListModel#getSize() */
    public int getSize() {
        return filteredList.size();
    }
}
| apache-2.0 |
gzsombor/ranger | security-admin/src/main/java/org/apache/ranger/common/TimedExecutorConfigurator.java | 3085 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ranger.common;
import java.util.concurrent.TimeUnit;
import javax.annotation.PostConstruct;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;
@Service
@Scope("singleton")
public class TimedExecutorConfigurator {

    // These two matter operationally, hence they are user configurable.
    static final String Property_MaxThreadPoolSize = "ranger.timed.executor.max.threadpool.size";
    static final String Property_QueueSize = "ranger.timed.executor.queue.size";

    // Fallbacks for when the property file is missing or its values are unusable.
    static final int _DefaultMaxThreadPoolSize = 10;
    static final private int _DefaultBlockingQueueSize = 100;

    private int _maxThreadPoolSize;
    private int _blockingQueueSize;

    // Hard-coded for now; can be exposed as configuration if ever needed.
    private int _coreThreadPoolSize = 1;
    private long _keepAliveTime = 10;
    private TimeUnit _keepAliveTimeUnit = TimeUnit.SECONDS;

    public TimedExecutorConfigurator() {
    }

    // Runs once at startup (singleton scope), so the property reads are not cached elsewhere.
    @PostConstruct
    void initialize() {
        Integer configuredPoolSize = PropertiesUtil.getIntProperty(Property_MaxThreadPoolSize);
        _maxThreadPoolSize = (configuredPoolSize != null) ? configuredPoolSize : _DefaultMaxThreadPoolSize;

        Integer configuredQueueSize = PropertiesUtil.getIntProperty(Property_QueueSize);
        _blockingQueueSize = (configuredQueueSize != null) ? configuredQueueSize : _DefaultBlockingQueueSize;
    }

    /**
     * Provided mostly for testability.
     * @param maxThreadPoolSize maximum number of worker threads
     * @param blockingQueueSize capacity of the task queue
     */
    public TimedExecutorConfigurator(int maxThreadPoolSize, int blockingQueueSize) {
        _maxThreadPoolSize = maxThreadPoolSize;
        _blockingQueueSize = blockingQueueSize;
    }

    public int getCoreThreadPoolSize() {
        return _coreThreadPoolSize;
    }

    public int getMaxThreadPoolSize() {
        return _maxThreadPoolSize;
    }

    public long getKeepAliveTime() {
        return _keepAliveTime;
    }

    public TimeUnit getKeepAliveTimeUnit() {
        return _keepAliveTimeUnit;
    }

    public int getBlockingQueueSize() {
        return _blockingQueueSize;
    }
}
| apache-2.0 |
rombert/jackrabbit-oak | oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/session/WorkspaceImpl.java | 12188 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.jcr.session;
import static org.apache.jackrabbit.oak.commons.PathUtils.getParentPath;
import static org.apache.jackrabbit.oak.jcr.session.SessionImpl.checkIndexOnName;
import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.NODE_TYPES_PATH;
import java.io.IOException;
import java.io.InputStream;
import javax.annotation.Nonnull;
import javax.jcr.InvalidSerializedDataException;
import javax.jcr.NamespaceRegistry;
import javax.jcr.PathNotFoundException;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.UnsupportedRepositoryOperationException;
import javax.jcr.ValueFactory;
import javax.jcr.lock.LockManager;
import javax.jcr.observation.ObservationManager;
import javax.jcr.query.QueryManager;
import javax.jcr.version.Version;
import javax.jcr.version.VersionManager;
import org.apache.jackrabbit.api.JackrabbitWorkspace;
import org.apache.jackrabbit.api.security.authorization.PrivilegeManager;
import org.apache.jackrabbit.commons.xml.ParsingContentHandler;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate;
import org.apache.jackrabbit.oak.jcr.delegate.WorkspaceDelegate;
import org.apache.jackrabbit.oak.jcr.lock.LockManagerImpl;
import org.apache.jackrabbit.oak.jcr.query.QueryManagerImpl;
import org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation;
import org.apache.jackrabbit.oak.jcr.version.VersionManagerImpl;
import org.apache.jackrabbit.oak.jcr.xml.ImportHandler;
import org.apache.jackrabbit.oak.namepath.NamePathMapper;
import org.apache.jackrabbit.oak.plugins.name.ReadWriteNamespaceRegistry;
import org.apache.jackrabbit.oak.plugins.nodetype.write.ReadWriteNodeTypeManager;
import org.xml.sax.ContentHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
/**
* TODO document
*/
public class WorkspaceImpl implements JackrabbitWorkspace {
private final SessionContext sessionContext;
private final SessionDelegate sessionDelegate;
private final WorkspaceDelegate workspaceDelegate;
private final QueryManagerImpl queryManager;
private final VersionManagerImpl versionManager;
private final ReadWriteNodeTypeManager nodeTypeManager;
/**
 * Wires this workspace against the given session context and eagerly creates
 * the delegates and managers it hands out later.
 */
public WorkspaceImpl(final SessionContext sessionContext) {
    this.sessionContext = sessionContext;
    this.sessionDelegate = sessionContext.getSessionDelegate();
    this.workspaceDelegate = new WorkspaceDelegate(sessionContext);
    this.queryManager = new QueryManagerImpl(sessionContext);
    this.versionManager = new VersionManagerImpl(sessionContext);
    // Anonymous subclass binding the generic node-type manager to this session's
    // root tree, value factory and name/path mapping.
    this.nodeTypeManager = new ReadWriteNodeTypeManager() {
        @Override
        protected void refresh() throws RepositoryException {
            // keepChanges=true: pick up node-type changes without discarding session state.
            getSession().refresh(true);
        }

        @Override
        protected Tree getTypes() {
            return sessionDelegate.getRoot().getTree(NODE_TYPES_PATH);
        }

        @Nonnull
        @Override
        protected Root getWriteRoot() {
            // Fetches the most recent root from the content session for write operations.
            return sessionDelegate.getContentSession().getLatestRoot();
        }

        @Override
        @Nonnull
        protected ValueFactory getValueFactory() {
            return sessionContext.getValueFactory();
        }

        @Nonnull
        @Override
        protected NamePathMapper getNamePathMapper() {
            return sessionContext;
        }
    };
}
//----------------------------------------------------------< Workspace >---
/** @return the JCR session this workspace belongs to. */
@Override
@Nonnull
public Session getSession() {
    return sessionContext.getSession();
}
/** @return the workspace name, as reported by the session delegate. */
@Override
public String getName() {
    return sessionDelegate.getWorkspaceName();
}
/** Copies within this workspace by delegating to the workspace-qualified variant. */
@Override
public void copy(String srcAbsPath, String destAbsPath) throws RepositoryException {
    copy(getName(), srcAbsPath, destAbsPath);
}
// Maps a JCR path to its Oak representation, throwing PathNotFoundException when it cannot be mapped.
private String getOakPathOrThrowNotFound(String srcAbsPath) throws PathNotFoundException {
    return sessionContext.getOakPathOrThrowNotFound(srcAbsPath);
}
/**
 * Copies a subtree to a new location. Only same-workspace copies are
 * supported; a different source workspace is rejected.
 *
 * @throws UnsupportedRepositoryOperationException for cross-workspace copies
 */
@Override
public void copy(String srcWorkspace,
                 String srcAbsPath,
                 final String destAbsPath) throws RepositoryException {
    final String srcOakPath = getOakPathOrThrowNotFound(srcAbsPath);
    final String destOakPath = getOakPathOrThrowNotFound(destAbsPath);
    if (!getName().equals(srcWorkspace)) {
        throw new UnsupportedRepositoryOperationException("Not implemented.");
    }

    // Run as a named session operation so precondition checks happen inside the delegate.
    sessionDelegate.performVoid(new SessionOperation("copy", true) {
        @Override
        public void checkPreconditions() throws RepositoryException {
            super.checkPreconditions();
            ensureIsAlive();
        }

        @Override
        public void performVoid() throws RepositoryException {
            // Both parent nodes must be unprotected, and the target name must not carry an index.
            sessionDelegate.checkProtectedNode(getParentPath(srcOakPath));
            sessionDelegate.checkProtectedNode(getParentPath(destOakPath));
            checkIndexOnName(destAbsPath);
            workspaceDelegate.copy(srcOakPath, destOakPath);
        }
    });
}
/**
 * Clone is not implemented: after path mapping and the protected-node checks,
 * the operation always throws UnsupportedRepositoryOperationException.
 */
@Override
public void clone(String srcWorkspace, String srcAbsPath, String destAbsPath, boolean removeExisting) throws RepositoryException {
    final String srcOakPath = getOakPathOrThrowNotFound(srcAbsPath);
    final String destOakPath = getOakPathOrThrowNotFound(destAbsPath);
    sessionDelegate.performVoid(new SessionOperation("clone", true) {
        @Override
        public void checkPreconditions() throws RepositoryException {
            super.checkPreconditions();
            ensureIsAlive();
        }

        @Override
        public void performVoid() throws RepositoryException {
            // Checks run first so callers get the more specific error before the
            // not-implemented failure.
            sessionDelegate.checkProtectedNode(getParentPath(srcOakPath));
            sessionDelegate.checkProtectedNode(getParentPath(destOakPath));
            throw new UnsupportedRepositoryOperationException("Not implemented.");
        }
    });
}
    @Override
    public void move(String srcAbsPath, final String destAbsPath) throws RepositoryException {
        final String srcOakPath = getOakPathOrThrowNotFound(srcAbsPath);
        final String destOakPath = getOakPathOrThrowNotFound(destAbsPath);
        // NOTE(review): unlike copy/clone, the liveness and protection checks run
        // inline rather than inside a SessionOperation — confirm this is intentional.
        ensureIsAlive();
        sessionDelegate.checkProtectedNode(getParentPath(srcOakPath));
        sessionDelegate.checkProtectedNode(getParentPath(destOakPath));
        checkIndexOnName(destAbsPath);
        // Third argument presumably distinguishes a workspace-level move from a
        // transient (session) move — TODO confirm against SessionDelegate.move.
        sessionDelegate.move(srcOakPath, destOakPath, false);
    }
    @Override
    public void restore(Version[] versions, boolean removeExisting) throws RepositoryException {
        // Version restore is fully handled by the version manager.
        getVersionManager().restore(versions, removeExisting);
    }
    @Override
    public LockManagerImpl getLockManager() {
        // A fresh lock manager is created per call, bound to this session context.
        return new LockManagerImpl(sessionContext);
    }
    @Override
    public QueryManager getQueryManager() throws RepositoryException {
        // Fails with RepositoryException once the owning session has been closed.
        ensureIsAlive();
        return queryManager;
    }
    @Override
    public NamespaceRegistry getNamespaceRegistry() {
        // Returns a fresh read/write registry view per call. Writes go through a
        // new latest root from the content session, keeping them separate from
        // this session's transient changes.
        return new ReadWriteNamespaceRegistry(sessionDelegate.getRoot()) {
            @Override
            protected Root getWriteRoot() {
                return sessionDelegate.getContentSession().getLatestRoot();
            }
            @Override
            protected void refresh() throws RepositoryException {
                // Make registry changes visible to this session.
                getSession().refresh(true);
            }
        };
    }
    @Override
    public ReadWriteNodeTypeManager getNodeTypeManager() {
        // Shared, lazily-unchanging node type manager instance.
        return nodeTypeManager;
    }
    @Override
    public ObservationManager getObservationManager() throws RepositoryException {
        // Fails with RepositoryException once the owning session has been closed.
        ensureIsAlive();
        return sessionContext.getObservationManager();
    }
    @Override
    public VersionManager getVersionManager() throws RepositoryException {
        // Fails with RepositoryException once the owning session has been closed.
        ensureIsAlive();
        return versionManager;
    }
    @Override
    public String[] getAccessibleWorkspaceNames() throws RepositoryException {
        ensureIsAlive();
        // FIXME: adjust implementation once OAK-118 is being addressed.
        // Until multiple workspaces exist, only this workspace is reported.
        return new String[]{getName()};
    }
    @Override
    public ContentHandler getImportContentHandler(String parentAbsPath, int uuidBehavior) throws RepositoryException {
        ensureIsAlive();
        // The trailing boolean presumably marks this as a workspace (persisted)
        // import rather than a session import — TODO confirm against ImportHandler.
        return new ImportHandler(parentAbsPath, sessionContext, uuidBehavior, true);
    }
    @Override
    public void importXML(String parentAbsPath, InputStream in, int uuidBehavior) throws IOException, RepositoryException {
        ensureIsAlive();
        try {
            ContentHandler handler = getImportContentHandler(parentAbsPath, uuidBehavior);
            new ParsingContentHandler(handler).parse(in);
        } catch (SAXException e) {
            // Unwrap the SAX exception: rethrow known nested causes with their
            // original type; anything else is treated as a genuine parse error.
            Throwable exception = e.getException();
            if (exception instanceof RepositoryException) {
                throw (RepositoryException) exception;
            } else if (exception instanceof IOException) {
                throw (IOException) exception;
            } else if (exception instanceof CommitFailedException) {
                throw ((CommitFailedException) exception).asRepositoryException();
            } else {
                throw new InvalidSerializedDataException("XML parse error", e);
            }
        } finally {
            // JCR-2903: always close the supplied stream, whatever the outcome.
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignore) {
                    // Best-effort close; must not mask the primary exception.
                }
            }
        }
    }
    @Override
    public void createWorkspace(String name) throws RepositoryException {
        ensureIsAlive();
        // Multi-workspace support is tracked by OAK-118; not implemented yet.
        throw new UnsupportedRepositoryOperationException("OAK-118: Workspace.createWorkspace");
    }
    @Override
    public void createWorkspace(String name, String srcWorkspace) throws RepositoryException {
        ensureIsAlive();
        // Multi-workspace support is tracked by OAK-118; not implemented yet.
        throw new UnsupportedRepositoryOperationException("OAK-118: Workspace.createWorkspace");
    }
    @Override
    public void deleteWorkspace(String name) throws RepositoryException {
        ensureIsAlive();
        // Multi-workspace support is tracked by OAK-118; not implemented yet.
        throw new UnsupportedRepositoryOperationException("OAK-118: Workspace.deleteWorkspace");
    }
//------------------------------------------------< JackrabbitWorkspace >---
    @Override
    public void createWorkspace(String workspaceName, InputSource workspaceTemplate) throws RepositoryException {
        ensureIsAlive();
        // Jackrabbit API variant; blocked on the same OAK-118 work as the others.
        throw new UnsupportedRepositoryOperationException("OAK-118: Workspace.createWorkspace");
    }
    /**
     * Returns the privilege manager associated with this workspace's session.
     *
     * @see org.apache.jackrabbit.api.JackrabbitWorkspace#getPrivilegeManager()
     */
    @Override
    public PrivilegeManager getPrivilegeManager() throws RepositoryException {
        return sessionContext.getPrivilegeManager();
    }
//------------------------------------------------------------< private >---
    /**
     * Guard used by most workspace operations: rejects calls made after the
     * owning session has been logged out.
     *
     * @throws RepositoryException if the session is no longer alive
     */
    private void ensureIsAlive() throws RepositoryException {
        // check session status
        if (!sessionDelegate.isAlive()) {
            throw new RepositoryException("This session has been closed.");
        }
    }
}
| apache-2.0 |
oneliang/third-party-lib | proguard/proguard/io/DirectoryWriter.java | 4622 | /*
* ProGuard -- shrinking, optimization, obfuscation, and preverification
* of Java bytecode.
*
* Copyright (c) 2002-2014 Eric Lafortune (eric@graphics.cornell.edu)
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package proguard.io;
import proguard.classfile.ClassConstants;
import java.io.*;
/**
 * This DataEntryWriter writes data entries to individual files in a given
 * directory, or — in single-file mode — appends everything to one file.
 *
 * @author Eric Lafortune
 */
public class DirectoryWriter implements DataEntryWriter
{
    private final File    baseFile;
    private final boolean isFile;

    // State for the entry currently being written.
    private File         currentFile;
    private OutputStream currentOutputStream;
    private Finisher     currentFinisher;


    /**
     * Creates a new DirectoryWriter.
     * @param baseFile the base directory to which all files will be written,
     *                 or the single target file in single-file mode.
     * @param isFile   specifies whether <code>baseFile</code> is a single
     *                 output file rather than a directory.
     */
    public DirectoryWriter(File    baseFile,
                           boolean isFile)
    {
        this.baseFile = baseFile;
        this.isFile   = isFile;
    }


    // Implementations for DataEntryWriter.

    public boolean createDirectory(DataEntry dataEntry) throws IOException
    {
        // Should we close the current file?
        if (!isFile &&
            currentFile != null)
        {
            closeEntry();
        }

        File directory = getFile(dataEntry);
        if (!directory.exists() &&
            !directory.mkdirs())
        {
            throw new IOException("Can't create directory [" + directory.getPath() + "]");
        }

        return true;
    }


    public OutputStream getOutputStream(DataEntry dataEntry) throws IOException
    {
        return getOutputStream(dataEntry, null);
    }


    public OutputStream getOutputStream(DataEntry  dataEntry,
                                        Finisher   finisher) throws IOException
    {
        File file = getFile(dataEntry);

        // Should we close the current file? In single-file mode the same
        // stream is reused for all entries.
        if (!isFile &&
            currentFile != null &&
            !currentFile.equals(file))
        {
            closeEntry();
        }

        // Do we need a new stream?
        if (currentOutputStream == null)
        {
            // Make sure the parent directories exist.
            File parentDirectory = file.getParentFile();
            if (parentDirectory != null &&
                !parentDirectory.exists() &&
                !parentDirectory.mkdirs())
            {
                throw new IOException("Can't create directory [" + parentDirectory.getPath() + "]");
            }

            // Open a new output stream for writing to the file.
            currentOutputStream =
                new BufferedOutputStream(
                new FileOutputStream(file));

            currentFinisher = finisher;
            currentFile     = file;
        }

        return currentOutputStream;
    }


    public void close() throws IOException
    {
        // Close the file stream, if any.
        closeEntry();
    }


    // Small utility methods.

    /**
     * Returns the file for the given data entry.
     */
    private File getFile(DataEntry dataEntry)
    {
        // Use the specified file, or construct a new file.
        return isFile ?
            baseFile :
            new File(baseFile,
                     dataEntry.getName().replace(ClassConstants.PACKAGE_SEPARATOR,
                                                 File.separatorChar));
    }


    /**
     * Closes the previous file, if any.
     */
    private void closeEntry() throws IOException
    {
        if (currentOutputStream != null)
        {
            try
            {
                // Let any finisher finish up first.
                if (currentFinisher != null)
                {
                    currentFinisher.finish();
                }
            }
            finally
            {
                // Always close the stream and reset the state, even if the
                // finisher throws; otherwise the file handle would leak and a
                // later entry would write into a stale stream.
                currentFinisher = null;
                OutputStream outputStream = currentOutputStream;
                currentOutputStream = null;
                currentFile         = null;
                outputStream.close();
            }
        }
    }
}
| apache-2.0 |
apache/axis2-java | modules/integration/test/org/apache/axis2/integration/UtilServer.java | 9251 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.axis2.integration;
import junit.framework.TestCase;
import org.apache.axis2.AxisFault;
import org.apache.axis2.Constants;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.context.ConfigurationContextFactory;
import org.apache.axis2.context.ServiceContext;
import org.apache.axis2.context.ServiceGroupContext;
import org.apache.axis2.deployment.DeploymentEngine;
import org.apache.axis2.description.AxisModule;
import org.apache.axis2.description.AxisService;
import org.apache.axis2.description.TransportInDescription;
import org.apache.axis2.engine.ListenerManager;
import org.apache.axis2.testutils.PortAllocator;
import org.apache.axis2.transport.http.SimpleHTTPServer;
import javax.xml.namespace.QName;
import java.io.File;
import java.io.FilenameFilter;
public class UtilServer {
private static SimpleHTTPServer receiver;
public static final int TESTING_PORT = PortAllocator.allocatePort();
public static final String FAILURE_MESSAGE = "Intentional Failure";
public static synchronized void deployService(AxisService service)
throws AxisFault {
receiver.getConfigurationContext().getAxisConfiguration().addService(
service);
}
public static synchronized void unDeployService(QName service)
throws AxisFault {
receiver.getConfigurationContext().getAxisConfiguration()
.removeService(service.getLocalPart());
}
public static synchronized void unDeployClientService() throws AxisFault {
if (receiver.getConfigurationContext().getAxisConfiguration() != null) {
receiver.getConfigurationContext().getAxisConfiguration()
.removeService("AnonymousService");
}
}
public static synchronized void start() throws Exception {
start(TestingUtils.prefixBaseDirectory(Constants.TESTING_REPOSITORY));
}
public static synchronized void start(String repository) throws Exception {
if (receiver != null) {
throw new IllegalStateException("Server already running");
}
ConfigurationContext er = getNewConfigurationContext(repository);
receiver = new SimpleHTTPServer(er, TESTING_PORT);
try {
receiver.start();
ListenerManager listenerManager = er.getListenerManager();
TransportInDescription trsIn = new TransportInDescription(Constants.TRANSPORT_HTTP);
trsIn.setReceiver(receiver);
if (listenerManager == null) {
listenerManager = new ListenerManager();
listenerManager.init(er);
}
listenerManager.addListener(trsIn, true);
System.out.print("Server started on port "
+ TESTING_PORT + ".....");
} catch (Exception e) {
e.printStackTrace();
}
}
public static ConfigurationContext getNewConfigurationContext(
String repository) throws Exception {
File file = new File(repository);
if (!file.exists()) {
throw new Exception("repository directory "
+ file.getAbsolutePath() + " does not exists");
}
return ConfigurationContextFactory
.createConfigurationContextFromFileSystem(file.getAbsolutePath(),
file.getAbsolutePath() +
"/conf/axis2.xml");
}
public static ConfigurationContext getNewConfigurationContext(
String repository, String axis2xml) throws Exception {
File file = new File(TestingUtils.prefixBaseDirectory(repository));
if (!file.exists()) {
throw new Exception("repository directory "
+ file.getAbsolutePath() + " does not exists");
}
return ConfigurationContextFactory
.createConfigurationContextFromFileSystem(file.getAbsolutePath(),
axis2xml);
}
public static synchronized void stop() throws AxisFault {
if (receiver == null) {
throw new IllegalStateException();
}
receiver.stop();
while (receiver.isRunning()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e1) {
//nothing to do here
}
}
// tp.doStop();
System.out.print("Server stopped .....");
receiver.getConfigurationContext().terminate();
receiver = null;
}
public static ConfigurationContext getConfigurationContext() {
return receiver.getConfigurationContext();
}
public static ServiceContext createAdressedEnabledClientSide(
AxisService service) throws AxisFault {
File file = getAddressingMARFile();
TestCase.assertTrue(file.exists());
ConfigurationContext configContext = ConfigurationContextFactory
.createConfigurationContextFromFileSystem(
TestingUtils.prefixBaseDirectory("target/test-resources/integrationRepo"), null);
AxisModule axisModule = DeploymentEngine.buildModule(file,
configContext.getAxisConfiguration());
configContext.getAxisConfiguration().addModule(axisModule);
configContext.getAxisConfiguration().addService(service);
ServiceGroupContext serviceGroupContext =
configContext.createServiceGroupContext(service.getAxisServiceGroup());
return serviceGroupContext.getServiceContext(service);
}
static class AddressingFilter implements FilenameFilter {
public boolean accept(File dir, String name) {
return name.startsWith("addressing") && name.endsWith(".mar");
}
}
private static File getAddressingMARFile() {
File dir = new File(TestingUtils.prefixBaseDirectory(Constants.TESTING_REPOSITORY + "/modules"));
File[] files = dir.listFiles(new AddressingFilter());
TestCase.assertTrue(files.length == 1);
File file = files[0];
TestCase.assertTrue(file.exists());
return file;
}
public static ConfigurationContext createClientConfigurationContext() throws AxisFault {
File file = getAddressingMARFile();
TestCase.assertTrue(file.exists());
ConfigurationContext configContext =
ConfigurationContextFactory.createConfigurationContextFromFileSystem(
TestingUtils.prefixBaseDirectory(Constants.TESTING_PATH + "/integrationRepo"),
TestingUtils.prefixBaseDirectory(Constants.TESTING_PATH + "/integrationRepo/conf/axis2.xml"));
AxisModule axisModule = DeploymentEngine.buildModule(file,
configContext.getAxisConfiguration());
configContext.getAxisConfiguration().addModule(axisModule);
return configContext;
}
public static void engageAddressingModule() throws AxisFault {
File file = getAddressingMARFile();
AxisModule axisModule = DeploymentEngine.buildModule(file,
receiver.getConfigurationContext().getAxisConfiguration());
receiver.getConfigurationContext().getAxisConfiguration().engageModule(axisModule);
}
public static ConfigurationContext createClientConfigurationContext(String repo)
throws AxisFault {
return ConfigurationContextFactory.createConfigurationContextFromFileSystem(
repo,
repo + "/conf/axis2.xml");
}
public static ServiceContext createAdressedEnabledClientSide(
AxisService service, String clientHome) throws AxisFault {
File file = getAddressingMARFile();
TestCase.assertTrue(file.exists());
ConfigurationContext configContext = ConfigurationContextFactory
.createConfigurationContextFromFileSystem(clientHome, null);
AxisModule axisModule = DeploymentEngine.buildModule(file,
configContext.getAxisConfiguration());
configContext.getAxisConfiguration().addModule(axisModule);
// sysContext.getAxisConfiguration().engageModule(moduleDesc.getName());
configContext.getAxisConfiguration().addService(service);
ServiceGroupContext serviceGroupContext =
configContext.createServiceGroupContext(service.getAxisServiceGroup());
return serviceGroupContext.getServiceContext(service);
}
}
| apache-2.0 |
harinigunabalan/PerformanceHat | cw-feedback-handler/src/main/java/eu/cloudwave/wp5/feedbackhandler/model/db/DbProcedureExecution.java | 1508 | /*******************************************************************************
* Copyright 2015 Software Evolution and Architecture Lab, University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package eu.cloudwave.wp5.feedbackhandler.model.db;
import org.springframework.data.mongodb.core.mapping.Document;
import eu.cloudwave.wp5.common.model.ProcedureExecution;
import eu.cloudwave.wp5.feedbackhandler.constants.DbTableNames;
/**
* MongoDB-specific extension of {@link ProcedureExecution}.
*
* Extends the {@link ProcedureExecution} class with an additional attribute specifying the application the procedure
* belongs to. This is only used on the server and therefore not exposed in the shared interface.
*/
@Document(collection = DbTableNames.PROCEDURE_EXECUTIONS)
public interface DbProcedureExecution extends ProcedureExecution, DbEntity {

  /**
   * Returns the application this procedure execution belongs to. Server-side
   * only; deliberately not exposed in the shared {@code ProcedureExecution}
   * interface (see class Javadoc).
   */
  public DbApplication getApplication();
}
| apache-2.0 |
kuainiao/MultimediaDesktop | Server/src/main/java/com/wms/studio/service/LoginIpServiceImpl.java | 4506 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.wms.studio.service;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.annotation.Resource;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.transaction.Transactional;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import com.wms.studio.annotations.HandlerPoint;
import com.wms.studio.api.dto.PageDto;
import com.wms.studio.api.dto.page.PageSize;
import com.wms.studio.api.dto.user.LoginIpDto;
import com.wms.studio.api.dto.user.LoginIpInfoDto;
import com.wms.studio.api.service.LoginIpService;
import com.wms.studio.api.utils.StringUtils;
import com.wms.studio.covert.EntityConvertInterface;
import com.wms.studio.entity.LoginIp;
import com.wms.studio.entity.User;
import com.wms.studio.repository.LoginIpRepository;
import com.wms.studio.repository.UserRepository;
/**
*
* @author WMS
*
*/
@Service("loginIpService")
public class LoginIpServiceImpl implements LoginIpService {
	private static final Logger log = Logger
			.getLogger(LoginIpServiceImpl.class);
	@Resource
	private LoginIpRepository loginIpRepository;
	@Resource
	private UserRepository userRepository;
	// Converter from LoginIp entities to their DTO representation.
	@Resource
	@Qualifier("loginIpConvert")
	private EntityConvertInterface<LoginIp, LoginIpInfoDto> loginIpConvert;
	public void setLoginIpRepository(LoginIpRepository loginIpRepository) {
		this.loginIpRepository = loginIpRepository;
	}
	public void setLoginIpConvert(
			EntityConvertInterface<LoginIp, LoginIpInfoDto> loginIpConvert) {
		this.loginIpConvert = loginIpConvert;
	}
	/**
	 * Records a login event for a user and updates the user's last-login
	 * timestamp. Invalid input (missing user id, IP address or login type) is
	 * logged and silently ignored rather than raising an exception.
	 */
	@Transactional
	@Override
	@HandlerPoint(handlerName = "loginHandler")
	public void addLoginIp(LoginIpDto loginIpdto) {
		// Validate the incoming DTO; reject and log when required fields are missing.
		if (loginIpdto == null || StringUtils.isBlank(loginIpdto.getUserId())
				|| StringUtils.isBlank(loginIpdto.getIpaddress())
				|| loginIpdto.getLoginType() == null) {
			log.error("[登录记录]-[登录记录数据校验失败]-[校验数据为:]"
					+ ToStringBuilder.reflectionToString(loginIpdto));
			return;
		}
		LoginIp loginIp = new LoginIp(new User(loginIpdto.getUserId()),
				loginIpdto.getIpaddress(), loginIpdto.getLoginType());
		loginIpRepository.save(loginIp);
		// Keep the user's last-login date in sync with the recorded event.
		userRepository.updateUserLastLoginDate(loginIpdto.getUserId(),
				loginIp.getLoginTime());
	}
	/**
	 * Pages through login records, optionally filtered by user id and a
	 * [start, end] time window. Null filters are simply skipped; a null
	 * pageSize falls back to the PageSize defaults.
	 */
	@Override
	@org.springframework.transaction.annotation.Transactional(propagation = Propagation.NOT_SUPPORTED, readOnly = true)
	public PageDto<LoginIpInfoDto> findBy(final String userId,
			final Date start, final Date end, PageSize pageSize) {
		if (pageSize == null) {
			pageSize = new PageSize();
		}
		// Build a dynamic JPA criteria query: each non-null filter contributes
		// one predicate, combined with AND.
		return loginIpConvert
				.covertToDto(loginIpRepository.findAll(
						new Specification<LoginIp>() {
							@Override
							public Predicate toPredicate(Root<LoginIp> root,
									CriteriaQuery<?> query, CriteriaBuilder cb) {
								List<Predicate> pres = new ArrayList<>(3);
								if (start != null) {
									pres.add(cb.greaterThanOrEqualTo(
											root.get("loginTime")
													.as(Date.class), start));
								}
								if (end != null) {
									pres.add(cb.lessThanOrEqualTo(
											root.get("loginTime")
													.as(Date.class), end));
								}
								if (!StringUtils.isBlank(userId)) {
									pres.add(cb.equal(
											root.get("user").as(User.class),
											new User(userId)));
								}
								Predicate[] p = new Predicate[pres.size()];
								return cb.and(pres.toArray(p));
							}
						},
						// PageRequest is zero-based; the API presumably uses
						// one-based page numbers — TODO confirm PageSize default.
						new PageRequest(pageSize.getPage() - 1, pageSize
								.getLimit())));
	}
}
| apache-2.0 |