repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
skylot/jadx | jadx-core/src/test/java/jadx/tests/integration/conditions/TestComplexIf.java | 1178 | package jadx.tests.integration.conditions;
import org.junit.jupiter.api.Test;
import jadx.core.dex.nodes.ClassNode;
import jadx.tests.api.SmaliTest;
import static jadx.tests.api.utils.JadxMatchers.containsOne;
import static org.hamcrest.MatcherAssert.assertThat;
public class TestComplexIf extends SmaliTest {
    // @formatter:off
    /*
        Reference decompilation target:

        public final class TestComplexIf {
            private String a;
            private int b;
            private float c;
            public final boolean test() {
                if (this.a.equals("GT-P6200") || this.a.equals("GT-P6210") || ... ) {
                    return true;
                }
                if (this.a.equals("SM-T810") || this.a.equals("SM-T813") || ...) {
                    return false;
                }
                return this.c > 160.0f ? true : this.c <= 0.0f && ((this.b & 15) == 4 ? 1 : null) != null;
            }
        }
    */
    // @formatter:on

    @Test
    public void test() {
        // Decompile the smali fixture and verify the chained OR condition is
        // reconstructed as a single if statement.
        ClassNode cls = getClassNodeFromSmaliWithPkg("conditions", "TestComplexIf");
        String code = cls.getCode().toString();
        String expectedCondition = "if (this.a.equals(\"GT-P6200\") || this.a.equals(\"GT-P6210\") || this.a.equals(\"A100\") "
                + "|| this.a.equals(\"A101\") || this.a.equals(\"LIFETAB_S786X\") || this.a.equals(\"VS890 4G\")) {";
        assertThat(code, containsOne(expectedCondition));
    }
}
| apache-2.0 |
dilini-mampitiya/product-pc | modules/components/analytics/core/org.wso2.carbon.pc.analytics.core/src/main/java/org/wso2/carbon/pc/analytics/core/clients/AnalyticsRestClient.java | 3671 | /**
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.pc.analytics.core.clients;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.StringRequestEntity;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.pc.analytics.core.utils.AnalyticsUtils;
import org.wso2.carbon.registry.core.utils.RegistryUtils;
import javax.xml.stream.XMLStreamException;
/**
* AnalyticsRestClient class is used to invoke the DAS REST API
*/
public class AnalyticsRestClient {

    private static final Log log = LogFactory.getLog(AnalyticsRestClient.class);

    /**
     * Sends a POST request with a JSON payload to a DAS REST web service.
     *
     * @param url     the URL locating the web service functionality
     * @param message the request message that needs to be sent to the web service
     * @return the response body as a String, or {@code null} if an I/O error
     *         occurred while executing the request (the error is logged)
     * @throws IOException        if the request entity cannot be encoded
     * @throws XMLStreamException declared for API compatibility with callers
     * @throws RuntimeException   if the service responds with a non-200 status code
     */
    public static String post(String url, String message) throws IOException, XMLStreamException {
        // The DAS endpoint is typically secured; configure the trust store
        // system properties before opening the connection.
        RegistryUtils.setTrustStoreSystemProperties();
        HttpClient httpClient = new HttpClient();
        PostMethod postRequest = new PostMethod(url);
        postRequest.setRequestHeader("Authorization", AnalyticsUtils.getAuthorizationHeader());
        BufferedReader br = null;
        try {
            StringRequestEntity input =
                    new StringRequestEntity(message, "application/json", "UTF-8");
            postRequest.setRequestEntity(input);
            int returnCode = httpClient.executeMethod(postRequest);
            if (returnCode != HttpStatus.SC_OK) {
                String errorCode = "Failed : HTTP error code : " + returnCode;
                throw new RuntimeException(errorCode);
            }
            InputStreamReader reader =
                    new InputStreamReader((postRequest.getResponseBodyAsStream()));
            br = new BufferedReader(reader);
            String output = null;
            StringBuilder totalOutput = new StringBuilder();
            if (log.isDebugEnabled()) {
                log.debug("Output from Server .... \n");
            }
            while ((output = br.readLine()) != null) {
                totalOutput.append(output);
            }
            if (log.isDebugEnabled()) {
                log.debug("Output = " + totalOutput.toString());
            }
            return totalOutput.toString();
        } catch (UnsupportedEncodingException e) {
            String errMsg = "Async DAS client unsupported encoding exception.";
            // FIX: chain the original exception instead of discarding it, so the
            // root cause and original stack trace are preserved.
            UnsupportedEncodingException wrapped = new UnsupportedEncodingException(errMsg);
            wrapped.initCause(e);
            throw wrapped;
        } catch (UnsupportedOperationException e) {
            String errMsg = "Async DAS client unsupported operation exception.";
            // FIX: chain the original exception instead of discarding it.
            throw new UnsupportedOperationException(errMsg, e);
        } catch (IOException e) {
            // Deliberate best-effort behaviour: log the failure and fall
            // through to the null return below rather than propagating it.
            String errMsg = "Async DAS client I/O exception.";
            log.error(errMsg, e);
        } finally {
            // Always release the connection back to the HttpClient and close
            // the reader (closing br also closes the wrapped stream reader).
            postRequest.releaseConnection();
            if (br != null) {
                try {
                    br.close();
                } catch (Exception e) {
                    String errMsg = "Async DAS rest client BufferedReader close exception.";
                    log.error(errMsg, e);
                }
            }
        }
        return null;
    }
}
| apache-2.0 |
flofreud/aws-sdk-java | aws-java-sdk-ecr/src/main/java/com/amazonaws/services/ecr/model/transform/AuthorizationDataJsonUnmarshaller.java | 3418 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ecr.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import java.math.*;
import java.nio.ByteBuffer;
import com.amazonaws.services.ecr.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* AuthorizationData JSON Unmarshaller
*/
public class AuthorizationDataJsonUnmarshaller implements
        Unmarshaller<AuthorizationData, JsonUnmarshallerContext> {

    /**
     * Unmarshalls a single AuthorizationData object from the JSON token stream
     * held by the given context.
     *
     * @param context the JSON unmarshalling context positioned at (or just
     *                before) the object to read
     * @return the populated AuthorizationData, or {@code null} if the current
     *         token is a JSON null
     * @throws Exception if the underlying token stream cannot be read
     */
    public AuthorizationData unmarshall(JsonUnmarshallerContext context)
            throws Exception {
        AuthorizationData authorizationData = new AuthorizationData();

        // Remember the depth where this object starts; its fields live one
        // level deeper, and the matching END_OBJECT brings us back to it.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null stands for an absent object.
        if (token == VALUE_NULL)
            return null;

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                // Each matching testExpression advances the context onto the
                // field's value token before delegating to the unmarshaller.
                if (context.testExpression("authorizationToken", targetDepth)) {
                    context.nextToken();
                    authorizationData.setAuthorizationToken(context
                            .getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("expiresAt", targetDepth)) {
                    context.nextToken();
                    authorizationData.setExpiresAt(context.getUnmarshaller(
                            java.util.Date.class).unmarshall(context));
                }
                if (context.testExpression("proxyEndpoint", targetDepth)) {
                    context.nextToken();
                    authorizationData.setProxyEndpoint(context.getUnmarshaller(
                            String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have climbed back to (or above) the depth at
                // which this object started, i.e. its closing brace is done.
                if (context.getLastParsedParentElement() == null
                        || context.getLastParsedParentElement().equals(
                                currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return authorizationData;
    }

    private static AuthorizationDataJsonUnmarshaller instance;

    /**
     * Returns the shared unmarshaller instance, creating it lazily on first use.
     * NOTE(review): lazy init is unsynchronized — presumably acceptable because
     * the unmarshaller holds no mutable state; confirm before relying on it.
     */
    public static AuthorizationDataJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new AuthorizationDataJsonUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
apache/oodt | workflow/src/main/java/org/apache/oodt/cas/workflow/cli/action/StopWorkflowInstCliAction.java | 1858 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oodt.cas.workflow.cli.action;
//OODT imports
import org.apache.oodt.cas.cli.exception.CmdLineActionException;
import org.apache.oodt.cas.workflow.system.WorkflowManagerClient;
/**
* A {@link CmdLineAction} which stops a workflow instance.
*
* @author bfoster (Brian Foster)
*/
public class StopWorkflowInstCliAction extends WorkflowCliAction {

    private String instanceId;

    /**
     * Asks the workflow manager to stop the configured workflow instance and
     * reports the outcome via the printer.
     *
     * @throws CmdLineActionException if the stop request fails or returns false
     */
    @Override
    public void execute(ActionMessagePrinter printer) throws CmdLineActionException {
        try (WorkflowManagerClient client = getClient()) {
            boolean stopped = client.stopWorkflowInstance(instanceId);
            if (!stopped) {
                throw new Exception("Stop workflow returned false");
            }
            printer.println("Successfully stopped workflow '" + instanceId + "'");
        } catch (Exception e) {
            throw new CmdLineActionException("Failed to stop workflow '"
                    + instanceId + "' : " + e.getMessage(), e);
        }
    }

    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }
}
| apache-2.0 |
shyTNT/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201508/CreativeServiceInterfacegetCreativesByStatement.java | 3073 |
package com.google.api.ads.dfp.jaxws.v201508;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
*
* Gets a {@link CreativePage} of {@link Creative} objects that satisfy the
* given {@link Statement#query}. The following fields are supported for
* filtering:
*
* <table>
* <tr>
* <th scope="col">PQL Property</th> <th scope="col">Object Property</th>
* </tr>
* <tr>
* <td>{@code id}</td>
* <td>{@link Creative#id}</td>
* </tr>
* <tr>
* <td>{@code name}</td>
* <td>{@link Creative#name}</td>
* </tr>
* <tr>
* <td>{@code advertiserId}</td>
* <td>{@link Creative#advertiserId}</td>
* </tr>
* <tr>
* <td>{@code width}</td>
* <td>{@link Creative#size}</td>
* </tr>
* <tr>
* <td>{@code height}</td>
* <td>{@link Creative#size}</td>
* </tr>
* <tr>
* <td>{@code lastModifiedDateTime}</td>
* <td>{@link Creative#lastModifiedDateTime}</td>
* </tr>
* </table>
*
* @param filterStatement a Publisher Query Language statement used to filter
* a set of creatives
* @return the creatives that match the given filter
*
*
* <p>Java class for getCreativesByStatement element declaration.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <element name="getCreativesByStatement">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="filterStatement" type="{https://www.google.com/apis/ads/publisher/v201508}Statement" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "filterStatement"
})
@XmlRootElement(name = "getCreativesByStatement")
public class CreativeServiceInterfacegetCreativesByStatement {

    // PQL filter statement; may be null (schema declares minOccurs="0").
    protected Statement filterStatement;

    /**
     * Gets the value of the filterStatement property.
     *
     * @return
     *     possible object is
     *     {@link Statement }, or {@code null} if no filter was set
     *
     */
    public Statement getFilterStatement() {
        return filterStatement;
    }

    /**
     * Sets the value of the filterStatement property.
     *
     * @param value
     *     allowed object is
     *     {@link Statement }; {@code null} clears the filter
     *
     */
    public void setFilterStatement(Statement value) {
        this.filterStatement = value;
    }
}
| apache-2.0 |
shawnkumar/cstargraph | src/java/org/apache/cassandra/io/util/FastByteArrayOutputStream.java | 8966 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.io.util;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import org.apache.cassandra.utils.ByteBufferUtil;
/*
* This file has been modified from Apache Harmony's ByteArrayOutputStream
* implementation. The synchronized methods of the original have been
* replaced by non-synchronized methods. This makes certain operations
* much FASTer, but also *not thread-safe*.
*
* This file remains formatted the same as the Apache Harmony original to
* make patching easier if any bug fixes are made to the Harmony version.
*/
/**
* A specialized {@link OutputStream} for class for writing content to an
* (internal) byte array. As bytes are written to this stream, the byte array
* may be expanded to hold more bytes. When the writing is considered to be
* finished, a copy of the byte array can be requested from the class.
*
* @see ByteArrayOutputStream
*/
public class FastByteArrayOutputStream extends OutputStream {
    /**
     * The byte array containing the bytes written.
     */
    protected byte[] buf;

    /**
     * The number of bytes written.
     */
    protected int count;

    /**
     * Constructs a new ByteArrayOutputStream with a default size of 32 bytes.
     * If more than 32 bytes are written to this instance, the underlying byte
     * array will expand.
     */
    public FastByteArrayOutputStream() {
        buf = new byte[32];
    }

    /**
     * Constructs a new {@code ByteArrayOutputStream} with a default size of
     * {@code size} bytes. If more than {@code size} bytes are written to this
     * instance, the underlying byte array will expand.
     *
     * @param size
     *            initial size for the underlying byte array, must be
     *            non-negative.
     * @throws IllegalArgumentException
     *             if {@code size < 0}.
     */
    public FastByteArrayOutputStream(int size) {
        if (size >= 0) {
            buf = new byte[size];
        } else {
            throw new IllegalArgumentException();
        }
    }

    /**
     * Closes this stream. This releases system resources used for this stream.
     *
     * @throws IOException
     *             if an error occurs while attempting to close this stream.
     */
    @Override
    public void close() throws IOException {
        /**
         * Although the spec claims "A closed stream cannot perform output
         * operations and cannot be reopened.", this implementation must do
         * nothing.
         */
        super.close();
    }

    // Grows the buffer (at least doubling its used size) so that i more bytes
    // fit, copying the existing content across. Growth is capped near
    // Integer.MAX_VALUE to stay within the maximum JVM array size.
    private void expand(int i) {
        /* Can the buffer handle @i more bytes, if not expand it */
        if (count + i <= buf.length) {
            return;
        }

        long expectedExtent = (count + i) * 2L; //long to deal with possible int overflow
        int newSize = (int) Math.min(Integer.MAX_VALUE - 8, expectedExtent); // MAX_ARRAY_SIZE
        byte[] newbuf = new byte[newSize];
        System.arraycopy(buf, 0, newbuf, 0, count);
        buf = newbuf;
    }

    /**
     * Resets this stream to the beginning of the underlying byte array. All
     * subsequent writes will overwrite any bytes previously stored in this
     * stream.
     */
    public void reset() {
        // The buffer is intentionally kept at its current capacity for reuse.
        count = 0;
    }

    /**
     * Returns the total number of bytes written to this stream so far.
     *
     * @return the number of bytes written to this stream.
     */
    public int size() {
        return count;
    }

    /**
     * Returns the contents of this ByteArrayOutputStream as a byte array. Any
     * changes made to the receiver after returning will not be reflected in the
     * byte array returned to the caller.
     *
     * @return this stream's current contents as a byte array.
     */
    public byte[] toByteArray() {
        byte[] newArray = new byte[count];
        System.arraycopy(buf, 0, newArray, 0, count);
        return newArray;
    }

    /**
     * Returns the contents of this ByteArrayOutputStream as a string. Any
     * changes made to the receiver after returning will not be reflected in the
     * string returned to the caller.
     *
     * <p>NOTE(review): this decodes with the platform default charset — callers
     * needing a specific encoding should use {@link #toString(String)}.
     *
     * @return this stream's current contents as a string.
     */
    @Override
    public String toString() {
        return new String(buf, 0, count);
    }

    /**
     * Returns the contents of this ByteArrayOutputStream as a string. Each byte
     * {@code b} in this stream is converted to a character {@code c} using the
     * following function:
     * {@code c == (char)(((hibyte & 0xff) << 8) | (b & 0xff))}. This method is
     * deprecated and either {@link #toString()} or {@link #toString(String)}
     * should be used.
     *
     * @param hibyte
     *            the high byte of each resulting Unicode character.
     * @return this stream's current contents as a string with the high byte set
     *         to {@code hibyte}.
     * @deprecated Use {@link #toString()}.
     */
    @Deprecated
    public String toString(int hibyte) {
        char[] newBuf = new char[size()];
        for (int i = 0; i < newBuf.length; i++) {
            newBuf[i] = (char) (((hibyte & 0xff) << 8) | (buf[i] & 0xff));
        }
        return new String(newBuf);
    }

    /**
     * Returns the contents of this ByteArrayOutputStream as a string converted
     * according to the encoding declared in {@code enc}.
     *
     * @param enc
     *            a string representing the encoding to use when translating
     *            this stream to a string.
     * @return this stream's current contents as an encoded string.
     * @throws UnsupportedEncodingException
     *             if the provided encoding is not supported.
     */
    public String toString(String enc) throws UnsupportedEncodingException {
        return new String(buf, 0, count, enc);
    }

    /**
     * Writes {@code count} bytes from the byte array {@code buffer} starting at
     * offset {@code index} to this stream.
     *
     * @param buffer
     *            the buffer to be written.
     * @param offset
     *            the initial position in {@code buffer} to retrieve bytes.
     * @param len
     *            the number of bytes of {@code buffer} to write.
     * @throws NullPointerException
     *             if {@code buffer} is {@code null}.
     * @throws IndexOutOfBoundsException
     *             if {@code offset < 0} or {@code len < 0}, or if
     *             {@code offset + len} is greater than the length of
     *             {@code buffer}.
     */
    @Override
    public void write(byte[] buffer, int offset, int len) {
        // avoid int overflow
        // (this.count + len < 0 catches the case where the total would wrap)
        if (offset < 0 || offset > buffer.length || len < 0
                || len > buffer.length - offset
                || this.count + len < 0) {
            throw new IndexOutOfBoundsException();
        }
        if (len == 0) {
            return;
        }

        /* Expand if necessary */
        expand(len);
        System.arraycopy(buffer, offset, buf, this.count, len);
        this.count += len;
    }

    // Writes all remaining bytes of the given ByteBuffer into this stream.
    // NOTE(review): whether the buffer's position is advanced depends on
    // ByteBufferUtil.arrayCopy — confirm against its implementation.
    public void write(ByteBuffer buffer)
    {
        int len = buffer.remaining();
        expand(len);
        ByteBufferUtil.arrayCopy(buffer, buffer.position(), buf, this.count, len);
        this.count += len;
    }

    /**
     * Writes the specified byte {@code oneByte} to the OutputStream. Only the
     * low order byte of {@code oneByte} is written.
     *
     * @param oneByte
     *            the byte to be written.
     */
    @Override
    public void write(int oneByte) {
        if (count == buf.length) {
            expand(1);
        }
        buf[count++] = (byte) oneByte;
    }

    /**
     * Takes the contents of this stream and writes it to the output stream
     * {@code out}.
     *
     * @param out
     *            an OutputStream on which to write the contents of this stream.
     * @throws IOException
     *             if an error occurs while writing to {@code out}.
     */
    public void writeTo(OutputStream out) throws IOException {
        out.write(buf, 0, count);
    }
}
| apache-2.0 |
wangda/sharding-jdbc | sharding-jdbc-core/src/test/java/com/dangdang/ddframe/rdb/sharding/router/SelectMixedTablesTest.java | 2577 | /**
* Copyright 1999-2015 dangdang.com.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package com.dangdang.ddframe.rdb.sharding.router;
import java.util.Arrays;
import org.junit.Test;
import com.dangdang.ddframe.rdb.sharding.exception.SQLParserException;
import com.dangdang.ddframe.rdb.sharding.exception.ShardingJdbcException;
public final class SelectMixedTablesTest extends AbstractBaseRouteSqlTest {

    @Test
    public void assertBindingTableWithUnBoundTable() throws SQLParserException {
        // Binding tables joined with an unbound table route to a single shard.
        String inputSql = "select * from order o join order_item i join order_attr a using(order_id) where o.order_id = 1";
        assertSingleTarget(inputSql, "ds_1",
                "SELECT * FROM order_1 o JOIN order_item_1 i JOIN order_attr_b a USING (order_id) WHERE o.order_id = 1");
    }

    @Test
    public void assertConditionFromRelationship() throws SQLParserException {
        // The sharding condition is derived from the join relationship.
        String inputSql = "select * from order o join order_attr a using(order_id) where o.order_id = 1";
        assertSingleTarget(inputSql, "ds_1",
                "SELECT * FROM order_1 o JOIN order_attr_b a USING (order_id) WHERE o.order_id = 1");
    }

    @Test
    public void assertSelectWithCartesianProductAllPartitions() throws SQLParserException {
        // Without a sharding condition every partition combination is hit.
        String inputSql = "select * from order o, order_attr a";
        assertMultipleTargets(inputSql, 4, Arrays.asList("ds_0", "ds_1"),
                Arrays.asList("SELECT * FROM order_0 o, order_attr_a a", "SELECT * FROM order_1 o, order_attr_a a",
                        "SELECT * FROM order_0 o, order_attr_b a", "SELECT * FROM order_1 o, order_attr_b a"));
    }

    @Test
    public void assertSelectWithoutTableRule() throws SQLParserException {
        // A table with no rule is routed unchanged alongside the sharded one.
        String inputSql = "select * from order o join product p using(prod_id) where o.order_id = 1";
        assertSingleTarget(inputSql, "ds_1",
                "SELECT * FROM order_1 o JOIN product p USING (prod_id) WHERE o.order_id = 1");
    }

    @Test(expected = ShardingJdbcException.class)
    public void assertSelectTableWithoutRules() throws SQLParserException {
        // Selecting only rule-less tables must fail.
        assertSingleTarget("select * from aaa, bbb, ccc", null, null);
    }
}
| apache-2.0 |
dbmalkovsky/flowable-engine | modules/flowable-job-service/src/main/java/org/flowable/job/service/impl/cmd/GetJobByCorrelationIdCmd.java | 2315 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.job.service.impl.cmd;
import org.flowable.common.engine.api.FlowableIllegalArgumentException;
import org.flowable.common.engine.impl.interceptor.Command;
import org.flowable.common.engine.impl.interceptor.CommandContext;
import org.flowable.job.api.Job;
import org.flowable.job.service.JobServiceConfiguration;
/**
* @author Filip Hrisafov
*/
/**
 * @author Filip Hrisafov
 */
public class GetJobByCorrelationIdCmd implements Command<Job> {

    protected JobServiceConfiguration jobServiceConfiguration;
    protected String correlationId;

    public GetJobByCorrelationIdCmd(String correlationId, JobServiceConfiguration jobServiceConfiguration) {
        this.correlationId = correlationId;
        this.jobServiceConfiguration = jobServiceConfiguration;
    }

    /**
     * Looks up a job by correlation id, probing the dead-letter, external
     * worker, timer, suspended and plain job stores in that order. The first
     * non-null match wins; later stores are not queried once a job is found.
     */
    @Override
    public Job execute(CommandContext commandContext) {
        if (correlationId == null) {
            throw new FlowableIllegalArgumentException("correlationId is null");
        }

        Job result = jobServiceConfiguration.getDeadLetterJobEntityManager().findJobByCorrelationId(correlationId);
        if (result == null) {
            result = jobServiceConfiguration.getExternalWorkerJobEntityManager().findJobByCorrelationId(correlationId);
        }
        if (result == null) {
            result = jobServiceConfiguration.getTimerJobEntityManager().findJobByCorrelationId(correlationId);
        }
        if (result == null) {
            result = jobServiceConfiguration.getSuspendedJobEntityManager().findJobByCorrelationId(correlationId);
        }
        if (result == null) {
            result = jobServiceConfiguration.getJobEntityManager().findJobByCorrelationId(correlationId);
        }
        return result;
    }
}
| apache-2.0 |
lepdou/apollo | apollo-client/src/test/java/com/ctrip/framework/apollo/spring/config/ConfigPropertySourceTest.java | 3809 | /*
* Copyright 2021 Apollo Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.ctrip.framework.apollo.spring.config;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.ctrip.framework.apollo.Config;
import com.ctrip.framework.apollo.ConfigChangeListener;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import java.util.List;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
@RunWith(MockitoJUnitRunner.class)
public class ConfigPropertySourceTest {

    private ConfigPropertySource configPropertySource;

    @Mock
    private Config someConfig;

    @Before
    public void setUp() throws Exception {
        configPropertySource = new ConfigPropertySource("someName", someConfig);
    }

    @Test
    public void testGetPropertyNames() throws Exception {
        // The property source must expose exactly the names the Config reports.
        Set<String> expectedNames = Sets.newHashSet("somePropertyName", "anotherPropertyName");
        when(someConfig.getPropertyNames()).thenReturn(expectedNames);

        String[] actualNames = configPropertySource.getPropertyNames();

        verify(someConfig, times(1)).getPropertyNames();
        assertArrayEquals(expectedNames.toArray(), actualNames);
    }

    @Test
    public void testGetEmptyPropertyNames() throws Exception {
        when(someConfig.getPropertyNames()).thenReturn(Sets.<String>newHashSet());

        assertEquals(0, configPropertySource.getPropertyNames().length);
    }

    @Test
    public void testGetProperty() throws Exception {
        // Lookups must be delegated to the Config with a null default.
        String key = "somePropertyName";
        String value = "someValue";
        when(someConfig.getProperty(key, null)).thenReturn(value);

        assertEquals(value, configPropertySource.getProperty(key));
        verify(someConfig, times(1)).getProperty(key, null);
    }

    @Test
    public void testAddChangeListener() throws Exception {
        ConfigChangeListener firstListener = mock(ConfigChangeListener.class);
        ConfigChangeListener secondListener = mock(ConfigChangeListener.class);
        // Capture every listener the property source registers on the Config.
        final List<ConfigChangeListener> registered = Lists.newArrayList();
        doAnswer(new Answer() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {
                registered.add(invocation.getArgumentAt(0, ConfigChangeListener.class));

                return Void.class;
            }
        }).when(someConfig).addChangeListener(any(ConfigChangeListener.class));

        configPropertySource.addChangeListener(firstListener);
        configPropertySource.addChangeListener(secondListener);

        assertEquals(2, registered.size());
        assertTrue(registered.containsAll(Lists.newArrayList(firstListener, secondListener)));
    }
}
| apache-2.0 |
MikeThomsen/nifi | nifi-nar-bundles/nifi-easyrules-bundle/nifi-easyrules-service/src/main/java/org/apache/nifi/rules/engine/EasyRulesEngineService.java | 3675 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.rules.engine;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnEnabled;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.rules.Action;
import org.jeasy.rules.api.Facts;
import org.jeasy.rules.api.RuleListener;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Implementation of RulesEngineService interface
*
* @see RulesEngineService
*/
@CapabilityDescription("Defines and execute the rules stored in NiFi or EasyRules file formats for a given set of facts. Supports " +
        "rules stored as JSON or YAML file types.")
@Tags({ "rules","rules-engine","engine","actions","facts" })
public class EasyRulesEngineService extends EasyRulesEngineProvider implements RulesEngineService {

    private volatile RulesEngine rulesEngine;

    @Override
    @OnEnabled
    public void onEnabled(final ConfigurationContext context) throws InitializationException {
        super.onEnabled(context);
        // Attach a listener that routes rule outcomes to this service's
        // component logger before publishing the engine.
        EasyRulesEngine engine = (EasyRulesEngine) getRulesEngine();
        List<RuleListener> listeners = new ArrayList<>();
        listeners.add(new EasyRulesListener(getLogger()));
        engine.setRuleListeners(listeners);
        this.rulesEngine = engine;
    }

    /**
     * Return the list of actions that should be executed for a given set of facts.
     *
     * @param facts a Map of keys and fact values, as objects, that should be
     *              evaluated by the rules engine
     * @return List of Actions
     */
    @Override
    public List<Action> fireRules(Map<String, Object> facts) {
        return rulesEngine.fireRules(facts);
    }

    /** Bridges EasyRules rule lifecycle callbacks onto the NiFi component log. */
    private static class EasyRulesListener implements RuleListener {

        private ComponentLog logger;

        EasyRulesListener(ComponentLog logger) {
            this.logger = logger;
        }

        @Override
        public boolean beforeEvaluate(org.jeasy.rules.api.Rule rule, Facts facts) {
            // Never veto rule evaluation.
            return true;
        }

        @Override
        public void afterEvaluate(org.jeasy.rules.api.Rule rule, Facts facts, boolean b) {
            // Intentionally a no-op.
        }

        @Override
        public void beforeExecute(org.jeasy.rules.api.Rule rule, Facts facts) {
            // Intentionally a no-op.
        }

        @Override
        public void onSuccess(org.jeasy.rules.api.Rule rule, Facts facts) {
            logger.debug("Rules was successfully processed for: {}",new Object[]{rule.getName()});
        }

        @Override
        public void onFailure(org.jeasy.rules.api.Rule rule, Facts facts, Exception e) {
            logger.warn("Rule execution failed for: {}", new Object[]{rule.getName()}, e);
        }
    }
}
| apache-2.0 |
xtern/ignite | modules/core/src/main/java/org/apache/ignite/internal/processors/platform/client/ClientMessageParser.java | 21476 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.platform.client;
import org.apache.ignite.internal.binary.BinaryRawWriterEx;
import org.apache.ignite.internal.binary.BinaryReaderExImpl;
import org.apache.ignite.internal.binary.GridBinaryMarshaller;
import org.apache.ignite.internal.binary.streams.BinaryHeapInputStream;
import org.apache.ignite.internal.binary.streams.BinaryHeapOutputStream;
import org.apache.ignite.internal.binary.streams.BinaryInputStream;
import org.apache.ignite.internal.binary.streams.BinaryMemoryAllocator;
import org.apache.ignite.internal.processors.cache.binary.CacheObjectBinaryProcessorImpl;
import org.apache.ignite.internal.processors.odbc.ClientListenerMessageParser;
import org.apache.ignite.internal.processors.odbc.ClientListenerRequest;
import org.apache.ignite.internal.processors.odbc.ClientListenerResponse;
import org.apache.ignite.internal.processors.odbc.ClientMessage;
import org.apache.ignite.internal.processors.platform.client.binary.ClientBinaryConfigurationGetRequest;
import org.apache.ignite.internal.processors.platform.client.binary.ClientBinaryTypeGetRequest;
import org.apache.ignite.internal.processors.platform.client.binary.ClientBinaryTypeNameGetRequest;
import org.apache.ignite.internal.processors.platform.client.binary.ClientBinaryTypeNamePutRequest;
import org.apache.ignite.internal.processors.platform.client.binary.ClientBinaryTypePutRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheClearKeyRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheClearKeysRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheClearRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheContainsKeyRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheContainsKeysRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheCreateWithConfigurationRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheCreateWithNameRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheDestroyRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheGetAllRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheGetAndPutIfAbsentRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheGetAndPutRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheGetAndRemoveRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheGetAndReplaceRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheGetConfigurationRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheGetNamesRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheGetOrCreateWithConfigurationRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheGetOrCreateWithNameRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheGetRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheGetSizeRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheLocalPeekRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheNodePartitionsRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCachePartitionsRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCachePutAllRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCachePutIfAbsentRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCachePutRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheQueryContinuousRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheQueryNextPageRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheRemoveAllRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheRemoveIfEqualsRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheRemoveKeyRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheRemoveKeysRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheReplaceIfEqualsRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheReplaceRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheScanQueryRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheSqlFieldsQueryRequest;
import org.apache.ignite.internal.processors.platform.client.cache.ClientCacheSqlQueryRequest;
import org.apache.ignite.internal.processors.platform.client.cluster.ClientClusterChangeStateRequest;
import org.apache.ignite.internal.processors.platform.client.cluster.ClientClusterGetStateRequest;
import org.apache.ignite.internal.processors.platform.client.cluster.ClientClusterGroupGetNodeIdsRequest;
import org.apache.ignite.internal.processors.platform.client.cluster.ClientClusterGroupGetNodesDetailsRequest;
import org.apache.ignite.internal.processors.platform.client.cluster.ClientClusterGroupGetNodesEndpointsRequest;
import org.apache.ignite.internal.processors.platform.client.cluster.ClientClusterWalChangeStateRequest;
import org.apache.ignite.internal.processors.platform.client.cluster.ClientClusterWalGetStateRequest;
import org.apache.ignite.internal.processors.platform.client.compute.ClientExecuteTaskRequest;
import org.apache.ignite.internal.processors.platform.client.service.ClientServiceGetDescriptorRequest;
import org.apache.ignite.internal.processors.platform.client.service.ClientServiceGetDescriptorsRequest;
import org.apache.ignite.internal.processors.platform.client.service.ClientServiceInvokeRequest;
import org.apache.ignite.internal.processors.platform.client.streamer.ClientDataStreamerAddDataRequest;
import org.apache.ignite.internal.processors.platform.client.streamer.ClientDataStreamerStartRequest;
import org.apache.ignite.internal.processors.platform.client.tx.ClientTxEndRequest;
import org.apache.ignite.internal.processors.platform.client.tx.ClientTxStartRequest;
/**
* Thin client message parser.
*/
/**
 * Thin client message parser.
 *
 * <p>Maps the numeric op code at the head of every thin-client message onto the
 * request class that knows how to read the rest of the payload. Op codes are
 * grouped by functional area (cache ops start at 1000, queries at 2000, binary
 * metadata at 3000, and so on); the groups below mirror the thin client
 * protocol specification.
 */
public class ClientMessageParser implements ClientListenerMessageParser {
    /* General-purpose operations. */

    /** */
    private static final short OP_RESOURCE_CLOSE = 0;

    /** */
    private static final short OP_HEARTBEAT = 1;

    /** */
    private static final short OP_GET_IDLE_TIMEOUT = 2;

    /* Cache operations. */

    /** */
    private static final short OP_CACHE_GET = 1000;

    /** */
    private static final short OP_CACHE_PUT = 1001;

    /** */
    private static final short OP_CACHE_PUT_IF_ABSENT = 1002;

    /** */
    private static final short OP_CACHE_GET_ALL = 1003;

    /** */
    private static final short OP_CACHE_PUT_ALL = 1004;

    /** */
    private static final short OP_CACHE_GET_AND_PUT = 1005;

    /** */
    private static final short OP_CACHE_GET_AND_REPLACE = 1006;

    /** */
    private static final short OP_CACHE_GET_AND_REMOVE = 1007;

    /** */
    private static final short OP_CACHE_GET_AND_PUT_IF_ABSENT = 1008;

    /** */
    private static final short OP_CACHE_REPLACE = 1009;

    /** */
    private static final short OP_CACHE_REPLACE_IF_EQUALS = 1010;

    /** */
    private static final short OP_CACHE_CONTAINS_KEY = 1011;

    /** */
    private static final short OP_CACHE_CONTAINS_KEYS = 1012;

    /** */
    private static final short OP_CACHE_CLEAR = 1013;

    /** */
    private static final short OP_CACHE_CLEAR_KEY = 1014;

    /** */
    private static final short OP_CACHE_CLEAR_KEYS = 1015;

    /** */
    private static final short OP_CACHE_REMOVE_KEY = 1016;

    /** */
    private static final short OP_CACHE_REMOVE_IF_EQUALS = 1017;

    /** */
    private static final short OP_CACHE_REMOVE_KEYS = 1018;

    /** */
    private static final short OP_CACHE_REMOVE_ALL = 1019;

    /** */
    private static final short OP_CACHE_GET_SIZE = 1020;

    /** */
    private static final short OP_CACHE_LOCAL_PEEK = 1021;

    /* Cache create / destroy, configuration. */

    /** */
    private static final short OP_CACHE_GET_NAMES = 1050;

    /** */
    private static final short OP_CACHE_CREATE_WITH_NAME = 1051;

    /** */
    private static final short OP_CACHE_GET_OR_CREATE_WITH_NAME = 1052;

    /** */
    private static final short OP_CACHE_CREATE_WITH_CONFIGURATION = 1053;

    /** */
    private static final short OP_CACHE_GET_OR_CREATE_WITH_CONFIGURATION = 1054;

    /** */
    private static final short OP_CACHE_GET_CONFIGURATION = 1055;

    /** */
    private static final short OP_CACHE_DESTROY = 1056;

    /* Cache service info. */

    /** Deprecated since 1.3.0. Replaced by OP_CACHE_PARTITIONS. */
    private static final short OP_CACHE_NODE_PARTITIONS = 1100;

    /** */
    private static final short OP_CACHE_PARTITIONS = 1101;

    /* Query operations. */

    /** */
    private static final short OP_QUERY_SCAN = 2000;

    /** */
    private static final short OP_QUERY_SCAN_CURSOR_GET_PAGE = 2001;

    /** */
    private static final short OP_QUERY_SQL = 2002;

    /** */
    private static final short OP_QUERY_SQL_CURSOR_GET_PAGE = 2003;

    /** */
    private static final short OP_QUERY_SQL_FIELDS = 2004;

    /** */
    private static final short OP_QUERY_SQL_FIELDS_CURSOR_GET_PAGE = 2005;

    /** */
    private static final short OP_QUERY_CONTINUOUS = 2006;

    /** */
    public static final short OP_QUERY_CONTINUOUS_EVENT_NOTIFICATION = 2007;

    /* Binary metadata operations. */

    /** */
    private static final short OP_BINARY_TYPE_NAME_GET = 3000;

    /** */
    private static final short OP_BINARY_TYPE_NAME_PUT = 3001;

    /** */
    private static final short OP_BINARY_TYPE_GET = 3002;

    /** */
    private static final short OP_BINARY_TYPE_PUT = 3003;

    /** */
    private static final short OP_BINARY_CONFIGURATION_GET = 3004;

    /** Start new transaction. */
    private static final short OP_TX_START = 4000;

    /** Commit transaction. */
    private static final short OP_TX_END = 4001;

    /* Cluster operations. */

    /** */
    private static final short OP_CLUSTER_GET_STATE = 5000;

    /** */
    private static final short OP_CLUSTER_CHANGE_STATE = 5001;

    /** */
    private static final short OP_CLUSTER_CHANGE_WAL_STATE = 5002;

    /** */
    private static final short OP_CLUSTER_GET_WAL_STATE = 5003;

    /** */
    private static final short OP_CLUSTER_GROUP_GET_NODE_IDS = 5100;

    /** */
    private static final short OP_CLUSTER_GROUP_GET_NODE_INFO = 5101;

    /** */
    private static final short OP_CLUSTER_GROUP_GET_NODE_ENDPOINTS = 5102;

    /* Compute operations. */

    /** */
    private static final short OP_COMPUTE_TASK_EXECUTE = 6000;

    /** */
    public static final short OP_COMPUTE_TASK_FINISHED = 6001;

    /** Service invocation. */
    private static final short OP_SERVICE_INVOKE = 7000;

    /** Get service descriptors. */
    private static final short OP_SERVICE_GET_DESCRIPTORS = 7001;

    /** Get service descriptor. */
    private static final short OP_SERVICE_GET_DESCRIPTOR = 7002;

    /* Data streamers. */

    /** */
    private static final short OP_DATA_STREAMER_START = 8000;

    /** */
    private static final short OP_DATA_STREAMER_ADD_DATA = 8001;

    /** Marshaller. */
    private final GridBinaryMarshaller marsh;

    /** Client connection context. */
    private final ClientConnectionContext ctx;

    /** Client protocol context. */
    private final ClientProtocolContext protocolCtx;

    /**
     * @param ctx Client connection context.
     * @param protocolCtx Client protocol context (negotiated protocol features).
     */
    ClientMessageParser(ClientConnectionContext ctx, ClientProtocolContext protocolCtx) {
        assert ctx != null;
        assert protocolCtx != null;

        this.ctx = ctx;
        this.protocolCtx = protocolCtx;

        CacheObjectBinaryProcessorImpl cacheObjProc = (CacheObjectBinaryProcessorImpl)ctx.kernalContext().cacheObjects();
        marsh = cacheObjProc.marshaller();
    }

    /** {@inheritDoc} */
    @Override public ClientListenerRequest decode(ClientMessage msg) {
        assert msg != null;

        BinaryInputStream inStream = new BinaryHeapInputStream(msg.payload());

        // skipHdrCheck must be true (we have 103 op code).
        BinaryReaderExImpl reader = new BinaryReaderExImpl(marsh.context(), inStream,
            null, null, true, true);

        return decode(reader);
    }

    /**
     * Decodes the request.
     *
     * <p>Reads the op code from the stream and dispatches to the matching request
     * constructor, which consumes the remainder of the payload. Unknown op codes
     * produce a {@link ClientRawRequest} carrying an INVALID_OP_CODE status.
     *
     * @param reader Reader.
     * @return Request.
     */
    public ClientListenerRequest decode(BinaryReaderExImpl reader) {
        short opCode = reader.readShort();

        switch (opCode) {
            case OP_CACHE_GET:
                return new ClientCacheGetRequest(reader);

            case OP_BINARY_TYPE_NAME_GET:
                return new ClientBinaryTypeNameGetRequest(reader);

            case OP_BINARY_TYPE_GET:
                return new ClientBinaryTypeGetRequest(reader);

            case OP_CACHE_PUT:
                return new ClientCachePutRequest(reader);

            case OP_BINARY_TYPE_NAME_PUT:
                return new ClientBinaryTypeNamePutRequest(reader);

            case OP_BINARY_TYPE_PUT:
                return new ClientBinaryTypePutRequest(reader);

            case OP_BINARY_CONFIGURATION_GET:
                return new ClientBinaryConfigurationGetRequest(reader);

            case OP_QUERY_SCAN:
                return new ClientCacheScanQueryRequest(reader);

            // All query cursor types share the same "next page" request format.
            case OP_QUERY_SCAN_CURSOR_GET_PAGE:
            case OP_QUERY_SQL_CURSOR_GET_PAGE:
            case OP_QUERY_SQL_FIELDS_CURSOR_GET_PAGE:
                return new ClientCacheQueryNextPageRequest(reader);

            case OP_RESOURCE_CLOSE:
                return new ClientResourceCloseRequest(reader);

            case OP_HEARTBEAT:
                return new ClientRequest(reader);

            case OP_GET_IDLE_TIMEOUT:
                return new ClientGetIdleTimeoutRequest(reader);

            case OP_CACHE_CONTAINS_KEY:
                return new ClientCacheContainsKeyRequest(reader);

            case OP_CACHE_CONTAINS_KEYS:
                return new ClientCacheContainsKeysRequest(reader);

            case OP_CACHE_GET_ALL:
                return new ClientCacheGetAllRequest(reader);

            case OP_CACHE_GET_AND_PUT:
                return new ClientCacheGetAndPutRequest(reader);

            case OP_CACHE_GET_AND_REPLACE:
                return new ClientCacheGetAndReplaceRequest(reader);

            case OP_CACHE_GET_AND_REMOVE:
                return new ClientCacheGetAndRemoveRequest(reader);

            case OP_CACHE_PUT_IF_ABSENT:
                return new ClientCachePutIfAbsentRequest(reader);

            case OP_CACHE_GET_AND_PUT_IF_ABSENT:
                return new ClientCacheGetAndPutIfAbsentRequest(reader);

            case OP_CACHE_REPLACE:
                return new ClientCacheReplaceRequest(reader);

            case OP_CACHE_REPLACE_IF_EQUALS:
                return new ClientCacheReplaceIfEqualsRequest(reader);

            case OP_CACHE_PUT_ALL:
                return new ClientCachePutAllRequest(reader);

            case OP_CACHE_CLEAR:
                return new ClientCacheClearRequest(reader);

            case OP_CACHE_CLEAR_KEY:
                return new ClientCacheClearKeyRequest(reader);

            case OP_CACHE_CLEAR_KEYS:
                return new ClientCacheClearKeysRequest(reader);

            case OP_CACHE_REMOVE_KEY:
                return new ClientCacheRemoveKeyRequest(reader);

            case OP_CACHE_REMOVE_IF_EQUALS:
                return new ClientCacheRemoveIfEqualsRequest(reader);

            case OP_CACHE_GET_SIZE:
                return new ClientCacheGetSizeRequest(reader);

            case OP_CACHE_REMOVE_KEYS:
                return new ClientCacheRemoveKeysRequest(reader);

            case OP_CACHE_LOCAL_PEEK:
                return new ClientCacheLocalPeekRequest(reader);

            case OP_CACHE_REMOVE_ALL:
                return new ClientCacheRemoveAllRequest(reader);

            case OP_CACHE_CREATE_WITH_NAME:
                return new ClientCacheCreateWithNameRequest(reader);

            case OP_CACHE_GET_OR_CREATE_WITH_NAME:
                return new ClientCacheGetOrCreateWithNameRequest(reader);

            case OP_CACHE_DESTROY:
                return new ClientCacheDestroyRequest(reader);

            case OP_CACHE_NODE_PARTITIONS:
                return new ClientCacheNodePartitionsRequest(reader);

            case OP_CACHE_PARTITIONS:
                return new ClientCachePartitionsRequest(reader);

            case OP_CACHE_GET_NAMES:
                return new ClientCacheGetNamesRequest(reader);

            case OP_CACHE_GET_CONFIGURATION:
                return new ClientCacheGetConfigurationRequest(reader, protocolCtx);

            case OP_CACHE_CREATE_WITH_CONFIGURATION:
                return new ClientCacheCreateWithConfigurationRequest(reader, protocolCtx);

            case OP_CACHE_GET_OR_CREATE_WITH_CONFIGURATION:
                return new ClientCacheGetOrCreateWithConfigurationRequest(reader, protocolCtx);

            case OP_QUERY_SQL:
                return new ClientCacheSqlQueryRequest(reader);

            case OP_QUERY_SQL_FIELDS:
                return new ClientCacheSqlFieldsQueryRequest(reader, protocolCtx);

            case OP_QUERY_CONTINUOUS:
                return new ClientCacheQueryContinuousRequest(reader);

            case OP_TX_START:
                return new ClientTxStartRequest(reader);

            case OP_TX_END:
                return new ClientTxEndRequest(reader);

            case OP_CLUSTER_GET_STATE:
                return new ClientClusterGetStateRequest(reader);

            case OP_CLUSTER_CHANGE_STATE:
                return new ClientClusterChangeStateRequest(reader);

            case OP_CLUSTER_CHANGE_WAL_STATE:
                return new ClientClusterWalChangeStateRequest(reader);

            case OP_CLUSTER_GET_WAL_STATE:
                return new ClientClusterWalGetStateRequest(reader);

            case OP_CLUSTER_GROUP_GET_NODE_IDS:
                return new ClientClusterGroupGetNodeIdsRequest(reader);

            case OP_CLUSTER_GROUP_GET_NODE_INFO:
                return new ClientClusterGroupGetNodesDetailsRequest(reader);

            case OP_CLUSTER_GROUP_GET_NODE_ENDPOINTS:
                return new ClientClusterGroupGetNodesEndpointsRequest(reader);

            case OP_COMPUTE_TASK_EXECUTE:
                return new ClientExecuteTaskRequest(reader);

            case OP_SERVICE_INVOKE:
                return new ClientServiceInvokeRequest(reader, protocolCtx);

            case OP_SERVICE_GET_DESCRIPTORS:
                return new ClientServiceGetDescriptorsRequest(reader);

            case OP_SERVICE_GET_DESCRIPTOR:
                return new ClientServiceGetDescriptorRequest(reader);

            case OP_DATA_STREAMER_START:
                return new ClientDataStreamerStartRequest(reader);

            case OP_DATA_STREAMER_ADD_DATA:
                return new ClientDataStreamerAddDataRequest(reader);
        }

        // Unknown op code: still consume the request id so a proper error response can be routed back.
        return new ClientRawRequest(reader.readLong(), ClientStatus.INVALID_OP_CODE,
            "Invalid request op code: " + opCode);
    }

    /** {@inheritDoc} */
    @Override public ClientMessage encode(ClientListenerResponse resp) {
        assert resp != null;

        BinaryHeapOutputStream outStream = new BinaryHeapOutputStream(32, BinaryMemoryAllocator.POOLED.chunk());

        BinaryRawWriterEx writer = marsh.writer(outStream);

        assert resp instanceof ClientOutgoingMessage : "Unexpected response type: " + resp.getClass();

        ((ClientOutgoingMessage)resp).encode(ctx, writer);

        return new ClientMessage(outStream);
    }

    /** {@inheritDoc} */
    @Override public int decodeCommandType(ClientMessage msg) {
        assert msg != null;

        BinaryInputStream inStream = new BinaryHeapInputStream(msg.payload());

        return inStream.readShort();
    }

    /** {@inheritDoc} */
    @Override public long decodeRequestId(ClientMessage msg) {
        // Request id is decoded later by the request object itself.
        return 0;
    }
}
| apache-2.0 |
kierarad/gocd | server/src/main/java/com/thoughtworks/go/server/controller/Message.java | 1754 | /*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.controller;
import java.util.HashMap;
import org.apache.commons.lang3.StringUtils;
/**
 * An immutable UI message (level + key + text) that can be placed into a view
 * model. Messages with null or empty text are considered blank and are never
 * added to the model.
 */
public final class Message {
    /** Severity of the message, e.g. "error" or "info". */
    private final String level;

    /** Model key under which this message is stored. */
    private final String key;

    /** Human-readable message text; may be null or empty (treated as blank). */
    private final String message;

    /** Key used by the single-argument constructor when no explicit key is given. */
    private static final String DEFAULT_KEY = "message";

    private Message(String level, String message) {
        this(level, DEFAULT_KEY, message);
    }

    private Message(String level, String key, String message) {
        this.level = level;
        this.key = key;
        this.message = message;
    }

    public String getLevel() {
        return level;
    }

    public String getKey() {
        return key;
    }

    public String getMessage() {
        return message;
    }

    /** Creates an error-level message stored under the given key. */
    public static Message error(String key, String message) {
        return new Message("error", key, message);
    }

    /** Creates an info-level message stored under the given key. */
    public static Message info(String key, String message) {
        return new Message("info", key, message);
    }

    /**
     * Puts this message into the given model under its key, unless the message
     * text is blank (null or empty) in which case the model is left untouched.
     *
     * @param data the model map to populate
     */
    public void populateModel(HashMap<String, Object> data) {
        // Stdlib equivalent of StringUtils.isEmpty(message): null or zero-length.
        if (message == null || message.isEmpty()) {
            return;
        }
        data.put(key, this);
    }
}
| apache-2.0 |
yafengguo/Apache-beam | sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRangeTracker.java | 5477 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.range;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkState;
import javax.annotation.Nullable;
import org.apache.beam.sdk.io.BoundedSource.BoundedReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@link RangeTracker} for {@link ByteKey ByteKeys} in {@link ByteKeyRange ByteKeyRanges}.
*
* @see ByteKey
* @see ByteKeyRange
*/
public final class ByteKeyRangeTracker implements RangeTracker<ByteKey> {
    private static final Logger logger = LoggerFactory.getLogger(ByteKeyRangeTracker.class);

    /** Instantiates a new {@link ByteKeyRangeTracker} with the specified range. */
    public static ByteKeyRangeTracker of(ByteKeyRange range) {
        return new ByteKeyRangeTracker(range);
    }

    /** Returns true once {@link #markDone()} has been called or a record outside the range was seen. */
    public synchronized boolean isDone() {
        return done;
    }

    @Override
    public synchronized ByteKey getStartPosition() {
        return range.getStartKey();
    }

    @Override
    public synchronized ByteKey getStopPosition() {
        return range.getEndKey();
    }

    /** Returns the current range. */
    public synchronized ByteKeyRange getRange() {
        return range;
    }

    @Override
    public synchronized boolean tryReturnRecordAt(boolean isAtSplitPoint, ByteKey recordStart) {
        if (done) {
            return false;
        }
        // Invariants: the very first record must be a split point, records may not precede
        // the range start, and records must arrive in non-decreasing key order.
        checkState(!(position == null && !isAtSplitPoint), "The first record must be at a split point");
        checkState(!(recordStart.compareTo(range.getStartKey()) < 0),
            "Trying to return record which is before the start key");
        checkState(!(position != null && recordStart.compareTo(position) < 0),
            "Trying to return record which is before the last-returned record");

        if (position == null) {
            // First record: tighten the range start to the actual first key observed.
            range = range.withStartKey(recordStart);
        }
        position = recordStart;
        if (isAtSplitPoint) {
            if (!range.containsKey(recordStart)) {
                // Split point past the end of the range: reading is finished.
                done = true;
                return false;
            }
            ++splitPointsSeen;
        }
        return true;
    }

    @Override
    public synchronized boolean trySplitAtPosition(ByteKey splitPosition) {
        // Unstarted.
        if (position == null) {
            logger.warn(
                "{}: Rejecting split request at {} because no records have been returned.",
                this,
                splitPosition);
            return false;
        }
        // Started, but not after current position.
        if (splitPosition.compareTo(position) <= 0) {
            logger.warn(
                "{}: Rejecting split request at {} because it is not after current position {}.",
                this,
                splitPosition,
                position);
            return false;
        }
        // Sanity check.
        if (!range.containsKey(splitPosition)) {
            logger.warn(
                "{}: Rejecting split request at {} because it is not within the range.",
                this,
                splitPosition);
            return false;
        }
        // Accept the split: shrink this tracker's range to end at the split position.
        range = range.withEndKey(splitPosition);
        return true;
    }

    @Override
    public synchronized double getFractionConsumed() {
        if (position == null) {
            return 0;
        }
        return range.estimateFractionForKey(position);
    }

    /** Returns the number of split points fully consumed so far (the current one is excluded until done). */
    public synchronized long getSplitPointsConsumed() {
        if (position == null) {
            return 0;
        } else if (isDone()) {
            return splitPointsSeen;
        } else {
            // There is a current split point, and it has not finished processing.
            checkState(
                splitPointsSeen > 0,
                "A started rangeTracker should have seen > 0 split points (is %s)",
                splitPointsSeen);
            return splitPointsSeen - 1;
        }
    }

    ///////////////////////////////////////////////////////////////////////////////

    // Current claimed range; may shrink at the start (first record) or end (successful split).
    private ByteKeyRange range;
    // Key of the last returned record, or null if no record has been returned yet.
    @Nullable private ByteKey position;
    // Count of split-point records returned so far.
    private long splitPointsSeen;
    // Whether this tracker is finished (explicitly, or by leaving the range).
    private boolean done;

    private ByteKeyRangeTracker(ByteKeyRange range) {
        this.range = range;
        position = null;
        splitPointsSeen = 0L;
        done = false;
    }

    /**
     * Marks this range tracker as being done. Specifically, this will mark the current split point,
     * if one exists, as being finished.
     *
     * <p>Always returns false, so that it can be used in an implementation of
     * {@link BoundedReader#start()} or {@link BoundedReader#advance()} as follows:
     *
     * <pre> {@code
     * public boolean start() {
     *   return startImpl() && rangeTracker.tryReturnRecordAt(isAtSplitPoint, position)
     *       || rangeTracker.markDone();
     * }} </pre>
     */
    public synchronized boolean markDone() {
        done = true;
        return false;
    }

    @Override
    public synchronized String toString() {
        return toStringHelper(ByteKeyRangeTracker.class)
            .add("range", range)
            .add("position", position)
            .toString();
    }
}
| apache-2.0 |
recruit-tech/redpen | redpen-core/src/test/java/cc/redpen/validator/document/FrequentSentenceStartValidatorTest.java | 2634 | /**
* redpen: a text inspection tool
* Copyright (c) 2014-2015 Recruit Technologies Co., Ltd. and contributors
* (see CONTRIBUTORS.md)
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cc.redpen.validator.document;
import cc.redpen.RedPenException;
import cc.redpen.model.Document;
import cc.redpen.model.Sentence;
import cc.redpen.tokenizer.WhiteSpaceTokenizer;
import cc.redpen.validator.ValidationError;
import cc.redpen.validator.ValidatorFactory;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Tests for the FrequentSentenceStart validator, which flags sentences that
 * begin with a phrase repeated too often across the document.
 */
class FrequentSentenceStartValidatorTest {
    @Test
    void testDocument() throws RedPenException {
        FrequentSentenceStartValidator validator = (FrequentSentenceStartValidator) ValidatorFactory.getInstance("FrequentSentenceStart");
        // Four sentences start with "When it comes to" and two with "The acronym";
        // both starts repeat often enough that the validator should flag them.
        Document document =
            Document.builder(new WhiteSpaceTokenizer())
                .addSection(1)
                .addParagraph()
                .addSentence(new Sentence("When it comes to the Subject Of Cake (the sweet and delicious baked delicacy), one should" +
                    " always remember (or at least consider) this foodstuff's effect on one's ever-expanding waistline.", 1))
                .addSentence(new Sentence("When it comes to fish, tuna is pretty nice.", 2))
                .addSentence(new Sentence("When it comes to celery, the thing to consider is the crunch.", 3))
                .addSentence(new Sentence("When it comes to how to start a sentence, variety is the key.", 4))
                .addSentence(new Sentence("The acronym CPU stands for Central Processing Unit (CPU).", 5))
                .addSentence(new Sentence("The acronym AAAS is the American Association for the Advancement of Science.", 6))
                .build();

        List<ValidationError> errors = new ArrayList<>();
        validator.setErrorList(errors);
        validator.validate(document);
        // Expect one error per offending sentence except the first occurrence of "The acronym":
        // 4 x "When it comes to" + 1 x "The acronym" = 5 errors in total.
        assertEquals(5, errors.size());
    }
}
| apache-2.0 |
ecarm002/incubator-asterixdb | hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java | 3291 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.control.cc.work;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobStatus;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.NodeControllerState;
import org.apache.hyracks.control.cc.cluster.INodeManager;
import org.apache.hyracks.control.cc.job.IJobManager;
import org.apache.hyracks.control.cc.job.JobRun;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * Work item processed on the cluster controller when a node controller reports
 * that it has cleaned up the joblet of a given job. When the last pending node
 * reports in, the job is finally completed; any exception raised during final
 * completion fails the job instead.
 */
public class JobletCleanupNotificationWork extends AbstractHeartbeatWork {
    private static final Logger LOGGER = LogManager.getLogger();

    /** Id of the job whose joblet was cleaned up on {@code nodeId}. */
    private final JobId jobId;

    /**
     * @param ccs the cluster controller service
     * @param jobId id of the job being cleaned up
     * @param nodeId id of the node controller reporting cleanup completion
     */
    public JobletCleanupNotificationWork(ClusterControllerService ccs, JobId jobId, String nodeId) {
        super(ccs, nodeId, null);
        this.jobId = jobId;
    }

    @Override
    public void runWork() {
        IJobManager jobManager = ccs.getJobManager();
        final JobRun run = jobManager.get(jobId);
        if (run == null) {
            // Possibly a stale notification for a job that has already been removed.
            LOGGER.log(Level.WARN, () -> "ignoring unknown job " + jobId + " on notification from " + nodeId);
            return;
        }
        Set<String> cleanupPendingNodes = run.getCleanupPendingNodeIds();
        if (!cleanupPendingNodes.remove(nodeId)) {
            // Duplicate or unexpected notification; the node was not awaited.
            LOGGER.log(Level.WARN,
                    () -> nodeId + " not in pending cleanup nodes set: " + cleanupPendingNodes + " for job " + jobId);
            return;
        }
        INodeManager nodeManager = ccs.getNodeManager();
        NodeControllerState ncs = nodeManager.getNodeControllerState(nodeId);
        if (ncs != null) {
            ncs.getActiveJobIds().remove(jobId);
        }
        if (cleanupPendingNodes.isEmpty()) {
            // Last node reported: finalize the job.
            try {
                jobManager.finalComplete(run);
            } catch (HyracksException e) {
                // Fail the job with the caught exception during final completion.
                List<Exception> completionException = new ArrayList<>();
                if (run.getExceptions() != null && !run.getExceptions().isEmpty()) {
                    completionException.addAll(run.getExceptions());
                }
                // Put the completion failure first so it is the primary reported cause.
                completionException.add(0, e);
                run.setStatus(JobStatus.FAILURE, completionException);
            }
        }
    }
}
| apache-2.0 |
jk1/intellij-community | platform/vcs-log/impl/src/com/intellij/vcs/log/history/VcsLogFileRevision.java | 3850 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.vcs.log.history;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.RepositoryLocation;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.changes.ByteBackedContentRevision;
import com.intellij.openapi.vcs.changes.ContentRevision;
import com.intellij.openapi.vcs.history.VcsFileRevisionEx;
import com.intellij.openapi.vcs.history.VcsRevisionNumber;
import com.intellij.vcs.log.VcsCommitMetadata;
import com.intellij.vcs.log.VcsUser;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.Calendar;
import java.util.Date;
/**
 * A {@link VcsFileRevisionEx} backed by commit metadata from the VCS log.
 * <p>
 * Author/committer identity, timestamps and the commit message come from the supplied
 * {@link VcsCommitMetadata}; file content is loaded lazily from the supplied
 * {@link ContentRevision} and cached in {@link #myContent}.
 */
public class VcsLogFileRevision extends VcsFileRevisionEx {
  @NotNull private final ContentRevision myRevision;
  @NotNull private final FilePath myPath;
  @NotNull private final VcsUser myAuthor;
  @NotNull private final VcsUser myCommitter;
  private final long myAuthorTime;
  private final long myCommitTime;
  @NotNull private final String myFullMessage;
  // Cached result of loadContent(); null until content is first loaded.
  @Nullable private byte[] myContent = null;

  public VcsLogFileRevision(@NotNull VcsCommitMetadata commitMetadata, @NotNull ContentRevision revision, @NotNull FilePath path) {
    myRevision = revision;
    myPath = path;

    myAuthor = commitMetadata.getAuthor();
    myCommitter = commitMetadata.getCommitter();
    myAuthorTime = commitMetadata.getAuthorTime();
    myCommitTime = commitMetadata.getCommitTime();
    myFullMessage = commitMetadata.getFullMessage();
  }

  @Nullable
  @Override
  public String getAuthor() {
    return myAuthor.getName();
  }

  @Nullable
  @Override
  public String getAuthorEmail() {
    return myAuthor.getEmail();
  }

  @Nullable
  @Override
  public String getCommitterName() {
    return myCommitter.getName();
  }

  @Nullable
  @Override
  public String getCommitterEmail() {
    // Fixed: previously returned myCommitter.getName(), i.e. the committer's name instead of the email.
    return myCommitter.getEmail();
  }

  @Nullable
  @Override
  public String getCommitMessage() {
    return myFullMessage;
  }

  @NotNull
  @Override
  public FilePath getPath() {
    return myPath;
  }

  @Nullable
  @Override
  public String getBranchName() {
    // Branch information is not available from the log metadata.
    return null;
  }

  @Nullable
  @Override
  public RepositoryLocation getChangedRepositoryPath() {
    return null;
  }

  /**
   * Loads the file content for this revision, caching the result so subsequent calls are cheap.
   * Prefers the byte-based API when the underlying revision supports it, to avoid a
   * String round-trip; otherwise encodes the textual content using the file's charset.
   */
  @Override
  public byte[] loadContent() throws IOException, VcsException {
    if (myContent == null) {
      if (myRevision instanceof ByteBackedContentRevision) {
        myContent = ((ByteBackedContentRevision)myRevision).getContentAsBytes();
      }
      else {
        String content = myRevision.getContent();
        if (content != null) {
          myContent = content.getBytes(myPath.getCharset().name());
        }
      }
    }
    return myContent;
  }

  /**
   * Returns the cached content, or {@code null} if {@link #loadContent()} has not been called yet.
   */
  @Nullable
  @Override
  public byte[] getContent() {
    return myContent;
  }

  @NotNull
  @Override
  public VcsRevisionNumber getRevisionNumber() {
    return myRevision.getRevisionNumber();
  }

  @Override
  public Date getRevisionDate() {
    // Equivalent to the previous Calendar.setTimeInMillis/getTime round-trip.
    return new Date(myCommitTime);
  }

  @Nullable
  @Override
  public Date getAuthorDate() {
    return new Date(myAuthorTime);
  }
}
| apache-2.0 |
dpocock/camel | camel-core/src/main/java/org/apache/camel/processor/PollEnricher.java | 12945 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import org.apache.camel.AsyncCallback;
import org.apache.camel.AsyncProcessor;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.CamelExchangeException;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.Expression;
import org.apache.camel.PollingConsumer;
import org.apache.camel.impl.ConsumerCache;
import org.apache.camel.impl.EmptyConsumerCache;
import org.apache.camel.processor.aggregate.AggregationStrategy;
import org.apache.camel.spi.IdAware;
import org.apache.camel.support.ServiceSupport;
import org.apache.camel.util.AsyncProcessorHelper;
import org.apache.camel.util.ExchangeHelper;
import org.apache.camel.util.ServiceHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.camel.util.ExchangeHelper.copyResultsPreservePattern;
/**
* A content enricher that enriches input data by first obtaining additional
* data from a <i>resource</i> represented by an endpoint <code>producer</code>
* and second by aggregating input data and additional data. Aggregation of
* input data and additional data is delegated to an {@link org.apache.camel.processor.aggregate.AggregationStrategy}
* object.
* <p/>
* Uses a {@link org.apache.camel.PollingConsumer} to obtain the additional data as opposed to {@link Enricher}
* that uses a {@link org.apache.camel.Producer}.
*
* @see Enricher
*/
public class PollEnricher extends ServiceSupport implements AsyncProcessor, IdAware, CamelContextAware {
    private static final Logger LOG = LoggerFactory.getLogger(PollEnricher.class);
    private CamelContext camelContext;
    // Cache of polling consumers for dynamically computed endpoints; created lazily in doStart().
    private ConsumerCache consumerCache;
    private String id;
    // Strategy used to merge the polled exchange into the original one; may be set externally
    // or defaulted via setDefaultAggregationStrategy().
    private AggregationStrategy aggregationStrategy;
    // Expression evaluated per exchange to compute the endpoint to poll from.
    private final Expression expression;
    // Poll timeout in millis: <0 blocks, 0 is receiveNoWait, >0 waits up to the timeout.
    private long timeout;
    private boolean aggregateOnException;
    // Size for the consumer cache: <0 disables caching, 0 uses the default size.
    private int cacheSize;
    private boolean ignoreInvalidEndpoint;

    /**
     * Creates a new {@link PollEnricher}.
     *
     * @param expression expression to use to compute the endpoint to poll from.
     * @param timeout timeout in millis
     */
    public PollEnricher(Expression expression, long timeout) {
        this.expression = expression;
        this.timeout = timeout;
    }

    public CamelContext getCamelContext() {
        return camelContext;
    }

    public void setCamelContext(CamelContext camelContext) {
        this.camelContext = camelContext;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public Expression getExpression() {
        return expression;
    }

    public AggregationStrategy getAggregationStrategy() {
        return aggregationStrategy;
    }

    /**
     * Sets the aggregation strategy for this poll enricher.
     *
     * @param aggregationStrategy the aggregationStrategy to set
     */
    public void setAggregationStrategy(AggregationStrategy aggregationStrategy) {
        this.aggregationStrategy = aggregationStrategy;
    }

    public long getTimeout() {
        return timeout;
    }

    /**
     * Sets the timeout to use when polling.
     * <p/>
     * Use 0 to use receiveNoWait,
     * Use -1 to use receive with no timeout (which will block until data is available).
     *
     * @param timeout timeout in millis.
     */
    public void setTimeout(long timeout) {
        this.timeout = timeout;
    }

    public boolean isAggregateOnException() {
        return aggregateOnException;
    }

    // When true, a failed resource exchange is still passed to the aggregation strategy
    // instead of being copied back onto the original exchange as-is (see process()).
    public void setAggregateOnException(boolean aggregateOnException) {
        this.aggregateOnException = aggregateOnException;
    }

    /**
     * Sets the default aggregation strategy for this poll enricher.
     */
    public void setDefaultAggregationStrategy() {
        this.aggregationStrategy = defaultAggregationStrategy();
    }

    public int getCacheSize() {
        return cacheSize;
    }

    public void setCacheSize(int cacheSize) {
        this.cacheSize = cacheSize;
    }

    public boolean isIgnoreInvalidEndpoint() {
        return ignoreInvalidEndpoint;
    }

    public void setIgnoreInvalidEndpoint(boolean ignoreInvalidEndpoint) {
        this.ignoreInvalidEndpoint = ignoreInvalidEndpoint;
    }

    // Synchronous entry point; delegates to the async variant via the helper.
    public void process(Exchange exchange) throws Exception {
        AsyncProcessorHelper.process(this, exchange);
    }

    /**
     * Enriches the input data (<code>exchange</code>) by first obtaining
     * additional data from an endpoint represented by an endpoint
     * <code>producer</code> and second by aggregating input data and additional
     * data. Aggregation of input data and additional data is delegated to an
     * {@link org.apache.camel.processor.aggregate.AggregationStrategy} object set at construction time. If the
     * message exchange with the resource endpoint fails then no aggregation
     * will be done and the failed exchange content is copied over to the
     * original message exchange.
     * <p/>
     * Note: this implementation always completes synchronously, i.e. every exit path
     * invokes {@code callback.done(true)} and returns {@code true}.
     *
     * @param exchange input data.
     */
    @Override
    public boolean process(Exchange exchange, AsyncCallback callback) {
        try {
            preCheckPoll(exchange);
        } catch (Exception e) {
            exchange.setException(new CamelExchangeException("Error during pre poll check", exchange, e));
            callback.done(true);
            return true;
        }

        // which consumer to use
        PollingConsumer consumer;
        Endpoint endpoint;

        // use dynamic endpoint so calculate the endpoint to use
        Object recipient = null;
        try {
            recipient = expression.evaluate(exchange, Object.class);
            endpoint = resolveEndpoint(exchange, recipient);
            // acquire the consumer from the cache
            consumer = consumerCache.acquirePollingConsumer(endpoint);
        } catch (Throwable e) {
            // an invalid endpoint is either silently ignored (debug-logged) or propagated
            // as the exchange's exception, depending on ignoreInvalidEndpoint
            if (isIgnoreInvalidEndpoint()) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Endpoint uri is invalid: " + recipient + ". This exception will be ignored.", e);
                }
            } else {
                exchange.setException(e);
            }
            callback.done(true);
            return true;
        }

        // poll according to the configured timeout semantics: <0 blocks, 0 no-wait, >0 bounded wait
        Exchange resourceExchange;
        try {
            if (timeout < 0) {
                LOG.debug("Consumer receive: {}", consumer);
                resourceExchange = consumer.receive();
            } else if (timeout == 0) {
                LOG.debug("Consumer receiveNoWait: {}", consumer);
                resourceExchange = consumer.receiveNoWait();
            } else {
                LOG.debug("Consumer receive with timeout: {} ms. {}", timeout, consumer);
                resourceExchange = consumer.receive(timeout);
            }

            if (resourceExchange == null) {
                LOG.debug("Consumer received no exchange");
            } else {
                LOG.debug("Consumer received: {}", resourceExchange);
            }
        } catch (Exception e) {
            exchange.setException(new CamelExchangeException("Error during poll", exchange, e));
            callback.done(true);
            return true;
        } finally {
            // return the consumer back to the cache
            consumerCache.releasePollingConsumer(endpoint, consumer);
        }

        try {
            if (!isAggregateOnException() && (resourceExchange != null && resourceExchange.isFailed())) {
                // copy resource exchange onto original exchange (preserving pattern)
                copyResultsPreservePattern(exchange, resourceExchange);
            } else {
                prepareResult(exchange);

                // prepare the exchanges for aggregation
                ExchangeHelper.prepareAggregation(exchange, resourceExchange);
                // must catch any exception from aggregation
                Exchange aggregatedExchange = aggregationStrategy.aggregate(exchange, resourceExchange);
                if (aggregatedExchange != null) {
                    // copy aggregation result onto original exchange (preserving pattern)
                    copyResultsPreservePattern(exchange, aggregatedExchange);
                    // handover any synchronization
                    if (resourceExchange != null) {
                        resourceExchange.handoverCompletions(exchange);
                    }
                }
            }

            // set header with the uri of the endpoint enriched so we can use that for tracing etc
            // NOTE(review): consumer is used here after being released back to the cache in the
            // finally block above; only the endpoint URI is read -- confirm this is safe for all caches.
            if (exchange.hasOut()) {
                exchange.getOut().setHeader(Exchange.TO_ENDPOINT, consumer.getEndpoint().getEndpointUri());
            } else {
                exchange.getIn().setHeader(Exchange.TO_ENDPOINT, consumer.getEndpoint().getEndpointUri());
            }
        } catch (Throwable e) {
            exchange.setException(new CamelExchangeException("Error occurred during aggregation", exchange, e));
            callback.done(true);
            return true;
        }

        callback.done(true);
        return true;
    }

    // Resolves the dynamic recipient (typically a String uri or an Endpoint) to an Endpoint.
    protected Endpoint resolveEndpoint(Exchange exchange, Object recipient) {
        // trim strings as end users might have added spaces between separators
        if (recipient instanceof String) {
            recipient = ((String)recipient).trim();
        }
        return ExchangeHelper.resolveEndpoint(exchange, recipient);
    }

    /**
     * Strategy to pre check polling.
     * <p/>
     * Is currently used to prevent doing poll enrich from a file based endpoint when the current route also
     * started from a file based endpoint as that is not currently supported.
     *
     * @param exchange the current exchange
     */
    protected void preCheckPoll(Exchange exchange) throws Exception {
        // noop
    }

    // For out-capable exchange patterns, seed the OUT message from IN before aggregation.
    private static void prepareResult(Exchange exchange) {
        if (exchange.getPattern().isOutCapable()) {
            exchange.getOut().copyFrom(exchange.getIn());
        }
    }

    private static AggregationStrategy defaultAggregationStrategy() {
        return new CopyAggregationStrategy();
    }

    @Override
    public String toString() {
        return "PollEnrich[" + expression + "]";
    }

    // Lazily creates the consumer cache based on cacheSize, then starts the cache and strategy.
    protected void doStart() throws Exception {
        if (consumerCache == null) {
            // create consumer cache if we use dynamic expressions for computing the endpoints to poll
            if (cacheSize < 0) {
                consumerCache = new EmptyConsumerCache(this, camelContext);
                LOG.debug("PollEnrich {} is not using ConsumerCache", this);
            } else if (cacheSize == 0) {
                consumerCache = new ConsumerCache(this, camelContext);
                LOG.debug("PollEnrich {} using ConsumerCache with default cache size", this);
            } else {
                consumerCache = new ConsumerCache(this, camelContext, cacheSize);
                LOG.debug("PollEnrich {} using ConsumerCache with cacheSize={}", this, cacheSize);
            }
        }
        ServiceHelper.startServices(consumerCache, aggregationStrategy);
    }

    protected void doStop() throws Exception {
        ServiceHelper.stopServices(aggregationStrategy, consumerCache);
    }

    protected void doShutdown() throws Exception {
        ServiceHelper.stopAndShutdownServices(aggregationStrategy, consumerCache);
    }

    // Default strategy: the polled exchange's results replace the original's; a null poll
    // result clears the body (but keeps headers/attachments) to signal "no data received".
    private static class CopyAggregationStrategy implements AggregationStrategy {

        public Exchange aggregate(Exchange oldExchange, Exchange newExchange) {
            if (newExchange != null) {
                copyResultsPreservePattern(oldExchange, newExchange);
            } else {
                // if no newExchange then there was no message from the external resource
                // and therefore we should set an empty body to indicate this fact
                // but keep headers/attachments as we want to propagate those
                oldExchange.getIn().setBody(null);
                oldExchange.setOut(null);
            }
            return oldExchange;
        }

    }
}
| apache-2.0 |
titusfortner/selenium | java/src/org/openqa/selenium/docker/v1_41/StartContainer.java | 1809 | // Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.docker.v1_41;
import org.openqa.selenium.docker.ContainerId;
import org.openqa.selenium.internal.Require;
import org.openqa.selenium.remote.http.HttpHandler;
import org.openqa.selenium.remote.http.HttpRequest;
import static org.openqa.selenium.docker.v1_41.DockerMessages.throwIfNecessary;
import static org.openqa.selenium.docker.v1_41.V141Docker.DOCKER_API_VERSION;
import static org.openqa.selenium.remote.http.HttpMethod.POST;
/**
 * Issues the Docker "start container" API call for a given container id.
 */
class StartContainer {

  private final HttpHandler client;

  public StartContainer(HttpHandler client) {
    this.client = Require.nonNull("HTTP client", client);
  }

  /**
   * Starts the container with the given id, throwing if the Docker daemon reports an error.
   */
  public void apply(ContainerId id) {
    Require.nonNull("Container id", id);

    String path = String.format("/v%s/containers/%s/start", DOCKER_API_VERSION, id);
    HttpRequest request =
        new HttpRequest(POST, path)
            .addHeader("Content-Length", "0")
            .addHeader("Content-Type", "text/plain");

    throwIfNecessary(client.execute(request), "Unable to start container: %s", id);
  }
}
| apache-2.0 |
goldmansachs/reladomo | reladomogen/src/main/java/com/gs/fw/common/mithra/generator/MithraObjectTypeParser.java | 1902 | /*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package com.gs.fw.common.mithra.generator;
import com.gs.fw.common.mithra.generator.filesystem.FauxFileSystem;
import com.gs.fw.common.mithra.generator.metamodel.MithraInterfaceType;
import java.io.File;
import java.util.Map;
/*
A parser for Mithra object type definitions. The parser produces type definitions
which are consumed by the generator to generate classes.
The contract with the caller (generator) is as follows :
1. Generator invokes a sequence of setter methods
2. After all setter methods have been invoked, generator invokes parse
3. After parse returns successfully, generator invokes the various getter methods
*/
/**
 * Parses Mithra object type definitions into the type wrappers consumed by the generator.
 * See the contract comment above: setters first, then {@link #parse()}, then getters.
 */
public interface MithraObjectTypeParser
{
    // invoked before parse

    /** Sets the logger used to report progress and problems during parsing. */
    void setLogger(Logger logger);

    /** When true, generated objects are forced to use off-heap storage. */
    void setForceOffHeap(boolean forceOffHeap);

    /** When true, generated getters default to being final. */
    void setDefaultFinalGetters(boolean defaultFinalGetters);

    /** Sets the (possibly virtual) file system used to locate and read definition files. */
    void setFauxFileSystem(FauxFileSystem fauxFileSystem);

    // actually parse

    /**
     * Parses the definitions.
     *
     * @return the name of the class list, usually as a file path.
     */
    String parse();

    // invoked after a successful parse

    /** Returns the parsed Mithra objects, keyed by name. */
    Map<String,MithraObjectTypeWrapper> getMithraObjects();

    /** Returns the parsed embedded-value objects, keyed by name. */
    Map<String,MithraEmbeddedValueObjectTypeWrapper> getMithraEmbeddedValueObjects();

    /** Returns the parsed Mithra interfaces, keyed by name. */
    Map<String,MithraInterfaceType> getMithraInterfaces();

    /** Returns a checksum of the parsed input, usable for change detection. */
    String getChecksum();
}
| apache-2.0 |
ecarm002/incubator-asterixdb | hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java | 6576 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.am.lsm.invertedindex.common;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
import org.apache.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveSearchModifier;
import org.apache.hyracks.storage.am.lsm.invertedindex.search.EditDistanceSearchModifier;
import org.apache.hyracks.storage.am.lsm.invertedindex.search.JaccardSearchModifier;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
import org.apache.hyracks.storage.common.IIndex;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.Test;
/**
 * Base class for inverted-index search tests: populates an index (via insert or bulk load)
 * and runs document/random queries under a set of search modifiers.
 */
public abstract class AbstractInvertedIndexSearchTest extends AbstractInvertedIndexTest {

    protected final Logger LOGGER = LogManager.getLogger();

    protected int NUM_DOC_QUERIES = AccessMethodTestsConfig.LSM_INVINDEX_NUM_DOC_QUERIES;
    protected int NUM_RANDOM_QUERIES = AccessMethodTestsConfig.LSM_INVINDEX_NUM_RANDOM_QUERIES;

    // When true the index is populated via bulk load instead of individual inserts.
    protected final boolean bulkLoad;

    public AbstractInvertedIndexSearchTest(InvertedIndexType invIndexType, boolean bulkLoad) {
        super(invIndexType);
        this.bulkLoad = bulkLoad;
    }

    /**
     * Populates the index from the tuple generator, validates it, runs searches with every
     * supplied modifier, then deactivates and destroys the index.
     */
    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen,
            List<IInvertedIndexSearchModifier> searchModifiers) throws IOException {
        IIndex invIndex = testCtx.getIndex();
        // Note operator precedence: && binds tighter than ||, so this creates/activates the
        // index when it is not an LSM variant, or whenever we are inserting (non-bulk-load).
        if ((invIndexType != InvertedIndexType.LSM) && (invIndexType != InvertedIndexType.PARTITIONED_LSM)
                || !bulkLoad) {
            invIndex.create();
            invIndex.activate();
        }
        if (bulkLoad) {
            // The boolean argument differs for LSM variants; presumably it controls
            // create/activate inside the bulk load -- TODO confirm against LSMInvertedIndexTestUtils.
            if ((invIndexType != InvertedIndexType.LSM) && (invIndexType != InvertedIndexType.PARTITIONED_LSM)) {
                LSMInvertedIndexTestUtils.bulkLoadInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT, false);
            } else {
                LSMInvertedIndexTestUtils.bulkLoadInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT, true);
            }
        } else {
            LSMInvertedIndexTestUtils.insertIntoInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT);
        }
        invIndex.validate();

        // Run the same query workload once per search modifier.
        for (IInvertedIndexSearchModifier searchModifier : searchModifiers) {
            if (LOGGER.isInfoEnabled()) {
                LOGGER.info("Running searches with: " + searchModifier.toString());
            }
            LSMInvertedIndexTestUtils.testIndexSearch(testCtx, tupleGen, harness.getRandom(), NUM_DOC_QUERIES,
                    NUM_RANDOM_QUERIES, searchModifier, SCAN_COUNT_ARRAY);
        }

        invIndex.deactivate();
        invIndex.destroy();
    }

    // Word-token workloads use conjunctive and Jaccard modifiers only.
    private void testWordInvIndexIndex(LSMInvertedIndexTestContext testCtx) throws IOException {
        TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
        List<IInvertedIndexSearchModifier> searchModifiers = new ArrayList<>();
        searchModifiers.add(new ConjunctiveSearchModifier());
        searchModifiers.add(new JaccardSearchModifier(1.0f));
        searchModifiers.add(new JaccardSearchModifier(0.8f));
        searchModifiers.add(new JaccardSearchModifier(0.5f));
        runTest(testCtx, tupleGen, searchModifiers);
    }

    // N-gram workloads additionally exercise edit-distance modifiers (thresholds 0..3).
    private void testNGramInvIndexIndex(LSMInvertedIndexTestContext testCtx) throws IOException {
        TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
        List<IInvertedIndexSearchModifier> searchModifiers = new ArrayList<>();
        searchModifiers.add(new ConjunctiveSearchModifier());
        searchModifiers.add(new JaccardSearchModifier(1.0f));
        searchModifiers.add(new JaccardSearchModifier(0.8f));
        searchModifiers.add(new JaccardSearchModifier(0.5f));
        searchModifiers.add(new EditDistanceSearchModifier(LSMInvertedIndexTestUtils.TEST_GRAM_LENGTH, 0));
        searchModifiers.add(new EditDistanceSearchModifier(LSMInvertedIndexTestUtils.TEST_GRAM_LENGTH, 1));
        searchModifiers.add(new EditDistanceSearchModifier(LSMInvertedIndexTestUtils.TEST_GRAM_LENGTH, 2));
        searchModifiers.add(new EditDistanceSearchModifier(LSMInvertedIndexTestUtils.TEST_GRAM_LENGTH, 3));
        runTest(testCtx, tupleGen, searchModifiers);
    }

    @Test
    public void wordTokensInvIndexTest() throws IOException {
        LSMInvertedIndexTestContext testCtx =
                LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness, invIndexType);
        testWordInvIndexIndex(testCtx);
    }

    @Test
    public void hashedWordTokensInvIndexTest() throws IOException {
        LSMInvertedIndexTestContext testCtx =
                LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness, invIndexType);
        testWordInvIndexIndex(testCtx);
    }

    @Test
    public void ngramTokensInvIndexTest() throws IOException {
        LSMInvertedIndexTestContext testCtx =
                LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness, invIndexType);
        testNGramInvIndexIndex(testCtx);
    }

    @Test
    public void hashedNGramTokensInvIndexTest() throws IOException {
        LSMInvertedIndexTestContext testCtx =
                LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness, invIndexType);
        testNGramInvIndexIndex(testCtx);
    }

}
| apache-2.0 |
agentmilindu/stratos | dependencies/jclouds/provider/aws-ec2/1.8.1-stratos/src/main/java/org/jclouds/aws/ec2/AWSEC2ProviderMetadata.java | 4168 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.aws.ec2;
import static org.jclouds.aws.ec2.reference.AWSEC2Constants.PROPERTY_EC2_AMI_QUERY;
import static org.jclouds.aws.ec2.reference.AWSEC2Constants.PROPERTY_EC2_CC_AMI_QUERY;
import static org.jclouds.aws.ec2.reference.AWSEC2Constants.PROPERTY_EC2_CC_REGIONS;
import static org.jclouds.compute.config.ComputeServiceProperties.TEMPLATE;
import static org.jclouds.compute.config.ComputeServiceProperties.TIMEOUT_NODE_SUSPENDED;
import java.net.URI;
import java.util.Properties;
import org.jclouds.aws.domain.Region;
import org.jclouds.providers.ProviderMetadata;
import org.jclouds.providers.internal.BaseProviderMetadata;
/**
 * Implementation of {@link org.jclouds.providers.ProviderMetadata} for Amazon's
 * Elastic Compute Cloud (EC2) provider.
 */
public class AWSEC2ProviderMetadata extends BaseProviderMetadata {

   public static Builder builder() {
      return new Builder();
   }

   @Override
   public Builder toBuilder() {
      return builder().fromProviderMetadata(this);
   }

   public AWSEC2ProviderMetadata() {
      super(builder());
   }

   public AWSEC2ProviderMetadata(Builder builder) {
      super(builder);
   }

   /**
    * Default property values for the aws-ec2 provider: region list, AMI queries
    * (including cluster-compute AMIs), and the default template.
    */
   public static Properties defaultProperties() {
      Properties properties = new Properties();
      // sometimes, like in ec2, stop takes a very long time, perhaps
      // due to volume management. one example spent 2 minutes moving
      // from stopping->stopped state on an ec2 micro
      properties.setProperty(TIMEOUT_NODE_SUSPENDED, 120 * 1000 + "");
      properties.putAll(Region.regionProperties());
      // Amazon Linux, Amazon Windows, alestic, canonical, and rightscale
      properties.setProperty(PROPERTY_EC2_AMI_QUERY,
               "owner-id=137112412989,801119661308,063491364108,099720109477,411009282317;state=available;image-type=machine");
      // amis that work with the cluster instances
      properties.setProperty(PROPERTY_EC2_CC_REGIONS, Region.US_EAST_1 + "," + Region.US_WEST_2 + "," + Region.EU_WEST_1);
      properties
               .setProperty(
                        PROPERTY_EC2_CC_AMI_QUERY,
                        "virtualization-type=hvm;architecture=x86_64;owner-id=137112412989,099720109477;hypervisor=xen;state=available;image-type=machine;root-device-type=ebs");
      properties.setProperty(TEMPLATE, "osFamily=AMZN_LINUX,os64Bit=true");
      return properties;
   }

   // Builder pre-populated with the aws-ec2 provider id, endpoints and metadata.
   public static class Builder extends BaseProviderMetadata.Builder {

      protected Builder() {
         id("aws-ec2")
         .name("Amazon Elastic Compute Cloud (EC2)")
         .apiMetadata(new AWSEC2ApiMetadata())
         .endpoint("https://ec2.us-east-1.amazonaws.com")
         .homepage(URI.create("http://aws.amazon.com/ec2"))
         .console(URI.create("https://console.aws.amazon.com/ec2/home"))
         .defaultProperties(AWSEC2ProviderMetadata.defaultProperties())
         .linkedServices("aws-ec2", "aws-elb", "aws-cloudwatch", "aws-s3", "aws-simpledb")
         .iso3166Codes("US-VA", "US-CA", "US-OR", "BR-SP", "IE", "SG", "AU-NSW", "JP-13");
      }

      @Override
      public AWSEC2ProviderMetadata build() {
         return new AWSEC2ProviderMetadata(this);
      }

      @Override
      public Builder fromProviderMetadata(
            ProviderMetadata in) {
         super.fromProviderMetadata(in);
         return this;
      }
   }
}
| apache-2.0 |
salyh/javamailspec | geronimo-j2ee-management_1.1_spec/src/main/java/javax/management/j2ee/statistics/JDBCStats.java | 1233 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
//
// This source code implements specifications defined by the Java
// Community Process. In order to remain compliant with the specification
// DO NOT add / change / or delete method signatures!
//
package javax.management.j2ee.statistics;
/**
* @version $Rev$
*/
public interface JDBCStats extends Stats {
    /** Returns statistics for the individual JDBC connections. */
    public JDBCConnectionStats[] getConnections();

    /** Returns statistics for the JDBC connection pools. */
    public JDBCConnectionPoolStats[] getConnectionPools();
}
CliffYuan/netty | src/main/java/org/jboss/netty/logging/OsgiLogger.java | 3715 | /*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jboss.netty.logging;
import org.osgi.service.log.LogService;
/**
* <a href="http://www.osgi.org/">OSGi</a> {@link LogService} logger.
*/
class OsgiLogger extends AbstractInternalLogger {
private final OsgiLoggerFactory parent;
private final InternalLogger fallback;
private final String name;
private final String prefix;
OsgiLogger(OsgiLoggerFactory parent, String name, InternalLogger fallback) {
this.parent = parent;
this.name = name;
this.fallback = fallback;
prefix = '[' + name + "] ";
}
public void debug(String msg) {
LogService logService = parent.getLogService();
if (logService != null) {
logService.log(LogService.LOG_DEBUG, prefix + msg);
} else {
fallback.debug(msg);
}
}
public void debug(String msg, Throwable cause) {
LogService logService = parent.getLogService();
if (logService != null) {
logService.log(LogService.LOG_DEBUG, prefix + msg, cause);
} else {
fallback.debug(msg, cause);
}
}
public void error(String msg) {
LogService logService = parent.getLogService();
if (logService != null) {
logService.log(LogService.LOG_ERROR, prefix + msg);
} else {
fallback.error(msg);
}
}
public void error(String msg, Throwable cause) {
LogService logService = parent.getLogService();
if (logService != null) {
logService.log(LogService.LOG_ERROR, prefix + msg, cause);
} else {
fallback.error(msg, cause);
}
}
public void info(String msg) {
LogService logService = parent.getLogService();
if (logService != null) {
logService.log(LogService.LOG_INFO, prefix + msg);
} else {
fallback.info(msg);
}
}
public void info(String msg, Throwable cause) {
LogService logService = parent.getLogService();
if (logService != null) {
logService.log(LogService.LOG_INFO, prefix + msg, cause);
} else {
fallback.info(msg, cause);
}
}
public boolean isDebugEnabled() {
return true;
}
public boolean isErrorEnabled() {
return true;
}
public boolean isInfoEnabled() {
return true;
}
public boolean isWarnEnabled() {
return true;
}
public void warn(String msg) {
LogService logService = parent.getLogService();
if (logService != null) {
logService.log(LogService.LOG_WARNING, prefix + msg);
} else {
fallback.warn(msg);
}
}
public void warn(String msg, Throwable cause) {
LogService logService = parent.getLogService();
if (logService != null) {
logService.log(LogService.LOG_WARNING, prefix + msg, cause);
} else {
fallback.warn(msg, cause);
}
}
@Override
public String toString() {
    // The logger is identified by its name alone.
    return name;
}
}
| apache-2.0 |
rcastro78/twitter-kit-android | twitter-core/src/main/java/com/twitter/sdk/android/core/internal/oauth/OAuth2Token.java | 3872 | /*
* Copyright (C) 2015 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.sdk.android.core.internal.oauth;
import android.os.Parcel;
import android.os.Parcelable;
import com.google.gson.annotations.SerializedName;
import com.twitter.sdk.android.core.AuthToken;
import com.twitter.sdk.android.core.TwitterAuthConfig;
import java.util.HashMap;
import java.util.Map;
/**
* OAuth2.0 token.
*/
public class OAuth2Token extends AuthToken implements Parcelable {

    public static final String TOKEN_TYPE_BEARER = "bearer";

    public static final Parcelable.Creator<OAuth2Token> CREATOR
            = new Parcelable.Creator<OAuth2Token>() {
        public OAuth2Token createFromParcel(Parcel in) {
            return new OAuth2Token(in);
        }

        public OAuth2Token[] newArray(int size) {
            return new OAuth2Token[size];
        }
    };

    // Serialized names match the fields of the OAuth 2.0 token endpoint response.
    @SerializedName("token_type")
    private final String tokenType;

    @SerializedName("access_token")
    private final String accessToken;

    public OAuth2Token(String tokenType, String accessToken) {
        super();
        this.tokenType = tokenType;
        this.accessToken = accessToken;
    }

    public OAuth2Token(String tokenType, String accessToken, long createdAt) {
        super(createdAt);
        this.tokenType = tokenType;
        this.accessToken = accessToken;
    }

    private OAuth2Token(Parcel in) {
        super();
        // Fields must be read in the same order writeToParcel() writes them.
        tokenType = in.readString();
        accessToken = in.readString();
    }

    public String getTokenType() {
        return tokenType;
    }

    public String getAccessToken() {
        return accessToken;
    }

    @Override
    public boolean isExpired() {
        // Oauth 2.0 tokens do not have a common expiration policy. Returning false indicates
        // the token is not known to have expired. App auth tokens only expire when manually
        // invalidated, while guest auth tokens are known to have expired after 3 hours.
        return false;
    }

    @Override
    public Map<String, String> getAuthHeaders(TwitterAuthConfig authConfig, String method,
            String url, Map<String, String> postParams) {
        // Bearer tokens only require an Authorization header; the request details are unused.
        final Map<String, String> authHeaders = new HashMap<>();
        authHeaders.put(HEADER_AUTHORIZATION, OAuth2Service.getAuthorizationHeader(this));
        return authHeaders;
    }

    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel out, int flags) {
        out.writeString(tokenType);
        out.writeString(accessToken);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final OAuth2Token other = (OAuth2Token) o;
        final boolean sameTokenType = (tokenType == null)
                ? (other.tokenType == null) : tokenType.equals(other.tokenType);
        final boolean sameAccessToken = (accessToken == null)
                ? (other.accessToken == null) : accessToken.equals(other.accessToken);
        return sameTokenType && sameAccessToken;
    }

    @Override
    public int hashCode() {
        final int typeHash = (tokenType != null) ? tokenType.hashCode() : 0;
        final int tokenHash = (accessToken != null) ? accessToken.hashCode() : 0;
        return 31 * typeHash + tokenHash;
    }
}
| apache-2.0 |
nmldiegues/stibt | infinispan/cli/cli-client/src/main/java/org/infinispan/cli/connection/Connection.java | 1581 | /*
* JBoss, Home of Professional Open Source
* Copyright 2012 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.infinispan.cli.connection;
import java.io.Closeable;
import java.util.Collection;
import org.infinispan.cli.CommandBuffer;
import org.infinispan.cli.Context;
/**
 * A CLI connection to an Infinispan instance, tracking the currently active
 * cache container and cache.
 */
public interface Connection extends Closeable {

    /**
     * Establishes the connection.
     *
     * @param context the CLI context
     * @param credentials credentials to authenticate with, if required
     * @throws Exception if the connection cannot be established
     */
    void connect(Context context, String credentials) throws Exception;

    /** Returns {@code true} if credentials are required before connecting. */
    boolean needsCredentials();

    /** Executes the commands held in the buffer against the connected instance. */
    void execute(Context context, CommandBuffer commandBuffer);

    /** Returns the name of the currently active cache. */
    String getActiveCache();

    /** Returns the name of the currently active cache container. */
    String getActiveContainer();

    /** Returns the names of the caches available on this connection. */
    Collection<String> getAvailableCaches();

    /** Returns the names of the cache containers available on this connection. */
    Collection<String> getAvailableContainers();

    /** Returns {@code true} if this connection is currently established. */
    boolean isConnected();

    /** Selects the active cache container by name. */
    void setActiveContainer(String name);
}
| apache-2.0 |
markflyhigh/incubator-beam | sdks/java/extensions/sorter/src/main/java/org/apache/beam/sdk/extensions/sorter/ExternalSorter.java | 3222 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.extensions.sorter;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
import java.io.Serializable;
/** Does an external sort of the provided values. */
public abstract class ExternalSorter implements Sorter {

  protected final Options options;

  ExternalSorter(Options options) {
    this.options = options;
  }

  /** Returns a {@link Sorter} configured with the given {@link Options}. */
  public static ExternalSorter create(Options options) {
    if (options.getSorterType() == Options.SorterType.HADOOP) {
      return HadoopExternalSorter.create(options);
    }
    return NativeExternalSorter.create(options);
  }

  /** {@link Options} contains configuration of the sorter. */
  public static class Options implements Serializable {

    /** Sorter type. */
    public enum SorterType {
      HADOOP,
      NATIVE
    }

    private String tempLocation = "/tmp";
    private int memoryMB = 100;
    private SorterType sorterType = SorterType.HADOOP;

    /** Sets the path to a temporary location where the sorter writes intermediate files. */
    public Options setTempLocation(String tempLocation) {
      if (tempLocation.startsWith("gs://")) {
        throw new IllegalArgumentException("Sorter doesn't support GCS temporary location.");
      }
      this.tempLocation = tempLocation;
      return this;
    }

    /** Returns the configured temporary location. */
    public String getTempLocation() {
      return tempLocation;
    }

    /**
     * Sets the size of the memory buffer in megabytes. Must be greater than zero and less than
     * 2048.
     */
    public Options setMemoryMB(int memoryMB) {
      checkArgument(memoryMB > 0, "memoryMB must be greater than zero");
      // Hadoop's external sort stores the number of available memory bytes in an int; the
      // upper bound prevents integer overflow.
      checkArgument(memoryMB < 2048, "memoryMB must be less than 2048");
      this.memoryMB = memoryMB;
      return this;
    }

    /** Returns the configured size of the memory buffer. */
    public int getMemoryMB() {
      return memoryMB;
    }

    /** Sets the sorter type. */
    public Options setSorterType(SorterType sorterType) {
      this.sorterType = sorterType;
      return this;
    }

    /** Returns the sorter type. */
    public SorterType getSorterType() {
      return sorterType;
    }
  }
}
| apache-2.0 |
iyounus/incubator-systemml | src/main/java/org/apache/sysml/utils/GenerateClassesForMLContext.java | 40009 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.utils;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.sysml.api.DMLScript;
import org.apache.sysml.api.mlcontext.MLContext;
import org.apache.sysml.api.mlcontext.MLResults;
import org.apache.sysml.api.mlcontext.Script;
import org.apache.sysml.api.mlcontext.ScriptExecutor;
import org.apache.sysml.parser.DMLProgram;
import org.apache.sysml.parser.DataIdentifier;
import org.apache.sysml.parser.Expression.DataType;
import org.apache.sysml.parser.Expression.ValueType;
import org.apache.sysml.parser.FunctionStatement;
import org.apache.sysml.parser.FunctionStatementBlock;
import org.apache.sysml.parser.LanguageException;
import org.apache.sysml.parser.Statement;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtConstructor;
import javassist.CtField;
import javassist.CtMethod;
import javassist.CtNewConstructor;
import javassist.CtNewMethod;
import javassist.Modifier;
import javassist.NotFoundException;
/**
* Automatically generate classes and methods for interaction with DML scripts
* and functions through the MLContext API.
*
*/
public class GenerateClassesForMLContext {
// Default source (script) directory and compiled-class output directory;
// both can be overridden via command-line args in main().
public static final String SOURCE = "scripts";
public static final String DESTINATION = "target/classes";
// Package roots for generated script classes and directory "convenience" classes.
public static final String BASE_DEST_PACKAGE = "org.apache.sysml";
public static final String CONVENIENCE_BASE_DEST_PACKAGE = "org.apache.sysml.api.mlcontext.convenience";
// Locations (relative to the destination dir) of the compiled MLContext API
// classes that makeCtClasses() loads into the javassist ClassPool.
public static final String PATH_TO_MLCONTEXT_CLASS = "org/apache/sysml/api/mlcontext/MLContext.class";
public static final String PATH_TO_MLRESULTS_CLASS = "org/apache/sysml/api/mlcontext/MLResults.class";
public static final String PATH_TO_SCRIPT_CLASS = "org/apache/sysml/api/mlcontext/Script.class";
public static final String PATH_TO_SCRIPTTYPE_CLASS = "org/apache/sysml/api/mlcontext/ScriptType.class";
public static final String PATH_TO_MATRIX_CLASS = "org/apache/sysml/api/mlcontext/Matrix.class";
public static final String PATH_TO_FRAME_CLASS = "org/apache/sysml/api/mlcontext/Frame.class";
// Effective source/destination for this run (possibly overridden by main() args).
public static String source = SOURCE;
public static String destination = DESTINATION;
// When true, the corresponding script subdirectories are skipped entirely.
public static boolean skipStagingDir = true;
public static boolean skipPerfTestDir = true;
public static boolean skipObsoleteDir = true;
public static boolean skipCompareBackendsDir = true;
/**
 * Entry point. Optional args override the defaults: one arg sets the source
 * directory; two args set source and destination directories.
 *
 * @param args
 *            [source] or [source, destination]
 * @throws Throwable
 *             if an error occurs during class generation
 */
public static void main(String[] args) throws Throwable {
    if (args.length == 2) {
        source = args[0];
        destination = args[1];
    } else if (args.length == 1) {
        source = args[0];
    }
    try {
        // ignore DML validation issues while parsing scripts for generation
        DMLScript.VALIDATOR_IGNORE_ISSUES = true;
        System.out.println("************************************");
        System.out.println("**** MLContext Class Generation ****");
        System.out.println("************************************");
        System.out.println("Source: " + source);
        System.out.println("Destination: " + destination);
        makeCtClasses();
        // generate one class per script, then the directory convenience classes,
        // then attach convenience methods to MLContext itself
        recurseDirectoriesForClassGeneration(source);
        String fullDirClassName = recurseDirectoriesForConvenienceClassGeneration(source);
        addConvenienceMethodsToMLContext(source, fullDirClassName);
    } finally {
        DMLScript.VALIDATOR_IGNORE_ISSUES = false;
    }
}
/**
 * Create compile-time (javassist) representations of the MLContext API
 * classes that generated classes later need to reference.
 *
 * <p>
 * The input streams are now opened with try-with-resources so the underlying
 * file handles are always released; the previous code leaked one stream per
 * class file.
 */
public static void makeCtClasses() {
    // all API class files are loaded the same way, so iterate instead of repeating
    String[] classPaths = { PATH_TO_MLCONTEXT_CLASS, PATH_TO_MLRESULTS_CLASS, PATH_TO_SCRIPT_CLASS,
            PATH_TO_SCRIPTTYPE_CLASS, PATH_TO_MATRIX_CLASS, PATH_TO_FRAME_CLASS };
    try {
        ClassPool pool = ClassPool.getDefault();
        for (String classPath : classPaths) {
            File classFile = new File(destination + File.separator + classPath);
            try (FileInputStream fis = new FileInputStream(classFile)) {
                pool.makeClass(fis);
            }
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (RuntimeException e) {
        e.printStackTrace();
    }
}
/**
* Add methods to MLContext to allow tab-completion to folders/packages
* (such as {@code ml.scripts()} and {@code ml.nn()}).
*
* @param source
* path to source directory (typically, the scripts directory)
* @param fullDirClassName
* the full name of the class representing the source (scripts)
* directory
*/
public static void addConvenienceMethodsToMLContext(String source, String fullDirClassName) {
    try {
        ClassPool pool = ClassPool.getDefault();
        CtClass ctMLContext = pool.get(MLContext.class.getName());
        CtClass dirClass = pool.get(fullDirClassName);
        // e.g. "...convenience.Scripts" becomes method name "scripts"
        String methodName = convertFullClassNameToConvenienceMethodName(fullDirClassName);
        System.out.println("Adding " + methodName + "() to " + ctMLContext.getName());
        // the generated method simply instantiates and returns the directory class
        String methodBody = "{ " + fullDirClassName + " z = new " + fullDirClassName + "(); return z; }";
        CtMethod ctMethod = CtNewMethod.make(Modifier.PUBLIC, dirClass, methodName, null, null, methodBody,
                ctMLContext);
        ctMLContext.addMethod(ctMethod);
        addPackageConvenienceMethodsToMLContext(source, ctMLContext);
        // write the modified MLContext class file back to the destination directory
        ctMLContext.writeFile(destination);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (RuntimeException e) {
        e.printStackTrace();
    } catch (NotFoundException e) {
        e.printStackTrace();
    } catch (CannotCompileException e) {
        e.printStackTrace();
    }
}
/**
* Add methods to MLContext to allow tab-completion to packages contained
* within the source directory (such as {@code ml.nn()}).
*
* @param dirPath
* path to source directory (typically, the scripts directory)
* @param ctMLContext
* javassist compile-time class representation of MLContext
*/
public static void addPackageConvenienceMethodsToMLContext(String dirPath, CtClass ctMLContext) {
    try {
        // only add direct-subdirectory methods when starting from the root scripts dir
        if (!SOURCE.equalsIgnoreCase(dirPath)) {
            return;
        }
        File dir = new File(dirPath);
        File[] subdirs = dir.listFiles(new FileFilter() {
            @Override
            public boolean accept(File f) {
                return f.isDirectory();
            }
        });
        for (File subdir : subdirs) {
            String subDirPath = dirPath + File.separator + subdir.getName();
            if (skipDir(subdir, false)) {
                continue;
            }
            String fullSubDirClassName = dirPathToFullDirClassName(subDirPath);
            ClassPool pool = ClassPool.getDefault();
            CtClass subDirClass = pool.get(fullSubDirClassName);
            // method name: subdirectory name, '-' replaced by '_', lower-cased
            String subDirName = subdir.getName();
            subDirName = subDirName.replaceAll("-", "_");
            subDirName = subDirName.toLowerCase();
            System.out.println("Adding " + subDirName + "() to " + ctMLContext.getName());
            // the generated method instantiates and returns the subdirectory class
            String methodBody = "{ " + fullSubDirClassName + " z = new " + fullSubDirClassName + "(); return z; }";
            CtMethod ctMethod = CtNewMethod.make(Modifier.PUBLIC, subDirClass, subDirName, null, null, methodBody,
                    ctMLContext);
            ctMLContext.addMethod(ctMethod);
        }
    } catch (NotFoundException e) {
        e.printStackTrace();
    } catch (CannotCompileException e) {
        e.printStackTrace();
    }
}
/**
 * Derive a convenience method name from the full name of a class
 * representing a directory: the simple class name, lower-cased.
 *
 * @param fullDirClassName
 *            the full name of the class representing a directory
 * @return method name
 */
public static String convertFullClassNameToConvenienceMethodName(String fullDirClassName) {
    String simpleName = fullDirClassName.substring(fullDirClassName.lastIndexOf(".") + 1);
    return simpleName.toLowerCase();
}
/**
* Generate convenience classes recursively. This allows for code such as
* {@code ml.scripts.algorithms...}.
*
* @param dirPath
* path to directory
* @return the full name of the class representing the dirPath directory
*/
public static String recurseDirectoriesForConvenienceClassGeneration(String dirPath) {
    try {
        File dir = new File(dirPath);
        String fullDirClassName = dirPathToFullDirClassName(dirPath);
        System.out.println("Generating Class: " + fullDirClassName);
        ClassPool pool = ClassPool.getDefault();
        CtClass ctDir = pool.makeClass(fullDirClassName);
        // one no-arg method per (non-skipped) subdirectory, returning the
        // subdirectory's convenience class
        File[] subdirs = dir.listFiles(new FileFilter() {
            @Override
            public boolean accept(File f) {
                return f.isDirectory();
            }
        });
        for (File subdir : subdirs) {
            String subDirPath = dirPath + File.separator + subdir.getName();
            if (skipDir(subdir, false)) {
                continue;
            }
            // depth-first: generate the subdirectory's class before referencing it
            String fullSubDirClassName = recurseDirectoriesForConvenienceClassGeneration(subDirPath);
            CtClass subDirClass = pool.get(fullSubDirClassName);
            String subDirName = subdir.getName();
            subDirName = subDirName.replaceAll("-", "_");
            subDirName = subDirName.toLowerCase();
            System.out.println("Adding " + subDirName + "() to " + fullDirClassName);
            String methodBody = "{ " + fullSubDirClassName + " z = new " + fullSubDirClassName + "(); return z; }";
            CtMethod ctMethod = CtNewMethod.make(Modifier.PUBLIC, subDirClass, subDirName, null, null, methodBody,
                    ctDir);
            ctDir.addMethod(ctMethod);
        }
        // one no-arg method per DML/PyDML script file, returning the generated
        // Script subclass for that file
        File[] scriptFiles = dir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return (name.toLowerCase().endsWith(".dml") || name.toLowerCase().endsWith(".pydml"));
            }
        });
        for (File scriptFile : scriptFiles) {
            String scriptFilePath = scriptFile.getPath();
            String fullScriptClassName = BASE_DEST_PACKAGE + "."
                    + scriptFilePathToFullClassNameNoBase(scriptFilePath);
            CtClass scriptClass = pool.get(fullScriptClassName);
            String methodName = scriptFilePathToSimpleClassName(scriptFilePath);
            String methodBody = "{ " + fullScriptClassName + " z = new " + fullScriptClassName + "(); return z; }";
            CtMethod ctMethod = CtNewMethod.make(Modifier.PUBLIC, scriptClass, methodName, null, null, methodBody,
                    ctDir);
            ctDir.addMethod(ctMethod);
        }
        ctDir.writeFile(destination);
        return fullDirClassName;
    } catch (RuntimeException e) {
        e.printStackTrace();
    } catch (CannotCompileException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (NotFoundException e) {
        e.printStackTrace();
    }
    // reached only if generation failed; caller should handle a null class name
    return null;
}
/**
 * Map a directory path to the fully-qualified name of its generated
 * convenience class: the last path segment becomes the capitalized simple
 * class name, and any preceding segments become the lower-cased package
 * (with '-' replaced by '_').
 *
 * @param dirPath
 *            path to directory
 * @return the full name of the class representing the dirPath directory
 */
public static String dirPathToFullDirClassName(String dirPath) {
    int sep = dirPath.lastIndexOf(File.separator);
    if (sep < 0) {
        // single segment: only a simple class name under the base package
        String simple = dirPath.replace("-", "_");
        simple = simple.substring(0, 1).toUpperCase() + simple.substring(1);
        return CONVENIENCE_BASE_DEST_PACKAGE + "." + simple;
    }
    String pkg = dirPath.substring(0, sep).replace("-", "_").replace(File.separator, ".").toLowerCase();
    String simple = dirPath.substring(sep + 1).replace("-", "_");
    simple = simple.substring(0, 1).toUpperCase() + simple.substring(1);
    return CONVENIENCE_BASE_DEST_PACKAGE + "." + pkg + "." + simple;
}
/**
 * Decide whether a directory (and all of its subdirectories) should be
 * skipped during generation, based on its name and the corresponding
 * skip flag.
 *
 * @param dir
 *            path to directory to check
 * @param displayMessage
 *            if {@code true}, display skip information to standard output
 * @return {@code true} if the directory should be skipped, {@code false}
 *         otherwise
 */
public static boolean skipDir(File dir, boolean displayMessage) {
    String dirName = dir.getName();
    if (skipStagingDir && "staging".equalsIgnoreCase(dirName)) {
        if (displayMessage) {
            System.out.println("Skipping staging directory: " + dir.getPath());
        }
        return true;
    }
    if (skipPerfTestDir && "perftest".equalsIgnoreCase(dirName)) {
        if (displayMessage) {
            System.out.println("Skipping perftest directory: " + dir.getPath());
        }
        return true;
    }
    if (skipObsoleteDir && "obsolete".equalsIgnoreCase(dirName)) {
        if (displayMessage) {
            System.out.println("Skipping obsolete directory: " + dir.getPath());
        }
        return true;
    }
    if (skipCompareBackendsDir && "compare_backends".equalsIgnoreCase(dirName)) {
        if (displayMessage) {
            System.out.println("Skipping compare_backends directory: " + dir.getPath());
        }
        return true;
    }
    return false;
}
/**
* Recursively traverse the directories to create classes representing the
* script files.
*
* @param dirPath
* path to directory
*/
public static void recurseDirectoriesForClassGeneration(String dirPath) {
    File dir = new File(dirPath);
    // generate a Script subclass for every script file in this directory
    iterateScriptFilesInDirectory(dir);
    File[] subdirs = dir.listFiles(new FileFilter() {
        @Override
        public boolean accept(File f) {
            return f.isDirectory();
        }
    });
    for (File subdir : subdirs) {
        String subdirpath = dirPath + File.separator + subdir.getName();
        if (skipDir(subdir, true)) {
            continue;
        }
        recurseDirectoriesForClassGeneration(subdirpath);
    }
}
/**
 * Generate a class for every DML/PyDML script file found directly in the
 * given directory (non-recursive).
 *
 * @param dir
 *            the directory to iterate through
 */
public static void iterateScriptFilesInDirectory(File dir) {
    FilenameFilter scriptFilter = new FilenameFilter() {
        @Override
        public boolean accept(File d, String name) {
            String lower = name.toLowerCase();
            return lower.endsWith(".dml") || lower.endsWith(".pydml");
        }
    };
    for (File scriptFile : dir.listFiles(scriptFilter)) {
        createScriptClass(scriptFile.getPath());
    }
}
/**
 * Obtain the relative package for a script file: the directory part of the
 * path with separators turned into dots, '-' replaced by '_', lower-cased.
 * For example, {@code scripts/algorithms/LinearRegCG.dml} resolves to
 * {@code scripts.algorithms}.
 *
 * @param scriptFilePath
 *            the path to a script file
 * @return the relative package for a script file
 */
public static String scriptFilePathToPackageNoBase(String scriptFilePath) {
    String dirPart = scriptFilePath.substring(0, scriptFilePath.lastIndexOf(File.separator));
    return dirPart.replace("-", "_").replace(File.separator, ".").toLowerCase();
}
/**
 * Obtain the simple class name for a script file: the file name with '-'
 * replaced by '_', first character capitalized, extension removed. For
 * example, {@code scripts/algorithms/LinearRegCG.dml} resolves to
 * {@code LinearRegCG}.
 *
 * @param scriptFilePath
 *            the path to a script file
 * @return the simple class name for a script file
 */
public static String scriptFilePathToSimpleClassName(String scriptFilePath) {
    String fileName = scriptFilePath.substring(scriptFilePath.lastIndexOf(File.separator) + 1);
    fileName = fileName.replace("-", "_");
    fileName = fileName.substring(0, 1).toUpperCase() + fileName.substring(1);
    return fileName.substring(0, fileName.indexOf("."));
}
/**
 * Obtain the relative full class name for a script file by joining its
 * relative package and its simple class name. For example,
 * {@code scripts/algorithms/LinearRegCG.dml} resolves to
 * {@code scripts.algorithms.LinearRegCG}.
 *
 * @param scriptFilePath
 *            the path to a script file
 * @return the relative full class name for a script file
 */
public static String scriptFilePathToFullClassNameNoBase(String scriptFilePath) {
    return scriptFilePathToPackageNoBase(scriptFilePath) + "." + scriptFilePathToSimpleClassName(scriptFilePath);
}
/**
* Convert a script file to a Java class that extends the MLContext API's
* Script class.
*
* @param scriptFilePath
* the path to a script file
*/
public static void createScriptClass(String scriptFilePath) {
    try {
        String fullScriptClassName = BASE_DEST_PACKAGE + "." + scriptFilePathToFullClassNameNoBase(scriptFilePath);
        System.out.println("Generating Class: " + fullScriptClassName);
        ClassPool pool = ClassPool.getDefault();
        CtClass ctNewScript = pool.makeClass(fullScriptClassName);
        CtClass ctScript = pool.get(Script.class.getName());
        ctNewScript.setSuperclass(ctScript);
        // constructor body is produced by scriptConstructorBody() (defined
        // elsewhere in this file)
        CtConstructor ctCon = new CtConstructor(null, ctNewScript);
        ctCon.setBody(scriptConstructorBody(scriptFilePath));
        ctNewScript.addConstructor(ctCon);
        // add one method per DML/PyDML function defined in the script
        addFunctionMethods(scriptFilePath, ctNewScript);
        ctNewScript.writeFile(destination);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (RuntimeException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (CannotCompileException e) {
        e.printStackTrace();
    } catch (NotFoundException e) {
        e.printStackTrace();
    }
}
/**
* Create a DMLProgram from a script file.
*
* @param scriptFilePath
* the path to a script file
* @return the DMLProgram generated by the script file
*/
public static DMLProgram dmlProgramFromScriptFilePath(String scriptFilePath) {
    String scriptString = fileToString(scriptFilePath);
    Script script = new Script(scriptString);
    // executor that only sets up and parses the script (no execution), so the
    // resulting DMLProgram can be inspected
    ScriptExecutor se = new ScriptExecutor() {
        @Override
        public MLResults execute(Script script) {
            setup(script);
            parseScript();
            return null;
        }
    };
    se.execute(script);
    DMLProgram dmlProgram = se.getDmlProgram();
    return dmlProgram;
}
/**
* Add methods to a derived script class to allow invocation of script
* functions.
*
* @param scriptFilePath
* the path to a script file
* @param ctNewScript
* the javassist compile-time class representation of a script
*/
public static void addFunctionMethods(String scriptFilePath, CtClass ctNewScript) {
    try {
        DMLProgram dmlProgram = dmlProgramFromScriptFilePath(scriptFilePath);
        if (dmlProgram == null) {
            System.out.println("Could not generate DML Program for: " + scriptFilePath);
            return;
        }
        // only functions in the default namespace are surfaced as methods
        Map<String, FunctionStatementBlock> defaultNsFsbsMap = dmlProgram
                .getFunctionStatementBlocks(DMLProgram.DEFAULT_NAMESPACE);
        List<FunctionStatementBlock> fsbs = new ArrayList<FunctionStatementBlock>();
        fsbs.addAll(defaultNsFsbsMap.values());
        for (FunctionStatementBlock fsb : fsbs) {
            ArrayList<Statement> sts = fsb.getStatements();
            for (Statement st : sts) {
                if (!(st instanceof FunctionStatement)) {
                    continue;
                }
                FunctionStatement fs = (FunctionStatement) st;
                // per function: one invocation method plus two description
                // methods (full source and doc-comment-only)
                String dmlFunctionCall = generateDmlFunctionCall(scriptFilePath, fs);
                String functionCallMethod = generateFunctionCallMethod(scriptFilePath, fs, dmlFunctionCall);
                CtMethod m = CtNewMethod.make(functionCallMethod, ctNewScript);
                ctNewScript.addMethod(m);
                addDescriptionFunctionCallMethod(fs, scriptFilePath, ctNewScript, false);
                addDescriptionFunctionCallMethod(fs, scriptFilePath, ctNewScript, true);
            }
        }
    } catch (LanguageException e) {
        System.out.println("Could not add function methods for " + ctNewScript.getName());
    } catch (CannotCompileException e) {
        System.out.println("Could not add function methods for " + ctNewScript.getName());
    } catch (RuntimeException e) {
        System.out.println("Could not add function methods for " + ctNewScript.getName());
    }
}
/**
* Create a method that returns either: (1) the full function body, or (2)
* the function body up to the end of the documentation comment for the
* function. If (1) is generated, the method name will be followed
* "__source". If (2) is generated, the method name will be followed by
* "__docs". If (2) is generated but no end of documentation comment is
* detected, the full function body will be displayed.
*
* @param fs
* a SystemML function statement
* @param scriptFilePath
* the path to a script file
* @param ctNewScript
* the javassist compile-time class representation of a script
* @param full
* if {@code true}, create method to return full function body;
* if {@code false}, create method to return the function body up
* to the end of the documentation comment
*/
public static void addDescriptionFunctionCallMethod(FunctionStatement fs, String scriptFilePath,
        CtClass ctNewScript, boolean full) {
    try {
        int bl = fs.getBeginLine();
        int el = fs.getEndLine();
        File f = new File(scriptFilePath);
        List<String> lines = FileUtils.readLines(f);
        int end = el;
        if (!full) {
            // truncate at the first line containing the end of a documentation
            // comment ("*/"); if none is found, the full body is kept
            for (int i = bl - 1; i < el; i++) {
                String line = lines.get(i);
                if (line.contains("*/")) {
                    end = i + 1;
                    break;
                }
            }
        }
        List<String> sub = lines.subList(bl - 1, end);
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < sub.size(); i++) {
            String line = sub.get(i);
            // escape each line so the text can be embedded in a generated string literal
            String escapeLine = StringEscapeUtils.escapeJava(line);
            sb.append(escapeLine);
            sb.append("\\n");
        }
        String functionString = sb.toString();
        String docFunctionCallMethod = generateDescriptionFunctionCallMethod(fs, functionString, full);
        CtMethod m = CtNewMethod.make(docFunctionCallMethod, ctNewScript);
        ctNewScript.addMethod(m);
    } catch (IOException e) {
        e.printStackTrace();
    } catch (CannotCompileException e) {
        e.printStackTrace();
    }
}
/**
 * Generate the source of a method that returns either (1) the full function
 * body, or (2) the function body up to the end of its documentation comment.
 * For (1) the method name is suffixed with "__source"; for (2) with
 * "__docs".
 *
 * @param fs
 *            a SystemML function statement
 * @param functionString
 *            either the full function body or the function body up to the
 *            end of the documentation comment
 * @param full
 *            if {@code true}, suffix the method name with "__source";
 *            if {@code false}, suffix it with "__docs"
 * @return string representation of the function description method
 */
public static String generateDescriptionFunctionCallMethod(FunctionStatement fs, String functionString,
        boolean full) {
    String suffix = full ? "__source" : "__docs";
    return "public String " + fs.getName() + suffix + "() {\n"
            + "String docString = \"" + functionString + "\";\n"
            + "return docString;\n"
            + "}\n";
}
/**
 * Obtain a string representation of a parameter type, where a Matrix or
 * Frame is represented by its full class name.
 *
 * @param param
 *            the function parameter
 * @return string representation of a parameter type, or {@code null} if
 *         the type is not recognized
 */
public static String getParamTypeAsString(DataIdentifier param) {
    DataType dt = param.getDataType();
    if (dt == DataType.MATRIX) {
        return "org.apache.sysml.api.mlcontext.Matrix";
    }
    if (dt == DataType.FRAME) {
        return "org.apache.sysml.api.mlcontext.Frame";
    }
    if (dt == DataType.SCALAR) {
        switch (param.getValueType()) {
        case INT:
            return "long";
        case DOUBLE:
            return "double";
        case BOOLEAN:
            return "boolean";
        case STRING:
            return "String";
        default:
            break;
        }
    }
    return null;
}
/**
 * Obtain a string representation of a parameter type, where a Matrix or
 * Frame is represented by its simple class name.
 *
 * <p>
 * Delegates to {@link #getParamTypeAsString(DataIdentifier)} and strips any
 * package qualifier, keeping the two type mappings consistent instead of
 * duplicating the mapping logic.
 *
 * @param param
 *            the function parameter
 * @return string representation of a parameter type, or {@code null} if
 *         the type is not recognized
 */
public static String getSimpleParamTypeAsString(DataIdentifier param) {
    String fullType = getParamTypeAsString(param);
    if (fullType == null) {
        return null;
    }
    // "long"/"double"/"boolean"/"String" contain no '.', so they pass through unchanged
    return fullType.substring(fullType.lastIndexOf('.') + 1);
}
/**
 * Obtain the full class name for a class that encapsulates the outputs of a
 * function: the script path (lower-cased, '-' to '_', extension removed)
 * becomes the package, and the capitalized function name plus "_output"
 * becomes the simple class name.
 *
 * @param scriptFilePath
 *            the path to a script file
 * @param fs
 *            a SystemML function statement
 * @return the full class name for a class that encapsulates the outputs of
 *         a function
 */
public static String getFullFunctionOutputClassName(String scriptFilePath, FunctionStatement fs) {
    String pkg = scriptFilePath.replace("-", "_").replace(File.separator, ".").toLowerCase();
    pkg = pkg.substring(0, pkg.lastIndexOf("."));
    String simple = fs.getName();
    simple = simple.substring(0, 1).toUpperCase() + simple.substring(1) + "_output";
    return BASE_DEST_PACKAGE + "." + pkg + "." + simple;
}
/**
 * Create (via javassist) and write to disk a class that encapsulates the
 * outputs of a function: one public field per output, a constructor taking
 * all outputs in order, and a toString listing each output.
 * <p>
 * No class is generated for functions with zero or one outputs, since a
 * single output is returned directly rather than wrapped.
 *
 * @param scriptFilePath
 *            the path to a script file
 * @param fs
 *            a SystemML function statement
 */
public static void createFunctionOutputClass(String scriptFilePath, FunctionStatement fs) {
    try {
        ArrayList<DataIdentifier> oparams = fs.getOutputParams();
        // Note: if a function returns 1 output, simply output it rather
        // than encapsulating it in a function output class
        if ((oparams.size() == 0) || (oparams.size() == 1)) {
            return;
        }
        String fullFunctionOutputClassName = getFullFunctionOutputClassName(scriptFilePath, fs);
        System.out.println("Generating Class: " + fullFunctionOutputClassName);

        ClassPool pool = ClassPool.getDefault();
        CtClass ctFuncOut = pool.makeClass(fullFunctionOutputClassName);

        // add fields: one public field per function output parameter
        for (int i = 0; i < oparams.size(); i++) {
            DataIdentifier oparam = oparams.get(i);
            String type = getParamTypeAsString(oparam);
            String name = oparam.getName();
            String fstring = "public " + type + " " + name + ";";
            CtField field = CtField.make(fstring, ctFuncOut);
            ctFuncOut.addField(field);
        }

        // add constructor that assigns every output field, parameters in
        // declaration order
        String simpleFuncOutClassName = fullFunctionOutputClassName
                .substring(fullFunctionOutputClassName.lastIndexOf(".") + 1);
        StringBuilder con = new StringBuilder();
        con.append("public " + simpleFuncOutClassName + "(");
        for (int i = 0; i < oparams.size(); i++) {
            if (i > 0) {
                con.append(", ");
            }
            DataIdentifier oparam = oparams.get(i);
            String type = getParamTypeAsString(oparam);
            String name = oparam.getName();
            con.append(type + " " + name);
        }
        con.append(") {\n");
        for (int i = 0; i < oparams.size(); i++) {
            DataIdentifier oparam = oparams.get(i);
            String name = oparam.getName();
            con.append("this." + name + "=" + name + ";\n");
        }
        con.append("}\n");
        String cstring = con.toString();
        CtConstructor ctCon = CtNewConstructor.make(cstring, ctFuncOut);
        ctFuncOut.addConstructor(ctCon);

        // add toString that emits one "name (SimpleType): value" line per output
        StringBuilder s = new StringBuilder();
        s.append("public String toString(){\n");
        s.append("StringBuilder sb = new StringBuilder();\n");
        for (int i = 0; i < oparams.size(); i++) {
            DataIdentifier oparam = oparams.get(i);
            String name = oparam.getName();
            s.append("sb.append(\"" + name + " (" + getSimpleParamTypeAsString(oparam) + "): \" + " + name
                    + " + \"\\n\");\n");
        }
        s.append("String str = sb.toString();\nreturn str;\n");
        s.append("}\n");
        String toStr = s.toString();
        CtMethod toStrMethod = CtNewMethod.make(toStr, ctFuncOut);
        ctFuncOut.addMethod(toStrMethod);

        // write the generated .class file under the destination directory
        ctFuncOut.writeFile(destination);
    } catch (RuntimeException e) {
        // best-effort generation: log and continue with the next function
        e.printStackTrace();
    } catch (CannotCompileException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
/**
 * Obtain the source of a convenience method that invokes a script function
 * through the MLContext API. The generated method's return type is void
 * (no outputs), the output's own type (one output), or the generated
 * function-output class (multiple outputs).
 *
 * @param scriptFilePath
 *            the path to a script file
 * @param fs
 *            a SystemML function statement
 * @param dmlFunctionCall
 *            a string representing the invocation of a script function
 * @return string representation of a method that performs a function call
 */
public static String generateFunctionCallMethod(String scriptFilePath, FunctionStatement fs,
        String dmlFunctionCall) {
    // make sure the output-encapsulating class exists when there are >= 2 outputs
    createFunctionOutputClass(scriptFilePath, fs);
    StringBuilder sb = new StringBuilder();
    sb.append("public ");
    // begin return type
    ArrayList<DataIdentifier> oparams = fs.getOutputParams();
    if (oparams.size() == 0) {
        sb.append("void");
    } else if (oparams.size() == 1) {
        // if 1 output, no need to encapsulate it, so return the output
        // directly
        DataIdentifier oparam = oparams.get(0);
        String type = getParamTypeAsString(oparam);
        sb.append(type);
    } else {
        String fullFunctionOutputClassName = getFullFunctionOutputClassName(scriptFilePath, fs);
        sb.append(fullFunctionOutputClassName);
    }
    sb.append(" ");
    // end return type
    sb.append(fs.getName());
    sb.append("(");
    // generated parameter list: one Object parameter per function input
    ArrayList<DataIdentifier> inputParams = fs.getInputParams();
    for (int i = 0; i < inputParams.size(); i++) {
        if (i > 0) {
            sb.append(", ");
        }
        DataIdentifier inputParam = inputParams.get(i);
        /*
         * Note: Using Object is currently preferrable to using
         * datatype/valuetype to explicitly set the input type to
         * Integer/Double/Boolean/String since Object allows the automatic
         * handling of things such as automatic conversions from longs to
         * ints.
         */
        sb.append("Object ");
        sb.append(inputParam.getName());
    }
    sb.append(") {\n");
    // method body: build a Script, register ins/outs, execute
    sb.append("String scriptString = \"" + dmlFunctionCall + "\";\n");
    sb.append(
            "org.apache.sysml.api.mlcontext.Script script = new org.apache.sysml.api.mlcontext.Script(scriptString);\n");
    if ((inputParams.size() > 0) || (oparams.size() > 0)) {
        sb.append("script");
    }
    for (int i = 0; i < inputParams.size(); i++) {
        DataIdentifier inputParam = inputParams.get(i);
        String name = inputParam.getName();
        sb.append(".in(\"" + name + "\", " + name + ")");
    }
    for (int i = 0; i < oparams.size(); i++) {
        DataIdentifier outputParam = oparams.get(i);
        String name = outputParam.getName();
        sb.append(".out(\"" + name + "\")");
    }
    if ((inputParams.size() > 0) || (oparams.size() > 0)) {
        sb.append(";\n");
    }
    sb.append("org.apache.sysml.api.mlcontext.MLResults results = script.execute();\n");
    // extract results: typed getter per output, chosen by data/value type
    if (oparams.size() == 0) {
        sb.append("return;\n");
    } else if (oparams.size() == 1) {
        // single output is returned directly, no wrapper class
        DataIdentifier o = oparams.get(0);
        DataType dt = o.getDataType();
        ValueType vt = o.getValueType();
        if ((dt == DataType.SCALAR) && (vt == ValueType.INT)) {
            sb.append("long res = results.getLong(\"" + o.getName() + "\");\nreturn res;\n");
        } else if ((dt == DataType.SCALAR) && (vt == ValueType.DOUBLE)) {
            sb.append("double res = results.getDouble(\"" + o.getName() + "\");\nreturn res;\n");
        } else if ((dt == DataType.SCALAR) && (vt == ValueType.BOOLEAN)) {
            sb.append("boolean res = results.getBoolean(\"" + o.getName() + "\");\nreturn res;\n");
        } else if ((dt == DataType.SCALAR) && (vt == ValueType.STRING)) {
            sb.append("String res = results.getString(\"" + o.getName() + "\");\nreturn res;\n");
        } else if (dt == DataType.MATRIX) {
            sb.append("org.apache.sysml.api.mlcontext.Matrix res = results.getMatrix(\"" + o.getName()
                    + "\");\nreturn res;\n");
        } else if (dt == DataType.FRAME) {
            sb.append("org.apache.sysml.api.mlcontext.Frame res = results.getFrame(\"" + o.getName()
                    + "\");\nreturn res;\n");
        }
    } else {
        // multiple outputs: pull each into a local (lower-cased name), then
        // wrap all of them in the generated function-output class
        for (int i = 0; i < oparams.size(); i++) {
            DataIdentifier outputParam = oparams.get(i);
            String name = outputParam.getName().toLowerCase();
            String type = getParamTypeAsString(outputParam);
            DataType dt = outputParam.getDataType();
            ValueType vt = outputParam.getValueType();
            if ((dt == DataType.SCALAR) && (vt == ValueType.INT)) {
                sb.append(type + " " + name + " = results.getLong(\"" + outputParam.getName() + "\");\n");
            } else if ((dt == DataType.SCALAR) && (vt == ValueType.DOUBLE)) {
                sb.append(type + " " + name + " = results.getDouble(\"" + outputParam.getName() + "\");\n");
            } else if ((dt == DataType.SCALAR) && (vt == ValueType.BOOLEAN)) {
                sb.append(type + " " + name + " = results.getBoolean(\"" + outputParam.getName() + "\");\n");
            } else if ((dt == DataType.SCALAR) && (vt == ValueType.STRING)) {
                sb.append(type + " " + name + " = results.getString(\"" + outputParam.getName() + "\");\n");
            } else if (dt == DataType.MATRIX) {
                sb.append(type + " " + name + " = results.getMatrix(\"" + outputParam.getName() + "\");\n");
            } else if (dt == DataType.FRAME) {
                sb.append(type + " " + name + " = results.getFrame(\"" + outputParam.getName() + "\");\n");
            }
        }
        String ffocn = getFullFunctionOutputClassName(scriptFilePath, fs);
        sb.append(ffocn + " res = new " + ffocn + "(");
        for (int i = 0; i < oparams.size(); i++) {
            if (i > 0) {
                sb.append(", ");
            }
            DataIdentifier outputParam = oparams.get(i);
            String name = outputParam.getName().toLowerCase();
            sb.append(name);
        }
        sb.append(");\nreturn res;\n");
    }
    sb.append("}\n");
    return sb.toString();
}
/**
 * Obtain the source of a method that invokes a script function and returns
 * the results as an MLResults object. Currently this method is not used.
 *
 * @param scriptFilePath
 *            the path to a script file
 * @param fs
 *            a SystemML function statement
 * @param dmlFunctionCall
 *            a string representing the invocation of a script function
 * @return string representation of a method that performs a function call
 */
public static String generateFunctionCallMethodMLResults(String scriptFilePath, FunctionStatement fs,
        String dmlFunctionCall) {
    StringBuilder sb = new StringBuilder();
    sb.append("public org.apache.sysml.api.mlcontext.MLResults ");
    sb.append(fs.getName());
    sb.append("(");
    ArrayList<DataIdentifier> ins = fs.getInputParams();
    boolean first = true;
    for (DataIdentifier in : ins) {
        if (!first) {
            sb.append(", ");
        }
        first = false;
        /*
         * Note: Object (rather than a specific scalar type such as
         * Integer/Double/Boolean/String) lets the MLContext API handle
         * automatic conversions such as long -> int.
         */
        sb.append("Object ");
        sb.append(in.getName());
    }
    sb.append(") {\n");
    sb.append("String scriptString = \"" + dmlFunctionCall + "\";\n");
    sb.append(
            "org.apache.sysml.api.mlcontext.Script script = new org.apache.sysml.api.mlcontext.Script(scriptString);\n");
    ArrayList<DataIdentifier> outs = fs.getOutputParams();
    // chain .in(...)/.out(...) registrations onto "script" only when needed
    if (!ins.isEmpty() || !outs.isEmpty()) {
        sb.append("script");
        for (DataIdentifier in : ins) {
            sb.append(".in(\"" + in.getName() + "\", " + in.getName() + ")");
        }
        for (DataIdentifier out : outs) {
            sb.append(".out(\"" + out.getName() + "\")");
        }
        sb.append(";\n");
    }
    sb.append("org.apache.sysml.api.mlcontext.MLResults results = script.execute();\n");
    sb.append("return results;\n");
    sb.append("}\n");
    return sb.toString();
}
/**
 * Obtain the DML representing a function invocation: a source statement
 * importing the script under the "mlcontextns" namespace followed by the
 * namespaced call with its outputs assigned.
 *
 * @param scriptFilePath
 *            the path to a script file
 * @param fs
 *            a SystemML function statement
 * @return string representation of a DML function invocation
 */
public static String generateDmlFunctionCall(String scriptFilePath, FunctionStatement fs) {
    StringBuilder sb = new StringBuilder();
    sb.append("source('" + scriptFilePath + "') as mlcontextns;");

    // left-hand side: nothing, a single output, or a [a, b, ...] list
    ArrayList<DataIdentifier> outs = fs.getOutputParams();
    int numOuts = outs.size();
    if (numOuts == 0) {
        sb.append("mlcontextns::");
    } else if (numOuts == 1) {
        sb.append(outs.get(0).getName());
        sb.append(" = mlcontextns::");
    } else {
        sb.append("[");
        boolean first = true;
        for (DataIdentifier out : outs) {
            if (!first) {
                sb.append(", ");
            }
            first = false;
            sb.append(out.getName());
        }
        sb.append("] = mlcontextns::");
    }

    // right-hand side: the namespaced call with the input names as arguments
    sb.append(fs.getName());
    sb.append("(");
    boolean first = true;
    for (DataIdentifier in : fs.getInputParams()) {
        if (!first) {
            sb.append(", ");
        }
        first = false;
        sb.append(in.getName());
    }
    sb.append(");");
    return sb.toString();
}
/**
 * Obtain the content of a file as a string, read with a plain FileReader
 * (platform default charset, matching the original behavior).
 *
 * @param filePath
 *            the path to a file
 * @return the file content as a string, or {@code null} if the file could
 *         not be read (the error is printed to stderr)
 */
public static String fileToString(String filePath) {
    // try-with-resources fixes the original's leak: the FileReader was only
    // closed on the happy path, so a failing read() left it open.
    try (FileReader fr = new FileReader(new File(filePath))) {
        StringBuilder sb = new StringBuilder();
        int n;
        char[] charArray = new char[1024];
        while ((n = fr.read(charArray)) > 0) {
            sb.append(charArray, 0, n);
        }
        return sb.toString();
    } catch (IOException e) {
        // FileNotFoundException is an IOException, so one catch suffices;
        // preserve the original best-effort contract (log and return null)
        e.printStackTrace();
    }
    return null;
}
/**
 * Obtain a constructor body for a Script subclass that sets the
 * scriptString based on the content of a script file loaded from the
 * classpath at runtime.
 *
 * @param scriptFilePath
 *            the path to a script file
 * @return constructor body for a Script subclass that sets the scriptString
 *         based on the content of a script file
 */
public static String scriptConstructorBody(String scriptFilePath) {
    // Single concatenated literal; content is identical to the original
    // StringBuilder-based construction.
    return "{"
            + "String scriptFilePath = \"" + scriptFilePath + "\";"
            + "java.io.InputStream is = org.apache.sysml.api.mlcontext.Script.class.getResourceAsStream(\"/\"+scriptFilePath);"
            + "java.io.InputStreamReader isr = new java.io.InputStreamReader(is);"
            + "int n;"
            + "char[] charArray = new char[1024];"
            + "StringBuilder s = new StringBuilder();"
            + "try {"
            + " while ((n = isr.read(charArray)) > 0) {"
            + " s.append(charArray, 0, n);"
            + " }"
            + "} catch (java.io.IOException e) {"
            + " e.printStackTrace();"
            + "}"
            + "setScriptString(s.toString());"
            + "}";
}
}
| apache-2.0 |
roberthafner/flowable-engine | modules/flowable-form-model/src/main/java/org/activiti/form/model/ExpressionFormField.java | 922 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.form.model;
/**
 * Form field whose value comes from an expression (presumably evaluated by
 * the form engine at runtime — confirm against the rendering code) rather
 * than direct user entry.
 *
 * @author Tijs Rademakers
 */
public class ExpressionFormField extends FormField {

    private static final long serialVersionUID = 1L;

    // The expression text backing this field's value.
    protected String expression;

    /** Returns the expression backing this field's value. */
    public String getExpression() {
        return expression;
    }

    /** Sets the expression backing this field's value. */
    public void setExpression(String expression) {
        this.expression = expression;
    }
}
struberg/deltaspike | deltaspike/modules/jpa/impl/src/test/java/org/apache/deltaspike/test/jpa/api/transactional/multipleinjection/manual/BeanManagedlTransactionTest.java | 3783 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.deltaspike.test.jpa.api.transactional.multipleinjection.manual;
import org.apache.deltaspike.core.api.projectstage.ProjectStage;
import org.apache.deltaspike.core.util.ProjectStageProducer;
import org.apache.deltaspike.jpa.impl.transaction.context.TransactionContextExtension;
import org.apache.deltaspike.test.category.SeCategory;
import org.apache.deltaspike.test.util.ArchiveUtils;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.Asset;
import org.jboss.shrinkwrap.api.asset.StringAsset;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import javax.enterprise.inject.spi.Extension;
import javax.inject.Inject;
/**
 * Same test as {@link ManualTransactionTest} but now with a UserTransaction instead
 * of manual EM Tx.
 */
@RunWith(Arquillian.class)
@Category(SeCategory.class)
public class BeanManagedlTransactionTest
{
    // beans.xml that activates BeanManagedUserTransactionStrategy as a CDI
    // alternative, so @Transactional work runs through the (mock) JTA
    // UserTransaction rather than resource-local EntityManager transactions.
    private static Asset beansXml = new StringAsset(
            "<beans>" +
            "<alternatives>" +
            "<class>org.apache.deltaspike.jpa.impl.transaction.BeanManagedUserTransactionStrategy</class>" +
            "</alternatives>" +
            "</beans>"
    );

    @Inject
    private ManualTransactionBean manualTransactionBean;

    @Inject
    private MockUserTransactionResolver mockTxResolver;

    /**
     * Builds the test archive: shared test utilities, this test's package,
     * the alternative-activating beans.xml, and the
     * TransactionContextExtension registered as a CDI extension.
     */
    @Deployment
    public static WebArchive deploy()
    {
        JavaArchive testJar = ShrinkWrap.create(JavaArchive.class, "manualTransactionTest.jar")
                .addPackage(ArchiveUtils.SHARED_PACKAGE)
                .addPackage(BeanManagedlTransactionTest.class.getPackage().getName())
                .addAsManifestResource(beansXml, "beans.xml");

        return ShrinkWrap.create(WebArchive.class)
                .addAsLibraries(ArchiveUtils.getDeltaSpikeCoreAndJpaArchive())
                .addAsLibraries(testJar)
                .addAsServiceProvider(Extension.class, TransactionContextExtension.class)
                .addAsWebInfResource(ArchiveUtils.getBeansXml(), "beans.xml");
    }

    @Before
    public void init()
    {
        // Pin the project stage so configuration lookups are deterministic.
        ProjectStageProducer.setProjectStage(ProjectStage.UnitTest);
    }

    /**
     * Runs a transactional bean method and verifies the mock UserTransaction
     * observed exactly a begin followed by a commit, with no rollback.
     */
    @Test
    public void manualTransactionTest()
    {
        mockTxResolver.resetTx();
        MockUserTransactionResolver.MockUserTransaction mockTx = mockTxResolver.resolveUserTransaction();

        manualTransactionBean.executeInTransaction();

        // After the call the tx must be finished: begun and committed,
        // never rolled back or marked rollback-only.
        Assert.assertEquals(false, mockTx.isActive());
        Assert.assertEquals(true, mockTx.isBegin());
        Assert.assertEquals(true, mockTx.isCommit());
        Assert.assertEquals(false, mockTx.isRollback());
        Assert.assertEquals(false, mockTx.isRollBackOnly());
    }
}
| apache-2.0 |
howepeng/isis | mothballed/component/viewer/html/impl/src/test/java/org/apache/isis/viewer/html/context/ContextTest_serialization.java | 3529 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.viewer.html.context;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.jmock.Expectations;
import org.jmock.auto.Mock;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.apache.isis.core.commons.config.IsisConfiguration;
import org.apache.isis.core.integtestsupport.IsisSystemWithFixtures;
import org.apache.isis.core.unittestsupport.jmocking.JUnitRuleMockery2;
import org.apache.isis.core.unittestsupport.jmocking.JUnitRuleMockery2.Mode;
import org.apache.isis.viewer.html.HtmlViewerConstants;
import org.apache.isis.viewer.html.PathBuilder;
import org.apache.isis.viewer.html.PathBuilderDefault;
import org.apache.isis.viewer.html.component.ComponentFactory;
import org.apache.isis.viewer.html.component.html.HtmlComponentFactory;
/**
 * Verifies that the HTML viewer {@link Context} can be written through an
 * {@link ObjectOutputStream} without throwing (i.e. its object graph is
 * serializable).
 */
public class ContextTest_serialization {

    @Rule
    public IsisSystemWithFixtures iswf = IsisSystemWithFixtures.builder().build();

    @Rule
    public JUnitRuleMockery2 context = JUnitRuleMockery2.createFor(Mode.INTERFACES_ONLY);

    @Mock
    private IsisConfiguration isisConfiguration;

    private ComponentFactory factory;
    private PathBuilder pathBuilder;
    private Context viewerContext;

    @Before
    public void setUp() throws Exception {
        // Silence log output for the serialization round-trip.
        Logger.getRootLogger().setLevel(Level.OFF);

        pathBuilder = new PathBuilderDefault("shtml");

        // Stub out every configuration key the HtmlComponentFactory reads.
        context.checking(new Expectations() {
            {
                allowing(isisConfiguration).getString(HtmlViewerConstants.STYLE_SHEET);
                will(returnValue("someStyleSheet.css"));

                allowing(isisConfiguration).getString(HtmlViewerConstants.HEADER_FILE);
                will(returnValue(null));

                allowing(isisConfiguration).getString(HtmlViewerConstants.HEADER);
                will(returnValue("<div></div>"));

                allowing(isisConfiguration).getString(HtmlViewerConstants.FOOTER_FILE);
                will(returnValue(null));

                allowing(isisConfiguration).getString(HtmlViewerConstants.FOOTER);
                will(returnValue("<div></div>"));
            }
        });

        factory = new HtmlComponentFactory(pathBuilder, isisConfiguration);
        viewerContext = new Context(factory);
    }

    @Test
    public void writeObject() throws IOException {
        final OutputStream baos = new ByteArrayOutputStream();
        // try-with-resources flushes and closes the stream even on failure;
        // the original left the ObjectOutputStream open.
        try (ObjectOutputStream objectOutputStream = new ObjectOutputStream(baos)) {
            objectOutputStream.writeObject(viewerContext);
        }
    }
}
| apache-2.0 |
libgdx/gdx-ai | tests/src/com/badlogic/gdx/ai/tests/msg/MessageTestBase.java | 3046 | /*******************************************************************************
* Copyright 2014 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.ai.tests.msg;
import com.badlogic.gdx.ai.GdxAI;
import com.badlogic.gdx.ai.tests.MessageTests;
import com.badlogic.gdx.ai.tests.utils.scene2d.CollapsableWindow;
import com.badlogic.gdx.scenes.scene2d.ui.Label;
import com.badlogic.gdx.scenes.scene2d.ui.Label.LabelStyle;
import com.badlogic.gdx.scenes.scene2d.ui.Stack;
import com.badlogic.gdx.scenes.scene2d.ui.Table;
/** Base class for individual message tests: owns the test's scene2d UI
 * (a full-screen stack with a table driven by the AI timepiece) and an
 * optional collapsable detail window.
 *
 * @author davebaol */
public abstract class MessageTestBase {

    protected MessageTests container;
    public String testName;
    protected CollapsableWindow detailWindow;
    // Timepiece time of the last act() pass; guards against acting twice
    // within the same timepiece tick.
    private float lastUpdateTime;
    private Stack testStack;
    protected Table testTable;

    public MessageTestBase (MessageTests container, String testName) {
        this.container = container;
        this.testName = testName;
    }

    /** Builds the test's UI: a stage-sized stack holding a table whose act()
     * runs off the AI timepiece delta instead of the raw stage delta, so the
     * test only advances when the timepiece does. */
    public void create () {
        lastUpdateTime = 0;
        testStack = new Stack();
        container.stage.getRoot().addActorAt(0, testStack);
        testStack.setSize(container.stageWidth, container.stageHeight);
        testStack.add(testTable = new Table() {
            @Override
            public void act (float delta) {
                float time = GdxAI.getTimepiece().getTime();
                // Act at most once per timepiece tick.
                if (lastUpdateTime != time) {
                    lastUpdateTime = time;
                    super.act(GdxAI.getTimepiece().getDeltaTime());
                }
            }
        });
        testStack.layout();
    }

    /** Removes the test's UI from the stage. */
    public void dispose () {
        testStack.remove();
        testStack = null;
    }

    public abstract void update ();

    public abstract void draw ();

    public abstract String getDescription ();

    public CollapsableWindow getDetailWindow () {
        return detailWindow;
    }

    /** Wraps the given table in a collapsed window docked at the stage's
     * top-right corner. */
    protected CollapsableWindow createDetailWindow (Table table) {
        CollapsableWindow window = new CollapsableWindow(this.testName, container.skin);
        window.row();
        window.add(table);
        window.pack();
        window.setX(container.stage.getWidth() - window.getWidth() + 1);
        window.setY(container.stage.getHeight() - window.getHeight() + 1);
        window.layout();
        window.collapse();
        return window;
    }

    /** Adds a thin light-gray horizontal separator spanning two columns. */
    protected void addSeparator (Table table) {
        Label lbl = new Label("", container.skin);
        lbl.setColor(0.75f, 0.75f, 0.75f, 1);
        lbl.setStyle(new LabelStyle(lbl.getStyle()));
        lbl.getStyle().background = container.skin.newDrawable("white");
        table.add(lbl).colspan(2).height(1).width(220).pad(5, 1, 5, 1);
    }
}
| apache-2.0 |
tectronics/splinelibrary | 2.3/src/org/drip/param/definition/ScenarioDiscountCurve.java | 5931 |
package org.drip.param.definition;
/*
* -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*/
/*!
* Copyright (C) 2014 Lakshmi Krishnamurthy
* Copyright (C) 2013 Lakshmi Krishnamurthy
* Copyright (C) 2012 Lakshmi Krishnamurthy
*
* This file is part of DRIP, a free-software/open-source library for fixed income analysts and developers -
* http://www.credit-trader.org/Begin.html
*
* DRIP is a free, full featured, fixed income rates, credit, and FX analytics library with a focus towards
* pricing/valuation, risk, and market making.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* ScenarioDiscountCurve abstract class exposes the interface the constructs scenario discount curves. The
* following curve construction scenarios are supported:
* - Base, flat/tenor up/down by arbitrary bumps
* - Tenor bumped discount curve set - keyed using the tenor.
* - NTP-based custom scenario curves.
*
* @author Lakshmi Krishnamurthy
*/
public abstract class ScenarioDiscountCurve {

    // NOTE(review): the scenario constants below are distinct powers of two
    // (0/1/2/4/8), which suggests iDCMode may be treated as a bitmask by
    // implementations — confirm against the concrete subclasses before
    // combining values.

    /**
     * Base Discount Curve
     */
    public static final int DC_BASE = 0;

    /**
     * Discount Curve Parallel Bump Up
     */
    public static final int DC_FLAT_UP = 1;

    /**
     * Discount Curve Parallel Bump Down
     */
    public static final int DC_FLAT_DN = 2;

    /**
     * Discount Curve Tenor Bump Up
     */
    public static final int DC_TENOR_UP = 4;

    /**
     * Discount Curve Tenor Bump Down
     */
    public static final int DC_TENOR_DN = 8;

    /**
     * Generate the set of discount curves from the scenario specified, and the instrument quotes
     *
     * @param valParams Valuation Parameters
     * @param dcTSY The Treasury Discount Curve
     * @param dcEDSF The EDSF Discount Curve
     * @param adblQuotes Matched array of the calibration instrument quotes
     * @param dblBump Amount of bump to be applied
     * @param astrCalibMeasure Matched array of the calibration instrument measures
     * @param mmFixings Double map of date/rate index and fixings
     * @param quotingParams Quoting Parameters
     * @param iDCMode One of the values in the DC_ enum listed above.
     *
     * @return Success (true), failure (false)
     */
    public abstract boolean cookScenarioDC (
        final org.drip.param.valuation.ValuationParams valParams,
        final org.drip.analytics.rates.DiscountCurve dcTSY,
        final org.drip.analytics.rates.DiscountCurve dcEDSF,
        final double[] adblQuotes,
        final double dblBump,
        final java.lang.String[] astrCalibMeasure,
        final java.util.Map<org.drip.analytics.date.JulianDate,
            org.drip.analytics.support.CaseInsensitiveTreeMap<java.lang.Double>> mmFixings,
        final org.drip.param.valuation.QuotingParams quotingParams,
        final int iDCMode);

    /**
     * Cook a custom discount curve according to the desired tweak parameters
     *
     * @param strCurveName Scenario Discount Curve Name
     * @param strCustomName Custom Scenario Name
     * @param valParams Valuation Parameters
     * @param dcTSY TSY Discount Curve
     * @param dcEDSF EDSF Discount Curve
     * @param adblQuotes Double array of input quotes
     * @param astrCalibMeasure Array of calibration measures
     * @param mmFixings Date/Index fixings
     * @param quotingParams Calibration quoting parameters
     * @param ntpTSY Node Tweak Parameters for the TSY Discount Curve
     * @param ntpEDSF Node Tweak Parameters for the EDSF Discount Curve
     * @param ntpDC Node Tweak Parameters for the Base Discount Curve
     *
     * @return Success (true), failure (false)
     */
    public abstract boolean cookCustomDC (
        final java.lang.String strCurveName,
        final java.lang.String strCustomName,
        final org.drip.param.valuation.ValuationParams valParams,
        final org.drip.analytics.rates.DiscountCurve dcTSY,
        final org.drip.analytics.rates.DiscountCurve dcEDSF,
        final double[] adblQuotes,
        final java.lang.String[] astrCalibMeasure,
        final java.util.Map<org.drip.analytics.date.JulianDate,
            org.drip.analytics.support.CaseInsensitiveTreeMap<java.lang.Double>> mmFixings,
        final org.drip.param.valuation.QuotingParams quotingParams,
        final org.drip.param.definition.ResponseValueTweakParams ntpTSY,
        final org.drip.param.definition.ResponseValueTweakParams ntpEDSF,
        final org.drip.param.definition.ResponseValueTweakParams ntpDC);

    /**
     * Return the base Discount Curve
     *
     * @return The base Discount Curve
     */
    public abstract org.drip.analytics.rates.DiscountCurve getDCBase();

    /**
     * Return the Bump Up Discount Curve
     *
     * @return The Bump Up Discount Curve
     */
    public abstract org.drip.analytics.rates.DiscountCurve getDCBumpUp();

    /**
     * Return the Bump Down Discount Curve
     *
     * @return The Bump Down Discount Curve
     */
    public abstract org.drip.analytics.rates.DiscountCurve getDCBumpDn();

    /**
     * Return the map of the tenor Bump Up Discount Curve (keyed by tenor)
     *
     * @return The map of the tenor Bump Up Discount Curve
     */
    public abstract
        org.drip.analytics.support.CaseInsensitiveTreeMap<org.drip.analytics.rates.DiscountCurve>
            getTenorDCBumpUp();

    /**
     * Return the map of the tenor Bump Down Discount Curve (keyed by tenor)
     *
     * @return The map of the tenor Bump Down Discount Curve
     */
    public abstract
        org.drip.analytics.support.CaseInsensitiveTreeMap<org.drip.analytics.rates.DiscountCurve>
            getTenorDCBumpDn();
}
duboisf/gerrit | gerrit-server/src/main/java/com/google/gerrit/server/patch/PatchSetInfoFactory.java | 4778 | // Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.patch;
import com.google.gerrit.reviewdb.Account;
import com.google.gerrit.reviewdb.Change;
import com.google.gerrit.reviewdb.PatchSet;
import com.google.gerrit.reviewdb.PatchSetInfo;
import com.google.gerrit.reviewdb.Project;
import com.google.gerrit.reviewdb.RevId;
import com.google.gerrit.reviewdb.ReviewDb;
import com.google.gerrit.reviewdb.UserIdentity;
import com.google.gerrit.server.account.AccountByEmailCache;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gwtorm.client.OrmException;
import com.google.gwtorm.client.SchemaFactory;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
* Factory class creating PatchSetInfo from meta-data found in Git repository.
*/
@Singleton
public class PatchSetInfoFactory {
private final GitRepositoryManager repoManager;
private final SchemaFactory<ReviewDb> schemaFactory;
private final AccountByEmailCache byEmailCache;
/**
 * Creates the factory.
 *
 * @param grm gives access to the Git repositories holding patch set commits
 * @param schemaFactory opens ReviewDb connections on demand
 * @param byEmailCache resolves commit e-mail addresses to Gerrit accounts
 */
@Inject
public PatchSetInfoFactory(final GitRepositoryManager grm,
    final SchemaFactory<ReviewDb> schemaFactory,
    final AccountByEmailCache byEmailCache) {
  this.repoManager = grm;
  this.schemaFactory = schemaFactory;
  this.byEmailCache = byEmailCache;
}
public PatchSetInfo get(RevCommit src, PatchSet.Id psi) {
PatchSetInfo info = new PatchSetInfo(psi);
info.setSubject(src.getShortMessage());
info.setMessage(src.getFullMessage());
info.setAuthor(toUserIdentity(src.getAuthorIdent()));
info.setCommitter(toUserIdentity(src.getCommitterIdent()));
info.setRevId(src.getName());
return info;
}
public PatchSetInfo get(PatchSet.Id patchSetId)
throws PatchSetInfoNotAvailableException {
ReviewDb db = null;
Repository repo = null;
try {
db = schemaFactory.open();
final PatchSet patchSet = db.patchSets().get(patchSetId);
final Change change = db.changes().get(patchSet.getId().getParentKey());
final Project.NameKey projectKey = change.getProject();
repo = repoManager.openRepository(projectKey);
final RevWalk rw = new RevWalk(repo);
try {
final RevCommit src =
rw.parseCommit(ObjectId.fromString(patchSet.getRevision().get()));
PatchSetInfo info = get(src, patchSetId);
info.setParents(toParentInfos(src.getParents(), rw));
return info;
} finally {
rw.release();
}
} catch (OrmException e) {
throw new PatchSetInfoNotAvailableException(e);
} catch (IOException e) {
throw new PatchSetInfoNotAvailableException(e);
} finally {
if (db != null) {
db.close();
}
if (repo != null) {
repo.close();
}
}
}
private UserIdentity toUserIdentity(final PersonIdent who) {
final UserIdentity u = new UserIdentity();
u.setName(who.getName());
u.setEmail(who.getEmailAddress());
u.setDate(new Timestamp(who.getWhen().getTime()));
u.setTimeZone(who.getTimeZoneOffset());
// If only one account has access to this email address, select it
// as the identity of the user.
//
final Set<Account.Id> a = byEmailCache.get(u.getEmail());
if (a.size() == 1) {
u.setAccount(a.iterator().next());
}
return u;
}
private List<PatchSetInfo.ParentInfo> toParentInfos(final RevCommit[] parents,
final RevWalk walk) throws IOException, MissingObjectException {
List<PatchSetInfo.ParentInfo> pInfos =
new ArrayList<PatchSetInfo.ParentInfo>(parents.length);
for (RevCommit parent : parents) {
walk.parseBody(parent);
RevId rev = new RevId(parent.getId().name());
String msg = parent.getShortMessage();
pInfos.add(new PatchSetInfo.ParentInfo(rev, msg));
}
return pInfos;
}
}
| apache-2.0 |
bgrozev/libjitsi | src/org/jitsi/impl/neomedia/jmfext/media/protocol/ivffile/IVFFileReader.java | 5558 | /*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.impl.neomedia.jmfext.media.protocol.ivffile;
import java.io.*;
/**
* This class represent an IVF file and provide an API to get the vp8 video
* frames it contains.
*
* @author Thomas Kuntz
*/
public class IVFFileReader
{
    /**
     * The length in bytes of the IVF file header.
     */
    private static final int IVF_HEADER_LENGTH = 32;

    /**
     * A <tt>IVFHeader</tt> representing the global header of the IVF
     * file which this <tt>IVFFileReader</tt> will read.
     * This header contains information like the dimension of the frame,
     * the framerate, the number of frame. in the file, etc.
     */
    private IVFHeader header;

    /**
     * The <tt>RandomAccessFile</tt> used to read the IVF file.
     */
    private RandomAccessFile stream;

    /**
     * Initialize a new instance of <tt>IVFFileReader</tt> that will read
     * the IVF file located by <tt>filePath</tt>.
     * @param filePath the location of the IVF file this <tt>IVFFileReader</tt>
     * will read.
     */
    public IVFFileReader(String filePath)
    {
        header = new IVFHeader(filePath);
        try
        {
            stream = new RandomAccessFile(filePath,"r");
            stream.seek(IVF_HEADER_LENGTH);
        }
        catch (IOException e)
        {
            // Preserve the historical best-effort behavior: the error (this
            // also covers FileNotFoundException) is printed and the instance
            // is left with a null stream; reads will then fail.
            e.printStackTrace();
        }
    }

    /**
     * Get the header of the IVF file.
     * @return the header of the IVF file represented by a <tt>IVFHeader</tt>.
     */
    public IVFHeader getHeader()
    {
        return header;
    }

    /**
     * Get the next vp8 frame of the IVF file as a <tt>byte</tt> array.
     * A VP8Frame is allocated for each call to this function.
     *
     * @param loopFile if true and the end of the file is reached,
     * this <tt>IVFFileReader</tt> will go back at the beginning of the file
     * and start over the reading of the file.
     * @return the next vp8 frame of the IVF file as a <tt>byte</tt> array.
     * @throws IOException if an error occur during the read, of if EOF is reached.
     */
    public VP8Frame getNextFrame(boolean loopFile) throws IOException
    {
        VP8Frame frame = new VP8Frame();
        getNextFrame(frame, loopFile);
        return frame;
    }

    /**
     * Get the next vp8 frame of the IVF file as a <tt>byte</tt> array.
     * You should use this function if you don't want to allocate a new VP8Frame
     * for each call.
     *
     * @param frame the <tt>VP8Frame</tt> that will be filled with the
     * next frame from the file.
     * @param loopFile if true and the end of the file is reached,
     * this <tt>IVFFileReader</tt> will go back at the beginning of the file
     * and start over the reading of the file.
     * @throws IOException if an error occur during the read, of if EOF is reached.
     */
    public void getNextFrame(VP8Frame frame,boolean loopFile) throws IOException
    {
        if(loopFile && (stream.getFilePointer() >= stream.length()))
        {
            stream.seek(header.getHeaderLength());
        }

        // Per-frame header: 32-bit little-endian size, 64-bit little-endian
        // timestamp, followed by the frame payload.
        int frameSizeInBytes = changeEndianness(stream.readInt());
        long timestamp = changeEndianness(stream.readLong());
        byte[] data = new byte[frameSizeInBytes];
        // read() may legally return before filling the whole buffer;
        // readFully() guarantees a complete frame or throws EOFException.
        stream.readFully(data);

        frame.set(timestamp, frameSizeInBytes, data);
    }

    /**
     * Change the endianness of a 32bits int.
     * @param value the value which you want to change the endianness.
     * @return the <tt>value</tt> with a changed endianness.
     */
    public static int changeEndianness(int value)
    {
        // Equivalent to the manual shift-and-mask byte swap.
        return Integer.reverseBytes(value);
    }

    /**
     * Change the endianness of a 16bits short.
     * @param value the value which you want to change the endianness.
     * @return the <tt>value</tt> with a changed endianness
     */
    public static short changeEndianness(short value)
    {
        return Short.reverseBytes(value);
    }

    /**
     * Change the endianness of a 64bits long.
     * @param value the value which you want to change the endianness.
     * @return the <tt>value</tt> with a changed endianness
     */
    public static long changeEndianness(long value)
    {
        return Long.reverseBytes(value);
    }
}
| apache-2.0 |
TheTypoMaster/struts-2.3.24 | src/plugins/plexus/src/main/java/org/apache/struts2/plexus/PlexusLifecycleListener.java | 4426 | /*
* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.plexus;
import java.util.Collections;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpSessionEvent;
import javax.servlet.http.HttpSessionListener;
import org.codehaus.plexus.DefaultPlexusContainer;
import org.codehaus.plexus.PlexusContainer;
import com.opensymphony.xwork2.util.logging.Logger;
import com.opensymphony.xwork2.util.logging.LoggerFactory;
/**
* Manages the Plexus lifecycle for the servlet and session contexts
*/
public class PlexusLifecycleListener implements ServletContextListener, HttpSessionListener {
    // Logger must be obtained for this class, not PlexusObjectFactory
    // (the original was a copy-paste slip and mislabeled every log line).
    private static final Logger LOG = LoggerFactory.getLogger(PlexusLifecycleListener.class);

    private static boolean loaded = false;

    /** Attribute key under which the Plexus container is stored in each scope. */
    public static final String KEY = "struts.plexus.container";

    /**
     * @return Returns if the container is loaded.
     */
    public static boolean isLoaded() {
        return loaded;
    }

    /* (non-Javadoc)
     * @see javax.servlet.ServletContextListener#contextInitialized(javax.servlet.ServletContextEvent)
     */
    public void contextInitialized(ServletContextEvent servletContextEvent) {
        loaded = true;
        try {
            PlexusContainer pc = new DefaultPlexusContainer();
            PlexusUtils.configure(pc, "plexus-application.xml");
            ServletContext ctx = servletContextEvent.getServletContext();
            ctx.setAttribute(KEY, pc);
            pc.initialize();
            pc.start();
        } catch (Exception e) {
            LOG.error("Error initializing plexus container (scope: application)", e);
        }
    }

    /* (non-Javadoc)
     * @see javax.servlet.ServletContextListener#contextDestroyed(javax.servlet.ServletContextEvent)
     */
    public void contextDestroyed(ServletContextEvent servletContextEvent) {
        try {
            ServletContext ctx = servletContextEvent.getServletContext();
            PlexusContainer pc = (PlexusContainer) ctx.getAttribute(KEY);
            pc.dispose();
        } catch (Exception e) {
            LOG.error("Error disposing plexus container (scope: application)", e);
        }
    }

    /* (non-Javadoc)
     * @see javax.servlet.http.HttpSessionListener#sessionCreated(javax.servlet.http.HttpSessionEvent)
     */
    public void sessionCreated(HttpSessionEvent httpSessionEvent) {
        try {
            HttpSession session = httpSessionEvent.getSession();
            ServletContext ctx = session.getServletContext();
            // The session container is a child of the application container.
            PlexusContainer parent = (PlexusContainer) ctx.getAttribute(KEY);
            PlexusContainer child = parent.createChildContainer("session", Collections.EMPTY_LIST, Collections.EMPTY_MAP);
            session.setAttribute(KEY, child);
            PlexusUtils.configure(child, "plexus-session.xml");
            child.initialize();
            child.start();
        } catch (Exception e) {
            LOG.error("Error initializing plexus container (scope: session)", e);
        }
    }

    /* (non-Javadoc)
     * @see javax.servlet.http.HttpSessionListener#sessionDestroyed(javax.servlet.http.HttpSessionEvent)
     */
    public void sessionDestroyed(HttpSessionEvent httpSessionEvent) {
        try {
            HttpSession session = httpSessionEvent.getSession();
            PlexusContainer child = (PlexusContainer) session.getAttribute(KEY);
            child.dispose();
        } catch (Exception e) {
            // Fixed copy-paste message: this path disposes, it does not initialize.
            LOG.error("Error disposing plexus container (scope: session)", e);
        }
    }
}
| apache-2.0 |
katyrae/QuickTheories | core/src/test/java/org/quicktheories/generators/StringsDSLTest.java | 917 | package org.quicktheories.generators;
import static org.quicktheories.impl.GenAssert.assertThatGenerator;
import org.junit.Test;
import org.quicktheories.WithQuickTheories;
import org.quicktheories.core.Gen;
public class StringsDSLTest implements WithQuickTheories {

  @Test
  public void boundedLengthStringsRespectsLengthBounds() {
    // Every generated string must stay within the requested [3, 200] range.
    Gen<String> gen = strings().allPossible().ofLengthBetween(3, 200);
    qt()
    .withExamples(100000)
    .forAll(gen)
    .check(s -> 3 <= s.length() && s.length() <= 200);
  }

  @Test
  public void boundedLengthStringsProducesDistinctValues() {
    assertThatGenerator(strings().allPossible().ofLengthBetween(0, 100))
        .generatesAtLeastNDistinctValues(1000);
  }

  @Test
  public void fixedLengthStringsAreFixedLength() {
    Gen<String> gen = strings().allPossible().ofLength(100);
    qt()
    .forAll(gen)
    .check(s -> s.length() == 100);
  }
}
| apache-2.0 |
ketan/gocd | server/src/test-shared/java/com/thoughtworks/go/server/perf/AgentPerformanceVerifier.java | 10248 | /*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.perf;
import com.google.common.collect.Sets;
import com.thoughtworks.go.config.Agent;
import com.thoughtworks.go.server.perf.commands.*;
import com.thoughtworks.go.server.persistence.AgentDao;
import com.thoughtworks.go.server.service.AgentService;
import com.thoughtworks.go.server.service.EnvironmentConfigService;
import com.thoughtworks.go.server.service.GoConfigService;
import com.thoughtworks.go.util.Csv;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.*;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.stream.IntStream;

import static com.google.common.collect.Streams.stream;
/**
* A test component which helps in verifying multi-threaded scenarios related to agents
* Usage: add the following line as a test. Make sure that the service objects passed are autowired ones.
* new AgentPerformanceVerifier(agentService, agentDao, envConfigService, goConfigService, 10).verify();
*/
public class AgentPerformanceVerifier {
    private static final Logger LOG = LoggerFactory.getLogger(AgentPerformanceVerifier.class);
    private static final int DEFAULT_NO_OF_THREADS_TO_USE = 5;

    private final GoConfigService goConfigService;
    private int noOfThreadsToUse;
    private AgentService agentService;
    private final AgentDao agentDao;
    private EnvironmentConfigService environmentConfigService;

    public AgentPerformanceVerifier(AgentService agentService, AgentDao agentDao, EnvironmentConfigService environmentConfigService, GoConfigService goConfigService, int threadCount) {
        this.agentService = agentService;
        this.agentDao = agentDao;
        this.environmentConfigService = environmentConfigService;
        this.goConfigService = goConfigService;
        this.noOfThreadsToUse = threadCount > 0 ? threadCount : DEFAULT_NO_OF_THREADS_TO_USE;
    }

    /**
     * Registers agents and fires a mixed workload of agent/environment commands
     * concurrently, then generates a CSV report and asserts DB/cache consistency.
     */
    public void verify() {
        ScheduledExecutorService execService = Executors.newScheduledThreadPool(noOfThreadsToUse);
        Collection<Future<Optional<String>>> futures = new ArrayList<>(noOfThreadsToUse);
        try {
            registerSpecifiedNumberOfAgents(execService, futures);

            // One shared Random instead of a new instance per iteration.
            Random random = new Random();
            IntStream.range(0, noOfThreadsToUse)
                    .forEach(val -> {
                        int nextInt = random.nextInt(val + 1);
//                        UpdateAgentHostCommand updateAgentHostCmd = new UpdateAgentHostCommand(agentService);
                        UpdateAgentResourcesCommand updateAgentResourcesCmd = new UpdateAgentResourcesCommand(agentService);
                        UpdateAgentEnvironmentsCommand updateAgentEnvsCmd = new UpdateAgentEnvironmentsCommand(agentService);
                        UpdateAllAgentAttributesCommand updateAllAgentDetailsCmd = new UpdateAllAgentAttributesCommand(agentService);
                        DisableAgentCommand disableAgentCmd = new DisableAgentCommand(agentService);
//                        BulkUpdateAgentCommand bulkUpdateAgentCmd = new BulkUpdateAgentCommand(agentService, environmentConfigService);
                        CreateEnvironmentCommand createEnvironmentCommand = new CreateEnvironmentCommand(goConfigService, "e" + val);
                        DeleteEnvironmentCommand deleteEnvironmentCommand = new DeleteEnvironmentCommand(goConfigService, "e" + nextInt);

                        futures.add(execService.submit(updateAgentResourcesCmd));
                        futures.add(execService.submit(updateAgentEnvsCmd));
                        futures.add(execService.submit(updateAllAgentDetailsCmd));
                        futures.add(execService.submit(disableAgentCmd));
                        futures.add(execService.submit(createEnvironmentCommand));
                        futures.add(execService.submit(deleteEnvironmentCommand));
                    });
            joinFutures(futures);
        } finally {
            // The pool was never shut down before; its threads leaked after each run.
            execService.shutdown();
        }
        generateReport();
        doAssertAgentAndItsAssociationInDBAndCache();
    }

    /** Drains the command queue and writes one CSV row per executed command. */
    private void generateReport() {
        Csv csv = new Csv();
        LinkedBlockingQueue<AgentPerformanceCommand> queue = AgentPerformanceCommand.queue;
        try {
            while (!queue.isEmpty()) {
                AgentPerformanceCommand commandExecuted = queue.take();
                addRowToCsv(csv, commandExecuted.getResult());
            }
            String logDir = System.getProperty("gocd.server.log.dir", "logs");
            // Explicit UTF-8: getBytes() without a charset depends on the platform default.
            Path reportFilePath = Files.write(Paths.get(logDir + "/agent-perf-result.csv"), csv.toString().getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE);
            LOG.info("Report is available at {}", reportFilePath.toAbsolutePath());
        } catch (InterruptedException e) {
            LOG.error("Error while dequeuing", e);
        } catch (IOException e) {
            LOG.error("Error while appending to csv file", e);
        }
    }

    private void addRowToCsv(Csv csv, AgentPerformanceCommandResult result) {
        csv.newRow()
                .put("command_name", result.getName())
                .put("agents", result.getAgentUuids())
                .put("status", result.getStatus())
                .put("failure_message", result.getFailureMessage())
                .put("time_taken_in_millis", String.valueOf(result.getTimeTakenInMillis()));
    }

    /** Submits one agent-registration command per configured thread. */
    private void registerSpecifiedNumberOfAgents(ScheduledExecutorService execService, Collection<Future<Optional<String>>> futures) {
        IntStream.range(0, noOfThreadsToUse)
                .forEach(val -> {
                    RegisterAgentCommand registerAgentCmd = new RegisterAgentCommand(agentService);
                    futures.add(execService.submit(registerAgentCmd));
                });
    }

    /**
     * Cross-checks every perf-test agent between the in-memory cache, the DB
     * and the environment cache, throwing when any of them disagree.
     */
    private void doAssertAgentAndItsAssociationInDBAndCache() {
        stream(agentService.getAgentInstances())
                .filter(agentInstance -> agentInstance.getUuid().startsWith("Perf-Test-Agent-"))
                .forEach(agentInstance -> {
                    Agent agentInCache = agentInstance.getAgent();
                    Agent agentInDB = agentDao.fetchAgentFromDBByUUID(agentInCache.getUuid());
                    if (agentInDB == null && !agentInstance.isPending()) {
                        LOG.debug("Agent {} is not pending but not present in DB", agentInCache.getUuid());
                        bombIfAgentInDBAndCacheAreDifferent(agentInCache, agentInDB);
                    }

                    Set<String> agentEnvsInEnvCache = environmentConfigService.getAgentEnvironmentNames(agentInCache.getUuid());
                    HashSet<String> agentEnvsInDB = new HashSet<>(agentInDB.getEnvironmentsAsList());

                    bombIfAgentEnvAssociationInDBAndEnvCacheAreDifferent(agentInCache, agentEnvsInDB, agentEnvsInEnvCache);
                    bombIfMissingAgentKnownEnvAssociationInEnvCache(agentInCache, agentEnvsInEnvCache, agentEnvsInDB);
                });
        LOG.debug("\n\n*************** Hurray! Verification of performance tests succeeded and there are no threading issues reported! ***************\n\n");
    }

    private void bombIfMissingAgentKnownEnvAssociationInEnvCache(Agent agentInCache, Set<String> agentEnvsInEnvCache, HashSet<String> agentEnvsInDB) {
        Set<String> difference = Sets.difference(agentEnvsInDB, agentEnvsInEnvCache);
        HashSet<String> knownEnvNames = new HashSet<>(environmentConfigService.getEnvironmentNames());
        // Associations to environments unknown to the config are tolerated.
        boolean containsOnlyUnknownEnvs = (difference.isEmpty() || !knownEnvNames.containsAll(difference));
        if (!containsOnlyUnknownEnvs) {
            LOG.error("Throwing RuntimeException as verification of agent environments {} in db and environments cache has failed. There are some agent environment associations in DB that does not exist in environment cache", agentInCache.getUuid());
            throw new RuntimeException("WARNING : There is some threading issue found during agent performance test!!!");
        }
    }

    private void bombIfAgentEnvAssociationInDBAndEnvCacheAreDifferent(Agent agentInCache, Set<String> agentEnvsInDB, Set<String> agentEnvsInEnvCache) {
        if (!agentEnvsInDB.containsAll(agentEnvsInEnvCache)) {
            LOG.error("Throwing RuntimeException as verification of agent environments {} in db and environments cache has failed", agentInCache.getUuid());
            throw new RuntimeException("WARNING : There is some threading issue found during agent performance test!!!");
        }
    }

    private void bombIfAgentInDBAndCacheAreDifferent(Agent agentInCache, Agent agentInDB) {
        if (!agentInCache.equals(agentInDB)) {
            LOG.error("Throwing RuntimeException as verification of agents {} in db and cache has failed.\nAgent in DB: {}\nAgent in cache: {}", agentInCache.getUuid(), agentInDB, agentInCache);
            throw new RuntimeException("WARNING : There is some threading issue found during agent performance test!!!");
        }
    }

    /** Waits for all submitted commands; a command returning empty counts as a failure. */
    private void joinFutures(Collection<Future<Optional<String>>> futures) {
        for (Future<Optional<String>> f : futures) {
            try {
                Optional<String> uuid = f.get();
                uuid.orElseThrow(() -> new Exception("Operation Failed!"));
            } catch (Exception e) {
                LOG.error("############# Exception while performing some operation on performance agent...!!! #############", e);
            }
        }
    }
}
| apache-2.0 |
signed/intellij-community | plugins/tasks/tasks-core/src/com/intellij/tasks/impl/LocalTaskImpl.java | 9766 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.tasks.impl;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.tasks.*;
import com.intellij.tasks.timeTracking.model.WorkItem;
import com.intellij.util.xmlb.annotations.*;
import icons.TasksIcons;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* @author Dmitry Avdeev
*/
@Tag("task")
@SuppressWarnings({"UnusedDeclaration"})
public class LocalTaskImpl extends LocalTask {
  @NonNls public static final String DEFAULT_TASK_ID = "Default";

  private String myId = "";
  private String mySummary = "";
  private String myDescription = null;
  private Comment[] myComments = Comment.EMPTY_ARRAY;
  private boolean myClosed = false;
  private Date myCreated;
  private Date myUpdated;
  private TaskType myType = TaskType.OTHER;
  private String myPresentableName;
  private String myCustomIcon = null;
  private String myProject = null;
  private String myNumber = "";
  private String myPresentableId = "";
  private boolean myIssue = false;
  private TaskRepository myRepository = null;
  private String myIssueUrl = null;
  private boolean myActive;
  private List<ChangeListInfo> myChangeLists = new ArrayList<>();
  private boolean myRunning = false;
  private List<WorkItem> myWorkItems = new ArrayList<>();
  private Date myLastPost;
  private List<BranchInfo> myBranches = new ArrayList<>();

  /** for serialization */
  public LocalTaskImpl() {
  }

  public LocalTaskImpl(@NotNull String id, @NotNull String summary) {
    myId = id;
    mySummary = summary;
  }

  public LocalTaskImpl(Task origin) {
    myId = origin.getId();
    myIssue = origin.isIssue();
    myRepository = origin.getRepository();

    copy(origin);

    if (origin instanceof LocalTaskImpl) {
      myChangeLists = ((LocalTaskImpl)origin).getChangeLists();
      myBranches = ((LocalTaskImpl)origin).getBranches();
      myActive = ((LocalTaskImpl)origin).isActive();
      myWorkItems = ((LocalTaskImpl)origin).getWorkItems();
      myRunning = ((LocalTaskImpl)origin).isRunning();
      myLastPost = ((LocalTaskImpl)origin).getLastPost();
    }
  }

  @Attribute("id")
  @NotNull
  public String getId() {
    return myId;
  }

  @Attribute("summary")
  @NotNull
  public String getSummary() {
    return mySummary;
  }

  @Override
  public String getDescription() {
    return myDescription;
  }

  @NotNull
  @Override
  public Comment[] getComments() {
    return myComments;
  }

  @Tag("updated")
  public Date getUpdated() {
    return myUpdated == null ? getCreated() : myUpdated;
  }

  @Tag("created")
  public Date getCreated() {
    if (myCreated == null) {
      myCreated = new Date();
    }
    return myCreated;
  }

  @Attribute("active")
  public boolean isActive() {
    return myActive;
  }

  @Override
  public void updateFromIssue(Task issue) {
    copy(issue);
    myIssue = true;
  }

  private void copy(Task issue) {
    mySummary = issue.getSummary();
    myDescription = issue.getDescription();
    myComments = issue.getComments();
    myClosed = issue.isClosed();
    myCreated = issue.getCreated();
    // Keep the most recent update timestamp; never move it backwards.
    if (Comparing.compare(myUpdated, issue.getUpdated()) < 0) {
      myUpdated = issue.getUpdated();
    }
    myType = issue.getType();
    myPresentableName = issue.getPresentableName();
    myCustomIcon = issue.getCustomIcon();
    myIssueUrl = issue.getIssueUrl();
    myRepository = issue.getRepository();
    myProject = issue.getProject();
    myNumber = issue.getNumber();
    myPresentableId = issue.getPresentableId();
  }

  public void setId(String id) {
    myId = id;
  }

  public void setSummary(String summary) {
    mySummary = summary;
  }

  public void setActive(boolean active) {
    myActive = active;
  }

  @Override
  public boolean isIssue() {
    return myIssue;
  }

  @Tag("url")
  @Override
  public String getIssueUrl() {
    return myIssueUrl;
  }

  public String setIssueUrl(String url) {
    return myIssueUrl = url;
  }

  public void setIssue(boolean issue) {
    myIssue = issue;
  }

  @Transient
  @Override
  public TaskRepository getRepository() {
    return myRepository;
  }

  public void setRepository(TaskRepository repository) {
    myRepository = repository;
  }

  public void setCreated(Date created) {
    myCreated = created;
  }

  public void setUpdated(Date updated) {
    myUpdated = updated;
  }

  @NotNull
  @Property(surroundWithTag = false)
  @AbstractCollection(surroundWithTag = false, elementTag="changelist")
  public List<ChangeListInfo> getChangeLists() {
    return myChangeLists;
  }

  // for serialization
  public void setChangeLists(List<ChangeListInfo> changeLists) {
    myChangeLists = changeLists;
  }

  @Override
  public void addChangelist(final ChangeListInfo info) {
    if (!myChangeLists.contains(info)) {
      myChangeLists.add(info);
    }
  }

  @Override
  public void removeChangelist(final ChangeListInfo info) {
    myChangeLists.remove(info);
  }

  @NotNull
  @Override
  @Property(surroundWithTag = false)
  @AbstractCollection(surroundWithTag = false, elementTag="branch")
  public List<BranchInfo> getBranches() {
    return myBranches;
  }

  public void setBranches(List<BranchInfo> branches) {
    myBranches = branches;
  }

  @Override
  public void addBranch(BranchInfo info) {
    myBranches.add(info);
  }

  @Override
  public void removeBranch(BranchInfo info) {
    // Fixed copy-paste bug: this previously called add(), so branches were
    // duplicated instead of removed.
    myBranches.remove(info);
  }

  public boolean isClosed() {
    return myClosed;
  }

  public void setClosed(boolean closed) {
    myClosed = closed;
  }

  @NotNull
  @Override
  public Icon getIcon() {
    final String customIcon = getCustomIcon();
    if (customIcon != null) {
      return IconLoader.getIcon(customIcon, LocalTask.class);
    }
    return getIconFromType(myType, isIssue());
  }

  public static Icon getIconFromType(TaskType type, boolean issue) {
    switch (type) {
      case BUG:
        return TasksIcons.Bug;
      case EXCEPTION:
        return TasksIcons.Exception;
      case FEATURE:
        return TasksIcons.Feature;
      default:
      case OTHER:
        return issue ? TasksIcons.Other : TasksIcons.Unknown;
    }
  }

  @NotNull
  @Override
  public TaskType getType() {
    return myType;
  }

  public void setType(TaskType type) {
    myType = type == null ? TaskType.OTHER : type;
  }

  @Override
  public boolean isDefault() {
    return myId.equals(DEFAULT_TASK_ID);
  }

  @Override
  public String getPresentableName() {
    return myPresentableName != null ? myPresentableName : toString();
  }

  public String getCustomIcon() {
    return myCustomIcon;
  }

  public long getTotalTimeSpent() {
    long timeSpent = 0;
    for (WorkItem item : myWorkItems) {
      timeSpent += item.duration;
    }
    return timeSpent;
  }

  @Tag("running")
  @Override
  public boolean isRunning() {
    return myRunning;
  }

  public void setRunning(final boolean running) {
    myRunning = running;
  }

  @Override
  public void setWorkItems(final List<WorkItem> workItems) {
    myWorkItems = workItems;
  }

  @NotNull
  @Property(surroundWithTag = false)
  @AbstractCollection(surroundWithTag = false, elementTag="workItem")
  @Override
  public List<WorkItem> getWorkItems() {
    return myWorkItems;
  }

  @Override
  public void addWorkItem(final WorkItem workItem) {
    myWorkItems.add(workItem);
  }

  @Tag("lastPost")
  @Override
  public Date getLastPost() {
    return myLastPost;
  }

  @Override
  public void setLastPost(final Date date) {
    myLastPost = date;
  }

  @Override
  public long getTimeSpentFromLastPost() {
    long timeSpent = 0;
    if (myLastPost != null) {
      // Count only the part of each work item that falls after the last post.
      for (WorkItem item : myWorkItems) {
        if (item.from.getTime() < myLastPost.getTime()) {
          if (item.from.getTime() + item.duration > myLastPost.getTime()) {
            timeSpent += item.from.getTime() + item.duration - myLastPost.getTime();
          }
        }
        else {
          timeSpent += item.duration;
        }
      }
    }
    else {
      for (WorkItem item : myWorkItems) {
        timeSpent += item.duration;
      }
    }
    return timeSpent;
  }

  @NotNull
  @Override
  public String getNumber() {
    // extract number from ID for compatibility
    return StringUtil.isEmpty(myNumber) ? extractNumberFromId(myId) : myNumber;
  }

  public void setNumber(@NotNull String number) {
    myNumber = number;
  }

  @Nullable
  @Override
  public String getProject() {
    // extract project from ID for compatibility
    return StringUtil.isEmpty(myProject) ? extractProjectFromId(myId) : myProject;
  }

  public void setProject(@Nullable String project) {
    myProject = project;
  }

  public void setPresentableId(@NotNull String presentableId) {
    myPresentableId = presentableId;
  }

  @NotNull
  @Override
  public String getPresentableId() {
    // Use global ID for compatibility
    return StringUtil.isEmpty(myPresentableId) ? getId() : myPresentableId;
  }
}
| apache-2.0 |
rancherio/cattle | modules/caas/backend/src/main/java/io/cattle/platform/lifecycle/impl/SecretsLifecycleManagerImpl.java | 2132 | package io.cattle.platform.lifecycle.impl;
import io.cattle.platform.core.addon.SecretReference;
import io.cattle.platform.core.constants.InstanceConstants;
import io.cattle.platform.core.dao.StorageDriverDao;
import io.cattle.platform.core.model.Instance;
import io.cattle.platform.core.model.StorageDriver;
import io.cattle.platform.core.model.Volume;
import io.cattle.platform.lifecycle.SecretsLifecycleManager;
import io.cattle.platform.object.util.DataAccessor;
import io.cattle.platform.token.TokenService;
import io.cattle.platform.util.type.CollectionUtils;
import java.util.Date;
import java.util.List;
public class SecretsLifecycleManagerImpl implements SecretsLifecycleManager {

    /**
     * Lifetime granted to the secrets-volume token, in milliseconds
     * (31556926000L is roughly one year).
     */
    private static final long SECRETS_TOKEN_TTL_MILLIS = 31556926000L;

    TokenService tokenService;
    StorageDriverDao storageDriverDao;

    public SecretsLifecycleManagerImpl(TokenService tokenService, StorageDriverDao storageDriverDao) {
        this.tokenService = tokenService;
        this.storageDriverDao = storageDriverDao;
    }

    /**
     * Creates a secrets volume for the instance, or returns null when the
     * instance has no secrets or no secrets driver is available.
     */
    @Override
    public Object create(Instance instance) {
        return setSecrets(instance);
    }

    /**
     * Binds the previously created secrets volume (returned by {@link #create})
     * to the instance. A null object means there was nothing to persist.
     */
    @Override
    public void persistCreate(Instance instance, Object obj) {
        if (obj == null) {
            return;
        }
        if (!(obj instanceof Volume)) {
            throw new IllegalStateException("Invalid secrets object passed to persist");
        }
        storageDriverDao.assignSecretsVolume(instance, (Volume) obj);
    }

    protected Volume setSecrets(Instance instance) {
        List<SecretReference> secrets = DataAccessor.fieldObjectList(instance, InstanceConstants.FIELD_SECRETS,
                SecretReference.class);
        if (secrets == null || secrets.isEmpty()) {
            return null;
        }

        StorageDriver driver = storageDriverDao.findSecretsDriver(instance.getClusterId());
        if (driver == null) {
            return null;
        }

        // Token ties the volume to this instance's UUID and expires after the TTL.
        String token = tokenService.generateToken(CollectionUtils.asMap("uuid", instance.getUuid()),
                new Date(System.currentTimeMillis() + SECRETS_TOKEN_TTL_MILLIS));
        return storageDriverDao.createSecretsVolume(instance, driver, token);
    }
}
| apache-2.0 |
routexl/graphhopper | core/src/main/java/com/graphhopper/reader/dem/CGIARProvider.java | 5636 | /*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.reader.dem;
import com.graphhopper.util.Helper;
import org.apache.xmlgraphics.image.codec.tiff.TIFFDecodeParam;
import org.apache.xmlgraphics.image.codec.tiff.TIFFImageDecoder;
import org.apache.xmlgraphics.image.codec.util.SeekableStream;
import java.awt.image.Raster;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/**
* Elevation data from CGIAR project http://srtm.csi.cgiar.org/ 'PROCESSED SRTM DATA VERSION 4.1'.
* Every file covers a region of 5x5 degree. License granted for all people using GraphHopper:
* http://graphhopper.com/public/license/CGIAR.txt
* <p>
* Every zip contains readme.txt with the necessary information e.g.:
* <ol>
* <li>
* All GeoTiffs with 6000 x 6000 pixels.
* </li>
* </ol>
* <p>
*
* @author NopMap
* @author Peter Karich
*/
public class CGIARProvider extends AbstractTiffElevationProvider {
    // Reciprocal of the precision inherited from the base provider; used as a
    // tolerance for coordinates that sit (almost) exactly on a tile boundary.
    private final double invPrecision = 1 / precision;

    public CGIARProvider() {
        this("");
    }

    /**
     * @param cacheDir local directory for downloaded tiles; an empty string selects
     *                 the default "/tmp/cgiar"
     */
    public CGIARProvider(String cacheDir) {
        // Alternative URLs for the CGIAR data can be found in #346
        super("https://srtm.csi.cgiar.org/wp-content/uploads/files/srtm_5x5/TIFF/",
                cacheDir.isEmpty() ? "/tmp/cgiar" : cacheDir,
                "GraphHopper CGIARReader",
                6000, 6000, // raster width/height in pixels per tile
                5, 5);      // tile extent in degrees
    }

    /** Ad-hoc smoke test: prints elevations for a few coordinates with known values. */
    public static void main(String[] args) {
        CGIARProvider provider = new CGIARProvider();

        System.out.println(provider.getEle(46, -20));

        // 337.0
        System.out.println(provider.getEle(49.949784, 11.57517));
        // 466.0
        System.out.println(provider.getEle(49.968668, 11.575127));
        // 455.0
        System.out.println(provider.getEle(49.968682, 11.574842));

        // 3134
        System.out.println(provider.getEle(-22.532854, -65.110474));

        // 120
        System.out.println(provider.getEle(38.065392, -87.099609));

        // 1615
        System.out.println(provider.getEle(40, -105.2277023));
        System.out.println(provider.getEle(39.99999999, -105.2277023));
        System.out.println(provider.getEle(39.9999999, -105.2277023));
        // 1616
        System.out.println(provider.getEle(39.999999, -105.2277023));

        // 0
        System.out.println(provider.getEle(29.840644, -42.890625));

        // 841
        System.out.println(provider.getEle(48.469123, 9.576393));
    }

    /**
     * Extracts the named GeoTIFF entry from the downloaded zip file and decodes it
     * into a raster.
     *
     * @param tifName name of the .tif entry expected inside the zip
     * @throws RuntimeException if the entry cannot be decoded
     */
    @Override
    Raster generateRasterFromFile(File file, String tifName) {
        SeekableStream ss = null;
        try {
            InputStream is = new FileInputStream(file);
            ZipInputStream zis = new ZipInputStream(is);
            // find tif file in zip
            ZipEntry entry = zis.getNextEntry();
            while (entry != null && !entry.getName().equals(tifName)) {
                entry = zis.getNextEntry();
            }

            // NOTE(review): if no matching entry exists, decoding proceeds on an
            // exhausted stream and fails below; the streams opened above also leak if
            // wrapInputStream throws before ss is assigned — consider try-with-resources.
            ss = SeekableStream.wrapInputStream(zis, true);
            TIFFImageDecoder imageDecoder = new TIFFImageDecoder(ss, new TIFFDecodeParam());
            return imageDecoder.decodeAsRaster();
        } catch (Exception e) {
            throw new RuntimeException("Can't decode " + tifName, e);
        } finally {
            if (ss != null)
                Helper.close(ss);
        }
    }

    /** Rounds the value down (toward negative infinity) to the closest multiple of LAT_DEGREE (5). */
    int down(double val) {
        // 'rounding' to closest 5
        int intVal = (int) (val / LAT_DEGREE) * LAT_DEGREE;
        // the cast truncates toward zero, so for negative values step one tile further
        // down — unless the value sits (within precision) exactly on a tile boundary
        if (!(val >= 0 || intVal - val < invPrecision))
            intVal = intVal - LAT_DEGREE;
        return intVal;
    }

    /** The CGIAR/SRTM data set only covers latitudes between -56 and 60 degrees. */
    @Override
    boolean isOutsideSupportedArea(double lat, double lon) {
        return lat >= 60 || lat <= -56;
    }

    /**
     * Builds the CGIAR tile name "srtm_XX_YY" for the tile containing the coordinate:
     * XX is the 1-based 5-degree column counted from -180 (west), YY the 1-based
     * 5-degree row counted from +60 (north).
     */
    protected String getFileName(double lat, double lon) {
        lon = 1 + (180 + lon) / LAT_DEGREE;
        int lonInt = (int) lon;
        lat = 1 + (60 - lat) / LAT_DEGREE;
        int latInt = (int) lat;

        // coordinates (within precision) exactly on a northern tile border belong to
        // the tile above
        if (Math.abs(latInt - lat) < invPrecision / LAT_DEGREE)
            latInt--;

        // replace String.format as it seems to be slow
        // String.format("srtm_%02d_%02d", lonInt, latInt);
        String str = "srtm_";
        str += lonInt < 10 ? "0" : "";
        str += lonInt;
        str += latInt < 10 ? "_0" : "_";
        str += latInt;

        return str;
    }

    @Override
    int getMinLatForTile(double lat) {
        return down(lat);
    }

    @Override
    int getMinLonForTile(double lon) {
        return down(lon);
    }

    @Override
    String getDownloadURL(double lat, double lon) {
        return baseUrl + "/" + getFileName(lat, lon) + ".zip";
    }

    @Override
    String getFileNameOfLocalFile(double lat, double lon) {
        // the local cache file keeps the full download URL as its name
        return getDownloadURL(lat, lon);
    }

    @Override
    public String toString() {
        return "cgiar";
    }
}
| apache-2.0 |
Gasol/commons-io2 | src/test/java/org/apache/commons/io/comparator/DefaultFileComparatorTest.java | 1682 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.io.comparator;
import java.io.File;
/**
 * Test case for {@link DefaultFileComparator}, wiring the default comparator
 * fixtures into the inherited comparator contract tests.
 */
public class DefaultFileComparatorTest extends ComparatorAbstractTestCase {

    /**
     * Constructs a test case with the given name.
     *
     * @param name the name of the test
     */
    public DefaultFileComparatorTest(String name) {
        super(name);
    }

    /** @see junit.framework.TestCase#setUp() */
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        equalFile1 = new File("foo");
        equalFile2 = new File("foo");
        lessFile = new File("abc");
        moreFile = new File("xyz");
        comparator = (AbstractFileComparator) DefaultFileComparator.DEFAULT_COMPARATOR;
        reverse = DefaultFileComparator.DEFAULT_REVERSE;
    }
}
| apache-2.0 |
dumitru-petrusca/gosu-lang | gosu-core-api/src/main/java/gw/lang/ir/Internal.java | 648 | package gw.lang.ir;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * The compiler adds this to fields and methods that are explicitly assigned
 * 'internal' accessibility because private members may be compiled with internal
 * accessibility for the sake of inner class access. Basically this annotation
 * serves to distinguish between what is explicitly internal and what was merely
 * widened to internal by the compiler, primarily for tooling.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD, ElementType.METHOD})
public @interface Internal {
}
| apache-2.0 |
AndroidX/androidx | media2/media2-session/version-compat-tests/current/client/src/androidTest/java/androidx/media2/test/client/tests/MediaBrowserCallbackTest.java | 21720 | /*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media2.test.client.tests;
import static androidx.media2.session.LibraryResult.RESULT_ERROR_BAD_VALUE;
import static androidx.media2.session.LibraryResult.RESULT_SUCCESS;
import static androidx.media2.test.common.CommonConstants.MOCK_MEDIA2_LIBRARY_SERVICE;
import static androidx.media2.test.common.MediaBrowserConstants.CUSTOM_ACTION_ASSERT_PARAMS;
import static androidx.media2.test.common.MediaBrowserConstants.LONG_LIST_COUNT;
import static androidx.media2.test.common.MediaBrowserConstants.NOTIFY_CHILDREN_CHANGED_EXTRAS;
import static androidx.media2.test.common.MediaBrowserConstants.NOTIFY_CHILDREN_CHANGED_ITEM_COUNT;
import static androidx.media2.test.common.MediaBrowserConstants.ROOT_EXTRAS;
import static androidx.media2.test.common.MediaBrowserConstants.ROOT_ID;
import static androidx.media2.test.common.MediaBrowserConstants.SUBSCRIBE_ID_NOTIFY_CHILDREN_CHANGED_TO_ALL;
import static androidx.media2.test.common.MediaBrowserConstants.SUBSCRIBE_ID_NOTIFY_CHILDREN_CHANGED_TO_ALL_WITH_NON_SUBSCRIBED_ID;
import static androidx.media2.test.common.MediaBrowserConstants.SUBSCRIBE_ID_NOTIFY_CHILDREN_CHANGED_TO_ONE;
import static androidx.media2.test.common.MediaBrowserConstants.SUBSCRIBE_ID_NOTIFY_CHILDREN_CHANGED_TO_ONE_WITH_NON_SUBSCRIBED_ID;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertNull;
import static junit.framework.Assert.assertTrue;
import static junit.framework.Assert.fail;
import static org.junit.Assert.assertNotEquals;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.media2.common.MediaItem;
import androidx.media2.common.MediaMetadata;
import androidx.media2.session.LibraryResult;
import androidx.media2.session.MediaBrowser;
import androidx.media2.session.MediaBrowser.BrowserCallback;
import androidx.media2.session.MediaController;
import androidx.media2.session.MediaLibraryService.LibraryParams;
import androidx.media2.session.SessionCommand;
import androidx.media2.session.SessionResult;
import androidx.media2.session.SessionToken;
import androidx.media2.test.client.MediaTestUtils;
import androidx.media2.test.common.MediaBrowserConstants;
import androidx.media2.test.common.TestUtils;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.FlakyTest;
import androidx.test.filters.LargeTest;
import androidx.versionedparcelable.ParcelUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
/**
* Tests {@link MediaBrowser.BrowserCallback}.
* <p>
* This test inherits {@link MediaControllerCallbackTest} to ensure that inherited APIs from
* {@link MediaController} works cleanly.
*/
// TODO: (internal cleanup) Move tests that aren't related with callbacks.
@FlakyTest(bugId = 202942942)
@RunWith(AndroidJUnit4.class)
@LargeTest
public class MediaBrowserCallbackTest extends MediaControllerCallbackTest {
    private static final String TAG = "MediaBrowserCallbackTest";

    /**
     * Creates a {@link MediaBrowser} (rather than a plain controller) for the given
     * token, so the inherited controller tests all run against the browser subclass.
     */
    @Override
    MediaController onCreateController(@NonNull final SessionToken token,
            @Nullable final Bundle connectionHints, @Nullable final TestBrowserCallback callback)
            throws InterruptedException {
        assertNotNull("Test bug", token);
        final AtomicReference<MediaController> controller = new AtomicReference<>();
        sHandler.postAndSync(new Runnable() {
            @Override
            public void run() {
                // Create controller on the test handler, for changing MediaBrowserCompat's Handler
                // Looper. Otherwise, MediaBrowserCompat will post all the commands to the handler
                // and commands wouldn't be run if tests codes waits on the test handler.
                MediaBrowser.Builder builder = new MediaBrowser.Builder(mContext)
                        .setSessionToken(token)
                        .setControllerCallback(sHandlerExecutor, callback);
                if (connectionHints != null) {
                    builder.setConnectionHints(connectionHints);
                }
                controller.set(builder.build());
            }
        });
        return controller.get();
    }

    /** Connects a browser to the mock library service with no hints and no callback. */
    final MediaBrowser createBrowser() throws InterruptedException {
        return createBrowser(null, null);
    }

    /** Connects a browser to the mock library service with the given hints and callback. */
    final MediaBrowser createBrowser(@Nullable Bundle connectionHints,
            @Nullable BrowserCallback callback) throws InterruptedException {
        final SessionToken token = new SessionToken(mContext, MOCK_MEDIA2_LIBRARY_SERVICE);
        return (MediaBrowser) createController(token, true, connectionHints, callback);
    }

    @Test
    public void getLibraryRoot() throws Exception {
        final LibraryParams params = new LibraryParams.Builder()
                .setOffline(true).setRecent(true).setExtras(new Bundle()).build();

        MediaBrowser browser = createBrowser();
        setExpectedLibraryParam(browser, params);
        LibraryResult result = browser.getLibraryRoot(params)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());
        MediaMetadata metadata = result.getMediaItem().getMetadata();
        assertEquals(ROOT_ID, metadata.getString(MediaMetadata.METADATA_KEY_MEDIA_ID));
        assertTrue(TestUtils.equals(ROOT_EXTRAS, result.getLibraryParams().getExtras()));
    }

    @Test
    public void getItem() throws Exception {
        final String mediaId = MediaBrowserConstants.MEDIA_ID_GET_ITEM;

        LibraryResult result = createBrowser().getItem(mediaId)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());
        MediaTestUtils.assertMediaItemHasId(result.getMediaItem(), mediaId);
    }

    @Test
    public void getItem_unknownId() throws Exception {
        final String mediaId = "random_media_id";

        LibraryResult result = createBrowser().getItem(mediaId)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(RESULT_ERROR_BAD_VALUE, result.getResultCode());
        assertNull(result.getMediaItem());
    }

    @Test
    public void getItem_nullResult() throws Exception {
        final String mediaId = MediaBrowserConstants.MEDIA_ID_GET_NULL_ITEM;

        // The service's callback returns null; any non-success code (or a timeout) is accepted.
        try {
            LibraryResult result = createBrowser().getItem(mediaId)
                    .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
            assertNotEquals(RESULT_SUCCESS, result.getResultCode());
        } catch (TimeoutException e) {
            // May happen.
        }
    }

    @Test
    public void getItem_invalidResult() throws Exception {
        final String mediaId = MediaBrowserConstants.MEDIA_ID_GET_INVALID_ITEM;

        // The service's callback returns an invalid item; non-success or timeout is accepted.
        try {
            LibraryResult result = createBrowser().getItem(mediaId)
                    .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
            assertNotEquals(RESULT_SUCCESS, result.getResultCode());
        } catch (TimeoutException e) {
            // May happen.
        }
    }

    @Test
    public void getChildren() throws Exception {
        final String parentId = MediaBrowserConstants.PARENT_ID;
        final int page = 4;
        final int pageSize = 10;
        final LibraryParams params = MediaTestUtils.createLibraryParams();

        MediaBrowser browser = createBrowser();
        setExpectedLibraryParam(browser, params);

        LibraryResult result = browser.getChildren(parentId, page, pageSize, params)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());
        assertNull(result.getLibraryParams());

        MediaTestUtils.assertPaginatedListHasIds(
                result.getMediaItems(), MediaBrowserConstants.GET_CHILDREN_RESULT,
                page, pageSize);
    }

    @Test
    @LargeTest
    public void getChildren_withLongList() throws Exception {
        final String parentId = MediaBrowserConstants.PARENT_ID_LONG_LIST;
        final int page = 0;
        final int pageSize = Integer.MAX_VALUE;
        final LibraryParams params = MediaTestUtils.createLibraryParams();

        MediaBrowser browser = createBrowser();
        setExpectedLibraryParam(browser, params);

        // Longer timeout: the long list has to cross the binder in chunks.
        LibraryResult result = browser.getChildren(parentId, page, pageSize, params)
                .get(10, TimeUnit.SECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());
        assertNull(result.getLibraryParams());

        List<MediaItem> list = result.getMediaItems();
        assertEquals(LONG_LIST_COUNT, list.size());
        for (int i = 0; i < result.getMediaItems().size(); i++) {
            assertEquals(TestUtils.getMediaIdInFakeList(i), list.get(i).getMediaId());
        }
    }

    @Test
    public void getChildren_emptyResult() throws Exception {
        final String parentId = MediaBrowserConstants.PARENT_ID_NO_CHILDREN;

        MediaBrowser browser = createBrowser();
        LibraryResult result = browser.getChildren(parentId, 1, 1, null)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());
        assertEquals(0, result.getMediaItems().size());
    }

    @Test
    public void getChildren_nullResult() throws Exception {
        final String parentId = MediaBrowserConstants.PARENT_ID_ERROR;

        MediaBrowser browser = createBrowser();
        LibraryResult result = browser.getChildren(parentId, 1, 1, null)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertNotEquals(RESULT_SUCCESS, result.getResultCode());
        assertNull(result.getMediaItems());
    }

    @Test
    public void searchCallbacks() throws Exception {
        final String query = MediaBrowserConstants.SEARCH_QUERY;
        final int page = 4;
        final int pageSize = 10;
        final LibraryParams testParams = MediaTestUtils.createLibraryParams();

        final CountDownLatch latchForSearch = new CountDownLatch(1);
        final BrowserCallback callback = new BrowserCallback() {
            @Override
            public void onSearchResultChanged(MediaBrowser browser,
                    String queryOut, int itemCount, LibraryParams params) {
                assertEquals(query, queryOut);
                MediaTestUtils.assertLibraryParamsEquals(testParams, params);
                assertEquals(MediaBrowserConstants.SEARCH_RESULT_COUNT, itemCount);
                latchForSearch.countDown();
            }
        };

        // Request the search.
        MediaBrowser browser = createBrowser(null, callback);
        setExpectedLibraryParam(browser, testParams);
        LibraryResult result = browser.search(query, testParams)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());

        // Get the search result.
        result = browser.getSearchResult(query, page, pageSize, testParams)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());
        MediaTestUtils.assertPaginatedListHasIds(result.getMediaItems(),
                MediaBrowserConstants.SEARCH_RESULT, page, pageSize);
    }

    @Test
    @LargeTest
    public void searchCallbacks_withLongList() throws Exception {
        final String query = MediaBrowserConstants.SEARCH_QUERY_LONG_LIST;
        final int page = 0;
        final int pageSize = Integer.MAX_VALUE;
        final LibraryParams testParams = MediaTestUtils.createLibraryParams();

        final CountDownLatch latch = new CountDownLatch(1);
        final BrowserCallback callback = new BrowserCallback() {
            @Override
            public void onSearchResultChanged(
                    MediaBrowser browser, String queryOut, int itemCount, LibraryParams params) {
                assertEquals(query, queryOut);
                MediaTestUtils.assertLibraryParamsEquals(testParams, params);
                assertEquals(MediaBrowserConstants.LONG_LIST_COUNT, itemCount);
                latch.countDown();
            }
        };

        MediaBrowser browser = createBrowser(null, callback);
        setExpectedLibraryParam(browser, testParams);
        LibraryResult result = browser.search(query, testParams)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());

        // Longer timeout: the long result list has to cross the binder in chunks.
        result = browser.getSearchResult(query, page, pageSize, testParams)
                .get(10, TimeUnit.SECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());
        List<MediaItem> list = result.getMediaItems();
        for (int i = 0; i < list.size(); i++) {
            assertEquals(TestUtils.getMediaIdInFakeList(i), list.get(i).getMediaId());
        }
    }

    @Test
    @LargeTest
    public void onSearchResultChanged_searchTakesTime() throws Exception {
        final String query = MediaBrowserConstants.SEARCH_QUERY_TAKES_TIME;
        final LibraryParams testParams = MediaTestUtils.createLibraryParams();

        final CountDownLatch latch = new CountDownLatch(1);
        final BrowserCallback callback = new BrowserCallback() {
            @Override
            public void onSearchResultChanged(
                    MediaBrowser browser, String queryOut, int itemCount, LibraryParams params) {
                assertEquals(query, queryOut);
                MediaTestUtils.assertLibraryParamsEquals(testParams, params);
                assertEquals(MediaBrowserConstants.SEARCH_RESULT_COUNT, itemCount);
                latch.countDown();
            }
        };

        MediaBrowser browser = createBrowser(null, callback);
        setExpectedLibraryParam(browser, testParams);
        // The timeout budget includes the service's artificial search delay.
        LibraryResult result = browser.search(query, testParams)
                .get(MediaBrowserConstants.SEARCH_TIME_IN_MS + TIMEOUT_MS,
                        TimeUnit.MILLISECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());
    }

    @Test
    public void onSearchResultChanged_emptyResult() throws Exception {
        final String query = MediaBrowserConstants.SEARCH_QUERY_EMPTY_RESULT;
        final LibraryParams testParams = MediaTestUtils.createLibraryParams();

        final CountDownLatch latch = new CountDownLatch(1);
        final BrowserCallback callback = new BrowserCallback() {
            @Override
            public void onSearchResultChanged(
                    MediaBrowser browser, String queryOut, int itemCount, LibraryParams params) {
                assertEquals(query, queryOut);
                MediaTestUtils.assertLibraryParamsEquals(testParams, params);
                assertEquals(0, itemCount);
                latch.countDown();
            }
        };

        MediaBrowser browser = createBrowser(null, callback);
        setExpectedLibraryParam(browser, testParams);
        LibraryResult result = browser.search(query, testParams)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());
    }

    @Test
    public void onChildrenChanged_calledWhenSubscribed() throws Exception {
        // This test uses MediaLibrarySession.notifyChildrenChanged().
        final String expectedParentId = SUBSCRIBE_ID_NOTIFY_CHILDREN_CHANGED_TO_ALL;

        final CountDownLatch latch = new CountDownLatch(1);
        final BrowserCallback controllerCallbackProxy = new BrowserCallback() {
            @Override
            public void onChildrenChanged(MediaBrowser browser, String parentId,
                    int itemCount, LibraryParams params) {
                assertEquals(expectedParentId, parentId);
                assertEquals(NOTIFY_CHILDREN_CHANGED_ITEM_COUNT, itemCount);
                MediaTestUtils.assertLibraryParamsEquals(params, NOTIFY_CHILDREN_CHANGED_EXTRAS);
                latch.countDown();
            }
        };

        LibraryResult result = createBrowser(null, controllerCallbackProxy)
                .subscribe(expectedParentId, null)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());

        // The MediaLibrarySession in MockMediaLibraryService is supposed to call
        // notifyChildrenChanged() in its callback onSubscribe().
        assertTrue(latch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
    }

    @Test
    public void onChildrenChanged_calledWhenSubscribed2() throws Exception {
        // This test uses MediaLibrarySession.notifyChildrenChanged(ControllerInfo).
        final String expectedParentId = SUBSCRIBE_ID_NOTIFY_CHILDREN_CHANGED_TO_ONE;

        final CountDownLatch latch = new CountDownLatch(1);
        final BrowserCallback controllerCallbackProxy = new BrowserCallback() {
            @Override
            public void onChildrenChanged(MediaBrowser browser, String parentId,
                    int itemCount, LibraryParams params) {
                assertEquals(expectedParentId, parentId);
                assertEquals(NOTIFY_CHILDREN_CHANGED_ITEM_COUNT, itemCount);
                MediaTestUtils.assertLibraryParamsEquals(params, NOTIFY_CHILDREN_CHANGED_EXTRAS);
                latch.countDown();
            }
        };

        LibraryResult result = createBrowser(null, controllerCallbackProxy)
                .subscribe(expectedParentId, null)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());

        // The MediaLibrarySession in MockMediaLibraryService is supposed to call
        // notifyChildrenChanged(ControllerInfo) in its callback onSubscribe().
        assertTrue(latch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
    }

    @Test
    public void onChildrenChanged_notCalledWhenNotSubscribed() throws Exception {
        // This test uses MediaLibrarySession.notifyChildrenChanged().
        final String subscribedMediaId =
                SUBSCRIBE_ID_NOTIFY_CHILDREN_CHANGED_TO_ALL_WITH_NON_SUBSCRIBED_ID;
        final CountDownLatch latch = new CountDownLatch(1);

        final BrowserCallback controllerCallbackProxy = new BrowserCallback() {
            @Override
            public void onChildrenChanged(MediaBrowser browser, String parentId,
                    int itemCount, LibraryParams params) {
                // Unexpected call.
                fail();
                // NOTE(review): unreachable — fail() throws before this runs.
                latch.countDown();
            }
        };

        LibraryResult result = createBrowser(null, controllerCallbackProxy)
                .subscribe(subscribedMediaId, null)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());

        // The MediaLibrarySession in MockMediaLibraryService is supposed to call
        // notifyChildrenChanged() in its callback onSubscribe(), but with a different media ID.
        // Therefore, onChildrenChanged() should not be called.
        assertFalse(latch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
    }

    @Test
    public void onChildrenChanged_notCalledWhenNotSubscribed2() throws Exception {
        // This test uses MediaLibrarySession.notifyChildrenChanged(ControllerInfo).
        final String subscribedMediaId =
                SUBSCRIBE_ID_NOTIFY_CHILDREN_CHANGED_TO_ONE_WITH_NON_SUBSCRIBED_ID;
        final CountDownLatch latch = new CountDownLatch(1);

        final BrowserCallback controllerCallbackProxy = new BrowserCallback() {
            @Override
            public void onChildrenChanged(MediaBrowser browser, String parentId,
                    int itemCount, LibraryParams params) {
                // Unexpected call.
                fail();
                // NOTE(review): unreachable — fail() throws before this runs.
                latch.countDown();
            }
        };

        LibraryResult result = createBrowser(null, controllerCallbackProxy)
                .subscribe(subscribedMediaId, null)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(RESULT_SUCCESS, result.getResultCode());

        // The MediaLibrarySession in MockMediaLibraryService is supposed to call
        // notifyChildrenChanged(ControllerInfo) in its callback onSubscribe(),
        // but with a different media ID.
        // Therefore, onChildrenChanged() should not be called.
        assertFalse(latch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
    }

    /**
     * Tells the mock service (via a custom command) which {@link LibraryParams} the next
     * library call is expected to carry, so the service can assert on its side.
     */
    private void setExpectedLibraryParam(MediaBrowser browser, LibraryParams params)
            throws Exception {
        SessionCommand command = new SessionCommand(CUSTOM_ACTION_ASSERT_PARAMS, null);
        Bundle args = new Bundle();
        ParcelUtils.putVersionedParcelable(args, CUSTOM_ACTION_ASSERT_PARAMS, params);
        SessionResult result = browser.sendCustomCommand(command, args)
                .get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
        assertEquals(SessionResult.RESULT_SUCCESS, result.getResultCode());
    }
}
| apache-2.0 |
mxm/incubator-beam | sdks/java/testing/test-utils/src/main/java/org/apache/beam/sdk/testutils/metrics/MetricsReader.java | 5551 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.testutils.metrics;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.stream.StreamSupport;
import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.sdk.metrics.DistributionResult;
import org.apache.beam.sdk.metrics.MetricNameFilter;
import org.apache.beam.sdk.metrics.MetricQueryResults;
import org.apache.beam.sdk.metrics.MetricResult;
import org.apache.beam.sdk.metrics.MetricsFilter;
import org.joda.time.Duration;
import org.slf4j.LoggerFactory;
/** Provides methods for querying metrics from {@link PipelineResult} per namespace. */
public class MetricsReader {

    private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(MetricsReader.class);

    // Sentinel returned when a metric cannot be retrieved or no credible value exists.
    private static final long ERRONEOUS_METRIC_VALUE = -1;

    private final PipelineResult result;

    private final String namespace;

    // Reference timestamp (epoch millis) used by isCredible(); injectable for tests.
    private final long now;

    @VisibleForTesting
    MetricsReader(PipelineResult result, String namespace, long now) {
        this.result = result;
        this.namespace = namespace;
        this.now = now;
    }

    public MetricsReader(PipelineResult result, String namespace) {
        this(result, namespace, System.currentTimeMillis());
    }

    /**
     * Return the current value for a long counter, or -1 if can't be retrieved. Note this uses only
     * attempted metrics because some runners don't support committed metrics.
     */
    public long getCounterMetric(String name) {
        MetricQueryResults metrics =
            result
                .metrics()
                .queryMetrics(
                    MetricsFilter.builder()
                        .addNameFilter(MetricNameFilter.named(namespace, name))
                        .build());
        Iterable<MetricResult<Long>> counters = metrics.getCounters();
        checkIfMetricResultIsUnique(name, counters);

        try {
            MetricResult<Long> metricResult = counters.iterator().next();
            return metricResult.getAttempted();
        } catch (NoSuchElementException e) {
            LOG.error("Failed to get metric {}, from namespace {}", name, namespace);
        }
        return ERRONEOUS_METRIC_VALUE;
    }

    /**
     * Return start time metric by counting the difference between "now" and min value from a
     * distribution metric.
     */
    public long getStartTimeMetric(String name) {
        Iterable<MetricResult<DistributionResult>> timeDistributions = getDistributions(name);
        return getLowestMin(timeDistributions);
    }

    // Smallest credible "min" across all matching distributions, or -1 if none qualifies.
    private Long getLowestMin(Iterable<MetricResult<DistributionResult>> distributions) {
        Optional<Long> lowestMin =
            StreamSupport.stream(distributions.spliterator(), true)
                .map(element -> element.getAttempted().getMin())
                .filter(this::isCredible)
                .min(Long::compareTo);

        return lowestMin.orElse(ERRONEOUS_METRIC_VALUE);
    }

    /**
     * Return end time metric by counting the difference between "now" and MAX value from a
     * distribution metric.
     */
    public long getEndTimeMetric(String name) {
        Iterable<MetricResult<DistributionResult>> timeDistributions = getDistributions(name);
        return getGreatestMax(timeDistributions);
    }

    // Largest credible "max" across all matching distributions, or -1 if none qualifies.
    private Long getGreatestMax(Iterable<MetricResult<DistributionResult>> distributions) {
        Optional<Long> greatestMax =
            StreamSupport.stream(distributions.spliterator(), true)
                .map(element -> element.getAttempted().getMax())
                .filter(this::isCredible)
                .max(Long::compareTo);

        return greatestMax.orElse(ERRONEOUS_METRIC_VALUE);
    }

    // Queries all distribution metrics with the given name in this reader's namespace.
    private Iterable<MetricResult<DistributionResult>> getDistributions(String name) {
        MetricQueryResults metrics =
            result
                .metrics()
                .queryMetrics(
                    MetricsFilter.builder()
                        .addNameFilter(MetricNameFilter.named(namespace, name))
                        .build());
        return metrics.getDistributions();
    }

    // Fails fast if more than one metric result matches the name within the namespace.
    private <T> void checkIfMetricResultIsUnique(String name, Iterable<MetricResult<T>> metricResult)
            throws IllegalStateException {

        int resultCount = Iterables.size(metricResult);
        Preconditions.checkState(
            resultCount <= 1,
            "More than one metric result matches name: %s in namespace %s. Metric results count: %s",
            name,
            namespace,
            resultCount);
    }

    /**
     * Timestamp metrics are used to monitor the time of execution of transforms. If the reported
     * timestamp is too far (more than 10000 days) from {@code now}, the metric is considered
     * erroneous and is filtered out.
     */
    private boolean isCredible(long value) {
        return (Math.abs(value - now) <= Duration.standardDays(10000).getMillis());
    }
}
| apache-2.0 |
mhurne/aws-sdk-java | aws-java-sdk-redshift/src/main/java/com/amazonaws/services/redshift/model/transform/DeleteClusterRequestMarshaller.java | 2362 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.redshift.model.transform;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.amazonaws.AmazonClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.internal.ListWithAutoConstructFlag;
import com.amazonaws.services.redshift.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringUtils;
/**
 * Marshals a {@link DeleteClusterRequest} into the Query-protocol {@link Request}
 * sent to Amazon Redshift. Optional fields are only added when non-null.
 */
public class DeleteClusterRequestMarshaller implements Marshaller<Request<DeleteClusterRequest>, DeleteClusterRequest> {

    public Request<DeleteClusterRequest> marshall(DeleteClusterRequest deleteClusterRequest) {
        if (deleteClusterRequest == null) {
            throw new AmazonClientException("Invalid argument passed to marshall(...)");
        }

        Request<DeleteClusterRequest> marshalledRequest =
                new DefaultRequest<DeleteClusterRequest>(deleteClusterRequest, "AmazonRedshift");
        marshalledRequest.addParameter("Action", "DeleteCluster");
        marshalledRequest.addParameter("Version", "2012-12-01");

        String clusterIdentifier = deleteClusterRequest.getClusterIdentifier();
        if (clusterIdentifier != null) {
            marshalledRequest.addParameter("ClusterIdentifier",
                    StringUtils.fromString(clusterIdentifier));
        }
        Boolean skipFinalClusterSnapshot = deleteClusterRequest.isSkipFinalClusterSnapshot();
        if (skipFinalClusterSnapshot != null) {
            marshalledRequest.addParameter("SkipFinalClusterSnapshot",
                    StringUtils.fromBoolean(skipFinalClusterSnapshot));
        }
        String finalClusterSnapshotIdentifier = deleteClusterRequest.getFinalClusterSnapshotIdentifier();
        if (finalClusterSnapshotIdentifier != null) {
            marshalledRequest.addParameter("FinalClusterSnapshotIdentifier",
                    StringUtils.fromString(finalClusterSnapshotIdentifier));
        }

        return marshalledRequest;
    }
}
| apache-2.0 |
noobyang/AndroidStudy | opencv/src/main/java/org/opencv/imgproc/LineSegmentDetector.java | 4330 | //
// This file is auto-generated. Please don't modify it!
//
package org.opencv.imgproc;
import org.opencv.core.Algorithm;
import org.opencv.core.Mat;
import org.opencv.core.Size;
// C++: class LineSegmentDetector
//javadoc: LineSegmentDetector
// Auto-generated OpenCV Java binding for cv::LineSegmentDetector. Every public
// method forwards to a private native (JNI) entry point declared at the bottom
// of the class, passing the peer handles (nativeObj) of the wrapped objects
// across the JNI boundary. Per the header above, this file must not be edited
// by hand — it is regenerated from the C++ headers.
public class LineSegmentDetector extends Algorithm {
    // Wraps the native cv::LineSegmentDetector instance located at the given address.
    protected LineSegmentDetector(long addr) { super(addr); }
    // internal usage only
    public static LineSegmentDetector __fromPtr__(long addr) { return new LineSegmentDetector(addr); }
    //
    // C++: int cv::LineSegmentDetector::compareSegments(Size size, Mat lines1, Mat lines2, Mat& _image = Mat())
    //
    //javadoc: LineSegmentDetector::compareSegments(size, lines1, lines2, _image)
    // NOTE(review): return semantics are defined by the native implementation —
    // see the OpenCV cv::LineSegmentDetector::compareSegments documentation.
    public int compareSegments(Size size, Mat lines1, Mat lines2, Mat _image)
    {
        int retVal = compareSegments_0(nativeObj, size.width, size.height, lines1.nativeObj, lines2.nativeObj, _image.nativeObj);
        return retVal;
    }
    //javadoc: LineSegmentDetector::compareSegments(size, lines1, lines2)
    // Overload omitting the optional output image (native default argument).
    public int compareSegments(Size size, Mat lines1, Mat lines2)
    {
        int retVal = compareSegments_1(nativeObj, size.width, size.height, lines1.nativeObj, lines2.nativeObj);
        return retVal;
    }
    //
    // C++: void cv::LineSegmentDetector::detect(Mat _image, Mat& _lines, Mat& width = Mat(), Mat& prec = Mat(), Mat& nfa = Mat())
    //
    //javadoc: LineSegmentDetector::detect(_image, _lines, width, prec, nfa)
    // Detects line segments in _image; results are written into the output Mats
    // (_lines plus the optional width/prec/nfa outputs, see the C++ signature above).
    public void detect(Mat _image, Mat _lines, Mat width, Mat prec, Mat nfa)
    {
        detect_0(nativeObj, _image.nativeObj, _lines.nativeObj, width.nativeObj, prec.nativeObj, nfa.nativeObj);
        return;
    }
    //javadoc: LineSegmentDetector::detect(_image, _lines, width, prec)
    // Overload omitting the optional nfa output.
    public void detect(Mat _image, Mat _lines, Mat width, Mat prec)
    {
        detect_1(nativeObj, _image.nativeObj, _lines.nativeObj, width.nativeObj, prec.nativeObj);
        return;
    }
    //javadoc: LineSegmentDetector::detect(_image, _lines, width)
    // Overload omitting the optional prec and nfa outputs.
    public void detect(Mat _image, Mat _lines, Mat width)
    {
        detect_2(nativeObj, _image.nativeObj, _lines.nativeObj, width.nativeObj);
        return;
    }
    //javadoc: LineSegmentDetector::detect(_image, _lines)
    // Overload producing only the detected lines.
    public void detect(Mat _image, Mat _lines)
    {
        detect_3(nativeObj, _image.nativeObj, _lines.nativeObj);
        return;
    }
    //
    // C++: void cv::LineSegmentDetector::drawSegments(Mat& _image, Mat lines)
    //
    //javadoc: LineSegmentDetector::drawSegments(_image, lines)
    // Draws the given line segments onto _image in place.
    public void drawSegments(Mat _image, Mat lines)
    {
        drawSegments_0(nativeObj, _image.nativeObj, lines.nativeObj);
        return;
    }
    @Override
    protected void finalize() throws Throwable {
        // Releases the native peer when the Java wrapper is garbage-collected.
        delete(nativeObj);
    }
    // C++: int cv::LineSegmentDetector::compareSegments(Size size, Mat lines1, Mat lines2, Mat& _image = Mat())
    private static native int compareSegments_0(long nativeObj, double size_width, double size_height, long lines1_nativeObj, long lines2_nativeObj, long _image_nativeObj);
    private static native int compareSegments_1(long nativeObj, double size_width, double size_height, long lines1_nativeObj, long lines2_nativeObj);
    // C++: void cv::LineSegmentDetector::detect(Mat _image, Mat& _lines, Mat& width = Mat(), Mat& prec = Mat(), Mat& nfa = Mat())
    private static native void detect_0(long nativeObj, long _image_nativeObj, long _lines_nativeObj, long width_nativeObj, long prec_nativeObj, long nfa_nativeObj);
    private static native void detect_1(long nativeObj, long _image_nativeObj, long _lines_nativeObj, long width_nativeObj, long prec_nativeObj);
    private static native void detect_2(long nativeObj, long _image_nativeObj, long _lines_nativeObj, long width_nativeObj);
    private static native void detect_3(long nativeObj, long _image_nativeObj, long _lines_nativeObj);
    // C++: void cv::LineSegmentDetector::drawSegments(Mat& _image, Mat lines)
    private static native void drawSegments_0(long nativeObj, long _image_nativeObj, long lines_nativeObj);
    // native support for java finalize()
    private static native void delete(long nativeObj);
}
| apache-2.0 |
deepu105/generator-jhipster | generators/server/templates/src/test/java/package/config/_WebConfigurerTestController.java | 1101 | <%#
Copyright 2013-2017 the original author or authors from the JHipster project.
This file is part of the JHipster project, see http://www.jhipster.tech/
for more information.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-%>
package <%= packageName %>.config;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
// Minimal controller used by tests of the web configuration. The handler bodies
// are intentionally empty: only the mapped paths matter.
@RestController
public class WebConfigurerTestController {
    // Probe endpoint under the /api/** prefix — presumably used to verify that
    // CORS handling applies to API paths (method name suggests this); confirm
    // against the corresponding WebConfigurer test.
    @GetMapping("/api/test-cors")
    public void testCorsOnApiPath() {
    }
    // Probe endpoint outside the /api/** prefix — presumably used to verify
    // CORS behavior on non-API paths; confirm against the test.
    @GetMapping("/test/test-cors")
    public void testCorsOnOtherPath() {
    }
}
| apache-2.0 |
apache/zest-qi4j | libraries/sql-liquibase/src/main/java/org/apache/polygene/library/sql/liquibase/LiquibaseAssembler.java | 1944 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package org.apache.polygene.library.sql.liquibase;
import org.apache.polygene.bootstrap.Assemblers;
import org.apache.polygene.bootstrap.ModuleAssembly;
import org.apache.polygene.bootstrap.ServiceDeclaration;
/**
 * Assembler that declares the {@link LiquibaseService} in a module, honouring
 * the visibility, identity and configuration-module settings inherited from
 * {@link Assemblers.VisibilityIdentityConfig}.
 */
public class LiquibaseAssembler
    extends Assemblers.VisibilityIdentityConfig<LiquibaseAssembler>
{
    // When true, the service is activated eagerly and applies the changelog on startup.
    private boolean applyChangelogOnStartup;
    @Override
    public void assemble( ModuleAssembly module )
    {
        super.assemble( module );
        ServiceDeclaration declaration = module.services( LiquibaseService.class ).visibleIn( visibility() );
        if( applyChangelogOnStartup )
        {
            // Eager instantiation so the changelog runs as the application comes up.
            declaration.withActivators( LiquibaseService.ApplyChangelogActivator.class ).instantiateOnStartup();
        }
        if( hasIdentity() )
        {
            declaration.identifiedBy( identity() );
        }
        if( hasConfig() )
        {
            configModule().entities( LiquibaseConfiguration.class ).visibleIn( configVisibility() );
        }
    }
    /**
     * Requests that the Liquibase changelog be applied when the service activates.
     *
     * @return this assembler, for fluent configuration
     */
    public LiquibaseAssembler applyChangelogOnStartup()
    {
        applyChangelogOnStartup = true;
        return this;
    }
}
| apache-2.0 |
ChrisA89/assertj-core | src/test/java/org/assertj/core/test/EqualsHashCodeContractTestCase.java | 2559 | /**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2017 the original author or authors.
*/
package org.assertj.core.test;
/**
 * Contract for test cases that verify an object's {@code equals} and
 * {@code hashCode} are implemented according to the contracts defined by
 * {@link Object#equals(Object)} and {@link Object#hashCode()}.
 *
 * @author Alex Ruiz
 */
public interface EqualsHashCodeContractTestCase {
  void should_not_be_equal_to_Object_of_different_type();
  /**
   * Consistency: as long as two objects are not modified, repeated invocations
   * of {@code equals} must keep returning the same result.
   */
  void equals_should_be_consistent();
  /**
   * Reflexivity: an object must always be equal to itself, unless the
   * {@code equals} method is intentionally overridden to behave otherwise.
   */
  void equals_should_be_reflexive();
  /**
   * Symmetry: if an object of one class is equal to an object of another class,
   * the reverse comparison must also hold. One object cannot unilaterally decide
   * whether it is equal to another; both objects — and consequently the classes
   * they belong to — must agree.
   */
  void equals_should_be_symmetric();
  /**
   * Transitivity: if the first object is equal to the second, and the second is
   * equal to the third, then the first must be equal to the third. Two objects
   * that agree they are equal and follow the symmetry principle cannot have an
   * incompatible equality contract with an object of a different class; all
   * three must agree for every permutation of the comparison.
   */
  void equals_should_be_transitive();
  /**
   * If two objects are equal they must have the same hash code; the converse is
   * NOT required (unequal objects may share a hash code).
   */
  void should_maintain_equals_and_hashCode_contract();
  /**
   * Verifies that {@code equals} returns {@code false} when {@code null} is
   * passed as the argument.
   */
  void should_not_be_equal_to_null();
}
| apache-2.0 |
charleso/intellij-haskforce | src/com/haskforce/parsing/srcExtsDatatypes/TypeAnn.java | 400 | package com.haskforce.parsing.srcExtsDatatypes;
/**
 * AST node for a type annotation, mirroring the haskell-src-exts constructor
 * {@code TypeAnn l (Name l) (Exp l)}.
 */
public class TypeAnn extends AnnotationTopType {
    public SrcInfoSpan srcInfoSpan;
    public NameTopType name;
    public ExpTopType exp;
    /**
     * Debug representation listing the annotated name and expression
     * (the source span is intentionally omitted, matching the original output).
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("TypeAnn{");
        text.append("name=").append(name);
        text.append(", exp=").append(exp);
        text.append('}');
        return text.toString();
    }
}
| apache-2.0 |
mbenson/spring-cloud-config | spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/encryption/CipherEnvironmentEncryptor.java | 3146 | /*
* Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.config.server.encryption;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.config.environment.Environment;
import org.springframework.cloud.config.environment.PropertySource;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
/**
 * {@link EnvironmentEncryptor} that decrypts property values prefixed with the
 * {@code {cipher}} marker, delegating to a {@link TextEncryptorLocator} to pick
 * the encryptor for each property.
 *
 * @author Dave Syer
 * @author Bartosz Wojtkiewicz
 * @author Rafal Zukowski
 *
 */
@Component
public class CipherEnvironmentEncryptor implements EnvironmentEncryptor {
	private static Log logger = LogFactory.getLog(CipherEnvironmentEncryptor.class);
	private final TextEncryptorLocator encryptor;
	private EnvironmentPrefixHelper helper = new EnvironmentPrefixHelper();
	@Autowired
	public CipherEnvironmentEncryptor(TextEncryptorLocator encryptor) {
		this.encryptor = encryptor;
	}
	@Override
	public Environment decrypt(Environment environment) {
		// With no locator configured there is nothing to decrypt; pass through unchanged.
		return this.encryptor != null ? decrypt(environment, this.encryptor)
				: environment;
	}
	private Environment decrypt(Environment environment, TextEncryptorLocator encryptor) {
		// Rebuild the environment rather than mutating the incoming one.
		Environment result = new Environment(environment.getName(),
				environment.getProfiles(), environment.getLabel(), environment.getVersion());
		for (PropertySource source : environment.getPropertySources()) {
			Map<Object, Object> map = new LinkedHashMap<Object, Object>(
					source.getSource());
			// Iterate over a snapshot of the entries because the map is modified in place.
			for (Map.Entry<Object, Object> entry : new LinkedHashSet<>(map.entrySet())) {
				Object key = entry.getKey();
				String name = key.toString();
				String value = entry.getValue().toString();
				if (value.startsWith("{cipher}")) {
					map.remove(key);
					try {
						// Drop the "{cipher}" marker first; the helper then extracts any
						// remaining key metadata to locate the encryptor, and strips that
						// prefix before the actual decryption.
						value = value.substring("{cipher}".length());
						value = encryptor.locate(
								this.helper.getEncryptorKeys(name, StringUtils
										.arrayToCommaDelimitedString(environment
												.getProfiles()), value)).decrypt(this.helper.stripPrefix(value));
					}
					catch (Exception e) {
						// Decryption failure is non-fatal: the property is re-keyed as
						// "invalid.<name>" with a placeholder value so clients can see it failed.
						value = "<n/a>";
						name = "invalid." + name;
						logger.warn("Cannot decrypt key: " + key + " (" + e.getClass()
								+ ": " + e.getMessage() + ")");
					}
					map.put(name, value);
				}
			}
			result.add(new PropertySource(source.getName(), map));
		}
		return result;
	}
}
| apache-2.0 |
kierarad/gocd | config/config-api/src/main/java/com/thoughtworks/go/domain/scm/SCMs.java | 4624 | /*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.domain.scm;
import com.thoughtworks.go.config.ConfigCollection;
import com.thoughtworks.go.config.ConfigTag;
import com.thoughtworks.go.config.Validatable;
import com.thoughtworks.go.config.ValidationContext;
import com.thoughtworks.go.domain.BaseCollection;
import com.thoughtworks.go.domain.ConfigErrors;
import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
/**
 * Collection of pluggable-SCM definitions from the config. Enforces uniqueness
 * of SCM names (case-insensitively) and of material fingerprints.
 */
@ConfigTag("scms")
@ConfigCollection(value = SCM.class)
public class SCMs extends BaseCollection<SCM> implements Validatable {
    public SCMs() {
    }

    public SCMs(SCM... scms) {
        Collections.addAll(this, scms);
    }

    /** Returns the SCM with the given id, or {@code null} when absent. */
    public SCM find(final String scmId) {
        return stream().filter(scm -> scm.getId().equals(scmId)).findFirst().orElse(null);
    }

    /** An SCM may be added only when neither its id/fingerprint nor its name collides. */
    public boolean canAdd(final SCM scm) {
        return findDuplicate(scm) == null &&
                findByName(scm.getName()) == null;
    }

    /**
     * Returns an existing SCM duplicating the given one — matched first by id,
     * then by fingerprint — or {@code null} when there is no duplicate.
     */
    public SCM findDuplicate(final SCM scm) {
        // Avoid the double lookup the previous implementation performed.
        SCM byId = find(scm.getSCMId());
        if (byId != null) {
            return byId;
        }
        return findByFingerprint(scm.getFingerprint());
    }

    /** Returns the SCM with the given material fingerprint, or {@code null}. */
    public SCM findByFingerprint(String fingerprint) {
        return stream().filter(scm -> scm.getFingerprint().equals(fingerprint)).findFirst().orElse(null);
    }

    /**
     * Finds an SCM by name, ignoring case. Uses {@link String#equalsIgnoreCase}
     * instead of default-locale {@code toLowerCase()} so the comparison is not
     * affected by locale-specific case mappings (e.g. the Turkish dotless i).
     */
    public SCM findByName(final String name) {
        return stream().filter(scm -> scm.getName().equalsIgnoreCase(name)).findFirst().orElse(null);
    }

    @Override
    public void validate(ValidationContext validationContext) {
        validateNameUniqueness();
        validateFingerprintUniqueness();
    }

    @Override
    public ConfigErrors errors() {
        return new ConfigErrors();
    }

    @Override
    public void addError(String fieldName, String message) {
        throw new RuntimeException("Not Implemented");
    }

    /**
     * Removes the SCM with the given id.
     *
     * @throws RuntimeException when no SCM with that id exists
     */
    public void removeSCM(String id) {
        SCM scmToBeDeleted = this.find(id);
        if (scmToBeDeleted == null) {
            throw new RuntimeException(String.format("Could not find SCM with id '%s'", id));
        }
        this.remove(scmToBeDeleted);
    }

    /** Attaches a NAME error to every SCM whose lower-cased name occurs more than once. */
    private void validateNameUniqueness() {
        HashMap<String, SCMs> scmsByName = new HashMap<>();
        for (SCM scm : this) {
            // Group on the lower-cased name; names are case-insensitive by contract.
            scmsByName.computeIfAbsent(scm.getName().toLowerCase(), k -> new SCMs()).add(scm);
        }
        for (SCMs scmsWithSameName : scmsByName.values()) {
            if (scmsWithSameName.size() > 1) {
                for (SCM scm : scmsWithSameName) {
                    scm.addError(SCM.NAME, String.format("Cannot save SCM, found multiple SCMs called '%s'. SCM names are case-insensitive and must be unique.", scm.getName()));
                }
            }
        }
    }

    /** Attaches an SCM_ID error to every SCM whose fingerprint occurs more than once. */
    private void validateFingerprintUniqueness() {
        HashMap<String, SCMs> scmsByFingerprint = new HashMap<>();
        for (SCM scm : this) {
            scmsByFingerprint.computeIfAbsent(scm.getFingerprint(), k -> new SCMs()).add(scm);
        }
        for (SCMs scmsWithSameFingerprint : scmsByFingerprint.values()) {
            if (scmsWithSameFingerprint.size() > 1) {
                List<String> scmNames = new ArrayList<>();
                for (SCM scm : scmsWithSameFingerprint) {
                    scmNames.add(scm.getName());
                }
                for (SCM scm : scmsWithSameFingerprint) {
                    scm.addError(SCM.SCM_ID, String.format("Cannot save SCM, found duplicate SCMs. %s", StringUtils.join(scmNames, ", ")));
                }
            }
        }
    }
}
| apache-2.0 |
ThiagoGarciaAlves/intellij-community | java/java-analysis-impl/src/com/intellij/codeInspection/defUse/DefUseInspectionBase.java | 12731 | // Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.defUse;
import com.intellij.codeInsight.daemon.GroupNames;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightControlFlowUtil;
import com.intellij.codeInsight.daemon.impl.quickfix.RemoveUnusedVariableUtil;
import com.intellij.codeInspection.*;
import com.intellij.psi.*;
import com.intellij.psi.controlFlow.DefUseUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBUI;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * Base class for the "Unused assignment" (UnusedAssignment) inspection: reports
 * variable writes — initializers, assignments and prefix/postfix increments —
 * whose value is never read, plus field initializers/assignments that are
 * definitely overwritten later. Subclasses provide the quick fixes via
 * {@link #createRemoveInitializerFix()} and {@link #createRemoveAssignmentFix()}.
 */
public class DefUseInspectionBase extends AbstractBaseJavaLocalInspectionTool {
  // User-visible options; public fields are persisted by the inspection framework.
  public boolean REPORT_PREFIX_EXPRESSIONS;
  public boolean REPORT_POSTFIX_EXPRESSIONS = true;
  public boolean REPORT_REDUNDANT_INITIALIZER = true;
  public static final String DISPLAY_NAME = InspectionsBundle.message("inspection.unused.assignment.display.name");
  public static final String SHORT_NAME = "UnusedAssignment";
  @Override
  @NotNull
  public PsiElementVisitor buildVisitor(@NotNull final ProblemsHolder holder, final boolean isOnTheFly) {
    // Only elements owning an independent control-flow scope are inspected:
    // method bodies, class initializers, block-bodied lambdas, and fields.
    return new JavaElementVisitor() {
      @Override
      public void visitMethod(PsiMethod method) {
        checkCodeBlock(method.getBody(), holder, isOnTheFly);
      }
      @Override
      public void visitClassInitializer(PsiClassInitializer initializer) {
        checkCodeBlock(initializer.getBody(), holder, isOnTheFly);
      }
      @Override
      public void visitLambdaExpression(PsiLambdaExpression expression) {
        PsiElement body = expression.getBody();
        // Expression-bodied lambdas have no code block to analyze.
        if (body instanceof PsiCodeBlock) {
          checkCodeBlock((PsiCodeBlock)body, holder, isOnTheFly);
        }
      }
      @Override
      public void visitField(PsiField field) {
        checkField(field, holder, isOnTheFly);
      }
    };
  }
  // Runs def-use analysis on a code block and reports each unused definition,
  // dispatching on the kind of write (declaration/resource, assignment, ++/--).
  private void checkCodeBlock(final PsiCodeBlock body,
                              final ProblemsHolder holder,
                              final boolean isOnTheFly) {
    if (body == null) return;
    final Set<PsiVariable> usedVariables = new THashSet<>();
    List<DefUseUtil.Info> unusedDefs = DefUseUtil.getUnusedDefs(body, usedVariables);
    if (unusedDefs != null && !unusedDefs.isEmpty()) {
      // Report in source order (by text offset) for stable, readable output.
      Collections.sort(unusedDefs, (o1, o2) -> {
        int offset1 = o1.getContext().getTextOffset();
        int offset2 = o2.getContext().getTextOffset();
        if (offset1 == offset2) return 0;
        if (offset1 < offset2) return -1;
        return 1;
      });
      for (DefUseUtil.Info info : unusedDefs) {
        PsiElement context = info.getContext();
        PsiVariable psiVariable = info.getVariable();
        if (context instanceof PsiDeclarationStatement || context instanceof PsiResourceVariable) {
          // A declaration whose initializer value is overwritten before any read.
          if (info.isRead() && REPORT_REDUNDANT_INITIALIZER) {
            reportInitializerProblem(psiVariable, holder, isOnTheFly);
          }
        }
        else if (context instanceof PsiAssignmentExpression) {
          reportAssignmentProblem(psiVariable, (PsiAssignmentExpression)context, holder, isOnTheFly);
        }
        else {
          // Prefix/postfix increments are reported only when the respective option is on.
          if (context instanceof PsiPrefixExpression && REPORT_PREFIX_EXPRESSIONS ||
              context instanceof PsiPostfixExpression && REPORT_POSTFIX_EXPRESSIONS) {
            holder.registerProblem(context,
                                   InspectionsBundle.message("inspection.unused.assignment.problem.descriptor4", "<code>#ref</code> #loc"));
          }
        }
      }
    }
  }
  // Registers a "redundant initializer" problem; the remove-initializer quick fix
  // is attached only when batch mode can prove the initializer has no side effects.
  private void reportInitializerProblem(PsiVariable psiVariable, ProblemsHolder holder, boolean isOnTheFly) {
    List<LocalQuickFix> fixes = ContainerUtil.createMaybeSingletonList(
      isOnTheFlyOrNoSideEffects(isOnTheFly, psiVariable, psiVariable.getInitializer()) ? createRemoveInitializerFix() : null);
    holder.registerProblem(ObjectUtils.notNull(psiVariable.getInitializer(), psiVariable),
                           InspectionsBundle.message("inspection.unused.assignment.problem.descriptor2",
                                                     "<code>" + psiVariable.getName() + "</code>", "<code>#ref</code> #loc"),
                           ProblemHighlightType.LIKE_UNUSED_SYMBOL,
                           fixes.toArray(LocalQuickFix.EMPTY_ARRAY)
    );
  }
  // Registers an "unused assignment" problem on the left-hand side of the assignment,
  // with the same side-effect guard for the quick fix as above.
  private void reportAssignmentProblem(PsiVariable psiVariable,
                                       PsiAssignmentExpression assignment,
                                       ProblemsHolder holder,
                                       boolean isOnTheFly) {
    List<LocalQuickFix> fixes = ContainerUtil.createMaybeSingletonList(
      isOnTheFlyOrNoSideEffects(isOnTheFly, psiVariable, assignment.getRExpression()) ? createRemoveAssignmentFix() : null);
    holder.registerProblem(assignment.getLExpression(),
                           InspectionsBundle.message("inspection.unused.assignment.problem.descriptor3",
                                                     ObjectUtils.assertNotNull(assignment.getRExpression()).getText(), "<code>#ref</code>" + " #loc"),
                           ProblemHighlightType.LIKE_UNUSED_SYMBOL, fixes.toArray(LocalQuickFix.EMPTY_ARRAY)
    );
  }
  // Reports field writes (the field initializer and writes in class initializers)
  // that are definitely overwritten by a later write or by every constructor.
  // NOTE: reads between writes are not analyzed here — only definite assignment.
  private void checkField(@NotNull PsiField field, @NotNull ProblemsHolder holder, boolean isOnTheFly) {
    if (field.hasModifierProperty(PsiModifier.FINAL)) return;
    final PsiClass psiClass = field.getContainingClass();
    if (psiClass == null) return;
    final PsiClassInitializer[] classInitializers = psiClass.getInitializers();
    final boolean isStatic = field.hasModifierProperty(PsiModifier.STATIC);
    final PsiMethod[] constructors = !isStatic ? psiClass.getConstructors() : PsiMethod.EMPTY_ARRAY;
    final boolean fieldHasInitializer = field.hasInitializer();
    // With at most one potential write there is nothing that can be overwritten.
    final int maxPossibleWritesCount = classInitializers.length + (constructors.length != 0 ? 1 : 0) + (fieldHasInitializer ? 1 : 0);
    if (maxPossibleWritesCount <= 1) return;
    final PsiClassInitializer initializerBeforeField = PsiTreeUtil.getPrevSiblingOfType(field, PsiClassInitializer.class);
    final List<FieldWrite> fieldWrites = new ArrayList<>(); // class initializers and field initializer in the program order
    // The field initializer runs at its textual position relative to the class
    // initializers, so it is inserted into the list at the matching point.
    if (fieldHasInitializer && initializerBeforeField == null) {
      fieldWrites.add(FieldWrite.createInitializer());
    }
    for (PsiClassInitializer classInitializer : classInitializers) {
      if (classInitializer.hasModifierProperty(PsiModifier.STATIC) == isStatic) {
        final List<PsiAssignmentExpression> assignments = collectAssignments(field, classInitializer);
        if (!assignments.isEmpty()) {
          boolean isDefinitely = HighlightControlFlowUtil.variableDefinitelyAssignedIn(field, classInitializer.getBody());
          fieldWrites.add(FieldWrite.createAssignments(isDefinitely, assignments));
        }
      }
      if (fieldHasInitializer && initializerBeforeField == classInitializer) {
        fieldWrites.add(FieldWrite.createInitializer());
      }
    }
    // Walk the writes from last to first: once a later write (or every
    // constructor) definitely assigns the field, earlier writes are redundant.
    Collections.reverse(fieldWrites);
    boolean wasDefinitelyAssigned = isAssignedInAllConstructors(field, constructors);
    for (final FieldWrite fieldWrite : fieldWrites) {
      if (wasDefinitelyAssigned) {
        if (fieldWrite.isInitializer()) {
          if (REPORT_REDUNDANT_INITIALIZER) {
            reportInitializerProblem(field, holder, isOnTheFly);
          }
        }
        else {
          for (PsiAssignmentExpression assignment : fieldWrite.getAssignments()) {
            reportAssignmentProblem(field, assignment, holder, isOnTheFly);
          }
        }
      }
      else if (fieldWrite.isDefinitely()) {
        wasDefinitelyAssigned = true;
      }
    }
  }
  // True when every constructor definitely assigns the field. Static fields are
  // excluded because constructors run after static initialization.
  private static boolean isAssignedInAllConstructors(@NotNull PsiField field, @NotNull PsiMethod[] constructors) {
    if (constructors.length == 0 || field.hasModifierProperty(PsiModifier.STATIC)) {
      return false;
    }
    for (PsiMethod constructor : constructors) {
      final PsiCodeBlock body = constructor.getBody();
      if (body == null || !HighlightControlFlowUtil.variableDefinitelyAssignedIn(field, body)) {
        return false;
      }
    }
    return true;
  }
  // Collects all assignments to the given field inside one class initializer,
  // in source order.
  @NotNull
  private static List<PsiAssignmentExpression> collectAssignments(@NotNull PsiField field, @NotNull PsiClassInitializer classInitializer) {
    final List<PsiAssignmentExpression> assignmentExpressions = new ArrayList<>();
    classInitializer.accept(new JavaRecursiveElementWalkingVisitor() {
      @Override
      public void visitAssignmentExpression(PsiAssignmentExpression expression) {
        final PsiExpression lExpression = expression.getLExpression();
        if (lExpression instanceof PsiJavaReference && ((PsiJavaReference)lExpression).isReferenceTo(field)) {
          final PsiExpression rExpression = expression.getRExpression();
          if (rExpression != null) {
            assignmentExpressions.add(expression);
          }
        }
        super.visitAssignmentExpression(expression);
      }
    });
    return assignmentExpressions;
  }
  // Quick fixes that silently delete code are only offered when either the user
  // is editing (on-the-fly) or the removed expression has no side effects.
  private static boolean isOnTheFlyOrNoSideEffects(boolean isOnTheFly,
                                                   PsiVariable psiVariable,
                                                   PsiExpression initializer) {
    return isOnTheFly || !RemoveUnusedVariableUtil.checkSideEffects(initializer, psiVariable, new ArrayList<>());
  }
  // Subclasses return the actual quick fixes; the base class offers none.
  protected LocalQuickFix createRemoveInitializerFix() {
    return null;
  }
  protected LocalQuickFix createRemoveAssignmentFix() {
    return null;
  }
  @Override
  public JComponent createOptionsPanel() {
    return new OptionsPanel();
  }
  // Settings UI: three checkboxes bound directly to the public option fields.
  private class OptionsPanel extends JPanel {
    private final JCheckBox myReportPrefix;
    private final JCheckBox myReportPostfix;
    private final JCheckBox myReportInitializer;
    private OptionsPanel() {
      super(new GridBagLayout());
      GridBagConstraints gc = new GridBagConstraints();
      gc.weighty = 0;
      gc.weightx = 1;
      gc.fill = GridBagConstraints.HORIZONTAL;
      gc.anchor = GridBagConstraints.NORTHWEST;
      myReportInitializer = new JCheckBox(InspectionsBundle.message("inspection.unused.assignment.option2"));
      myReportInitializer.setSelected(REPORT_REDUNDANT_INITIALIZER);
      myReportInitializer.getModel().addItemListener(e -> REPORT_REDUNDANT_INITIALIZER = myReportInitializer.isSelected());
      gc.insets = JBUI.insetsBottom(15);
      gc.gridy = 0;
      add(myReportInitializer, gc);
      myReportPrefix = new JCheckBox(InspectionsBundle.message("inspection.unused.assignment.option"));
      myReportPrefix.setSelected(REPORT_PREFIX_EXPRESSIONS);
      myReportPrefix.getModel().addItemListener(e -> REPORT_PREFIX_EXPRESSIONS = myReportPrefix.isSelected());
      gc.insets = JBUI.emptyInsets();
      gc.gridy++;
      add(myReportPrefix, gc);
      myReportPostfix = new JCheckBox(InspectionsBundle.message("inspection.unused.assignment.option1"));
      myReportPostfix.setSelected(REPORT_POSTFIX_EXPRESSIONS);
      myReportPostfix.getModel().addItemListener(e -> REPORT_POSTFIX_EXPRESSIONS = myReportPostfix.isSelected());
      // Last row absorbs remaining vertical space so the checkboxes stay at the top.
      gc.weighty = 1;
      gc.gridy++;
      add(myReportPostfix, gc);
    }
  }
  @Override
  @NotNull
  public String getDisplayName() {
    return DISPLAY_NAME;
  }
  @Override
  @NotNull
  public String getGroupDisplayName() {
    return GroupNames.BUGS_GROUP_NAME;
  }
  @Override
  @NotNull
  public String getShortName() {
    return SHORT_NAME;
  }
  // Value object describing one field write in a class body:
  // myAssignments == null marks the field's own initializer; otherwise the list
  // holds the assignments made inside a single class initializer.
  private static class FieldWrite {
    final boolean myDefinitely;
    final List<PsiAssignmentExpression> myAssignments;
    private FieldWrite(boolean definitely, List<PsiAssignmentExpression> assignments) {
      myDefinitely = definitely;
      myAssignments = assignments;
    }
    // True when control flow guarantees this write assigns the field.
    public boolean isDefinitely() {
      return myDefinitely;
    }
    public boolean isInitializer() {
      return myAssignments == null;
    }
    public List<PsiAssignmentExpression> getAssignments() {
      return myAssignments != null ? myAssignments : Collections.emptyList();
    }
    @NotNull
    public static FieldWrite createInitializer() {
      return new FieldWrite(true, null);
    }
    @NotNull
    public static FieldWrite createAssignments(boolean definitely, @NotNull List<PsiAssignmentExpression> assignmentExpressions) {
      return new FieldWrite(definitely, assignmentExpressions);
    }
  }
}
deeplearning4j/nd4j | nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/aggregates/impl/AggregateSkipGram.java | 4426 | package org.nd4j.linalg.api.ops.aggregates.impl;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.aggregates.BaseAggregate;
import org.nd4j.linalg.factory.Nd4j;
/**
 * This aggregate encapsulates one AggregateSkipGram training round for a given
 * word and its context. The constructors pack all operands into the positional
 * argument lists consumed by the native aggregate (see opNum/name below); the
 * slot positions are significant and must match the native layout.
 *
 * @author raver119@gmail.com
 */
@Slf4j
public class AggregateSkipGram extends BaseAggregate {
    // Length of a single word vector; also drives shared-memory/thread sizing below.
    private int vectorLength;
    // Inference variant: same as the training constructor, but with an inference
    // vector installed in argument slot 5 and the "isInference" flag (index slot 8)
    // set accordingly.
    public AggregateSkipGram(INDArray syn0, INDArray syn1, INDArray syn1Neg, INDArray expTable, INDArray negTable,
                    int idxSyn0, int[] idxSyn1, int[] codes, int negativeRounds, int ngStarter, int vectorLength,
                    double alpha, long nextRandom, int vocabSize, INDArray inferenceVector) {
        this(syn0, syn1, syn1Neg, expTable, negTable, idxSyn0, idxSyn1, codes, negativeRounds, ngStarter, vectorLength,
                        alpha, nextRandom, vocabSize);
        arguments.set(5, inferenceVector);
        indexingArguments.set(8, inferenceVector == null ? 0 : 1); // set isInference to true
    }
    public AggregateSkipGram(@NonNull INDArray syn0, INDArray syn1, INDArray syn1Neg, @NonNull INDArray expTable,
                    INDArray negTable, int idxSyn0, int[] idxSyn1, int[] codes, int negativeRounds, int ngStarter,
                    int vectorLength, double alpha, long nextRandom, int vocabSize) {
        // Index arguments, slots 0..8 (order must match the native kernel):
        indexingArguments.add(idxSyn0);
        indexingArguments.add(vectorLength);
        indexingArguments.add(idxSyn1.length);
        indexingArguments.add(negativeRounds);
        // FIXME: int cast
        indexingArguments.add((int) expTable.length());
        indexingArguments.add(vocabSize);
        indexingArguments.add(ngStarter);
        indexingArguments.add(negTable == null ? 0 : (int) negTable.length());
        indexingArguments.add(0); // slot 8: isInference flag, overwritten by the inference constructor
        // Array arguments, slots 0..5; slot 5 is reserved for the inference vector.
        arguments.add(syn0);
        arguments.add(syn1);
        arguments.add(expTable);
        arguments.add(syn1Neg);
        arguments.add(negTable);
        arguments.add(null);
        intArrayArguments.add(idxSyn1);
        intArrayArguments.add(codes);
        realArguments.add(alpha);
        // NOTE(review): nextRandom travels through a double slot — values above
        // 2^53 lose precision; presumably acceptable for RNG state. Confirm.
        realArguments.add((double) nextRandom);
        this.vectorLength = vectorLength;
    }
    /**
     * This is special signature suitable for use with VoidParameterServer, never ever use it outside of spark-nlp
     *
     * @param w1
     * @param w2
     * @param lr
     * @param vectorLength
     */
    // TODO: probably this signature should be removed?
    public AggregateSkipGram(int w1, int w2, int[] codes, int[] points, int negSamples, double lr, int vectorLength) {
        indexingArguments.add(w1);
        indexingArguments.add(w2);
        indexingArguments.add(vectorLength);
        intArrayArguments.add(codes);
        intArrayArguments.add(points);
        realArguments.add(lr);
    }
    /**
     * This method returns amount of shared memory required for this specific Aggregate.
     * PLEASE NOTE: this method is especially important for CUDA backend. On CPU backend it might be ignored, depending on Aggregate.
     *
     * @return
     */
    @Override
    public int getSharedMemorySize() {
        // One vector's worth of elements plus 512 bytes of headroom.
        return (vectorLength * Nd4j.sizeOfDataType()) + 512;
    }
    /**
     * This method returns desired number of threads per Aggregate instance
     * PLEASE NOTE: this method is especially important for CUDA backend. On CPU backend it might be ignored, depending on Aggregate.
     *
     * @return
     */
    @Override
    public int getThreadsPerInstance() {
        // Capped at 768 threads regardless of vector length.
        if (vectorLength > 768)
            return 768;
        return vectorLength;
    }
    @Override
    public String name() {
        return "aggregate_skipgram";
    }
    @Override
    public int opNum() {
        return 3;
    }
    @Override
    public int maxArguments() {
        return 6;
    }
    @Override
    public int maxShapes() {
        return 0;
    }
    @Override
    public int maxIntArrays() {
        return 2;
    }
    @Override
    public int maxIntArraySize() {
        // we hardcode 40 here, due to w2v codeLength mechanics
        // TODO: make sure this limitation doesn't bother with spark environment
        return 40;
    }
    @Override
    public int maxIndexArguments() {
        return 10;
    }
    @Override
    public int maxRealArguments() {
        return 2;
    }
}
| apache-2.0 |
spinnaker/kork | kork-cloud-config-server/src/main/java/com/netflix/spinnaker/kork/configserver/CloudConfigAwarePropertySource.java | 2235 | /*
* Copyright 2019 Pivotal, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.kork.configserver;
import org.springframework.beans.BeansException;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.env.EnumerablePropertySource;
import org.springframework.core.env.PropertySource;
/**
 * A {@link PropertySource} wrapper that transparently resolves property values which
 * reference Spring Cloud Config resources: when a String value is recognized as a
 * cloud-config resource, it is replaced by the local path supplied by the
 * {@link CloudConfigResourceService} bean looked up lazily from the application context.
 */
public class CloudConfigAwarePropertySource extends EnumerablePropertySource<PropertySource> {
  private final ConfigurableApplicationContext context;
  // Lazily resolved on first cloud-config value; cached afterwards.
  private CloudConfigResourceService resourceService;

  CloudConfigAwarePropertySource(PropertySource source, ConfigurableApplicationContext context) {
    super(source.getName(), source);
    this.context = context;
  }

  /**
   * Returns the delegate's value for {@code name}, translating cloud-config resource
   * references into local file paths; all other values pass through untouched.
   */
  @Override
  public Object getProperty(String name) {
    Object raw = source.getProperty(name);
    if (!(raw instanceof String)) {
      return raw;
    }
    String candidate = (String) raw;
    if (!CloudConfigResourceService.isCloudConfigResource(candidate)) {
      return candidate;
    }
    ensureResourceService(candidate);
    return resourceService.getLocalPath(candidate);
  }

  /**
   * Lazily fetches the {@link CloudConfigResourceService} bean, caching it for reuse.
   *
   * @throws ConfigFileLoadingException when no such bean is configured
   */
  private void ensureResourceService(String path) {
    if (resourceService != null) {
      return;
    }
    try {
      resourceService = context.getBean(CloudConfigResourceService.class);
    } catch (BeansException e) {
      // NOTE(review): the original BeansException cause is dropped here — confirm
      // whether ConfigFileLoadingException offers a (String, Throwable) constructor.
      throw new ConfigFileLoadingException(
          "Config Server repository not configured for resource \"" + path + "\"");
    }
  }

  /** Delegates name enumeration when possible; otherwise reports no names. */
  @Override
  public String[] getPropertyNames() {
    return (source instanceof EnumerablePropertySource)
        ? ((EnumerablePropertySource) source).getPropertyNames()
        : new String[0];
  }
}
| apache-2.0 |
Tycheo/coffeemud | com/planet_ink/coffee_mud/Behaviors/Thiefness.java | 4116 | package com.planet_ink.coffee_mud.Behaviors;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2001-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings({"unchecked","rawtypes"})
public class Thiefness extends CombatAbilities
{
@Override public String ID(){return "Thiefness";}
@Override public long flags(){return Behavior.FLAG_TROUBLEMAKING;}
protected int tickDown=0;
@Override
public String accountForYourself()
{
return "thiefliness";
}
@Override
public void startBehavior(PhysicalAgent forMe)
{
super.startBehavior(forMe);
if(!(forMe instanceof MOB))
return;
final MOB mob=(MOB)forMe;
combatMode=COMBAT_RANDOM;
makeClass(mob,getParmsMinusCombatMode(),"Thief");
newCharacter(mob);
//%%%%%att,armor,damage,hp,mana,move
if((preCastSet==Integer.MAX_VALUE)||(preCastSet<=0))
{
setCombatStats(mob,0,10,15,-15,-15,-15, true);
setCharStats(mob);
}
}
@Override
public boolean tick(Tickable ticking, int tickID)
{
super.tick(ticking,tickID);
if(tickID!=Tickable.TICKID_MOB)
return true;
if(!canActAtAll(ticking))
return true;
if(!(ticking instanceof MOB))
return true;
final MOB mob=(MOB)ticking;
if((--tickDown)<=0)
if((CMLib.dice().rollPercentage()<10)&&(mob.location()!=null))
{
tickDown=2;
MOB victim=null;
if(mob.isInCombat())
victim=mob.getVictim();
else
for(int i=0;i<mob.location().numInhabitants();i++)
{
final MOB potentialVictim=mob.location().fetchInhabitant(i);
if((potentialVictim!=null)
&&(potentialVictim!=mob)
&&(!potentialVictim.isMonster())
&&(CMLib.flags().canBeSeenBy(potentialVictim,mob)))
victim=potentialVictim;
}
if((victim!=null)
&&(!CMSecurity.isAllowed(victim,victim.location(),CMSecurity.SecFlag.CMDROOMS))
&&(!CMSecurity.isAllowed(victim,victim.location(),CMSecurity.SecFlag.ORDER)))
{
final Vector V=new Vector();
final Ability A=mob.fetchAbility((CMLib.dice().rollPercentage()>50)?(mob.isInCombat()?"Thief_Mug":"Thief_Steal"):"Thief_Swipe");
if(A!=null)
{
if(!A.ID().equalsIgnoreCase("Thief_Swipe"))
{
Item I=null;
for(int i=0;i<victim.numItems();i++)
{
final Item potentialI=victim.getItem(i);
if((potentialI!=null)
&&(potentialI.amWearingAt(Wearable.IN_INVENTORY))
&&(CMLib.flags().canBeSeenBy(potentialI,mob)))
I=potentialI;
}
if(I!=null)
V.addElement(I.ID());
}
if(!A.ID().equalsIgnoreCase("Thief_Mug"))
V.addElement(victim.name());
A.setProficiency(CMLib.dice().roll(1,50,A.adjustedLevel(mob,0)*15));
A.invoke(mob,V,null,false,0);
}
}
}
return true;
}
}
| apache-2.0 |
nmldiegues/stibt | infinispan/query/src/test/java/org/infinispan/query/distributed/MultiNodeDistributedTest.java | 6855 | /*
* JBoss, Home of Professional Open Source
* Copyright 2012 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.infinispan.query.distributed;
import junit.framework.Assert;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.hibernate.search.engine.spi.SearchFactoryImplementor;
import org.infinispan.Cache;
import org.infinispan.configuration.cache.CacheMode;
import org.infinispan.configuration.parsing.ConfigurationBuilderHolder;
import org.infinispan.configuration.parsing.ParserRegistry;
import org.infinispan.manager.EmbeddedCacheManager;
import org.infinispan.query.CacheQuery;
import org.infinispan.query.Search;
import org.infinispan.query.SearchManager;
import org.infinispan.query.indexmanager.InfinispanCommandsBackend;
import org.infinispan.query.indexmanager.InfinispanIndexManager;
import org.infinispan.query.test.Person;
import org.infinispan.test.AbstractInfinispanTest;
import org.infinispan.test.TestingUtil;
import org.infinispan.test.fwk.TestCacheManagerFactory;
import org.infinispan.util.FileLookupFactory;
import org.testng.annotations.Test;
import javax.transaction.TransactionManager;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
 * Configures the Hibernate Search backend to use Infinispan custom commands as a backend
 * transport, and a consistent hash for Master election for each index.
 * The test changes the view several times while indexing and verifying index state.
 *
 * @author Sanne Grinovero
 */
@Test(groups = "functional", testName = "query.distributed.MultiNodeDistributedTest")
public class MultiNodeDistributedTest extends AbstractInfinispanTest {
   // Nodes created so far, in creation order; shrinks when killMasterNode() removes one.
   protected List<EmbeddedCacheManager> cacheManagers = new ArrayList<EmbeddedCacheManager>(4);
   protected List<Cache<String, Person>> caches = new ArrayList<Cache<String, Person>>(4);
   /**
    * Starts a new cache manager from the XML configuration, registers it and its
    * default cache, and waits for the rehash to settle before returning.
    */
   protected EmbeddedCacheManager createCacheManager() throws IOException {
      EmbeddedCacheManager cacheManager = TestCacheManagerFactory.fromXml(getConfigurationResourceName());
      cacheManagers.add(cacheManager);
      Cache<String, Person> cache = cacheManager.getCache();
      caches.add(cache);
      TestingUtil.waitForRehashToComplete(caches);
      return cacheManager;
   }
   // Subclasses may point at a different configuration file.
   protected String getConfigurationResourceName() {
      return "dynamic-indexing-distribution.xml";
   }
   /** Stores an entry on the given cache, inside a JTA transaction when enabled. */
   protected void storeOn(Cache<String, Person> cache, String key, Person person) throws Exception {
      TransactionManager transactionManager = cache.getAdvancedCache().getTransactionManager();
      if (transactionsEnabled()) transactionManager.begin();
      cache.put(key, person);
      if (transactionsEnabled()) transactionManager.commit();
   }
   /**
    * Main scenario: writes from alternating nodes while growing the cluster, verifies
    * the index size after each step (note "k3" is overwritten, so the size stays at 3),
    * then kills the index master and verifies indexing still works after failover.
    */
   public void testIndexingWorkDistribution() throws Exception {
      try {
         createCacheManager();
         createCacheManager();
         assertIndexSize(0);
         //depending on test run, the index master selection might pick either cache.
         //We don't know which cache it picks, but we allow writing & searching on all.
         storeOn(caches.get(0), "k1", new Person("K. Firt", "Is not a character from the matrix", 1));
         assertIndexSize(1);
         storeOn(caches.get(1), "k2", new Person("K. Seycond", "Is a pilot", 1));
         assertIndexSize(2);
         storeOn(caches.get(0), "k3", new Person("K. Theerd", "Forgot the fundamental laws", 1));
         assertIndexSize(3);
         storeOn(caches.get(1), "k3", new Person("K. Overide", "Impersonating Mr. Theerd", 1));
         assertIndexSize(3);
         createCacheManager();
         storeOn(caches.get(2), "k4", new Person("K. Forth", "Dynamic Topology!", 1));
         assertIndexSize(4);
         createCacheManager();
         assertIndexSize(4);
         killMasterNode();
         storeOn(caches.get(2), "k5", new Person("K. Vife", "Failover!", 1));
         assertIndexSize(5);
      }
      finally {
         TestingUtil.killCacheManagers(cacheManagers);
      }
   }
   /**
    * Finds the node currently elected as index master, kills it, removes it from the
    * tracking lists, and (for clustered caches) waits for the rehash to complete.
    */
   protected void killMasterNode() {
      for (Cache cache : caches) {
         if (isMasterNode(cache)) {
            TestingUtil.killCacheManagers(cache.getCacheManager());
            caches.remove(cache);
            cacheManagers.remove(cache.getCacheManager());
            if(cache.getCacheConfiguration().clustering().cacheMode() != CacheMode.LOCAL)
               TestingUtil.waitForRehashToComplete(caches);
            break;
         }
      }
   }
   /** True when the "person" index's Infinispan commands backend elects this node master. */
   private boolean isMasterNode(Cache cache) {
      //Implicitly verifies the components are setup as configured by casting:
      SearchManager searchManager = Search.getSearchManager(cache);
      SearchFactoryImplementor searchFactory = (SearchFactoryImplementor) searchManager.getSearchFactory();
      InfinispanIndexManager indexManager = (InfinispanIndexManager) searchFactory.getAllIndexesManager().getIndexManager("person");
      InfinispanCommandsBackend commandsBackend = indexManager.getRemoteMaster();
      return commandsBackend.isMasterLocal();
   }
   /** Asserts a match-all query returns the expected hit count on EVERY node. */
   protected void assertIndexSize(int expectedIndexSize) {
      for (Cache cache : caches) {
         SearchManager searchManager = Search.getSearchManager(cache);
         CacheQuery query = searchManager.getQuery(new MatchAllDocsQuery(), Person.class);
         Assert.assertEquals(expectedIndexSize, query.list().size());
      }
   }
   protected boolean transactionsEnabled() {
      return false; //TODO extend this test using a Transactional configuration
   }
   /** Parses the test's XML configuration into a builder holder (utility for subclasses). */
   protected ConfigurationBuilderHolder readFromXml() throws FileNotFoundException {
      InputStream is = FileLookupFactory.newInstance().lookupFileStrict(
            getConfigurationResourceName(), Thread.currentThread().getContextClassLoader());
      ParserRegistry parserRegistry = new ParserRegistry(Thread.currentThread().getContextClassLoader());
      ConfigurationBuilderHolder holder = parserRegistry.parse(is);
      return holder;
   }
}
| apache-2.0 |
ecarm002/incubator-asterixdb | hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/tuples/TypeAwareTupleWriterFactory.java | 1420 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.am.common.tuples;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
/**
 * Factory producing {@link TypeAwareTupleWriter} instances for a fixed set of
 * per-column type traits.
 */
public class TypeAwareTupleWriterFactory implements ITreeIndexTupleWriterFactory {
    private static final long serialVersionUID = 1L;
    // Per-column type traits handed to every writer this factory creates.
    protected ITypeTraits[] typeTraits;
    /**
     * @param typeTraits per-column type traits; stored by reference (not copied)
     */
    public TypeAwareTupleWriterFactory(ITypeTraits[] typeTraits) {
        this.typeTraits = typeTraits;
    }
    /** Creates a new writer sharing this factory's type traits. */
    @Override
    public TypeAwareTupleWriter createTupleWriter() {
        return new TypeAwareTupleWriter(typeTraits);
    }
}
| apache-2.0 |
smgoller/geode | geode-core/src/main/java/org/apache/geode/cache/client/internal/CacheServerLoadMessage.java | 3706 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.client.internal;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.geode.DataSerializer;
import org.apache.geode.cache.server.ServerLoad;
import org.apache.geode.distributed.Locator;
import org.apache.geode.distributed.internal.ClusterDistributionManager;
import org.apache.geode.distributed.internal.InternalLocator;
import org.apache.geode.distributed.internal.SerialDistributionMessage;
import org.apache.geode.distributed.internal.ServerLocation;
import org.apache.geode.distributed.internal.ServerLocator;
import org.apache.geode.internal.InternalDataSerializer;
import org.apache.geode.internal.serialization.DeserializationContext;
import org.apache.geode.internal.serialization.SerializationContext;
/**
 * A message from a server to a locator to update the locator with new load information from the
 * server. Also includes the id of any clients whose estimate is no longer needed on the
 * server-locator.
 *
 * @since GemFire 5.7
 *
 */
public class CacheServerLoadMessage extends SerialDistributionMessage {
  // Current load reported by the cache server.
  protected ServerLoad load;
  // Location (host/port) of the reporting cache server.
  protected ServerLocation location;
  // Ids of clients whose load estimates can be discarded; raw type kept for wire compatibility.
  protected ArrayList clientIds;
  /** No-arg constructor required for deserialization. */
  public CacheServerLoadMessage() {
    super();
  }
  public CacheServerLoadMessage(ServerLoad load, ServerLocation location, ArrayList clientIds) {
    super();
    this.load = load;
    this.location = location;
    this.clientIds = clientIds;
  }
  /** On receipt, simply pushes the load update into every locator on this member. */
  @Override
  protected void process(ClusterDistributionManager dm) {
    updateLocalLocators();
  }
  /** Applies this message's load info to every ServerLocator hosted by local Locators. */
  public void updateLocalLocators() {
    List locators = Locator.getLocators();
    for (int i = 0; i < locators.size(); i++) {
      InternalLocator l = (InternalLocator) locators.get(i);
      ServerLocator serverLocator = l.getServerLocatorAdvisee();
      if (serverLocator != null) {
        serverLocator.updateLoad(location, this.getSender().getUniqueId(), load, this.clientIds);
      }
    }
  }
  @Override
  public int getDSFID() {
    return CACHE_SERVER_LOAD_MESSAGE;
  }
  // NOTE: fromData/toData must read and write fields in EXACTLY the same order:
  // load, location, clientIds.
  @Override
  public void fromData(DataInput in,
      DeserializationContext context) throws IOException, ClassNotFoundException {
    super.fromData(in, context);
    load = new ServerLoad();
    InternalDataSerializer.invokeFromData(load, in);
    location = new ServerLocation();
    InternalDataSerializer.invokeFromData(location, in);
    this.clientIds = DataSerializer.readArrayList(in);
  }
  @Override
  public void toData(DataOutput out,
      SerializationContext context) throws IOException {
    super.toData(out, context);
    InternalDataSerializer.invokeToData(load, out);
    InternalDataSerializer.invokeToData(location, out);
    DataSerializer.writeArrayList(this.clientIds, out);
  }
  // Plain shallow clone; override exists only to widen visibility within the package.
  @Override
  protected Object clone() throws CloneNotSupportedException {
    return super.clone();
  }
}
| apache-2.0 |
flowable/flowable-engine | modules/flowable-engine/src/test/java/org/flowable/engine/test/concurrency/SetRandomVariablesTaskListener.java | 2139 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.test.concurrency;
import java.util.Random;
import org.flowable.engine.delegate.TaskListener;
import org.flowable.engine.impl.persistence.entity.ExecutionEntity;
import org.flowable.engine.impl.util.CommandContextUtil;
import org.flowable.task.service.delegate.DelegateTask;
/**
* Tasklistener that sets some random process and task-variables.
*
* @author Frederik Heremans
*/
/**
 * Tasklistener that sets some random process and task-variables.
 *
 * @author Frederik Heremans
 */
public class SetRandomVariablesTaskListener implements TaskListener {

    private static final long serialVersionUID = 1L;

    /**
     * Sets five randomly named process variables on the task's execution and five
     * randomly named local variables on the task itself, each with a random value.
     */
    @Override
    public void notify(DelegateTask delegateTask) {
        // One Random per invocation instead of one per loop iteration.
        Random random = new Random();
        // The execution lookup is loop-invariant (same executionId every pass), so hoist it.
        ExecutionEntity execution = CommandContextUtil.getExecutionEntityManager().findById(delegateTask.getExecutionId());
        String varName;
        for (int i = 0; i < 5; i++) {
            varName = "variable-" + random.nextInt(10);
            execution.setVariable(varName, getRandomValue());
        }
        for (int i = 0; i < 5; i++) {
            varName = "task-variable-" + random.nextInt(10);
            delegateTask.setVariableLocal(varName, getRandomValue());
        }
    }

    /**
     * Returns a random value of a random type: long, double, a fixed marketing string,
     * or boolean.
     */
    protected Object getRandomValue() {
        // Reuse a single Random for both the type choice and the value.
        Random random = new Random();
        switch (random.nextInt(4)) {
            case 0:
                return random.nextLong();
            case 1:
                return random.nextDouble();
            case 2:
                return "Activiti is a light-weight workflow and Business Process Management (BPM) Platform";
            default:
                return random.nextBoolean();
            // return "Some bytearray".getBytes();
        }
    }
}
| apache-2.0 |
Darsstar/framework | uitest/src/test/java/com/vaadin/tests/components/grid/GridHeaderFooterComponentsTest.java | 4876 | /*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.tests.components.grid;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import com.vaadin.testbench.By;
import com.vaadin.testbench.elements.ButtonElement;
import com.vaadin.testbench.elements.GridElement;
import com.vaadin.testbench.elements.GridElement.GridCellElement;
import com.vaadin.testbench.elements.TextFieldElement;
import com.vaadin.tests.tb3.SingleBrowserTest;
/**
 * TestBench test verifying that components placed in Grid header/footer cells can be
 * hidden, shown, and removed without producing client-side error notifications.
 */
public class GridHeaderFooterComponentsTest extends SingleBrowserTest {
    @Before
    public void setUp() {
        setDebug(true);
        openTestURL();
    }
    /**
     * Shows the hidden filter field in header cell (1,1), types into it, and checks the
     * value-change event is logged without errors.
     */
    @Test
    public void hideAndShowComponentsInHeader() {
        GridElement grid = $(GridElement.class).first();
        // Row index of the filter row inside the header.
        int filterRow = 2;
        // Column 1's filter starts hidden; columns 2 and 3 are visible.
        assertNull(getHeaderElement(grid, filterRow, 1));
        assertNotNull(getHeaderElement(grid, filterRow, 2));
        assertNotNull(getHeaderElement(grid, filterRow, 3));
        // Show (1,2)
        grid.getHeaderCell(1, 1).$(ButtonElement.class).first().click();
        TextFieldElement textfield = getHeaderElement(grid, filterRow, 1);
        assertNotNull(textfield);
        assertEquals("Filter: string", textfield.getValue());
        textfield.setValue("foo");
        assertEquals("1. value change for field in string to foo",
                getLogRow(0));
        assertNoErrorNotifications();
    }
    /**
     * Returns the single TextField inside the given header cell, or null when absent.
     * Fails loudly when more than one is found.
     */
    private TextFieldElement getHeaderElement(GridElement grid, int row,
            int col) {
        GridCellElement cell = grid.getHeaderCell(row, col);
        List<TextFieldElement> all = cell.$(TextFieldElement.class).all();
        if (all.isEmpty()) {
            return null;
        } else if (all.size() == 1) {
            return all.get(0);
        } else {
            throw new RuntimeException(
                    "Multiple elements found in the header cell at " + row + ","
                            + col);
        }
    }
    /** Footer counterpart of hideAndShowComponentsInHeader; filter row is footer row 0. */
    @Test
    public void hideAndShowComponentsInFooter() {
        GridElement grid = $(GridElement.class).first();
        int filterRow = 0;
        assertNull(getFooterElement(grid, filterRow, 1));
        assertNotNull(getFooterElement(grid, filterRow, 2));
        assertNotNull(getFooterElement(grid, filterRow, 3));
        // Show (1,2)
        grid.getFooterCell(1, 1).$(ButtonElement.class).first().click();
        TextFieldElement textfield = getFooterElement(grid, filterRow, 1);
        assertNotNull(textfield);
        assertEquals("Filter: string", textfield.getValue());
        textfield.setValue("foo");
        assertEquals("1. value change for field in string to foo",
                getLogRow(0));
        assertNoErrorNotifications();
    }
    /** Footer counterpart of getHeaderElement. */
    private TextFieldElement getFooterElement(GridElement grid, int row,
            int col) {
        GridCellElement cell = grid.getFooterCell(row, col);
        List<TextFieldElement> all = cell.$(TextFieldElement.class).all();
        if (all.isEmpty()) {
            return null;
        } else if (all.size() == 1) {
            return all.get(0);
        } else {
            throw new RuntimeException(
                    "Multiple elements found in the footer cell at " + row + ","
                            + col);
        }
    }
    /**
     * Removes header and footer rows from index 2 down to 0 via the in-cell buttons and
     * verifies each row disappears from the DOM without error notifications.
     */
    @Test
    public void testRemoveAllHeadersAndFooters() {
        openTestURL();
        for (int i = 2; i >= 0; --i) {
            // Remove Header
            $(GridElement.class).first().getHeaderCell(i, 0)
                    .$(ButtonElement.class).first().click();
            assertFalse("Header " + i + " should not be present.",
                    $(GridElement.class).first()
                            .isElementPresent(By.vaadin("#header[" + i + "]")));
            // Remove Footer
            $(GridElement.class).first().getFooterCell(i, 0)
                    .$(ButtonElement.class).first().click();
            assertFalse("Footer " + i + " should not be present.",
                    $(GridElement.class).first()
                            .isElementPresent(By.vaadin("#footer[" + i + "]")));
        }
        assertNoErrorNotifications();
    }
}
| apache-2.0 |
djechelon/spring-security | web/src/test/java/org/springframework/security/web/header/writers/StaticHeaderWriterTests.java | 4056 | /*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.web.header.writers;
import java.util.Arrays;
import java.util.Collections;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.security.web.header.Header;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
/**
 * Test for the {@code StaticHeadersWriter}
 *
 * @author Marten Deinum
 * @author Rob Winch
 * @author Ankur Pathak
 * @since 3.2
 */
public class StaticHeaderWriterTests {
	private MockHttpServletRequest request;
	private MockHttpServletResponse response;
	@BeforeEach
	public void setup() {
		this.request = new MockHttpServletRequest();
		this.response = new MockHttpServletResponse();
	}
	// Constructor argument validation: all of these must reject invalid input.
	@Test
	public void constructorNullHeaders() {
		assertThatIllegalArgumentException().isThrownBy(() -> new StaticHeadersWriter(null));
	}
	@Test
	public void constructorEmptyHeaders() {
		assertThatIllegalArgumentException().isThrownBy(() -> new StaticHeadersWriter(Collections.<Header>emptyList()));
	}
	@Test
	public void constructorNullHeaderName() {
		assertThatIllegalArgumentException().isThrownBy(() -> new StaticHeadersWriter(null, "value1"));
	}
	@Test
	public void constructorNullHeaderValues() {
		assertThatIllegalArgumentException().isThrownBy(() -> new StaticHeadersWriter("name", (String[]) null));
	}
	@Test
	public void constructorContainsNullHeaderValue() {
		assertThatIllegalArgumentException().isThrownBy(() -> new StaticHeadersWriter("name", "value1", null));
	}
	/** A single configured header is written to the response verbatim. */
	@Test
	public void sameHeaderShouldBeReturned() {
		String headerName = "X-header";
		String headerValue = "foo";
		StaticHeadersWriter factory = new StaticHeadersWriter(headerName, headerValue);
		factory.writeHeaders(this.request, this.response);
		assertThat(this.response.getHeaderValues(headerName)).isEqualTo(Arrays.asList(headerValue));
	}
	/** Multiple headers, including multi-valued ones, are all written. */
	@Test
	public void writeHeadersMulti() {
		Header pragma = new Header("Pragma", "no-cache");
		Header cacheControl = new Header("Cache-Control", "no-cache", "no-store", "must-revalidate");
		StaticHeadersWriter factory = new StaticHeadersWriter(Arrays.asList(pragma, cacheControl));
		factory.writeHeaders(this.request, this.response);
		assertThat(this.response.getHeaderNames()).hasSize(2);
		assertThat(this.response.getHeaderValues(pragma.getName())).isEqualTo(pragma.getValues());
		assertThat(this.response.getHeaderValues(cacheControl.getName())).isEqualTo(cacheControl.getValues());
	}
	/**
	 * Headers already present on the response must NOT be overwritten. The values are
	 * built with {@code new String(...)} ON PURPOSE: the later {@code isSameAs} checks
	 * rely on reference identity to prove the writer left the original objects in place,
	 * which interned string literals could not guarantee. Do not "simplify" them.
	 */
	@Test
	public void writeHeaderWhenNotPresent() {
		String pragmaValue = new String("pragmaValue");
		String cacheControlValue = new String("cacheControlValue");
		this.response.setHeader("Pragma", pragmaValue);
		this.response.setHeader("Cache-Control", cacheControlValue);
		Header pragma = new Header("Pragma", "no-cache");
		Header cacheControl = new Header("Cache-Control", "no-cache", "no-store", "must-revalidate");
		StaticHeadersWriter factory = new StaticHeadersWriter(Arrays.asList(pragma, cacheControl));
		factory.writeHeaders(this.request, this.response);
		assertThat(this.response.getHeaderNames()).hasSize(2);
		assertThat(this.response.getHeader("Pragma")).isSameAs(pragmaValue);
		assertThat(this.response.getHeader("Cache-Control")).isSameAs(cacheControlValue);
	}
}
| apache-2.0 |
sohaniwso2/devstudio-tooling-esb | plugins/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/custom/configure/ConfigureSwitchMediatorAction.java | 3147 | /*
* Copyright 2009-2010 WSO2, Inc. (http://wso2.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.configure;
import org.eclipse.core.runtime.Assert;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.emf.common.command.CompoundCommand;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.edit.command.AddCommand;
import org.eclipse.emf.transaction.TransactionalEditingDomain;
import org.eclipse.emf.transaction.util.TransactionUtil;
import org.eclipse.gef.EditPart;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.ISharedImages;
import org.eclipse.ui.IWorkbenchPart;
import org.eclipse.ui.PlatformUI;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbFactory;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.SwitchCaseBranchOutputConnector;
import org.wso2.developerstudio.eclipse.gmf.esb.SwitchMediator;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.configure.ui.ConfigureSwitchMediatorDialog;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SwitchMediatorEditPart;
/**
* A {@link ConfigureEsbNodeAction} used to configure a switch mediator through a
* custom UI.
*/
/**
 * A {@link ConfigureEsbNodeAction} used to configure a switch mediator through a
 * custom UI.
 */
public class ConfigureSwitchMediatorAction extends ConfigureEsbNodeAction {
	// NOTE(review): never assigned or read in this class — confirm whether subclasses
	// or reflective callers rely on it before removing.
	CompoundCommand resultCommand;
	/**
	 * Creates a new {@link ConfigureSwitchMediatorAction} instance.
	 *
	 * @param part a {@link IWorkbenchPart} instance.
	 */
	public ConfigureSwitchMediatorAction(IWorkbenchPart part) {
		super(part);
		setId("configure-switch-mediator-action-id");
		setText("Configure...");
		setToolTipText("Configure switch mediator.");
		ISharedImages workbenchImages = PlatformUI.getWorkbench().getSharedImages();
		setImageDescriptor(workbenchImages.getImageDescriptor(ISharedImages.IMG_TOOL_NEW_WIZARD));
	}
	/**
	 * {@inheritDoc}
	 *
	 * Validates that the current selection is a SwitchMediator and opens the modal
	 * configuration dialog for it.
	 */
	protected void doRun(IProgressMonitor progressMonitor) {
		EditPart selectedEP = getSelectedEditPart();
		Assert.isNotNull(selectedEP, "Empty selection.");
		EObject selectedObj = ((View) selectedEP.getModel()).getElement();
		Assert.isTrue(selectedObj instanceof SwitchMediator, "Invalid selection.");
		Shell shell = Display.getDefault().getActiveShell();
		// FIX: local variable renamed to lowerCamelCase (was ConfigureSwitchMediatorDialog,
		// which shadowed the class name and violated Java naming conventions).
		Dialog configureSwitchMediatorDialog = new ConfigureSwitchMediatorDialog(shell, (SwitchMediator) selectedObj);
		configureSwitchMediatorDialog.setBlockOnOpen(true);
		configureSwitchMediatorDialog.open();
	}
}
| apache-2.0 |
papicella/snappy-store | gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/impl/sql/execute/DropHDFSStoreConstantAction.java | 5324 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.impl.sql.execute;
import java.util.List;
import com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreImpl;
import com.pivotal.gemfirexd.internal.engine.Misc;
import com.pivotal.gemfirexd.internal.engine.GfxdConstants;
import com.pivotal.gemfirexd.internal.engine.store.GemFireContainer;
import com.pivotal.gemfirexd.internal.engine.store.GemFireStore;
import com.pivotal.gemfirexd.internal.engine.store.ServerGroupUtils;
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.reference.SQLState;
import com.pivotal.gemfirexd.internal.iapi.sql.Activation;
import com.pivotal.gemfirexd.internal.iapi.sql.conn.LanguageConnectionContext;
import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.SchemaDescriptor;
import com.pivotal.gemfirexd.internal.iapi.sql.execute.ExecIndexRow;
import com.pivotal.gemfirexd.internal.iapi.store.access.TransactionController;
import com.pivotal.gemfirexd.internal.iapi.types.SQLVarchar;
import com.pivotal.gemfirexd.internal.impl.sql.catalog.DataDictionaryImpl;
import com.pivotal.gemfirexd.internal.impl.sql.catalog.TabInfoImpl;
import com.pivotal.gemfirexd.internal.shared.common.sanity.SanityManager;
/**
 * Constant action implementing DROP HDFSSTORE: verifies no table still uses
 * the store, removes its row from the SYSHDFSSTORES catalog, and on
 * data-store members also removes the live {@link HDFSStoreImpl} from the
 * GemFire cache.
 *
 * @author jianxiachen
 */
public class DropHDFSStoreConstantAction extends DDLConstantAction {

  /** Name of the HDFS store to drop. */
  final String hdfsStoreName;

  /** True for DROP ... IF EXISTS: a missing store is then not an error. */
  final boolean onlyIfExists;

  DropHDFSStoreConstantAction(String hdfsStoreName, boolean onlyIfExists) {
    this.hdfsStoreName = hdfsStoreName;
    this.onlyIfExists = onlyIfExists;
  }

  // Override the getSchemaName/getObjectName to enable
  // DDL conflation of CREATE and DROP HDFSSTORE statements.
  @Override
  public final String getSchemaName() {
    // HDFS stores have no schema, so return 'SYS'
    return SchemaDescriptor.STD_SYSTEM_SCHEMA_NAME;
  }

  @Override
  public final String getTableName() {
    return CreateHDFSStoreConstantAction.REGION_PREFIX_FOR_CONFLATION
        + hdfsStoreName;
  }

  @Override
  public final boolean isDropStatement() {
    return true;
  }

  /**
   * Executes the drop: dependency check, catalog delete, then in-memory
   * store removal (catalog first so a failure leaves the store usable).
   *
   * @throws StandardException if a table still references the store, or the
   *           store does not exist and IF EXISTS was not specified
   */
  @Override
  public void executeConstantAction(Activation activation)
      throws StandardException {
    // Refuse to drop a store that any application table still references.
    List<GemFireContainer> containers = Misc.getMemStore().getAllContainers();
    for (GemFireContainer container : containers) {
      if (container.getRegion() != null && container.isApplicationTable()) {
        String regionHDFSStoreName = container.getRegionAttributes()
            .getHDFSStoreName();
        if (regionHDFSStoreName != null
            && regionHDFSStoreName.equalsIgnoreCase(hdfsStoreName)) {
          throw StandardException.newException(
              SQLState.LANG_PROVIDER_HAS_DEPENDENT_OBJECT, "DROP", "HDFSStore "
                  + hdfsStoreName, "table", container.getQualifiedTableName());
        }
      }
    }
    // OK, we're good to go - drop the object from the catalog first
    // and then drop the gemfire object second
    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    DataDictionaryImpl dd = (DataDictionaryImpl)lcc.getDataDictionary();
    TransactionController tc = lcc.getTransactionExecute();
    dd.startWriting(lcc);
    ExecIndexRow keyRow = dd.getExecutionFactory().getIndexableRow(1);
    keyRow.setColumn(1, new SQLVarchar(hdfsStoreName));
    TabInfoImpl ti = dd
        .getNonCoreTI(DataDictionaryImpl.SYSHDFSSTORES_CATALOG_NUM);
    // Declared at point of use (was pre-initialized to 0 far from this call).
    int rowsDeleted = ti.deleteRow(tc, keyRow, 0);
    // If no row deleted from catalog, it's an error unless IF EXISTS specified
    if (rowsDeleted == 0) {
      if (onlyIfExists) {
        return;
      }
      // The HDFS store wasn't in the catalog in the first place
      // Throw object-not-found exception
      throw StandardException.newException(
          SQLState.LANG_OBJECT_DOES_NOT_EXIST, "DROP HDFSSTORE", hdfsStoreName);
    }
    SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_CONGLOM,
        "DropHDFSStore:: removed HDFSStore " + hdfsStoreName+ " from SYS table");
    // If this node is not hosting data, nothing to do
    if (!ServerGroupUtils.isDataStore()) {
      return;
    }
    HDFSStoreImpl store = (HDFSStoreImpl) Misc.getGemFireCache().findHDFSStore(
        hdfsStoreName);
    SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_CONGLOM,
        "DropHDFSStore :: found HDFSStore " + store);
    if (store != null) {
      Misc.getGemFireCache().removeHDFSStore(store);
    }
  }

  @Override
  public String toString() {
    return constructToString("DROP HDFSSTORE ", hdfsStoreName);
  }

  // Fixed: removed the stray empty statement (";") that followed this
  // method's closing brace in the original.
  @Override
  public boolean isCancellable() {
    return false;
  }
}
| apache-2.0 |
smgoller/geode | geode-for-redis/src/integrationTest/java/org/apache/geode/redis/internal/commands/executor/string/IncrIntegrationTest.java | 1172 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.redis.internal.commands.executor.string;
import org.junit.ClassRule;
import org.apache.geode.redis.GeodeRedisServerRule;
/**
 * Runs the shared INCR command integration tests defined in
 * {@code AbstractIncrIntegrationTest} against a locally started
 * Geode-for-Redis server.
 */
public class IncrIntegrationTest extends AbstractIncrIntegrationTest {

  // Class-level rule: starts one Geode Redis server for all tests in this
  // class and shuts it down afterwards.
  @ClassRule
  public static GeodeRedisServerRule server = new GeodeRedisServerRule();

  /** Port of the locally started server; the base class connects to it. */
  @Override
  public int getPort() {
    return server.getPort();
  }
}
| apache-2.0 |
shenjie1993/Corp-talk | src/main/java/com/drfish/corptalk/Deliverier.java | 1102 | package com.drfish.corptalk;
import java.util.Scanner;
import com.drfish.corptalk.command.Command_login;
import com.drfish.corptalk.command.Command_speak;
/**
 * Singleton console chat client: logs the user in with an account name, then
 * forwards every typed line to the server as a speak command while echoing it
 * locally.
 */
public enum Deliverier {
    INSTANCE;

    private static Client client;
    private static String account;

    // Bring the network client up once, as soon as the enum class loads.
    static {
        client = new Client();
        client.start();
    }

    /** Ships any command object to the server over the shared client. */
    private void send(Object object) {
        client.sendObject(object);
    }

    /**
     * Runs the interactive session: prompts for an account, sends the login
     * command, then loops forever reading console lines and sending each as
     * a chat message.
     */
    public void startConversation() {
        @SuppressWarnings("resource")
        Scanner scanner = new Scanner(System.in);
        // Log in before starting a conversation with others.
        System.out.print("please log in with your account: ");
        account = scanner.nextLine();
        Command_login loginCommand = new Command_login();
        loginCommand.setAccount(account);
        send(loginCommand);
        // Message loop: read a line, send it, echo it locally.
        while (true) {
            String message = scanner.nextLine();
            System.out.print("Me: ");
            Command_speak speakCommand = new Command_speak();
            speakCommand.setFromAccount(account);
            speakCommand.setMessage(message);
            send(speakCommand);
            System.out.println(message);
        }
    }
}
| apache-2.0 |
android-ia/platform_tools_idea | plugins/github/src/org/jetbrains/plugins/github/ui/GithubBasicLoginDialog.java | 1457 | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.github.ui;
import com.intellij.openapi.project.Project;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.github.GithubAuthData;
import org.jetbrains.plugins.github.GithubSettings;
/**
 * GitHub login dialog restricted to BASIC (login/password) authentication.
 *
 * @author Aleksey Pivovarov
 */
public class GithubBasicLoginDialog extends GithubLoginDialog {

  public GithubBasicLoginDialog(@Nullable Project project) {
    super(project);
    // Pin the auth-type selector so the user cannot switch away from BASIC.
    myGithubLoginPanel.lockAuthType(GithubAuthData.AuthType.BASIC);
  }

  @Override
  protected void saveCredentials(GithubAuthData auth) {
    GithubSettings settings = GithubSettings.getInstance();
    // Never overwrite previously stored TOKEN credentials with BASIC ones.
    if (settings.getAuthType() == GithubAuthData.AuthType.TOKEN) {
      return;
    }
    settings.setCredentials(myGithubLoginPanel.getHost(),
                            myGithubLoginPanel.getLogin(),
                            auth,
                            myGithubLoginPanel.isSavePasswordSelected());
  }
}
| apache-2.0 |
aidendoherty/biobankAccelerometerAnalysis | java/LowpassFilter.java | 4446 | //BSD 2-Clause, (c) 2014: A.Doherty (Oxford), D.Jackson, N.Hammerla (Newcastle)
import java.util.Arrays;
import java.util.List;
// [dgj] Butterworth 4th-order lowpass filter
/**
 * 4th-order Butterworth low-pass IIR filter.
 *
 * NOTE(review): the coefficient arrays {@code B}/{@code A}, the state array
 * {@code z}, and the constants {@code BUTTERWORTH4_ORDER} /
 * {@code BUTTERWORTH4_NUM_COEFFICIENTS} are declared in the {@code Filter}
 * base class (not visible in this file) — confirm their semantics there.
 */
public class LowpassFilter extends Filter {

    // Constructs 4th order Butterworth lowpass filter with cutoff Fc at rate Fs.
    /**
     * Builds filter coefficients for cutoff {@code Fc} at sampling rate
     * {@code Fs} and resets the filter state.
     *
     * @param Fc cutoff frequency; capped just below Fs/2 (Nyquist) if too high
     * @param Fs sampling frequency
     * @param verbose when true, prints the computed B and A coefficients
     */
    public LowpassFilter(double Fc, double Fs, Boolean verbose)
    {
        // A cutoff at or above Nyquist (Fs/2) is not representable: warn and cap.
        if (Fc >= (Fs / 2)) {
            System.out.format(
                "\nThe specified lowpass filter cutoff (%s) "
                + "is >= Nyquist frequency of the sampling rate (%s), "
                + "therefore the cutoff will be capped at %s\n\n", Fc, Fs, Fs/2
            );
            Fc = (Fs / 2) * 0.999d;
        }

        // Calculate normalised cut-off (fraction of Nyquist)
        double W = Math.min( (Fc / (Fs / 2)), 0.999d); // W cannot be > 1

        // Create coefficient arrays (order + 1 taps each)
        BUTTERWORTH4_NUM_COEFFICIENTS = (BUTTERWORTH4_ORDER + 1);
        B = new double[BUTTERWORTH4_NUM_COEFFICIENTS];
        A = new double[BUTTERWORTH4_NUM_COEFFICIENTS];

        // Calculate coefficients
        CoefficientsButterworth4LP(W, B, A);

        // [debug] Dump coefficients
        if (verbose) {
            System.out.println("B = " + Arrays.toString(B));
            System.out.println("A = " + Arrays.toString(A));
        }

        // Create final/initial condition tracker (filter delay-line state)
        z = new double[BUTTERWORTH4_NUM_COEFFICIENTS];
        reset();
    }

    // Calculate coefficients for a 4th order Butterworth lowpass filter.
    // Based on http://www.exstrom.com/journal/sigproc/
    // Copyright (C) 2014 Exstrom Laboratories LLC
    /**
     * Fills {@code B} (numerator) and {@code A} (denominator) with
     * transfer-function coefficients for normalised cutoff {@code W}
     * (fraction of Nyquist). A negative {@code W} requests a high-pass
     * design with cutoff {@code -W}.
     */
    private void CoefficientsButterworth4LP(double W, double B[], double A[])
    {
        // (Bit hacky:) treat a negative value as a high-pass
        Boolean highpass = false;
        if (W < 0) { W = -W; highpass = true; }

        int i, j;

        // Calculate B coefficients for a Butterworth lowpass/highpass filter.
        // tcof: binomial coefficients of (1+x)^ORDER, filled symmetrically
        // from both ends.
        int prev = BUTTERWORTH4_ORDER;
        int tcof[] = new int[BUTTERWORTH4_ORDER + 1];
        tcof[0] = 1;
        tcof[1] = BUTTERWORTH4_ORDER;
        for (i = 2; i <= (BUTTERWORTH4_ORDER / 2); i++)
        {
            prev = (BUTTERWORTH4_ORDER - i + 1) * prev / i;
            tcof[i] = prev;
            tcof[BUTTERWORTH4_ORDER - i] = prev;
        }
        tcof[BUTTERWORTH4_ORDER - 1] = BUTTERWORTH4_ORDER;
        tcof[BUTTERWORTH4_ORDER] = 1;

        // Calculate the scaling factor for the B coefficients of Butterworth
        // lowpass filter (so the filter response has a maximum value of 1).
        double fcf = W;
        double omega = Math.PI * fcf;
        double fomega = Math.sin(omega);
        double parg0 = Math.PI / (double)(2 * BUTTERWORTH4_ORDER);

        double sf = 1.0;
        for (i = 0; i < BUTTERWORTH4_ORDER / 2; ++i)
        {
            sf *= 1.0 + fomega * Math.sin((double)(2 * i + 1) * parg0);
        }

        if (highpass) {
            fomega = Math.cos(omega / 2.0); // High-pass
            if (BUTTERWORTH4_ORDER % 2 != 0) { sf *= fomega + Math.sin(omega / 2.0); } // Odd order high-pass
        } else {
            fomega = Math.sin(omega / 2.0); // Low-pass
            if (BUTTERWORTH4_ORDER % 2 != 0) { sf *= fomega + Math.cos(omega / 2.0); } // Odd order low-pass
        }

        // Final scaling factor
        sf = Math.pow(fomega, BUTTERWORTH4_ORDER) / sf;

        // Update the coefficients by applying the scaling factor
        for (i = 0; i < BUTTERWORTH4_ORDER; ++i) {
            B[i] = sf * tcof[i];
        }
        B[BUTTERWORTH4_ORDER] = sf * tcof[BUTTERWORTH4_ORDER];
        if (highpass) {
            // High-pass numerator alternates sign on odd-index coefficients.
            for (i = 1; i <= BUTTERWORTH4_ORDER; i += 2) { B[i] = -B[i]; }
        }

        // Begin to calculate the A coefficients for a high-pass or low-pass Butterworth filter
        double theta = Math.PI * W;

        // Binomials
        // b: interleaved (real, imaginary) pairs, one pair per pole.
        double b[] = new double[2 * BUTTERWORTH4_ORDER];
        for (i = 0; i < BUTTERWORTH4_ORDER; i++)
        {
            double parg = Math.PI * (double)(2*i + 1) / (double)(2*BUTTERWORTH4_ORDER);
            double a = 1.0 + Math.sin(theta) * Math.sin(parg);
            b[2 * i] = -Math.cos(theta) / a;
            b[2 * i + 1] = -Math.sin(theta) * Math.cos(parg) / a;
        }

        // Multiply binomials together and returns the coefficients of the resulting polynomial.
        // Complex multiply-accumulate over the interleaved (re, im) pairs.
        double a[] = new double[2 * BUTTERWORTH4_ORDER];
        for (i = 0; i < BUTTERWORTH4_ORDER; i++)
        {
            for (j = i; j > 0; --j)
            {
                a[2 * j] += b[2 * i] * a[2 * (j - 1)] - b[2 * i + 1] * a[2 * (j - 1) + 1];
                a[2 * j + 1] += b[2 * i] * a[2 * (j - 1) + 1] + b[2 * i + 1] * a[2 * (j - 1)];
            }
            a[0] += b[2 * i];
            a[1] += b[2 * i + 1];
        }

        // Read out results as A coefficients for high-pass or low-pass filter.
        // Real parts of the expanded polynomial form the denominator; A[0] is 1.
        A[1] = a[0];
        A[0] = 1.0;
        A[2] = a[2];
        for (i = 3; i <= BUTTERWORTH4_ORDER; ++i)
        {
            A[i] = a[2 * i - 2];
        }
        return;
    }
}
| bsd-2-clause |
kevinsdooapp/jaxb2-basics | runtime/src/main/java/org/jvnet/jaxb2_commons/lang/HashCode2.java | 207 | package org.jvnet.jaxb2_commons.lang;
import org.jvnet.jaxb2_commons.locator.ObjectLocator;
/**
 * Contract for objects whose hash code is computed by a pluggable
 * {@code HashCodeStrategy2}.
 */
public interface HashCode2 {

    /**
     * Computes this object's hash code via the given strategy.
     *
     * @param locator locator describing this object's position (passed to the
     *            strategy; exact semantics defined by the strategy implementation)
     * @param hashCodeStrategy strategy that performs the hashing
     * @return the computed hash code
     */
    public int hashCode(ObjectLocator locator, HashCodeStrategy2 hashCodeStrategy);
}
| bsd-2-clause |
uonafya/jphes-core | dhis-2/dhis-support/dhis-support-hibernate/src/main/java/org/hisp/dhis/datasource/CircularRoutingDataSource.java | 3237 | package org.hisp.dhis.datasource;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import javax.sql.DataSource;
import org.springframework.jdbc.datasource.AbstractDataSource;
import com.google.common.collect.Iterators;
/**
* Data source implementation which routes to the configured target data sources
* in a circular fashion.
*
* @author Lars Helge Overland
*/
public class CircularRoutingDataSource
    extends AbstractDataSource
{
    // Endless round-robin cursor over the target data sources
    // (Guava Iterators.cycle never exhausts a non-empty list).
    private Iterator<DataSource> dataSourceIterator;

    // No-arg constructor leaves dataSourceIterator null.
    // NOTE(review): calling getConnection() on an instance built this way
    // would throw NullPointerException — presumably targets are always
    // supplied via the other constructor; confirm intended usage.
    public CircularRoutingDataSource()
    {
    }

    public CircularRoutingDataSource( List<DataSource> targetDataSources )
    {
        this.dataSourceIterator = Iterators.cycle( Collections.synchronizedList( targetDataSources ) );
    }

    // -------------------------------------------------------------------------
    // AbstractDataSource implementation
    // -------------------------------------------------------------------------

    /** Obtains a connection from the next data source in the rotation. */
    @Override
    public Connection getConnection()
        throws SQLException
    {
        return getDataSource().getConnection();
    }

    /** Obtains a credentialed connection from the next data source in the rotation. */
    @Override
    public Connection getConnection( String username, String password )
        throws SQLException
    {
        return getDataSource().getConnection( username, password );
    }

    // -------------------------------------------------------------------------
    // Private methods
    // -------------------------------------------------------------------------

    // synchronized: Iterators.cycle's iterator is not itself thread-safe, so
    // advancing it is serialized here.
    private synchronized DataSource getDataSource()
    {
        return dataSourceIterator.next();
    }
}
| bsd-3-clause |
ExLibrisGroup/Rosetta.dps-sdk-projects | 5.0/dps-sdk-plugins/src/com/exlibris/rosetta/repository/plugin/mdExtractor/jpeg2000/JPEG2000HULMDExtractorPlugin.java | 16511 | package com.exlibris.rosetta.repository.plugin.mdExtractor.jpeg2000;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.exlibris.core.infra.common.exceptions.logging.ExLogger;
import com.exlibris.rosetta.repository.plugin.mdExtractor.base.AbstractJhoveMDExtractorPlugin;
/**
 * Rosetta metadata-extractor plugin for JPEG 2000 files, backed by the JHOVE
 * JPEG2000-hul module. Declares the full set of JHOVE attribute paths this
 * plugin exposes and delegates the actual extraction to
 * {@code AbstractJhoveMDExtractorPlugin}.
 */
public class JPEG2000HULMDExtractorPlugin extends AbstractJhoveMDExtractorPlugin{

    // NOTE(review): this logger appears unused within this class.
    private static ExLogger log = ExLogger.getExLogger(JPEG2000HULMDExtractorPlugin.class);

    // Name of the JHOVE module used for extraction.
    private static String jhoveModule = "JPEG2000-hul";
    // JHOVE property whose child elements indicate a multi-page image, and
    // the child element name that is counted.
    private static String multiPagePropertyToCheck = "Codestreams";
    private static String multiPagePropertyToCount = "Codestream";
    // Init-parameter key under which the plugin version is supplied.
    private static final String PLUGIN_VERSION_INIT_PARAM = "PLUGIN_VERSION_INIT_PARAM";
    private String pluginVersion = null;

    // All JHOVE attribute paths this plugin supports; returned verbatim by
    // getSupportedAttributeNames().
    private static List<String> attList = new ArrayList<String>();

    static {
        attList.add("JPEG2000Metadata.Brand");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.CodingStyleDefault.CodeBlockHeight");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.CodingStyleDefault.CodeBlockStyle");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.CodingStyleDefault.CodeBlockWidth");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.CodingStyleDefault.CodingStyle");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.CodingStyleDefault.MultipleComponentTransformation");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.CodingStyleDefault.NumberDecompositionLevels");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.CodingStyleDefault.NumberOfLayers");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.CodingStyleDefault.ProgressionOrder");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.CodingStyleDefault.Transformation");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.Comments.Comment[0]");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.ImageAndTileSize.Capabilities");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.ImageAndTileSize.CSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.ImageAndTileSize.SSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.ImageAndTileSize.XOSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.ImageAndTileSize.XRSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.ImageAndTileSize.XSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.ImageAndTileSize.XTOSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.ImageAndTileSize.XTSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.ImageAndTileSize.YOSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.ImageAndTileSize.YRSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.ImageAndTileSize.YSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.ImageAndTileSize.YTOSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.ImageAndTileSize.YTSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.AutoFocus");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.BackLight");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.BitsPerSample");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.Brightness");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ByteOrder");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ChecksumMethod");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ChecksumValue");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.Class");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ColormapBitCodeValue");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ColormapBlueValue");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ColormapGreenValue");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ColormapRedValue");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ColormapReference");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ColorSpace");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ColorTemp");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.CompressionLevel");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.CompressionScheme");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.DateTimeCreated");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.DateTimeProcessed");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.DeviceSource");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.DigitalCameraManufacturer");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.DigitalCameraModel");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.DisplayOrientation");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ExposureBias");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ExposureIndex");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ExposureTime");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ExtraSamples");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.FileSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.Flash");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.FlashEnergy");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.FlashReturn");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.FNumber");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.FocalLength");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.GrayResponseCurve");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.GrayResponseUnit");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.HostComputer");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ImageData");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ImageIdentifier");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ImageIdentifierLocation");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ImageLength");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ImageProducer");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ImageWidth");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.MeteringMode");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.Methodology");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.MimeType");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.Orientation");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.OS");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.OSVersion");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.PerformanceData");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.PixelSize");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.PlanarConfiguration");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.PreferredPresentation");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.PrimaryChromaticitiesBlueX");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.PrimaryChromaticitiesBlueY");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.PrimaryChromaticitiesGreenX");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.PrimaryChromaticitiesGreenY");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.PrimaryChromaticitiesRedX");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.PrimaryChromaticitiesRedY");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ProcessingActions");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ProcessingAgency");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ProcessingSoftwareName");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ProcessingSoftwareVersion");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ProfileName");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.Profiles");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ProfileURL");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ReferenceBlackWhite");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.RowsPerStrip");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.SamplesPerPixel");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.SamplingFrequencyPlane");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.SamplingFrequencyUnit");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ScannerManufacturer");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ScannerModelName");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ScannerModelNumber");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ScannerModelSerialNo");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ScanningSoftware");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ScanningSoftwareVersionNo");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.SceneIlluminant");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.SegmentType");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.Sensor");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.SourceData");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.SourceID");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.SourceType");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.SourceXDimension");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.SourceXDimensionUnit");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.SourceYDimension");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.SourceYDimensionUnit");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.StripByteCounts");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.StripOffsets");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.SubjectDistance");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.TargetIDManufacturer");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.TargetIDMedia");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.TargetIDName");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.TargetIDNo");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.TargetType");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.TileByteCounts");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.TileLength");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.TileOffsets");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.TileWidth");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.ViewerData");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.WhitePointXValue");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.WhitePointYValue");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.XPhysScanResolution");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.XPrintAspectRatio");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.XSamplingFrequency");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.XTargetedDisplayAR");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.YCbCrCoefficients");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.YCbCrPositioning");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.YCbCrSubSampling");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.YPhysScanResolution");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.YPrintAspectRatio");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.YSamplingFrequency");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.NisoImageMetadata.YTargetedDisplayAR");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.QuantizationDefault.QuantizationStyle");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.QuantizationDefault.StepValue");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.Tiles.Tile.TilePart.Index");
        attList.add("JPEG2000Metadata.Codestreams.Codestream.Tiles.Tile.TilePart.Length");
        attList.add("JPEG2000Metadata.ColorspaceUnknown");
        attList.add("JPEG2000Metadata.ColorSpecs.ColorSpec.Approx");
        attList.add("JPEG2000Metadata.ColorSpecs.ColorSpec.EnumCS");
        attList.add("JPEG2000Metadata.ColorSpecs.ColorSpec.Method");
        attList.add("JPEG2000Metadata.ColorSpecs.ColorSpec.Precedence");
        attList.add("JPEG2000Metadata.ColorSpecs.ColorSpec.RestrictedICCProfile");
        attList.add("JPEG2000Metadata.Compatibility");
        attList.add("JPEG2000Metadata.DefaultDisplayResolution.HorizResolution.Denominator");
        attList.add("JPEG2000Metadata.DefaultDisplayResolution.HorizResolution.Exponent");
        attList.add("JPEG2000Metadata.DefaultDisplayResolution.HorizResolution.Numerator");
        attList.add("JPEG2000Metadata.DefaultDisplayResolution.VertResolution.Denominator");
        attList.add("JPEG2000Metadata.DefaultDisplayResolution.VertResolution.Exponent");
        attList.add("JPEG2000Metadata.DefaultDisplayResolution.VertResolution.Numerator");
        attList.add("JPEG2000Metadata.MinorVersion");
        attList.add("JPEG2000Metadata.XML");
    }

    /**
     * Extracts metadata from the given file by delegating to the base class
     * with this plugin's JHOVE module and multi-page settings.
     *
     * @param fileName path of the file to examine
     * @throws Exception if extraction fails (propagated from the base class)
     */
    @Override
    public void extract(String fileName) throws Exception {
        super.extract(fileName,jhoveModule, multiPagePropertyToCheck, multiPagePropertyToCount,pluginVersion);
    }

    /** Returns the attribute paths this plugin can extract (the shared static list). */
    @Override
    public List<String> getSupportedAttributeNames() {
        return attList;
    }

    /** Name of the underlying extraction agent, including the JHOVE release. */
    public String getAgentName()
    {
        return "JHOVE , JPEG2000-hul " + getRelease();
    }

    /** Agent name plus this plugin's configured version. */
    public String getAgent()
    {
        return getAgentName() + " , Plugin Version " + pluginVersion;
    }

    /** Reads the plugin version from the plugin's init parameters. */
    public void initParams(Map<String, String> initParams) {
        this.pluginVersion = initParams.get(PLUGIN_VERSION_INIT_PARAM);
    }
}
| bsd-3-clause |
hzhao/lemur-galago | tupleflow/src/main/java/org/lemurproject/galago/tupleflow/execution/NetworkedCounter.java | 1686 | package org.lemurproject.galago.tupleflow.execution;
import org.lemurproject.galago.utility.debug.Counter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
/**
 * {@code Counter} implementation that reports its value to a remote job
 * controller over HTTP. Updates are only transmitted on {@link #flush()},
 * and only when the count has changed since the last successful flush.
 */
public class NetworkedCounter implements Counter {

  long count = 0;
  // Sentinel below any real count so the first flush always transmits.
  long lastFlushCount = Long.MIN_VALUE;
  String counterName;
  String stageName;
  String instance;
  String url;

  public NetworkedCounter(String counterName, String stageName, String instance, String url) {
    super();
    this.counterName = counterName;
    this.stageName = stageName;
    this.instance = instance;
    this.url = url;
  }

  /** Increments the counter by one. */
  public void increment() {
    incrementBy(1);
  }

  /** Increments the counter by {@code value}. */
  public void incrementBy(int value) {
    count += value;
  }

  /**
   * Pushes the current count to the controller. Best-effort: any failure is
   * ignored and the update is retried on the next flush, because
   * lastFlushCount is only advanced after a successful send.
   */
  public void flush() {
    // No need to send updates for counters that aren't changing.
    if (lastFlushCount == count) {
      return;
    }
    try {
      String fullUrl = String.format("%s/setcounter?counterName=%s&stageName=%s&create=%s&value=%d",
              url, URLEncoder.encode(counterName, "UTF-8"),
              URLEncoder.encode(stageName, "UTF-8"),
              URLEncoder.encode(instance, "UTF-8"), count);
      connectUrl(fullUrl);
      lastFlushCount = count;
    } catch (Exception ignored) {
      // Counter updates are advisory; a failed send will be retried on the
      // next flush since lastFlushCount was not advanced.
    }
  }

  /**
   * Issues a GET request to {@code url} with a short connect timeout.
   *
   * @throws IOException if the connection or read fails
   */
  public void connectUrl(String url) throws MalformedURLException, IOException {
    URLConnection connection = new URL(url).openConnection();
    // limit the connection attempt to 1 sec -- just in case.
    connection.setConnectTimeout(1000); // 1 s
    connection.connect();
    connection.getInputStream().close();
    // BUG FIX: the previous code also called connection.getOutputStream().close(),
    // but output was never enabled via setDoOutput(true), so that call always
    // threw after the request had already completed. The exception prevented
    // flush() from recording lastFlushCount, causing unchanged counts to be
    // re-sent on every flush.
  }
}
| bsd-3-clause |
garrettjonesgoogle/api-client-staging | generated/java/proto-google-appengine-v1/src/main/java/com/google/appengine/v1/LocationMetadata.java | 19393 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/appengine/v1/location.proto
package com.google.appengine.v1;
/**
* <pre>
* Metadata for the given [google.cloud.location.Location][google.cloud.location.Location].
* </pre>
*
* Protobuf type {@code google.appengine.v1.LocationMetadata}
*/
public final class LocationMetadata extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.appengine.v1.LocationMetadata)
LocationMetadataOrBuilder {
private static final long serialVersionUID = 0L;
// Use LocationMetadata.newBuilder() to construct.
private LocationMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private LocationMetadata() {
standardEnvironmentAvailable_ = false;
flexibleEnvironmentAvailable_ = false;
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private LocationMetadata(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownFieldProto3(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 16: {
standardEnvironmentAvailable_ = input.readBool();
break;
}
case 32: {
flexibleEnvironmentAvailable_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.appengine.v1.LocationProto.internal_static_google_appengine_v1_LocationMetadata_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.appengine.v1.LocationProto.internal_static_google_appengine_v1_LocationMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.appengine.v1.LocationMetadata.class, com.google.appengine.v1.LocationMetadata.Builder.class);
}
public static final int STANDARD_ENVIRONMENT_AVAILABLE_FIELD_NUMBER = 2;
private boolean standardEnvironmentAvailable_;
/**
* <pre>
* App Engine Standard Environment is available in the given location.
* @OutputOnly
* </pre>
*
* <code>bool standard_environment_available = 2;</code>
*/
public boolean getStandardEnvironmentAvailable() {
return standardEnvironmentAvailable_;
}
public static final int FLEXIBLE_ENVIRONMENT_AVAILABLE_FIELD_NUMBER = 4;
private boolean flexibleEnvironmentAvailable_;
/**
* <pre>
* App Engine Flexible Environment is available in the given location.
* @OutputOnly
* </pre>
*
* <code>bool flexible_environment_available = 4;</code>
*/
public boolean getFlexibleEnvironmentAvailable() {
return flexibleEnvironmentAvailable_;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (standardEnvironmentAvailable_ != false) {
output.writeBool(2, standardEnvironmentAvailable_);
}
if (flexibleEnvironmentAvailable_ != false) {
output.writeBool(4, flexibleEnvironmentAvailable_);
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (standardEnvironmentAvailable_ != false) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(2, standardEnvironmentAvailable_);
}
if (flexibleEnvironmentAvailable_ != false) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(4, flexibleEnvironmentAvailable_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.appengine.v1.LocationMetadata)) {
return super.equals(obj);
}
com.google.appengine.v1.LocationMetadata other = (com.google.appengine.v1.LocationMetadata) obj;
boolean result = true;
result = result && (getStandardEnvironmentAvailable()
== other.getStandardEnvironmentAvailable());
result = result && (getFlexibleEnvironmentAvailable()
== other.getFlexibleEnvironmentAvailable());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + STANDARD_ENVIRONMENT_AVAILABLE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
getStandardEnvironmentAvailable());
hash = (37 * hash) + FLEXIBLE_ENVIRONMENT_AVAILABLE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
getFlexibleEnvironmentAvailable());
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.appengine.v1.LocationMetadata parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.appengine.v1.LocationMetadata parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.appengine.v1.LocationMetadata parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.appengine.v1.LocationMetadata parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.appengine.v1.LocationMetadata parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.appengine.v1.LocationMetadata parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.appengine.v1.LocationMetadata parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.appengine.v1.LocationMetadata parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.appengine.v1.LocationMetadata parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.appengine.v1.LocationMetadata parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.appengine.v1.LocationMetadata parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.appengine.v1.LocationMetadata parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.appengine.v1.LocationMetadata prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Metadata for the given [google.cloud.location.Location][google.cloud.location.Location].
* </pre>
*
* Protobuf type {@code google.appengine.v1.LocationMetadata}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.appengine.v1.LocationMetadata)
com.google.appengine.v1.LocationMetadataOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.appengine.v1.LocationProto.internal_static_google_appengine_v1_LocationMetadata_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.appengine.v1.LocationProto.internal_static_google_appengine_v1_LocationMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.appengine.v1.LocationMetadata.class, com.google.appengine.v1.LocationMetadata.Builder.class);
}
// Construct using com.google.appengine.v1.LocationMetadata.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
standardEnvironmentAvailable_ = false;
flexibleEnvironmentAvailable_ = false;
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.appengine.v1.LocationProto.internal_static_google_appengine_v1_LocationMetadata_descriptor;
}
public com.google.appengine.v1.LocationMetadata getDefaultInstanceForType() {
return com.google.appengine.v1.LocationMetadata.getDefaultInstance();
}
public com.google.appengine.v1.LocationMetadata build() {
com.google.appengine.v1.LocationMetadata result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.appengine.v1.LocationMetadata buildPartial() {
com.google.appengine.v1.LocationMetadata result = new com.google.appengine.v1.LocationMetadata(this);
result.standardEnvironmentAvailable_ = standardEnvironmentAvailable_;
result.flexibleEnvironmentAvailable_ = flexibleEnvironmentAvailable_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.appengine.v1.LocationMetadata) {
return mergeFrom((com.google.appengine.v1.LocationMetadata)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.appengine.v1.LocationMetadata other) {
if (other == com.google.appengine.v1.LocationMetadata.getDefaultInstance()) return this;
if (other.getStandardEnvironmentAvailable() != false) {
setStandardEnvironmentAvailable(other.getStandardEnvironmentAvailable());
}
if (other.getFlexibleEnvironmentAvailable() != false) {
setFlexibleEnvironmentAvailable(other.getFlexibleEnvironmentAvailable());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.appengine.v1.LocationMetadata parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.appengine.v1.LocationMetadata) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private boolean standardEnvironmentAvailable_ ;
/**
* <pre>
* App Engine Standard Environment is available in the given location.
* @OutputOnly
* </pre>
*
* <code>bool standard_environment_available = 2;</code>
*/
public boolean getStandardEnvironmentAvailable() {
return standardEnvironmentAvailable_;
}
/**
* <pre>
* App Engine Standard Environment is available in the given location.
* @OutputOnly
* </pre>
*
* <code>bool standard_environment_available = 2;</code>
*/
public Builder setStandardEnvironmentAvailable(boolean value) {
standardEnvironmentAvailable_ = value;
onChanged();
return this;
}
/**
* <pre>
* App Engine Standard Environment is available in the given location.
* @OutputOnly
* </pre>
*
* <code>bool standard_environment_available = 2;</code>
*/
public Builder clearStandardEnvironmentAvailable() {
standardEnvironmentAvailable_ = false;
onChanged();
return this;
}
private boolean flexibleEnvironmentAvailable_ ;
/**
* <pre>
* App Engine Flexible Environment is available in the given location.
* @OutputOnly
* </pre>
*
* <code>bool flexible_environment_available = 4;</code>
*/
public boolean getFlexibleEnvironmentAvailable() {
return flexibleEnvironmentAvailable_;
}
/**
* <pre>
* App Engine Flexible Environment is available in the given location.
* @OutputOnly
* </pre>
*
* <code>bool flexible_environment_available = 4;</code>
*/
public Builder setFlexibleEnvironmentAvailable(boolean value) {
flexibleEnvironmentAvailable_ = value;
onChanged();
return this;
}
/**
* <pre>
* App Engine Flexible Environment is available in the given location.
* @OutputOnly
* </pre>
*
* <code>bool flexible_environment_available = 4;</code>
*/
public Builder clearFlexibleEnvironmentAvailable() {
flexibleEnvironmentAvailable_ = false;
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.appengine.v1.LocationMetadata)
}
// @@protoc_insertion_point(class_scope:google.appengine.v1.LocationMetadata)
private static final com.google.appengine.v1.LocationMetadata DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.appengine.v1.LocationMetadata();
}
public static com.google.appengine.v1.LocationMetadata getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<LocationMetadata>
PARSER = new com.google.protobuf.AbstractParser<LocationMetadata>() {
public LocationMetadata parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new LocationMetadata(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<LocationMetadata> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<LocationMetadata> getParserForType() {
return PARSER;
}
public com.google.appengine.v1.LocationMetadata getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| bsd-3-clause |
dhis2/dhis2-core | dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/events/importer/shared/validation/EventBaseCheck.java | 5567 | /*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.dxf2.events.importer.shared.validation;
import static org.hisp.dhis.dxf2.importsummary.ImportStatus.ERROR;
import static org.hisp.dhis.program.ProgramStatus.COMPLETED;
import static org.hisp.dhis.security.Authorities.F_EDIT_EXPIRED;
import static org.hisp.dhis.util.DateUtils.dateIsValid;
import static org.hisp.dhis.util.DateUtils.parseDate;
import static org.hisp.dhis.util.DateUtils.removeTimeStamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.hisp.dhis.dxf2.common.ImportOptions;
import org.hisp.dhis.dxf2.events.importer.Checker;
import org.hisp.dhis.dxf2.events.importer.context.WorkContext;
import org.hisp.dhis.dxf2.events.importer.shared.ImmutableEvent;
import org.hisp.dhis.dxf2.importsummary.ImportSummary;
import org.hisp.dhis.program.ProgramInstance;
import org.springframework.stereotype.Component;
/**
 * Basic validation for imported events: verifies the format of the event's
 * date fields and the state of the enrollment (program instance) the event
 * belongs to.
 *
 * @author Luciano Fiandesio
 */
@Component
public class EventBaseCheck implements Checker
{
    @Override
    public ImportSummary check( ImmutableEvent event, WorkContext ctx )
    {
        List<String> errors = validate( event, ctx );

        ImportSummary importSummary = new ImportSummary();

        if ( errors.isEmpty() )
        {
            return importSummary;
        }

        importSummary.setStatus( ERROR );
        importSummary.setReference( event.getEvent() );
        for ( String error : errors )
        {
            importSummary.addConflict( "Event", error );
        }
        importSummary.incrementIgnored();

        return importSummary;
    }

    /** Collects all validation errors for the given event. */
    private List<String> validate( ImmutableEvent event, WorkContext ctx )
    {
        List<String> errors = new ArrayList<>();
        validateDates( event, errors );
        validateProgramInstance( event, ctx, errors );
        return errors;
    }

    /** Checks each optional date field of the event for a parseable format. */
    private void validateDates( ImmutableEvent event, List<String> errors )
    {
        addErrorIfInvalidDate( event.getDueDate(), "Invalid event due date: ", errors );
        addErrorIfInvalidDate( event.getEventDate(), "Invalid event date: ", errors );
        addErrorIfInvalidDate( event.getCreatedAtClient(), "Invalid event created at client date: ", errors );
        addErrorIfInvalidDate( event.getLastUpdatedAtClient(), "Invalid event last updated at client date: ", errors );
    }

    /** Adds {@code prefix + value} to {@code errors} when a non-null date string fails validation. */
    private void addErrorIfInvalidDate( String value, String prefix, List<String> errors )
    {
        if ( value != null && !dateIsValid( value ) )
        {
            errors.add( prefix + value );
        }
    }

    /**
     * Verifies that the event has an enrollment and, when the enrollment is
     * completed, that the importing user is allowed to add events to it.
     */
    private void validateProgramInstance( ImmutableEvent event, WorkContext ctx, List<String> errors )
    {
        ProgramInstance programInstance = ctx.getProgramInstanceMap().get( event.getUid() );

        if ( programInstance == null )
        {
            errors.add( "No program instance found for event: " + event.getEvent() );
            return;
        }

        if ( !COMPLETED.equals( programInstance.getStatus() ) )
        {
            return;
        }

        ImportOptions importOptions = ctx.getImportOptions();

        // Imports with no user context, or users holding F_EDIT_EXPIRED, may
        // add events to completed enrollments.
        if ( importOptions == null || importOptions.getUser() == null
            || importOptions.getUser().isAuthorized( F_EDIT_EXPIRED.getAuthority() ) )
        {
            return;
        }

        Date createdDate = parseDate( event.getCreated() );
        Date referenceDate = removeTimeStamp( createdDate == null ? new Date() : createdDate );
        Date completedDate = removeTimeStamp( programInstance.getEndDate() );

        if ( referenceDate.after( completedDate ) )
        {
            errors.add( "Not possible to add event to a completed enrollment. Event created date ( " + referenceDate
                + " ) is after enrollment completed date ( " + completedDate + " )." );
        }
    }
}
| bsd-3-clause |
ylfonline/ormlite-core | src/main/java/com/j256/ormlite/field/types/BaseDataType.java | 4601 | package com.j256.ormlite.field.types;
import java.lang.reflect.Field;
import java.sql.SQLException;
import com.j256.ormlite.field.BaseFieldConverter;
import com.j256.ormlite.field.DataPersister;
import com.j256.ormlite.field.FieldType;
import com.j256.ormlite.field.SqlType;
import com.j256.ormlite.support.DatabaseResults;
/**
 * Base data type that defines the default persistance methods for the various data types.
 *
 * <p>
 * Here's a good page about the <a href="http://docs.codehaus.org/display/CASTOR/Type+Mapping" >mapping for a number of
 * database types</a>:
 * </p>
 *
 * <p>
 * <b>NOTE:</b> If you are creating your own custom database persister, you probably will need to override the
 * {@link BaseFieldConverter#sqlArgToJava(FieldType, Object, int)} method as well which converts from a SQL data to
 * java.
 * </p>
 *
 * @author graywatson
 */
public abstract class BaseDataType extends BaseFieldConverter implements DataPersister {

	private static final Class<?>[] NO_ASSOCIATED_CLASSES = new Class<?>[0];

	// SQL-land type used when persisting values of this data type; for example a
	// DateTime stored as epoch milliseconds would use SqlType.LONG.
	private final SqlType sqlType;
	// Java classes that should always map to this persister; empty for custom persisters.
	private final Class<?>[] classes;

	/**
	 * @param sqlType
	 *            Type of the class as it is persisted in the databases.
	 * @param classes
	 *            Associated classes for this type. These should be specified if you want this type to be always used
	 *            for these Java classes. If this is a custom persister then this array should be empty.
	 */
	public BaseDataType(SqlType sqlType, Class<?>[] classes) {
		this.sqlType = sqlType;
		this.classes = classes;
	}

	/**
	 * @param sqlType
	 *            Type of the class as it is persisted in the databases.
	 */
	public BaseDataType(SqlType sqlType) {
		this(sqlType, NO_ASSOCIATED_CLASSES);
	}

	public abstract Object parseDefaultString(FieldType fieldType, String defaultStr) throws SQLException;

	public abstract Object resultToSqlArg(FieldType fieldType, DatabaseResults results, int columnPos)
			throws SQLException;

	public boolean isValidForField(Field field) {
		if (classes.length == 0) {
			// with no associated classes we cannot check, so assume the field is valid
			return true;
		}
		Class<?> fieldClass = field.getType();
		for (Class<?> candidate : classes) {
			if (candidate.isAssignableFrom(fieldClass)) {
				return true;
			}
		}
		// classes were specified but none of them matched the field's type
		return false;
	}

	public Class<?> getPrimaryClass() {
		return (classes.length == 0) ? null : classes[0];
	}

	/**
	 * @throws SQLException
	 *             If there are problems creating the config object. Needed for subclasses.
	 */
	public Object makeConfigObject(FieldType fieldType) throws SQLException {
		// no per-field configuration by default
		return null;
	}

	public SqlType getSqlType() {
		return sqlType;
	}

	public Class<?>[] getAssociatedClasses() {
		return classes;
	}

	public String[] getAssociatedClassNames() {
		// only persisters that are configured by class name override this
		return null;
	}

	public Object convertIdNumber(Number number) {
		// by default the type cannot convert an id number
		return null;
	}

	public boolean isValidGeneratedType() {
		return false;
	}

	public boolean isEscapedDefaultValue() {
		// escape the default value exactly when regular values are escaped
		return isEscapedValue();
	}

	public boolean isEscapedValue() {
		return true;
	}

	public boolean isPrimitive() {
		return false;
	}

	public boolean isComparable() {
		return true;
	}

	public boolean isAppropriateId() {
		return true;
	}

	public boolean isArgumentHolderRequired() {
		return false;
	}

	public boolean isSelfGeneratedId() {
		return false;
	}

	public Object generateId() {
		throw new IllegalStateException("Should not have tried to generate this type");
	}

	public int getDefaultWidth() {
		return 0;
	}

	public boolean dataIsEqual(Object fieldObj1, Object fieldObj2) {
		if (fieldObj1 == null || fieldObj2 == null) {
			// equal only when both sides are null
			return fieldObj1 == fieldObj2;
		}
		return fieldObj1.equals(fieldObj2);
	}

	public boolean isValidForVersion() {
		return false;
	}

	/**
	 * Move the current-value to the next value. Used for the version field.
	 *
	 * @throws SQLException
	 *             For sub-classes.
	 */
	public Object moveToNextValue(Object currentValue) throws SQLException {
		return null;
	}

	public Object resultStringToJava(FieldType fieldType, String stringValue, int columnPos) throws SQLException {
		Object parsed = parseDefaultString(fieldType, stringValue);
		return sqlArgToJava(fieldType, parsed, columnPos);
	}
}
| isc |
ledrui/UCSDUnfoldingMap-Java-Project | processing-3.0b7/modes/java/src/processing/mode/java/pdex/ErrorMessageSimplifier.java | 10247 | /* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
/*
Part of the Processing project - http://processing.org
Copyright (c) 2012-15 The Processing Foundation
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2
as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software Foundation, Inc.
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package processing.mode.java.pdex;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.TreeMap;
import org.eclipse.jdt.core.compiler.IProblem;
import org.eclipse.jdt.internal.compiler.problem.DefaultProblem;
import processing.app.Language;
import processing.core.PApplet;
import processing.data.StringList;
public class ErrorMessageSimplifier {
/**
 * Lazily built mapping from ecj problem-id constants (the public static int
 * fields of DefaultProblem) to their declared field names, used to turn
 * eight-digit int ids into readable names while simplifying error messages.
 * Holds roughly 650 entries. TODO: this is temporary.
 * NOTE(review): written from a background thread (see the constructor) and
 * read via getIDName() without synchronization -- confirm thread-safety.
 */
private static TreeMap<Integer, String> constantsMap;
/**
 * Kicks off the (reflection-based, potentially slow) build of the
 * constant-name lookup table on a background thread so construction does not
 * block the caller.
 * NOTE(review): all other members are static; constructing an instance only
 * warms the cache -- confirm this is the intended design.
 */
public ErrorMessageSimplifier() {
  new Thread() {
    public void run() {
      prepareConstantsList();
    }
  }.start();
}
/**
 * Populates {@link #constantsMap} with a mapping from every public static
 * Integer constant declared on {@link DefaultProblem} (the IProblem ids) to
 * the name of the declaring field.
 *
 * The map is built into a local variable and published with one final
 * assignment so that concurrent readers in {@link #getIDName(int)} never
 * observe a partially filled map (previously the empty map was assigned to
 * the field before being populated). NOTE(review): the field is not
 * volatile, so strict cross-thread visibility is still not guaranteed.
 */
private static void prepareConstantsList() {
  TreeMap<Integer, String> map = new TreeMap<Integer, String>();
  for (Field field : DefaultProblem.class.getFields()) {
    if (!Modifier.isStatic(field.getModifiers())) {
      continue;
    }
    try {
      //System.out.println(field.getName() + " :" + field.get(null));
      Object val = field.get(null);
      if (val instanceof Integer) {
        map.put((Integer) val, field.getName());
      }
    } catch (Exception e) {
      // Keep whatever was collected so far; a single bad field aborts the scan.
      e.printStackTrace();
      break;
    }
  }
  constantsMap = map;
  //System.out.println("Total items: " + constantsMap.size());
}
/**
 * Returns the DefaultProblem constant name for the given problem id,
 * building the lookup table on first use.
 */
public static String getIDName(int id) {
  TreeMap<Integer, String> map = constantsMap;
  if (map == null) {
    prepareConstantsList();
    map = constantsMap;
  }
  return map.get(id);
}
/**
 * Tones down the jargon in the ecj reported errors, mapping well-known
 * problem ids to friendlier, localized messages. Falls back to the raw ecj
 * message when no simplification rule matches.
 *
 * @param problem the wrapped ecj problem, may be null
 * @return the simplified (or original) message, or null when problem is null
 */
public static String getSimplifiedErrorMessage(Problem problem) {
  if (problem == null) return null;
  IProblem iprob = problem.getIProblem();
  String[] args = iprob.getArguments();
  // Base.log("Simplifying message: " + problem.getMessage() + " ID: "
  //          + getIDName(iprob.getID()));
  // Base.log("Arg count: " + args.length);
  // for (int i = 0; i < args.length; i++) {
  //   Base.log("Arg " + args[i]);
  // }

  String result = null;
  switch (iprob.getID()) {
  case IProblem.ParsingError:
    if (args.length > 0) {
      result = Language.interpolate("editor.status.error_on", args[0]);
    }
    break;
  case IProblem.ParsingErrorDeleteToken:
    if (args.length > 0) {
      result = Language.interpolate("editor.status.error_on", args[0]);
    }
    break;
  case IProblem.ParsingErrorInsertToComplete:
    if (args.length > 0) {
      if (args[0].length() == 1) {
        result = getErrorMessageForBracket(args[0].charAt(0));
      } else {
        if (args[0].equals("AssignmentOperator Expression")) {
          result = Language.interpolate("editor.status.missing.add", "=");
        } else if (args[0].equalsIgnoreCase(") Statement")) {
          result = getErrorMessageForBracket(args[0].charAt(0));
        } else {
          result = Language.interpolate("editor.status.error_on", args[0]);
        }
      }
    }
    break;
  case IProblem.ParsingErrorInvalidToken:
    // BUG FIX: this case reads args[1], so require at least two arguments.
    // The previous guard (args.length > 0) could throw
    // ArrayIndexOutOfBoundsException when ecj supplied a single argument.
    if (args.length > 1) {
      if (args[1].equals("VariableDeclaratorId")) {
        if (args[0].equals("int")) {
          result = Language.text("editor.status.reserved_words");
        } else {
          result = Language.interpolate("editor.status.error_on", args[0]);
        }
      } else {
        result = Language.interpolate("editor.status.error_on", args[0]);
      }
    }
    break;
  case IProblem.ParsingErrorInsertTokenAfter:
    // BUG FIX: same as above -- args[1] is read, so guard with length > 1.
    if (args.length > 1) {
      if (args[1].length() == 1) {
        result = getErrorMessageForBracket(args[1].charAt(0));
      } else {
        // https://github.com/processing/processing/issues/3104
        if (args[1].equalsIgnoreCase("Statement")) {
          result = Language.interpolate("editor.status.error_on", args[0]);
        } else {
          result =
            Language.interpolate("editor.status.error_on", args[0]) + " " +
            Language.interpolate("editor.status.missing.add", args[1]);
        }
      }
    }
    break;
  case IProblem.UndefinedConstructor:
    if (args.length == 2) {
      String constructorName = args[0];
      // For messages such as "contructor sketch_name.ClassXYZ() is undefined",
      // change constructor name to "ClassXYZ()". See #3434
      if (constructorName.contains(".")) {
        // arg[0] contains sketch name twice: sketch_150705a.sketch_150705a.Thing
        constructorName = constructorName.substring(constructorName.indexOf('.') + 1);
        constructorName = constructorName.substring(constructorName.indexOf('.') + 1);
      }
      String constructorArgs = removePackagePrefixes(args[args.length - 1]);
      result = Language.interpolate("editor.status.undefined_constructor", constructorName, constructorArgs);
    }
    break;
  case IProblem.UndefinedMethod:
    if (args.length > 2) {
      // last two arguments are the method name and its parameter list
      String methodName = args[args.length - 2];
      String methodArgs = removePackagePrefixes(args[args.length - 1]);
      result = Language.interpolate("editor.status.undefined_method", methodName, methodArgs);
    }
    break;
  case IProblem.ParameterMismatch:
    if (args.length > 3) {
      // 2nd arg is method name, 3rd arg is correct param list
      if (args[2].trim().length() == 0) {
        // the case where no params are needed.
        result = Language.interpolate("editor.status.empty_param", args[1]);
      } else {
        result = Language.interpolate("editor.status.wrong_param",
                                      args[1], args[1], removePackagePrefixes(args[2]));
      }
    }
    break;
  case IProblem.UndefinedField:
    if (args.length > 0) {
      result = Language.interpolate("editor.status.undef_global_var", args[0]);
    }
    break;
  case IProblem.UndefinedType:
    if (args.length > 0) {
      result = Language.interpolate("editor.status.undef_class", args[0]);
    }
    break;
  case IProblem.UnresolvedVariable:
    if (args.length > 0) {
      result = Language.interpolate("editor.status.undef_var", args[0]);
    }
    break;
  case IProblem.UndefinedName:
    if (args.length > 0) {
      result = Language.interpolate("editor.status.undef_name", args[0]);
    }
    break;
  case IProblem.TypeMismatch:
    if (args.length > 1) {
      result = Language.interpolate("editor.status.type_mismatch", args[0], args[1]);
    }
    break;
  case IProblem.LocalVariableIsNeverUsed:
    if (args.length > 0) {
      result = Language.interpolate("editor.status.unused_variable", args[0]);
    }
    break;
  case IProblem.UninitializedLocalVariable:
    if (args.length > 0) {
      result = Language.interpolate("editor.status.uninitialized_variable", args[0]);
    }
    break;
  case IProblem.AssignmentHasNoEffect:
    if (args.length > 0) {
      result = Language.interpolate("editor.status.no_effect_assignment", args[0]);
    }
    break;
  }
  //log("Simplified Error Msg: " + result);
  // Fall back to the unmodified ecj message when no rule matched.
  return (result == null) ? problem.getMessage() : result;
}
/**
* Converts java.lang.String into String, etc
*/
static private String removePackagePrefixes(String input) {
if (!input.contains(".")) {
return input;
}
String[] names = PApplet.split(input, ',');
// List<String> names = new ArrayList<String>();
// if (inp.indexOf(',') >= 0) {
// names.addAll(Arrays.asList(inp.split(",")));
// } else {
// names.add(inp);
// }
StringList result = new StringList();
for (String name : names) {
int dot = name.lastIndexOf('.');
if (dot >= 0) {
name = name.substring(dot + 1, name.length());
}
result.append(name);
}
return result.join(", ");
}
static private String getErrorMessageForBracket(char c) {
switch (c) {
case ';': return Language.text("editor.status.missing.semicolon");
case '[': return Language.text("editor.status.missing.left_sq_bracket");
case ']': return Language.text("editor.status.missing.right_sq_bracket");
case '(': return Language.text("editor.status.missing.left_paren");
case ')': return Language.text("editor.status.missing.right_paren");
case '{': return Language.text("editor.status.missing.left_curly_bracket");
case '}': return Language.text("editor.status.missing.right_curly_bracket");
}
// This seems to be unreachable and wasn't in PDE.properties.
// I've added it for 3.0a8, but that seems gross. [fry]
return Language.interpolate("editor.status.missing.default", c);
}
// static private final String q(Object quotable) {
// return "\"" + quotable + "\"";
// }
// static private final String qs(Object quotable) {
// return " " + q(quotable);
// }
}
| mit |
stachon/XChange | xchange-bitfinex/src/main/java/org/knowm/xchange/bitfinex/service/BitfinexBaseService.java | 1921 | package org.knowm.xchange.bitfinex.service;
import org.knowm.xchange.bitfinex.BitfinexExchange;
import org.knowm.xchange.bitfinex.v1.BitfinexAuthenticated;
import org.knowm.xchange.bitfinex.v1.BitfinexDigest;
import org.knowm.xchange.bitfinex.v2.BitfinexHmacSignature;
import org.knowm.xchange.client.ExchangeRestProxyBuilder;
import org.knowm.xchange.client.ResilienceRegistries;
import org.knowm.xchange.service.BaseResilientExchangeService;
import org.knowm.xchange.service.BaseService;
import si.mazi.rescu.ParamsDigest;
public class BitfinexBaseService extends BaseResilientExchangeService<BitfinexExchange>
    implements BaseService {

  protected final String apiKey;

  // v1 API proxy and its request signers.
  protected final BitfinexAuthenticated bitfinex;
  protected final ParamsDigest signatureCreator;
  protected final ParamsDigest payloadCreator;

  // v2 API proxy uses a separate HMAC-based signature scheme.
  protected final org.knowm.xchange.bitfinex.v2.BitfinexAuthenticated bitfinexV2;
  protected final BitfinexHmacSignature signatureV2;

  /**
   * Wires up the REST proxies and request signers for both the v1 and v2
   * Bitfinex APIs, using the credentials from the exchange specification.
   *
   * @param exchange the exchange instance supplying the specification
   * @param resilienceRegistries registries backing resilience handling
   */
  public BitfinexBaseService(BitfinexExchange exchange, ResilienceRegistries resilienceRegistries) {
    super(exchange, resilienceRegistries);
    this.apiKey = exchange.getExchangeSpecification().getApiKey();

    // v1 endpoint setup.
    this.bitfinex =
        ExchangeRestProxyBuilder.forInterface(
                BitfinexAuthenticated.class, exchange.getExchangeSpecification())
            .build();
    this.signatureCreator =
        BitfinexDigest.createInstance(exchange.getExchangeSpecification().getSecretKey());
    this.payloadCreator = new BitfinexPayloadDigest();

    // v2 endpoint setup.
    this.bitfinexV2 =
        ExchangeRestProxyBuilder.forInterface(
                org.knowm.xchange.bitfinex.v2.BitfinexAuthenticated.class,
                exchange.getExchangeSpecification())
            .build();
    this.signatureV2 =
        BitfinexHmacSignature.createInstance(exchange.getExchangeSpecification().getSecretKey());
  }
}
| mit |
selvasingh/azure-sdk-for-java | sdk/cognitiveservices/ms-azure-cs-contentmoderator/src/main/java/com/microsoft/azure/cognitiveservices/vision/contentmoderator/models/Phone.java | 2006 | /**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.cognitiveservices.vision.contentmoderator.models;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Details of a single phone number detected in the input text content.
 */
public class Phone {
    /**
     * Country code of the detected phone number.
     */
    @JsonProperty(value = "CountryCode")
    private String countryCode;

    /**
     * The detected phone number itself.
     */
    @JsonProperty(value = "Text")
    private String text;

    /**
     * Index (location) of the phone number within the input text content.
     */
    @JsonProperty(value = "Index")
    private Integer index;

    /**
     * Returns the country code of the detected phone number.
     *
     * @return the country code value
     */
    public String countryCode() {
        return countryCode;
    }

    /**
     * Sets the country code of the detected phone number.
     *
     * @param countryCode the country code value to set
     * @return this {@code Phone} instance, for call chaining
     */
    public Phone withCountryCode(String countryCode) {
        this.countryCode = countryCode;
        return this;
    }

    /**
     * Returns the detected phone number.
     *
     * @return the text value
     */
    public String text() {
        return text;
    }

    /**
     * Sets the detected phone number.
     *
     * @param text the text value to set
     * @return this {@code Phone} instance, for call chaining
     */
    public Phone withText(String text) {
        this.text = text;
        return this;
    }

    /**
     * Returns the index (location) of the phone number in the input text.
     *
     * @return the index value
     */
    public Integer index() {
        return index;
    }

    /**
     * Sets the index (location) of the phone number in the input text.
     *
     * @param index the index value to set
     * @return this {@code Phone} instance, for call chaining
     */
    public Phone withIndex(Integer index) {
        this.index = index;
        return this;
    }
}
| mit |
aspose-cells/Aspose.Cells-for-Cloud | SDKs/Aspose.Cells-Cloud-SDK-for-Java/src/main/java/com/aspose/cells/model/CellsResponse.java | 1220 | package com.aspose.cells.model;
/**
 * Response wrapper returned by the Aspose Cells API: carries the payload
 * ({@code Cells}) together with a status code and status message.
 *
 * NOTE(review): field names are capitalized (Cells/Code/Status), which looks
 * generated — presumably serialization maps on these names, so they must not
 * be renamed. Confirm against the SDK's (de)serializer before refactoring.
 */
public class CellsResponse {
  // Payload of the response.
  private Cells Cells = null;
  // Status code reported by the service.
  private String Code = null;
  // Human-readable status message.
  private String Status = null;

  /**
   * Returns the {@code Cells} payload of this response.
   *
   * @return the payload, or {@code null} if not set
   */
  public Cells getCells() {
    return Cells;
  }

  /**
   * Sets the {@code Cells} payload of this response.
   *
   * @param Cells the payload to set
   */
  public void setCells(Cells Cells) {
    this.Cells = Cells;
  }

  /**
   * Returns the status code reported by the service.
   *
   * @return the status code, or {@code null} if not set
   */
  public String getCode() {
    return Code;
  }

  /**
   * Sets the status code.
   *
   * @param Code the status code to set
   */
  public void setCode(String Code) {
    this.Code = Code;
  }

  /**
   * Returns the status message reported by the service.
   *
   * @return the status message, or {@code null} if not set
   */
  public String getStatus() {
    return Status;
  }

  /**
   * Sets the status message.
   *
   * @param Status the status message to set
   */
  public void setStatus(String Status) {
    this.Status = Status;
  }

  /**
   * Returns a multi-line debug representation listing all three fields.
   */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class CellsResponse {\n");
    sb.append(" Cells: ").append(Cells).append("\n");
    sb.append(" Code: ").append(Code).append("\n");
    sb.append(" Status: ").append(Status).append("\n");
    sb.append("}\n");
    return sb.toString();
  }
}
| mit |
TGITS/programming-workouts | exercism/java/rational-numbers/src/main/java/Rational.java | 1176 | class Rational {
Rational(int numerator, int denominator) {
throw new UnsupportedOperationException("Delete this statement and write your own implementation.");
}
int getNumerator() {
throw new UnsupportedOperationException("Delete this statement and write your own implementation.");
}
int getDenominator() {
throw new UnsupportedOperationException("Delete this statement and write your own implementation.");
}
@Override
public String toString() {
return String.format("%d/%d", this.getNumerator(), this.getDenominator());
}
@Override
public boolean equals(Object obj) {
if (obj == null || !this.getClass().isAssignableFrom(obj.getClass())) {
return false;
}
Rational other = (Rational) obj;
return this.getNumerator() == other.getNumerator()
&& this.getDenominator() == other.getDenominator();
}
@Override
public int hashCode() {
int prime = 31;
int result = 1;
result = prime * result + this.getNumerator();
result = prime * result + this.getDenominator();
return result;
}
}
| mit |
phil-lopreiato/the-blue-alliance-android | android/src/main/java/com/thebluealliance/androidclient/datafeed/refresh/Refreshable.java | 457 | package com.thebluealliance.androidclient.datafeed.refresh;
import com.thebluealliance.androidclient.datafeed.refresh.RefreshController.RefreshType;
/**
 * An interface for an object that can be considered refreshable (it can be signaled to start
 * refreshing its content).
 */
public interface Refreshable {

    /**
     * Indicates that this object should start refreshing its content.
     *
     * @param refreshType the kind of refresh being requested, one of the
     *        {@code @RefreshType} constants declared on {@link RefreshController}
     */
    void onRefreshStart(@RefreshType int refreshType);
}
| mit |
hosamshahin/OpenDSA | SourceCode/Java/Lists/QueueTest.java | 908 | import java.io.*;
public class QueueTest {
  // Set to false by test() whenever an observed queue state is wrong.
  static boolean SUCCESS = true;

  /**
   * Exercises enqueue/dequeue/toString: fills Q with three values, checks its
   * contents, drains it into Q1, then checks that Q1 received everything and
   * that Q is empty afterwards. Failures are recorded in SUCCESS.
   */
  static void test(Queue Q, Queue Q1) {
    Q.enqueue(10);
    Q.enqueue(20);
    Q.enqueue(15);
    if (!Q.toString().equals("10 20 15 "))
      SUCCESS = false;
    while (Q.length() > 0)
      Q1.enqueue(Q.dequeue());
    if (!Q1.toString().equals("10 20 15 "))
      SUCCESS = false;
    // Q must be empty once fully drained.
    if (!Q.toString().equals(""))
      SUCCESS = false;
  }

  /**
   * Runs the test against both the array-based and linked implementations,
   * and writes a "success" marker file only if every check passed.
   */
  public static void main(String[] args) throws IOException {
    test(new AQueue(), new AQueue());
    test(new LQueue(), new LQueue());
    if (SUCCESS) {
      // try-with-resources guarantees the marker file is flushed and closed
      // even if println throws (the original leaked the writer on error).
      try (PrintWriter output = new PrintWriter("success")) {
        output.println("Success");
      }
      System.out.println("Success!");
    } else {
      System.out.println("Testing failed");
    }
  }
}
| mit |
mbologna/salt-netapi-client | src/main/java/com/suse/salt/netapi/AuthModule.java | 569 | package com.suse.salt.netapi;
/**
 * Salt authentication modules.
 *
 * <p>Each constant carries the lowercase module name used on the wire
 * (presumably as the "eauth" value — see the linked Salt documentation).
 *
 * @see <a href="http://docs.saltstack.com/en/latest/ref/auth/all/">Modules</a>
 */
public enum AuthModule {

    AUTO("auto"),
    DJANGO("django"),
    FILE("file"),
    KEYSTONE("keystone"),
    LDAP("ldap"),
    MYSQL("mysql"),
    PAM("pam"),
    PKI("pki"),
    REST("rest"),
    SHAREDSECRET("sharedsecret"),
    YUBICO("yubico");

    // Salt-side name of this authentication module.
    private final String value;

    AuthModule(String value) {
        this.value = value;
    }

    /**
     * Returns the Salt-side name of this authentication module.
     *
     * @return the lowercase module name, e.g. "pam"
     */
    public String getValue() {
        return value;
    }
}
| mit |
sankate/WePay-Java-SDK | sample_calls/BatchCalls.java | 2002 |
public class BatchCalls {
    /**
     * The calls in the main function below are not intended to be executed as they are.
     * Think of this code as a template for how you might structure API calls within your
     * own code. Below each call is an example of the result of the call (either the call's
     * response or how the call affects the objects it is called on).
     *
     * NOTE(review): this is deliberately non-compiling sample code — main() takes no
     * String[] argument and myAccessToken is assumed to be declared elsewhere.
     */
    public static void main() {
        /****************************************************************************/
        /**
         * CREATE call — builds two /account/create requests and submits them
         * as a single batch using Batch.create.
         */
        // First batched call: create an account named "Adam".
        BatchData call1 = new BatchData();
        call1.callClass = "Account";
        call1.callFunction = "create";
        call1.authorization = myAccessToken;
        JSONObject params1 = new JSONObject();
        params1.put("name", "Adam");
        params1.put("description", "first account created from batch");
        call1.parameters = params1;

        // Second batched call: create an account named "Bob".
        BatchData call2 = new BatchData();
        call2.callClass = "Account";
        call2.callFunction = "create";
        call2.authorization = myAccessToken;
        JSONObject params2 = new JSONObject();
        params2.put("name", "Bob");
        params2.put("description", "second account created from batch");
        call2.parameters = params2;

        // Submit both calls in one round trip; one Batch result per call.
        BatchData[] calls = {call1, call2};
        Batch[] responses = Batch.create(calls, myAccessToken);

        // Print responses
        for (int i=0; i < responses.length; i++) {
            System.out.println(responses[i].getCall());
            System.out.println(responses[i].getResponse());
        }
        /**
         responses:
         {
         "call": "/account/create",
         "reference_id": null,
         "response": {
         "account_id": 1938233429,
         "account_uri": "https://stage.wepay.com/account/1938233429"
         }
         },
         {
         "call": "/account/create",
         "reference_id": null,
         "response": {
         "account_id": 478040119,
         "account_uri": "https://stage.wepay.com/account/478040119"
         }
         }
         */
        /****************************************************************************/
    }
}
| mit |
ahmedvc/umple | UmpleUIJSFProvider/src/cruise/ui/jsf/templates/impl/fragment/GUI/ImmutableEditCharacter.java | 1340 | package cruise.ui.jsf.templates.impl.fragment.GUI;
import cruise.model.abstractObjects.IGenerator;
import cruise.umple.compiler.Attribute;;
/**
 * Emits the JSF markup fragment for a read-only (immutable) Character
 * attribute: an outputText for the label and an outputText with a
 * Character converter for the value.
 *
 * NOTE(review): this appears to be JET-generated template code — manual
 * edits may be overwritten on regeneration; confirm before refactoring.
 */
public class ImmutableEditCharacter implements IGenerator {

    // Line separator handed from create() to the instance initializer of NL;
    // it is only non-null for the duration of one create() call.
    protected static String nl;

    /**
     * Builds an instance whose NL constant uses the given line separator.
     * Synchronized because the separator is passed through the static
     * {@code nl} field, which is reset to null after construction.
     */
    public static synchronized ImmutableEditCharacter create(String lineSeparator)
    {
        nl = lineSeparator;
        ImmutableEditCharacter result = new ImmutableEditCharacter();
        nl = null;
        return result;
    }

    // Captures the static nl at construction time, or the platform default.
    public final String NL = nl == null ? (System.getProperties().getProperty("line.separator")) : nl;
    // Template fragments; #1# and #2# are placeholders filled in elsewhere.
    protected final String TEXT_1 = "<h:outputText value=\"#{#1#Bundle.";
    protected final String TEXT_2 = "}\" />" + NL + "<h:outputText value=\"#{#1#Bean.edited#2#.";
    protected final String TEXT_3 = "}\" >" + NL + "<f:converter converterId=\"javax.faces.Character\" />" + NL + "</h:outputText>";
    protected final String TEXT_4 = NL;

    /**
     * Generates the markup for the given {@link Attribute}: label keyed by
     * the attribute's upper-cased name, value bound to the attribute's name.
     *
     * @param argument must be an {@link Attribute} (cast unconditionally)
     */
    public String generate(Object argument)
    {
        final StringBuffer stringBuffer = new StringBuffer();
        Attribute attVar = (Attribute) argument;
        stringBuffer.append(TEXT_1);
        stringBuffer.append(attVar.getUpperCaseName());
        stringBuffer.append(TEXT_2);
        stringBuffer.append(attVar.getName());
        stringBuffer.append(TEXT_3);
        stringBuffer.append(TEXT_4);
        return stringBuffer.toString();
    }
} | mit |
CrackerStealth/smarthome | bundles/core/org.eclipse.smarthome.core.thing/src/main/java/org/eclipse/smarthome/core/thing/binding/builder/ChannelBuilder.java | 4674 | /**
* Copyright (c) 2014-2016 by the respective copyright holders.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.smarthome.core.thing.binding.builder;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.eclipse.smarthome.config.core.Configuration;
import org.eclipse.smarthome.core.thing.Channel;
import org.eclipse.smarthome.core.thing.ChannelUID;
import org.eclipse.smarthome.core.thing.type.ChannelKind;
import org.eclipse.smarthome.core.thing.type.ChannelType;
import org.eclipse.smarthome.core.thing.type.ChannelTypeUID;
/**
 * {@link ChannelBuilder} is responsible for creating {@link Channel}s.
 *
 * @author Dennis Nobel - Initial contribution
 * @author Alex Tugarev - Extended about default tags
 * @author Chris Jackson - Added properties and label/description
 */
public class ChannelBuilder {

    private ChannelUID channelUID;
    private String acceptedItemType;
    private ChannelKind kind;
    private Configuration configuration;
    private Set<String> defaultTags;
    private Map<String, String> properties;
    private String label;
    private String description;
    private ChannelTypeUID channelTypeUID;

    private ChannelBuilder(ChannelUID channelUID, String acceptedItemType, Set<String> defaultTags) {
        this.channelUID = channelUID;
        this.acceptedItemType = acceptedItemType;
        this.defaultTags = defaultTags;
        // Channels are STATE channels unless withKind() overrides it.
        this.kind = ChannelKind.STATE;
    }

    /**
     * Creates a channel builder for the given channel UID and item type.
     *
     * @param channelUID channel UID
     * @param acceptedItemType item type that is accepted by this channel
     * @return channel builder
     */
    public static ChannelBuilder create(ChannelUID channelUID, String acceptedItemType) {
        return new ChannelBuilder(channelUID, acceptedItemType, new HashSet<String>());
    }

    /**
     * Appends the channel type to the channel to build.
     *
     * @param channelTypeUID channel type UID
     * @return channel builder
     */
    public ChannelBuilder withType(ChannelTypeUID channelTypeUID) {
        this.channelTypeUID = channelTypeUID;
        return this;
    }

    /**
     * Appends a configuration to the channel to build.
     *
     * @param configuration configuration
     * @return channel builder
     */
    public ChannelBuilder withConfiguration(Configuration configuration) {
        this.configuration = configuration;
        return this;
    }

    /**
     * Adds properties to the channel.
     *
     * @param properties properties to add
     * @return channel builder
     */
    public ChannelBuilder withProperties(Map<String, String> properties) {
        this.properties = properties;
        return this;
    }

    /**
     * Sets the channel label. This allows overriding of the default label set in the {@link ChannelType}.
     *
     * @param label the channel label to override the label set in the {@link ChannelType}
     * @return channel builder
     */
    public ChannelBuilder withLabel(String label) {
        this.label = label;
        return this;
    }

    /**
     * Sets the channel description. This allows overriding of the default description set in the
     * {@link ChannelType}. (Fixed: this javadoc previously described the label.)
     *
     * @param description the channel description to override the description set in the {@link ChannelType}
     * @return channel builder
     */
    public ChannelBuilder withDescription(String description) {
        this.description = description;
        return this;
    }

    /**
     * Appends default tags to the channel to build.
     *
     * @param defaultTags default tags
     * @return channel builder
     */
    public ChannelBuilder withDefaultTags(Set<String> defaultTags) {
        this.defaultTags = defaultTags;
        return this;
    }

    /**
     * Sets the kind of the channel.
     *
     * @param kind the channel kind; must not be null
     * @return channel builder
     * @throws IllegalArgumentException if {@code kind} is null
     */
    public ChannelBuilder withKind(ChannelKind kind) {
        if (kind == null) {
            throw new IllegalArgumentException("kind must not be null");
        }
        this.kind = kind;
        return this;
    }

    /**
     * Builds and returns the channel.
     *
     * @return channel
     */
    public Channel build() {
        return new Channel(channelUID, channelTypeUID, acceptedItemType, kind, configuration, defaultTags, properties,
                label, description);
    }
}
| epl-1.0 |
afuechsel/openhab2 | addons/ui/org.openhab.ui.classic/src/main/java/org/openhab/ui/classic/internal/render/VideoRenderer.java | 3056 | /**
* Copyright (c) 2010-2019 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.ui.classic.internal.render;
import org.apache.commons.lang.StringUtils;
import org.eclipse.emf.common.util.EList;
import org.eclipse.smarthome.core.library.types.StringType;
import org.eclipse.smarthome.core.types.State;
import org.eclipse.smarthome.model.sitemap.Video;
import org.eclipse.smarthome.model.sitemap.Widget;
import org.eclipse.smarthome.ui.items.ItemUIRegistry;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
/**
 * {@link WidgetRenderer} implementation that produces the HTML markup for
 * Video widgets of a sitemap.
 *
 * @author Kai Kreuzer - Initial contribution and API
 *
 */
@Component(service = WidgetRenderer.class)
public class VideoRenderer extends AbstractWidgetRenderer {

    @Override
    public boolean canRender(Widget w) {
        return w instanceof Video;
    }

    @Override
    public EList<Widget> renderWidget(Widget w, StringBuilder sb) throws RenderException {
        Video video = (Video) w;
        String widgetId = itemUIRegistry.getWidgetId(w);
        String sitemap = w.eResource().getURI().path();
        String encoding = video.getEncoding() == null ? "" : video.getEncoding().toLowerCase();

        String snippet;
        if (encoding.contains("mjpeg")) {
            // Browsers can usually render an MJPEG stream directly in an <img>,
            // so reuse the image snippet without any refresh attributes.
            snippet = getSnippet("image");
            snippet = StringUtils.replace(snippet, "%setrefresh%", "");
            snippet = StringUtils.replace(snippet, "%refresh%", "");
        } else {
            snippet = getSnippet("video");
        }

        String url = "../proxy?sitemap=" + sitemap + "&widgetId=" + widgetId;
        String mediaType = "";
        if (encoding.contains("hls")) {
            // HTTP Live Streams are not proxied: take the URL from the item
            // state (or the configured URL) and advertise the HLS media type.
            State state = itemUIRegistry.getState(w);
            url = (state instanceof StringType) ? state.toString() : video.getUrl();
            mediaType = "type=\"application/vnd.apple.mpegurl\"";
        }
        snippet = StringUtils.replace(snippet, "%url%", url);
        snippet = StringUtils.replace(snippet, "%media_type%", mediaType);
        sb.append(snippet);
        return null;
    }

    @Override
    @Reference
    protected void setItemUIRegistry(ItemUIRegistry itemUIRegistry) {
        super.setItemUIRegistry(itemUIRegistry);
    }

    @Override
    protected void unsetItemUIRegistry(ItemUIRegistry itemUIRegistry) {
        super.unsetItemUIRegistry(itemUIRegistry);
    }
}
| epl-1.0 |
TypeFox/che | plugins/plugin-java/che-plugin-java-ext-lang-client/src/main/java/org/eclipse/che/ide/ext/java/client/command/JavaCommandPageView.java | 1393 | /*
* Copyright (c) 2012-2017 Red Hat, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
package org.eclipse.che.ide.ext.java.client.command;
import com.google.inject.ImplementedBy;
import org.eclipse.che.ide.api.mvp.View;
/**
 * The view of {@link JavaCommandPagePresenter}: exposes the project path,
 * the main class, and the command line of a Java command for editing.
 *
 * @author Valeriy Svydenko
 */
@ImplementedBy(JavaCommandPageViewImpl.class)
public interface JavaCommandPageView extends View<JavaCommandPageView.ActionDelegate> {

    /** Returns the project path currently shown in the view. */
    String getProject();

    /** Sets the project path to show in the view. */
    void setProject(String project);

    /** Returns the path to the main class currently shown in the view. */
    String getMainClass();

    /** Sets the path to the main class to show in the view. */
    void setMainClass(String mainClass);

    /** Returns the command line currently shown in the view. */
    String getCommandLine();

    /** Sets the command line to show in the view. */
    void setCommandLine(String commandLine);

    /** Action handler for the view actions/controls. */
    interface ActionDelegate {

        /** Called when 'Choose Main Class' button has been clicked. */
        void onAddMainClassBtnClicked();

        /** Called when the command line has been changed by the user. */
        void onCommandLineChanged();
    }
}
| epl-1.0 |
willrogers/dawnsci | org.eclipse.dawnsci.plotting.api/src/org/eclipse/dawnsci/plotting/api/expressions/IExpressionObject.java | 3590 | /*-
*******************************************************************************
* Copyright (c) 2011, 2014 Diamond Light Source Ltd.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Matthew Gerring - initial API and implementation and/or initial documentation
*******************************************************************************/
package org.eclipse.dawnsci.plotting.api.expressions;
import java.util.List;
import java.util.Map;
import org.eclipse.dawnsci.analysis.api.dataset.IDataset;
import org.eclipse.dawnsci.analysis.api.dataset.ILazyDataset;
import org.eclipse.dawnsci.analysis.api.monitor.IMonitor;
/**
 * An object which can resolve data maths and can be used
 * as data in Viewer models in jface.
 */
public interface IExpressionObject {

    /**
     * The name of the expression, e.g. "Q".
     *
     * @return the expression name
     */
    public String getExpressionName();

    /**
     * Sets the expression name. For instance when the user
     * types in a new name.
     *
     * @param name the new expression name
     */
    public void setExpressionName(String name);

    /**
     * The string expression, e.g. "x*y".
     *
     * @return the expression string
     */
    public String getExpressionString();

    /**
     * Sets the expression string. For instance when the user
     * types in a new string.
     *
     * @param expression the new expression string
     */
    public void setExpressionString(String expression);

    /**
     * If the expression value is cached, the
     * clear method will nullify this cache.
     */
    public void clear();

    /**
     * Evaluates and caches the expression if necessary.
     *
     * @param suggestedName may be null
     * @param monitor progress monitor
     * @return the evaluated value of the expression.
     * @throws Exception if the expression cannot be evaluated
     */
    public IDataset getDataSet(String suggestedName, IMonitor monitor) throws Exception;

    /**
     * Guesses the data without evaluating the expression, instead looks for
     * a reference to a concrete data set and uses the attributes (shape etc)
     * of this.
     *
     * *WARNING* Is an educated guess at the lazy dataset, will not always work.
     *
     * @param suggestedName should not be null, should be the data name or the variable name for expressions.
     * @param monitor progress monitor
     * @return null if the guess cannot be made or there was any kind of error.
     */
    public ILazyDataset getLazyDataSet(String suggestedName, IMonitor monitor);

    /**
     * If the expression has been evaluated once, this will give the cached result. Otherwise
     * it will return null.
     *
     * @return the cached lazy dataset, or null if never evaluated
     */
    public ILazyDataset getCachedLazyDataSet();

    /**
     * @param monitor progress monitor
     * @return true if the expression contained in the object has legal syntax.
     */
    public boolean isValid(IMonitor monitor);

    /**
     * A string, which may be null, providing feedback as to the problem with the expression.
     *
     * @return the reason the expression is invalid, or null
     */
    public String getInvalidReason();

    /**
     * Used to give a nicer error message when an expression is bad.
     *
     * @param monitor progress monitor
     * @return the variable names that could not be resolved
     */
    public List<String> getInvalidVariables(IMonitor monitor);

    /**
     * Get all functions currently in the expression engine.
     *
     * @return the functions, keyed by name
     */
    public Map<String,Object> getFunctions();

    /**
     * @param variableNames names to look for
     * @return true if one or more of variableNames is referenced by this expression
     */
    public boolean containsVariable(String... variableNames);

    /**
     * The manager of all available expressions.
     *
     * @return the variable manager
     */
    public IVariableManager getVariableManager();
}
| epl-1.0 |
TypeFox/che | wsagent/che-core-api-git-shared/src/main/java/org/eclipse/che/api/git/shared/RemoteUpdateRequest.java | 1513 | /*
* Copyright (c) 2012-2017 Red Hat, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
package org.eclipse.che.api.git.shared;
import java.util.List;
import org.eclipse.che.dto.shared.DTO;
/**
 * Request to update tracked repositories.
 *
 * @author andrew00x
 */
@DTO
public interface RemoteUpdateRequest {

    /** @return remote name */
    String getName();

    void setName(String name);

    /** @return list of tracked branches */
    List<String> getBranches();

    void setBranches(List<String> branches);

    /**
     * @return if <code>true</code> then {@link #getBranches()} are added to the list of currently
     *     tracked branches instead of replacing it
     */
    boolean isAddBranches();

    void setAddBranches(boolean isAddBranches);

    /** @return remote URLs to be added */
    List<String> getAddUrl();

    void setAddUrl(List<String> addUrl);

    /** @return remote URLs to be removed */
    List<String> getRemoveUrl();

    void setRemoveUrl(List<String> removeUrl);

    /** @return remote push URLs to be added */
    List<String> getAddPushUrl();

    void setAddPushUrl(List<String> addPushUrl);

    /** @return remote push URLs to be removed */
    List<String> getRemovePushUrl();

    void setRemovePushUrl(List<String> removePushUrl);
}
| epl-1.0 |
watou/openhab | bundles/binding/org.openhab.binding.velux/src/main/java/org/openhab/binding/velux/things/VeluxGwWLAN.java | 1198 | /**
* Copyright (c) 2010-2019 by the respective copyright holders.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.binding.velux.things;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * <B>Velux</B> gateway WLAN configuration.
 * <P>
 * Combined set of information describing the WLAN settings of the Velux
 * gateway: the service set identifier (SSID) and the network password.
 * (Fixed: the javadoc previously described a "product", copied from
 * VeluxProduct.)
 *
 * @author Guenther Schreiner - initial contribution.
 * @since 1.13.0
 */
public class VeluxGwWLAN {
    private final Logger logger = LoggerFactory.getLogger(VeluxGwWLAN.class);

    // Class internal

    /** Service set identifier (network name) of the gateway WLAN. */
    private final String serviceSetID;
    /** Password of the gateway WLAN. */
    private final String password;

    // Constructor

    /**
     * Creates an immutable WLAN configuration holder.
     *
     * @param serviceSetID the WLAN network name (SSID)
     * @param password the WLAN password
     */
    public VeluxGwWLAN(String serviceSetID, String password) {
        // Fixed copy-paste bug: trace previously logged "VeluxProduct() created."
        logger.trace("VeluxGwWLAN() created.");
        this.serviceSetID = serviceSetID;
        this.password = password;
    }

    // Class access methods

    /** @return the WLAN service set identifier (SSID) */
    public String getSSID() {
        return this.serviceSetID;
    }

    /** @return the WLAN password */
    public String getPassword() {
        return this.password;
    }
}
| epl-1.0 |
HolodeckOne-Minecraft/ProtocolLib | modules/ProtocolLib/src/main/java/com/comphenix/protocol/injector/LoginPackets.java | 3020 | package com.comphenix.protocol.injector;
import org.bukkit.Bukkit;
import com.comphenix.protocol.PacketType;
import com.comphenix.protocol.Packets;
import com.comphenix.protocol.concurrency.IntegerSet;
import com.comphenix.protocol.events.ConnectionSide;
import com.comphenix.protocol.utility.MinecraftReflection;
import com.comphenix.protocol.utility.MinecraftVersion;
/**
 * Packets that are known to be transmitted during login.
 * <p>
 * This may be dynamically extended later.
 * @author Kristian
 */
class LoginPackets {
    // Login-phase packet IDs, tracked separately per connection direction.
    private IntegerSet clientLogin = new IntegerSet(Packets.PACKET_COUNT);
    private IntegerSet serverLogin = new IntegerSet(Packets.PACKET_COUNT);

    @SuppressWarnings("deprecation")
    public LoginPackets(MinecraftVersion version) {
        // Ordinary login handshake and encryption exchange
        clientLogin.add(Packets.Client.HANDSHAKE);
        serverLogin.add(Packets.Server.KEY_REQUEST);
        clientLogin.add(Packets.Client.KEY_RESPONSE);
        serverLogin.add(Packets.Server.KEY_RESPONSE);
        clientLogin.add(Packets.Client.CLIENT_COMMAND);
        serverLogin.add(Packets.Server.LOGIN);

        // Server list ping
        clientLogin.add(Packets.Client.GET_INFO);

        // In 1.6.2, Minecraft started sending CUSTOM_PAYLOAD in the server
        // list protocol; MCPC+/Cauldron contains Forge, which uses
        // CUSTOM_PAYLOAD during login as well.
        if (version.isAtLeast(MinecraftVersion.HORSE_UPDATE) || isCauldronOrMCPC()) {
            clientLogin.add(Packets.Client.CUSTOM_PAYLOAD);
        }
        if (isCauldronOrMCPC()) {
            serverLogin.add(Packets.Server.CUSTOM_PAYLOAD);
        }
        serverLogin.add(Packets.Server.KICK_DISCONNECT);
    }

    /**
     * Determine if we are running on an MCPC+ or Cauldron server.
     * @return TRUE if we are, FALSE otherwise.
     */
    private static boolean isCauldronOrMCPC() {
        final String serverVersion = Bukkit.getServer().getVersion();
        return serverVersion.contains("MCPC") || serverVersion.contains("Cauldron");
    }

    /**
     * Determine if a packet ID may be sent during login from a given direction.
     * @param packetId - the ID of the packet.
     * @param side - the direction.
     * @return TRUE if it may, FALSE otherwise.
     */
    @Deprecated
    public boolean isLoginPacket(int packetId, ConnectionSide side) {
        switch (side) {
            case CLIENT_SIDE:
                return clientLogin.contains(packetId);
            case SERVER_SIDE:
                return serverLogin.contains(packetId);
            case BOTH:
                return clientLogin.contains(packetId) || serverLogin.contains(packetId);
            default:
                throw new IllegalArgumentException("Unknown connection side: " + side);
        }
    }

    /**
     * Determine if a given packet may be sent during login.
     * @param type - the packet type.
     * @return TRUE if it may, FALSE otherwise.
     */
    public boolean isLoginPacket(PacketType type) {
        if (!MinecraftReflection.isUsingNetty()) {
            // Legacy (pre-Netty) protocol: fall back to the ID-based lookup.
            return isLoginPacket(type.getLegacyId(), type.getSender().toSide());
        }
        return PacketType.Login.Client.getInstance().hasMember(type)
                || PacketType.Login.Server.getInstance().hasMember(type)
                || PacketType.Status.Client.getInstance().hasMember(type)
                || PacketType.Status.Server.getInstance().hasMember(type);
    }
}
| gpl-2.0 |
FauxFaux/jdk9-jdk | test/com/sun/management/GarbageCollectorMXBean/LastGCInfo.java | 4220 | /*
* Copyright (c) 2004, 2015 Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 4982301
* @summary Sanity Test for GarbageCollectorMXBean.getLastGcInfo().
* @author Mandy Chung
*
* @modules jdk.management
* @run main/othervm -XX:-ExplicitGCInvokesConcurrent LastGCInfo
*/
// Passing "-XX:-ExplicitGCInvokesConcurrent" to force System.gc()
// run on foreground when CMS is used and prevent situations when "GcInfo"
// is missing even though System.gc() was successfuly processed.
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
import java.lang.management.MemoryPoolMXBean;
import java.util.*;
import com.sun.management.GcInfo;
import com.sun.management.GarbageCollectorMXBean;
/**
 * Sanity test for {@link GarbageCollectorMXBean#getLastGcInfo()}: forces a GC, then
 * verifies that at least one collector reports GcInfo and that the reported before/after
 * memory usage covers every memory pool.
 */
public class LastGCInfo {
    public static void main(String[] argv) throws Exception {
        boolean hasGcInfo = false;

        System.gc();
        // Use generics and enhanced-for instead of raw List/ListIterator (original used
        // raw types, which hide unchecked casts from the compiler).
        for (java.lang.management.GarbageCollectorMXBean mgr :
                 ManagementFactory.getGarbageCollectorMXBeans()) {
            // Only the com.sun.management extension interface exposes getLastGcInfo()
            if (mgr instanceof GarbageCollectorMXBean) {
                GarbageCollectorMXBean gc = (GarbageCollectorMXBean) mgr;
                GcInfo info = gc.getLastGcInfo();
                if (info != null) {
                    checkGcInfo(gc.getName(), info);
                    hasGcInfo = true;
                }
            }
        }

        if (!hasGcInfo) {
            throw new RuntimeException("No GcInfo returned");
        }
        System.out.println("Test passed.");
    }

    /**
     * Validates a single collector's GcInfo: prints its statistics, checks that every pool
     * present before the GC is also reported after, and that all memory pools are covered.
     *
     * @param name collector name, used only for output
     * @param info the last-GC statistics to validate
     * @throws Exception if the after-GC usage is missing a pool or a pool is not reported
     */
    private static void checkGcInfo(String name, GcInfo info) throws Exception {
        System.out.println("GC statistic for : " + name);
        System.out.print("GC #" + info.getId());
        System.out.print(" start:" + info.getStartTime());
        System.out.print(" end:" + info.getEndTime());
        System.out.println(" (" + info.getDuration() + "ms)");

        Map<String, MemoryUsage> usage = info.getMemoryUsageBeforeGc();
        List<String> pnames = new ArrayList<>();
        for (Map.Entry<String, MemoryUsage> entry : usage.entrySet()) {
            String poolname = entry.getKey();
            pnames.add(poolname);
            MemoryUsage busage = entry.getValue();
            MemoryUsage ausage = info.getMemoryUsageAfterGc().get(poolname);
            if (ausage == null) {
                throw new RuntimeException("After Gc Memory does not exist" +
                    " for " + poolname);
            }
            System.out.println("Usage for pool " + poolname);
            System.out.println("   Before GC: " + busage);
            System.out.println("   After GC: " + ausage);
        }

        // check if memory usage for all memory pools are returned
        for (MemoryPoolMXBean p : ManagementFactory.getMemoryPoolMXBeans()) {
            if (!pnames.contains(p.getName())) {
                throw new RuntimeException("GcInfo does not contain " +
                    "memory usage for pool " + p.getName());
            }
        }
    }
}
| gpl-2.0 |
hexbinary/landing | src/main/java/org/oscarehr/casemgmt/web/ClientImageAction.java | 3177 | /**
*
* Copyright (c) 2005-2012. Centre for Research on Inner City Health, St. Michael's Hospital, Toronto. All Rights Reserved.
* This software is published under the GPL GNU General Public License.
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* This software was written for
* Centre for Research on Inner City Health, St. Michael's Hospital,
* Toronto, Ontario, Canada
*/
package org.oscarehr.casemgmt.web;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.log4j.Logger;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.DynaActionForm;
import org.apache.struts.actions.DispatchAction;
import org.apache.struts.upload.FormFile;
import org.oscarehr.casemgmt.model.ClientImage;
import org.oscarehr.casemgmt.service.ClientImageManager;
import org.oscarehr.casemgmt.web.formbeans.ClientImageFormBean;
import org.oscarehr.util.MiscUtils;
/**
 * Struts dispatch action that receives an uploaded client image and stores it
 * (raw bytes plus file extension) against the client id held in the session.
 */
public class ClientImageAction extends DispatchAction {

	// Made final: the logger is assigned once and never reassigned.
	private static final Logger log = MiscUtils.getLogger();

	private ClientImageManager clientImageManager;

	/** Setter used by the framework to inject the persistence manager. */
	public void setClientImageManager(ClientImageManager mgr) {
		this.clientImageManager = mgr;
	}

	/**
	 * Saves the uploaded image for the client whose id is stored in the session
	 * under "clientId".
	 *
	 * @return always the "success" forward; failures are only logged
	 */
	public ActionForward saveImage(ActionMapping mapping,
			ActionForm form,
			HttpServletRequest request,
			HttpServletResponse response) {
		DynaActionForm imageForm = (DynaActionForm)form;
		ClientImageFormBean formBean = (ClientImageFormBean)imageForm.get("clientImage");

		HttpSession session = request.getSession(true);
		String id = (String)(session.getAttribute("clientId"));
		log.info("client image upload: id=" + id);

		FormFile formFile = formBean.getImagefile();
		// NOTE(review): if the filename contains no '.', lastIndexOf returns -1 and the
		// whole filename is stored as the "type" — confirm whether that is intended.
		String type = formFile.getFileName().substring(formFile.getFileName().lastIndexOf(".") + 1);
		if (type != null) type = type.toLowerCase();
		log.info("extension = " + type);

		try {
			byte[] imageData = formFile.getFileData();
			ClientImage clientImage = new ClientImage();
			clientImage.setDemographic_no(Integer.parseInt(id));
			clientImage.setImage_data(imageData);
			clientImage.setImage_type(type);
			clientImageManager.saveClientImage(clientImage);
		} catch (Exception e) {
			log.error("Error", e);
			// TODO: surface the failure to the page instead of reporting success below
		}

		// Fixed: Boolean.TRUE instead of the deprecated new Boolean(true) constructor.
		request.setAttribute("success", Boolean.TRUE);
		return mapping.findForward("success");
	}
}
| gpl-2.0 |
nvoron23/opensearchserver | src/main/java/com/jaeksoft/searchlib/analysis/filter/RemoveTagFilter.java | 2319 | /**
* License Agreement for OpenSearchServer
*
* Copyright (C) 2013 Emmanuel Keller / Jaeksoft
*
* http://www.open-search-server.com
*
* This file is part of OpenSearchServer.
*
* OpenSearchServer is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* OpenSearchServer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenSearchServer.
* If not, see <http://www.gnu.org/licenses/>.
**/
package com.jaeksoft.searchlib.analysis.filter;
import java.io.IOException;
import org.apache.lucene.analysis.TokenStream;
import com.jaeksoft.searchlib.SearchLibException;
import com.jaeksoft.searchlib.analysis.ClassPropertyEnum;
import com.jaeksoft.searchlib.analysis.FilterFactory;
import com.jaeksoft.searchlib.util.StringUtils;
/**
 * Token filter factory that strips HTML/XML tags from each token, optionally
 * preserving a configurable whitelist of allowed tags.
 */
public class RemoveTagFilter extends FilterFactory {

	private static final String[] EMPTY_TAG_ARRAY = {};

	// Tags to preserve when stripping; empty means every tag is removed.
	private String[] allowedTagArray = EMPTY_TAG_ARRAY;

	/** Token filter that rewrites each term with its tags removed. */
	public class RemoveTagTokenFilter extends AbstractTermFilter {

		protected RemoveTagTokenFilter(TokenStream input) {
			super(input);
		}

		@Override
		public final boolean incrementToken() throws IOException {
			// Fixed: the original wrapped this body in a for(;;) loop that could never
			// iterate more than once, because both branches return on the first pass.
			if (!input.incrementToken())
				return false;
			createToken(StringUtils.removeTag(termAtt.toString(),
					allowedTagArray));
			return true;
		}
	}

	/**
	 * Reacts to configuration changes: parses the ALLOWED_TAGS property into the
	 * whitelist array (null resets it to empty).
	 */
	@Override
	protected void checkValue(ClassPropertyEnum prop, String value)
			throws SearchLibException {
		if (prop == ClassPropertyEnum.ALLOWED_TAGS) {
			if (value == null)
				allowedTagArray = EMPTY_TAG_ARRAY;
			else
				allowedTagArray = StringUtils.split(value);
		}
	}

	@Override
	protected void initProperties() throws SearchLibException {
		super.initProperties();
		// Register the ALLOWED_TAGS property (default empty, 20x1 text box in the UI)
		addProperty(ClassPropertyEnum.ALLOWED_TAGS, "", null, 20, 1);
	}

	@Override
	public TokenStream create(TokenStream tokenStream) {
		return new RemoveTagTokenFilter(tokenStream);
	}
}
| gpl-3.0 |
WhisperSystems/Signal-Android | app/src/main/java/org/thoughtcrime/securesms/messages/RestStrategy.java | 4475 | package org.thoughtcrime.securesms.messages;
import androidx.annotation.NonNull;
import androidx.annotation.WorkerThread;
import org.signal.core.util.logging.Log;
import org.thoughtcrime.securesms.dependencies.ApplicationDependencies;
import org.thoughtcrime.securesms.jobmanager.JobManager;
import org.thoughtcrime.securesms.jobmanager.JobTracker;
import org.thoughtcrime.securesms.jobs.MarkerJob;
import org.thoughtcrime.securesms.jobs.PushDecryptMessageJob;
import org.thoughtcrime.securesms.jobs.PushProcessMessageJob;
import org.whispersystems.libsignal.util.guava.Optional;
import org.whispersystems.signalservice.api.SignalServiceMessageReceiver;
import java.io.IOException;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Retrieves messages over the REST endpoint.
*/
public class RestStrategy extends MessageRetrievalStrategy {

  private static final String TAG = Log.tag(RestStrategy.class);

  /**
   * Pulls all queued envelopes from the server over REST, enqueues a decrypt job per envelope,
   * then blocks until the decrypt queue and any per-conversation process queues it spawned have
   * drained (or we run out of time).
   *
   * @param timeout socket read timeout in milliseconds used while retrieving messages
   * @return true on success (even if nothing was retrieved), false on a network failure
   */
  @WorkerThread
  @Override
  public boolean execute(long timeout) {
    long                    startTime     = System.currentTimeMillis();
    JobManager              jobManager    = ApplicationDependencies.getJobManager();
    QueueFindingJobListener queueListener = new QueueFindingJobListener();

    // The processor is a shared resource; try-with-resources guarantees it is released.
    try (IncomingMessageProcessor.Processor processor = ApplicationDependencies.getIncomingMessageProcessor().acquire()) {
      // Listen for jobs landing on PushProcessMessageJob queues so we can discover which
      // per-conversation queues were created while the decrypt jobs run.
      jobManager.addListener(job -> job.getParameters().getQueue() != null && job.getParameters().getQueue().startsWith(PushProcessMessageJob.QUEUE_PREFIX), queueListener);

      int jobCount = enqueuePushDecryptJobs(processor, startTime, timeout);

      if (jobCount == 0) {
        Log.d(TAG, "No PushDecryptMessageJobs were enqueued.");
        return true;
      } else {
        Log.d(TAG, jobCount + " PushDecryptMessageJob(s) were enqueued.");
      }

      // Give the decrypt queue a fixed 10 second budget; whatever is left over is spent
      // waiting on the discovered process queues below.
      long timeRemainingMs = blockUntilQueueDrained(PushDecryptMessageJob.QUEUE, TimeUnit.SECONDS.toMillis(10));

      Set<String> processQueues = queueListener.getQueues();
      Log.d(TAG, "Discovered " + processQueues.size() + " queue(s): " + processQueues);

      if (timeRemainingMs > 0) {
        // Drain each discovered queue in turn, passing the shrinking time budget along.
        Iterator<String> iter = processQueues.iterator();
        while (iter.hasNext() && timeRemainingMs > 0) {
          timeRemainingMs = blockUntilQueueDrained(iter.next(), timeRemainingMs);
        }
        if (timeRemainingMs <= 0) {
          Log.w(TAG, "Ran out of time while waiting for queues to drain.");
        }
      } else {
        Log.w(TAG, "Ran out of time before we could even wait on individual queues!");
      }

      return true;
    } catch (IOException e) {
      // Network failure: reset the receiver so the next attempt starts from a clean state.
      Log.w(TAG, "Failed to retrieve messages. Resetting the SignalServiceMessageReceiver.", e);
      ApplicationDependencies.resetSignalServiceMessageReceiver();
      return false;
    } finally {
      // Always unregister, even on failure, so the listener cannot leak.
      jobManager.removeListener(queueListener);
    }
  }

  /**
   * Retrieves all pending envelopes over REST and enqueues a PushDecryptMessageJob for each one
   * the processor accepts.
   *
   * @param processor acquired message processor used to turn envelopes into jobs
   * @param startTime wall-clock start of the whole operation, used only for log suffixes
   * @param timeout   socket read timeout in milliseconds
   * @return the number of decrypt jobs that were enqueued
   * @throws IOException if retrieval from the server fails
   */
  private static int enqueuePushDecryptJobs(IncomingMessageProcessor.Processor processor, long startTime, long timeout)
      throws IOException
  {
    SignalServiceMessageReceiver receiver = ApplicationDependencies.getSignalServiceMessageReceiver();
    AtomicInteger                jobCount = new AtomicInteger(0);

    receiver.setSoTimeoutMillis(timeout);

    receiver.retrieveMessages(envelope -> {
      Log.i(TAG, "Retrieved an envelope." + timeSuffix(startTime));
      // processEnvelope returns null when no job was needed for this envelope.
      String jobId = processor.processEnvelope(envelope);

      if (jobId != null) {
        jobCount.incrementAndGet();
      }
      Log.i(TAG, "Successfully processed an envelope." + timeSuffix(startTime));
    });

    return jobCount.get();
  }

  /**
   * Blocks until every job already on the given queue has finished, by enqueueing a MarkerJob
   * on that queue and waiting for it to run.
   *
   * @param queue     the job queue to drain
   * @param timeoutMs maximum time to wait, in milliseconds
   * @return the portion of {@code timeoutMs} that was not consumed (may be negative)
   */
  private static long blockUntilQueueDrained(@NonNull String queue, long timeoutMs) {
    long             startTime  = System.currentTimeMillis();
    final JobManager jobManager = ApplicationDependencies.getJobManager();
    final MarkerJob  markerJob  = new MarkerJob(queue);

    Optional<JobTracker.JobState> jobState = jobManager.runSynchronously(markerJob, timeoutMs);

    if (!jobState.isPresent()) {
      Log.w(TAG, "Timed out waiting for " + queue + " job(s) to finish!");
    }

    long endTime  = System.currentTimeMillis();
    long duration = endTime - startTime;

    Log.d(TAG, "Waited " + duration + " ms for the " + queue + " job(s) to finish.");
    return timeoutMs - duration;
  }

  @Override
  public @NonNull String toString() {
    return Log.tag(RestStrategy.class);
  }
}
| gpl-3.0 |
NeuronRobotics/WalnutiQ | src/test/java/model/MARK_II/connectTypes/SensorCellsToRegionRandomConnectTest.java | 1575 | package model.MARK_II.connectTypes;
import junit.framework.TestCase;
import model.MARK_II.region.Column;
import model.MARK_II.region.Region;
import model.MARK_II.sensory.SensorCell;
import model.MARK_II.sensory.VisionCell;
/**
* @author Quinn Liu (quinnliu@vt.edu)
* @version July 14th, 2013
*/
/**
 * Unit test for {@link SensorCellsToRegionRandomConnect}: connects a 66x66 grid of
 * vision cells to an 8x8 region and verifies every column receives 72 synapses.
 *
 * @author Quinn Liu (quinnliu@vt.edu)
 * @version July 14th, 2013
 */
public class SensorCellsToRegionRandomConnectTest extends TestCase {
    private SensorCellsToRegionRandomConnect connectType;

    public void setUp() {
        this.connectType = new SensorCellsToRegionRandomConnect();
    }

    public void test_connect() {
        Region leafRegion = new Region("leafRegion", 8, 8, 4, 20, 3);

        // Build the full 66x66 grid of vision cells feeding the region.
        SensorCell[][] sensorCells = new VisionCell[66][66];
        for (int r = 0; r < sensorCells.length; r++) {
            for (int c = 0; c < sensorCells[r].length; c++) {
                sensorCells[r][c] = new VisionCell();
            }
        }

        this.connectType.connect(sensorCells, leafRegion.getColumns(), 2, 2);

        // Every column's proximal segment should end up with exactly 72 synapses.
        Column[][] columns = leafRegion.getColumns();
        int regionRows = leafRegion.getNumberOfRowsAlongRegionYAxis();
        int regionCols = leafRegion.getNumberOfColumnsAlongRegionXAxis();
        for (int y = 0; y < regionRows; y++) {
            for (int x = 0; x < regionCols; x++) {
                assertEquals(72, columns[y][x].getProximalSegment().getSynapses().size());
            }
        }
    }
}
| gpl-3.0 |
jmkao/Photonic3D | host/src/main/java/org/area515/resinprinter/job/render/RenderingCache.java | 1620 | package org.area515.resinprinter.job.render;
import java.awt.image.BufferedImage;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
public class RenderingCache {
private static final Logger logger = LogManager.getLogger();
private LoadingCache<Object, RenderingContext> imageSync = CacheBuilder.newBuilder().softValues().build(
new CacheLoader<Object, RenderingContext>() {
@Override
public RenderingContext load(Object key) throws Exception {
return new RenderingContext();
}
});
private Object currentImagePointer = Boolean.TRUE;
public RenderingContext getOrCreateIfMissing(Object imageToBuild) {
try {
return imageSync.get(imageToBuild);
} catch (ExecutionException e) {
logger.error(e);
return null;
}
}
public void clearCache(Object imageToBuild) {
imageSync.invalidate(imageToBuild);
}
public ReentrantLock getCurrentLock() {
return getOrCreateIfMissing(currentImagePointer).getLock();
}
public BufferedImage getCurrentImage() {
return getOrCreateIfMissing(currentImagePointer).getPrintableImage();
}
public Double getCurrentArea() {
return getOrCreateIfMissing(currentImagePointer).getArea();
}
public Object getCurrentRenderingPointer() {
return currentImagePointer;
}
public void setCurrentRenderingPointer(Object pointer) {
currentImagePointer = pointer;
}
} | gpl-3.0 |
theprogrammingchronicles/tpc-tdd-exercises | tdd-lesson-6/tdd-6-6-integration/tdd-business-layer/src/main/java/com/programmingchronicles/tdd/addressbook/support/UUIDIdGenerator.java | 1375 | /*
* Copyright (C) 2010-2011, Pedro Ballesteros <pedro@theprogrammingchronicles.com>
*
* This file is part of The Programming Chronicles Test-Driven Development
* Exercises(http://theprogrammingchronicles.com/)
*
* This copyrighted material is free software: you can redistribute it
* and/or modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This material is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this material. This copy is available in LICENSE-GPL.txt
* file. If not, see <http://www.gnu.org/licenses/>.
*/
package com.programmingchronicles.tdd.addressbook.support;
import com.programmingchronicles.tdd.addressbook.IdGenerator;
import java.util.UUID;
/**
* Implementación de IdGenerator basada en la generación de UUIDs
*
* @author Pedro Ballesteros <pedro@theprogrammingchronicles.com>
*/
/**
 * {@link IdGenerator} implementation that produces identifiers from randomly
 * generated UUIDs.
 *
 * @author Pedro Ballesteros <pedro@theprogrammingchronicles.com>
 */
public class UUIDIdGenerator implements IdGenerator {

    /** Returns the string form of a freshly generated random UUID. */
    @Override
    public String newId() {
        final UUID uuid = UUID.randomUUID();
        return uuid.toString();
    }
}
| gpl-3.0 |
SoftwareEngineeringToolDemos/FSE-2010-Ref-Finder | code/tyRuBa/engine/RBExpression.java | 4958 | /*
* Ref-Finder
* Copyright (C) <2015> <PLSE_UCLA>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package tyRuBa.engine;
import java.io.PrintStream;
import java.util.Collection;
import junit.framework.Assert;
import tyRuBa.engine.compilation.CompilationContext;
import tyRuBa.engine.compilation.Compiled;
import tyRuBa.engine.visitor.CollectFreeVarsVisitor;
import tyRuBa.engine.visitor.CollectVarsVisitor;
import tyRuBa.engine.visitor.ExpressionVisitor;
import tyRuBa.engine.visitor.SubstituteVisitor;
import tyRuBa.modes.ErrorMode;
import tyRuBa.modes.Factory;
import tyRuBa.modes.Mode;
import tyRuBa.modes.ModeCheckContext;
import tyRuBa.modes.PredInfoProvider;
import tyRuBa.modes.TypeEnv;
import tyRuBa.modes.TypeModeError;
import tyRuBa.tdbc.PreparedQuery;
/**
 * Base class for all TyRuBa expressions. An expression is type/mode checked and
 * converted to normal form before being compiled for execution by the engine.
 * Instances are cloneable so that {@link #makeModed} can attach mode information
 * to a copy without mutating the original.
 */
public abstract class RBExpression
    implements Cloneable
{
    // Mode this expression was converted to, or null before mode checking.
    private Mode mode = null;
    // Binding context produced by mode checking, or null before mode checking.
    private ModeCheckContext newContext = null;

    /** Compiles this expression for execution in the given context. */
    public abstract Compiled compile(CompilationContext paramCompilationContext);

    /**
     * Normalizes, type checks and mode checks this expression, producing a query
     * ready to be run by the engine.
     *
     * @throws TypeModeError if the expression cannot be converted to any declared mode
     */
    PreparedQuery prepareForRunning(QueryEngine engine)
        throws TypeModeError
    {
        RBExpression converted = convertToNormalForm();
        TypeEnv resultEnv = converted.typecheck(engine.rulebase(), Factory.makeTypeEnv());
        RBExpression result =
            converted.convertToMode(Factory.makeModeCheckContext(engine.rulebase()));
        if ((result.getMode() instanceof ErrorMode)) {
            throw new TypeModeError(
                this + " cannot be converted to any declared mode\n" +
                "    " + result.getMode());
        }
        if (!RuleBase.silent)
        {
            System.err.println("inferred types: " + resultEnv);
            System.err.println("converted to Mode: " + result);
        }
        return new PreparedQuery(engine, result, resultEnv);
    }

    /**
     * Collects every variable occurring in this expression, excluding the
     * "ignored" placeholder variable.
     */
    public final Collection getVariables()
    {
        CollectVarsVisitor visitor = new CollectVarsVisitor();
        accept(visitor);
        Collection vars = visitor.getVars();
        vars.remove(RBIgnoredVariable.the);
        return vars;
    }

    /** Collects the variables that are free (unbound) in the given context. */
    public final Collection getFreeVariables(ModeCheckContext context)
    {
        CollectFreeVarsVisitor visitor = new CollectFreeVarsVisitor(context);
        accept(visitor);
        return visitor.getVars();
    }

    public abstract TypeEnv typecheck(PredInfoProvider paramPredInfoProvider, TypeEnv paramTypeEnv)
        throws TypeModeError;

    /** Converts this expression to a runnable mode, raising an error on failure. */
    public final RBExpression convertToMode(ModeCheckContext context)
        throws TypeModeError
    {
        return convertToMode(context, true);
    }

    public abstract RBExpression convertToMode(ModeCheckContext paramModeCheckContext, boolean paramBoolean)
        throws TypeModeError;

    public RBExpression convertToNormalForm()
    {
        return convertToNormalForm(false);
    }

    /**
     * Converts this expression to normal form, optionally wrapping it in a negation.
     * (Fixed: the decompiled source declared the local result variable twice, which
     * does not compile; the logic is a simple conditional wrap.)
     */
    public RBExpression convertToNormalForm(boolean negate)
    {
        if (negate) {
            return new RBNotFilter(this);
        }
        return this;
    }

    /**
     * Conjoins this expression with another; compound expressions handle the
     * distribution themselves.
     */
    public RBExpression crossMultiply(RBExpression other)
    {
        if ((other instanceof RBCompoundExpression)) {
            return other.crossMultiply(this);
        }
        return FrontEnd.makeAnd(this, other);
    }

    public abstract Object accept(ExpressionVisitor paramExpressionVisitor);

    /** Substitutes variable bindings from the frame into this expression. */
    public RBExpression substitute(Frame frame)
    {
        SubstituteVisitor visitor = new SubstituteVisitor(frame);
        return (RBExpression)accept(visitor);
    }

    /**
     * Wraps this expression in an existential quantifier over the given variables,
     * optionally negating the result.
     */
    public RBExpression addExistsQuantifier(RBVariable[] newVars, boolean negate)
    {
        RBExistsQuantifier exists = new RBExistsQuantifier(newVars, this);
        if (negate) {
            return new RBNotFilter(exists);
        }
        return exists;
    }

    /**
     * Returns a copy of this expression annotated with the given mode and context.
     * The original expression is left untouched.
     */
    public RBExpression makeModed(Mode mode, ModeCheckContext context)
    {
        try
        {
            RBExpression clone = (RBExpression)clone();
            clone.setMode(mode, context);
            return clone;
        }
        catch (CloneNotSupportedException e)
        {
            // Cloneable is implemented, so this cannot happen; chain the cause
            // instead of only printing the stack trace.
            throw new Error("Should not happen", e);
        }
    }

    private void setMode(Mode mode, ModeCheckContext context)
    {
        this.mode = mode;
        this.newContext = context;
    }

    /** True if this expression's mode is preferable to the other's. */
    public boolean isBetterThan(RBExpression other)
    {
        return getMode().isBetterThan(other.getMode());
    }

    protected Mode getMode()
    {
        return this.mode;
    }

    /** Context produced by mode checking; must only be called after conversion. */
    public ModeCheckContext getNewContext()
    {
        Assert.assertNotNull(this.newContext);
        return this.newContext;
    }
}
| gpl-3.0 |