repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
cedac-software/spring-security-mongodb | src/test/java/com/cedac/security/oauth2/provider/token/store/TokenStoreBaseTests.java | 13183 | /*
* Copyright 2012-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cedac.security.oauth2.provider.token.store;
import com.cedac.security.oauth2.provider.RequestTokenFactory;
import org.junit.Test;
import org.springframework.security.authentication.AbstractAuthenticationToken;
import org.springframework.security.oauth2.common.DefaultExpiringOAuth2RefreshToken;
import org.springframework.security.oauth2.common.DefaultOAuth2AccessToken;
import org.springframework.security.oauth2.common.DefaultOAuth2RefreshToken;
import org.springframework.security.oauth2.common.OAuth2AccessToken;
import org.springframework.security.oauth2.common.OAuth2RefreshToken;
import org.springframework.security.oauth2.provider.OAuth2Authentication;
import org.springframework.security.oauth2.provider.OAuth2Request;
import org.springframework.security.oauth2.provider.token.TokenStore;
import java.util.Collection;
import java.util.Date;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
* @author mauro.franceschini
* @since 1.0.0
*/
/**
 * Contract tests for {@link TokenStore} implementations. Concrete stores subclass this
 * and supply their implementation via {@link #getTokenStore()}.
 *
 * @author mauro.franceschini
 * @since 1.0.0
 */
public abstract class TokenStoreBaseTests {

    /**
     * Supplies the {@link TokenStore} under test; each concrete subclass plugs in its
     * own implementation so every store is exercised against the same contract.
     */
    public abstract TokenStore getTokenStore();

    @Test
    public void testReadingAuthenticationForTokenThatDoesNotExist() {
        assertNull(getTokenStore().readAuthentication("tokenThatDoesNotExist"));
    }

    @Test
    public void testStoreAccessToken() {
        OAuth2Authentication expectedAuthentication = new OAuth2Authentication(
                RequestTokenFactory.createOAuth2Request("id", false), new TestAuthentication("test2", false));
        OAuth2AccessToken expectedOAuth2AccessToken = new DefaultOAuth2AccessToken("testToken");
        getTokenStore().storeAccessToken(expectedOAuth2AccessToken, expectedAuthentication);

        OAuth2AccessToken actualOAuth2AccessToken = getTokenStore().readAccessToken("testToken");
        assertEquals(expectedOAuth2AccessToken, actualOAuth2AccessToken);
        assertEquals(expectedAuthentication, getTokenStore().readAuthentication(expectedOAuth2AccessToken));

        // Removal must clear both the token and its stored authentication.
        getTokenStore().removeAccessToken(expectedOAuth2AccessToken);
        assertNull(getTokenStore().readAccessToken("testToken"));
        assertNull(getTokenStore().readAuthentication(expectedOAuth2AccessToken.getValue()));
    }

    @Test
    public void testStoreAccessTokenTwice() {
        OAuth2Authentication expectedAuthentication = new OAuth2Authentication(
                RequestTokenFactory.createOAuth2Request("id", false), new TestAuthentication("test2", false));
        OAuth2AccessToken expectedOAuth2AccessToken = new DefaultOAuth2AccessToken("testToken");
        // Storing the same token twice must be idempotent rather than create duplicates.
        getTokenStore().storeAccessToken(expectedOAuth2AccessToken, expectedAuthentication);
        getTokenStore().storeAccessToken(expectedOAuth2AccessToken, expectedAuthentication);

        OAuth2AccessToken actualOAuth2AccessToken = getTokenStore().readAccessToken("testToken");
        assertEquals(expectedOAuth2AccessToken, actualOAuth2AccessToken);
        assertEquals(expectedAuthentication, getTokenStore().readAuthentication(expectedOAuth2AccessToken));

        getTokenStore().removeAccessToken(expectedOAuth2AccessToken);
        assertNull(getTokenStore().readAccessToken("testToken"));
        assertNull(getTokenStore().readAuthentication(expectedOAuth2AccessToken.getValue()));
    }

    @Test
    public void testRetrieveAccessToken() {
        // Test approved request
        OAuth2Request storedOAuth2Request = RequestTokenFactory.createOAuth2Request("id", true);
        OAuth2Authentication authentication = new OAuth2Authentication(storedOAuth2Request,
                new TestAuthentication("test2", true));
        DefaultOAuth2AccessToken expectedOAuth2AccessToken = new DefaultOAuth2AccessToken("testToken");
        expectedOAuth2AccessToken.setExpiration(new Date(Long.MAX_VALUE - 1));
        getTokenStore().storeAccessToken(expectedOAuth2AccessToken, authentication);

        // Test unapproved request
        storedOAuth2Request = RequestTokenFactory.createOAuth2Request("id", false);
        authentication = new OAuth2Authentication(storedOAuth2Request, new TestAuthentication("test2", true));
        OAuth2AccessToken actualOAuth2AccessToken = getTokenStore().getAccessToken(authentication);
        assertEquals(expectedOAuth2AccessToken, actualOAuth2AccessToken);
        assertEquals(authentication.getUserAuthentication(),
                getTokenStore().readAuthentication(expectedOAuth2AccessToken.getValue()).getUserAuthentication());
        // The authorizationRequest does not match because it is unapproved, but the token was granted to an approved request
        assertFalse(storedOAuth2Request
                .equals(getTokenStore().readAuthentication(expectedOAuth2AccessToken.getValue()).getOAuth2Request()));
        actualOAuth2AccessToken = getTokenStore().getAccessToken(authentication);
        assertEquals(expectedOAuth2AccessToken, actualOAuth2AccessToken);

        getTokenStore().removeAccessToken(expectedOAuth2AccessToken);
        assertNull(getTokenStore().readAccessToken("testToken"));
        assertNull(getTokenStore().readAuthentication(expectedOAuth2AccessToken.getValue()));
        assertNull(getTokenStore().getAccessToken(authentication));
    }

    @Test
    public void testFindAccessTokensByClientIdAndUserName() {
        OAuth2Authentication expectedAuthentication = new OAuth2Authentication(
                RequestTokenFactory.createOAuth2Request("id", false), new TestAuthentication("test2", false));
        OAuth2AccessToken expectedOAuth2AccessToken = new DefaultOAuth2AccessToken("testToken");
        getTokenStore().storeAccessToken(expectedOAuth2AccessToken, expectedAuthentication);

        Collection<OAuth2AccessToken> actualOAuth2AccessTokens = getTokenStore()
                .findTokensByClientIdAndUserName("id", "test2");
        assertEquals(1, actualOAuth2AccessTokens.size());
    }

    @Test
    public void testFindAccessTokensByClientId() {
        OAuth2Authentication expectedAuthentication = new OAuth2Authentication(
                RequestTokenFactory.createOAuth2Request("id", false), new TestAuthentication("test2", false));
        OAuth2AccessToken expectedOAuth2AccessToken = new DefaultOAuth2AccessToken("testToken");
        getTokenStore().storeAccessToken(expectedOAuth2AccessToken, expectedAuthentication);

        Collection<OAuth2AccessToken> actualOAuth2AccessTokens = getTokenStore().findTokensByClientId("id");
        assertEquals(1, actualOAuth2AccessTokens.size());
    }

    @Test
    public void testReadingAccessTokenForTokenThatDoesNotExist() {
        assertNull(getTokenStore().readAccessToken("tokenThatDoesNotExist"));
    }

    @Test
    public void testRefreshTokenIsNotStoredDuringAccessToken() {
        OAuth2Authentication expectedAuthentication = new OAuth2Authentication(
                RequestTokenFactory.createOAuth2Request("id", false), new TestAuthentication("test2", false));
        DefaultOAuth2AccessToken expectedOAuth2AccessToken = new DefaultOAuth2AccessToken("testToken");
        expectedOAuth2AccessToken.setRefreshToken(new DefaultOAuth2RefreshToken("refreshToken"));
        getTokenStore().storeAccessToken(expectedOAuth2AccessToken, expectedAuthentication);

        // The refresh token travels with the access token but must not be persisted
        // as a standalone refresh token by storeAccessToken.
        OAuth2AccessToken actualOAuth2AccessToken = getTokenStore().readAccessToken("testToken");
        assertNotNull(actualOAuth2AccessToken.getRefreshToken());
        assertNull(getTokenStore().readRefreshToken("refreshToken"));
    }

    /**
     * NB: This used to test expiring refresh tokens. That test has been moved to sub-classes since not all stores
     * support the functionality.
     */
    @Test
    public void testStoreRefreshToken() {
        DefaultOAuth2RefreshToken expectedRefreshToken = new DefaultOAuth2RefreshToken("testToken");
        OAuth2Authentication expectedAuthentication = new OAuth2Authentication(
                RequestTokenFactory.createOAuth2Request("id", false), new TestAuthentication("test2", false));
        getTokenStore().storeRefreshToken(expectedRefreshToken, expectedAuthentication);

        OAuth2RefreshToken actualExpiringRefreshToken = getTokenStore().readRefreshToken("testToken");
        assertEquals(expectedRefreshToken, actualExpiringRefreshToken);
        assertEquals(expectedAuthentication, getTokenStore().readAuthenticationForRefreshToken(expectedRefreshToken));

        getTokenStore().removeRefreshToken(expectedRefreshToken);
        assertNull(getTokenStore().readRefreshToken("testToken"));
        assertNull(getTokenStore().readAuthentication(expectedRefreshToken.getValue()));
    }

    @Test
    public void testReadingRefreshTokenForTokenThatDoesNotExist() {
        // Fixed: previously this only invoked readRefreshToken() without asserting on
        // the result, so the test could never fail even if a non-null value came back.
        assertNull(getTokenStore().readRefreshToken("tokenThatDoesNotExist"));
    }

    @Test
    public void testGetAccessTokenForDeletedUser() throws Exception {
        // Test approved request
        OAuth2Request storedOAuth2Request = RequestTokenFactory.createOAuth2Request("id", true);
        OAuth2Authentication expectedAuthentication = new OAuth2Authentication(storedOAuth2Request,
                new TestAuthentication("test", true));
        OAuth2AccessToken expectedOAuth2AccessToken = new DefaultOAuth2AccessToken("testToken");
        getTokenStore().storeAccessToken(expectedOAuth2AccessToken, expectedAuthentication);
        assertEquals(expectedOAuth2AccessToken, getTokenStore().getAccessToken(expectedAuthentication));
        assertEquals(expectedAuthentication, getTokenStore().readAuthentication(expectedOAuth2AccessToken.getValue()));

        // Test unapproved request
        storedOAuth2Request = RequestTokenFactory.createOAuth2Request("id", false);
        OAuth2Authentication anotherAuthentication = new OAuth2Authentication(storedOAuth2Request,
                new TestAuthentication("test", true));
        assertEquals(expectedOAuth2AccessToken, getTokenStore().getAccessToken(anotherAuthentication));
        // The generated key for the authentication is the same as before, but the two auths are not equal. This could
        // happen if there are 2 users in a system with the same username, or (more likely), if a user account was
        // deleted and re-created.
        assertEquals(anotherAuthentication.getUserAuthentication(),
                getTokenStore().readAuthentication(expectedOAuth2AccessToken.getValue()).getUserAuthentication());
        // The authorizationRequest does not match because it is unapproved, but the token was granted to an approved request
        assertFalse(storedOAuth2Request
                .equals(getTokenStore().readAuthentication(expectedOAuth2AccessToken.getValue()).getOAuth2Request()));
    }

    @Test
    public void testRemoveRefreshToken() {
        OAuth2RefreshToken expectedExpiringRefreshToken = new DefaultExpiringOAuth2RefreshToken("testToken",
                new Date());
        OAuth2Authentication expectedAuthentication = new OAuth2Authentication(
                RequestTokenFactory.createOAuth2Request("id", false), new TestAuthentication("test2", false));
        getTokenStore().storeRefreshToken(expectedExpiringRefreshToken, expectedAuthentication);
        getTokenStore().removeRefreshToken(expectedExpiringRefreshToken);

        assertNull(getTokenStore().readRefreshToken("testToken"));
    }

    @Test
    public void testRemovedTokenCannotBeFoundByUsername() {
        OAuth2AccessToken token = new DefaultOAuth2AccessToken("testToken");
        OAuth2Authentication expectedAuthentication = new OAuth2Authentication(
                RequestTokenFactory.createOAuth2Request("id", false), new TestAuthentication("test2", false));
        getTokenStore().storeAccessToken(token, expectedAuthentication);
        getTokenStore().removeAccessToken(token);

        Collection<OAuth2AccessToken> tokens = getTokenStore().findTokensByClientIdAndUserName("id", "test2");
        assertFalse(tokens.contains(token));
        assertTrue(tokens.isEmpty());
    }

    /** Minimal authentication token used as the user authentication in the tests above. */
    protected static class TestAuthentication extends AbstractAuthenticationToken {

        private static final long serialVersionUID = 1L;

        // The principal is just the plain username string supplied at construction.
        private String principal;

        public TestAuthentication(String name, boolean authenticated) {
            super(null);
            setAuthenticated(authenticated);
            this.principal = name;
        }

        public Object getCredentials() {
            // No credentials are needed for these tests.
            return null;
        }

        public Object getPrincipal() {
            return this.principal;
        }
    }
}
| apache-2.0 |
an3m0na/hadoop | hadoop-tools/hadoop-posum/src/main/java/org/apache/hadoop/tools/posum/common/util/GeneralUtils.java | 5199 | package org.apache.hadoop.tools.posum.common.util;
import org.apache.commons.lang.WordUtils;

import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
 * Reflection and null-safety helpers used throughout POSUM.
 *
 * <p>All reflective lookups walk the class hierarchy starting at the supplied class,
 * so private members declared on ancestors are reachable as well.
 */
public class GeneralUtils {

    /**
     * Forcibly writes {@code value} into the field {@code name} declared on
     * {@code startClass} or one of its ancestors, even if the field is final.
     *
     * @throws PosumException if the field does not exist or cannot be written.
     */
    public static void writeField(Object object, Class startClass, String name, Object value) {
        try {
            Field field = findField(startClass, name);
            field.setAccessible(true);
            // Strip the FINAL modifier so final fields can be rewritten.
            // NOTE(review): this relies on the internal "modifiers" field of
            // java.lang.reflect.Field; it is known to break on newer JDKs (12+).
            Field modifiersField = Field.class.getDeclaredField("modifiers");
            modifiersField.setAccessible(true);
            modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL);
            field.set(object, value);
        } catch (NoSuchFieldException | IllegalAccessException e) {
            throw new PosumException("Reflection error: ", e);
        }
    }

    /** Walks the class hierarchy looking for a declared field with the given name. */
    private static Field findField(Class startClass, String name)
        throws NoSuchFieldException {
        for (Class crtClass = startClass; crtClass != null; crtClass = crtClass.getSuperclass()) {
            for (Field field : crtClass.getDeclaredFields()) {
                if (field.getName().equals(name)) {
                    return field;
                }
            }
        }
        throw new NoSuchFieldException(startClass.getName() + "." + name);
    }

    /**
     * Walks the class hierarchy looking for a declared method with the given name and
     * exact parameter types. A {@code null} parameter list is treated as "no parameters".
     */
    private static Method findMethod(Class startClass, String name, Class<?>... paramTypes)
        throws NoSuchMethodException {
        // Fix: getParameterTypes() never returns null, so comparing against a null
        // array could never match; normalize null to an empty parameter list instead.
        Class<?>[] wanted = paramTypes == null ? new Class<?>[0] : paramTypes;
        for (Class crtClass = startClass; crtClass != null; crtClass = crtClass.getSuperclass()) {
            for (Method method : crtClass.getDeclaredMethods()) {
                if (method.getName().equals(name) && Arrays.equals(method.getParameterTypes(), wanted)) {
                    return method;
                }
            }
        }
        throw new NoSuchMethodException(startClass.getName() + "." + name +
            (paramTypes != null ? Arrays.asList(paramTypes).toString().replace('[', '(').replace(']', ')') : ""));
    }

    /**
     * Reads the field {@code name} declared on {@code startClass} or an ancestor.
     *
     * @throws PosumException if the field does not exist or cannot be read.
     */
    public static <T> T readField(Object object, Class startClass, String name) {
        try {
            Field field = findField(startClass, name);
            field.setAccessible(true);
            return (T) field.get(object);
        } catch (NoSuchFieldException | IllegalAccessException e) {
            throw new PosumException("Reflection error: ", e);
        }
    }

    /**
     * Invokes the (possibly private) method {@code name} with the given arguments.
     *
     * @throws PosumException if the method cannot be found or the invocation fails.
     */
    public static <T> T invokeMethod(Object object, Class startClass, String name, Class<?>[] paramTypes, Object... args) {
        try {
            Method method = findMethod(startClass, name, paramTypes);
            method.setAccessible(true);
            return (T) method.invoke(object, args);
        } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException | IllegalArgumentException e) {
            throw new PosumException("Reflection error: ", e);
        }
    }

    /** Null-safe equality check; delegates to {@link Objects#equals(Object, Object)}. */
    public static boolean safeEquals(Object o1, Object o2) {
        return Objects.equals(o1, o2);
    }

    /** Null-safe hash code: 0 for null, otherwise the object's own hash code. */
    public static int safeHashCode(Object o) {
        return Objects.hashCode(o);
    }

    /**
     * Resolves a getter for each requested bean property, trying a few naming
     * variants (leading underscore toggled, snake_case vs camelCase) before failing.
     *
     * @throws PosumException if no reader can be located for some property.
     */
    public static Map<String, Method> getBeanPropertyReaders(Class beanClass,
                                                             Set<String> propertyNames)
        throws IntrospectionException {
        Map<String, Method> ret = new HashMap<>(propertyNames.size());
        PropertyDescriptor[] descriptors =
            Introspector.getBeanInfo(beanClass, Object.class).getPropertyDescriptors();
        for (String name : propertyNames) {
            Method reader = findPropertyReader(descriptors, name);
            if (reader == null) {
                // explore name variations: first toggle a leading underscore ...
                String alternatePropertyName = name.startsWith("_") ? name.substring(1) : "_" + name;
                reader = findPropertyReader(descriptors, alternatePropertyName);
                if (reader == null) {
                    // ... then convert between snake_case and camelCase.
                    if (name.contains("_")) {
                        alternatePropertyName = WordUtils.capitalizeFully(name).replaceAll("_", "");
                    } else {
                        alternatePropertyName = name.replaceAll("(.)(\\p{Upper})", "$1_$2").toLowerCase();
                    }
                    reader = findPropertyReader(descriptors, alternatePropertyName);
                }
            }
            if (reader == null) {
                throw new PosumException("Could not find property reader for " + name + " in " + beanClass);
            }
            ret.put(name, reader);
        }
        return ret;
    }

    /** Returns the read method of the descriptor matching {@code propertyName}, or null. */
    private static Method findPropertyReader(PropertyDescriptor[] propertyDescriptors, String propertyName) {
        for (PropertyDescriptor pd : propertyDescriptors) {
            if (propertyName.equals(pd.getName())) {
                return pd.getReadMethod();
            }
        }
        return null;
    }

    /** Unboxes the value, substituting 0 for null. */
    public static long orZero(Long unsafeLong) {
        return unsafeLong == null ? 0 : unsafeLong;
    }

    /** Unboxes the value, substituting 0 for null. */
    public static float orZero(Float unsafeFloat) {
        return unsafeFloat == null ? 0 : unsafeFloat;
    }

    /** Unboxes the value, substituting 0 for null. */
    public static double orZero(Double unsafeDouble) {
        return unsafeDouble == null ? 0 : unsafeDouble;
    }

    /** Unboxes the value, substituting 0 for null. */
    public static int orZero(Integer unsafeInt) {
        return unsafeInt == null ? 0 : unsafeInt;
    }
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-medialive/src/main/java/com/amazonaws/services/medialive/model/InputSource.java | 6848 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.medialive.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* The settings for a PULL type input.
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/medialive-2017-10-14/InputSource" target="_top">AWS API
* Documentation</a>
*/
/**
 * The settings for a PULL type input.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/medialive-2017-10-14/InputSource" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class InputSource implements Serializable, Cloneable, StructuredPojo {

    /** Key used to look up the password in the EC2 Parameter store. */
    private String passwordParam;

    /** Customer source URL the stream is pulled from. */
    private String url;

    /** Username for the input source. */
    private String username;

    /**
     * Sets the key used to extract the password from EC2 Parameter store.
     *
     * @param passwordParam the Parameter-store key.
     */
    public void setPasswordParam(String passwordParam) {
        this.passwordParam = passwordParam;
    }

    /**
     * Returns the key used to extract the password from EC2 Parameter store.
     *
     * @return the Parameter-store key.
     */
    public String getPasswordParam() {
        return this.passwordParam;
    }

    /**
     * Fluent variant of {@link #setPasswordParam(String)}.
     *
     * @param passwordParam the Parameter-store key.
     * @return this object, so calls can be chained.
     */
    public InputSource withPasswordParam(String passwordParam) {
        setPasswordParam(passwordParam);
        return this;
    }

    /**
     * Sets the customer's source URL the stream is pulled from.
     *
     * @param url the source URL.
     */
    public void setUrl(String url) {
        this.url = url;
    }

    /**
     * Returns the customer's source URL the stream is pulled from.
     *
     * @return the source URL.
     */
    public String getUrl() {
        return this.url;
    }

    /**
     * Fluent variant of {@link #setUrl(String)}.
     *
     * @param url the source URL.
     * @return this object, so calls can be chained.
     */
    public InputSource withUrl(String url) {
        setUrl(url);
        return this;
    }

    /**
     * Sets the username for the input source.
     *
     * @param username the username.
     */
    public void setUsername(String username) {
        this.username = username;
    }

    /**
     * Returns the username for the input source.
     *
     * @return the username.
     */
    public String getUsername() {
        return this.username;
    }

    /**
     * Fluent variant of {@link #setUsername(String)}.
     *
     * @param username the username.
     * @return this object, so calls can be chained.
     */
    public InputSource withUsername(String username) {
        setUsername(username);
        return this;
    }

    /**
     * Renders a representation of this object for testing and debugging. Only fields
     * with non-null values are included.
     *
     * @return a string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("{");
        if (getPasswordParam() != null) {
            text.append("PasswordParam: ").append(getPasswordParam()).append(",");
        }
        if (getUrl() != null) {
            text.append("Url: ").append(getUrl()).append(",");
        }
        if (getUsername() != null) {
            text.append("Username: ").append(getUsername());
        }
        return text.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof InputSource)) {
            return false;
        }
        InputSource that = (InputSource) obj;
        return java.util.Objects.equals(getPasswordParam(), that.getPasswordParam())
                && java.util.Objects.equals(getUrl(), that.getUrl())
                && java.util.Objects.equals(getUsername(), that.getUsername());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0) as the
        // hand-rolled generated version, so existing hash values are unchanged.
        return java.util.Objects.hash(getPasswordParam(), getUrl(), getUsername());
    }

    @Override
    public InputSource clone() {
        try {
            return (InputSource) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.medialive.model.transform.InputSourceMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| apache-2.0 |
neilireson/luke | src/main/java/org/getopt/luke/decoders/SolrDecoder.java | 2243 | package org.getopt.luke.decoders;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.TreeMap;
import org.apache.lucene.document.Field;
import org.apache.solr.schema.FieldType;
import org.getopt.luke.ClassFinder;
/**
 * A {@link Decoder} that delegates indexed/stored value decoding to a Solr
 * {@link FieldType}. Available field types are discovered once, at class-load time.
 */
public class SolrDecoder implements Decoder {
    private static final String solr_prefix = "org.apache.solr.schema.";
    // Fully-qualified FieldType class name -> shared FieldType instance.
    private static final TreeMap<String, FieldType> types = new TreeMap<>();
    // Sorted, "solr."-prefixed display names of the discovered types.
    private static String[] typeNames = new String[0];
    private FieldType fieldType;
    private String name;

    static {
        // Discover every instantiable FieldType subclass on the classpath.
        try {
            Class[] classes = ClassFinder.getInstantiableSubclasses(FieldType.class);
            if (classes == null || classes.length == 0) {
                throw new ClassNotFoundException("Missing Solr types???");
            }
            for (Class cls : classes) {
                // getDeclaredConstructor().newInstance() replaces the deprecated
                // Class.newInstance(), which rethrows checked exceptions unchecked.
                FieldType ft = (FieldType) cls.getDeclaredConstructor().newInstance();
                types.put(cls.getName(), ft);
            }
            ArrayList<String> names = new ArrayList<>();
            for (String n : types.keySet()) {
                if (n.startsWith(solr_prefix)) {
                    names.add("solr." + n.substring(solr_prefix.length()));
                }
            }
            Collections.sort(names);
            typeNames = names.toArray(new String[0]);
        } catch (IOException | ReflectiveOperationException e) {
            // Best effort: if discovery fails, the type list simply stays empty and
            // constructing a SolrDecoder will fail with "Unknown Solr FieldType".
            e.printStackTrace();
        }
    }

    /** Returns the display names of all discovered Solr field types (defensive copy). */
    public static final String[] getTypes() {
        return typeNames.clone();
    }

    /**
     * Creates a decoder for the given type name, accepting either the short
     * "solr.Xxx" form or the fully-qualified class name.
     *
     * @throws Exception if the name does not match a discovered FieldType.
     */
    public SolrDecoder(String type) throws Exception {
        fieldType = types.get(type);
        if (fieldType == null && type.startsWith("solr.")) {
            String name = solr_prefix + type.substring(5);
            fieldType = types.get(name);
        }
        if (fieldType == null) {
            throw new Exception("Unknown Solr FieldType: " + type);
        }
        name = type;
    }

    /** Converts an indexed term value to its human-readable form via the FieldType. */
    public String decodeTerm(String fieldName, Object value) throws Exception {
        return fieldType.indexedToReadable(value.toString());
    }

    /** Converts a stored field value to its human-readable form via the FieldType. */
    public String decodeStored(String fieldName, Field value) throws Exception {
        return fieldType.storedToReadable(value);
    }

    public String toString() {
        return name;
    }
}
| apache-2.0 |
jyotisingh/gocd | server/src/test-integration/java/com/thoughtworks/go/server/dao/AccessTokenSqlMapDaoIntegrationTest.java | 3919 | /*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.dao;
import com.thoughtworks.go.domain.AccessToken;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.util.List;
import static com.thoughtworks.go.helper.AccessTokenMother.randomAccessToken;
import static com.thoughtworks.go.helper.AccessTokenMother.randomAccessTokenForUser;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Integration tests for {@link AccessTokenSqlMapDao}, run against the test database
 * configured by the listed Spring contexts.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
        "classpath:WEB-INF/applicationContext-global.xml",
        "classpath:WEB-INF/applicationContext-dataLocalAccess.xml",
        "classpath:testPropertyConfigurer.xml"
})
public class AccessTokenSqlMapDaoIntegrationTest {

    @Autowired
    private AccessTokenSqlMapDao accessTokenSqlMapDao;

    @Autowired
    private DatabaseAccessHelper dbHelper;

    @Before
    public void setup() throws Exception {
        dbHelper.onSetUp();
    }

    @After
    public void teardown() throws Exception {
        // Clear all tokens so each test starts from an empty table.
        accessTokenSqlMapDao.deleteAll();
        dbHelper.onTearDown();
    }

    @Test
    public void shouldSaveUsersIntoDatabase() {
        AccessToken created = randomAccessToken();
        accessTokenSqlMapDao.saveOrUpdate(created);

        AccessToken loaded = accessTokenSqlMapDao.load(created.getId());
        assertThat(loaded).isEqualTo(created);
    }

    @Test
    public void shouldReturnNullWhenNoAccessTokenFound() {
        // -1 is never a valid id, so the lookup must come back empty.
        assertThat(accessTokenSqlMapDao.load(-1)).isNull();
    }

    @Test
    public void shouldReturnAllTheAccessTokensBelongingToAUser() {
        String user1 = "Bob";
        String user2 = "John";

        AccessToken bobFirstToken = randomAccessTokenForUser(user1);
        AccessToken bobSecondToken = randomAccessTokenForUser(user1);
        AccessToken johnToken = randomAccessTokenForUser(user2);
        accessTokenSqlMapDao.saveOrUpdate(bobFirstToken);
        accessTokenSqlMapDao.saveOrUpdate(bobSecondToken);
        accessTokenSqlMapDao.saveOrUpdate(johnToken);

        List<AccessToken> bobTokens = accessTokenSqlMapDao.findAllTokensForUser(user1);
        List<AccessToken> johnTokens = accessTokenSqlMapDao.findAllTokensForUser(user2);

        assertThat(bobTokens).hasSize(2).containsExactlyInAnyOrder(bobFirstToken, bobSecondToken);
        assertThat(johnTokens).hasSize(1).containsExactlyInAnyOrder(johnToken);
    }

    @Test
    public void shouldLoadAccessTokenBasedOnSaltId() {
        AccessToken created = randomAccessToken();
        accessTokenSqlMapDao.saveOrUpdate(created);

        AccessToken loaded = accessTokenSqlMapDao.findAccessTokenBySaltId(created.getSaltId());
        assertThat(loaded).isEqualTo(created);
    }

    @Test
    public void shouldReturnNullWhenNoAccessTokenFoundForTheSpecifiedSaltId() {
        String saltId = "access-token-for-apis";
        assertThat(accessTokenSqlMapDao.findAccessTokenBySaltId(saltId)).isNull();
    }
}
| apache-2.0 |
togglz/togglz | core/src/test/java/org/togglz/core/repository/jdbc/JDBCStateRepositoryTest.java | 9068 | package org.togglz.core.repository.jdbc;
import org.h2.jdbcx.JdbcConnectionPool;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.togglz.core.Feature;
import org.togglz.core.repository.FeatureState;
import org.togglz.core.repository.util.DefaultMapSerializer;
import org.togglz.core.util.DbUtils;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import static org.junit.jupiter.api.Assertions.*;
class JDBCStateRepositoryTest {
private DataSource dataSource;
private JDBCStateRepository repository;
@BeforeEach
// Builds a fresh in-memory H2 datasource and a repository that auto-creates the
// TOGGLZ table, so every test starts from an empty schema.
void before() throws SQLException {
dataSource = createDataSource();
repository = JDBCStateRepository.newBuilder(dataSource).tableName("TOGGLZ").createTable(true).serializer(DefaultMapSerializer.multiline()).build();
}
@Test
void testShouldSaveStateWithoutStrategyOrParameters() {
    // Persist a feature state that carries neither a strategy nor parameters.
    repository.setFeatureState(new FeatureState(TestFeature.F1).disable());

    // Exactly one disabled row must exist, with empty strategy columns.
    assertEquals(1L, query(dataSource, "SELECT COUNT(*) FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
    assertEquals(0, query(dataSource, "SELECT FEATURE_ENABLED FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
    assertNull(query(dataSource, "SELECT STRATEGY_ID FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
    assertNull(query(dataSource, "SELECT STRATEGY_PARAMS FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
}
@Test
void testShouldSaveStateStrategyAndParameters() {
    // Persist an enabled feature together with a strategy id and one parameter.
    FeatureState toPersist = new FeatureState(TestFeature.F1)
            .enable()
            .setStrategyId("someId")
            .setParameter("param", "foo");
    repository.setFeatureState(toPersist);

    // Exactly one enabled row must exist with the strategy columns populated.
    assertEquals(1L, query(dataSource, "SELECT COUNT(*) FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
    assertEquals(1, query(dataSource, "SELECT FEATURE_ENABLED FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
    assertEquals("someId", query(dataSource, "SELECT STRATEGY_ID FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
    assertEquals("param=foo", query(dataSource, "SELECT STRATEGY_PARAMS FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
}
@Test
void testShouldReadStateWithoutStrategyAndParameters() {
    // Seed a plain disabled row with no strategy information.
    update(dataSource, "INSERT INTO TOGGLZ VALUES ('F1', 0, NULL, NULL)");

    FeatureState readBack = repository.getFeatureState(TestFeature.F1);

    // The state must come back disabled, with no strategy and no parameters.
    assertNotNull(readBack);
    assertEquals(TestFeature.F1, readBack.getFeature());
    assertFalse(readBack.isEnabled());
    assertNull(readBack.getStrategyId());
    assertEquals(0, readBack.getParameterNames().size());
}
@Test
void testShouldReadStateWithStrategyAndParameters() {
    // Seed an enabled row carrying a strategy id and a serialized parameter map.
    update(dataSource, "INSERT INTO TOGGLZ VALUES ('F1', 1, 'myStrategy', 'param23=foobar')");

    FeatureState readBack = repository.getFeatureState(TestFeature.F1);

    // The state must come back enabled with the strategy and parameter restored.
    assertNotNull(readBack);
    assertEquals(TestFeature.F1, readBack.getFeature());
    assertTrue(readBack.isEnabled());
    assertEquals("myStrategy", readBack.getStrategyId());
    assertEquals(1, readBack.getParameterNames().size());
    assertEquals("foobar", readBack.getParameter("param23"));
}
@Test
void testShouldUpdateExistingDatabaseEntry() {
    /*
     * GIVEN a database row containing a feature state with strategy and parameters
     */
    update(dataSource, "INSERT INTO TOGGLZ VALUES ('F1', 1, 'myStrategy', 'param23=foobar')");
    /*
     * AND the database entries are like expected
     */
    assertEquals(1L, query(dataSource, "SELECT COUNT(*) FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
    assertEquals(1, query(dataSource, "SELECT FEATURE_ENABLED FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
    assertEquals("myStrategy", query(dataSource, "SELECT STRATEGY_ID FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
    assertEquals("param23=foobar", query(dataSource, "SELECT STRATEGY_PARAMS FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
    /*
     * WHEN the repository writes new state (disabled, different strategy/parameter)
     */
    FeatureState state = new FeatureState(TestFeature.F1)
            .disable()
            .setStrategyId("someId")
            .setParameter("param", "foo");
    repository.setFeatureState(state);
    /*
     * THEN the existing row should be updated in place (still exactly one row)
     * with all columns reflecting the new state
     */
    assertEquals(1L, query(dataSource, "SELECT COUNT(*) FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
    assertEquals(0, query(dataSource, "SELECT FEATURE_ENABLED FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
    assertEquals("someId", query(dataSource, "SELECT STRATEGY_ID FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
    assertEquals("param=foo", query(dataSource, "SELECT STRATEGY_PARAMS FROM TOGGLZ WHERE FEATURE_NAME = 'F1'"));
}
@Test
void testShouldPropagateTheExceptionWhenReadFails() throws SQLException {
    /*
     * GIVEN a database row containing a simple feature state
     */
    update(dataSource, "INSERT INTO TOGGLZ VALUES ('F1', 0, NULL, NULL)");
    /*
     * AND the datasource throws an exception when we try to get a
     * connection
     */
    DataSource spyedDataSource = Mockito.spy(dataSource);
    repository = JDBCStateRepository.newBuilder(spyedDataSource).tableName("TOGGLZ").createTable(true).serializer(DefaultMapSerializer.multiline()).build();
    Mockito.when(spyedDataSource.getConnection()).thenThrow(new SQLException("Failed to get a connection"));
    /*
     * WHEN the repository reads the state
     * THEN the SQLException is wrapped and rethrown as IllegalStateException
     * (comment moved before the assertion it describes)
     */
    assertThrows(IllegalStateException.class, () -> repository.getFeatureState(TestFeature.F1));
}
@Test
void testShouldPropagateTheExceptionWhenWriteFails() throws SQLException {
    /*
     * GIVEN a feature state to persist
     */
    FeatureState state = new FeatureState(TestFeature.F1).enable();
    /*
     * AND the datasource throws an exception when we try to get a
     * connection
     */
    DataSource spyedDataSource = Mockito.spy(dataSource);
    repository = JDBCStateRepository.newBuilder(spyedDataSource).tableName("TOGGLZ").createTable(true).serializer(DefaultMapSerializer.multiline()).build();
    Mockito.when(spyedDataSource.getConnection()).thenThrow(new SQLException("Failed to get a connection"));
    /*
     * WHEN the feature state is persisted
     * THEN the SQLException is wrapped and rethrown as IllegalStateException
     */
    assertThrows(IllegalStateException.class, () -> repository.setFeatureState(state));
}
/**
 * Executes the given query and returns the first column of the first result
 * row, or {@code null} if the query produced no rows.
 *
 * @param dataSource source of the JDBC connection
 * @param sql the SELECT statement to execute
 * @return value of column 1 of the first row, or {@code null} when empty
 * @throws IllegalStateException wrapping any {@link SQLException}
 */
private Object query(DataSource dataSource, String sql) {
    // try-with-resources closes ResultSet, Statement and Connection in
    // reverse order, replacing the nested try/finally + DbUtils.closeQuietly
    // blocks. Close failures now surface as IllegalStateException instead of
    // being swallowed, which is preferable in a test helper.
    try (Connection connection = dataSource.getConnection();
         Statement statement = connection.createStatement();
         ResultSet resultSet = statement.executeQuery(sql)) {
        if (resultSet.next()) {
            return resultSet.getObject(1);
        }
        return null;
    } catch (SQLException e) {
        throw new IllegalStateException(e);
    }
}
/**
 * Executes the given DML/DDL statement, discarding the update count.
 *
 * @param dataSource source of the JDBC connection
 * @param sql the statement to execute
 * @throws IllegalStateException wrapping any {@link SQLException}
 */
private void update(DataSource dataSource, String sql) {
    // try-with-resources replaces the nested try/finally + DbUtils blocks;
    // Statement and Connection are closed in reverse declaration order.
    try (Connection connection = dataSource.getConnection();
         Statement statement = connection.createStatement()) {
        statement.executeUpdate(sql);
    } catch (SQLException e) {
        throw new IllegalStateException(e);
    }
}
/** Creates a pooled, in-memory H2 datasource used as the test database. */
private DataSource createDataSource() throws SQLException {
    return JdbcConnectionPool.create("jdbc:h2:mem:", "sa", "");
}

/** Minimal feature fixture; its name() ("F1") is the FEATURE_NAME key used above. */
private enum TestFeature implements Feature {
    F1
}
}
/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terracotta.offheapresource;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import org.junit.Test;
public class OffHeapResourceTest {

  /** Constructing a resource with negative capacity must be rejected. */
  @Test
  public void testNegativeResourceSize() {
    try {
      new OffHeapResourceImpl(-1);
      fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
      // rejected as required
    }
  }

  /** A zero-capacity resource can never satisfy a reservation. */
  @Test
  public void testZeroSizeResourceIsUseless() {
    OffHeapResource resource = new OffHeapResourceImpl(0);
    assertThat(resource.reserve(1), is(false));
    assertThat(resource.available(), is(0L));
  }

  /** Reserving capacity shrinks what remains available. */
  @Test
  public void testAllocationReducesSize() {
    OffHeapResource resource = new OffHeapResourceImpl(20);
    assertThat(resource.available(), is(20L));
    assertThat(resource.reserve(10), is(true));
    assertThat(resource.available(), is(10L));
  }

  /** Negative reservation amounts must be rejected. */
  @Test
  public void testNegativeAllocationFails() {
    OffHeapResource resource = new OffHeapResourceImpl(20);
    try {
      resource.reserve(-1);
      fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
      // rejected as required
    }
  }

  /** Once fully reserved, further reservations fail without going negative. */
  @Test
  public void testAllocationWhenExhaustedFails() {
    OffHeapResource resource = new OffHeapResourceImpl(20);
    resource.reserve(20);
    assertThat(resource.reserve(1), is(false));
    assertThat(resource.available(), is(0L));
  }

  /** Releasing returns capacity to the available pool. */
  @Test
  public void testFreeIncreasesSize() {
    OffHeapResource resource = new OffHeapResourceImpl(20);
    resource.reserve(20);
    assertThat(resource.available(), is(0L));
    resource.release(10);
    assertThat(resource.available(), is(10L));
  }

  /** Negative release amounts must be rejected. */
  @Test
  public void testNegativeFreeFails() {
    OffHeapResource resource = new OffHeapResourceImpl(20);
    resource.reserve(10);
    try {
      resource.release(-10);
      fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
      // rejected as required
    }
  }
}
package org.scribble.codegen.java.endpointapi.ioifaces;
import java.util.Map;
import org.scribble.codegen.java.endpointapi.StateChannelApiGenerator;
import org.scribble.codegen.java.util.InterfaceBuilder;
import org.scribble.main.ScribbleException;
import org.scribble.model.endpoint.EState;
import org.scribble.model.endpoint.actions.EAction;
/**
 * Generates the "select" I/O-state interface for an endpoint state whose
 * actions are all sends. Non-send actions are not supported yet.
 */
public class SelectInterfaceGenerator extends IOStateInterfaceGenerator
{
	public SelectInterfaceGenerator(StateChannelApiGenerator apigen, Map<EAction, InterfaceBuilder> actions, EState curr)
	{
		super(apigen, actions, curr);
	}

	@Override
	public InterfaceBuilder generateType() throws ScribbleException
	{
		// Select interfaces only make sense for pure-output states: fail fast
		// if any action in this state is not a send (e.g. connect/disconnect),
		// which this generator does not handle yet.
		if (this.curr.getAllActions().stream().anyMatch((a) -> !a.isSend()))  // TODO (connect/disconnect)
		{
			//return null;
			throw new RuntimeException("TODO: " + this.curr);
		}
		return super.generateType();
	}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.generic;
import java.nio.ByteBuffer;
import java.util.AbstractList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.AvroTypeException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.avro.UnresolvedUnionException;
import org.apache.avro.io.BinaryData;
import org.apache.avro.util.Utf8;
/** Utilities for generic Java data. */
public class GenericData {
private static final GenericData INSTANCE = new GenericData();
/** Return the singleton instance. */
public static GenericData get() { return INSTANCE; }
protected GenericData() {}
/** Default implementation of {@link GenericRecord}. */
public static class Record implements GenericRecord, Comparable<Record> {
  private final Schema schema;
  private final Object[] values; // one slot per field, indexed by field position

  public Record(Schema schema) {
    if (schema == null || !Type.RECORD.equals(schema.getType()))
      throw new AvroRuntimeException("Not a record schema: "+schema);
    this.schema = schema;
    this.values = new Object[schema.getFields().size()];
  }
  @Override public Schema getSchema() { return schema; }
  @Override public void put(String key, Object value) {
    values[schema.getField(key).pos()] = value;
  }
  @Override public void put(int i, Object v) { values[i] = v; }
  @Override public Object get(String key) {
    Field field = schema.getField(key);
    if (field == null) return null; // unknown field name reads as null
    return values[field.pos()];
  }
  @Override public Object get(int i) { return values[i]; }
  /** Equal iff the other record has the same schema full name and compares as 0. */
  @Override public boolean equals(Object o) {
    if (o == this) return true;                // identical object
    if (!(o instanceof Record)) return false;  // not a record
    Record that = (Record)o;
    if (!schema.getFullName().equals(that.schema.getFullName()))
      return false;                            // not the same schema
    return this.compareTo(that) == 0;
  }
  // Hash/compare/toString all delegate to schema-aware GenericData logic so
  // they stay consistent with equals().
  @Override public int hashCode() {
    return GenericData.get().hashCode(this, schema);
  }
  @Override public int compareTo(Record that) {
    return GenericData.get().compare(this, that, schema);
  }
  @Override public String toString() {
    return GenericData.get().toString(this);
  }
}
/** Default implementation of an array. */
@SuppressWarnings(value="unchecked")
public static class Array<T> extends AbstractList<T>
  implements GenericArray<T>, Comparable<GenericArray<T>> {
  private static final Object[] EMPTY = new Object[0];
  private final Schema schema;
  private int size;                  // logical element count
  private Object[] elements = EMPTY; // backing storage; capacity may exceed size

  public Array(int capacity, Schema schema) {
    if (schema == null || !Type.ARRAY.equals(schema.getType()))
      throw new AvroRuntimeException("Not an array schema: "+schema);
    this.schema = schema;
    if (capacity != 0)
      elements = new Object[capacity];
  }
  public Schema getSchema() { return schema; }
  @Override public int size() { return size; }
  @Override public void clear() { size = 0; } // keeps backing array for reuse
  @Override public Iterator<T> iterator() {
    return new Iterator<T>() {
      private int position = 0;
      public boolean hasNext() { return position < size; }
      public T next() { return (T)elements[position++]; }
      public void remove() { throw new UnsupportedOperationException(); }
    };
  }
  @Override public T get(int i) {
    // explicit negative check so both out-of-range directions report the index
    if (i >= size || i < 0)
      throw new IndexOutOfBoundsException("Index " + i + " out of bounds.");
    return (T)elements[i];
  }
  @Override public boolean add(T o) {
    if (size == elements.length) {
      // grow by ~1.5x, same policy as before
      Object[] newElements = new Object[(size * 3)/2 + 1];
      System.arraycopy(elements, 0, newElements, 0, size);
      elements = newElements;
    }
    elements[size++] = o;
    return true;
  }
  /** Returns the element just past the logical end (kept for instance reuse),
   * or null if the backing array has no spare slot. */
  public T peek() {
    return (size < elements.length) ? (T)elements[size] : null;
  }
  @Override
  public int hashCode() {
    return GenericData.get().hashCode(this, schema);
  }
  @Override
  public boolean equals(Object o) {
    if (o == this) return true;               // identical object
    if (!(o instanceof Array)) return false;  // not an array
    Array that = (Array)o;
    if (!schema.equals(that.schema))
      return false;                           // not the same schema
    return this.compareTo(that) == 0;
  }
  public int compareTo(GenericArray<T> that) {
    return GenericData.get().compare(this, that, this.getSchema());
  }
  /** Reverses the logical contents of the array in place. */
  public void reverse() {
    // BUG FIX: iterate over [0, size), not the whole backing array.
    // The old code used elements.length - 1; once add() had grown the
    // backing array beyond the logical size, that swapped trailing null
    // capacity slots into the front of the array.
    int left = 0;
    int right = size - 1;
    while (left < right) {
      Object tmp = elements[left];
      elements[left] = elements[right];
      elements[right] = tmp;
      left++;
      right--;
    }
  }
  @Override
  public String toString() {
    StringBuffer buffer = new StringBuffer();
    buffer.append("[");
    int count = 0;
    for (T e : this) {
      buffer.append(e==null ? "null" : e.toString());
      if (++count < size())
        buffer.append(", ");
    }
    buffer.append("]");
    return buffer.toString();
  }
}
/** Default implementation of {@link GenericFixed}. */
public static class Fixed implements GenericFixed, Comparable<Fixed> {
  private byte[] bytes;

  public Fixed(Schema schema) { bytes(new byte[schema.getFixedSize()]); }
  public Fixed(byte[] bytes) { bytes(bytes); }
  protected Fixed() {}
  // NOTE(review): setter stores the caller's array without copying — callers
  // share mutable state with this instance; confirm that is intended.
  public void bytes(byte[] bytes) { this.bytes = bytes; }
  public byte[] bytes() { return bytes; }
  @Override
  public boolean equals(Object o) {
    if (o == this) return true;
    return o instanceof GenericFixed
      && Arrays.equals(bytes, ((GenericFixed)o).bytes());
  }
  @Override
  public int hashCode() { return Arrays.hashCode(bytes); }
  @Override
  public String toString() { return Arrays.toString(bytes); }
  /** Lexicographic, unsigned byte-wise ordering via BinaryData. */
  public int compareTo(Fixed that) {
    return BinaryData.compareBytes(this.bytes, 0, this.bytes.length,
                                   that.bytes, 0, that.bytes.length);
  }
}

/** Default implementation of {@link GenericEnumSymbol}. */
public static class EnumSymbol implements GenericEnumSymbol {
  private String symbol;

  public EnumSymbol(String symbol) { this.symbol = symbol; }
  /** Equal to any GenericEnumSymbol whose string form matches this symbol. */
  @Override
  public boolean equals(Object o) {
    if (o == this) return true;
    return o instanceof GenericEnumSymbol
      && symbol.equals(o.toString());
  }
  @Override
  public int hashCode() { return symbol.hashCode(); }
  @Override
  public String toString() { return symbol; }
}
/** Returns true if a Java datum matches a schema.
 * Recursively validates records, arrays, maps and unions; leaf types are
 * checked by instanceof.
 * NOTE(review): a null datum NPEs for ENUM (datum.toString()) — confirm
 * callers only pass null where a NULL schema may match. */
public boolean validate(Schema schema, Object datum) {
  switch (schema.getType()) {
  case RECORD:
    if (!(datum instanceof IndexedRecord)) return false;
    IndexedRecord fields = (IndexedRecord)datum;
    // every field value must recursively match its field schema
    for (Field f : schema.getFields()) {
      if (!validate(f.schema(), fields.get(f.pos())))
        return false;
    }
    return true;
  case ENUM:
    return schema.getEnumSymbols().contains(datum.toString());
  case ARRAY:
    if (!(datum instanceof Collection)) return false;
    for (Object element : (Collection<?>)datum)
      if (!validate(schema.getElementType(), element))
        return false;
    return true;
  case MAP:
    if (!(datum instanceof Map)) return false;
    @SuppressWarnings(value="unchecked")
    Map<Object,Object> map = (Map<Object,Object>)datum;
    // only values are validated; keys are assumed to be strings
    for (Map.Entry<Object,Object> entry : map.entrySet())
      if (!validate(schema.getValueType(), entry.getValue()))
        return false;
    return true;
  case UNION:
    // valid if the datum matches any branch
    for (Schema type : schema.getTypes())
      if (validate(type, datum))
        return true;
    return false;
  case FIXED:
    return datum instanceof GenericFixed
      && ((GenericFixed)datum).bytes().length==schema.getFixedSize();
  case STRING:  return isString(datum);
  case BYTES:   return isBytes(datum);
  case INT:     return datum instanceof Integer;
  case LONG:    return datum instanceof Long;
  case FLOAT:   return datum instanceof Float;
  case DOUBLE:  return datum instanceof Double;
  case BOOLEAN: return datum instanceof Boolean;
  case NULL:    return datum == null;
  default: return false;
  }
}

/** Renders a Java datum as <a href="http://www.json.org/">JSON</a>. */
public String toString(Object datum) {
  StringBuilder buffer = new StringBuilder();
  toString(datum, buffer);
  return buffer.toString();
}

/** Renders a Java datum as <a href="http://www.json.org/">JSON</a>,
 * appending to the given buffer. Dispatches on the datum's runtime type. */
protected void toString(Object datum, StringBuilder buffer) {
  if (datum instanceof IndexedRecord) {
    // records render as JSON objects keyed by field name
    buffer.append("{");
    int count = 0;
    IndexedRecord record = (IndexedRecord)datum;
    for (Field f : record.getSchema().getFields()) {
      toString(f.name(), buffer);
      buffer.append(": ");
      toString(record.get(f.pos()), buffer);
      if (++count < record.getSchema().getFields().size())
        buffer.append(", ");
    }
    buffer.append("}");
  } else if (datum instanceof Collection) {
    Collection<?> array = (Collection<?>)datum;
    buffer.append("[");
    long last = array.size()-1;
    int i = 0;
    for (Object element : array) {
      toString(element, buffer);
      if (i++ < last)
        buffer.append(", ");
    }
    buffer.append("]");
  } else if (datum instanceof Map) {
    buffer.append("{");
    int count = 0;
    @SuppressWarnings(value="unchecked")
    Map<Object,Object> map = (Map<Object,Object>)datum;
    for (Map.Entry<Object,Object> entry : map.entrySet()) {
      toString(entry.getKey(), buffer);
      buffer.append(": ");
      toString(entry.getValue(), buffer);
      if (++count < map.size())
        buffer.append(", ");
    }
    buffer.append("}");
  } else if (datum instanceof CharSequence
             || datum instanceof GenericEnumSymbol) {
    buffer.append("\"");
    writeEscapedString(datum.toString(), buffer);
    buffer.append("\"");
  } else if (datum instanceof ByteBuffer) {
    // bytes are emitted raw, one char per byte, between position and limit;
    // NOTE(review): assumes the consumer interprets chars as ISO-8859-1-style
    // byte values — confirm.
    buffer.append("{\"bytes\": \"");
    ByteBuffer bytes = (ByteBuffer)datum;
    for (int i = bytes.position(); i < bytes.limit(); i++)
      buffer.append((char)bytes.get(i));
    buffer.append("\"}");
  } else {
    // numbers, booleans, null: rely on String.valueOf semantics
    buffer.append(datum);
  }
}
/* Adapted from http://code.google.com/p/json-simple */
private void writeEscapedString(String string, StringBuilder builder) {
for(int i = 0; i < string.length(); i++){
char ch = string.charAt(i);
switch(ch){
case '"':
builder.append("\\\"");
break;
case '\\':
builder.append("\\\\");
break;
case '\b':
builder.append("\\b");
break;
case '\f':
builder.append("\\f");
break;
case '\n':
builder.append("\\n");
break;
case '\r':
builder.append("\\r");
break;
case '\t':
builder.append("\\t");
break;
case '/':
builder.append("\\/");
break;
default:
// Reference: http://www.unicode.org/versions/Unicode5.1.0/
if((ch>='\u0000' && ch<='\u001F') || (ch>='\u007F' && ch<='\u009F') || (ch>='\u2000' && ch<='\u20FF')){
String hex = Integer.toHexString(ch);
builder.append("\\u");
for(int j = 0; j < 4-builder.length(); j++)
builder.append('0');
builder.append(string.toUpperCase());
} else {
builder.append(ch);
}
}
}
}
/** Create a schema given an example datum.
 * Works for homogeneous containers only; mixed-type arrays/maps and empty
 * containers are rejected because their element schema cannot be inferred. */
public Schema induce(Object datum) {
  if (datum instanceof IndexedRecord) {
    return ((IndexedRecord)datum).getSchema();
  } else if (datum instanceof Collection) {
    Schema elementType = null;
    for (Object element : (Collection<?>)datum) {
      if (elementType == null) {
        elementType = induce(element);
      } else if (!elementType.equals(induce(element))) {
        throw new AvroTypeException("No mixed type arrays.");
      }
    }
    if (elementType == null) {
      throw new AvroTypeException("Empty array: "+datum);
    }
    return Schema.createArray(elementType);
  } else if (datum instanceof Map) {
    @SuppressWarnings(value="unchecked")
    Map<Object,Object> map = (Map<Object,Object>)datum;
    Schema value = null;
    for (Map.Entry<Object,Object> entry : map.entrySet()) {
      if (value == null) {
        value = induce(entry.getValue());
      } else if (!value.equals(induce(entry.getValue()))) {
        throw new AvroTypeException("No mixed type map values.");
      }
    }
    if (value == null) {
      throw new AvroTypeException("Empty map: "+datum);
    }
    return Schema.createMap(value);
  } else if (datum instanceof GenericFixed) {
    // anonymous fixed schema sized to the datum
    return Schema.createFixed(null, null, null,
                              ((GenericFixed)datum).bytes().length);
  }
  else if (datum instanceof CharSequence) return Schema.create(Type.STRING);
  else if (datum instanceof ByteBuffer) return Schema.create(Type.BYTES);
  else if (datum instanceof Integer) return Schema.create(Type.INT);
  else if (datum instanceof Long) return Schema.create(Type.LONG);
  else if (datum instanceof Float) return Schema.create(Type.FLOAT);
  else if (datum instanceof Double) return Schema.create(Type.DOUBLE);
  else if (datum instanceof Boolean) return Schema.create(Type.BOOLEAN);
  else if (datum == null) return Schema.create(Type.NULL);
  else throw new AvroTypeException("Can't create schema for: "+datum);
}

/** Called by {@link GenericDatumReader#readRecord} to set a record fields
 * value to a record instance.  The default implementation is for {@link
 * IndexedRecord}.*/
public void setField(Object record, String name, int position, Object o) {
  ((IndexedRecord)record).put(position, o);
}

/** Called by {@link GenericDatumReader#readRecord} to retrieve a record
 * field value from a reused instance.  The default implementation is for
 * {@link IndexedRecord}.*/
public Object getField(Object record, String name, int position) {
  return ((IndexedRecord)record).get(position);
}

/** Return the index for a datum within a union.  Implemented with {@link
 * #instanceOf(Schema,Object)}; returns the first matching branch. */
public int resolveUnion(Schema union, Object datum) {
  int i = 0;
  for (Schema type : union.getTypes()) {
    if (instanceOf(type, datum))
      return i;
    i++;
  }
  throw new UnresolvedUnionException(union, datum);
}

/** Called by {@link #resolveUnion(Schema,Object)}.  May be overridden for
    alternate data representations.
    For RECORD, an unnamed schema matches any record datum. */
protected boolean instanceOf(Schema schema, Object datum) {
  switch (schema.getType()) {
  case RECORD:
    if (!isRecord(datum)) return false;
    return (schema.getName() == null) ||
      schema.getName().equals(getRecordSchema(datum).getName());
  case ENUM:    return isEnum(datum);
  case ARRAY:   return isArray(datum);
  case MAP:     return isMap(datum);
  case FIXED:   return isFixed(datum);
  case STRING:  return isString(datum);
  case BYTES:   return isBytes(datum);
  case INT:     return datum instanceof Integer;
  case LONG:    return datum instanceof Long;
  case FLOAT:   return datum instanceof Float;
  case DOUBLE:  return datum instanceof Double;
  case BOOLEAN: return datum instanceof Boolean;
  case NULL:    return datum == null;
  default: throw new AvroRuntimeException("Unexpected type: " +schema);
  }
}

/** Called by the default implementation of {@link #instanceOf}.*/
protected boolean isArray(Object datum) {
  return datum instanceof Collection;
}

/** Called by the default implementation of {@link #instanceOf}.*/
protected boolean isRecord(Object datum) {
  return datum instanceof IndexedRecord;
}

/** Called to obtain the schema of a record.  By default calls
 * {GenericContainer#getSchema().  May be overridden for alternate record
 * representations. */
protected Schema getRecordSchema(Object record) {
  return ((GenericContainer)record).getSchema();
}

/** Called by the default implementation of {@link #instanceOf}.*/
protected boolean isEnum(Object datum) {
  return datum instanceof GenericEnumSymbol;
}

/** Called by the default implementation of {@link #instanceOf}.*/
protected boolean isMap(Object datum) {
  return datum instanceof Map;
}

/** Called by the default implementation of {@link #instanceOf}.*/
protected boolean isFixed(Object datum) {
  return datum instanceof GenericFixed;
}

/** Called by the default implementation of {@link #instanceOf}.*/
protected boolean isString(Object datum) {
  return datum instanceof CharSequence;
}

/** Called by the default implementation of {@link #instanceOf}.*/
protected boolean isBytes(Object datum) {
  return datum instanceof ByteBuffer;
}
/** Compute a hash code according to a schema, consistent with {@link
 * #compare(Object,Object,Schema)}.
 * Null (incomplete) datum hashes to 0; record fields with order IGNORE are
 * skipped so hash stays consistent with compare. */
public int hashCode(Object o, Schema s) {
  if (o == null) return 0;                      // incomplete datum
  int hashCode = 1;
  switch (s.getType()) {
  case RECORD:
    IndexedRecord r = (IndexedRecord)o;
    for (Field f : s.getFields()) {
      if (f.order() == Field.Order.IGNORE)
        continue;
      hashCode = hashCodeAdd(hashCode, r.get(f.pos()), f.schema());
    }
    return hashCode;
  case ARRAY:
    Collection<?> a = (Collection<?>)o;
    Schema elementType = s.getElementType();
    for (Object e : a)
      hashCode = hashCodeAdd(hashCode, e, elementType);
    return hashCode;
  case UNION:
    // hash by the branch the datum actually resolves to
    return hashCode(o, s.getTypes().get(resolveUnion(s, o)));
  case ENUM:
    return s.getEnumOrdinal(o.toString());
  case NULL:
    return 0;
  case STRING:
    // normalize to Utf8 so String and Utf8 data hash identically
    return (o instanceof Utf8 ? o : new Utf8(o.toString())).hashCode();
  default:
    return o.hashCode();
  }
}

/** Add the hash code for an object into an accumulated hash code
 * (same 31*h + x scheme as java.util.List#hashCode). */
protected int hashCodeAdd(int hashCode, Object o, Schema s) {
  return 31*hashCode + hashCode(o, s);
}

/** Compare objects according to their schema.  If equal, return zero.  If
 * greater-than, return 1, if less than return -1.  Order is consistent with
 * that of {@link BinaryData#compare(byte[], int, byte[], int, Schema)}.
 * Maps are not comparable; union values order first by branch index. */
@SuppressWarnings(value="unchecked")
public int compare(Object o1, Object o2, Schema s) {
  if (o1 == o2) return 0;
  switch (s.getType()) {
  case RECORD:
    for (Field f : s.getFields()) {
      if (f.order() == Field.Order.IGNORE)
        continue;                               // ignore this field
      int pos = f.pos();
      String name = f.name();
      int compare =
        compare(getField(o1, name, pos), getField(o2, name, pos), f.schema());
      if (compare != 0)                         // not equal
        return f.order() == Field.Order.DESCENDING ? -compare : compare;
    }
    return 0;
  case ENUM:
    // NOTE(review): ordinal subtraction; safe only while ordinals stay small
    return s.getEnumOrdinal(o1.toString()) - s.getEnumOrdinal(o2.toString());
  case ARRAY:
    // element-wise comparison; the shorter array orders first on a tie
    Collection a1 = (Collection)o1;
    Collection a2 = (Collection)o2;
    Iterator e1 = a1.iterator();
    Iterator e2 = a2.iterator();
    Schema elementType = s.getElementType();
    while(e1.hasNext() && e2.hasNext()) {
      int compare = compare(e1.next(), e2.next(), elementType);
      if (compare != 0) return compare;
    }
    return e1.hasNext() ? 1 : (e2.hasNext() ? -1 : 0);
  case MAP:
    throw new AvroRuntimeException("Can't compare maps!");
  case UNION:
    int i1 = resolveUnion(s, o1);
    int i2 = resolveUnion(s, o2);
    return (i1 == i2)
      ? compare(o1, o2, s.getTypes().get(i1))
      : i1 - i2;
  case NULL:
    return 0;
  case STRING:
    // normalize to Utf8 so ordering matches binary (byte-wise) comparison
    Utf8 u1 = o1 instanceof Utf8 ? (Utf8)o1 : new Utf8(o1.toString());
    Utf8 u2 = o2 instanceof Utf8 ? (Utf8)o2 : new Utf8(o2.toString());
    return u1.compareTo(u2);
  default:
    return ((Comparable)o1).compareTo(o2);
  }
}
}
package com.easycodebox.common.enums.entity;
import com.easycodebox.common.enums.DetailEnum;
/** Deployment environment of a project; value/desc pairs back the DetailEnum contract. */
public enum ProjectEnv implements DetailEnum<String> {

    DEFAULT("DEFAULT", "私有环境"), // private/local development environment
    DEV("DEV", "开发环境"),         // development
    TEST("TEST", "测试环境"),       // testing
    PRE("PRE", "预发环境"),         // pre-production / staging
    PROD("PROD", "生产环境");       // production

    // stable code persisted/transferred for this environment
    private final String value;
    // human-readable description (Chinese UI text — runtime data, not translated)
    private final String desc;

    ProjectEnv(String value, String desc) {
        this.value = value;
        this.desc = desc;
    }

    public String getValue() {
        return this.value;
    }

    public String getDesc() {
        return this.desc;
    }

    /** Returns the enum constant name (not the class name, despite the method name). */
    @Override
    public String getClassName() {
        return this.name();
    }
}
package com.example.locationget.service;
import android.app.Service;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Handler;
import android.os.IBinder;
import android.preference.PreferenceManager;
import android.support.annotation.IntDef;
import com.baidu.location.BDLocation;
import com.baidu.location.BDLocationListener;
import com.baidu.location.LocationClient;
import com.baidu.location.LocationClientOption;
import com.example.locationget.MainActivity;
/**
 * Started (non-bound) Android service that runs Baidu location updates every
 * 5 seconds and publishes the latest fix into the "data" SharedPreferences
 * (keys: location, longitude, latitude, sms).
 * NOTE(review): binding is unsupported — onBind intentionally throws.
 */
public class LocateService extends Service {
    public LocationClient mLocationClient;
    // last formatted location text, written to SharedPreferences by the listener
    private String context = "";

    public LocateService() {
    }

    @Override
    public IBinder onBind(Intent intent) {
        // this service is start-only; clients must not bind
        throw new UnsupportedOperationException("Not yet implemented");
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        // (re)create the client and begin periodic location requests;
        // called on every startService(), so repeated starts replace the client
        mLocationClient = new LocationClient(getApplicationContext());
        mLocationClient.registerLocationListener(new MyLocationListener());
        requestLocation();
        return super.onStartCommand(intent, flags, startId);
    }

    private void requestLocation(){
        initLocation();
        mLocationClient.start();
    }

    private void initLocation() {
        LocationClientOption option = new LocationClientOption();
        // poll every 5000 ms and include reverse-geocoded address fields
        option.setScanSpan(5000);
        option.setIsNeedAddress(true);
        mLocationClient.setLocOption(option);
    }

    /** Receives Baidu SDK fixes and persists them to SharedPreferences. */
    public class MyLocationListener implements BDLocationListener {
        @Override
        public void onReceiveLocation(BDLocation location) {
            // build the human-readable summary (labels are user-facing Chinese text)
            StringBuilder currentPosition = new StringBuilder();
            currentPosition.append("经度:").append(location.getLongitude()).append("\n");
            currentPosition.append("纬度:").append(location.getLatitude()).append("\n");
            currentPosition.append("国家:").append(location.getCountry()).append("\n");
            currentPosition.append("省:").append(location.getProvince()).append("\n");
            currentPosition.append("市:").append(location.getCity()).append("\n");
            currentPosition.append("区:").append(location.getDistrict()).append("\n");
            currentPosition.append("街道:").append(location.getStreet()).append("\n");
            currentPosition.append("GPS时间:").append(location.getTime());
            context = currentPosition.toString();
            // compact "lon,lat" form used for SMS payloads
            StringBuilder sms = new StringBuilder();
            sms.append(location.getLongitude()).append(",").append(location.getLatitude());
            SharedPreferences.Editor editor = getSharedPreferences("data", MODE_PRIVATE).edit();
            editor.putString("location", context);
            editor.putString("longitude", "" + location.getLongitude());
            editor.putString("latitude", "" + location.getLatitude());
            editor.putString("sms", sms.toString());
            editor.apply();
        }

        @Override
        public void onConnectHotSpotMessage(String s, int i) {
            // hotspot connection events are not used
        }
    }

    @Override
    public void onDestroy() {
        // stop SDK polling to avoid leaking updates after the service dies
        mLocationClient.stop();
        super.onDestroy();
    }
}
package com.fishercoder;
import com.fishercoder.solutions._174;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/** Tests for LeetCode 174 (Dungeon Game): minimum initial HP to survive the grid. */
public class _174Test {
    private static _174.Solution1 solution1;
    // dungeon grid fixture; negative cells damage the knight
    private int[][] dungeon;

    @BeforeClass
    public static void setup() {
        solution1 = new _174.Solution1();
    }

    @Test
    public void test1() {
        // single harmless cell still requires the minimum of 1 HP
        dungeon = new int[][] {
                {0}
        };
        assertEquals(1, solution1.calculateMinimumHP(dungeon));
    }

    @Test
    public void test2() {
        // must survive 200 damage and end with at least 1 HP
        dungeon = new int[][] {
                {-200}
        };
        assertEquals(201, solution1.calculateMinimumHP(dungeon));
    }
}
package decorator_assignment;
import java.util.ArrayList;
import java.util.Scanner;
import interfaces.Receipt;
/**
 * Console client for the decorator assignment: builds a receipt from a fixed
 * catalog of items and prints it via the ReceiptFactory decorators.
 */
public class Client {
    @SuppressWarnings({ "unchecked", "null", "resource" })
    public static void main(String[] args){
        Scanner in = new Scanner(System.in);
        PurchasedItems items = null;
        ReceiptDate date = new ReceiptDate(11, 5, 2015);
        // fixed two-item catalog shown in the add-items menu
        ArrayList<Item> list = new ArrayList<Item>();
        list.add(new Item("1406 Macbook pro", 999.99));
        list.add(new Item("1310 HP pavillion", 599.99));
        int entry = 0;
        while (entry != 4) {
            System.out.println("1-Start new receipt");
            System.out.println("2-Add sales items");
            System.out.println("3-Display Receipt");
            System.out.println("4-Exit");
            entry = in.nextInt();
            switch (entry) {
                case 1:
                    // BUG FIX: the old code set items = null, so option 2
                    // always crashed with a NullPointerException in addItems.
                    // Assumes PurchasedItems has a no-arg constructor — TODO confirm.
                    items = new PurchasedItems();
                    break;
                case 2:
                    addItems(list, items);
                    break;
                case 3:
                    ReceiptFactory factory = null;
                    try {
                        factory = new ReceiptFactory(items, date);
                    } catch (UnknownAddOnType e) {
                        // report instead of silently swallowing, then skip printing
                        System.out.println("Could not build receipt: " + e);
                    }
                    // BUG FIX: guard against the factory being null after the
                    // exception above (previously an NPE on getReceipt()).
                    if (factory != null) {
                        Receipt receipt = factory.getReceipt();
                        receipt.prtReceipt();
                    }
                    break;
                case 4:
                    break;
            }
        }
    }

    /** Interactive loop adding catalog items to the receipt until 10 is entered. */
    private static void addItems(ArrayList<Item> list, PurchasedItems items) {
        // BUG FIX: reject calls before a receipt was started (option 1)
        if (items == null) {
            System.out.println("Start a new receipt first (option 1).");
            return;
        }
        Scanner in = new Scanner(System.in);
        int i = 0;
        for (Item a : list) {
            System.out.println(i + ": " + a.getDescription() + " " + a.getPrice() + "\n");
            i++;
        }
        boolean loop = true;
        int choice;
        while (loop) {
            System.out.println("Which item would you like to add? (enter number) or 10 to exit");
            choice = in.nextInt();
            if (choice == 10)
                loop = false;
            else if (choice >= 0 && choice < list.size())
                items.addItem(list.get(choice));
            else
                // BUG FIX: out-of-range choices used to throw IndexOutOfBoundsException
                System.out.println("Invalid item number.");
        }
    }
}
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.forscience.whistlepunk.api.scalarinput;
import android.os.RemoteException;
import androidx.annotation.NonNull;
import com.google.android.apps.forscience.javalib.Consumer;
import com.google.android.apps.forscience.whistlepunk.MockScheduler;
import com.google.android.apps.forscience.whistlepunk.SensorProvider;
import com.google.android.apps.forscience.whistlepunk.devicemanager.SensorDiscoverer;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.util.concurrent.MoreExecutors;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executor;
/**
 * Fake {@link ISensorDiscoverer} for tests. Devices and sensors are registered up front via
 * {@link #addDevice} and {@link #addSensor}; scan calls then "discover" them by posting the
 * delivery callbacks onto the supplied {@link Executor} (a direct executor by default, so
 * callbacks run synchronously unless a test injects its own executor).
 */
public class TestSensorDiscoverer extends ISensorDiscoverer.Stub {
  /** Executor on which all discovery callbacks are delivered. */
  private final Executor executor;
  /** Human-readable service name reported by {@link #getName()}. */
  private final String serviceName;
  /** Registered fake devices, delivered in insertion order by {@link #scanDevices}. */
  private final List<Device> devices = new ArrayList<>();
  /** Registered fake sensors, keyed by the id of the device that owns them. */
  private final Multimap<String, TestSensor> sensors = HashMultimap.create();

  /** Creates a discoverer that delivers callbacks synchronously on the calling thread. */
  public TestSensorDiscoverer(String serviceName) {
    this(serviceName, MoreExecutors.directExecutor());
  }

  public TestSensorDiscoverer(String serviceName, Executor executor) {
    this.serviceName = serviceName;
    this.executor = executor;
  }

  /** Builds a {@link ScalarInputDiscoverer} wired to find only this fake service. */
  @NonNull
  public ScalarInputDiscoverer makeScalarInputDiscoverer(
      final String serviceId, Executor uiThread) {
    return new ScalarInputDiscoverer(
        makeFinder(serviceId),
        new TestStringSource(),
        uiThread,
        new MockScheduler(),
        100,
        new RecordingUsageTracker());
  }

  /** Returns a finder that immediately reports this discoverer under {@code serviceId}. */
  @NonNull
  public Consumer<AppDiscoveryCallbacks> makeFinder(final String serviceId) {
    // Kept as an anonymous class: the project Consumer type is not guaranteed to be a
    // functional interface.
    return new Consumer<AppDiscoveryCallbacks>() {
      @Override
      public void take(AppDiscoveryCallbacks adc) {
        adc.onServiceFound(serviceId, TestSensorDiscoverer.this);
        adc.onDiscoveryDone();
      }
    };
  }

  @Override
  public String getName() throws RemoteException {
    return serviceName;
  }

  /** Registers a fake device to be reported by subsequent {@link #scanDevices} calls. */
  public void addDevice(String deviceId, String name) {
    devices.add(new Device(deviceId, name));
  }

  @Override
  public void scanDevices(final IDeviceConsumer c) throws RemoteException {
    for (final Device device : devices) {
      executor.execute(() -> device.deliverTo(c));
    }
    onDevicesDone(c);
  }

  /** Posts the end-of-scan signal; RemoteException is rethrown as unchecked in tests. */
  protected void onDevicesDone(final IDeviceConsumer c) {
    executor.execute(
        () -> {
          try {
            c.onScanDone();
          } catch (RemoteException e) {
            throw new RuntimeException(e);
          }
        });
  }

  /** Registers a fake sensor under the given device id. */
  public void addSensor(String deviceId, TestSensor sensor) {
    sensors.put(deviceId, sensor);
  }

  /** Removes every sensor of {@code deviceId} whose address equals {@code address}. */
  public void removeSensor(String deviceId, String address) {
    // Multimap.get returns a live view, so removeIf mutates the underlying multimap.
    sensors.get(deviceId).removeIf(sensor -> sensor.getSensorAddress().equals(address));
  }

  @Override
  public void scanSensors(String deviceId, final ISensorConsumer c) throws RemoteException {
    for (final TestSensor sensor : sensors.get(deviceId)) {
      executor.execute(() -> sensor.deliverTo(c));
    }
    onSensorsDone(c);
  }

  /** Posts the end-of-scan signal; RemoteException is rethrown as unchecked in tests. */
  protected void onSensorsDone(final ISensorConsumer c) {
    executor.execute(
        () -> {
          try {
            c.onScanDone();
          } catch (RemoteException e) {
            throw new RuntimeException(e);
          }
        });
  }

  @Override
  public ISensorConnector getConnector() throws RemoteException {
    return null;
  }

  /** Returns a discoverer map containing a single scalar-input entry for {@code serviceId}. */
  @NonNull
  public Map<String, SensorDiscoverer> makeDiscovererMap(String serviceId) {
    ScalarInputDiscoverer sid =
        makeScalarInputDiscoverer(serviceId, MoreExecutors.directExecutor());
    Map<String, SensorDiscoverer> discoverers = new HashMap<>();
    discoverers.put(ScalarInputSpec.TYPE, sid);
    return discoverers;
  }

  /** Returns the providers of {@link #makeDiscovererMap}, keyed by the same sensor types. */
  @NonNull
  public Map<String, SensorProvider> makeProviderMap(String serviceId) {
    Map<String, SensorProvider> providers = new HashMap<>();
    Map<String, SensorDiscoverer> discoverers = makeDiscovererMap(serviceId);
    for (Map.Entry<String, SensorDiscoverer> entry : discoverers.entrySet()) {
      providers.put(entry.getKey(), entry.getValue().getProvider());
    }
    return providers;
  }

  /**
   * Immutable fake device record. Made static: it never touches the enclosing instance, so a
   * non-static inner class would only hold a needless reference to the discoverer.
   */
  private static class Device {
    private final String deviceId;
    private final String name;

    public Device(String deviceId, String name) {
      this.deviceId = deviceId;
      this.name = name;
    }

    /** Delivers this device to the consumer, wrapping RemoteException as unchecked. */
    public void deliverTo(IDeviceConsumer c) {
      try {
        c.onDeviceFound(deviceId, name, null);
      } catch (RemoteException e) {
        throw new RuntimeException(e);
      }
    }
  }
}
| apache-2.0 |
Clinical3PO/Platform | dev/c3po-angularjs-visualization/rule/src/main/java/org/clinical3po/backendservices/rule/dependency/AddDependencyRule.java | 4898 | /*
* Copyright 2015 Clinical Personalized Pragmatic Predictions of Outcomes.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.clinical3po.backendservices.rule.dependency;
import org.clinical3po.backendservices.rule.Rule;
import org.clinical3po.backendservices.rule.page.AbstractPageRule;
import org.clinical3po.backendservices.server.DbService;
import org.clinical3po.backendservices.util.ServiceLocator;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.impls.orient.OrientGraph;
import java.util.Map;
/**
* Created by w.ding on 3/9/2015.
*
 * For admin or ruleAdmin users, the destination rule (dest) may only be selected from rules
 * belonging to the host. However, if the source rule is not publishing the message, nothing is
 * returned.
*
* AccessLevel R [owner, admin, ruleAdmin]
*
*/
public class AddDependencyRule extends AbstractDependencyRule implements Rule {
    /**
     * Validates and records a new dependency between two rules in the graph database.
     * On success, populates an event map in {@code inputMap} describing the dependency;
     * on failure, sets {@code result} to an error message and {@code responseCode}
     * (400 for bad input, 403 for authorization failures) and returns false.
     *
     * @param objects objects[0] is the request map containing "data" (host/source/desc/content)
     *                and "payload" (the authenticated user)
     * @return true when the event map was populated, false when validation failed
     * @throws Exception any database error, after logging; the graph is always shut down
     */
    public boolean execute (Object ...objects) throws Exception {
        Map<String, Object> inputMap = (Map<String, Object>)objects[0];
        Map<String, Object> data = (Map<String, Object>)inputMap.get("data");
        String host = (String)data.get("host");
        // Record id of the source rule vertex.
        String source = (String)data.get("source");
        // NOTE(review): reads key "desc" but the variable, javadoc and error handling all
        // speak of "dest" — confirm against the request schema whether "desc" is a typo.
        String dest = (String)data.get("desc");
        String error = null;
        Map<String, Object> payload = (Map<String, Object>) inputMap.get("payload");
        Map<String, Object> user = (Map<String, Object>)payload.get("user");
        String userHost = (String)user.get("host");
        OrientGraph graph = ServiceLocator.getInstance().getGraph();
        try {
            Vertex sourceRule = DbService.getVertexByRid(graph, source);
            Vertex destRule = DbService.getVertexByRid(graph, dest);
            if(sourceRule == null || destRule == null) {
                error = "source rule or destination rule doesn't exist";
                inputMap.put("responseCode", 400);
            } else {
                String sourceRuleClass = sourceRule.getProperty("ruleClass");
                String destRuleClass = destRule.getProperty("ruleClass");
                if(userHost != null) {
                    if (!userHost.equals(host)) {
                        // Host-bound users may only create dependencies on their own host.
                        error = "You can only add dependency from host: " + host;
                        inputMap.put("responseCode", 403);
                    } else {
                        // make sure dest ruleClass contains host.
                        if(!destRuleClass.contains(host)) {
                            error = "Destination rule doesn't belong to the host " + host;
                            inputMap.put("responseCode", 403);
                        } else {
                            // check if there is an depend edge from source to dest
                            // NOTE(review): the loop iterates edges labeled "Own" while the
                            // comment and error message refer to a "depend" edge — confirm
                            // which edge label actually models rule dependencies.
                            boolean hasEdge = false;
                            for (Edge edge : (Iterable<Edge>) sourceRule.getEdges(Direction.OUT, "Own")) {
                                if(edge.getVertex(Direction.IN) == destRule) hasEdge = true;
                            }
                            if(hasEdge) {
                                error = "There is depend edge between source rule and dest rule";
                                inputMap.put("responseCode", 400);
                            } else {
                                // All checks passed: build the event describing the new
                                // dependency for downstream processing.
                                Map eventMap = getEventMap(inputMap);
                                Map<String, Object> eventData = (Map<String, Object>)eventMap.get("data");
                                inputMap.put("eventMap", eventMap);
                                eventData.put("sourceRuleClass", sourceRuleClass);
                                eventData.put("destRuleClass", destRuleClass);
                                eventData.put("content", data.get("content"));
                                eventData.put("createDate", new java.util.Date());
                                eventData.put("createUserId", user.get("userId"));
                            }
                        }
                    }
                }
            }
        } catch (Exception e) {
            logger.error("Exception:", e);
            throw e;
        } finally {
            // Always release the graph handle, success or failure.
            graph.shutdown();
        }
        if(error != null) {
            inputMap.put("result", error);
            return false;
        } else {
            return true;
        }
    }
}
| apache-2.0 |
moosbusch/xbLIDO | src/org/w3/x2001/smil20/impl/RestartTimingTypeImpl.java | 1267 | /*
* Copyright 2013 Gunnar Kappei.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.w3.x2001.smil20.impl;
/**
* An XML restartTimingType(@http://www.w3.org/2001/SMIL20/).
*
* This is an atomic type that is a restriction of org.w3.x2001.smil20.RestartTimingType.
*/
public class RestartTimingTypeImpl extends org.apache.xmlbeans.impl.values.JavaStringEnumerationHolderEx implements org.w3.x2001.smil20.RestartTimingType
{
    private static final long serialVersionUID = 1L;

    /**
     * Creates a mutable (non-fixed) value instance backed by the given schema type.
     *
     * @param sType the XMLBeans schema type this value conforms to
     */
    public RestartTimingTypeImpl(org.apache.xmlbeans.SchemaType sType)
    {
        super(sType, false);
    }

    /**
     * Creates a value instance, optionally marked fixed, for use by generated subclasses.
     *
     * @param sType the XMLBeans schema type this value conforms to
     * @param b     true to create a fixed (immutable) value
     */
    protected RestartTimingTypeImpl(org.apache.xmlbeans.SchemaType sType, boolean b)
    {
        super(sType, b);
    }
}
| apache-2.0 |
rpgmakervx/slardar | src/main/java/org/easyarch/slardar/session/impl/MapperDBSession.java | 2969 | package org.easyarch.slardar.session.impl;
import org.easyarch.slardar.cache.ProxyCache;
import org.easyarch.slardar.cache.factory.ProxyCacheFactory;
import org.easyarch.slardar.jdbc.exec.AbstractExecutor;
import org.easyarch.slardar.jdbc.handler.BaseTypeResultSetHandler;
import org.easyarch.slardar.jdbc.handler.BeanListResultSetHadler;
import org.easyarch.slardar.jdbc.handler.BeanResultSetHadler;
import org.easyarch.slardar.jdbc.handler.MapResultHandler;
import org.easyarch.slardar.mapping.MapperProxyFactory;
import org.easyarch.slardar.session.Configuration;
import org.easyarch.slardar.session.DBSessionAdapter;
import java.util.List;
import java.util.Map;
/**
* Description :
* Created by xingtianyu on 17-1-30
* 下午6:54
* description:
*/
/**
 * {@link DBSessionAdapter} implementation that runs SQL through an {@link AbstractExecutor}
 * and hands out mapper proxies built from the session {@link Configuration}.
 */
public class MapperDBSession extends DBSessionAdapter {

    private final ProxyCacheFactory factory;
    private final Configuration configuration;
    private final AbstractExecutor executor;

    public MapperDBSession(Configuration configuration, AbstractExecutor executor) {
        this.configuration = configuration;
        this.executor = executor;
        this.factory = ProxyCacheFactory.getInstance();
    }

    /** Runs the query and maps the first row onto a single bean of the given class. */
    @Override
    public <T> T selectOne(String sql, Class<T> clazz, Object... parameters) {
        return executor.query(sql, new BeanResultSetHadler<T>(clazz), parameters);
    }

    /** Runs the query and maps every row onto a bean of the given class. */
    @Override
    public <E> List<E> selectList(String sql, Class<E> clazz, Object... parameters) {
        return executor.query(sql, new BeanListResultSetHadler<>(clazz), parameters);
    }

    /** Runs a count-style query and returns its single integer result. */
    @Override
    public int selectCount(String sql, Object... parameters) {
        return executor.query(sql, new BaseTypeResultSetHandler<>(Integer.class), parameters);
    }

    /** Runs the query and returns each row as a column-name-to-value map. */
    @Override
    public List<Map<String, Object>> selectMap(String sql, Object... parameters) {
        return executor.query(sql, new MapResultHandler(), parameters);
    }

    /** Executes a data-altering statement, returning the affected row count. */
    @Override
    public int update(String sql, Object... parameter) {
        return executor.alter(sql, parameter);
    }

    /** Delegates to {@link #update}: deletes are just altering statements. */
    @Override
    public int delete(String sql, Object... parameter) {
        return update(sql, parameter);
    }

    /** Delegates to {@link #update}: inserts are just altering statements. */
    @Override
    public int insert(String sql, Object... parameter) {
        return update(sql, parameter);
    }

    /**
     * Returns a mapper proxy for the given interface, serving it from the proxy cache
     * when available and otherwise building a fresh one bound to this session.
     */
    @Override
    public <T> T getMapper(Class<T> clazz) {
        ProxyCache cache = factory.createCache(configuration.getCacheEntity());
        if (cache.isHit(clazz)) {
            return (T) cache.get(clazz);
        }
        return new MapperProxyFactory<T>(configuration, clazz).newInstance(this);
    }

    @Override
    public Configuration getConfiguration() {
        return configuration;
    }

    /** Releases the underlying executor resources. */
    @Override
    public void close() {
        executor.close();
    }

    /** Rolls back the current transaction on the underlying executor. */
    @Override
    public void rollback() {
        executor.rollback();
    }
}
| apache-2.0 |
aysenurbilgin/cww_framework | src/fcc/FCCResource.java | 13282 | /*
* Copyright (c) 2015 Aysenur Bilgin
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package fcc;
import fuzzylogic.generic.Resource;
import fuzzylogic.generic.Tuple;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
/**
* FCCResource is a data type finding its own labels
* given the range of inputs and also finding the critical values such as the min, max, most prevailing etc.
* @author abilgin
*/
public class FCCResource implements Serializable{

    private Map<Double, Integer> experience; //holds the numeric past data as experience in a structured way with the count of occurrences
    // Modifier index -> modifier word (e.g. 0 -> "very"); combined with the perception words.
    private Map<Integer, String> codebook;
    // Labelled partitions of the experience, built by setAnalytics().
    private ArrayList<Resource> resources;
    // [min, max] support interval of the observed experience values.
    private Tuple support;
    // Linguistic variable providing the negative/positive perception words.
    private Word linguistics;
    private double min;
    private double max;

    /**
     * Builds the resource from raw experience data: stores occurrence counts,
     * then partitions the data into labelled resources via setAnalytics().
     *
     * @param linguisticVarData linguistic variable supplying perception words
     * @param codebook          modifier index -> modifier word mapping (copied)
     * @param experienceData    raw numeric observations
     */
    public FCCResource(Word linguisticVarData, Map<Integer, String> codebook, ArrayList<Double> experienceData) {
        this.linguistics = linguisticVarData;
        this.experience = new TreeMap<Double,Integer>();
        this.codebook = new TreeMap<Integer, String>();
        this.codebook.putAll(codebook);
        this.resources = new ArrayList<Resource>();
        this.support = new Tuple();
        setAllExperience(experienceData);
        setAnalytics();
    }

    /**
     * Copy constructor.
     * NOTE(review): experience, codebook and resources end up sharing the source
     * object's backing collections (the freshly created maps are immediately
     * overwritten by the getter references), so this is a shallow copy — confirm
     * whether a deep copy was intended.
     */
    public FCCResource(FCCResource fccres) {
        this.experience = new TreeMap<Double, Integer>();
        this.experience = fccres.getExperience();
        this.codebook = new TreeMap<Integer, String>();
        this.codebook = fccres.getCodebook();
        this.resources = new ArrayList<Resource>();
        this.resources = fccres.getResources();
        this.support = new Tuple(fccres.getSupport());
        this.linguistics = new Word(fccres.getLinguistics());
        this.min = fccres.getMin();
        this.max = fccres.getMax();
    }

    /**
     * Get the raw experience data and store it in a structured way with occurrences
     * @param rawExperience
     */
    private void setAllExperience(ArrayList<Double> rawExperience) {
        double val;
        // NOTE(review): count is incremented but never read — presumably leftover debug state.
        int count = 0;
        for (int i = 0; i < rawExperience.size(); i++) {
            val = rawExperience.get(i);
            if( !this.experience.containsKey(val)) {
                this.experience.put(val, 0 );
            }
            this.experience.put(val, this.experience.get(val) + 1);
            count++;
        }
    }

    /**
     * Records one new observation and fully rebuilds the derived analytics
     * (resources, support, min/max) from scratch.
     */
    public void addToExperience(double val) {
        if( !this.experience.containsKey(val)) {
            this.experience.put(val, 0 );
        }
        this.experience.put(val, this.experience.get(val) + 1);
        resetForAnalytics();
        setAnalytics();
    }

    /**
     * Partitions the experience around its weighted average into a negative and a
     * positive perception, splits each side into codebook-sized labelled resources,
     * corrects duplicate prevailing values, and records the support interval.
     */
    private void setAnalytics() {
        //find the relevant data from the experience
        if (!this.experience.isEmpty()) {
            //find the weighted average
            double avg = findAverage();
            // NOTE(review): raw TreeMap (no type arguments) — relies on unchecked conversion.
            Map<Double, Integer> negTemp = new TreeMap();
            Map<Double, Integer> posTemp = new TreeMap();
            //divide experience into two perceptions according to the average value
            for (Double key: this.experience.keySet()) {
                int value = this.experience.get(key);
                //if the value is smaller than average put it on the negative perception
                if (key <= avg) {
                    negTemp.put(key, value);
                }
                else {
                    posTemp.put(key, value);
                }
            }
            //then further divide the negative and positive perception into modifiers
            //the criteria is to break the stream into the number of codebook contents
            int minnumberofelements = (int) Math.ceil((double)negTemp.size()/(double)codebook.size());
            String s;
            Map<Double, Integer> partialresource = new TreeMap<Double, Integer>();
            int counter = 1, modifiercode = 0;
            for (Map.Entry <Double, Integer> current : negTemp.entrySet()) {
                partialresource.put(current.getKey(), current.getValue());
                //then it is time to change the resource
                if (counter >= minnumberofelements) {
                    // Label = "<modifier> <negative perception>", e.g. "very low".
                    s = (new StringBuilder()).append(codebook.get(modifiercode)).append(" ").append(this.linguistics.getNegPerception()).toString();
                    this.resources.add(new Resource(s, partialresource));
                    //System.out.println("FCCResource - resources: "+this.resources.toString());
                    partialresource = new TreeMap<Double, Integer>();
                    modifiercode++;
                    counter = 0;
                }
                counter++;
            }
            //for the last set of resources repeat the operation
            //if you have not consumed all of the modifiers!!
            while (codebook.get(modifiercode)!= null) {
                s = (new StringBuilder()).append(codebook.get(modifiercode)).append(" ").append(this.linguistics.getNegPerception()).toString();
                //if the prevailing value is smaller than the previous one
                //it means that there not enough members in the array
                //so duplicate the values but not the string
                if (partialresource.isEmpty()) {
                    partialresource = this.resources.get(this.resources.size()-1).getExperience();
                }
                this.resources.add(new Resource(s, partialresource));
                modifiercode++;
            }
            //repeat for the right perception
            minnumberofelements = (int) Math.ceil((double)posTemp.size()/(double)codebook.size());
            partialresource = new TreeMap<Double, Integer>();
            counter = 1;
            // Positive side walks the codebook backwards (strongest modifier last).
            modifiercode = codebook.size()-1;
            for (Map.Entry <Double, Integer> current : posTemp.entrySet()) {
                partialresource.put(current.getKey(), current.getValue());
                //then it is time to change the resource
                if (counter >= minnumberofelements) {
                    s = (new StringBuilder()).append(codebook.get(modifiercode)).append(" ").append(this.linguistics.getPosPerception()).toString();
                    this.resources.add(new Resource(s, partialresource));
                    partialresource = new TreeMap<Double, Integer>();
                    modifiercode--;
                    counter = 0;
                }
                counter++;
            }
            //for the last set of resources repeat the operation
            while (codebook.get(modifiercode) != null) {
                s = (new StringBuilder()).append(codebook.get(modifiercode)).append(" ").append(this.linguistics.getPosPerception()).toString();
                if (partialresource.isEmpty()) {
                    partialresource = this.resources.get(this.resources.size()-1).getExperience();
                }
                this.resources.add(new Resource(s, partialresource));
                modifiercode--;
            }
            reviseAndCorrectRecource();
            double mintemp = Double.POSITIVE_INFINITY, maxtemp = Double.NEGATIVE_INFINITY;
            //try finding the min max from experience not resource
            for (Double d: this.experience.keySet()) {
                if(d < mintemp) {
                    mintemp = d;
                }
                if(d > maxtemp) {
                    maxtemp = d;
                }
            }
            this.min = mintemp;
            this.max = maxtemp;
            this.support = new Tuple(min, max);
        }
    }

    /**
     * Returns the occurrence-weighted average of the experience values.
     */
    private double findAverage() {
        double sumnom = 0.0, sumdenom = 0.0;
        for (Double key : this.experience.keySet()) {
            sumnom += key * this.experience.get(key);
            sumdenom += this.experience.get(key);
        }
        return sumnom/sumdenom;
    }

    public Map<Integer, String> getCodebook() {
        return this.codebook;
    }

    public Map<Double, Integer> getExperience() {
        return this.experience;
    }

    public ArrayList<Resource> getResources() {
        return this.resources;
    }

    public int getNumberofResources() {
        return this.resources.size();
    }

    public Tuple getSupport() {
        return this.support;
    }

    public double getMin() {
        return this.min;
    }

    public double getMax() {
        return this.max;
    }

    public Word getLinguistics() {
        return this.linguistics;
    }

    @Override
    public String toString() {
        String s = "";
        s = (new StringBuilder()).append(s).append("FCCResource: ").append(this.experience.toString()).toString();
        return s;
    }

    // Clears the derived state so setAnalytics() can rebuild it from experience.
    private void resetForAnalytics() {
        this.resources = new ArrayList<Resource>();
        this.support = new Tuple();
    }

    /**
     * Method looks at the resource and tries to differentiate between the values for prevailing numbers
     * because in the future when they are put into a map, data is lost
     * also can be regarded as forcing the number of elements to be processed to be the same throughout the entire implementation
     * the idea is simply assumed to add a dummy value to each
     *
     * NOTE(review): the method name contains a typo ("Recource") that is kept for
     * compatibility; also note the exact double comparison (d == prevailing value)
     * below, which only works because the compared doubles originate from the same
     * computation — confirm before refactoring.
     */
    private void reviseAndCorrectRecource() {
        boolean duplicateflag = false;
        // Count how many resources share each prevailing value.
        Map<Double, Integer> occmap = new TreeMap<Double, Integer>();
        for (Resource r: this.resources) {
            if (!occmap.containsKey(r.getPrevailingValue())) {
                occmap.put(r.getPrevailingValue(), 0);
            }
            else {
                duplicateflag = true;
            }
            occmap.put(r.getPrevailingValue(), occmap.get(r.getPrevailingValue()) + 1);
        }
        if (!duplicateflag) {
            return;
        }
        //now correction should be applied
        //find the values having more than 1 occurrence
        Iterator<Map.Entry<Double, Integer>> it = occmap.entrySet().iterator();
        Map<Double, Integer> occmapcopy = new TreeMap<Double, Integer>();
        //create a copy of the map
        for (Map.Entry<Double, Integer> e: occmap.entrySet()) {
            occmapcopy.put(e.getKey(), e.getValue());
        }
        // Second iterator over the copy: used to peek at the key after the current one.
        Iterator<Map.Entry<Double, Integer>> innerit = occmapcopy.entrySet().iterator();
        double prev = -1.0;
        while (it.hasNext()) {
            Map.Entry entry = (Map.Entry)it.next();
            Map.Entry entryin = (Map.Entry)innerit.next();
            double d = (Double)entry.getKey();
            if (occmap.get(d) > 1) {
                //then d has a duplicate
                //find the value d in resources and adapt it
                //System.out.println("Duplicate found!!");
                for (int i = 0; i < this.resources.size(); i++) {
                    if (d == this.resources.get(i).getPrevailingValue()) {
                        //check whether there is a value after it
                        double afterval, inc;
                        if (it.hasNext()) {
                            // Spread the duplicates evenly towards the next distinct value.
                            entryin = (Map.Entry)innerit.next();
                            afterval = (Double)entryin.getKey();
                            inc = (afterval - d) / (2.0D * (occmap.get(d)-1));
                            for (int j = 0; j < occmap.get(d)-1; j++) {
                                this.resources.get(i+j+1).setPrevailingValue(d+(inc*(j+1)));
                                this.resources.get(i+j+1).setWeightedAvg(d+(inc*(j+1)));
                            }
                            i = i + occmap.get(d)-1;
                        }
                        else {
                            //since this is the last one decrease it
                            //apply the same idea with the prev value
                            inc = (d - prev) / (2.0D * (occmap.get(d)-1));
                            for (int j = occmap.get(d)-1; j > 0; j--) {
                                this.resources.get(i+j-1).setPrevailingValue(d-(inc*j));
                                this.resources.get(i+j-1).setWeightedAvg(d-(inc*j));
                            }
                            i = i + occmap.get(d)-1;
                        }
                    }
                    else {
                        //System.out.println("Do nothing!");
                    }
                }//end for
                //reset the iterator
                innerit = occmapcopy.entrySet().iterator();
            }
            prev = d;
        }// end while
    }
}
| apache-2.0 |
Communote/communote-server | communote/persistence/src/main/java/com/communote/server/core/user/UserProfileDetails.java | 5701 | package com.communote.server.core.user;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Locale;
import com.communote.server.model.user.UserName;
import com.communote.server.model.user.UserStatus;
/**
* Details of a user
*
* @author Communote GmbH - <a href="http://www.communote.com/">http://www.communote.com/</a>
*/
public class UserProfileDetails implements UserName, Serializable {

    private static final long serialVersionUID = 1L;

    private Long userId = null;
    private String userAlias = null;
    private String firstName = null;
    private String lastName = null;
    private String salutation = null;
    private UserStatus userStatus = null;
    private Locale userLocale = null;
    private String timeZoneId = null;
    // Maps an external system ID to the user's ID within that system.
    // Declared as the Map interface (program to interfaces); backed by a HashMap.
    private final Map<String, String> externalUserIds;

    /**
     * Construct the <code>User</code> with the details
     *
     * @param userId
     *            The user id, must not be null
     * @param userAlias
     *            The user alias, must not be null
     */
    public UserProfileDetails(Long userId, String userAlias) {
        super();
        externalUserIds = new HashMap<String, String>();
        this.setUserId(userId);
        this.setUserAlias(userAlias);
    }

    /**
     * Stores the user ID that a user has within an external system.
     *
     * @param externalSystemId
     *            the ID of the external system
     * @param externalUserId
     *            the ID of the user within the external system identified by the externalSystemID
     */
    public void addExternalUserId(String externalSystemId, String externalUserId) {
        externalUserIds.put(externalSystemId, externalUserId);
    }

    /**
     * Returns the ID of the user within an external system.
     *
     * @param externalSystemId
     *            the ID of the external system
     * @return the ID of the user within the external system identified by the externalSystemID or
     *         null if the user is not a user of the external system
     */
    public String getExternalUserId(String externalSystemId) {
        return externalUserIds.get(externalSystemId);
    }

    /**
     * @return the firstName
     */
    @Override
    public String getFirstName() {
        return firstName;
    }

    /**
     * @return the lastName
     */
    @Override
    public String getLastName() {
        return lastName;
    }

    /**
     * @return the salutation
     */
    @Override
    public String getSalutation() {
        return salutation;
    }

    /**
     * @return the timeZoneId
     */
    public String getTimeZoneId() {
        return timeZoneId;
    }

    /**
     * The alias of the user
     *
     * @return The user alias, never null
     */
    public String getUserAlias() {
        return userAlias;
    }

    /**
     * The user id of the user
     *
     * @return The user ID, never null
     */
    public Long getUserId() {
        return userId;
    }

    /**
     * @return the userLocale
     */
    public Locale getUserLocale() {
        return userLocale;
    }

    /**
     * @return the userStatus
     */
    public UserStatus getUserStatus() {
        return userStatus;
    }

    /**
     * @param firstName
     *            the firstName to set
     */
    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    /**
     * @param lastName
     *            the lastName to set
     */
    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    /**
     * @param salutation
     *            the salutation to set
     */
    public void setSalutation(String salutation) {
        this.salutation = salutation;
    }

    /**
     * @param timeZoneId
     *            the timeZoneId to set
     */
    public void setTimeZoneId(String timeZoneId) {
        this.timeZoneId = timeZoneId;
    }

    /**
     * Sets the user alias.
     *
     * @param alias
     *            the user alias, must not be null
     * @throws IllegalArgumentException
     *             if the alias is null
     */
    private void setUserAlias(String alias) {
        if (alias == null) {
            throw new IllegalArgumentException("User alias cannot be null");
        }
        this.userAlias = alias;
    }

    /**
     * Sets the user id.
     *
     * @param id
     *            the user id, must not be null
     * @throws IllegalArgumentException
     *             if the id is null
     */
    private void setUserId(Long id) {
        if (id == null) {
            throw new IllegalArgumentException("User id cannot be null");
        }
        this.userId = id;
    }

    /**
     * @param userLocale
     *            the userLocale to set
     */
    public void setUserLocale(Locale userLocale) {
        this.userLocale = userLocale;
    }

    /**
     * @param userStatus
     *            the userStatus to set
     */
    public void setUserStatus(UserStatus userStatus) {
        this.userStatus = userStatus;
    }
}
| apache-2.0 |
yanzhijun/jclouds-aliyun | apis/ecs/src/main/java/org/jclouds/ecs/EcsApi.java | 2056 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.ecs;
import java.io.Closeable;
import org.jclouds.ecs.features.InstanceApi;
import org.jclouds.ecs.features.ServerApi;
import org.jclouds.ecs.features.ServerImageApi;
import org.jclouds.rest.annotations.Delegate;
import com.google.common.base.Optional;
/**
* Provides access to EC2 features, broken up by feature group. Use of the
* {@link Optional} type allows you to check to see if the underlying
* implementation supports a particular feature before attempting to use it.
* This is useful in clones like OpenStack, CloudStack, or Eucalyptus, which
* track the api, but are always behind Amazon's service. In the case of Amazon
* ({@code aws-ec2}), you can expect all features to be present.
*
*
* Example
*
* <pre>
* Optional<? extends WindowsApi> windowsOption = ec2Api.getWindowsApi();
* checkState(windowsOption.isPresent(),
* "windows feature required, but not present");
* </pre>
*/
public interface EcsApi extends Closeable {

    /**
     * Provides synchronous access to instance features.
     *
     * @return the instance API, or {@link Optional#absent()} if the provider does not
     *         support instance operations
     */
    @Delegate
    Optional<? extends InstanceApi> getInstanceApi();

    /**
     * Provides synchronous access to server-image features.
     *
     * @return the server-image API, or {@link Optional#absent()} if the provider does not
     *         support server-image operations
     */
    @Delegate
    Optional<? extends ServerImageApi> getServerImageApi();

    /**
     * Provides synchronous access to server features.
     *
     * @return the server API, or {@link Optional#absent()} if the provider does not
     *         support server operations
     */
    @Delegate
    Optional<? extends ServerApi> getServerApi();
}
| apache-2.0 |
Terradue/warhol | client/cli/src/main/java/com/terradue/warhol/client/cli/Warhol.java | 10028 | package com.terradue.warhol.client.cli;
/*
* Copyright 2011-2012 Terradue srl
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import static java.lang.Runtime.getRuntime;
import static java.lang.System.currentTimeMillis;
import static java.lang.System.exit;
import static java.lang.System.getProperty;
import static java.lang.System.setProperty;
import static java.lang.System.out;
import static java.util.ServiceLoader.load;
import static org.slf4j.LoggerFactory.getILoggerFactory;
import static org.slf4j.LoggerFactory.getLogger;
import java.io.File;
import java.util.Date;
import java.util.Formatter;
import java.util.Map;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;
import org.slf4j.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.joran.JoranConfigurator;
import ch.qos.logback.core.joran.spi.JoranException;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.converters.FileConverter;
import com.terradue.warhol.client.CatalogueSystem;
import com.terradue.warhol.client.settings.Settings;
/**
 * Command-line launcher for the T2 Warhol catalogue client.
 *
 * <p>Execution order: parse global options and the sub-command with JCommander,
 * validate the settings file, configure logback, unmarshal the XML settings and
 * dispatch to the selected {@code Command}. Several members used below
 * ({@code getLogger}, {@code getProperty}, {@code setProperty}, {@code load},
 * {@code out}, {@code exit}, {@code getILoggerFactory}, {@code currentTimeMillis},
 * {@code getRuntime}) are statically imported outside this view — presumably from
 * SLF4J, {@code System}, {@code ServiceLoader} and {@code Runtime}; confirm
 * against the full import list.</p>
 */
public final class Warhol
{

    private final Logger logger = getLogger( getClass() );

    // Global CLI flags, bound by JCommander via the @Parameter annotations.

    @Parameter( names = { "-h", "--help" }, description = "Display help information." )
    private boolean printHelp;

    @Parameter( names = { "-v", "--version" }, description = "Display version information." )
    private boolean showVersion;

    @Parameter( names = { "-X", "--verbose" }, description = "Produce execution debug output." )
    private boolean debug;

    // Defaults to <basedir>/etc/settings.xml; overridable with -s/--settings.
    @Parameter(
        names = { "-s", "--settings" },
        description = "Force the use of an alternate settings file.",
        converter = FileConverter.class
    )
    private File settingsFile = new File( getProperty( "basedir" ), "etc/settings.xml" );

    /**
     * Runs the CLI end to end.
     *
     * @param args raw command-line arguments
     * @return the intended process exit code: 0 on success (or after printing
     *         help/version information), 1 on any validation or execution failure
     */
    public int execute( String...args )
    {
        JCommander commander = new JCommander( this );
        commander.setProgramName( getProperty( "app.name" ) );

        // Sub-commands are discovered at runtime; load( Command.class ) is a
        // static import — presumably java.util.ServiceLoader.load, TODO confirm.
        for ( Command command : load( Command.class ) )
        {
            commander.addCommand( command );
        }

        commander.parse( args );

        // -h/--help and -v/--version short-circuit before any validation.
        if ( printHelp )
        {
            commander.usage();
            return 0;
        }
        if ( showVersion )
        {
            printVersionInfo();
            return 0;
        }

        // check the settings file!
        if ( !settingsFile.exists() )
        {
            out.printf( "Specified settings file %s does not exist, please specify a valid one.%n", settingsFile );
            return 1;
        }
        else if ( settingsFile.isDirectory() )
        {
            out.printf( "Specified settings file %s must not be a directory.%n", settingsFile );
            return 1;
        }

        // check the input command: exactly one known sub-command is required
        Map<String, JCommander> commands = commander.getCommands();
        String parsedCommand = commander.getParsedCommand();
        if ( parsedCommand == null )
        {
            out.println( "No command specified, read the usage first." );
            commander.usage();
            return 1;
        }
        if ( !commands.containsKey( parsedCommand ) )
        {
            out.printf( "Invalid input command, read the usage first.%n" );
            commander.usage();
            return 1;
        }

        // so, init logging stuff: the level property is read by the bundled
        // logback configuration (logback-config.xml)
        if ( debug )
        {
            setProperty( "logging.level", "DEBUG" );
        }
        else
        {
            setProperty( "logging.level", "INFO" );
        }

        // assume SLF4J is bound to logback in the current environment
        final LoggerContext lc = (LoggerContext) getILoggerFactory();
        try
        {
            JoranConfigurator configurator = new JoranConfigurator();
            configurator.setContext( lc );
            // the context was probably already configured by default configuration
            // rules, so reset it before applying the bundled configuration
            lc.reset();
            configurator.doConfigure( getClass().getClassLoader().getResourceAsStream( "logback-config.xml" ) );
        }
        catch ( JoranException je )
        {
            // StatusPrinter should handle this
        }

        logger.info( "" );
        logger.info( "------------------------------------------------------------------------" );
        logger.info( "T2 Warhol :: {}", parsedCommand );
        logger.info( "------------------------------------------------------------------------" );
        logger.info( "" );

        int exit = 0;
        long start = currentTimeMillis();
        Throwable error = null;
        try
        {
            // Unmarshal the XML settings and hand the configured catalogue
            // system to the selected command instance.
            JAXBContext context = JAXBContext.newInstance( "com.terradue.warhol.client.settings" );
            Unmarshaller xmlUnmarshaller = context.createUnmarshaller();
            Settings settings = Settings.class.cast( xmlUnmarshaller.unmarshal( settingsFile ) );
            CatalogueSystem catalogueSystem = new CatalogueSystem( settings );
            Command.class.cast( commands.get( parsedCommand ).getObjects().get( 0 ) ).execute( catalogueSystem );
        }
        catch ( Throwable t )
        {
            // Any failure (including Errors) is recorded and reported in the
            // summary block below; the method still returns normally.
            exit = 1;
            error = t;
        }
        finally
        {
            // Maven-style execution summary: status, error (if any), uptime, memory.
            logger.info( "" );
            logger.info( "------------------------------------------------------------------------" );
            logger.info( "T2 Warhol {}", ( exit > 0 ) ? "FAILURE" : "SUCCESS" );
            if ( exit > 0 )
            {
                logger.info( "" );
                if ( debug )
                {
                    // full stack trace only in verbose mode
                    logger.error( "Execution terminated with errors", error );
                }
                else
                {
                    logger.error( "Execution terminated with errors: {}", error.getMessage() );
                }
                logger.info( "" );
            }

            // Render elapsed time as "Total uptime: [H hour(s)] [M minute(s)] [S second(s)]".
            Formatter uptime = new Formatter().format( "Total uptime:" );
            long uptimeInSeconds = ( currentTimeMillis() - start ) / 1000;
            final long hours = uptimeInSeconds / 3600;
            if ( hours > 0 )
            {
                uptime.format( " %s hour%s", hours, ( hours > 1 ? "s" : "" ) );
            }
            uptimeInSeconds = uptimeInSeconds - ( hours * 3600 );
            final long minutes = uptimeInSeconds / 60;
            if ( minutes > 0 )
            {
                uptime.format( " %s minute%s", minutes, ( minutes > 1 ? "s" : "" ) );
            }
            uptimeInSeconds = uptimeInSeconds - ( minutes * 60 );
            if ( uptimeInSeconds > 0 )
            {
                uptime.format( " %s second%s", uptimeInSeconds, ( uptimeInSeconds > 1 ? "s" : "" ) );
            }
            logger.info( "Total time: {}", uptime.toString() );
            logger.info( "Finished at: {}", new Date() );

            final Runtime runtime = getRuntime();
            final int megaUnit = 1024 * 1024;
            logger.info( "Final Memory: {}M/{}M",
                         ( runtime.totalMemory() - runtime.freeMemory() ) / megaUnit,
                         runtime.totalMemory() / megaUnit );
            logger.info( "------------------------------------------------------------------------" );
        }
        return exit;
    }

    /**
     * Prints version and platform diagnostics (mirroring Maven's {@code -v}
     * output) to standard output.
     */
    private static void printVersionInfo()
    {
        out.printf( "T2 Warhol %s%n",
                    getProperty( "warhol.version" ) );
        out.printf( "Java version: %s, vendor: %s%n",
                    getProperty( "java.version" ),
                    getProperty( "java.vendor" ) );
        out.printf( "Java home: %s%n",
                    getProperty( "java.home" ) );
        out.printf( "Default locale: %s_%s, platform encoding: %s%n",
                    getProperty( "user.language" ),
                    getProperty( "user.country" ),
                    getProperty( "sun.jnu.encoding" ) );
        out.printf( "OS name: \"%s\", version: \"%s\", arch: \"%s\", family: \"%s\"%n",
                    getProperty( "os.name" ),
                    getProperty( "os.version" ),
                    getProperty( "os.arch" ),
                    getOsFamily() );
    }

    /**
     * Best-effort OS family classification based on the "os.name" system
     * property and the platform path separator.
     *
     * @return one of "windows", "os/2", "z/os", "os/400", "dos", "mac", "unix",
     *         "tandem", "openvms" or "undefined"
     */
    private static final String getOsFamily()
    {
        String osName = System.getProperty( "os.name" ).toLowerCase();
        String pathSep = System.getProperty( "path.separator" );

        if ( osName.contains( "windows" ) )
        {
            return "windows";
        }
        else if ( osName.contains( "os/2" ) )
        {
            return "os/2";
        }
        else if ( osName.contains( "z/os" ) || osName.contains( "os/390" ) )
        {
            return "z/os";
        }
        else if ( osName.contains( "os/400" ) )
        {
            return "os/400";
        }
        else if ( pathSep.equals( ";" ) )
        {
            // semicolon separator but none of the known Windows-like names
            return "dos";
        }
        else if ( osName.contains( "mac" ) )
        {
            if ( osName.endsWith( "x" ) )
            {
                return "mac"; // MACOSX
            }
            // a "mac" name not ending in "x" is reported as unix here
            return "unix";
        }
        else if ( osName.contains( "nonstop_kernel" ) )
        {
            return "tandem";
        }
        else if ( osName.contains( "openvms" ) )
        {
            return "openvms";
        }
        else if ( pathSep.equals( ":" ) )
        {
            return "unix";
        }
        return "undefined";
    }

    /**
     * JVM entry point: delegates to {@link #execute(String...)} and uses its
     * result as the process exit status.
     *
     * @param args command-line arguments
     */
    public static void main( String[] args )
    {
        exit( new Warhol().execute( args ) );
    }
}
| apache-2.0 |
jexp/idea2 | java/compiler/impl/src/com/intellij/compiler/impl/CompileContextImpl.java | 14668 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* @author: Eugene Zhuravlev
* Date: Jan 21, 2003
* Time: 4:19:03 PM
*/
package com.intellij.compiler.impl;
import com.intellij.compiler.CompilerMessageImpl;
import com.intellij.compiler.make.DependencyCache;
import com.intellij.compiler.progress.CompilerTask;
import com.intellij.openapi.compiler.CompileScope;
import com.intellij.openapi.compiler.CompilerMessage;
import com.intellij.openapi.compiler.CompilerMessageCategory;
import com.intellij.openapi.compiler.CompilerPaths;
import com.intellij.openapi.compiler.ex.CompileContextEx;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.CompilerModuleExtension;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.UserDataHolderBase;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.Navigatable;
import com.intellij.util.containers.HashMap;
import com.intellij.util.containers.HashSet;
import com.intellij.util.containers.OrderedSet;
import com.intellij.util.indexing.FileBasedIndex;
import com.intellij.util.io.zip.JBZipFile;
import gnu.trove.TIntHashSet;
import gnu.trove.TObjectHashingStrategy;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.*;
/**
 * Default {@link CompileContextEx} implementation for a single compilation
 * session: accumulates compiler messages by severity, tracks generated source
 * roots and their owning modules, caches module output directories, and exposes
 * the scope and progress of the current run.
 *
 * <p>NOTE(review): the internal maps are plain HashMaps, so instances do not
 * look thread-safe — confirm the threading model of callers before sharing.</p>
 */
public class CompileContextImpl extends UserDataHolderBase implements CompileContextEx {
    private final Project myProject;
    private final CompilerTask myTask;
    // Compiler messages bucketed by severity; buckets are created lazily in addMessage().
    private final Map<CompilerMessageCategory, Collection<CompilerMessage>> myMessages = new EnumMap<CompilerMessageCategory, Collection<CompilerMessage>>(CompilerMessageCategory.class);
    // Mutable: addScope() wraps it into a CompositeScope.
    private CompileScope myCompileScope;
    private final DependencyCache myDependencyCache;
    private final boolean myMake;
    private final boolean myIsRebuild;
    private boolean myRebuildRequested = false;
    private String myRebuildReason;
    // Generated-source root -> owning module (populated by assignModule()).
    private final Map<VirtualFile, Module> myRootToModuleMap = new HashMap<VirtualFile, Module>();
    // Reverse mapping: module -> additional (generated) source roots.
    private final Map<Module, Set<VirtualFile>> myModuleToRootsMap = new HashMap<Module, Set<VirtualFile>>();
    // Subset of the generated roots that hold test sources.
    private final Set<VirtualFile> myGeneratedTestRoots = new java.util.HashSet<VirtualFile>();
    // Recomputed by recalculateOutputDirs().
    private VirtualFile[] myOutputDirectories;
    private Set<VirtualFile> myTestOutputDirectories;
    // FileBasedIndex ids of individual generated source files (see markGenerated()).
    private final TIntHashSet myGeneratedSources = new TIntHashSet();
    private final ProjectFileIndex myProjectFileIndex; // cached for performance reasons
    private final ProjectCompileScope myProjectCompileScope;
    private final long myStartCompilationStamp;
    // Zip-based output support is currently disabled; see the commented-out
    // updateZippedOuput()/lookupZip() bodies below, kept as a reference.
    private final Map<String, JBZipFile> myOpenZipFiles = new java.util.HashMap<String, JBZipFile>();

    /**
     * Creates a context for one compilation run.
     *
     * @param project         the project being compiled
     * @param indicator       task that receives messages and owns the progress indicator
     * @param compileScope    initial scope of the compilation
     * @param dependencyCache dependency information shared across the run
     * @param isMake          whether this is an incremental "make"
     * @param isRebuild       whether this is a full rebuild
     */
    public CompileContextImpl(Project project,
                              CompilerTask indicator,
                              CompileScope compileScope,
                              DependencyCache dependencyCache, boolean isMake, boolean isRebuild) {
        myProject = project;
        myTask = indicator;
        myCompileScope = compileScope;
        myDependencyCache = dependencyCache;
        myMake = isMake;
        myIsRebuild = isRebuild;
        myStartCompilationStamp = System.currentTimeMillis();
        myProjectFileIndex = ProjectRootManager.getInstance(myProject).getFileIndex();
        myProjectCompileScope = new ProjectCompileScope(myProject);
        recalculateOutputDirs();
    }

    /**
     * Rebuilds the cached arrays/sets of module output directories (production
     * and test) from the current module configuration.
     */
    public void recalculateOutputDirs() {
        final Module[] allModules = ModuleManager.getInstance(myProject).getModules();

        final Set<VirtualFile> allDirs = new OrderedSet<VirtualFile>((TObjectHashingStrategy<VirtualFile>)TObjectHashingStrategy.CANONICAL);
        final Set<VirtualFile> testOutputDirs = new java.util.HashSet<VirtualFile>();
        final Set<VirtualFile> productionOutputDirs = new java.util.HashSet<VirtualFile>();

        for (Module module : allModules) {
            final CompilerModuleExtension manager = CompilerModuleExtension.getInstance(module);
            final VirtualFile output = manager.getCompilerOutputPath();
            if (output != null && output.isValid()) {
                allDirs.add(output);
                productionOutputDirs.add(output);
            }
            final VirtualFile testsOutput = manager.getCompilerOutputPathForTests();
            if (testsOutput != null && testsOutput.isValid()) {
                allDirs.add(testsOutput);
                testOutputDirs.add(testsOutput);
            }
        }
        myOutputDirectories = allDirs.toArray(new VirtualFile[allDirs.size()]);
        // need this to ensure that the set contains only _dedicated_ test output dirs
        // Directories that are configured for both test and production classes must not be added in the resulting set
        testOutputDirs.removeAll(productionOutputDirs);
        myTestOutputDirectories = Collections.unmodifiableSet(testOutputDirs);
    }

    /** Records the given files as generated sources (by FileBasedIndex id). */
    public void markGenerated(Collection<VirtualFile> files) {
        for (final VirtualFile file : files) {
            myGeneratedSources.add(FileBasedIndex.getFileId(file));
        }
    }

    /** @return wall-clock timestamp (ms) taken when this context was created */
    public long getStartCompilationStamp() {
        return myStartCompilationStamp;
    }

    /**
     * @return true if the file was explicitly marked as generated or lives
     *         under one of the registered generated-source roots
     */
    public boolean isGenerated(VirtualFile file) {
        if (myGeneratedSources.contains(FileBasedIndex.getFileId(file))) {
            return true;
        }
        for (final VirtualFile root : myRootToModuleMap.keySet()) {
            if (VfsUtil.isAncestor(root, file, false)) {
                return true;
            }
        }
        return false;
    }

    // NOTE(review): method name misspells "Output" as "Ouput"; kept because it
    // is part of the public interface callers may depend on. The zip-output
    // implementation is intentionally disabled (no-op) at the moment.
    public void updateZippedOuput(String outputDir, String relativePath) throws IOException {
        /*
        final File file = new File(outputDir, relativePath);
        final JBZipFile zip = lookupZip(outputDir);
        final long fileStamp = file.lastModified();
        if (fileStamp <= 0L) { // does not exist
          final JBZipEntry entry = zip.getEntry(relativePath);
          if (entry != null) {
            entry.erase();
          }
        }
        else {
          final JBZipEntry entry = zip.getOrCreateEntry(relativePath);
          if (entry.getTime() != fileStamp) {
            entry.setData(FileUtil.loadFileBytes(file), fileStamp);
          }
        }
        */
    }

    /*
    private JBZipFile lookupZip(String outputDir) {
      synchronized (myOpenZipFiles) {
        JBZipFile zip = myOpenZipFiles.get(outputDir);
        if (zip == null) {
          final File zipFile = CompilerPathsEx.getZippedOutputPath(myProject, outputDir);
          try {
            try {
              zip = new JBZipFile(zipFile);
            }
            catch (FileNotFoundException e) {
              try {
                zipFile.createNewFile();
                zip = new JBZipFile(zipFile);
              }
              catch (IOException e1) {
                zipFile.getParentFile().mkdirs();
                zipFile.createNewFile();
                zip = new JBZipFile(zipFile);
              }
            }
            myOpenZipFiles.put(outputDir, zip);
          }
          catch (IOException e) {
            LOG.info(e);
            addMessage(CompilerMessageCategory.ERROR, "Cannot create zip file " + zipFile.getPath() + ": " + e.getMessage(), null, -1, -1);
          }
        }
        return zip;
      }
    }
    */

    // Currently a no-op: zip-output support is disabled (see above).
    public void commitZipFiles() {
        /*
        synchronized (myOpenZipFiles) {
          for (JBZipFile zipFile : myOpenZipFiles.values()) {
            try {
              zipFile.close();
            }
            catch (IOException e) {
              LOG.info(e);
              addMessage(CompilerMessageCategory.ERROR, "Cannot save zip files: " + e.getMessage(), null, -1, -1);
            }
          }
          myOpenZipFiles.clear();
        }
        */
    }

    // Currently a no-op: zip-output support is disabled (see above).
    public void commitZip(String outputDir) throws IOException {
        /*
        synchronized (myOpenZipFiles) {
          JBZipFile zip = myOpenZipFiles.remove(outputDir);
          if (zip != null) {
            zip.close();
          }
        }
        */
    }

    public Project getProject() {
        return myProject;
    }

    public DependencyCache getDependencyCache() {
        return myDependencyCache;
    }

    /** @return messages recorded for the category, or an empty array if none */
    public CompilerMessage[] getMessages(CompilerMessageCategory category) {
        Collection<CompilerMessage> collection = myMessages.get(category);
        if (collection == null) {
            return CompilerMessage.EMPTY_ARRAY;
        }
        return collection.toArray(new CompilerMessage[collection.size()]);
    }

    /** Convenience overload without a navigation target. */
    public void addMessage(CompilerMessageCategory category, String message, String url, int lineNum, int columnNum) {
        CompilerMessageImpl msg = new CompilerMessageImpl(myProject, category, message, url, lineNum, columnNum, null);
        addMessage(msg);
    }

    public void addMessage(CompilerMessageCategory category, String message, String url, int lineNum, int columnNum,
                           Navigatable navigatable) {
        CompilerMessageImpl msg = new CompilerMessageImpl(myProject, category, message, url, lineNum, columnNum, navigatable);
        addMessage(msg);
    }

    /**
     * Stores the message and forwards it to the UI task; duplicates (per the
     * message's equals()) are recorded and forwarded only once.
     */
    public void addMessage(CompilerMessage msg) {
        Collection<CompilerMessage> messages = myMessages.get(msg.getCategory());
        if (messages == null) {
            messages = new HashSet<CompilerMessage>();
            myMessages.put(msg.getCategory(), messages);
        }
        if (messages.add(msg)) {
            myTask.addMessage(msg);
        }
    }

    /**
     * @param category severity to count, or null to count messages of all severities
     * @return number of distinct messages recorded
     */
    public int getMessageCount(CompilerMessageCategory category) {
        if (category != null) {
            Collection<CompilerMessage> collection = myMessages.get(category);
            return collection != null ? collection.size() : 0;
        }

        int count = 0;
        for (Collection<CompilerMessage> collection : myMessages.values()) {
            if (collection != null) {
                count += collection.size();
            }
        }
        return count;
    }

    public CompileScope getCompileScope() {
        return myCompileScope;
    }

    public CompileScope getProjectCompileScope() {
        return myProjectCompileScope;
    }

    /**
     * Flags the session so a full rebuild is requested next time; only the
     * first request's message is kept and reported as an error.
     */
    public void requestRebuildNextTime(String message) {
        if (!myRebuildRequested) {
            myRebuildRequested = true;
            myRebuildReason = message;
            addMessage(CompilerMessageCategory.ERROR, message, null, -1, -1);
        }
    }

    public boolean isRebuildRequested() {
        return myRebuildRequested;
    }

    public String getRebuildReason() {
        return myRebuildReason;
    }

    public ProgressIndicator getProgressIndicator() {
        return myTask.getIndicator();
    }

    /**
     * Registers a generated-source root for the module (and as a test root when
     * {@code isTestSource}); always invalidates the module's cached source-root
     * array so getSourceRoots() recomputes it.
     */
    public void assignModule(@NotNull VirtualFile root, @NotNull Module module, final boolean isTestSource) {
        try {
            myRootToModuleMap.put(root, module);
            Set<VirtualFile> set = myModuleToRootsMap.get(module);
            if (set == null) {
                set = new HashSet<VirtualFile>();
                myModuleToRootsMap.put(module, set);
            }
            set.add(root);
            if (isTestSource) {
                myGeneratedTestRoots.add(root);
            }
        }
        finally {
            myModuleToRootsCache.remove(module);
        }
    }

    // NOTE(review): TranslatingCompilerFilesMonitor is not in the visible import
    // list — presumably resolved via the containing package; confirm.
    @Nullable
    public VirtualFile getSourceFileByOutputFile(VirtualFile outputFile) {
        return TranslatingCompilerFilesMonitor.getSourceFileByOutput(outputFile);
    }

    /**
     * Resolves the owning module via the project file index first, falling back
     * to the registered generated-source roots.
     */
    public Module getModuleByFile(VirtualFile file) {
        final Module module = myProjectFileIndex.getModuleForFile(file);
        if (module != null) {
            return module;
        }
        for (final VirtualFile root : myRootToModuleMap.keySet()) {
            if (VfsUtil.isAncestor(root, file, false)) {
                return myRootToModuleMap.get(root);
            }
        }
        return null;
    }

    // Cache of combined (configured + generated) source roots per module;
    // invalidated by assignModule() and when cached files become invalid.
    private final Map<Module, VirtualFile[]> myModuleToRootsCache = new HashMap<Module, VirtualFile[]>();

    /**
     * @return the module's configured source roots plus any generated roots
     *         registered through assignModule(); results are cached per module
     */
    public VirtualFile[] getSourceRoots(Module module) {
        VirtualFile[] cachedRoots = myModuleToRootsCache.get(module);
        if (cachedRoots != null) {
            if (areFilesValid(cachedRoots)) {
                return cachedRoots;
            }
            else {
                myModuleToRootsCache.remove(module); // clear cache for this module and rebuild list of roots
            }
        }

        Set<VirtualFile> additionalRoots = myModuleToRootsMap.get(module);
        VirtualFile[] moduleRoots = ModuleRootManager.getInstance(module).getSourceRoots();
        if (additionalRoots == null || additionalRoots.isEmpty()) {
            myModuleToRootsCache.put(module, moduleRoots);
            return moduleRoots;
        }

        final VirtualFile[] allRoots = new VirtualFile[additionalRoots.size() + moduleRoots.length];
        System.arraycopy(moduleRoots, 0, allRoots, 0, moduleRoots.length);
        int index = moduleRoots.length;
        for (final VirtualFile additionalRoot : additionalRoots) {
            allRoots[index++] = additionalRoot;
        }
        myModuleToRootsCache.put(module, allRoots);
        return allRoots;
    }

    /** @return true if every file in the array is still valid in the VFS */
    private static boolean areFilesValid(VirtualFile[] files) {
        for (VirtualFile file : files) {
            if (!file.isValid()) {
                return false;
            }
        }
        return true;
    }

    public VirtualFile[] getAllOutputDirectories() {
        return myOutputDirectories;
    }

    /** @return unmodifiable set of output dirs used exclusively for test classes */
    @NotNull
    public Set<VirtualFile> getTestOutputDirectories() {
        return myTestOutputDirectories;
    }

    public VirtualFile getModuleOutputDirectory(Module module) {
        // todo: caching?
        return CompilerPaths.getModuleOutputDirectory(module, false);
    }

    public VirtualFile getModuleOutputDirectoryForTests(Module module) {
        // todo: caching?
        return CompilerPaths.getModuleOutputDirectory(module, true);
    }

    public boolean isMake() {
        return myMake;
    }

    public boolean isRebuild() {
        return myIsRebuild;
    }

    /** Widens the compile scope by wrapping the current one in a CompositeScope. */
    public void addScope(final CompileScope additionalScope) {
        myCompileScope = new CompositeScope(myCompileScope, additionalScope);
    }

    /**
     * @return true if the file/dir is test content per the project index or
     *         lies under a registered generated test root
     */
    public boolean isInTestSourceContent(@NotNull final VirtualFile fileOrDir) {
        if (myProjectFileIndex.isInTestSourceContent(fileOrDir)) {
            return true;
        }
        for (final VirtualFile root : myGeneratedTestRoots) {
            if (VfsUtil.isAncestor(root, fileOrDir, false)) {
                return true;
            }
        }
        return false;
    }

    /**
     * @return true if the file/dir is source content per the project index or
     *         lies under any registered generated-source root
     */
    public boolean isInSourceContent(@NotNull final VirtualFile fileOrDir) {
        if (myProjectFileIndex.isInSourceContent(fileOrDir)) {
            return true;
        }
        for (final VirtualFile root : myRootToModuleMap.keySet()) {
            if (VfsUtil.isAncestor(root, fileOrDir, false)) {
                return true;
            }
        }
        return false;
    }
}
| apache-2.0 |
macvelli/RootFramework | src/root/lang/reflect/StaticPrimitiveField.java | 3554 | /*
* Copyright 2006-2016 Edward Smith
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package root.lang.reflect;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import root.util.Root;
import sun.misc.Unsafe;
/**
*
* @author Edward Smith
* @version 0.5
* @since 0.5
*
* @param <C>
* The class type of the {@link Object}
*/
/**
 * Typed accessor for a single <em>static</em> primitive field, read and written
 * through {@link Unsafe} using the field's resolved base object and offset.
 *
 * <p>Instance fields are rejected at construction time. Reflection failures are
 * wrapped in {@link RuntimeException}.</p>
 *
 * @param <C> the class that declares the target static field
 */
public final class StaticPrimitiveField<C> {

    /** Shared {@link Unsafe} handle obtained from the framework bootstrap. */
    private static final Unsafe unsafe = Root.getUnsafe();

    /** Raw offset of the static field within its holder, per {@link Unsafe#staticFieldOffset}. */
    private final long fieldOffset;

    /** Base object for static-field access, per {@link Unsafe#staticFieldBase}. */
    private final Object objectBase;

    /**
     * Resolves the named static field of {@code clazz}.
     *
     * @param clazz     the class declaring the static field
     * @param fieldName the declared field's name
     * @throws RuntimeException if the field does not exist, cannot be accessed,
     *                          or is not static
     */
    public StaticPrimitiveField(final Class<C> clazz, final String fieldName) {
        try {
            final Field field = clazz.getDeclaredField(fieldName);
            if (!Modifier.isStatic(field.getModifiers())) {
                throw new RuntimeException("StaticPrimitiveField does not support instance fields");
            }
            this.fieldOffset = unsafe.staticFieldOffset(field);
            this.objectBase = unsafe.staticFieldBase(field);
        } catch (NoSuchFieldException | SecurityException cause) {
            throw new RuntimeException(cause);
        }
    }

    // Accessors are grouped as get/set pairs per primitive type. Each pair is a
    // thin delegation to the corresponding Unsafe read/write at the resolved
    // base+offset; no type checking is performed beyond what Unsafe does.

    public final boolean getBoolean() {
        return unsafe.getBoolean(objectBase, fieldOffset);
    }

    public final void setBoolean(final boolean value) {
        unsafe.putBoolean(objectBase, fieldOffset, value);
    }

    public final byte getByte() {
        return unsafe.getByte(objectBase, fieldOffset);
    }

    public final void setByte(final byte value) {
        unsafe.putByte(objectBase, fieldOffset, value);
    }

    public final char getChar() {
        return unsafe.getChar(objectBase, fieldOffset);
    }

    public final void setChar(final char value) {
        unsafe.putChar(objectBase, fieldOffset, value);
    }

    public final double getDouble() {
        return unsafe.getDouble(objectBase, fieldOffset);
    }

    public final void setDouble(final double value) {
        unsafe.putDouble(objectBase, fieldOffset, value);
    }

    public final float getFloat() {
        return unsafe.getFloat(objectBase, fieldOffset);
    }

    public final void setFloat(final float value) {
        unsafe.putFloat(objectBase, fieldOffset, value);
    }

    public final int getInt() {
        return unsafe.getInt(objectBase, fieldOffset);
    }

    public final void setInt(final int value) {
        unsafe.putInt(objectBase, fieldOffset, value);
    }

    public final long getLong() {
        return unsafe.getLong(objectBase, fieldOffset);
    }

    public final void setLong(final long value) {
        unsafe.putLong(objectBase, fieldOffset, value);
    }

    public final short getShort() {
        return unsafe.getShort(objectBase, fieldOffset);
    }

    public final void setShort(final short value) {
        unsafe.putShort(objectBase, fieldOffset, value);
    }
}
| apache-2.0 |
darsh2/PopularMovies | app/src/main/java/com/example/darsh/model/MovieReviews.java | 1226 | package com.example.darsh.model;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import java.util.ArrayList;
/**
* Created by darshan on 9/6/16.
*/
/**
 * Gson deserialization target for a paginated list of movie reviews
 * (presumably the TMDb "reviews" endpoint — confirm against the API client).
 * Plain mutable bean: one field per JSON property, mapped via
 * {@link SerializedName}.
 */
public class MovieReviews {

    /** 1-based index of the page carried by this response. */
    @Expose
    @SerializedName("page")
    private int page;

    /** Reviews on this page; initialized to an empty list, never null by default. */
    @Expose
    @SerializedName("results")
    private ArrayList<MovieReview> movieReviews = new ArrayList<>();

    /** Total number of result pages available. */
    @Expose
    @SerializedName("total_pages")
    private int totalPages;

    /** Total number of reviews across all pages. */
    @Expose
    @SerializedName("total_results")
    private int totalResults;

    public int getPage() {
        return this.page;
    }

    public void setPage(final int page) {
        this.page = page;
    }

    public ArrayList<MovieReview> getMovieReviews() {
        return this.movieReviews;
    }

    public void setMovieReviews(final ArrayList<MovieReview> movieReviews) {
        this.movieReviews = movieReviews;
    }

    public int getTotalPages() {
        return this.totalPages;
    }

    public void setTotalPages(final int totalPages) {
        this.totalPages = totalPages;
    }

    public int getTotalResults() {
        return this.totalResults;
    }

    public void setTotalResults(final int totalResults) {
        this.totalResults = totalResults;
    }
}
| apache-2.0 |
infinitiessoft/keystone4j | keystone4j-commons/src/main/java/com/infinities/keystone4j/ssl/Base64Verifier.java | 3815 | /*******************************************************************************
* Copyright 2015 InfinitiesSoft Solutions Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package com.infinities.keystone4j.ssl;
/**
 * Static helpers that check whether bytes, byte arrays or strings look like
 * valid base64 content. The single-byte and byte-array checks accept both the
 * STANDARD and URL_SAFE alphabets (plus padding, and whitespace for arrays);
 * the string check applies a stricter, standard-alphabet regex.
 */
public class Base64Verifier {

    private Base64Verifier() {
        // static utility holder — never instantiated
    }

    /** Padding character used by base64 ('='). */
    protected static final byte PAD_DEFAULT = '=';

    /**
     * Lookup table translating base64 alphabet characters to their 6-bit
     * values; characters outside the alphabet map to -1. Both '+'/'-' decode
     * to 62 and '/'/'_' to 63, so the table covers STANDARD and URL_SAFE
     * alphabets at once (as specified in Table 1 of RFC 2045 plus the
     * URL-safe variants).
     *
     * Thanks to "commons" project in ws.apache.org for this code.
     * http://svn.apache.org/repos/asf/webservices/commons/trunk/modules/util/
     */
    private static final byte[] DECODE_TABLE = {
            // 0x00-0x1f: control characters, never valid
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
            // 0x20-0x2f: '+' (43) and '-' (45) -> 62, '/' (47) -> 63
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, 62, -1, 63,
            // '0'-'9' (48-57) -> 52..61
            52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -1, -1, -1,
            // 'A'-'O' (65-79) -> 0..14
            -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
            // 'P'-'Z' (80-90) -> 15..25, '_' (95) -> 63
            15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, 63,
            // 'a'-'o' (97-111) -> 26..40
            -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
            // 'p'-'z' (112-122) -> 41..51
            41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51 };

    /**
     * Tells whether the given octet belongs to the base64 alphabet (including
     * the padding character '=').
     *
     * @param octet the value to test
     * @return true if the value is part of the alphabet, false otherwise
     */
    public static boolean isBase64(byte octet) {
        if (octet == PAD_DEFAULT) {
            return true;
        }
        return octet >= 0 && octet < DECODE_TABLE.length && DECODE_TABLE[octet] != -1;
    }

    /**
     * Tells whether every byte of the array is a base64 alphabet character or
     * whitespace (space, tab, CR, LF). An empty array is considered valid.
     *
     * @param arrayOctet the bytes to test
     * @return true if all bytes are acceptable, false otherwise
     */
    public static boolean isBase64(byte[] arrayOctet) {
        for (final byte octet : arrayOctet) {
            if (!isBase64(octet) && !isWhiteSpace(octet)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Strict structural check for a base64 string: groups of four standard
     * alphabet characters with optional '='/'==' padding on the final group.
     * Note this is stricter than the byte-based checks — it rejects the
     * URL-safe alphabet, whitespace, and the empty string.
     *
     * @param text the string to test
     * @return true if the string matches the standard base64 grammar
     */
    public static boolean isBase64(String text) {
        return text.matches("^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{4}|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)$");
    }

    /**
     * Whitespace test used by the array check: space, LF, CR or tab.
     *
     * @param byteToCheck the byte to classify
     * @return true if the byte is one of the four whitespace characters
     */
    protected static boolean isWhiteSpace(final byte byteToCheck) {
        return byteToCheck == ' ' || byteToCheck == '\n' || byteToCheck == '\r' || byteToCheck == '\t';
    }
}
| apache-2.0 |
entwinemedia/functional | src/main/java/com/entwinemedia/fn/data/AbstractImmutableSetBuilder.java | 983 | /*
* Copyright 2015 Entwine AG, Switzerland
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.entwinemedia.fn.data;
import java.util.Collections;
import java.util.Set;
/**
 * SetBuilder variant whose {@link #empty()} hands out one shared immutable
 * empty set instead of allocating a new set per call.
 */
public abstract class AbstractImmutableSetBuilder extends SetBuilder {
    // Single shared empty instance, reused for every element type. The raw type
    // is deliberate: the cast in empty() is safe only because the wrapped set is
    // immutable and therefore can never observe a wrong element type.
    @SuppressWarnings("unchecked")
    private static final Set EMPTY = new ImmutableSetWrapper(Collections.EMPTY_SET);

    /**
     * Returns the shared immutable empty set.
     *
     * @param <A> the (phantom) element type requested by the caller
     * @return the singleton empty set; must not be mutated
     */
    @Override
    @SuppressWarnings("unchecked")
    public <A> Set<A> empty() {
        return EMPTY;
    }
}
| apache-2.0 |
mafulafunk/wicket | wicket-core/src/main/java/org/apache/wicket/markup/html/include/Include.java | 6435 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.markup.html.include;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.Charset;
import javax.servlet.http.HttpServletRequest;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.core.util.resource.UrlResourceStream;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.MarkupStream;
import org.apache.wicket.markup.html.WebComponent;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.request.UrlUtils;
import org.apache.wicket.resource.ResourceUtil;
/**
* <p>
* Component that includes/ renders the import result of an URL, much like JSP include.
* </p>
* <p>
* Use this to integrate non-Wicket locations in your page. <strong>This component is NOT meant for
* integrating more Wicket sources as a means of quick and dirty page composition. Use Panels,
* Borders and (Markup)inheritance for page composition instead.</strong>
* </p>
* <p>
* You can feed this component the URL directly, or use a model that should deliver a valid URL. You
* can both use absolute (e.g. http://www.theserverside.com/) and relative (e.g. mydir/mypage.html)
* urls. This component will try to resolve relative urls to resources in the same webapplication.
* </p>
* <p>
* The following example shows how to integrate a header and footer, coming from a plain HTML source
* on the same server is integrated using this component. The files footer.html and header.html
* would be located in the web application root directory
* </p>
* <p>
* Java:
*
* <pre>
* ...
* add(new Include("header", "header.html"));
* add(new Include("footer", "footer.html"));
* ...
* </pre>
*
* Html:
*
* <pre>
* ...
* <div>
* <div wicket:id="header">header comes here</div>
* <div>I am the body!</div>
* <div wicket:id="footer">footer comes here</div>
* </div>
* ...
* </pre>
*
* </p>
*
* @author Eelco Hillenius
*/
public class Include extends WebComponent
{
private static final long serialVersionUID = 1L;
/**
* <p>
* Valid characters in a scheme.
* </p>
* <p>
* RFC 1738 says the following:
* </p>
* <blockquote>Scheme names consist of a sequence of characters. The lower case letters
* "a"--"z", digits, and the characters plus ("+"), period ("."), and hyphen ("-") are allowed.
* For resiliency, programs interpreting URLs should treat upper case letters as equivalent to
* lower case in scheme names (e.g., allow "HTTP" as well as "http"). </blockquote>
* <p>
* We treat as absolute any URL that begins with such a scheme name, followed by a colon.
* </p>
*/
private static final String VALID_SCHEME_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789+.-";
/**
* Construct.
*
* @param id
* component id
*/
public Include(final String id)
{
super(id);
}
/**
* Construct.
*
* @param id
* component id
* @param model
* the model
*/
public Include(String id, IModel<String> model)
{
super(id, model);
}
/**
* Construct.
*
* @param id
* component id
* @param modelObject
* the model object (will be wrapped in a model)
*/
public Include(String id, String modelObject)
{
super(id, new Model<String>(modelObject));
}
/**
* Imports the contents of the url of the model object.
*
* @return the imported contents
*/
protected String importAsString()
{
// gets the model object: should provide us with either an absolute or a
// relative url
String url = getDefaultModelObjectAsString();
if (UrlUtils.isRelative(url))
{
return importRelativeUrl(url);
}
else
{
return importAbsoluteUrl(url);
}
}
@Override
public void onComponentTagBody(final MarkupStream markupStream, final ComponentTag openTag)
{
String content = importAsString();
replaceComponentTagBody(markupStream, openTag, content);
}
/**
* Imports from a relative url.
*
* @param url
* the url to import
* @return the imported url's contents
*/
private String importRelativeUrl(CharSequence url)
{
// make the url absolute
HttpServletRequest req = (HttpServletRequest)getRequest().getContainerRequest();
StringBuilder buildUrl = new StringBuilder(url.length());
String scheme = req.getScheme();
int port = req.getServerPort();
buildUrl.append(scheme); // http, https
buildUrl.append("://");
buildUrl.append(req.getServerName());
if ((scheme.equals("http") && port != 80) || (scheme.equals("https") && port != 443))
{
buildUrl.append(':');
buildUrl.append(req.getServerPort());
}
buildUrl.append(req.getContextPath()).append('/').append(url);
return importAbsoluteUrl(buildUrl);
}
/**
 * Imports from an absolute url.
 *
 * @param url
 *            the url to import
 * @return the imported url's contents
 */
private String importAbsoluteUrl(CharSequence url)
{
    final URL parsed;
    try
    {
        parsed = new URL(url.toString());
    }
    catch (MalformedURLException e)
    {
        // Wrap the checked exception in Wicket's runtime exception, as callers expect.
        throw new WicketRuntimeException(e);
    }
    return importUrl(parsed);
}
/**
 * @return The charset of the text to be retrieved and included; {@code null} (the default)
 *         lets the underlying resource stream decide. Override to force a specific charset.
 */
public Charset getCharset()
{
    return null;
}
/**
 * Imports the contents from the given url.
 *
 * @param url
 *            the url
 * @return the imported contents, read through a {@code UrlResourceStream} using the charset
 *         returned by {@link #getCharset()}
 */
private final String importUrl(URL url)
{
    return ResourceUtil.readString(new UrlResourceStream(url), getCharset());
}
}
| apache-2.0 |
xiaoshanlin000/SLTableView | app/src/main/java/com/shanlin/sltableview/fragment/bean/DateSelectorBean.java | 812 | package com.shanlin.sltableview.fragment.bean;
/**
* Created by Shanlin on 2017/3/15.
*/
/**
 * Cell bean for a date-selector table row: carries an icon resource id, a text label and
 * the currently selected date. All setters return {@code this} so configuration calls can
 * be chained fluently.
 */
public class DateSelectorBean extends CellBaseBean {

    private int icon;       // icon resource id shown in the cell
    private String content; // label text of the cell
    private String date;    // selected date, kept as display text

    public DateSelectorBean() {
        // Register this bean under the date-selector cell type.
        super(CellType.CELL_TYPE_DATE_SELECTOR);
    }

    public DateSelectorBean setIcon(int icon) {
        this.icon = icon;
        return this;
    }

    public int getIcon() {
        return icon;
    }

    public DateSelectorBean setContent(String content) {
        this.content = content;
        return this;
    }

    public String getContent() {
        return content;
    }

    public DateSelectorBean setDate(String date) {
        this.date = date;
        return this;
    }

    public String getDate() {
        return date;
    }
}
| apache-2.0 |
alittlemind/TaxesCalculator | src/main/java/ru/evgenyhodz/CalculateTax.java | 12092 | package ru.evgenyhodz;
import java.util.Calendar;
import java.util.Date;
/**
* Класс содержит основные методы расчетов страховых платежей.
* С примерами расчетов можно ознакомиться на сайте: ipipip.ru
* <p>
* Class contains basic methods to calculate insurance payments to:
* 1. Federal Compulsory Medical Insurance Fund of Russia,
* 2. Pension Fund of Russian Federation.
* <p>
* Examples of calculations can be found on the website: ipipip.ru
*
* @author Evgeny Khodzitskiy (evgeny.hodz@gmail.com)
* @since 04.02.2017
*/
class CalculateTax {

    /** Minimum monthly salary in Russia; the base of every contribution. */
    private int minSalary;

    /** Contribution rate of the Pension Fund of Russia (PFR). */
    private double pRate;

    /** Contribution rate of the Federal Compulsory Medical Insurance Fund (FFOMS). */
    private double fRate;

    /**
     * Date validator; also classifies the billing period into one of four situations
     * (see {@link #countFullMonths()}).
     */
    private Validation val;

    /** Calculated payment to the Pension Fund of Russia. */
    private double pfr;

    /** Calculated payment to the FFOMS. */
    private double ffoms;

    /** Number of full months in the billing period. */
    private int fullMonth;

    /**
     * Constructor.
     *
     * @param validation validation helper that checks and classifies the billing period
     * @param salary     minimum salary the contributions are based on
     * @param firstRate  PFR coefficient
     * @param secondRate FFOMS coefficient
     */
    CalculateTax(Validation validation, int salary, double firstRate, double secondRate) {
        this.val = validation;
        this.minSalary = salary;
        this.pRate = firstRate;
        this.fRate = secondRate;
    }

    /**
     * @return payment to the PFR, rounded to two decimal places.
     */
    double getPfr() {
        return Math.round(pfr * 100D) / 100D;
    }

    /**
     * @return payment to the FFOMS, rounded to two decimal places.
     */
    double getFfoms() {
        return Math.round(ffoms * 100D) / 100D;
    }

    /**
     * @return sum PFR + FFOMS, rounded to two decimal places.
     */
    double getTotal() {
        return Math.round((pfr + ffoms) * 100D) / 100D;
    }

    /**
     * Contribution for {@code months} complete months at the given rate.
     */
    private double fullMonthsPart(double rate, int months) {
        return minSalary * rate * months;
    }

    /**
     * Pro-rated contribution for a partial month: {@code daysWorked} out of
     * {@code numDays} calendar days, at the given rate.
     */
    private double partialMonthPart(double rate, int daysWorked, int numDays) {
        return minSalary * rate * daysWorked / numDays;
    }

    /**
     * Situation 1 of 4: the first month of the billing period is full, the last one is
     * partial. Does nothing unless the validator classified the period as situation 1.
     *
     * @param start beginning of billing period
     * @param end   end of billing period
     */
    private void calcFirstSituation(Date start, Date end) {
        val.check(start, end);
        if (val.getSituation() == 1) {
            int numDays = val.getEnded().getActualMaximum(Calendar.DAY_OF_MONTH);
            int daysWorked = val.getEnded().get(Calendar.DAY_OF_MONTH);
            countFullMonths();
            // countFullMonths() guarantees at least one full month in this situation.
            if (fullMonth != 0) {
                pfr = fullMonthsPart(pRate, fullMonth) + partialMonthPart(pRate, daysWorked, numDays);
                ffoms = fullMonthsPart(fRate, fullMonth) + partialMonthPart(fRate, daysWorked, numDays);
            }
        }
    }

    /**
     * Situation 2 of 4: the first month of the billing period is partial, the last one is
     * full. Does nothing unless the validator classified the period as situation 2.
     *
     * @param start beginning of billing period
     * @param end   end of billing period
     */
    private void calcSecondSituation(Date start, Date end) {
        val.check(start, end);
        if (val.getSituation() == 2) {
            int numDays = val.getStarted().getActualMaximum(Calendar.DAY_OF_MONTH);
            // Days worked in the first month: from the start day to the end of that month.
            int daysWorked = numDays - val.getStarted().get(Calendar.DAY_OF_MONTH) + 1;
            countFullMonths();
            // countFullMonths() guarantees at least one full month in this situation.
            if (fullMonth != 0) {
                pfr = fullMonthsPart(pRate, fullMonth) + partialMonthPart(pRate, daysWorked, numDays);
                ffoms = fullMonthsPart(fRate, fullMonth) + partialMonthPart(fRate, daysWorked, numDays);
            }
        }
    }

    /**
     * Situation 3 of 4: both the first and the last month of the billing period are
     * partial. Does nothing unless the validator classified the period as situation 3.
     *
     * @param start beginning of billing period
     * @param end   end of billing period
     */
    private void calcThirdSituation(Date start, Date end) {
        val.check(start, end);
        if (val.getSituation() == 3) {
            int numDays = val.getStarted().getActualMaximum(Calendar.DAY_OF_MONTH);
            int daysWorked = numDays - val.getStarted().get(Calendar.DAY_OF_MONTH) + 1;
            int endDays = val.getEnded().getActualMaximum(Calendar.DAY_OF_MONTH);
            int endWorked = val.getEnded().get(Calendar.DAY_OF_MONTH);
            countFullMonths();
            if (fullMonth != 0) {
                pfr = partialMonthPart(pRate, endWorked, endDays)
                        + partialMonthPart(pRate, daysWorked, numDays)
                        + fullMonthsPart(pRate, fullMonth);
                ffoms = partialMonthPart(fRate, endWorked, endDays)
                        + partialMonthPart(fRate, daysWorked, numDays)
                        + fullMonthsPart(fRate, fullMonth);
            } else {
                // NOTE(review): when no full months lie between the two partial ones, the
                // original algorithm still adds one whole month's contribution on top of
                // the two partial months — confirm against the reference calculations
                // (ipipip.ru) that this is intended and not double counting.
                pfr = partialMonthPart(pRate, endWorked, endDays)
                        + partialMonthPart(pRate, daysWorked, numDays)
                        + fullMonthsPart(pRate, 1);
                ffoms = partialMonthPart(fRate, endWorked, endDays)
                        + partialMonthPart(fRate, daysWorked, numDays)
                        + fullMonthsPart(fRate, 1);
            }
        }
    }

    /**
     * Situation 4 of 4: both the first and the last month of the billing period are full.
     * Does nothing unless the validator classified the period as situation 4.
     *
     * @param start beginning of billing period
     * @param end   end of billing period
     */
    private void calcFourthSituation(Date start, Date end) {
        val.check(start, end);
        if (val.getSituation() == 4) {
            countFullMonths();
            pfr = fullMonthsPart(pRate, fullMonth);
            ffoms = fullMonthsPart(fRate, fullMonth);
        }
    }

    /**
     * Counts the number of full months in the billing period; the formula depends on the
     * situation reported by the validator:
     * <p>
     * 1. first month full, last partial — at least one month is always full;<br>
     * 2. first month partial, last full — at least one month is always full;<br>
     * 3. both boundary months partial — only the months strictly between them count;<br>
     * 4. both boundary months full — at least two months are always full.
     * <p>
     * NOTE(review): the month arithmetic uses {@link Calendar#MONTH} only, so it
     * presumably assumes start and end fall in the same calendar year — confirm.
     */
    private void countFullMonths() {
        if (val.getSituation() == 2 || val.getSituation() == 1) {
            fullMonth = val.getEnded().get(Calendar.MONTH) - val.getStarted().get(Calendar.MONTH);
            if (fullMonth <= 0) {
                fullMonth = 1;
            }
        } else if (val.getSituation() == 3) {
            fullMonth = val.getEnded().get(Calendar.MONTH) - val.getStarted().get(Calendar.MONTH) - 1;
        } else if (val.getSituation() == 4) {
            fullMonth = val.getEnded().get(Calendar.MONTH) - val.getStarted().get(Calendar.MONTH) + 1;
            if (fullMonth <= 1) {
                fullMonth = 2;
            }
        }
    }

    /**
     * Runs all four situation calculations; only the one matching the validator's
     * classification actually computes anything.
     *
     * @param start beginning of billing period
     * @param end   end of billing period
     */
    void countAll(Date start, Date end) {
        calcFirstSituation(start, end);
        calcSecondSituation(start, end);
        calcThirdSituation(start, end);
        calcFourthSituation(start, end);
    }
}
| apache-2.0 |
naver/ngrinder | ngrinder-controller/src/main/java/org/ngrinder/region/service/RegionService.java | 6162 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ngrinder.region.service;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.Maps;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.cluster.Member;
import lombok.RequiredArgsConstructor;
import net.grinder.util.NetworkUtils;
import org.apache.commons.lang.StringUtils;
import org.ngrinder.common.constant.ClusterConstants;
import org.ngrinder.common.exception.NGrinderRuntimeException;
import org.ngrinder.infra.config.Config;
import org.ngrinder.infra.hazelcast.HazelcastService;
import org.ngrinder.infra.hazelcast.task.RegionInfoTask;
import org.ngrinder.region.model.RegionInfo;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptySet;
import static java.util.stream.Collectors.toList;
import static org.apache.commons.lang.StringUtils.isEmpty;
import static org.ngrinder.common.constant.CacheConstants.*;
import static org.ngrinder.common.util.ExceptionUtils.processException;
import static org.ngrinder.common.util.RegionUtils.convertSubregionsStringToSet;
/**
* Region service class. This class responsible to keep the status of available regions.
*
* @since 3.1
*/
@Service
@RequiredArgsConstructor
public class RegionService {

    private final Config config;
    private final HazelcastService hazelcastService;
    private final HazelcastInstance hazelcastInstance;

    /**
     * Lazily cached map of region name to {@link RegionInfo}, refreshed at most once per
     * {@code REGION_CACHE_TIME_TO_LIVE_SECONDS}. In cluster mode the info is collected from
     * every cluster member; otherwise only the single local region is exposed.
     */
    private final Supplier<Map<String, RegionInfo>> allRegions = Suppliers.memoizeWithExpiration(() -> {
        Map<String, RegionInfo> regions = Maps.newHashMap();
        if (config.isClustered()) {
            List<RegionInfo> regionInfos = hazelcastService.submitToAllRegion(REGION_EXECUTOR_SERVICE_NAME, new RegionInfoTask());
            for (RegionInfo regionInfo : regionInfos) {
                regions.put(regionInfo.getRegionName(), regionInfo);
            }
        } else {
            final String regionIP = StringUtils.defaultIfBlank(config.getCurrentIP(), NetworkUtils.DEFAULT_LOCAL_HOST_ADDRESS);
            regions.put(config.getRegion(), new RegionInfo(config.getRegion(), emptySet(), regionIP, config.getControllerPort()));
        }
        return regions;
    }, REGION_CACHE_TIME_TO_LIVE_SECONDS, TimeUnit.SECONDS);

    /**
     * Lazily cached list of region/subregion attribute maps of all cluster members that
     * declare a region attribute, refreshed at most once per
     * {@code REGION_CACHE_TIME_TO_LIVE_SECONDS}.
     */
    private final Supplier<List<Map<String, String>>> allRegionNames = Suppliers.memoizeWithExpiration(() -> {
        List<Map<String, String>> regionNames = new ArrayList<>();
        for (Member member : hazelcastInstance.getCluster().getMembers()) {
            if (member.getAttributes().containsKey(REGION_ATTR_KEY)) {
                Map<String, String> regionMap = new HashMap<>();
                regionMap.put(REGION_ATTR_KEY, member.getAttributes().get(REGION_ATTR_KEY));
                regionMap.put(SUBREGION_ATTR_KEY, member.getAttributes().get(SUBREGION_ATTR_KEY));
                regionNames.add(regionMap);
            }
        }
        return regionNames;
    }, REGION_CACHE_TIME_TO_LIVE_SECONDS, TimeUnit.SECONDS);

    @PostConstruct
    public void initRegion() {
        if (config.isClustered()) {
            verifyDuplicatedRegion();
        }
    }

    /**
     * Verify duplicate region when starting with cluster mode: the local region name must
     * not already be claimed by a controller with a different IP.
     *
     * @since 3.2
     */
    private void verifyDuplicatedRegion() {
        Map<String, RegionInfo> regions = getAll();
        String localRegion = getCurrent();
        RegionInfo regionInfo = regions.get(localRegion);
        if (regionInfo != null && !StringUtils.equals(regionInfo.getIp(), config.getClusterProperties().getProperty
                (ClusterConstants.PROP_CLUSTER_HOST, NetworkUtils.DEFAULT_LOCAL_HOST_ADDRESS))) {
            throw processException("The region name, " + localRegion
                    + ", is already used by other controller " + regionInfo.getIp()
                    + ". Please set the different region name in this controller.");
        }
    }

    /**
     * Get current region. This method returns where this service is running.
     *
     * @return current region.
     */
    public String getCurrent() {
        return config.getRegion();
    }

    /**
     * Get region by region name.
     *
     * @param regionName region name
     * @return region info
     * @throws NGrinderRuntimeException if no such region exists
     */
    public RegionInfo getOne(String regionName) {
        RegionInfo regionInfo = getAll().get(regionName);
        if (regionInfo != null) {
            return regionInfo;
        }
        throw new NGrinderRuntimeException(regionName + " is not exist");
    }

    /**
     * Get region by region and subregion name. An empty subregion falls back to a plain
     * region lookup.
     *
     * @param region    region name
     * @param subregion subregion name, may be empty
     * @return region info
     * @throws NGrinderRuntimeException if no matching region/subregion exists
     */
    public RegionInfo getOne(String region, String subregion) {
        if (isEmpty(subregion)) {
            return getOne(region);
        }
        RegionInfo regionInfo = getAll().get(region);
        // subregion is non-empty here, so only a real membership check is needed
        // (the original re-checked isEmpty(subregion), which was always false).
        if (regionInfo != null && regionInfo.getSubregion().contains(subregion)) {
            return regionInfo;
        }
        throw new NGrinderRuntimeException(region + "." + subregion + " is not exist");
    }

    /**
     * Get region list of all clustered controller.
     *
     * @return region list
     */
    public Map<String, RegionInfo> getAll() {
        return allRegions.get();
    }

    /**
     * Get the region/subregion names of all cluster members, with the subregion attribute
     * expanded into a set. Returns an empty list in non-cluster mode.
     *
     * @return list of maps keyed by the region and subregion attribute keys
     */
    public List<Map<String, Object>> getAllVisibleRegionNames() {
        if (config.isClustered()) {
            return allRegionNames.get().stream().map(region -> {
                Map<String, Object> regionInfo = new HashMap<>();
                String subregionAttributes = region.get(SUBREGION_ATTR_KEY);
                regionInfo.put(REGION_ATTR_KEY, region.get(REGION_ATTR_KEY));
                regionInfo.put(SUBREGION_ATTR_KEY, convertSubregionsStringToSet(subregionAttributes));
                return regionInfo;
            }).collect(toList());
        } else {
            return emptyList();
        }
    }

    public Config getConfig() {
        return config;
    }
}
| apache-2.0 |
pnerg/simple-pool | src/main/java/simplepool/PoolImpl.java | 5386 | /**
* Copyright 2015 Peter Nerg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package simplepool;
import static javascalautils.TryCompanion.Try;
import java.time.Duration;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import java.util.function.Predicate;
import javascalautils.Option;
import javascalautils.ThrowableFunction0;
import javascalautils.Try;
import javascalautils.Unit;
import javascalautils.Validator;
import javascalautils.concurrent.Future;
import simplepool.Constants.PoolMode;
import static javascalautils.concurrent.FutureCompanion.Future;
/**
* The pool implementation.
*
* @author Peter Nerg
*/
final class PoolImpl<T> implements Pool<T> {
    /** Factory invoked to create a new pooled instance when the queue is empty. */
    private final ThrowableFunction0<T> instanceFactory;
    /** Validates an instance on return; failing instances are destroyed, not re-queued. */
    private final Predicate<T> validator;
    /** Invoked to dispose of an instance (failed validation, gone stale, or pool destroyed). */
    private final Consumer<T> destructor;
    /** The actual queue implementation. */
    private final PoolQueue<T> poolQueue;
    /**
     * Acts as gate keeper only allowing a maximum number of concurrent users/threads for this pool.
     */
    private final Semaphore getPermits;
    /**
     * Counts instances currently on loan: a permit is released on every successful get and
     * consumed on every return, preventing returns of objects never taken from the pool.
     */
    private final Semaphore returnPermits = new Semaphore(0);
    /** Handle of the periodic stale-instance reaper, if a scheduler was provided. */
    private final Option<ScheduledFuture<?>> scheduledFuture;
    /**
     * If this pool is valid. <br>
     * I.e. {@link #destroy()} has not been invoked.
     */
    private final AtomicBoolean isValid = new AtomicBoolean(true);
    /** Maximum number of instances the pool may hand out concurrently. */
    private final int maxSize;
    /**
     * Creates the pool.
     *
     * @param instanceFactory factory for new pooled instances
     * @param maxSize maximum number of concurrently borrowed instances
     * @param validator validation applied when an instance is returned
     * @param destructor disposes of invalid/stale instances
     * @param poolMode FIFO or LIFO queueing of idle instances
     * @param idleTimeout how long an idle instance may sit in the queue before being reaped
     * @param executor optional scheduler running the periodic stale-instance sweep
     */
    PoolImpl(ThrowableFunction0<T> instanceFactory, int maxSize, Predicate<T> validator, Consumer<T> destructor, PoolMode poolMode, Duration idleTimeout, Option<ScheduledExecutorService> executor) {
        this.maxSize = maxSize;
        poolQueue = poolMode == PoolMode.FIFO ? new PoolQueueFIFO<>() : new PoolQueueLIFO<>();
        this.instanceFactory = instanceFactory;
        this.validator = validator;
        this.destructor = destructor;
        this.getPermits = new Semaphore(maxSize);
        long delayMillis = idleTimeout.toMillis();
        // The reaper first fires after one full idle timeout, then every quarter timeout.
        // NOTE(review): an idleTimeout below 4 ms makes the period 0, which
        // scheduleWithFixedDelay rejects — confirm callers never pass such small timeouts.
        scheduledFuture = executor.map(ss -> {
            return ss.scheduleWithFixedDelay(() -> {
                poolQueue.markStaleInstances(idleTimeout, destructor);
            } , delayMillis, delayMillis / 4, TimeUnit.MILLISECONDS);
        });
    }
    /*
     * (non-Javadoc)
     *
     * @see simplepool.Pool#getInstance(long, java.util.concurrent.TimeUnit)
     */
    @Override
    public Try<T> getInstance(Duration maxWaitTime) {
        // NOTE(review): this throws directly (outside the Try) once the pool is destroyed,
        // unlike other failures which are reported as a failed Try — confirm intended.
        if (!isValid.get()) {
            throw new IllegalStateException("Pool has been destroyed.");
        }
        return Try(() -> {
            // attempt to get a go ahead by acquiring a semaphore
            if (!getPermits.tryAcquire(maxWaitTime.toMillis(), TimeUnit.MILLISECONDS)) {
                throw new TimeoutException("Timeout waiting for a free object in the pool");
            }
            // grant a matching return permit so this borrowed instance may be given back
            returnPermits.release();
            // reuse an idle instance if present, else create a fresh one
            return poolQueue.head().getOrElse(() -> createInstance());
        });
    }
    /*
     * (non-Javadoc)
     *
     * @see simplepool.Pool#returnInstance(java.lang.Object)
     */
    @Override
    public Try<Unit> returnInstance(T instance) {
        return Try(() -> {
            Validator.requireNonNull(instance);
            // reject returns of instances that were never borrowed from this pool
            if (!returnPermits.tryAcquire()) {
                throw new PoolException("No permits left to return object to the pool");
            }
            // first validate the instance
            // if we fail validation the instance is destroyed and the pooled
            // instance is marked as destroyed
            if (validator.test(instance)) {
                poolQueue.add(instance);
            } else {
                destructor.accept(instance);
            }
            // now release a permit to take a new item from the pool
            getPermits.release();
        });
    }
    /*
     * (non-Javadoc)
     *
     * @see simplepool.Pool#destroy()
     */
    @Override
    public Future<Unit> destroy() {
        return Future(() -> {
            // compareAndSet ensures destruction runs at most once
            if (isValid.compareAndSet(true, false)) {
                scheduledFuture.forEach(sf -> sf.cancel(true));
                // immediately drain all free resources.
                int permitsLeft = maxSize - getPermits.drainPermits();
                // still outstanding resources borrowed from the pool
                // we must wait until each of them has been returned
                while (permitsLeft > 0) {
                    getPermits.acquire();
                    permitsLeft--;
                }
                // with all permits acquired we know all items in the pool have been returned (or never used)
                // we can now safely destroy all items in the pool
                // with negative duration we will in practice mark any item in the pool as stale and destroy it
                poolQueue.markStaleInstances(Duration.ofDays(-69), destructor);
            }
        });
    }
    /**
     * Creates a new instance via the factory; on failure the previously acquired permits
     * are rolled back so the pool's bookkeeping stays consistent.
     */
    private T createInstance() {
        try {
            return instanceFactory.apply();
        } catch (Throwable ex) {
            // for some reason we failed to create an instance
            // release the semaphore that was previously acquired otherwise
            // me might drain all semaphores
            getPermits.release();
            returnPermits.tryAcquire();
            throw new PoolException("Failed to create instance", ex);
        }
    }
}
| apache-2.0 |
incodehq/contactapp | backend/app/src/main/java/org/incode/eurocommercial/contactapp/app/fixture/scenarios/DemoFixture.java | 3058 | /*
* Copyright 2015-2016 Eurocommercial Properties NV
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.incode.eurocommercial.contactapp.app.fixture.scenarios;
import java.net.URL;
import java.util.List;
import java.util.Map;
import com.google.common.base.Predicates;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.Lists;
import com.google.common.io.Resources;
import org.apache.isis.applib.fixturescripts.FixtureScript;
import org.isisaddons.module.excel.dom.ExcelFixture;
import org.incode.eurocommercial.contactapp.module.contact.fixture.ContactTearDown;
import org.incode.eurocommercial.contactapp.module.contacts.dom.Contact;
import org.incode.eurocommercial.contactapp.module.country.fixture.CountryTearDown;
import org.incode.eurocommercial.contactapp.module.group.fixture.ContactGroupTearDown;
import org.incode.eurocommercial.contactapp.module.number.fixture.ContactNumberTearDown;
import org.incode.eurocommercial.contactapp.module.role.fixture.ContactRoleTearDown;
import lombok.Getter;
public class DemoFixture extends FixtureScript {

    /** Contacts created by this fixture, exposed to whoever ran it. */
    @Getter
    private final List<Contact> contacts = Lists.newArrayList();

    public DemoFixture() {
        withDiscoverability(Discoverability.DISCOVERABLE);
    }

    @Override
    protected void execute(final ExecutionContext executionContext) {
        // Remove all existing data first, children before parents.
        executionContext.executeChild(this, new ContactRoleTearDown());
        executionContext.executeChild(this, new ContactNumberTearDown());
        executionContext.executeChild(this, new ContactTearDown());
        executionContext.executeChild(this, new ContactGroupTearDown());
        executionContext.executeChild(this, new CountryTearDown());

        // Load demo data from the spreadsheet bundled next to this class.
        final URL spreadsheetUrl = Resources.getResource(DemoFixture.class, getSpreadsheetBasename() + ".xlsx");
        final ExcelFixture excelFixture = new ExcelFixture(spreadsheetUrl, getHandlers());
        executionContext.executeChild(this, excelFixture);

        // Expose the non-null contacts created by the ExcelFixture to our caller.
        final Map<Class, List<Object>> objectsByClass = excelFixture.getObjectsByClass();
        final List imported = (List) objectsByClass.get(ContactImport.class);
        getContacts().addAll(
                FluentIterable
                        .from(imported)
                        .filter(Predicates.notNull())
                        .toList());
    }

    /** Base name (without extension) of the spreadsheet resource to load. */
    protected String getSpreadsheetBasename() {
        return getClass().getSimpleName();
    }

    /** Row-handler classes the ExcelFixture maps spreadsheet sheets onto. */
    private Class[] getHandlers() {
        return new Class[] { ContactImport.class };
    }
}
| apache-2.0 |
firejack-open/Firejack-Platform | platform/src/main/java/net/firejack/platform/core/store/version/UIDStore.java | 5531 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package net.firejack.platform.core.store.version;
import net.firejack.platform.core.model.BaseEntityModel;
import net.firejack.platform.core.model.UID;
import net.firejack.platform.core.model.UIDModel;
import net.firejack.platform.core.store.BaseStore;
import net.firejack.platform.core.utils.ConfigContainer;
import net.firejack.platform.core.utils.SecurityHelper;
import net.firejack.platform.model.helper.FileHelper;
import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.Restrictions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
public class UIDStore<E extends UIDModel, ID extends Serializable> extends BaseStore<E, ID> implements IUIDStore<E, ID> {
    @Autowired
    protected FileHelper helper;
    /**
     * Loads a {@link UID} row by its primary key.
     */
    @Override
    @Transactional(readOnly = true)
    public UID uidById(Long id) {
        return getHibernateTemplate().get(UID.class, id);
    }
    /**
     * Loads a {@link UID} row by its string uid value; {@code null} when none matches.
     */
    @Override
    @Transactional(readOnly = true)
    public UID uidById(String uid) {
        Criteria criteria = getSession().createCriteria(UID.class);
        criteria.add(Restrictions.eq("uid", uid));
        return (UID) criteria.uniqueResult();
    }
    /**
     * Finds the entity owning the UID row with the given primary key.
     */
    @Override
    @Transactional(readOnly = true)
    public E findByUIDId(Long uidId) {
        List<Criterion> criterions = new ArrayList<Criterion>();
        Criterion registryNodeIdCriterion = Restrictions.eq("uid.id", uidId);
        criterions.add(registryNodeIdCriterion);
        return findByCriteria(criterions, null, null);
    }
    /**
     * Finds the entity whose UID has the given string value.
     */
    @Override
    @Transactional(readOnly = true)
    public E findByUID(String uid) {
        return findByUIDLocal(uid);
    }
    /**
     * Saves the entity, first persisting a freshly generated/resolved UID when needed.
     */
    @Override
    @Transactional
    public void saveOrUpdate(E entity) {
        BaseEntityModel uid = createUID(entity);
        if (uid != null) {
            getHibernateTemplate().save(uid);
        }
        super.saveOrUpdate(entity);
    }
    /**
     * Batch variant of {@link #saveOrUpdate(UIDModel)}: persists any new UIDs first, then
     * the entities themselves.
     */
    @Override
    @Transactional
    public void saveOrUpdateAll(List<E> entities) {
        List<BaseEntityModel> uid = createUID(entities);
        if (!uid.isEmpty()) {
            getHibernateTemplate().saveOrUpdateAll(uid);
        }
        super.saveOrUpdateAll(entities);
    }
    /**
     * Ensures a new (not yet persisted) entity carries a UID.
     * <ul>
     *   <li>no UID set: a fresh secure UID is generated and attached;</li>
     *   <li>a UID is set and the application is installed: an existing UID row with the
     *       same id (or, as a fallback, the same uid string) is reused instead.</li>
     * </ul>
     *
     * @param entity the entity being persisted
     * @return the newly generated {@link UID} that still needs saving, or {@code null}
     *         when an existing persistent UID was reused (or the entity is an update)
     */
    public BaseEntityModel createUID(E entity) {
        UID uid = null;
        if (entity.getId() == null) {
            uid = entity.getUid();
            if (uid == null) {
                uid = new UID(SecurityHelper.generateSecureId());
                entity.setUid(uid);
            } else if (ConfigContainer.isAppInstalled()) {
                UIDModel uidModel = findByUIDId(uid.getId());
                //todo{start}: temporary solution for diff merge
                if (uidModel == null) {
                    UID foundUid = uidById(uid.getUid());
                    if (foundUid != null) {
                        entity.setUid(foundUid);
                        uid = null;
                    }
                }
                //todo[end]: temporary solution for diff merge
                if (uidModel != null) {
                    entity.setUid(uidModel.getUid());
                    uid = null;
                }
            }
        }
        return uid;
    }
    /**
     * Batch variant of {@link #createUID(UIDModel)}.
     *
     * @param entities entities being persisted
     * @return the newly generated UIDs that still need saving (possibly empty)
     */
    public List<BaseEntityModel> createUID(List<E> entities) {
        List<BaseEntityModel> list = new ArrayList<BaseEntityModel>();
        for (E entity : entities) {
            BaseEntityModel uid = createUID(entity);
            if (uid != null) {
                list.add(uid);
            }
        }
        return list;
    }
    /**
     * Deletes the entity identified by the given UID string, if present.
     *
     * @param uid uid string, must not be {@code null}
     * @return the deleted entity, or {@code null} when nothing matched
     */
    @Override
    @Transactional
    @SuppressWarnings("unchecked")
    public E deleteByUID(String uid) {
        if (uid == null) {
            throw new IllegalArgumentException("Empty UID parameter.");
        }
        E model = findByUIDLocal(uid);
        if (model != null) {
            delete(model);
        }
        return model;
    }
    /**
     * Looks up the entity by uid string; logs and rethrows when the uid is (unexpectedly)
     * shared by more than one row.
     */
    private E findByUIDLocal(String uid) {
        if (uid == null) {
            throw new IllegalArgumentException("Empty UID parameter.");
        }
        try {
            Criteria criteria = getSession().createCriteria(getClazz());
            criteria.createAlias("uid", "uid");
            criteria.add(Restrictions.eq("uid.uid", uid));
            return (E) criteria.uniqueResult();
        } catch (HibernateException e) {
            logger.error("Query did not return a unique result by UID:[" + uid + "]");
            throw e;
        }
    }
}
| apache-2.0 |
gawkermedia/googleads-java-lib | modules/adwords_axis/src/main/java/com/google/api/ads/adwords/axis/v201601/cm/DatabaseErrorReason.java | 3849 | /**
* DatabaseErrorReason.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.adwords.axis.v201601.cm;
// NOTE: this class was auto-generated by Apache Axis WSDL2Java (see the file header).
// Do not edit by hand — regenerate from the WSDL instead; manual changes will be lost.
public class DatabaseErrorReason implements java.io.Serializable {
    private java.lang.String _value_;
    // Registry of all enumeration values, keyed by their wire string (Axis enum pattern).
    private static java.util.HashMap _table_ = new java.util.HashMap();
    // Constructor
    protected DatabaseErrorReason(java.lang.String value) {
        _value_ = value;
        _table_.put(_value_,this);
    }
    public static final java.lang.String _CONCURRENT_MODIFICATION = "CONCURRENT_MODIFICATION";
    public static final java.lang.String _PERMISSION_DENIED = "PERMISSION_DENIED";
    public static final java.lang.String _ACCESS_PROHIBITED = "ACCESS_PROHIBITED";
    public static final java.lang.String _CAMPAIGN_PRODUCT_NOT_SUPPORTED = "CAMPAIGN_PRODUCT_NOT_SUPPORTED";
    public static final java.lang.String _DUPLICATE_KEY = "DUPLICATE_KEY";
    public static final java.lang.String _DATABASE_ERROR = "DATABASE_ERROR";
    public static final java.lang.String _UNKNOWN = "UNKNOWN";
    public static final DatabaseErrorReason CONCURRENT_MODIFICATION = new DatabaseErrorReason(_CONCURRENT_MODIFICATION);
    public static final DatabaseErrorReason PERMISSION_DENIED = new DatabaseErrorReason(_PERMISSION_DENIED);
    public static final DatabaseErrorReason ACCESS_PROHIBITED = new DatabaseErrorReason(_ACCESS_PROHIBITED);
    public static final DatabaseErrorReason CAMPAIGN_PRODUCT_NOT_SUPPORTED = new DatabaseErrorReason(_CAMPAIGN_PRODUCT_NOT_SUPPORTED);
    public static final DatabaseErrorReason DUPLICATE_KEY = new DatabaseErrorReason(_DUPLICATE_KEY);
    public static final DatabaseErrorReason DATABASE_ERROR = new DatabaseErrorReason(_DATABASE_ERROR);
    public static final DatabaseErrorReason UNKNOWN = new DatabaseErrorReason(_UNKNOWN);
    public java.lang.String getValue() { return _value_;}
    // Resolves the singleton instance for a wire string; throws when the value is unknown.
    public static DatabaseErrorReason fromValue(java.lang.String value)
          throws java.lang.IllegalArgumentException {
        DatabaseErrorReason enumeration = (DatabaseErrorReason)
            _table_.get(value);
        if (enumeration==null) throw new java.lang.IllegalArgumentException();
        return enumeration;
    }
    public static DatabaseErrorReason fromString(java.lang.String value)
          throws java.lang.IllegalArgumentException {
        return fromValue(value);
    }
    // Identity equality is safe: fromValue/readResolve guarantee one instance per value.
    public boolean equals(java.lang.Object obj) {return (obj == this);}
    public int hashCode() { return toString().hashCode();}
    public java.lang.String toString() { return _value_;}
    // Preserves the singleton property across Java serialization.
    public java.lang.Object readResolve() throws java.io.ObjectStreamException { return fromValue(_value_);}
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.EnumSerializer(
            _javaType, _xmlType);
    }
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.EnumDeserializer(
            _javaType, _xmlType);
    }
    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(DatabaseErrorReason.class);
    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201601", "DatabaseError.Reason"));
    }
    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }
}
| apache-2.0 |
BURAI-team/burai | src/burai/atoms/viewer/operation/mouse/MouseEventEditorMenu.java | 1564 | /*
* Copyright (C) 2018 Satomichi Nishihara
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package burai.atoms.viewer.operation.mouse;
import burai.atoms.viewer.operation.ViewerEventEditorMenu;
import burai.atoms.viewer.operation.ViewerEventManager;
import javafx.scene.input.MouseEvent;
public class MouseEventEditorMenu extends ViewerEventEditorMenu<MouseEvent> implements MouseEventKernel {

    /** Delegate that routes raw mouse events for this editor menu. */
    private final MouseEventProxy mouseProxy;

    /**
     * Creates an editor-menu kernel backed by a proxy around the given handler.
     *
     * @param handler shared mouse-event state used by the proxy
     */
    public MouseEventEditorMenu(MouseEventHandler handler) {
        this.mouseProxy = new MouseEventProxy(handler, this);
    }

    /**
     * Hands the event to the proxy, which presumably dispatches back into the
     * {@code performOnMouse*} callbacks of this kernel.
     */
    @Override
    public void perform(ViewerEventManager manager, MouseEvent event) {
        this.mouseProxy.perform(manager, event);
    }

    /** Intentionally empty: a mouse press does not affect the editor menu. */
    @Override
    public void performOnMousePressed(MouseEvent event) {
        // no action required
    }

    /** Intentionally empty: a mouse drag does not affect the editor menu. */
    @Override
    public void performOnMouseDragged(MouseEvent event) {
        // no action required
    }

    /** Intentionally empty: a mouse release does not affect the editor menu. */
    @Override
    public void performOnMouseReleased(MouseEvent event) {
        // no action required
    }
}
| apache-2.0 |
mkolisnyk/aerial | aerial/src/main/java/com/github/mkolisnyk/aerial/core/templates/package-info.java | 100 | /**
* .
*/
/**
* @author Myk Kolisnyk
*
*/
package com.github.mkolisnyk.aerial.core.templates;
| apache-2.0 |
appium/java-client | src/main/java/io/appium/java_client/ios/options/simulator/SupportsCalendarAccessAuthorizedOption.java | 2264 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.appium.java_client.ios.options.simulator;
import io.appium.java_client.remote.options.BaseOptions;
import io.appium.java_client.remote.options.CanSetCapability;
import org.openqa.selenium.Capabilities;
import java.util.Optional;
import static io.appium.java_client.internal.CapabilityHelpers.toSafeBoolean;
public interface SupportsCalendarAccessAuthorizedOption<T extends BaseOptions<T>> extends
        Capabilities, CanSetCapability<T> {
    String CALENDAR_ACCESS_AUTHORIZED_OPTION = "calendarAccessAuthorized";

    /**
     * Authorizes calendar access on the iOS Simulator.
     *
     * @return self instance for chaining.
     */
    default T calendarAccessAuthorized() {
        return amend(CALENDAR_ACCESS_AUTHORIZED_OPTION, Boolean.TRUE);
    }

    /**
     * Controls the calendar authorization status on the iOS Simulator for the
     * given bundleId: {@code true} enables access, {@code false} disables it,
     * and leaving the capability unset keeps the authorization status untouched.
     *
     * @param value whether the simulator should authorize calendar access.
     * @return self instance for chaining.
     */
    default T setCalendarAccessAuthorized(boolean value) {
        return amend(CALENDAR_ACCESS_AUTHORIZED_OPTION, Boolean.valueOf(value));
    }

    /**
     * Reads back the calendar-access capability, if one has been configured.
     *
     * @return an {@link Optional} with the configured value, or empty when unset.
     */
    default Optional<Boolean> doesCalendarAccessAuthorized() {
        Object rawValue = getCapability(CALENDAR_ACCESS_AUTHORIZED_OPTION);
        return Optional.ofNullable(toSafeBoolean(rawValue));
    }
}
| apache-2.0 |
testify-project/testify | modules/junit4/junit4-core/src/test/java/org/testifyproject/junit4/VerifyInteractionTest.java | 1938 | /*
* Copyright 2016-2017 Testify Project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.testifyproject.junit4;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.verify;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.testifyproject.annotation.Fake;
import org.testifyproject.annotation.Sut;
import org.testifyproject.junit4.fixture.ImplicitType;
import org.testifyproject.junit4.fixture.common.Hello;
/**
*
* @author saden
*/
@RunWith(UnitTest.class)
public class VerifyInteractionTest {

    // Real Hello instance that backs the @Fake field; the assertions below show
    // the generated mock delegates to it (delegate.isCalled() flips to true).
    Hello delegate = new Hello();

    // System under test; verify = true presumably asks Testify to verify
    // collaborator interactions after the test -- TODO confirm against docs.
    @Sut(verify = true)
    ImplicitType sut;

    // Replaced by a Mockito mock wrapping 'delegate' (see verifyInjections).
    @Fake
    Hello hello = delegate;

    /**
     * Sanity-checks the test fixture before each test: both fields were
     * injected, and both the SUT's collaborator and the @Fake field are
     * Mockito mocks.
     */
    @Before
    public void verifyInjections() {
        assertThat(sut).isNotNull();
        assertThat(hello).isNotNull();
        assertThat(Mockito.mockingDetails(sut.getHello()).isMock()).isTrue();
        assertThat(Mockito.mockingDetails(hello).isMock()).isTrue();
    }

    /**
     * Stubs the fake's greeting, runs the SUT, and checks that the stubbed
     * value is returned, that the real delegate was actually invoked, and
     * that the interaction with the fake is verifiable.
     */
    @Test
    public void givenNothingClassToExecuteShouldReturnHello() {
        String helloGreeting = "Hello";
        // BDD-style stubbing: the fake answers "Hello" when greeted.
        given(hello.greet()).willReturn(helloGreeting);
        String result = sut.execute();
        assertThat(result).isEqualTo(helloGreeting);
        // Proves the fake forwards calls to the underlying real instance.
        assertThat(delegate.isCalled()).isTrue();
        verify(hello).greet();
    }
}
| apache-2.0 |
quarkusio/quarkus | extensions/panache/panache-common/deployment/src/main/java/io/quarkus/panache/common/deployment/visitors/KotlinPanacheClassOperationGenerationVisitor.java | 25618 | package io.quarkus.panache.common.deployment.visitors;
import static io.quarkus.deployment.util.AsmUtil.getDescriptor;
import static io.quarkus.deployment.util.AsmUtil.getLoadOpcode;
import static io.quarkus.deployment.util.AsmUtil.getSignature;
import static io.quarkus.deployment.util.AsmUtil.unboxIfRequired;
import static io.quarkus.gizmo.Gizmo.ASM_API_VERSION;
import static io.quarkus.panache.common.deployment.PanacheConstants.DOTNAME_GENERATE_BRIDGE;
import static java.lang.String.format;
import static java.util.stream.Collectors.toList;
import static org.objectweb.asm.Opcodes.ACC_BRIDGE;
import static org.objectweb.asm.Opcodes.ARRAYLENGTH;
import static org.objectweb.asm.Opcodes.ATHROW;
import static org.objectweb.asm.Opcodes.CHECKCAST;
import static org.objectweb.asm.Opcodes.DUP;
import static org.objectweb.asm.Opcodes.GETSTATIC;
import static org.objectweb.asm.Opcodes.IFNONNULL;
import static org.objectweb.asm.Opcodes.ILOAD;
import static org.objectweb.asm.Opcodes.INVOKESPECIAL;
import static org.objectweb.asm.Opcodes.INVOKESTATIC;
import static org.objectweb.asm.Opcodes.INVOKEVIRTUAL;
import static org.objectweb.asm.Opcodes.NEW;
import static org.objectweb.asm.Type.ARRAY;
import static org.objectweb.asm.Type.getMethodDescriptor;
import static org.objectweb.asm.Type.getType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.StringJoiner;
import java.util.TreeMap;
import java.util.function.Function;
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.AnnotationValue;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.ClassType;
import org.jboss.jandex.DotName;
import org.jboss.jandex.IndexView;
import org.jboss.jandex.MethodInfo;
import org.jboss.jandex.ParameterizedType;
import org.jboss.jandex.Type;
import org.jboss.jandex.Type.Kind;
import org.jboss.jandex.TypeVariable;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Label;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import io.quarkus.deployment.util.AsmUtil;
import io.quarkus.deployment.util.JandexUtil;
import io.quarkus.panache.common.deployment.ByteCodeType;
import io.quarkus.panache.common.deployment.PanacheMethodCustomizer;
import io.quarkus.panache.common.deployment.TypeBundle;
/**
* kotlinc compiles default methods in to the implementing classes, so we need to elide them first, and then we can
* generate new methods like we do elsewhere.
*/
public class KotlinPanacheClassOperationGenerationVisitor extends ClassVisitor {
    // Descriptors for the JetBrains nullability annotations stamped on generated members.
    public static final String NOT_NULL_DESCRIPTOR = "Lorg/jetbrains/annotations/NotNull;";
    public static final String NULLABLE_DESCRIPTOR = "Lorg/jetbrains/annotations/Nullable;";
    public static final ByteCodeType OBJECT = new ByteCodeType(Object.class);
    protected static final ByteCodeType CLASS = new ByteCodeType(Class.class);
    private static final String CTOR_METHOD_NAME = "<init>";
    // Maps a type-variable name (e.g. "Entity", "Id") to its resolved descriptor,
    // falling back to the name itself when unresolved.
    protected final Function<String, String> argMapper;
    // The class being transformed.
    protected final ClassInfo classInfo;
    // Upper bound of the base type's first type variable, or Object when it has none.
    protected final ByteCodeType entityUpperBound;
    // Resolved "Entity"/"Id" type arguments for classInfo against baseType.
    protected final Map<String, ByteCodeType> typeArguments = new HashMap<>();
    private final ByteCodeType baseType;
    // Methods declared on classInfo (and its supertypes up to baseType), keyed by name+descriptor.
    private final Map<String, MethodInfo> definedMethods = new TreeMap<>();
    // Type-name -> erased descriptor substitutions used when calling the operations singleton.
    private final Map<String, String> erasures = new HashMap<>();
    private final IndexView indexView;
    protected List<PanacheMethodCustomizer> methodCustomizers;
    protected TypeBundle typeBundle;
    // Labels emitted so far for the current method; startLabel()/endLabel() bracket its body.
    private final List<Label> labels = new ArrayList<>();

    /**
     * Sets up the visitor: resolves Entity/Id type arguments, computes the entity
     * upper bound from the base type's type variables, records the methods already
     * defined on the class, and drops those that will be regenerated.
     */
    public KotlinPanacheClassOperationGenerationVisitor(ClassVisitor outputClassVisitor, ClassInfo classInfo,
            IndexView indexView, TypeBundle typeBundle, ByteCodeType baseType,
            List<PanacheMethodCustomizer> methodCustomizers) {
        super(ASM_API_VERSION, outputClassVisitor);
        this.classInfo = classInfo;
        this.indexView = indexView;
        this.typeBundle = typeBundle;
        this.baseType = baseType;
        this.methodCustomizers = methodCustomizers;
        List<TypeVariable> typeVariables = indexView.getClassByName(baseType.dotName()).typeParameters();
        entityUpperBound = !typeVariables.isEmpty()
                ? new ByteCodeType(typeVariables.get(0).bounds().get(0))
                : OBJECT;
        discoverTypeParameters(classInfo, indexView, typeBundle, baseType);
        argMapper = type -> {
            ByteCodeType byteCodeType = typeArguments.get(type);
            return byteCodeType != null
                    ? byteCodeType.descriptor()
                    : type;
        };
        collectMethods(classInfo);
        filterNonOverrides();
    }

    /**
     * Resolves the type parameters {@code clazz} supplies to {@code repositoryDotName},
     * walking up the hierarchy via Jandex; empty for {@code java.lang.Object}.
     */
    public static List<ByteCodeType> recursivelyFindEntityTypeArguments(IndexView indexView, DotName clazz,
            DotName repositoryDotName) {
        if (clazz.equals(JandexUtil.DOTNAME_OBJECT)) {
            return Collections.emptyList();
        }
        return JandexUtil
                .resolveTypeParameters(clazz, repositoryDotName, indexView).stream()
                .map(t -> new ByteCodeType(t))
                .collect(toList());
    }

    // Creates a fresh label and records it so start/end of the method body can be referenced.
    private Label addLabel() {
        Label label = new Label();
        labels.add(label);
        return label;
    }

    /**
     * Emits Kotlin Intrinsics.checkNotNullParameter calls for each non-primitive
     * parameter, mirroring what kotlinc generates for non-null parameters.
     */
    protected void addNullityChecks(MethodVisitor mv, MethodInfo method) {
        int index = 1;
        for (Type methodParameter : method.parameters()) {
            org.objectweb.asm.Type parameter = asmType(methodParameter);
            if (isNotPrimitiveId(methodParameter)) {
                mv.visitVarInsn(parameter.getOpcode(ILOAD), index);
                // NOTE(review): 'index' is the local-variable SLOT (starts at 1, grows by
                // size), but MethodInfo.parameterName() expects the parameter POSITION --
                // confirm this does not report the wrong name for the first parameter or
                // for methods with long/double parameters.
                String value = method.parameterName(index);
                mv.visitLdcInsn(value != null ? value : ("arg" + (index - 1)));
                mv.visitMethodInsn(INVOKESTATIC, "kotlin/jvm/internal/Intrinsics", "checkNotNullParameter",
                        "(Ljava/lang/Object;Ljava/lang/String;)V", false);
            }
            index += parameter.getSize();
        }
    }

    /**
     * Pushes the entity Class literal and then every parameter onto the stack,
     * boxing primitives and defensively copying arrays, ready for the call to
     * the operations singleton.
     */
    private void loadArguments(MethodVisitor mv, MethodInfo method) {
        mv.visitLdcInsn(typeArguments.get("Entity").type());
        int index = 1;
        for (Type methodParameter : method.parameters()) {
            org.objectweb.asm.Type parameter = asmType(methodParameter);
            mv.visitVarInsn(parameter.getOpcode(ILOAD), index);
            // long and double take two slots and have size == 2. others, size == 1
            if (parameter.getSort() < ARRAY) {
                org.objectweb.asm.Type wrapper = AsmUtil.autobox(parameter);
                mv.visitMethodInsn(INVOKESTATIC, wrapper.getInternalName(), "valueOf",
                        getMethodDescriptor(wrapper, parameter), false);
            } else if (parameter.getSort() == ARRAY) {
                // Copy varargs arrays so the callee cannot observe later caller mutation.
                mv.visitInsn(DUP);
                mv.visitInsn(ARRAYLENGTH);
                mv.visitMethodInsn(INVOKESTATIC, "java/util/Arrays", "copyOf",
                        "([Ljava/lang/Object;I)[Ljava/lang/Object;", false);
            }
            index += parameter.getSize();
        }
    }

    // Stamps @NotNull on every non-primitive parameter of the generated method.
    private void annotateParamsWithNotNull(MethodVisitor mv, MethodInfo method) {
        List<Type> parameters = method.parameters();
        if (parameters.size() != 0) {
            mv.visitAnnotableParameterCount(parameters.size(), false);
            for (int i = 0; i < parameters.size(); i++) {
                if (isNotPrimitiveId(method.parameters().get(i))) {
                    mv.visitParameterAnnotation(i, NOT_NULL_DESCRIPTOR, false);
                }
            }
        }
    }

    /**
     * Returns false only for the "Id" type variable when its resolved type is a
     * primitive (single-character descriptor); everything else counts as non-primitive.
     */
    private boolean isNotPrimitiveId(Type type) {
        boolean primitive = true;
        if (type instanceof TypeVariable && ((TypeVariable) type).identifier().equals("Id")) {
            String identifier = ((TypeVariable) type).identifier();
            ByteCodeType idType = typeArguments.get(identifier);
            primitive = idType.descriptor().length() != 1;
        }
        return primitive;
    }

    /**
     * Builds the descriptor of the JVM bridge method: erased parameter types and,
     * when @GenerateBridge(targetReturnTypeErased = true), the entity upper bound
     * as the return type.
     */
    protected String bridgeMethodDescriptor(MethodInfo method, Function<String, String> mapper) {
        StringJoiner joiner = new StringJoiner("", "(", ")");
        descriptors(method, joiner);
        AnnotationInstance annotation = method.annotation(DOTNAME_GENERATE_BRIDGE);
        boolean erased;
        if (annotation != null) {
            AnnotationValue value = annotation.value("targetReturnTypeErased");
            erased = value != null && value.asBoolean();
        } else {
            erased = false;
        }
        String returnType;
        if (erased) {
            returnType = entityUpperBound.descriptor();
        } else {
            returnType = getDescriptor(method.returnType(), mapper);
        }
        return joiner + returnType;
    }

    // Emits a CHECKCAST to the declared return type when it differs from the
    // (erased) type returned by the operations call.
    private void checkCast(MethodVisitor mv, Type returnType, String operationReturnType) {
        String cast;
        if (returnType.kind() == Type.Kind.TYPE_VARIABLE) {
            ByteCodeType type = typeArguments.getOrDefault(returnType.asTypeVariable().identifier(), entityUpperBound);
            cast = type.internalName();
        } else {
            cast = returnType.name().toString().replace('.', '/');
        }
        if (!cast.equals(operationReturnType)) {
            mv.visitTypeInsn(CHECKCAST, cast);
        }
    }

    /**
     * Records every method declared on classInfo and its superclasses (stopping
     * at baseType) into definedMethods, keyed by name + erased descriptor.
     */
    private void collectMethods(ClassInfo classInfo) {
        if (classInfo != null && !classInfo.name().equals(baseType.dotName())) {
            classInfo.methods()
                    .forEach(method -> {
                        String descriptor = getDescriptor(method, m -> {
                            ByteCodeType byteCodeType = typeArguments.get(m);
                            return byteCodeType != null ? byteCodeType.descriptor() : OBJECT.descriptor();
                        });
                        MethodInfo prior = definedMethods.put(method.name() + descriptor, method);
                        // NOTE(review): given isBridgeMethod()'s inverted polarity (see below),
                        // this throws when the duplicate IS a bridge method and tolerates
                        // duplicates that are NOT bridges -- confirm the intended semantics.
                        if (prior != null && !isBridgeMethod(method)) {
                            throw new IllegalStateException(format("Should not run in to duplicate " +
                                    "mappings: \n\t%s\n\t%s\n\t%s", method, descriptor, prior));
                        }
                    });
            collectMethods(indexView.getClassByName(classInfo.superName()));
        }
    }

    // Converts a dotted class name to a field descriptor ("Lfoo/Bar;"); array
    // names are assumed to already be descriptors.
    private String desc(String name) {
        String s = name.replace(".", "/");
        return s.startsWith("[") ? s : "L" + s + ";";
    }

    // Appends each parameter's erased descriptor; a non-primitive Id parameter is
    // erased all the way to Object.
    private void descriptors(MethodInfo method, StringJoiner joiner) {
        ByteCodeType id = typeArguments.get("Id");
        for (Type parameter : method.parameters()) {
            if (!id.isPrimitive() && parameter.name().equals(id.dotName())) {
                joiner.add(OBJECT.descriptor());
            } else {
                joiner.add(mapType(parameter));
            }
        }
    }

    /**
     * Resolves the Entity and Id type arguments of classInfo against baseType and
     * seeds the erasure table (entity -> upper bound, query/update types -> Object).
     * Bundles without query/update types are tolerated via the ignored
     * UnsupportedOperationException.
     */
    protected void discoverTypeParameters(ClassInfo classInfo, IndexView indexView, TypeBundle types, ByteCodeType baseType) {
        List<ByteCodeType> foundTypeArguments = recursivelyFindEntityTypeArguments(indexView,
                classInfo.name(), baseType.dotName());
        ByteCodeType entityType = (foundTypeArguments.size() > 0) ? foundTypeArguments.get(0) : OBJECT;
        ByteCodeType idType = (foundTypeArguments.size() > 1) ? foundTypeArguments.get(1).unbox() : OBJECT;
        typeArguments.put("Entity", entityType);
        typeArguments.put("Id", idType);
        typeArguments.keySet().stream()
                .filter(k -> !k.equals("Id"))
                .forEach(k -> erasures.put(k, OBJECT.descriptor()));
        try {
            erasures.put(typeArguments.get("Entity").dotName().toString(), entityUpperBound.descriptor());
            erasures.put(types.queryType().dotName().toString(), OBJECT.descriptor());
            erasures.put(types.updateType().dotName().toString(), OBJECT.descriptor());
        } catch (UnsupportedOperationException ignored) {
        }
    }

    // Emits Intrinsics.checkNotNullExpressionValue on the value on top of the
    // stack, labeled with a kotlinc-style (possibly elided) expression text.
    private void emitNullCheck(MethodVisitor mv, String operationDescriptor) {
        mv.visitInsn(DUP);
        mv.visitLdcInsn(elideDescriptor(operationDescriptor));
        mv.visitMethodInsn(INVOKESTATIC, "kotlin/jvm/internal/Intrinsics", "checkNotNullExpressionValue",
                "(Ljava/lang/Object;Ljava/lang/String;)V", false);
    }

    /**
     * Emits an explicit null check that throws NullPointerException with a
     * Kotlin-style "null cannot be cast to non-null type ..." message when the
     * value on top of the stack is null.
     */
    private void emitNullCheck(MethodVisitor mv, Type returnType) {
        Label label = addLabel();
        mv.visitInsn(DUP);
        mv.visitJumpInsn(IFNONNULL, label);
        mv.visitTypeInsn(NEW, "java/lang/NullPointerException");
        mv.visitInsn(DUP);
        ParameterizedType parameterizedType = ParameterizedType.create(returnType.name(),
                new Type[] { Type.create(typeArguments.get("Entity").dotName(), Type.Kind.CLASS) }, null);
        mv.visitLdcInsn("null cannot be cast to non-null type " + (parameterizedType.toString()
                .replace("java.util.List", "kotlin.collections.List")));
        mv.visitMethodInsn(INVOKESPECIAL, "java/lang/NullPointerException",
                CTOR_METHOD_NAME, "(Ljava/lang/String;)V", false);
        mv.visitInsn(ATHROW);
        mv.visitLabel(label);
        mv.visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[] { "java/lang/Object" });
    }

    // Shortens long expression texts to "head…tail", mimicking kotlinc's elision.
    private String elideDescriptor(String descriptor) {
        // sample kotlinc text: "KotlinMongoOperations.IN\u2026atabase(Book::class.java)"
        if (descriptor.length() > 55) {
            return descriptor.substring(0, 24) + '\u2026' + descriptor.substring(descriptor.length() - 24);
        }
        return descriptor;
    }

    // Last label emitted for the current method (end of its body).
    private Label endLabel() {
        return labels.get(labels.size() - 1);
    }

    // Removes methods annotated @GenerateBridge from definedMethods so they get
    // regenerated rather than treated as user overrides.
    private void filterNonOverrides() {
        new ArrayList<>(definedMethods.values())
                .forEach(method -> {
                    AnnotationInstance generateBridge = method.annotation(DOTNAME_GENERATE_BRIDGE);
                    if (generateBridge != null) {
                        definedMethods.remove(method.name() + getDescriptor(method, m -> m));
                    }
                });
    }

    /**
     * Generates the public implementation of a @GenerateBridge method: parameter
     * null checks, a delegating call to the operations singleton, and a local
     * variable table, with any registered customizers applied first.
     */
    private void generate(MethodInfo method) {
        // Note: we can't use SYNTHETIC here because otherwise Mockito will never mock these methods
        MethodVisitor mv = cv.visitMethod(Opcodes.ACC_PUBLIC, method.name(),
                getDescriptor(method, argMapper), getSignature(method, argMapper), null);
        AsmUtil.copyParameterNames(mv, method);
        for (PanacheMethodCustomizer customizer : methodCustomizers) {
            org.objectweb.asm.Type thisClass = getType("L" + classInfo.name().toString().replace('.', '/') + ";");
            customizer.customize(thisClass, method, mv);
        }
        annotateParamsWithNotNull(mv, method);
        mv.visitCode();
        addNullityChecks(mv, method);
        loadOperationsReference(mv);
        loadArguments(mv, method);
        invokeOperation(mv, method);
        emitLocalVariablesTable(mv, method);
        mv.visitMaxs(0, 0);
    }

    // Emits LocalVariableTable entries for 'this' and each parameter, spanning
    // from the first to the last label recorded for the method.
    private void emitLocalVariablesTable(MethodVisitor mv, MethodInfo method) {
        mv.visitLabel(addLabel());
        mv.visitLocalVariable("this", desc(classInfo.name().toString()), null, startLabel(), endLabel(), 0);
        for (int i = 0; i < method.parameters().size(); i++) {
            Type type = method.parameters().get(i);
            String typeName = type instanceof TypeVariable
                    ? this.typeArguments.get(((TypeVariable) type).identifier()).descriptor()
                    : desc(type.name().toString());
            String parameterName = method.parameterName(i);
            // NOTE(review): the fallback is the constant "arg1" for every parameter;
            // ("arg" + i) was probably intended -- confirm.
            // NOTE(review): slot index 'i + 1' is wrong after a long/double parameter
            // (those occupy two slots) -- confirm whether such signatures occur here.
            mv.visitLocalVariable(parameterName != null ? parameterName : ("arg" + 1), typeName, null, startLabel(),
                    endLabel(), i + 1);
        }
    }

    /**
     * Generates a synthetic JVM bridge method with the erased descriptor that
     * casts/unboxes its arguments and delegates to the typed implementation.
     */
    private void generateBridge(MethodInfo method, String descriptor) {
        MethodVisitor mv = cv.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC | Opcodes.ACC_BRIDGE,
                method.name(),
                descriptor,
                null,
                null);
        List<Type> parameters = method.parameters();
        AsmUtil.copyParameterNames(mv, method);
        mv.visitCode();
        // this
        mv.visitIntInsn(Opcodes.ALOAD, 0);
        // each param
        for (int i = 0; i < parameters.size(); i++) {
            Type paramType = parameters.get(i);
            if (paramType.kind() == Type.Kind.PRIMITIVE)
                throw new IllegalStateException("BUG: Don't know how to generate JVM bridge method for " + method
                        + ": has primitive parameters");
            mv.visitIntInsn(getLoadOpcode(paramType), i + 1);
            if (paramType.kind() == Type.Kind.TYPE_VARIABLE) {
                String typeParamName = paramType.asTypeVariable().identifier();
                org.objectweb.asm.Type type = getType(typeArguments.get(typeParamName).descriptor());
                // Reference types get a CHECKCAST; primitives are unboxed from wrappers.
                if (type.getSort() > org.objectweb.asm.Type.DOUBLE) {
                    mv.visitTypeInsn(Opcodes.CHECKCAST, type.getInternalName());
                } else {
                    unboxIfRequired(mv, type);
                }
            }
        }
        String targetDescriptor = getDescriptor(method, name -> typeArguments.get(name).descriptor());
        mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL,
                classInfo.name().toString().replace('.', '/'),
                method.name(),
                targetDescriptor, false);
        String targetReturnTypeDescriptor = targetDescriptor.substring(targetDescriptor.indexOf(')') + 1);
        mv.visitInsn(AsmUtil.getReturnInstruction(targetReturnTypeDescriptor));
        mv.visitMaxs(0, 0);
        mv.visitEnd();
    }

    /**
     * Generates a bridge for single-primitive-Id methods whose return type was
     * erased to Object, delegating to the primitive-typed implementation.
     */
    private void generatePrimitiveBridge(MethodInfo method, String descriptor) {
        String substring = descriptor.substring(0, descriptor.lastIndexOf(')') + 1);
        // Same parameters, Object return type.
        String descriptor1 = substring + OBJECT.descriptor();
        MethodVisitor mv = cv.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC | Opcodes.ACC_BRIDGE,
                method.name(),
                descriptor1,
                null,
                null);
        AsmUtil.copyParameterNames(mv, method);
        mv.visitCode();
        // this
        mv.visitIntInsn(Opcodes.ALOAD, 0);
        mv.visitIntInsn(typeArguments.get("Id").type().getOpcode(ILOAD), 1);
        String targetDescriptor = getDescriptor(method, name -> typeArguments.get(name).descriptor());
        mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL,
                classInfo.name().toString().replace('.', '/'),
                method.name(),
                targetDescriptor, false);
        String targetReturnTypeDescriptor = targetDescriptor.substring(targetDescriptor.indexOf(')') + 1);
        mv.visitInsn(AsmUtil.getReturnInstruction(targetReturnTypeDescriptor));
        mv.visitMaxs(0, 0);
        mv.visitEnd();
    }

    /**
     * Emits the INVOKEVIRTUAL call into the operations singleton (with erased
     * signature), then a null check (skipped for findById, which stays nullable),
     * a cast back to the declared return type, a nullability annotation, and the
     * return instruction.
     */
    private void invokeOperation(MethodVisitor mv, MethodInfo method) {
        String operationDescriptor;
        StringJoiner joiner = new StringJoiner("", "(", ")");
        joiner.add(CLASS.descriptor());
        for (Type parameter : method.parameters()) {
            joiner.add(parameter.kind() == Kind.TYPE_VARIABLE ? OBJECT.descriptor() : getDescriptor(parameter, argMapper));
        }
        Type returnType = method.returnType();
        String descriptor = getDescriptor(returnType, argMapper);
        String key = returnType.kind() == Type.Kind.TYPE_VARIABLE
                ? returnType.asTypeVariable().identifier()
                : returnType.name().toString();
        String operationReturnType = erasures.getOrDefault(key, descriptor);
        operationDescriptor = joiner + operationReturnType;
        mv.visitMethodInsn(INVOKEVIRTUAL, typeBundle.operations().internalName(), method.name(),
                operationDescriptor, false);
        if (returnType.kind() != Type.Kind.PRIMITIVE && returnType.kind() != Type.Kind.VOID) {
            // Strip the leading 'L' and trailing ';' to get the internal name.
            String retType = operationReturnType.substring(1, operationReturnType.length() - 1);
            String annotationDesc = NOT_NULL_DESCRIPTOR;
            if ("findById".equals(method.name())) {
                // findById may legitimately return null, so no null check and @Nullable.
                annotationDesc = NULLABLE_DESCRIPTOR;
            } else {
                nullCheckReturn(mv, returnType,
                        typeBundle.operations().dotName().withoutPackagePrefix() + ".INSTANCE."
                                + method.name() + joiner);
            }
            checkCast(mv, returnType, retType);
            mv.visitAnnotation(annotationDesc, false);
        }
        mv.visitInsn(AsmUtil.getReturnInstruction(returnType));
    }

    // NOTE(review): despite its name, this returns true when the ACC_BRIDGE flag
    // is NOT set (i.e. for non-bridge methods). The single call site in
    // collectMethods negates it again -- confirm the intended polarity before
    // renaming or "fixing" either side.
    private boolean isBridgeMethod(MethodInfo method) {
        return (method.flags() & ACC_BRIDGE) != ACC_BRIDGE;
    }

    // Resolves a Jandex parameter type to its ASM type, substituting resolved
    // type arguments for type variables.
    private org.objectweb.asm.Type asmType(Type methodParameter) {
        org.objectweb.asm.Type parameter;
        if (methodParameter.kind() == Type.Kind.TYPE_VARIABLE) {
            parameter = typeArguments.get(methodParameter.asTypeVariable().identifier()).type();
        } else {
            parameter = getType(getDescriptor(methodParameter, s -> null));
        }
        return parameter;
    }

    // Pushes the Kotlin operations singleton (GETSTATIC INSTANCE) onto the stack.
    protected void loadOperationsReference(MethodVisitor mv) {
        mv.visitLabel(addLabel());
        mv.visitFieldInsn(GETSTATIC, typeBundle.operations().internalName(), "INSTANCE",
                typeBundle.operations().descriptor());
    }

    // Erased descriptor for a parameter: primitives and type variables become
    // Object; otherwise the mapped descriptor, further erased via the table.
    private String mapType(Type parameter) {
        switch (parameter.kind()) {
            case PRIMITIVE:
            case TYPE_VARIABLE:
                return OBJECT.descriptor();
            default:
                String value = getDescriptor(parameter, argMapper);
                return erasures.getOrDefault(value, value);
        }
    }

    // True when the method's return type or any parameter is a resolved type
    // variable, i.e. a JVM bridge with the erased signature is needed.
    private boolean needsJvmBridge(MethodInfo method) {
        if (needsJvmBridge(method.returnType()))
            return true;
        for (Type paramType : method.parameters()) {
            if (needsJvmBridge(paramType))
                return true;
        }
        return false;
    }

    private boolean needsJvmBridge(Type type) {
        if (type.kind() == Type.Kind.TYPE_VARIABLE) {
            String typeParamName = type.asTypeVariable().identifier();
            return typeArguments.containsKey(typeParamName);
        }
        return false;
    }

    // Parameterized returns get the explicit NPE-throwing check; plain class
    // returns get the Intrinsics expression check; others are left alone.
    private void nullCheckReturn(MethodVisitor mv, Type returnType, String operationDescriptor) {
        if (returnType instanceof ParameterizedType) {
            emitNullCheck(mv, returnType);
        } else if (returnType instanceof ClassType) {
            emitNullCheck(mv, operationDescriptor);
        }
    }

    // First label emitted for the current method (start of its body).
    private Label startLabel() {
        return labels.get(0);
    }

    @Override
    public String toString() {
        return new StringJoiner(", ", getClass().getSimpleName() + "[", "]")
                .add(classInfo.name().toString())
                .toString();
    }

    /**
     * Passes through user-defined methods, agent-added methods (containing '$'),
     * and constructors; returns null (elides) kotlinc-compiled default-method
     * bodies for @GenerateBridge methods so visitEnd can regenerate them.
     */
    @Override
    public MethodVisitor visitMethod(int access, String name, String descriptor, String signature,
            String[] exceptions) {
        MethodInfo methodInfo = definedMethods.entrySet().stream()
                .filter(e -> e.getKey().equals(name + descriptor))
                .map(e -> e.getValue())
                .findFirst()
                .orElse(null);
        if (methodInfo != null && !methodInfo.hasAnnotation(DOTNAME_GENERATE_BRIDGE)) {
            return super.visitMethod(access, name, descriptor, signature, exceptions);
        } else if (name.contains("$")) {
            //some agents such as jacoco add new methods, they generally have $ in the name
            return super.visitMethod(access, name, descriptor, signature, exceptions);
        } else if (name.equals(CTOR_METHOD_NAME)) {
            //Arc can add no-args constructors to support intercepted beans
            return super.visitMethod(access, name, descriptor, signature, exceptions);
        }
        return null;
    }

    /**
     * Generates every @GenerateBridge method from the base type that the user did
     * not define themselves, plus the JVM bridge (and, for primitive Ids with an
     * erased return type, the primitive bridge) when the signatures differ.
     */
    @Override
    public void visitEnd() {
        for (MethodInfo method : indexView.getClassByName(baseType.dotName()).methods()) {
            String descriptor = getDescriptor(method, type -> typeArguments.getOrDefault(type, OBJECT).descriptor());
            AnnotationInstance bridge = method.annotation(DOTNAME_GENERATE_BRIDGE);
            if (!definedMethods.containsKey(method.name() + descriptor) && bridge != null) {
                generate(method);
                if (needsJvmBridge(method)) {
                    String bridgeDescriptor = bridgeMethodDescriptor(method, type -> {
                        ByteCodeType mapped = typeArguments.get(type);
                        return mapped != null ? mapped.descriptor() : type;
                    });
                    if (!definedMethods.containsKey(method.name() + bridgeDescriptor)) {
                        generateBridge(method, bridgeDescriptor);
                    }
                    AnnotationValue targetReturnTypeErased = bridge.value("targetReturnTypeErased");
                    if (typeArguments.get("Id").isPrimitive() && targetReturnTypeErased != null
                            && targetReturnTypeErased.asBoolean()) {
                        if (method.parameters().size() == 1
                                && method.parameters().get(0).asTypeVariable().identifier().equals("Id")) {
                            generatePrimitiveBridge(method, descriptor);
                        }
                    }
                }
            }
        }
        super.visitEnd();
    }
}
| apache-2.0 |
slipperyseal/B9 | src/main/java/net/catchpole/B9/math/LocationHistory.java | 637 | package net.catchpole.B9.math;
import net.catchpole.B9.spacial.Location;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
 * Fixed-capacity, FIFO history of {@link Location}s: once full, adding a new
 * location evicts the oldest one. Iteration order is oldest first.
 */
public class LocationHistory implements Iterable<Location> {
    // Backing store, oldest location first.
    private final List<Location> list = new ArrayList<Location>();
    // Maximum number of locations retained (fixed: "capasity" typo).
    private final int capacity;

    /**
     * Creates a history retaining at most {@code capacity} locations.
     *
     * @param capacity maximum number of locations to keep; must be positive
     * @throws IllegalArgumentException if {@code capacity} is not positive
     *         (previously a non-positive capacity caused an
     *         {@code IndexOutOfBoundsException} on the first add)
     */
    public LocationHistory(int capacity) {
        if (capacity <= 0) {
            throw new IllegalArgumentException("capacity must be positive: " + capacity);
        }
        this.capacity = capacity;
    }

    /**
     * Records a location, evicting the oldest entry when the history is full.
     *
     * @param location the location to append
     */
    public void addLocation(Location location) {
        if (list.size() >= capacity) {
            list.remove(0);
        }
        list.add(location);
    }

    /**
     * @return iterator over the retained locations, oldest first
     */
    @Override
    public Iterator<Location> iterator() {
        return list.iterator();
    }
}
| apache-2.0 |
RaghavendraSingh/lens | lens-client/src/main/java/org/apache/lens/client/jdbc/LensJdbcStatement.java | 9775 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lens.client.jdbc;
import java.sql.*;
import org.apache.lens.client.LensStatement;
import org.apache.lens.client.exceptions.LensAPIException;
import lombok.extern.slf4j.Slf4j;
/**
* The Class LensJdbcStatement.
*/
@Slf4j
public class LensJdbcStatement implements Statement {
/** The connection. */
private final LensJdbcConnection connection;
/** The statement. */
private final LensStatement statement;
/** The closed. */
private boolean closed;
/**
* Instantiates a new lens jdbc statement.
*
* @param connection the connection
*/
public LensJdbcStatement(LensJdbcConnection connection) {
this.connection = connection;
statement = new LensStatement(connection.getConnection());
}
/*
* (non-Javadoc)
*
* @see java.sql.Statement#executeQuery(java.lang.String)
*/
@Override
public ResultSet executeQuery(String s) throws SQLException {
try {
statement.executeQuery(s, true, null);
} catch (LensAPIException e) {
log.error("Execution Failed for Statement:{}", s, e);
}
return new LensJdbcResultSet(statement.getResultSet(), statement.getResultSetMetaData(), this);
}
/*
* (non-Javadoc)
*
* @see java.sql.Statement#close()
*/
@Override
public void close() throws SQLException {
killUnderlyingLensQuery();
this.closed = true;
}
/*
* (non-Javadoc)
*
* @see java.sql.Statement#cancel()
*/
@Override
public void cancel() throws SQLException {
killUnderlyingLensQuery();
}
/**
* Kill underlying lens query.
*
* @throws SQLException the SQL exception
*/
private void killUnderlyingLensQuery() throws SQLException {
if (closed) {
return;
}
if (statement.isIdle()) {
return;
}
boolean status = statement.kill();
if (!status) {
throw new SQLException("Unable to close the Statement on lens server");
}
}
/*
* (non-Javadoc)
*
* @see java.sql.Statement#execute(java.lang.String)
*/
@Override
public boolean execute(String s) throws SQLException {
if (closed) {
throw new SQLException("Cannot execute statemes on closed statements");
}
try {
statement.executeQuery(s, true, null);
} catch (Throwable t) {
throw new SQLException(t);
}
return statement.wasQuerySuccessful();
}
@Override
public ResultSet getResultSet() throws SQLException {
if (closed) {
throw new SQLException("Cannot get resultset for closed statements");
}
return new LensJdbcResultSet(statement.getResultSet(), statement.getResultSetMetaData(), this);
}
@Override
public boolean isClosed() throws SQLException {
return closed;
}
@Override
public int getFetchDirection() throws SQLException {
return ResultSet.FETCH_FORWARD;
}
@Override
public int getResultSetConcurrency() throws SQLException {
return ResultSet.CONCUR_READ_ONLY;
}
@Override
public int getResultSetType() throws SQLException {
return ResultSet.TYPE_FORWARD_ONLY;
}
@Override
public Connection getConnection() throws SQLException {
return this.connection;
}
/**
* Close result set.
*/
void closeResultSet() {
this.statement.closeResultSet();
}
@Override
public SQLWarning getWarnings() throws SQLException {
return null;
}
/*
* (non-Javadoc)
*
* @see java.sql.Statement#clearWarnings()
*/
@Override
public void clearWarnings() throws SQLException {
}
/*
* (non-Javadoc)
*
* @see java.sql.Wrapper#unwrap(java.lang.Class)
*/
@Override
public <T> T unwrap(Class<T> tClass) throws SQLException {
return null;
}
/*
* (non-Javadoc)
*
* @see java.sql.Wrapper#isWrapperFor(java.lang.Class)
*/
@Override
public boolean isWrapperFor(Class<?> aClass) throws SQLException {
return false;
}
// ---------------------------------------------------------------------------
// The operations below are not supported by the lens JDBC driver and always
// throw SQLException. Message text (including punctuation) is preserved
// verbatim in case callers match on it.
// ---------------------------------------------------------------------------

@Override
public void setCursorName(String s) throws SQLException {
  throw new SQLException("Operation not supported!!!!");
}

@Override
public int getUpdateCount() throws SQLException {
  throw new SQLException("Operation not supported!!!!");
}

@Override
public boolean getMoreResults() throws SQLException {
  throw new SQLException("Operation not supported!!!!");
}

@Override
public void setFetchDirection(int i) throws SQLException {
  throw new SQLException("Operation not supported!!!!");
}

@Override
public void setFetchSize(int i) throws SQLException {
  throw new SQLException("Operation not supported!!!!");
}

@Override
public int getFetchSize() throws SQLException {
  throw new SQLException("Operation not supported!!!!");
}

/*
 * (non-Javadoc)
 *
 * @see java.sql.Statement#executeUpdate(java.lang.String)
 */
@Override
public int executeUpdate(String s) throws SQLException {
  throw new SQLException("Operation not supported");
}

/*
 * (non-Javadoc)
 *
 * @see java.sql.Statement#addBatch(java.lang.String)
 */
@Override
public void addBatch(String s) throws SQLException {
  throw new SQLException("Operation not supported!!!");
}

/*
 * (non-Javadoc)
 *
 * @see java.sql.Statement#clearBatch()
 */
@Override
public void clearBatch() throws SQLException {
  throw new SQLException("Operation not supported!!!");
}

/*
 * (non-Javadoc)
 *
 * @see java.sql.Statement#executeBatch()
 */
@Override
public int[] executeBatch() throws SQLException {
  throw new SQLException("Operation not supported!!!");
}

/*
 * (non-Javadoc)
 *
 * @see java.sql.Statement#getMoreResults(int)
 */
@Override
public boolean getMoreResults(int i) throws SQLException {
  throw new SQLException("Operation not supported!!!");
}

@Override
public ResultSet getGeneratedKeys() throws SQLException {
  throw new SQLException("Operation not supported!!!");
}

/*
 * (non-Javadoc)
 *
 * @see java.sql.Statement#executeUpdate(java.lang.String, int)
 */
@Override
public int executeUpdate(String s, int i) throws SQLException {
  throw new SQLException("Operation not supported");
}

/*
 * (non-Javadoc)
 *
 * @see java.sql.Statement#executeUpdate(java.lang.String, int[])
 */
@Override
public int executeUpdate(String s, int[] ints) throws SQLException {
  throw new SQLException("Operation not supported");
}

/*
 * (non-Javadoc)
 *
 * @see java.sql.Statement#executeUpdate(java.lang.String, java.lang.String[])
 */
@Override
public int executeUpdate(String s, String[] strings) throws SQLException {
  throw new SQLException("Operation not supported");
}

/*
 * (non-Javadoc)
 *
 * @see java.sql.Statement#execute(java.lang.String, int)
 */
@Override
public boolean execute(String s, int i) throws SQLException {
  throw new SQLException("Operation not supported");
}

/*
 * (non-Javadoc)
 *
 * @see java.sql.Statement#execute(java.lang.String, int[])
 */
@Override
public boolean execute(String s, int[] ints) throws SQLException {
  throw new SQLException("Operation not supported");
}

/*
 * (non-Javadoc)
 *
 * @see java.sql.Statement#execute(java.lang.String, java.lang.String[])
 */
@Override
public boolean execute(String s, String[] strings) throws SQLException {
  throw new SQLException("Operation not supported");
}

@Override
public int getResultSetHoldability() throws SQLException {
  throw new SQLException("Operation not supported!!!");
}

@Override
public void setPoolable(boolean b) throws SQLException {
  throw new SQLException("Operation not supported!!!");
}

/** Statement pooling is not supported, so always false. */
@Override
public boolean isPoolable() throws SQLException {
  return false;
}
/**
 * Close on completion.
 *
 * <p>Not supported by this driver.
 *
 * @throws SQLException always, since the operation is unsupported
 */
@Override
public void closeOnCompletion() throws SQLException {
  throw new SQLException("Operation not supported!!!");
}

/**
 * Whether this statement closes when all its result sets are closed.
 *
 * <p>Not supported by this driver.
 *
 * @throws SQLException always, since the operation is unsupported
 */
@Override
public boolean isCloseOnCompletion() throws SQLException {
  throw new SQLException("Operation not supported!!!");
}
// More unsupported java.sql.Statement operations; messages preserved verbatim.

@Override
public int getMaxFieldSize() throws SQLException {
  throw new SQLException("Operation not supported!!!");
}

@Override
public void setMaxFieldSize(int i) throws SQLException {
  throw new SQLException("Operation not supported!!!");
}

@Override
public int getMaxRows() throws SQLException {
  throw new SQLException("Operation not supported!!!");
}

@Override
public void setMaxRows(int i) throws SQLException {
  throw new SQLException("Operation not supported!!!");
}

@Override
public void setEscapeProcessing(boolean b) throws SQLException {
  throw new SQLException("Operation not supported!!!");
}

@Override
public int getQueryTimeout() throws SQLException {
  throw new SQLException("Operation not supported!!!!");
}

@Override
public void setQueryTimeout(int i) throws SQLException {
  throw new SQLException("Operation not supported!!!!");
}
}
| apache-2.0 |
daisy/pipeline-issues | libs/dotify/dotify.hyphenator.impl/src/org/daisy/dotify/hyphenator/impl/CWHyphenatorCore.java | 1820 | package org.daisy.dotify.hyphenator.impl;
import java.io.IOException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Logger;
import org.daisy.dotify.api.hyphenator.HyphenatorConfigurationException;
/**
 * Shared registry of compound-word hyphenators, keyed by locale.
 *
 * <p>Loads the locale-to-resource mapping from {@code compound-catalog.xml} and
 * lazily builds one {@link CWHyphenatorAtom} per locale. The singleton is shared
 * across threads, so the lazy cache access is synchronized (the previous
 * implementation mutated a plain HashMap without synchronization).
 */
class CWHyphenatorCore {
    private static CWHyphenatorCore instance;
    /** Locale -> resource sub-path mapping loaded from the XML catalog. */
    private final Properties tables;
    /** Cache of already-built hyphenators, one per locale. Guarded by "this". */
    private final Map<String, CWHyphenatorAtom> map;
    private final Logger logger;

    private CWHyphenatorCore() {
        tables = loadProperties("compound-catalog.xml");
        map = new HashMap<>();
        logger = Logger.getLogger(this.getClass().getCanonicalName());
    }

    /** Returns true when the catalog declares a definition for the given locale. */
    boolean supportsLocale(String locale) {
        return tables.getProperty(locale) != null;
    }

    /** Lazily creates and returns the shared singleton instance. */
    static synchronized CWHyphenatorCore getInstance() {
        if (instance == null) {
            instance = new CWHyphenatorCore();
        }
        return instance;
    }

    /**
     * Loads a {@link Properties} table from an XML resource next to this class.
     * Missing or unreadable resources are logged and yield an empty table.
     */
    private Properties loadProperties(String path) {
        Properties ret = new Properties();
        try {
            URL propertiesURL = this.getClass().getResource(path);
            if (propertiesURL != null) {
                // loadFromXML closes the supplied stream when it returns.
                ret.loadFromXML(propertiesURL.openStream());
            } else {
                logger.warning("Cannot locate properties file: " + path);
            }
        } catch (IOException e) {
            logger.warning("Failed to load properties file: " + path);
        }
        return ret;
    }

    /**
     * Returns the hyphenator for the given locale, building and caching it on
     * first use.
     *
     * <p>Synchronized: the cache is a plain {@link HashMap} and this method may
     * be reached concurrently via the shared singleton.
     *
     * @throws HyphenatorConfigurationException if the catalog has no entry for
     *         the locale
     */
    synchronized CWHyphenatorAtom getHyphenator(String locale) throws HyphenatorConfigurationException {
        CWHyphenatorAtom hyph = map.get(locale);
        if (hyph == null) {
            String subPath = tables.getProperty(locale);
            if (subPath == null) {
                // NOTE(review): throws the Latex-flavoured exception subclass;
                // looks copied from the latex hyphenator — confirm intended.
                throw new LatexHyphenatorConfigurationException("No definition for locale: " + locale);
            }
            logger.fine("Loading hyphenation definition: " + subPath);
            hyph = new CWHyphenatorAtom(subPath, locale);
            map.put(locale, hyph);
        }
        return hyph;
    }
}
| apache-2.0 |
DAASI/HyperImage3 | HIImage/src/main/java/org/hyperimage/client/image/HiImageImplDigilib.java | 2276 | package org.hyperimage.client.image;
import java.awt.Dimension;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.imgscalr.Scalr;
import org.imgscalr.Scalr.Mode;
/**
 * {@link HiImageAbstract} implementation that delegates image scaling to a
 * digilib server, falling back to local scaling when no repository id is given.
 */
public class HiImageImplDigilib extends HiImageAbstract {

    private static final Logger logger = Logger.getLogger(HiImageImplDigilib.class.getName());

    /**
     * Lazily-created shared instance. Declared volatile so the double-checked
     * locking in {@link #getInstance()} is safe; without it another thread
     * could observe a partially-constructed instance.
     */
    private static volatile HiImageImplDigilib hiImageImpl;

    private HiImageImplDigilib() {
    }

    /** Returns the shared singleton, creating it on first use. */
    public static HiImageImplDigilib getInstance() {
        if (hiImageImpl == null) {
            synchronized (HiImageImplDigilib.class) {
                if (hiImageImpl == null) {
                    hiImageImpl = new HiImageImplDigilib();
                }
            }
        }
        return hiImageImpl;
    }

    /**
     * Scales {@code image} to {@code dimension}.
     *
     * <p>When {@code repositoryID} is null the image is scaled locally with
     * imgscalr; otherwise {@code repositoryID} is treated as a partial digilib
     * URL to which the target width/height are appended, and the scaled image
     * is fetched over HTTP.
     *
     * @throws HiImageException if the HTTP request fails or returns non-200
     */
    public BufferedImage scaleImage(BufferedImage image, Dimension dimension, String repositoryID) {
        logger.info("Scaling image to " + dimension);
        if (repositoryID == null) {
            logger.warning("No repositoryID provided. Trying to scale locally.");
            return Scalr.resize(image, Scalr.Method.AUTOMATIC, Mode.FIT_EXACT, dimension.width, dimension.height);
        }
        String restPath = repositoryID + "&dw=" + dimension.width + "&dh=" + dimension.height;
        logger.info("Trying to get image from " + restPath);
        HttpURLConnection conn = null;
        try {
            URL url = new URL(restPath);
            conn = (HttpURLConnection) url.openConnection();
            if (conn.getResponseCode() != 200) {
                logger.log(Level.SEVERE, "Failed to get data. HTTP error code : " + conn.getResponseCode() + ", " + conn.getResponseMessage());
                throw new HiImageException("Failed to get data. HTTP error code : " + conn.getResponseCode() + ", " + conn.getResponseMessage());
            }
            return createImageFromStream(conn.getInputStream());
        } catch (IOException e) {
            // MalformedURLException is an IOException, so one catch covers both
            // of the previous (identical) handlers.
            logger.log(Level.SEVERE, "Scaling failed", e);
            throw new HiImageException("Scaling failed", e);
        } finally {
            // Always release the connection — the original leaked it on the
            // non-200 and exception paths.
            if (conn != null) {
                conn.disconnect();
            }
        }
    }
}
| apache-2.0 |
BBVA-CIB/APIRestGenerator | core/src/test/java/com/bbva/kltt/apirest/core/generator/velocity/VelocityManagerTest.java | 357 | package com.bbva.kltt.apirest.core.generator.velocity;
import org.junit.Test;
/**
* ------------------------------------------------
* @author Francisco Manuel Benitez Chico
* ------------------------------------------------
*/
public class VelocityManagerTest
{
    /**
     * Smoke test: obtaining the shared Velocity engine must not throw.
     * No assertion is made on the returned engine itself.
     */
    @Test
    public void fullTest()
    {
        VelocityManager.getEngine() ;
    }
}
| apache-2.0 |
neoramon/camel | components-starter/camel-atom-starter/src/main/java/org/apache/camel/component/atom/springboot/AtomComponentAutoConfiguration.java | 1887 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.atom.springboot;
import org.apache.camel.CamelContext;
import org.apache.camel.component.atom.AtomComponent;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Configuration
@ConditionalOnBean(type = "org.apache.camel.springboot.CamelAutoConfiguration")
public class AtomComponentAutoConfiguration {
    /**
     * Registers the Camel atom component as a Spring bean, wired to the
     * application's CamelContext, unless the user already defined one.
     * NOTE: this file is generated (see header) — do not hand-edit the logic.
     */
    @Bean(name = "atom-component")
    @ConditionalOnClass(CamelContext.class)
    @ConditionalOnMissingBean(AtomComponent.class)
    public AtomComponent configureAtomComponent(CamelContext camelContext)
            throws Exception {
        AtomComponent component = new AtomComponent();
        component.setCamelContext(camelContext);
        return component;
    }
}
datastax/java-driver | core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/Insight.java | 1557 | /*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.dse.driver.internal.core.insights.schema;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
public class Insight<T> {
    // Immutable JSON envelope: metadata describing the insight plus a typed payload.
    @JsonProperty("metadata")
    private final InsightMetadata metadata;
    @JsonProperty("data")
    private final T insightData;

    /**
     * Jackson creator binding the "metadata" and "data" JSON properties.
     *
     * @param metadata descriptive metadata for this insight
     * @param data the insight payload
     */
    @JsonCreator
    public Insight(@JsonProperty("metadata") InsightMetadata metadata, @JsonProperty("data") T data) {
        this.metadata = metadata;
        this.insightData = data;
    }

    public InsightMetadata getMetadata() {
        return metadata;
    }

    public T getInsightData() {
        return insightData;
    }

    @Override
    public String toString() {
        return "Insight{" + "metadata=" + metadata + ", insightData=" + insightData + '}';
    }
}
| apache-2.0 |
pinterest/pinlater | src/main/java/com/pinterest/pinlater/example/PinLaterExampleWorker.java | 8243 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pinterest.pinlater.example;
import com.pinterest.pinlater.client.PinLaterClient;
import com.pinterest.pinlater.commons.config.ConfigFileServerSet;
import com.pinterest.pinlater.commons.util.BytesUtil;
import com.pinterest.pinlater.thrift.PinLaterDequeueMetadata;
import com.pinterest.pinlater.thrift.PinLaterDequeueRequest;
import com.pinterest.pinlater.thrift.PinLaterDequeueResponse;
import com.pinterest.pinlater.thrift.PinLaterJobAckInfo;
import com.pinterest.pinlater.thrift.PinLaterJobAckRequest;
import com.pinterest.pinlater.thrift.RequestContext;
import com.twitter.common.zookeeper.ServerSet;
import com.twitter.util.Function;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.runtime.BoxedUnit;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* An example PinLater worker implementation.
*
* It uses two separate threads for dequeue and ACK, and a thread pool for executing the jobs.
* Completed jobs will be pushed into two queues depending on whether the execution succeeded or
* failed. Note that the dequeue thread can also send ACK along with the dequeue request. The
* worker also implements a linear backoff retry policy, where the retry delay is calculated with
* the number of retry allowed and remained.
*
* File-based serverset is used for service discovery. It uses a local file that stores the
* servers' [HOST_IP]:[PORT] pairs instead of talking to Zookeeper directly.
*/
public class PinLaterExampleWorker {

    // Tuning knobs for the example: batch size per dequeue RPC, worker pool
    // size, polling intervals, and the in-flight job cap.
    private static final int DEQUEUE_BATCH_SIZE = 10;
    private static final int NUM_WORKER_THREADS = 10;
    private static final int DEQUEUE_INTEVAL_MS = 1000;
    private static final int ACK_INTEVAL_MS = 1000;
    private static final int PENDING_JOB_LIMIT = 50;

    private static final Logger LOG = LoggerFactory.getLogger(PinLaterExampleWorker.class);

    // Request context identifying this worker host; built once at class load.
    private static final RequestContext REQUEST_CONTEXT;

    static {
        try {
            REQUEST_CONTEXT = new RequestContext(
                    "pinlaterexampleworker:" + InetAddress.getLocalHost().getHostName());
        } catch (UnknownHostException e) {
            LOG.error("Failed to initializer PinLaterExampleWorker", e);
            throw new RuntimeException(e);
        }
    }

    // Max number of pending/running jobs. The worker will stop dequeue when reaches this limit.
    private final AtomicInteger numPendingJobs = new AtomicInteger(0);

    // Thread pool for executing PinLater jobs.
    private final ExecutorService workerExecutor = Executors.newFixedThreadPool(NUM_WORKER_THREADS);

    // Local buffer for succeeded and failed jobs, waiting for ACK
    private final BlockingQueue<PinLaterJobAckInfo> succeededJobQueue =
            new LinkedBlockingQueue<PinLaterJobAckInfo>();
    private final BlockingQueue<PinLaterJobAckInfo> failedJobQueue =
            new LinkedBlockingQueue<PinLaterJobAckInfo>();

    private PinLaterClient client;

    /**
     * Wires up the PinLater client via a file-based serverset (path taken from
     * the "serverset_path" system property) and starts the dequeue and ACK
     * polling threads.
     *
     * <p>NOTE(review): the scheduled executor is never shut down — acceptable
     * for an example that runs until the process exits, but worth confirming.
     */
    public PinLaterExampleWorker() {
        String fullServerSetPath =
                getClass().getResource("/" + System.getProperty("serverset_path")).getPath();
        ServerSet serverSet = new ConfigFileServerSet(fullServerSetPath);
        this.client = new PinLaterClient(serverSet, 10);
        ScheduledExecutorService dequeueAckExecutor = Executors.newScheduledThreadPool(2);
        dequeueAckExecutor.scheduleWithFixedDelay(
                new DequeueThread(), 0, DEQUEUE_INTEVAL_MS, TimeUnit.MILLISECONDS);
        dequeueAckExecutor.scheduleWithFixedDelay(
                new AckThread(), 0, ACK_INTEVAL_MS, TimeUnit.MILLISECONDS);
    }

    public static void main(String[] args) {
        new PinLaterExampleWorker();
    }

    /**
     * Drains both completion queues into a single ACK request, or returns null
     * when there is nothing to acknowledge.
     *
     * <p>NOTE(review): once drained, the ack infos live only in the returned
     * request — if the RPC carrying it fails, those ACKs appear to be lost.
     * Confirm whether that is acceptable for this example.
     */
    private PinLaterJobAckRequest buildAckRequest() {
        List<PinLaterJobAckInfo> succeededJobs = new ArrayList<PinLaterJobAckInfo>();
        List<PinLaterJobAckInfo> failedJobs = new ArrayList<PinLaterJobAckInfo>();
        succeededJobQueue.drainTo(succeededJobs);
        failedJobQueue.drainTo(failedJobs);
        if (succeededJobs.size() > 0 || failedJobs.size() > 0) {
            LOG.info("ACK {}: {} succeeded, {} failed", PinLaterExampleJob.QUEUE_NAME,
                    succeededJobs.size(), failedJobs.size());
            PinLaterJobAckRequest ackRequest =
                    new PinLaterJobAckRequest(PinLaterExampleJob.QUEUE_NAME);
            ackRequest.setJobsSucceeded(succeededJobs);
            ackRequest.setJobsFailed(failedJobs);
            return ackRequest;
        } else {
            return null;
        }
    }

    /** Periodic task: dequeues a batch of jobs and submits them to the worker pool. */
    class DequeueThread implements Runnable {
        @Override
        public void run() {
            // Back off while too many jobs are already in flight.
            if (numPendingJobs.get() > PENDING_JOB_LIMIT) {
                return;
            }
            PinLaterDequeueRequest dequeueRequest =
                    new PinLaterDequeueRequest(PinLaterExampleJob.QUEUE_NAME, DEQUEUE_BATCH_SIZE);
            // Ack completed jobs along with dequeue request
            PinLaterJobAckRequest ackRequest = buildAckRequest();
            if (ackRequest != null) {
                dequeueRequest.setJobAckRequest(ackRequest);
            }
            client.getIface().dequeueJobs(REQUEST_CONTEXT, dequeueRequest).onSuccess(
                    new Function<PinLaterDequeueResponse, BoxedUnit>() {
                        @Override
                        public BoxedUnit apply(final PinLaterDequeueResponse response) {
                            LOG.info("DEQUEUE {}: {} jobs, {} jobs pending",
                                    PinLaterExampleJob.QUEUE_NAME, response.getJobsSize(), numPendingJobs.get());
                            for (final Map.Entry<String, ByteBuffer> job : response.getJobs().entrySet()) {
                                numPendingJobs.incrementAndGet();
                                workerExecutor.submit(new Runnable() {
                                    @Override
                                    public void run() {
                                        try {
                                            PinLaterExampleJob.process(
                                                    new String(BytesUtil.readBytesFromByteBuffer(job.getValue())));
                                            succeededJobQueue.add(new PinLaterJobAckInfo(job.getKey()));
                                        } catch (Exception e) {
                                            PinLaterJobAckInfo ackInfo = new PinLaterJobAckInfo(job.getKey());
                                            // Append exception message to the custom status
                                            ackInfo.setAppendCustomStatus(e.getMessage());
                                            // Retry with linear backoff, e.g. 1s, 2s, 3s ...
                                            PinLaterDequeueMetadata metaData =
                                                    response.getJobMetadata().get(job.getKey());
                                            int attemptsAllowed = metaData.getAttemptsAllowed();
                                            int attemptsRemained = metaData.getAttemptsRemaining();
                                            ackInfo.setRetryDelayMillis(1000 * (attemptsAllowed - attemptsRemained));
                                            failedJobQueue.add(ackInfo);
                                        } finally {
                                            numPendingJobs.decrementAndGet();
                                        }
                                    }
                                });
                            }
                            return BoxedUnit.UNIT;
                        }
                    }
            );
        }
    }

    /** Periodic task: flushes any buffered ACKs that the dequeue path didn't piggyback. */
    class AckThread implements Runnable {
        @Override
        public void run() {
            PinLaterJobAckRequest ackRequest = buildAckRequest();
            if (ackRequest != null) {
                // Fire-and-forget: the returned Future is intentionally ignored here.
                client.getIface().ackDequeuedJobs(REQUEST_CONTEXT, ackRequest);
            }
        }
    }
}
| apache-2.0 |
coderplay/redpoll | core/src/java/redpoll/clusterer/canopy/Canopy.java | 6785 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package redpoll.clusterer.canopy;
import java.io.IOException;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import redpoll.core.LabeledWritableVector;
import redpoll.core.Vector;
import redpoll.core.WritableSparseVector;
import redpoll.core.WritableVector;
import redpoll.util.DistanceMeasure;
/**
* This class models a canopy as a center point.
*/
public class Canopy {
    // keys used by Driver, Mapper & Reducer
    public static final String DISTANCE_MEASURE_KEY = "redpoll.clusterer.canopy.measure";
    public static final String T1_KEY = "redpoll.clusterer.canopy.t1";
    public static final String T2_KEY = "redpoll.clusterer.canopy.t2";
    public static final String CANOPY_PATH_KEY = "redpoll.clusterer.canopy.path";

    private static final Log log = LogFactory.getLog(Canopy.class.getName());

    // Shared algorithm parameters, set once per task via configure()/config().
    // NOTE(review): static mutable state — safe under Hadoop's one-task-per-JVM
    // model, but not thread-safe in general; confirm before reusing elsewhere.
    // the T1 distance threshold
    private static double t1;
    // the T2 distance threshold
    private static double t2;
    // the distance measure
    private static DistanceMeasure measure;

    // this canopy's canopyId
    private final Text canopyId;
    // the current center
    private WritableVector center = new WritableSparseVector(0);

    public Canopy(String idString, WritableVector point) {
        super();
        this.canopyId = new Text(idString);
        this.center = point;
    }

    /**
     * Create a new Canopy containing the given labeled point.
     * @param point a labeled point in vector space
     */
    public Canopy(LabeledWritableVector point) {
        super();
        this.canopyId = point.getLabel();
        this.center = point;
    }

    /**
     * Configure the Canopy and its distance measure
     * @param job the JobConf for this job
     */
    public static void configure(JobConf job) {
        try {
            // The measure class is loaded reflectively from the job config.
            final ClassLoader ccl = Thread.currentThread().getContextClassLoader();
            Class<?> cl = ccl.loadClass(job.get(DISTANCE_MEASURE_KEY));
            measure = (DistanceMeasure) cl.newInstance();
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        } catch (IllegalAccessException e) {
            throw new RuntimeException(e);
        } catch (InstantiationException e) {
            throw new RuntimeException(e);
        }
        t1 = Double.parseDouble(job.get(T1_KEY));
        t2 = Double.parseDouble(job.get(T2_KEY));
    }

    /**
     * Configure the Canopy for unit tests
     * @param aMeasure
     * @param aT1
     * @param aT2
     */
    public static void config(DistanceMeasure aMeasure, double aT1, double aT2) {
        measure = aMeasure;
        t1 = aT1;
        t2 = aT2;
    }

    /**
     * This method is used by the CanopyMapper to perform canopy inclusion tests
     * and to emit the point and its covering canopies to the output.
     *
     * <p>A point becomes the center of a new canopy only when it is not within
     * T2 of any existing canopy center.
     *
     * @param point the point to be added
     * @param canopies the List<Canopy> to be appended
     * @param collector an OutputCollector in which to emit the point
     */
    public static void emitPointToNewCanopies(LabeledWritableVector point,
            List<Canopy> canopies, OutputCollector<Text, WritableVector> collector)
            throws IOException {
        boolean pointStronglyBound = false;
        for (Canopy canopy : canopies) {
            double dist = measure.distance(canopy.getCenter(), point);
            pointStronglyBound = pointStronglyBound || (dist < t2);
        }
        if (!pointStronglyBound) {
            // strong bound
            Canopy newCanopy = new Canopy(point.copy());
            canopies.add(newCanopy);
            collector.collect(new Text("canopy"), newCanopy.getCenter());
        }
    }

    /**
     * This method is used by the ClusterMapper to perform canopy inclusion tests
     * and to emit the point keyed by its covering canopies to the output. if the
     * point is not covered by any canopies (due to canopy centroid clustering),
     * emit the point to the closest covering canopy.
     *
     * <p>NOTE(review): when {@code canopies} is empty (or every canopy is within
     * T1 yet none closer than the running minimum), {@code closest} can remain
     * null and {@code closest.getCanopyId()} would NPE — confirm callers never
     * pass an empty canopy list.
     *
     * @param point the point to be added
     * @param canopies the List<Canopy> to be appended
     * @param writable the original Writable from the input, may include arbitrary
     * payload information after the point [...]<payload>
     * @param collector an OutputCollector in which to emit the point
     */
    public static void emitPointToExistingCanopies(Text key,
            List<Canopy> canopies, WritableVector point,
            OutputCollector<Text, WritableVector> collector) throws IOException {
        double minDist = Double.MAX_VALUE;
        Canopy closest = null;
        boolean isCovered = false;
        StringBuilder builder = new StringBuilder();
        for (Canopy canopy : canopies) {
            double dist = measure.distance(canopy.getCenter(), point);
            if (dist < t1) {
                isCovered = true;
                builder.append(canopy.getIdentifier()).append(":");
            } else if (dist < minDist) {
                minDist = dist;
                closest = canopy;
            }
        }
        // if the point is not contained in any canopies (due to canopy centroid
        // clustering), emit the point to the closest covering canopy.
        Text label = isCovered ? new Text(builder.toString()) : closest.getCanopyId();
        collector.collect(key, new LabeledWritableVector(label, point));
    }

    @Override
    public String toString() {
        return getIdentifier() + " - " + getCenter().asFormatString();
    }

    public String getIdentifier() {
        return canopyId.toString();
    }

    public Text getCanopyId() {
        return canopyId;
    }

    /**
     * Return the center point
     *
     * @return the center of the Canopy
     */
    public WritableVector getCenter() {
        return center;
    }

    /**
     * Return if the point is covered by this canopy
     *
     * @param point a point
     * @return if the point is covered
     */
    public boolean covers(Vector point) {
        return measure.distance(center, point) < t1;
    }
}
| apache-2.0 |
alien6u/spring-in-action-learning | src/com/springinaction/springidol/StringedInstrument.java | 462 | package com.springinaction.springidol;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.springframework.beans.factory.annotation.Qualifier;
/**
 * Custom {@code @Qualifier} meta-annotation used to mark injection points and
 * bean definitions for stringed instruments, so autowiring can disambiguate
 * between multiple Instrument beans.
 */
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD, ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Qualifier
public @interface StringedInstrument {
}
| apache-2.0 |
bclozel/initializr | initializr-generator/src/test/java/io/spring/initializr/generator/ProjectGeneratorTests.java | 35659 | /*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.spring.initializr.generator;
import java.util.Collections;
import io.spring.initializr.metadata.BillOfMaterials;
import io.spring.initializr.metadata.Dependency;
import io.spring.initializr.metadata.InitializrMetadata;
import io.spring.initializr.test.generator.ProjectAssert;
import io.spring.initializr.test.metadata.InitializrMetadataTestBuilder;
import io.spring.initializr.util.VersionProperty;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
/**
* Tests for {@link ProjectGenerator}
*
* @author Stephane Nicoll
* @author Andy Wilkinson
*/
public class ProjectGeneratorTests extends AbstractProjectGeneratorTests {
// NOTE(review): this rule is not referenced by the test methods visible here;
// it may be used further down the class or be left over — confirm.
@Rule
public final ExpectedException thrown = ExpectedException.none();

/** A web project generates a pom with the web starter and no extra repository. */
@Test
public void defaultMavenPom() {
    ProjectRequest request = createProjectRequest("web");
    generateMavenPom(request).hasNoRepository().hasSpringBootStarterDependency("web");
    verifyProjectSuccessfulEventFor(request);
}

/** The default Gradle build needs no import statements. */
@Test
public void defaultGradleBuild() {
    ProjectRequest request = createProjectRequest("web");
    generateGradleBuild(request).doesNotContain("import");
    verifyProjectSuccessfulEventFor(request);
}

/** Default project type is a Java Maven project. */
@Test
public void defaultProject() {
    ProjectRequest request = createProjectRequest("web");
    generateProject(request).isJavaProject().isMavenProject().pomAssert()
            .hasNoRepository().hasSpringBootStarterDependency("web");
    verifyProjectSuccessfulEventFor(request);
}

/** gradle-build type yields a Gradle project with web + test dependencies. */
@Test
public void defaultProjectWithGradle() {
    ProjectRequest request = createProjectRequest("web");
    request.setType("gradle-build");
    ProjectAssert gradleProject = generateProject(request).isGradleProject();
    gradleProject.gradleBuildAssert()
            .contains("compile('org.springframework.boot:spring-boot-starter-web')")
            .contains("testCompile('org.springframework.boot:spring-boot-starter-test')");
    gradleProject.gradleSettingsAssert().hasProjectName("demo");
    verifyProjectSuccessfulEventFor(request);
}

/** With no dependency selected, the root spring-boot-starter is added. */
@Test
public void noDependencyAddsRootStarter() {
    ProjectRequest request = createProjectRequest();
    generateProject(request).isJavaProject().isMavenProject().pomAssert()
            .hasSpringBootStarterRootDependency();
}
/** A snapshot Boot version adds the snapshot repository to the pom. */
@Test
public void mavenPomWithBootSnapshot() {
    ProjectRequest request = createProjectRequest("web");
    request.setBootVersion("1.0.1.BUILD-SNAPSHOT");
    generateMavenPom(request).hasSnapshotRepository()
            .hasSpringBootParent("1.0.1.BUILD-SNAPSHOT")
            .hasSpringBootStarterDependency("web");
}

/** A dependency with a custom type (tar.gz) is carried through to the pom. */
@Test
public void mavenPomWithTarDependency() {
    Dependency dependency = Dependency.withId("custom-artifact", "org.foo",
            "custom-artifact");
    dependency.setType("tar.gz");
    InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
            .addDependencyGroup("test", dependency).build();
    applyMetadata(metadata);
    ProjectRequest request = createProjectRequest("custom-artifact");
    generateMavenPom(request).hasDependency(dependency).hasDependenciesCount(2);
}

/** A custom-type dependency renders with the Gradle @type notation. */
@Test
public void gradleBuildWithTarDependency() {
    Dependency dependency = Dependency.withId("custom-artifact", "org.foo",
            "custom-artifact");
    dependency.setType("tar.gz");
    InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
            .addDependencyGroup("test", dependency).build();
    applyMetadata(metadata);
    ProjectRequest request = createProjectRequest("custom-artifact");
    generateGradleBuild(request)
            .contains("compile('org.foo:custom-artifact@tar.gz')");
}

/** A web-facet dependency stands in for the web starter in the pom. */
@Test
public void mavenPomWithWebFacet() {
    Dependency dependency = Dependency.withId("thymeleaf", "org.foo", "thymeleaf");
    dependency.getFacets().add("web");
    InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
            .addDependencyGroup("core", "web", "security", "data-jpa")
            .addDependencyGroup("test", dependency).build();
    applyMetadata(metadata);
    ProjectRequest request = createProjectRequest("thymeleaf");
    generateMavenPom(request).hasDependency("org.foo", "thymeleaf")
            .hasDependenciesCount(2);
}
/** War packaging with a web-facet dependency: Tomcat is provided, web starter is not added. */
@Test
public void mavenWarWithWebFacet() {
    Dependency dependency = Dependency.withId("thymeleaf", "org.foo", "thymeleaf");
    dependency.getFacets().add("web");
    InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
            .addDependencyGroup("core", "web", "security", "data-jpa")
            .addDependencyGroup("test", dependency).build();
    applyMetadata(metadata);
    ProjectRequest request = createProjectRequest("thymeleaf");
    request.setPackaging("war");
    generateProject(request).isJavaWarProject().isMavenProject().pomAssert()
            .hasSpringBootStarterTomcat()
            // This is tagged as web facet so it brings the web one
            .hasDependency("org.foo", "thymeleaf").hasSpringBootStarterTest()
            .hasDependenciesCount(3);
}

/** War packaging without a web-facet dependency forces the web starter in. */
@Test
public void mavenWarPomWithoutWebFacet() {
    ProjectRequest request = createProjectRequest("data-jpa");
    request.setPackaging("war");
    generateMavenPom(request).hasSpringBootStarterTomcat()
            .hasSpringBootStarterDependency("data-jpa")
            .hasSpringBootStarterDependency("web") // Added by war packaging
            .hasSpringBootStarterTest().hasDependenciesCount(4);
}

/** Gradle war build with a web-facet dependency: war plugin + providedRuntime Tomcat. */
@Test
public void gradleWarWithWebFacet() {
    Dependency dependency = Dependency.withId("thymeleaf", "org.foo", "thymeleaf");
    dependency.getFacets().add("web");
    InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
            .addDependencyGroup("core", "web", "security", "data-jpa")
            .addDependencyGroup("test", dependency).build();
    applyMetadata(metadata);
    ProjectRequest request = createProjectRequest("thymeleaf");
    request.setPackaging("war");
    request.setType("gradle-project");
    generateProject(request).isJavaWarProject().isGradleProject().gradleBuildAssert()
            // This is tagged as web facet so it brings the web one
            .contains("apply plugin: 'war'")
            .contains("compile('org.foo:thymeleaf')")
            .doesNotContain(
                    "compile('org.springframework.boot:spring-boot-starter-web')")
            .contains(
                    "testCompile('org.springframework.boot:spring-boot-starter-test')")
            .contains("configurations {") // declare providedRuntime config
            .contains("providedRuntime").contains(
                    "providedRuntime('org.springframework.boot:spring-boot-starter-tomcat')");
}
@Test
public void gradleWarPomWithoutWebFacet() {
ProjectRequest request = createProjectRequest("data-jpa");
request.setPackaging("war");
generateGradleBuild(request)
.contains(
"compile('org.springframework.boot:spring-boot-starter-data-jpa')")
// Added by warpackaging
.contains("compile('org.springframework.boot:spring-boot-starter-web')")
.contains(
"testCompile('org.springframework.boot:spring-boot-starter-test')")
.contains("configurations {") // declare providedRuntime config
.contains("providedRuntime").contains(
"providedRuntime('org.springframework.boot:spring-boot-starter-tomcat')");
}
@Test
public void groupIdAndArtifactIdInferPackageName() {
ProjectRequest request = createProjectRequest("web");
request.setGroupId("org.acme");
request.setArtifactId("42foo");
generateProject(request)
.isJavaProject("org/acme/foo", "DemoApplication");
}
@Test
public void cleanPackageNameWithGroupIdAndArtifactIdWithVersion() {
ProjectRequest request = createProjectRequest("web");
request.setGroupId("org.acme");
request.setArtifactId("foo-1.4.5");
assertProjectWithPackageNameWithVersion(request);
}
@Test
public void cleanPackageNameWithInvalidPackageName() {
ProjectRequest request = createProjectRequest("web");
request.setGroupId("org.acme");
request.setArtifactId("foo");
request.setPackageName("org.acme.foo-1.4.5");
assertProjectWithPackageNameWithVersion(request);
}
private void assertProjectWithPackageNameWithVersion(ProjectRequest request) {
generateProject(request)
.isJavaProject("org/acme/foo145", "DemoApplication")
.sourceCodeAssert(
"src/main/java/org/acme/foo145/DemoApplication.java")
.contains("package org.acme.foo145;");
}
@Test
public void gradleProjectWithCustomArtifactId() {
ProjectRequest request = createProjectRequest();
request.setType("gradle-build");
request.setArtifactId("my-application");
generateProject(request).isGradleProject().gradleSettingsAssert()
.hasProjectName("my-application");
verifyProjectSuccessfulEventFor(request);
}
@Test
public void springBoot11UseEnableAutoConfigurationJava() {
ProjectRequest request = createProjectRequest("web");
request.setBootVersion("1.1.9.RELEASE");
request.setName("MyDemo");
request.setPackageName("foo");
generateProject(request)
.sourceCodeAssert("src/main/java/foo/MyDemoApplication.java")
.hasImports(EnableAutoConfiguration.class.getName(),
ComponentScan.class.getName(), Configuration.class.getName())
.doesNotHaveImports(SpringBootApplication.class.getName())
.contains("@EnableAutoConfiguration", "@Configuration", "@ComponentScan")
.doesNotContain("@SpringBootApplication");
}
@Test
public void springBootUseSpringBootApplicationJava() {
ProjectRequest request = createProjectRequest("web");
request.setBootVersion("1.2.0.RC1");
request.setName("MyDemo");
request.setPackageName("foo");
generateProject(request)
.sourceCodeAssert("src/main/java/foo/MyDemoApplication.java")
.hasImports(SpringBootApplication.class.getName())
.doesNotHaveImports(EnableAutoConfiguration.class.getName(),
ComponentScan.class.getName(), Configuration.class.getName())
.contains("@SpringBootApplication").doesNotContain(
"@EnableAutoConfiguration", "@Configuration", "@ComponentScan");
}
@Test
public void springBoot11UseEnableAutoConfigurationGroovy() {
ProjectRequest request = createProjectRequest("web");
request.setLanguage("groovy");
request.setBootVersion("1.1.9.RELEASE");
request.setName("MyDemo");
request.setPackageName("foo");
generateProject(request)
.sourceCodeAssert("src/main/groovy/foo/MyDemoApplication.groovy")
.hasImports(EnableAutoConfiguration.class.getName(),
ComponentScan.class.getName(), Configuration.class.getName())
.doesNotHaveImports(SpringBootApplication.class.getName())
.contains("@EnableAutoConfiguration", "@Configuration", "@ComponentScan")
.doesNotContain("@SpringBootApplication");
}
@Test
public void springBootUseSpringBootApplicationGroovy() {
ProjectRequest request = createProjectRequest("web");
request.setLanguage("groovy");
request.setBootVersion("1.2.0.RC1");
request.setName("MyDemo");
request.setPackageName("foo");
generateProject(request)
.sourceCodeAssert("src/main/groovy/foo/MyDemoApplication.groovy")
.hasImports(SpringBootApplication.class.getName())
.doesNotHaveImports(EnableAutoConfiguration.class.getName(),
ComponentScan.class.getName(), Configuration.class.getName())
.contains("@SpringBootApplication").doesNotContain(
"@EnableAutoConfiguration", "@Configuration", "@ComponentScan");
}
@Test
public void springBoot11UseEnableAutoConfigurationKotlin() {
ProjectRequest request = createProjectRequest("web");
request.setLanguage("kotlin");
request.setBootVersion("1.1.9.RELEASE");
request.setName("MyDemo");
request.setPackageName("foo");
generateProject(request)
.sourceCodeAssert("src/main/kotlin/foo/MyDemoApplication.kt")
.hasImports(EnableAutoConfiguration.class.getName(),
ComponentScan.class.getName(), Configuration.class.getName())
.doesNotHaveImports(SpringBootApplication.class.getName())
.contains("@EnableAutoConfiguration", "@Configuration", "@ComponentScan")
.doesNotContain("@SpringBootApplication");
}
@Test
public void springBootUseSpringBootApplicationKotlin() {
ProjectRequest request = createProjectRequest("web");
request.setLanguage("kotlin");
request.setBootVersion("1.2.0.RC1");
request.setName("MyDemo");
request.setPackageName("foo");
generateProject(request)
.sourceCodeAssert("src/main/kotlin/foo/MyDemoApplication.kt")
.hasImports(SpringBootApplication.class.getName())
.doesNotHaveImports(EnableAutoConfiguration.class.getName(),
ComponentScan.class.getName(), Configuration.class.getName())
.contains("@SpringBootApplication").doesNotContain(
"@EnableAutoConfiguration", "@Configuration", "@ComponentScan");
}
@Test
public void springBootUseGradle2() {
ProjectRequest request = createProjectRequest("web");
request.setType("gradle-project");
generateProject(request).isGradleProject("2.13");
}
@Test
public void springBoot15UseGradle3() {
ProjectRequest request = createProjectRequest("web");
request.setType("gradle-project");
request.setBootVersion("1.5.0.RELEASE");
generateProject(request).isGradleProject("3.5.1");
}
@Test
public void springBoot20M3UseGradle3() {
ProjectRequest request = createProjectRequest("web");
request.setType("gradle-project");
request.setBootVersion("2.0.0.M3");
generateProject(request).isGradleProject("3.5.1");
}
@Test
public void springBoot20M4UsesGradle4() {
ProjectRequest request = createProjectRequest("web");
request.setType("gradle-project");
request.setBootVersion("2.0.0.M4");
generateProject(request).isGradleProject("4.5.1");
}
@Test
public void springBoot20SnapshotsUseGradle4() {
ProjectRequest request = createProjectRequest("web");
request.setType("gradle-project");
request.setBootVersion("2.0.0.BUILD-SNAPSHOT");
generateProject(request).isGradleProject("4.5.1");
}
@Test
public void customBaseDirectory() {
ProjectRequest request = createProjectRequest();
request.setBaseDir("my-project");
generateProject(request).hasBaseDir("my-project").isJavaProject()
.isMavenProject();
}
@Test
public void customBaseDirectoryNested() {
ProjectRequest request = createProjectRequest();
request.setBaseDir("foo-bar/my-project");
generateProject(request).hasBaseDir("foo-bar/my-project").isJavaProject()
.isMavenProject();
}
@Test
public void groovyWithMavenUsesGroovyDir() {
ProjectRequest request = createProjectRequest("web");
request.setType("maven-project");
request.setLanguage("groovy");
generateProject(request).isMavenProject().isGroovyProject();
}
@Test
public void groovyWithGradleUsesGroovyDir() {
ProjectRequest request = createProjectRequest("web");
request.setType("gradle-project");
request.setLanguage("groovy");
generateProject(request).isGradleProject().isGroovyProject();
}
@Test
public void mavenPomWithCustomVersion() {
Dependency whatever = Dependency.withId("whatever", "org.acme", "whatever",
"1.2.3");
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("core", "web", "security", "data-jpa")
.addDependencyGroup("foo", whatever).build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("whatever", "data-jpa", "web");
generateMavenPom(request).hasDependency(whatever)
.hasSpringBootStarterDependency("data-jpa")
.hasSpringBootStarterDependency("web");
}
@Test
public void defaultMavenPomHasSpringBootParent() {
ProjectRequest request = createProjectRequest("web");
generateMavenPom(request).hasSpringBootParent(request.getBootVersion());
}
@Test
public void mavenPomWithCustomParentPom() {
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("core", "web", "security", "data-jpa")
.setMavenParent("com.foo", "foo-parent", "1.0.0-SNAPSHOT", false).build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("web");
generateMavenPom(request).hasParent("com.foo", "foo-parent", "1.0.0-SNAPSHOT")
.hasBomsCount(0);
}
@Test
public void mavenPomWithCustomParentPomAndSpringBootBom() {
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("core", "web", "security", "data-jpa")
.setMavenParent("com.foo", "foo-parent", "1.0.0-SNAPSHOT", true).build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("web");
request.setBootVersion("1.0.2.RELEASE");
generateMavenPom(request).hasParent("com.foo", "foo-parent", "1.0.0-SNAPSHOT")
.hasProperty("spring-boot.version", "1.0.2.RELEASE")
.hasBom("org.springframework.boot", "spring-boot-dependencies",
"${spring-boot.version}")
.hasBomsCount(1);
}
@Test
public void gradleBuildWithCustomParentPomAndSpringBootBom() {
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("core", "web", "security", "data-jpa")
.setMavenParent("com.foo", "foo-parent", "1.0.0-SNAPSHOT", true).build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("web");
request.setBootVersion("1.0.2.RELEASE");
generateGradleBuild(request)
.doesNotContain("ext['spring-boot.version'] = '1.0.2.RELEASE'")
.doesNotContain(
"mavenBom \"org.springframework.boot:spring-boot-dependencies:1.0.2.RELEASE\"");
}
@Test
public void gradleBuildWithBootSnapshot() {
ProjectRequest request = createProjectRequest("web");
request.setBootVersion("1.0.1.BUILD-SNAPSHOT");
generateGradleBuild(request).hasSnapshotRepository();
}
@Test
public void gradleBuildWithCustomVersion() {
Dependency whatever = Dependency.withId("whatever", "org.acme", "whatever",
"1.2.3");
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("core", "web", "security", "data-jpa")
.addDependencyGroup("foo", whatever).build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("whatever", "data-jpa", "web");
generateGradleBuild(request)
.contains("compile('org.springframework.boot:spring-boot-starter-web')")
.contains(
"compile('org.springframework.boot:spring-boot-starter-data-jpa')")
.contains("compile('org.acme:whatever:1.2.3')");
}
@Test
public void mavenPomWithCustomScope() {
Dependency h2 = Dependency.withId("h2", "org.h2", "h2");
h2.setScope("runtime");
Dependency hamcrest = Dependency.withId("hamcrest", "org.hamcrest", "hamcrest");
hamcrest.setScope("test");
Dependency servlet = Dependency.withId("servlet-api", "javax.servlet",
"servlet-api");
servlet.setScope("provided");
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("core", "web", "security", "data-jpa")
.addDependencyGroup("database", h2)
.addDependencyGroup("container", servlet)
.addDependencyGroup("test", hamcrest).build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("hamcrest", "h2", "servlet-api",
"data-jpa", "web");
generateMavenPom(request).hasDependency(h2).hasDependency(hamcrest)
.hasDependency(servlet).hasSpringBootStarterDependency("data-jpa")
.hasSpringBootStarterDependency("web");
}
@Test
public void gradleBuildWithCustomScope() {
Dependency h2 = Dependency.withId("h2", "org.h2", "h2");
h2.setScope("runtime");
Dependency hamcrest = Dependency.withId("hamcrest", "org.hamcrest", "hamcrest");
hamcrest.setScope("test");
Dependency servlet = Dependency.withId("servlet-api", "javax.servlet",
"servlet-api");
servlet.setScope("provided");
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("core", "web", "security", "data-jpa")
.addDependencyGroup("database", h2)
.addDependencyGroup("container", servlet)
.addDependencyGroup("test", hamcrest).build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("hamcrest", "h2", "servlet-api",
"data-jpa", "web");
generateGradleBuild(request)
.contains("compile('org.springframework.boot:spring-boot-starter-web')")
.contains(
"compile('org.springframework.boot:spring-boot-starter-data-jpa')")
// declare providedRuntime config
.contains("runtime('org.h2:h2')").contains("configurations {")
.contains("providedRuntime")
.contains("providedRuntime('javax.servlet:servlet-api')")
.contains("testCompile('org.hamcrest:hamcrest')");
}
@Test
public void gradleBuildBeforeWithSpringBoot13() {
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("core", "web", "jpa").setGradleEnv("0.5.9.RELEASE")
.build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("web");
request.setBootVersion("1.2.3.RELEASE");
generateGradleBuild(request).contains("springBootVersion = '1.2.3.RELEASE'")
.contains(
"classpath('io.spring.gradle:dependency-management-plugin:0.5.9.RELEASE')")
.contains("apply plugin: 'spring-boot'")
.contains("apply plugin: 'io.spring.dependency-management'");
}
@Test
public void gradleBuildAsFromSpringBoot13() {
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("core", "web", "jpa").setGradleEnv("0.5.9.RELEASE")
.build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("web");
request.setBootVersion("1.3.0.BUILD-SNAPSHOT");
generateGradleBuild(request)
.contains("springBootVersion = '1.3.0.BUILD-SNAPSHOT'")
.contains("apply plugin: 'spring-boot'")
.doesNotContain(
"classpath('io.spring.gradle:dependency-management-plugin:0.5.9.RELEASE')")
.doesNotContain("apply plugin: 'io.spring.dependency-management'");
}
@Test
public void gradleBuildAsFromSpringBoot142() {
ProjectRequest request = createProjectRequest("web");
request.setBootVersion("1.4.2.BUILD-SNAPSHOT");
generateGradleBuild(request)
.contains("springBootVersion = '1.4.2.BUILD-SNAPSHOT'")
.contains("apply plugin: 'org.springframework.boot'")
.doesNotContain("apply plugin: 'spring-boot'");
}
@Test
public void gradleBuildAsFromSpringBoot20() {
ProjectRequest request = createProjectRequest("web");
request.setBootVersion("2.0.0.BUILD-SNAPSHOT");
generateGradleBuild(request)
.contains("springBootVersion = '2.0.0.BUILD-SNAPSHOT'")
.contains("apply plugin: 'org.springframework.boot'")
.doesNotContain("apply plugin: 'spring-boot'")
.contains("apply plugin: 'io.spring.dependency-management'");
}
@Test
public void mavenBom() {
Dependency foo = Dependency.withId("foo", "org.acme", "foo");
foo.setBom("foo-bom");
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("foo", foo)
.addBom("foo-bom", "org.acme", "foo-bom", "1.2.3").build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("foo");
generateMavenPom(request).hasDependency(foo).hasBom("org.acme", "foo-bom",
"1.2.3");
}
@Test
public void mavenBomWithSeveralDependenciesOnSameBom() {
Dependency foo = Dependency.withId("foo", "org.acme", "foo");
foo.setBom("the-bom");
Dependency bar = Dependency.withId("bar", "org.acme", "bar");
bar.setBom("the-bom");
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("group", foo, bar)
.addBom("the-bom", "org.acme", "the-bom", "1.2.3").build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("foo", "bar");
generateMavenPom(request).hasDependency(foo)
.hasBom("org.acme", "the-bom", "1.2.3").hasBomsCount(1);
}
@Test
public void mavenBomWithVersionMapping() {
Dependency foo = Dependency.withId("foo", "org.acme", "foo");
foo.setBom("the-bom");
BillOfMaterials bom = BillOfMaterials.create("org.acme", "foo-bom");
bom.getMappings()
.add(BillOfMaterials.Mapping.create("[1.2.0.RELEASE,1.3.0.M1)", "1.0.0"));
bom.getMappings().add(BillOfMaterials.Mapping.create("1.3.0.M1", "1.2.0"));
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("foo", foo).addBom("the-bom", bom).build();
applyMetadata(metadata);
// First version
ProjectRequest request = createProjectRequest("foo");
request.setBootVersion("1.2.5.RELEASE");
generateMavenPom(request).hasDependency(foo).hasSpringBootParent("1.2.5.RELEASE")
.hasBom("org.acme", "foo-bom", "1.0.0");
// Second version
ProjectRequest request2 = createProjectRequest("foo");
request2.setBootVersion("1.3.0.M1");
generateMavenPom(request2).hasDependency(foo).hasSpringBootParent("1.3.0.M1")
.hasBom("org.acme", "foo-bom", "1.2.0");
}
@Test
public void mavenBomWithVersionMappingAndExtraRepositories() {
Dependency foo = Dependency.withId("foo", "org.acme", "foo");
foo.setBom("the-bom");
BillOfMaterials bom = BillOfMaterials.create("org.acme", "foo-bom");
bom.getRepositories().add("foo-repo");
bom.getMappings()
.add(BillOfMaterials.Mapping.create("[1.2.0.RELEASE,1.3.0.M1)", "1.0.0"));
bom.getMappings().add(BillOfMaterials.Mapping.create("1.3.0.M1", "1.2.0",
"foo-repo", "bar-repo"));
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("foo", foo).addBom("the-bom", bom)
.addRepository("foo-repo", "repo", "http://example.com/foo", true)
.addRepository("bar-repo", "repo", "http://example.com/bar", false)
.build();
applyMetadata(metadata);
// Second version
ProjectRequest request = createProjectRequest("foo");
request.setBootVersion("1.3.0.RELEASE");
generateMavenPom(request).hasDependency(foo).hasSpringBootParent("1.3.0.RELEASE")
.hasBom("org.acme", "foo-bom", "1.2.0")
.hasRepository("foo-repo", "repo", "http://example.com/foo", true)
.hasRepository("bar-repo", "repo", "http://example.com/bar", false)
.hasRepositoriesCount(2);
}
@Test
public void gradleBom() {
Dependency foo = Dependency.withId("foo", "org.acme", "foo");
foo.setBom("foo-bom");
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("foo", foo)
.addBom("foo-bom", "org.acme", "foo-bom", "1.2.3").build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("foo");
generateGradleBuild(request).contains("dependencyManagement {")
.contains("imports {").contains("mavenBom \"org.acme:foo-bom:1.2.3\"");
}
@Test
public void mavenRepository() {
Dependency foo = Dependency.withId("foo", "org.acme", "foo");
foo.setRepository("foo-repo");
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("foo", foo)
.addRepository("foo-repo", "foo", "http://example.com/repo", false)
.build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("foo");
generateMavenPom(request).hasDependency(foo).hasRepository("foo-repo", "foo",
"http://example.com/repo", false);
}
@Test
public void mavenRepositoryWithSeveralDependenciesOnSameRepository() {
Dependency foo = Dependency.withId("foo", "org.acme", "foo");
foo.setRepository("the-repo");
Dependency bar = Dependency.withId("bar", "org.acme", "bar");
foo.setRepository("the-repo");
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("group", foo, bar)
.addRepository("the-repo", "repo", "http://example.com/repo", true)
.build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("foo", "bar");
generateMavenPom(request).hasDependency(foo)
.hasRepository("the-repo", "repo", "http://example.com/repo", true)
.hasRepositoriesCount(1);
}
@Test
public void gradleRepository() {
Dependency foo = Dependency.withId("foo", "org.acme", "foo");
foo.setRepository("foo-repo");
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("foo", foo)
.addRepository("foo-repo", "foo", "http://example.com/repo", false)
.build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("foo");
generateGradleBuild(request).hasRepository("http://example.com/repo");
}
@Test
public void projectWithOnlyStarterDependency() {
Dependency foo = Dependency.withId("foo", "org.foo", "custom-my-starter");
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("foo", foo).build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("foo");
generateMavenPom(request).hasDependency("org.foo", "custom-my-starter")
.hasSpringBootStarterTest().hasDependenciesCount(2);
}
@Test
public void projectWithOnlyNonStarterDependency() {
Dependency foo = Dependency.withId("foo", "org.foo", "foo");
foo.setStarter(false);
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("foo", foo).build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("foo");
generateMavenPom(request).hasDependency("org.foo", "foo")
.hasSpringBootStarterRootDependency().hasSpringBootStarterTest()
.hasDependenciesCount(3);
}
@Test
public void buildPropertiesMaven() {
ProjectRequest request = createProjectRequest("web");
request.getBuildProperties().getMaven().put("name", () -> "test");
request.getBuildProperties().getVersions().put(
new VersionProperty("foo.version"), () -> "1.2.3");
request.getBuildProperties().getGradle().put("ignore.property", () -> "yes");
generateMavenPom(request).hasProperty("name", "test")
.hasProperty("foo.version", "1.2.3").hasNoProperty("ignore.property");
}
@Test
public void buildPropertiesGradle() {
ProjectRequest request = createProjectRequest("web");
request.getBuildProperties().getGradle().put("name", () -> "test");
request.getBuildProperties().getVersions().put(
new VersionProperty("foo.version"), () -> "1.2.3");
request.getBuildProperties().getMaven().put("ignore.property", () -> "yes");
generateGradleBuild(request).contains("name = 'test'")
.contains("ext {")
.contains("fooVersion = '1.2.3'")
.doesNotContain("ignore.property");
}
@Test
public void versionRangeWithPostProcessor() {
Dependency foo = Dependency.withId("foo", "org.acme", "foo");
foo.getMappings().add(Dependency.Mapping.create("[1.2.0.RELEASE,1.3.0.M1)", null,
null, "1.0.0"));
foo.getMappings().add(Dependency.Mapping.create("1.3.0.M1", null, null, "1.2.0"));
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("foo", foo).build();
applyMetadata(metadata);
// First without processor, get the correct version
ProjectRequest request = createProjectRequest("foo");
request.setBootVersion("1.2.5.RELEASE");
generateMavenPom(request)
.hasDependency(Dependency.withId("foo", "org.acme", "foo", "1.0.0"));
// First after processor that flips Spring Boot version
projectGenerator.setRequestResolver(new ProjectRequestResolver(
Collections.singletonList(new ProjectRequestPostProcessor() {
@Override
public void postProcessBeforeResolution(ProjectRequest r,
InitializrMetadata m) {
r.setBootVersion("1.3.0.M2");
}
})));
generateMavenPom(request)
.hasDependency(Dependency.withId("foo", "org.acme", "foo", "1.2.0"));
}
@Test
public void gitIgnoreMaven() {
ProjectRequest request = createProjectRequest();
request.setType("maven-project");
ProjectAssert project = generateProject(request);
project.sourceCodeAssert(".gitignore")
.equalsTo(new ClassPathResource("project/maven/gitignore.gen"));
}
@Test
public void gitIgnoreGradle() {
ProjectRequest request = createProjectRequest();
request.setType("gradle-project");
ProjectAssert project = generateProject(request);
project.sourceCodeAssert(".gitignore")
.equalsTo(new ClassPathResource("project/gradle/gitignore.gen"));
}
@Test
public void dependencyOrderSpringBootTakesPrecedence() {
Dependency depOne = Dependency.withId("one", "org.acme", "first", "1.2.3");
Dependency depTwo = Dependency.withId("two", "com.example", "second", "1.2.3");
InitializrMetadata metadata = InitializrMetadataTestBuilder.withDefaults()
.addDependencyGroup("core", "web", "security", "data-jpa")
.addDependencyGroup("sample", depOne, depTwo).build();
applyMetadata(metadata);
ProjectRequest request = createProjectRequest("one", "web", "two", "data-jpa");
assertThat(generateGradleBuild(request).getGradleBuild())
.containsSubsequence(
"compile('org.springframework.boot:spring-boot-starter-data-jpa')",
"compile('org.springframework.boot:spring-boot-starter-web')",
"compile('com.example:second:1.2.3')",
"compile('org.acme:first:1.2.3')");
}
@Test
public void invalidProjectTypeMavenPom() {
ProjectRequest request = createProjectRequest("web");
request.setType("gradle-build");
this.thrown.expect(InvalidProjectRequestException.class);
this.thrown.expectMessage("gradle-build");
projectGenerator.generateMavenPom(request);
}
@Test
public void invalidProjectTypeGradleBuild() {
ProjectRequest request = createProjectRequest("web");
request.setType("maven-build");
this.thrown.expect(InvalidProjectRequestException.class);
this.thrown.expectMessage("maven-build");
projectGenerator.generateGradleBuild(request);
}
@Test
public void invalidDependency() {
ProjectRequest request = createProjectRequest("foo-bar");
try {
generateMavenPom(request);
fail("Should have failed to generate project");
}
catch (InvalidProjectRequestException ex) {
assertThat(ex.getMessage()).contains("foo-bar");
verifyProjectFailedEventFor(request, ex);
}
}
@Test
public void invalidType() {
ProjectRequest request = createProjectRequest("web");
request.setType("foo-bar");
try {
generateProject(request);
fail("Should have failed to generate project");
}
catch (InvalidProjectRequestException ex) {
assertThat(ex.getMessage()).contains("foo-bar");
verifyProjectFailedEventFor(request, ex);
}
}
@Test
public void invalidPackaging() {
ProjectRequest request = createProjectRequest("web");
request.setPackaging("foo-bar");
try {
generateGradleBuild(request);
fail("Should have failed to generate project");
}
catch (InvalidProjectRequestException ex) {
assertThat(ex.getMessage()).contains("foo-bar");
verifyProjectFailedEventFor(request, ex);
}
}
@Test
public void invalidLanguage() {
ProjectRequest request = createProjectRequest("web");
request.setLanguage("foo-bar");
try {
generateProject(request);
fail("Should have failed to generate project");
}
catch (InvalidProjectRequestException ex) {
assertThat(ex.getMessage()).contains("foo-bar");
verifyProjectFailedEventFor(request, ex);
}
}
}
| apache-2.0 |
somnusdear/coolweather | app/src/main/java/com/somnusdear/coolweather/gson/Basic.java | 402 | package com.somnusdear.coolweather.gson;
import com.google.gson.annotations.SerializedName;
/**
* Created by Administrator on 2017/9/4.
*/
public class Basic {
@SerializedName("city")
public String cityName;
@SerializedName("id")
public String weatherId;
public Update update;
public class Update{
@SerializedName("loc")
public String updateTime;
}
}
| apache-2.0 |
kuvic16/CostAnalyzer | src/com/vagabondlab/costanalyzer/MonthlyReportActivity.java | 11524 | package com.vagabondlab.costanalyzer;
import java.sql.SQLException;
import java.util.List;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import android.annotation.SuppressLint;
import android.app.ProgressDialog;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.widget.DrawerLayout;
import android.view.GestureDetector;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.widget.ListView;
import android.widget.RelativeLayout;
import android.widget.ScrollView;
import android.widget.TableLayout;
import android.widget.TableRow;
import android.widget.TextView;
import com.daimajia.androidanimations.library.Techniques;
import com.daimajia.androidanimations.library.YoYo;
import com.nineoldandroids.animation.Animator;
import com.nineoldandroids.animation.Animator.AnimatorListener;
import com.vagabondlab.costanalyzer.database.service.CostService;
import com.vagabondlab.costanalyzer.utilities.DatePickerFragment;
import com.vagabondlab.costanalyzer.utilities.IConstant;
import com.vagabondlab.costanalyzer.utilities.IUtil;
import com.vagabondlab.costanalyzer.utilities.ViewUtil;
@SuppressLint({ "ClickableViewAccessibility", "DefaultLocale" })
public class MonthlyReportActivity extends CActivity{
private NavigationDrawerFragment mNavigationDrawerFragment;
private CostService costService;
private TextView mCategoryWiseCostStatus;
private TextView mDayWiseCostStatus;
private GestureDetector mGestureDetector;
private RelativeLayout mRLShortSummary;
private DateTime mCurrentDate;
private ProgressDialog mProgressDialog = null;
private ScrollView mSvTable;
private TextView mSummaryStatusView;
private TextView mTotalCostView;
private TextView mProductiveCostView;
private TextView mWastageCostView;
private Double productiveCost = 0.0;
private Double wastageCost = 0.0;
private Double totalCost = 0.0;
private DateTimeFormatter dateFormatter = DateTimeFormat.forPattern(IUtil.DATE_FORMAT_YYYY_MM_DD);
private int action = 0;
@SuppressWarnings("deprecation")
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_monthly_report);
    setTitle(getString(R.string.title_monthly_report));
    mTitle = getTitle();
    // Attach and configure the navigation drawer fragment from the layout.
    mNavigationDrawerFragment = (NavigationDrawerFragment) getSupportFragmentManager().findFragmentById(R.id.navigation_drawer);
    mNavigationDrawerFragment.setUp(R.id.navigation_drawer,(DrawerLayout) findViewById(R.id.drawer_layout));
    try {
        // getCostDao() can throw SQLException — handled by the catch below.
        costService = new CostService(getHelper().getCostDao());
        // NOTE(review): likely the reason for @SuppressWarnings("deprecation")
        // above (a deprecated GestureDetector constructor) — confirm.
        mGestureDetector = new GestureDetector(this);
        mCategoryWiseCostStatus = (TextView)findViewById(R.id.textView_category_wise_cost_status);
        mDayWiseCostStatus = (TextView)findViewById(R.id.textView_day_wise_cost_status);
        // The swipe listener is installed on every summary view so a touch
        // anywhere in the panel reaches the gesture detector.
        mRLShortSummary = (RelativeLayout)findViewById(R.id.relative_layout_summary);
        mRLShortSummary.setOnTouchListener(shortSummarySwipeListener);
        mSvTable = (ScrollView)findViewById(R.id.scroll_view_table);
        mSvTable.setOnTouchListener(shortSummarySwipeListener);
        mSummaryStatusView = (TextView)findViewById(R.id.textView_summary_status);
        mTotalCostView = (TextView)findViewById(R.id.textView_summary_total_cost);
        mTotalCostView.setOnTouchListener(shortSummarySwipeListener);
        mProductiveCostView = (TextView)findViewById(R.id.textView_summary_effective_cost);
        mProductiveCostView.setOnTouchListener(shortSummarySwipeListener);
        mWastageCostView = (TextView)findViewById(R.id.textView_summary_wastage_cost);
        mWastageCostView.setOnTouchListener(shortSummarySwipeListener);
        // Initial load: report for the current month.
        DateTime date = new DateTime();
        loadCostList(date);
        //google analytics
        ((CostAnalyzer) getApplication()).getTracker(CostAnalyzer.TrackerName.APP_TRACKER);
    } catch (SQLException e) {
        e.printStackTrace();
    }
}
/**
 * Refreshes everything on screen for the month containing {@code date}:
 * the quick summary, the per-category table and the per-day table.
 * Also records {@code date} as the currently displayed month.
 */
private void loadCostList(DateTime date){
    try {
        mCurrentDate = date;
        // Query bounds: first and last day of the selected month.
        String monthStart = dateFormatter.print(date.dayOfMonth().withMinimumValue());
        String monthEnd = dateFormatter.print(date.dayOfMonth().withMaximumValue());
        // loadQuickView must run first — it computes totalCost, which the
        // category table uses for its percentage column.
        loadQuickView(monthStart, monthEnd);
        loadCategoryWiseCostListViewUI(costService.getTotalCostGroupByCategory(monthStart, monthEnd));
        loadDayWiseCostListViewUI(costService.getCostListGroupByDate(monthStart, monthEnd, getString(R.string.productive), getString(R.string.wastage)));
    } catch (Exception ex) {
        ViewUtil.showMessage(getApplicationContext(), getString(R.string.error, ex));
    }
}
/**
 * Recomputes the month's quick summary (productive, wastage and total cost)
 * from the cost service and pushes the figures into the summary views,
 * together with a "&lt;Month&gt; &lt;Year&gt;" caption built from mCurrentDate.
 *
 * @param startDate inclusive lower bound, formatted by dateFormatter
 * @param endDate   inclusive upper bound, formatted by dateFormatter
 */
private void loadQuickView(String startDate, String endDate){
    try{
        productiveCost = 0.0;
        wastageCost = 0.0;
        totalCost = 0.0;
        // Rows appear to be {type, total-cost-as-string} — TODO confirm
        // against CostService.getTotalCostGroupByType().
        List<String[]> costListGroupByType = costService.getTotalCostGroupByType(startDate, endDate);
        if(IUtil.isNotBlank(costListGroupByType)){
            for(String[] costs : costListGroupByType){
                try{
                    if(costs[0].equalsIgnoreCase(getString(R.string.productive))){
                        productiveCost = Double.valueOf(costs[1]);
                    }else if(costs[0].equalsIgnoreCase(getString(R.string.wastage))){
                        wastageCost = Double.valueOf(costs[1]);
                    }
                }catch(Exception e){
                    // One malformed row must not abort the whole summary.
                    // (Was catch(Throwable), which would also trap VM Errors.)
                    e.printStackTrace();
                }
            }
        }
        totalCost = productiveCost + wastageCost;
        // The quick view shows whole units only.
        mTotalCostView.setText(String.valueOf(totalCost.intValue()));
        mProductiveCostView.setText(String.valueOf(productiveCost.intValue()));
        mWastageCostView.setText(String.valueOf(wastageCost.intValue()));
        mSummaryStatusView.setText(IUtil.theMonth(mCurrentDate.getMonthOfYear()) + " " + mCurrentDate.getYear());
    }catch(Exception e){
        // Narrowed from Throwable: never swallow Errors such as OutOfMemoryError.
        e.printStackTrace();
    }
}
/**
 * Rebuilds the per-category table: one row per category with its cost and
 * its share of the month's total. Input rows appear to be
 * {category, entryCount, totalCost} — TODO confirm against CostService.
 */
private void loadCategoryWiseCostListViewUI(List<String[]> costList) {
    try {
        TableLayout table = (TableLayout)findViewById(R.id.categoryWiseCostTable);
        table.removeAllViews();
        table.addView(ViewUtil.getCategoryWiseCostTableHeader(this));
        for (String[] row : costList) {
            Double categoryCost = Double.valueOf(row[2]);
            // Percentage of the month's total (0 when either figure is zero,
            // avoiding division by zero). totalCost was set by loadQuickView.
            Double share = 0.0;
            if (totalCost != 0 && categoryCost != 0) {
                share = (categoryCost * 100) / totalCost;
            }
            TableRow tableRow = new TableRow(this);
            tableRow.addView(ViewUtil.getTableColumn(this, row[0] + "(" + row[1] + ")", Gravity.LEFT));
            tableRow.addView(ViewUtil.getTableColumn(this, String.format("%.1f", categoryCost), Gravity.CENTER));
            tableRow.addView(ViewUtil.getTableColumn(this, String.format("%.1f", share) + "%", Gravity.CENTER));
            table.addView(tableRow);
            table.addView(ViewUtil.getDividerView(getApplicationContext()));
        }
        mCategoryWiseCostStatus.setText(getString(R.string.category_wise_cost_status, costList.size()));
    } catch (Exception ex) {
        ViewUtil.showMessage(getApplicationContext(), getString(R.string.error, ex));
    }
}
/**
 * Rebuilds the per-day table. The date (row[0]) is shown first, followed by
 * the three cost figures in source order {row[3], row[1], row[2]} to match
 * the header produced by ViewUtil.getWeekDayTableHeader().
 */
private void loadDayWiseCostListViewUI(List<String[]> costList) {
    try {
        TableLayout table = (TableLayout)findViewById(R.id.dayWiseCostTable);
        table.removeAllViews();
        table.addView(ViewUtil.getWeekDayTableHeader(this));
        // Source-column indexes, in display order after the date column.
        int[] valueColumns = {3, 1, 2};
        for (String[] row : costList) {
            TableRow tableRow = new TableRow(this);
            tableRow.addView(ViewUtil.getTableColumn(this, row[0], Gravity.LEFT));
            for (int col : valueColumns) {
                tableRow.addView(ViewUtil.getTableColumn(this, String.format("%.1f", Double.valueOf(row[col])), Gravity.CENTER));
            }
            table.addView(tableRow);
            table.addView(ViewUtil.getDividerView(getApplicationContext()));
        }
        mDayWiseCostStatus.setText(getString(R.string.day_wise_cost_status, costList.size()));
    } catch (Exception ex) {
        ViewUtil.showMessage(getApplicationContext(), getString(R.string.error, ex));
    }
}
// 1. Listener
/**
 * Forwards touch events from the summary views to the swipe GestureDetector.
 * Always returns false so the views' own touch handling (e.g. scrolling)
 * is never consumed here; the detector reacts through its own callbacks.
 */
OnTouchListener shortSummarySwipeListener = new OnTouchListener() {
    @Override
    public boolean onTouch(View v, MotionEvent event) {
        // Original had an if/else whose branches both returned false — a dead
        // conditional. The detector's return value is deliberately ignored.
        mGestureDetector.onTouchEvent(event);
        return false;
    }
};
/**
 * Shows an indeterminate progress dialog for the duration of a slide
 * animation and tears it down when the animation ends or is cancelled.
 */
AnimatorListener animatorListener = new AnimatorListener() {
    @Override
    public void onAnimationStart(Animator animation) {
        mProgressDialog = ProgressDialog.show(MonthlyReportActivity.this, "Please wait ...", "Loading...", true);
        mProgressDialog.setCancelable(true);
    }
    @Override
    public void onAnimationRepeat(Animator animation) {
        // Nothing to do on repeat.
    }
    @Override
    public void onAnimationEnd(Animator animation) {
        hideProgress();
    }
    @Override
    public void onAnimationCancel(Animator animation) {
        hideProgress();
    }
    // Shared teardown for both terminal callbacks.
    private void hideProgress() {
        if (mProgressDialog != null) {
            mProgressDialog.dismiss();
        }
    }
};
// 2. override
/**
 * Inflates the report menu only while the navigation drawer is closed;
 * when the drawer is open the base class shows the drawer's own actions.
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    if (mNavigationDrawerFragment.isDrawerOpen()) {
        return super.onCreateOptionsMenu(menu);
    }
    getMenuInflater().inflate(R.menu.report, menu);
    restoreActionBar();
    return true;
}
/**
 * Menu handling: the search action opens a date picker so the user can jump
 * to an arbitrary month; everything else falls through to the base class.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    if (item.getItemId() == R.id.search) {
        DialogFragment picker = new DatePickerFragment();
        picker.show(getSupportFragmentManager(), "datePicker");
    }
    return super.onOptionsItemSelected(item);
}
/**
 * Handles the return from a child activity. When coming back from the parent
 * flow, re-selects the first drawer item. Note: resultCode and data are
 * intentionally unused for this request code.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    // Bug fix: the framework requires calling through to super so hosted
    // fragments and the base activity also receive the result.
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == IConstant.PARENT_ACTIVITY_REQUEST_CODE) {
        firstTime = true;
        onNavigationDrawerItemSelected(0);
    }
}
/**
 * Swipe callback for the "next" gesture: slides the root view in from the
 * right and loads the PREVIOUS month (the original intentionally mapped
 * "next" to one month back).
 */
@Override
public void nextView(){
    YoYo.with(Techniques.SlideInRight)
            .duration(500)
            .interpolate(new AccelerateDecelerateInterpolator())
            .withListener(animatorListener)
            .playOn(findViewById(R.id.relative_layout_root));
    // Equivalent to plusMonths(-1) in the original.
    loadCostList(mCurrentDate.minusMonths(1));
}
/**
 * Swipe callback for the "previous" gesture: slides the root view in from
 * the left and loads the FOLLOWING month (mirror of nextView).
 */
@Override
public void prevView(){
    YoYo.with(Techniques.SlideInLeft)
            .duration(500)
            .interpolate(new AccelerateDecelerateInterpolator())
            .withListener(animatorListener)
            .playOn(findViewById(R.id.relative_layout_root));
    loadCostList(mCurrentDate.plusMonths(1));
}
/**
 * Date-picker callback: animates the screen and loads the month containing
 * the picked date. Marks the action as a search so the back key first
 * restores the current month (see onKeyDown).
 *
 * @param date date string in the dateFormatter pattern
 */
@Override
public void returnDate(String date) {
    action = IConstant.ACTION_SEARCH;
    closeProgressDialog();
    YoYo.with(Techniques.SlideInDown)
            .duration(500)
            .interpolate(new AccelerateDecelerateInterpolator())
            .withListener(animatorListener)
            .playOn(findViewById(R.id.relative_layout_root));
    DateTime picked = dateFormatter.parseDateTime(date);
    loadCostList(picked);
}
@Override
public ListView getListView() {
    // This screen renders tables rather than a ListView, so there is
    // nothing to expose to callers of this interface method.
    return null;
}
/**
 * Back-key handling: after a date search the first press restores the
 * current month; otherwise back navigates to the home screen. Any other
 * key gets default handling.
 */
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
    if (keyCode != KeyEvent.KEYCODE_BACK) {
        return super.onKeyDown(keyCode, event);
    }
    if (action == IConstant.ACTION_SEARCH) {
        showProgressDialog();
        // Reset to the month containing "now" and clear the search flag.
        loadCostList(new DateTime());
        action = IConstant.ACTION_NONE;
        closeProgressDialog();
    } else {
        Intent home = new Intent(getApplicationContext(), HomeActivity.class);
        startActivity(home);
    }
    return true;
}
}
| apache-2.0 |
gilesp/taskhelper | hakken-server/src/main/java/uk/co/vurt/hakken/security/auth/AllowAlwaysAuthenticator.java | 407 | package uk.co.vurt.hakken.security.auth;
/**
* Test authenticator that does not rely on any external infrastructure
*
* User will always be authenticated, regardless of credentials passed.
*
*/
public class AllowAlwaysAuthenticator implements Authenticator {
public boolean authenticate(String username, String password) {
return true;
}
public String getErrorMessage() {
return null;
}
}
| apache-2.0 |
Rimbit/rimbitj | core/src/main/java/com/rimbit/rimbit/crypto/TransactionSignature.java | 7938 | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rimbit.rimbit.crypto;
import com.rimbit.rimbit.core.ECKey;
import com.rimbit.rimbit.core.Transaction;
import com.rimbit.rimbit.core.VerificationException;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
/**
* A TransactionSignature wraps an {@link com.rimbit.rimbit.core.ECKey.ECDSASignature} and adds methods for handling
* the additional SIGHASH mode byte that is used.
*/
public class TransactionSignature extends ECKey.ECDSASignature {
    /**
     * A byte that controls which parts of a transaction are signed. This is exposed because signatures
     * parsed off the wire may have sighash flags that aren't "normal" serializations of the enum values.
     * Because Satoshi's code works via bit testing, we must not lose the exact value when round-tripping
     * otherwise we'll fail to verify signature hashes.
     */
    public int sighashFlags = Transaction.SigHash.ALL.ordinal() + 1;

    /** Constructs a signature with the given components and SIGHASH_ALL. */
    public TransactionSignature(BigInteger r, BigInteger s) {
        super(r, s);
    }

    /** Constructs a transaction signature based on the ECDSA signature. */
    public TransactionSignature(ECKey.ECDSASignature signature, Transaction.SigHash mode, boolean anyoneCanPay) {
        super(signature.r, signature.s);
        setSigHash(mode, anyoneCanPay);
    }

    /**
     * Returns a dummy invalid signature whose R/S values are set such that they will take up the same number of
     * encoded bytes as a real signature. This can be useful when you want to fill out a transaction to be of the
     * right size (e.g. for fee calculations) but don't have the requisite signing key yet and will fill out the
     * real signature later.
     */
    public static TransactionSignature dummy() {
        // Both components use the same value so the DER encoding has a
        // realistic (full) length, per the javadoc above.
        BigInteger val = ECKey.HALF_CURVE_ORDER;
        return new TransactionSignature(val, val);
    }

    /** Calculates the byte used in the protocol to represent the combination of mode and anyoneCanPay. */
    public static int calcSigHashValue(Transaction.SigHash mode, boolean anyoneCanPay) {
        // The wire value is ordinal+1, with the ANYONECANPAY bit OR'd on top.
        int sighashFlags = mode.ordinal() + 1;
        if (anyoneCanPay)
            sighashFlags |= Transaction.SIGHASH_ANYONECANPAY_VALUE;
        return sighashFlags;
    }

    /**
     * Returns true if the given signature is has canonical encoding, and will thus be accepted as standard by
     * the reference client. DER and the SIGHASH encoding allow for quite some flexibility in how the same structures
     * are encoded, and this can open up novel attacks in which a man in the middle takes a transaction and then
     * changes its signature such that the transaction hash is different but it's still valid. This can confuse wallets
     * and generally violates people's mental model of how Rimbit should work, thus, non-canonical signatures are now
     * not relayed by default.
     */
    public static boolean isEncodingCanonical(byte[] signature) {
        // See reference client's IsCanonicalSignature, https://rimbittalk.org/index.php?topic=8392.msg127623#msg127623
        // A canonical signature exists of: <30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype>
        // Where R and S are not negative (their first byte has its highest bit not set), and not
        // excessively padded (do not start with a 0 byte, unless an otherwise negative number follows,
        // in which case a single 0 byte is necessary and even required).
        //
        // Byte layout used by the index arithmetic below:
        //   [0]=0x30  [1]=total len  [2]=0x02  [3]=lenR  [4 .. 4+lenR-1]=R
        //   [4+lenR]=0x02  [5+lenR]=lenS  [6+lenR .. 6+lenR+lenS-1]=S  [last]=hashtype
        if (signature.length < 9 || signature.length > 73)
            return false;
        // Strip the ANYONECANPAY bit, then require a known SigHash mode.
        int hashType = signature[signature.length-1] & ((int)(~Transaction.SIGHASH_ANYONECANPAY_VALUE));
        if (hashType < (Transaction.SigHash.ALL.ordinal() + 1) || hashType > (Transaction.SigHash.SINGLE.ordinal() + 1))
            return false;
        //                   "wrong type"                  "wrong length marker"
        if ((signature[0] & 0xff) != 0x30 || (signature[1] & 0xff) != signature.length-3)
            return false;
        int lenR = signature[3] & 0xff;
        // R must fit inside the buffer and be non-empty.
        if (5 + lenR >= signature.length || lenR == 0)
            return false;
        int lenS = signature[5+lenR] & 0xff;
        // Total structure length must exactly account for both components.
        if (lenR + lenS + 7 != signature.length || lenS == 0)
            return false;
        // signature[4-2] == signature[2]: DER type byte for R (expression kept
        // as written upstream).
        //    R value type mismatch          R value negative
        if (signature[4-2] != 0x02 || (signature[4] & 0x80) == 0x80)
            return false;
        if (lenR > 1 && signature[4] == 0x00 && (signature[4+1] & 0x80) != 0x80)
            return false; // R value excessively padded
        // signature[6 + lenR - 2] == signature[4 + lenR]: DER type byte for S.
        //       S value type mismatch                    S value negative
        if (signature[6 + lenR - 2] != 0x02 || (signature[6 + lenR] & 0x80) == 0x80)
            return false;
        if (lenS > 1 && signature[6 + lenR] == 0x00 && (signature[6 + lenR + 1] & 0x80) != 0x80)
            return false; // S value excessively padded
        return true;
    }

    /** Configures the sighashFlags field as appropriate. */
    public void setSigHash(Transaction.SigHash mode, boolean anyoneCanPay) {
        sighashFlags = calcSigHashValue(mode, anyoneCanPay);
    }

    /** Returns true if the ANYONECANPAY bit is set in the sighash flags. */
    public boolean anyoneCanPay() {
        return (sighashFlags & Transaction.SIGHASH_ANYONECANPAY_VALUE) != 0;
    }

    /** Returns the SigHash mode encoded in the low 5 bits of sighashFlags, defaulting to ALL. */
    public Transaction.SigHash sigHashMode() {
        // Mask to the low 5 bits so the ANYONECANPAY bit never affects the mode.
        final int mode = sighashFlags & 0x1f;
        if (mode == Transaction.SigHash.NONE.ordinal() + 1)
            return Transaction.SigHash.NONE;
        else if (mode == Transaction.SigHash.SINGLE.ordinal() + 1)
            return Transaction.SigHash.SINGLE;
        else
            return Transaction.SigHash.ALL;
    }

    /**
     * What we get back from the signer are the two components of a signature, r and s. To get a flat byte stream
     * of the type used by Rimbit we have to encode them using DER encoding, which is just a way to pack the two
     * components into a structure, and then we append a byte to the end for the sighash flags.
     */
    public byte[] encodeToRimbit() {
        try {
            ByteArrayOutputStream bos = derByteStream();
            // The sighash byte is appended after the DER structure.
            bos.write(sighashFlags);
            return bos.toByteArray();
        } catch (IOException e) {
            throw new RuntimeException(e);  // Cannot happen.
        }
    }

    /**
     * Returns a decoded signature.
     * @throws RuntimeException if the signature is invalid or unparseable in some way.
     */
    public static TransactionSignature decodeFromRimbit(byte[] bytes, boolean requireCanonical) throws VerificationException {
        // Rimbit encoding is DER signature + sighash byte.
        if (requireCanonical && !isEncodingCanonical(bytes))
            throw new VerificationException("Signature encoding is not canonical.");
        ECKey.ECDSASignature sig;
        try {
            sig = ECKey.ECDSASignature.decodeFromDER(bytes);
        } catch (IllegalArgumentException e) {
            throw new VerificationException("Could not decode DER", e);
        }
        TransactionSignature tsig = new TransactionSignature(sig.r, sig.s);
        // In Rimbit, any value of the final byte is valid, but not necessarily canonical. See javadocs for
        // isEncodingCanonical to learn more about this.
        // NOTE(review): the raw signed byte is stored as-is, so a flag byte
        // >= 0x80 sign-extends to a negative int; the exact bits are thereby
        // preserved for round-tripping (see sighashFlags javadoc).
        tsig.sighashFlags = bytes[bytes.length - 1];
        return tsig;
    }
}
| apache-2.0 |
googleapis/java-accesscontextmanager | proto-google-identity-accesscontextmanager-v1/src/main/java/com/google/identity/accesscontextmanager/v1/UpdateAccessPolicyRequest.java | 34451 | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/identity/accesscontextmanager/v1/access_context_manager.proto
package com.google.identity.accesscontextmanager.v1;
/**
*
*
* <pre>
* A request to update an `AccessPolicy`.
* </pre>
*
* Protobuf type {@code google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest}
*/
public final class UpdateAccessPolicyRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest)
UpdateAccessPolicyRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateAccessPolicyRequest.newBuilder() to construct.
private UpdateAccessPolicyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateAccessPolicyRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateAccessPolicyRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private UpdateAccessPolicyRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.identity.accesscontextmanager.v1.AccessPolicy.Builder subBuilder = null;
if (policy_ != null) {
subBuilder = policy_.toBuilder();
}
policy_ =
input.readMessage(
com.google.identity.accesscontextmanager.v1.AccessPolicy.parser(),
extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(policy_);
policy_ = subBuilder.buildPartial();
}
break;
}
case 18:
{
com.google.protobuf.FieldMask.Builder subBuilder = null;
if (updateMask_ != null) {
subBuilder = updateMask_.toBuilder();
}
updateMask_ =
input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(updateMask_);
updateMask_ = subBuilder.buildPartial();
}
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.identity.accesscontextmanager.v1.AccessContextManagerProto
.internal_static_google_identity_accesscontextmanager_v1_UpdateAccessPolicyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.identity.accesscontextmanager.v1.AccessContextManagerProto
.internal_static_google_identity_accesscontextmanager_v1_UpdateAccessPolicyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest.class,
com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest.Builder.class);
}
public static final int POLICY_FIELD_NUMBER = 1;
private com.google.identity.accesscontextmanager.v1.AccessPolicy policy_;
/**
*
*
* <pre>
* Required. The updated AccessPolicy.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.AccessPolicy policy = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the policy field is set.
*/
@java.lang.Override
public boolean hasPolicy() {
return policy_ != null;
}
/**
*
*
* <pre>
* Required. The updated AccessPolicy.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.AccessPolicy policy = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The policy.
*/
@java.lang.Override
public com.google.identity.accesscontextmanager.v1.AccessPolicy getPolicy() {
return policy_ == null
? com.google.identity.accesscontextmanager.v1.AccessPolicy.getDefaultInstance()
: policy_;
}
/**
*
*
* <pre>
* Required. The updated AccessPolicy.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.AccessPolicy policy = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.identity.accesscontextmanager.v1.AccessPolicyOrBuilder getPolicyOrBuilder() {
return getPolicy();
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. Mask to control which fields get updated. Must be non-empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return updateMask_ != null;
}
/**
*
*
* <pre>
* Required. Mask to control which fields get updated. Must be non-empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Mask to control which fields get updated. Must be non-empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return getUpdateMask();
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (policy_ != null) {
output.writeMessage(1, getPolicy());
}
if (updateMask_ != null) {
output.writeMessage(2, getUpdateMask());
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (policy_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getPolicy());
}
if (updateMask_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest)) {
return super.equals(obj);
}
com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest other =
(com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest) obj;
if (hasPolicy() != other.hasPolicy()) return false;
if (hasPolicy()) {
if (!getPolicy().equals(other.getPolicy())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasPolicy()) {
hash = (37 * hash) + POLICY_FIELD_NUMBER;
hash = (53 * hash) + getPolicy().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A request to update an `AccessPolicy`.
* </pre>
*
* Protobuf type {@code google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest)
com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.identity.accesscontextmanager.v1.AccessContextManagerProto
.internal_static_google_identity_accesscontextmanager_v1_UpdateAccessPolicyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.identity.accesscontextmanager.v1.AccessContextManagerProto
.internal_static_google_identity_accesscontextmanager_v1_UpdateAccessPolicyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest.class,
com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest.Builder.class);
}
// Construct using
// com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
if (policyBuilder_ == null) {
policy_ = null;
} else {
policy_ = null;
policyBuilder_ = null;
}
if (updateMaskBuilder_ == null) {
updateMask_ = null;
} else {
updateMask_ = null;
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.identity.accesscontextmanager.v1.AccessContextManagerProto
.internal_static_google_identity_accesscontextmanager_v1_UpdateAccessPolicyRequest_descriptor;
}
@java.lang.Override
public com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest
getDefaultInstanceForType() {
return com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest build() {
com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest buildPartial() {
com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest result =
new com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest(this);
if (policyBuilder_ == null) {
result.policy_ = policy_;
} else {
result.policy_ = policyBuilder_.build();
}
if (updateMaskBuilder_ == null) {
result.updateMask_ = updateMask_;
} else {
result.updateMask_ = updateMaskBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest) {
return mergeFrom(
(com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest other) {
if (other
== com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest
.getDefaultInstance()) return this;
if (other.hasPolicy()) {
mergePolicy(other.getPolicy());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
// This message declares no proto2 "required" fields, so it is always initialized.
public final boolean isInitialized() {
return true;
}
@java.lang.Override
// Parses a message from the wire and merges it into this builder. On a
// parse failure, any partially-parsed content is still merged (finally
// block) before the exception is rethrown as an IOException.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest)
e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Backing field and lazily-created nested builder for "policy" (field 1).
// Exactly one of the two is the source of truth at any time.
private com.google.identity.accesscontextmanager.v1.AccessPolicy policy_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.identity.accesscontextmanager.v1.AccessPolicy,
com.google.identity.accesscontextmanager.v1.AccessPolicy.Builder,
com.google.identity.accesscontextmanager.v1.AccessPolicyOrBuilder>
policyBuilder_;
/**
*
*
* <pre>
* Required. The updated AccessPolicy.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.AccessPolicy policy = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the policy field is set.
*/
public boolean hasPolicy() {
// Set when either the raw message or its lazily-created builder exists.
return policyBuilder_ != null || policy_ != null;
}
/**
*
*
* <pre>
* Required. The updated AccessPolicy.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.AccessPolicy policy = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The policy.
*/
public com.google.identity.accesscontextmanager.v1.AccessPolicy getPolicy() {
// Read from whichever holder is active; never returns null (default
// instance stands in for an unset field).
if (policyBuilder_ == null) {
return policy_ == null
? com.google.identity.accesscontextmanager.v1.AccessPolicy.getDefaultInstance()
: policy_;
} else {
return policyBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The updated AccessPolicy.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.AccessPolicy policy = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setPolicy(com.google.identity.accesscontextmanager.v1.AccessPolicy value) {
// Replaces any existing value; null is rejected (proto fields are never null).
if (policyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
policy_ = value;
onChanged();
} else {
policyBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Required. The updated AccessPolicy.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.AccessPolicy policy = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setPolicy(
com.google.identity.accesscontextmanager.v1.AccessPolicy.Builder builderForValue) {
// Convenience overload: builds the sub-message immediately and stores it.
if (policyBuilder_ == null) {
policy_ = builderForValue.build();
onChanged();
} else {
policyBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Required. The updated AccessPolicy.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.AccessPolicy policy = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergePolicy(com.google.identity.accesscontextmanager.v1.AccessPolicy value) {
// Field-wise merge: if a policy is already present its set fields are
// combined with "value"; otherwise "value" is adopted as-is.
if (policyBuilder_ == null) {
if (policy_ != null) {
policy_ =
com.google.identity.accesscontextmanager.v1.AccessPolicy.newBuilder(policy_)
.mergeFrom(value)
.buildPartial();
} else {
policy_ = value;
}
onChanged();
} else {
policyBuilder_.mergeFrom(value);
}
return this;
}
/**
*
*
* <pre>
* Required. The updated AccessPolicy.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.AccessPolicy policy = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearPolicy() {
// Resets the field to unset, discarding the nested builder if one exists.
if (policyBuilder_ == null) {
policy_ = null;
onChanged();
} else {
policy_ = null;
policyBuilder_ = null;
}
return this;
}
/**
*
*
* <pre>
* Required. The updated AccessPolicy.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.AccessPolicy policy = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.identity.accesscontextmanager.v1.AccessPolicy.Builder getPolicyBuilder() {
// Hands out a mutable sub-builder; marks this builder dirty up front
// since the caller may mutate the field through it.
onChanged();
return getPolicyFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The updated AccessPolicy.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.AccessPolicy policy = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.identity.accesscontextmanager.v1.AccessPolicyOrBuilder getPolicyOrBuilder() {
// Read-only view; unlike getPolicyBuilder() this does not force builder creation.
if (policyBuilder_ != null) {
return policyBuilder_.getMessageOrBuilder();
} else {
return policy_ == null
? com.google.identity.accesscontextmanager.v1.AccessPolicy.getDefaultInstance()
: policy_;
}
}
/**
*
*
* <pre>
* Required. The updated AccessPolicy.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.AccessPolicy policy = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.identity.accesscontextmanager.v1.AccessPolicy,
com.google.identity.accesscontextmanager.v1.AccessPolicy.Builder,
com.google.identity.accesscontextmanager.v1.AccessPolicyOrBuilder>
getPolicyFieldBuilder() {
// Lazily creates the nested builder, seeding it with the current value,
// and nulls the raw field so the builder becomes the single source of truth.
if (policyBuilder_ == null) {
policyBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.identity.accesscontextmanager.v1.AccessPolicy,
com.google.identity.accesscontextmanager.v1.AccessPolicy.Builder,
com.google.identity.accesscontextmanager.v1.AccessPolicyOrBuilder>(
getPolicy(), getParentForChildren(), isClean());
policy_ = null;
}
return policyBuilder_;
}
// Backing field and lazily-created nested builder for "update_mask" (field 2).
// Exactly one of the two is the source of truth at any time.
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. Mask to control which fields get updated. Must be non-empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
// Set when either the raw message or its lazily-created builder exists.
return updateMaskBuilder_ != null || updateMask_ != null;
}
/**
*
*
* <pre>
* Required. Mask to control which fields get updated. Must be non-empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
// Never returns null; default instance stands in for an unset field.
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Mask to control which fields get updated. Must be non-empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
// Replaces any existing value; null is rejected (proto fields are never null).
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
onChanged();
} else {
updateMaskBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Required. Mask to control which fields get updated. Must be non-empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
// Convenience overload: builds the sub-message immediately and stores it.
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
onChanged();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Required. Mask to control which fields get updated. Must be non-empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
// Field-wise merge: combines with an existing mask, or adopts "value" if unset.
if (updateMaskBuilder_ == null) {
if (updateMask_ != null) {
updateMask_ =
com.google.protobuf.FieldMask.newBuilder(updateMask_).mergeFrom(value).buildPartial();
} else {
updateMask_ = value;
}
onChanged();
} else {
updateMaskBuilder_.mergeFrom(value);
}
return this;
}
/**
*
*
* <pre>
* Required. Mask to control which fields get updated. Must be non-empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
// Resets the field to unset, discarding the nested builder if one exists.
if (updateMaskBuilder_ == null) {
updateMask_ = null;
onChanged();
} else {
updateMask_ = null;
updateMaskBuilder_ = null;
}
return this;
}
/**
*
*
* <pre>
* Required. Mask to control which fields get updated. Must be non-empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
// Hands out a mutable sub-builder; marks this builder dirty up front.
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Mask to control which fields get updated. Must be non-empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
// Read-only view; does not force builder creation.
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. Mask to control which fields get updated. Must be non-empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
// Lazily creates the nested builder, seeding it with the current value,
// and nulls the raw field so the builder becomes the single source of truth.
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
// Generated boilerplate: delegation to the runtime's unknown-field handling.
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
// Generated boilerplate: delegation to the runtime's unknown-field handling.
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest)
}
// @@protoc_insertion_point(class_scope:google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest)
// Singleton default (all-fields-unset) instance of this generated message.
private static final com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest();
}
// Accessor for the shared default instance.
public static com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser; parsePartialFrom builds the message directly from the stream.
private static final com.google.protobuf.Parser<UpdateAccessPolicyRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateAccessPolicyRequest>() {
@java.lang.Override
public UpdateAccessPolicyRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new UpdateAccessPolicyRequest(input, extensionRegistry);
}
};
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<UpdateAccessPolicyRequest> parser() {
return PARSER;
}
@java.lang.Override
// Instance accessor required by the Message contract; same shared parser.
public com.google.protobuf.Parser<UpdateAccessPolicyRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
// Instance accessor required by the Message contract; same shared default.
public com.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| apache-2.0 |
PlanetWaves/clockworkengine | branches/3.0/engine/src/core/com/clockwork/light/LightList.java | 8354 |
package com.clockwork.light;
import com.clockwork.export.*;
import com.clockwork.scene.Spatial;
import com.clockwork.util.SortUtil;
import java.io.IOException;
import java.util.*;
/**
* LightList is used internally by Spatial}s to manage
* lights that are attached to them.
*
*/
/**
 * LightList is used internally by {@link Spatial}s to manage
 * lights that are attached to them.
 *
 * <p>The class keeps two parallel arrays: {@code list} holds the lights and
 * {@code distToOwner} holds a per-light distance slot (initialized to
 * {@link Float#NEGATIVE_INFINITY}). Both arrays must stay index-aligned.
 */
public final class LightList implements Iterable<Light>, Savable, Cloneable {

    private Light[] list, tlist;
    // Parallel to "list"; one distance slot per stored light.
    private float[] distToOwner;
    private int listSize;
    private Spatial owner;

    private static final int DEFAULT_SIZE = 1;

    private static final Comparator<Light> c = new Comparator<Light>() {
        /**
         * This assumes lastDistance has been computed in a previous step.
         */
        public int compare(Light l1, Light l2) {
            if (l1.lastDistance < l2.lastDistance)
                return -1;
            else if (l1.lastDistance > l2.lastDistance)
                return 1;
            else
                return 0;
        }
    };

    /**
     * Default constructor for serialisation. Do not use
     */
    public LightList(){
    }

    /**
     * Creates a <code>LightList</code> for the given {@link Spatial}.
     *
     * @param owner The spatial owner
     */
    public LightList(Spatial owner) {
        listSize = 0;
        list = new Light[DEFAULT_SIZE];
        distToOwner = new float[DEFAULT_SIZE];
        Arrays.fill(distToOwner, Float.NEGATIVE_INFINITY);
        this.owner = owner;
    }

    /**
     * Set the owner of the LightList. Only used for cloning.
     * @param owner the spatial that owns this list
     */
    public void setOwner(Spatial owner){
        this.owner = owner;
    }

    // Doubles the capacity of both parallel arrays, preserving contents.
    private void doubleSize(){
        Light[] temp = new Light[list.length * 2];
        float[] temp2 = new float[list.length * 2];
        System.arraycopy(list, 0, temp, 0, list.length);
        System.arraycopy(distToOwner, 0, temp2, 0, list.length);
        list = temp;
        distToOwner = temp2;
    }

    /**
     * Adds a light to the list. List size is doubled if there is no room.
     *
     * @param l
     *     The light to add.
     */
    public void add(Light l) {
        if (listSize == list.length) {
            doubleSize();
        }
        list[listSize] = l;
        distToOwner[listSize++] = Float.NEGATIVE_INFINITY;
    }

    /**
     * Remove the light at the given index.
     *
     * @param index index of the light to remove
     * @throws IndexOutOfBoundsException if the index is outside [0, size())
     */
    public void remove(int index){
        if (index >= listSize || index < 0)
            throw new IndexOutOfBoundsException();

        listSize --;
        if (index == listSize){
            list[listSize] = null;
            distToOwner[listSize] = Float.NEGATIVE_INFINITY;
            return;
        }

        // BUGFIX: shift both parallel arrays so they stay index-aligned;
        // previously only "list" was compacted, leaving stale distances.
        for (int i = index; i < listSize; i++){
            list[i] = list[i+1];
            distToOwner[i] = distToOwner[i+1];
        }
        list[listSize] = null;
        distToOwner[listSize] = Float.NEGATIVE_INFINITY;
    }

    /**
     * Removes the given light from the LightList.
     * A no-op if the light is not present (identity comparison).
     *
     * @param l the light to remove
     */
    public void remove(Light l){
        for (int i = 0; i < listSize; i++){
            if (list[i] == l){
                remove(i);
                return;
            }
        }
    }

    /**
     * @return The size of the list.
     */
    public int size(){
        return listSize;
    }

    /**
     * @return the light at the given index.
     * @throws IndexOutOfBoundsException If the given index is outside bounds.
     */
    public Light get(int num){
        if (num >= listSize || num < 0)
            throw new IndexOutOfBoundsException();
        return list[num];
    }

    /**
     * Resets list size to 0 and releases references to all stored lights.
     */
    public void clear() {
        if (listSize == 0)
            return;

        for (int i = 0; i < listSize; i++)
            list[i] = null;

        if (tlist != null)
            Arrays.fill(tlist, null);

        listSize = 0;
    }

    /**
     * Sorts the elements in the list according to their Comparator.
     * There are two reasons why lights should be resorted.
     * First, if the lights have moved, that means their distance to
     * the spatial changed.
     * Second, if the spatial itself moved, it means the distance from it to
     * the individual lights might have changed.
     *
     * @param transformChanged Whether the spatial's transform has changed
     */
    public void sort(boolean transformChanged) {
        if (listSize > 1) {
            // resize or populate our temporary array as necessary
            if (tlist == null || tlist.length != list.length) {
                tlist = list.clone();
            } else {
                System.arraycopy(list, 0, tlist, 0, list.length);
            }

            if (transformChanged){
                // check distance of each light
                for (int i = 0; i < listSize; i++){
                    list[i].computeLastDistance(owner);
                }
            }

            // now merge sort tlist into list
            SortUtil.msort(tlist, list, 0, listSize - 1, c);
        }
    }

    /**
     * Updates a "world-space" light list, using the spatial's local-space
     * light list and its parent's world-space light list.
     *
     * @param local the owner's local light list
     * @param parent the parent's world light list, or null at the scene root
     */
    public void update(LightList local, LightList parent){
        // clear the list as it will be reconstructed
        // using the arguments
        clear();

        while (list.length <= local.listSize){
            doubleSize();
        }

        // add the lights from the local list
        System.arraycopy(local.list, 0, list, 0, local.listSize);
        for (int i = 0; i < local.listSize; i++){
            distToOwner[i] = Float.NEGATIVE_INFINITY;
        }

        // if the spatial has a parent node, add the lights
        // from the parent list as well
        if (parent != null){
            int sz = local.listSize + parent.listSize;
            while (list.length <= sz)
                doubleSize();

            for (int i = 0; i < parent.listSize; i++){
                int p = i + local.listSize;
                list[p] = parent.list[i];
                distToOwner[p] = Float.NEGATIVE_INFINITY;
            }

            listSize = local.listSize + parent.listSize;
        }else{
            listSize = local.listSize;
        }
    }

    /**
     * Returns an iterator that can be used to iterate over this LightList.
     *
     * @return an iterator that can be used to iterate over this LightList.
     */
    public Iterator<Light> iterator() {
        return new Iterator<Light>(){
            int index = 0;

            public boolean hasNext() {
                return index < size();
            }

            public Light next() {
                if (!hasNext())
                    throw new NoSuchElementException();
                return list[index++];
            }

            public void remove() {
                // Remove the element returned by the last next() call and
                // step back so iteration continues with the shifted element.
                LightList.this.remove(--index);
            }
        };
    }

    @Override
    public LightList clone(){
        try{
            LightList clone = (LightList) super.clone();

            clone.owner = null;
            clone.list = list.clone();
            clone.distToOwner = distToOwner.clone();
            clone.tlist = null; // list used for sorting only

            return clone;
        }catch (CloneNotSupportedException ex){
            throw new AssertionError();
        }
    }

    public void write(CWExporter ex) throws IOException {
        OutputCapsule oc = ex.getCapsule(this);
        // NOTE: the owner spatial is deliberately not serialized.
        ArrayList<Light> lights = new ArrayList<Light>();
        for (int i = 0; i < listSize; i++){
            lights.add(list[i]);
        }
        oc.writeSavableArrayList(lights, "lights", null);
    }

    public void read(CWImporter im) throws IOException {
        InputCapsule ic = im.getCapsule(this);
        // NOTE: the owner spatial is deliberately not deserialized.
        List<Light> lights = ic.readSavableArrayList("lights", null);
        listSize = lights.size();

        // NOTE: make sure the array has a length of at least 1
        int arraySize = Math.max(DEFAULT_SIZE, listSize);
        list = new Light[arraySize];
        distToOwner = new float[arraySize];

        for (int i = 0; i < listSize; i++){
            list[i] = lights.get(i);
        }

        Arrays.fill(distToOwner, Float.NEGATIVE_INFINITY);
    }
}
| apache-2.0 |
chendong0120/com.heytz.ble | src/android/heytzBle/HeytzUUIDHelper.java | 1718 | // (c) 2104 Don Coleman
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.heytz.ble;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class HeytzUUIDHelper {
    // base UUID used to build 128 bit Bluetooth UUIDs
    public static final String UUID_BASE = "0000XXXX-0000-1000-8000-00805f9b34fb";

    // Matches the Bluetooth base-UUID shape so 16 bit IDs can be recovered.
    // Compiled once (fix: previously recompiled on every uuidToString call).
    private static final Pattern SHORT_UUID_PATTERN =
            Pattern.compile("0000(.{4})-0000-1000-8000-00805f9b34fb", Pattern.CASE_INSENSITIVE);

    /**
     * Parses a UUID from its 16 bit ("180A") or 128 bit string form,
     * optionally prefixed with "0x" or "0X".
     *
     * @param uuid a 4-hex-digit short UUID (with optional hex prefix) or a
     *             full 36-character UUID string
     * @return the parsed 128 bit {@link UUID}
     * @throws IllegalArgumentException if the string is not a valid UUID
     */
    public static UUID uuidFromString(String uuid) {
        // Strip a leading hex prefix. Fix: accept "0X" as well as "0x" —
        // the old lowercase-only check let "0X180A" fall through and fail
        // inside UUID.fromString.
        if (uuid.length() > 4 && uuid.regionMatches(true, 0, "0x", 0, 2)) {
            uuid = uuid.substring(2);
        }
        if (uuid.length() == 4) {
            // Expand a 16 bit ID into the Bluetooth base UUID.
            uuid = UUID_BASE.replace("XXXX", uuid.toLowerCase());
        }
        return UUID.fromString(uuid);
    }

    /**
     * Renders a UUID as a string, collapsing it to the 16 bit form when it
     * fits the Bluetooth base-UUID pattern.
     *
     * @param uuid the UUID to render
     * @return a 4-character short form when possible, else the full string
     */
    public static String uuidToString(UUID uuid) {
        String longUUID = uuid.toString();
        Matcher matcher = SHORT_UUID_PATTERN.matcher(longUUID);
        if (matcher.matches()) {
            // 16 bit UUID
            return matcher.group(1);
        } else {
            return longUUID;
        }
    }
}
| apache-2.0 |
dremio/dremio-oss | sabot/kernel/src/main/java/com/dremio/sabot/op/join/hash/HashJoinProbe.java | 2680 | /*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.sabot.op.join.hash;
import java.util.BitSet;
import java.util.List;
import org.apache.arrow.memory.ArrowBuf;
import org.apache.calcite.rel.core.JoinRelType;
import com.dremio.exec.compile.TemplateClassDefinition;
import com.dremio.exec.record.VectorAccessible;
import com.dremio.sabot.exec.context.FunctionContext;
import com.dremio.sabot.op.common.hashtable.HashTable;
// Probe-side contract of the hash join operator. Implementations are
// runtime-generated from the template class below.
public interface HashJoinProbe {
public static TemplateClassDefinition<HashJoinProbe> TEMPLATE_DEFINITION = new TemplateClassDefinition<HashJoinProbe>(HashJoinProbe.class, HashJoinProbeTemplate.class);
/* The probe side of the hash join can be in one of the following three states:
* 1. PROBE_PROJECT: Inner join case, we probe our hash table to see if we have a
* key match and if we do we project the record
* 2. PROJECT_RIGHT: Right Outer or Full Outer joins where we are projecting the records
* from the build side that did not match any records on the probe side. For Left outer
* case we handle it internally by projecting the record if there isn't a match on the build side
* 3. DONE: Once we have projected all possible records we are done
*/
public static enum ProbeState {
PROBE_PROJECT, PROJECT_RIGHT, DONE
}
// One-time wiring of the probe against the build/probe/outgoing batches,
// the hash table and the per-build-batch bookkeeping structures.
void setupHashJoinProbe(
FunctionContext functionContext,
VectorAccessible buildBatch,
VectorAccessible probeBatch,
VectorAccessible outgoing,
HashTable hashTable,
JoinRelType joinRelType,
List<BuildInfo> buildInfos,
List<ArrowBuf> startIndices,
List<BitSet> keyMatchBitVectors,
int maxHashTableIndex,
int targetRecordsPerBatch);
/**
* Project any remaining build items that were not matched. Only used when doing a FULL or RIGHT join.
* @return Negative output if records were output but batch wasn't completed. Positive output if batch was completed.
*/
int projectBuildNonMatches();
/**
* Probe with current batch.
* @return number of records matched. negative if we failed to output all records.
*/
int probeBatch();
}
| apache-2.0 |
hasithalakmal/RIP | RIP_API/Swagger_REST_API/modules/swagger-codegen/src/test/java/io/swagger/codegen/swaggeryaml/SwaggerYamlOptionsTest.java | 912 | package io.swagger.codegen.swaggeryaml;
import io.swagger.codegen.AbstractOptionsTest;
import io.swagger.codegen.CodegenConfig;
import io.swagger.codegen.languages.SwaggerYamlGenerator;
import io.swagger.codegen.options.SwaggerYamlOptionsProvider;
import mockit.Expectations;
import mockit.Tested;
// Options test for the swagger-yaml generator: verifies that the options
// declared by SwaggerYamlOptionsProvider are applied to the codegen config.
public class SwaggerYamlOptionsTest extends AbstractOptionsTest {

// JMockit-instantiated generator under test.
@Tested
private SwaggerYamlGenerator clientCodegen;

public SwaggerYamlOptionsTest() {
super(new SwaggerYamlOptionsProvider());
}

@Override
protected CodegenConfig getCodegenConfig() {
return clientCodegen;
}

@SuppressWarnings("unused")
@Override
protected void setExpectations() {
// Expect exactly one call applying the sort-params option from the provider.
new Expectations(clientCodegen) {{
clientCodegen.setSortParamsByRequiredFlag(Boolean.valueOf(SwaggerYamlOptionsProvider.SORT_PARAMS_VALUE));
times = 1;
}};
}
}
| apache-2.0 |
dadarom/dubbo | dubbo-rpc/dubbo-rpc-rmi/src/test/java/com/alibaba/dubbo/rpc/protocol/rmi/Type.java | 911 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.dubbo.rpc.protocol.rmi;
/**
 * Simple three-valued enum used as a fixture by the RMI protocol tests.
 * Constant names (including "Lower") are part of the serialized contract —
 * do not rename.
 */
public enum Type {
High, Normal, Lower
} | apache-2.0 |
mabrek/jetty | jetty-plus/src/main/java/org/eclipse/jetty/plus/webapp/PlusDecorator.java | 5551 | // ========================================================================
// Copyright (c) 2006-2009 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.plus.webapp;
import java.util.EventListener;
import javax.servlet.Filter;
import javax.servlet.Servlet;
import javax.servlet.ServletException;
import org.eclipse.jetty.plus.annotation.InjectionCollection;
import org.eclipse.jetty.plus.annotation.LifeCycleCallbackCollection;
import org.eclipse.jetty.plus.annotation.RunAsCollection;
import org.eclipse.jetty.server.handler.ContextHandler;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.servlet.ServletContextHandler.Decorator;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.webapp.WebAppContext;
/**
* WebAppDecorator
*
*
*/
// Applies the webapp's @RunAs, resource-injection and lifecycle-callback
// metadata (stored as context attributes) to servlet/filter/listener
// instances as the container creates and destroys them.
public class PlusDecorator implements Decorator
{
protected WebAppContext _context;

public PlusDecorator (WebAppContext context)
{
_context = context;
}

/* ------------------------------------------------------------ */
/**
* No decoration is applied to filter holders (only to instances).
* @see org.eclipse.jetty.servlet.ServletContextHandler.Decorator#decorateFilterHolder(org.eclipse.jetty.servlet.FilterHolder)
*/
public void decorateFilterHolder(FilterHolder filter) throws ServletException
{
}

/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.servlet.ServletContextHandler.Decorator#decorateFilterInstance(javax.servlet.Filter)
*/
public <T extends Filter> T decorateFilterInstance(T filter) throws ServletException
{
decorate(filter);
return filter;
}

/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.servlet.ServletContextHandler.Decorator#decorateListenerInstance(java.util.EventListener)
*/
public <T extends EventListener> T decorateListenerInstance(T listener) throws ServletException
{
decorate(listener);
return listener;
}

/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.servlet.ServletContextHandler.Decorator#decorateServletHolder(org.eclipse.jetty.servlet.ServletHolder)
*/
public void decorateServletHolder(ServletHolder holder) throws ServletException
{
decorate(holder);
}

/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.servlet.ServletContextHandler.Decorator#decorateServletInstance(javax.servlet.Servlet)
*/
public <T extends Servlet> T decorateServletInstance(T servlet) throws ServletException
{
decorate(servlet);
return servlet;
}

/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.servlet.ServletContextHandler.Decorator#destroyFilterInstance(javax.servlet.Filter)
*/
public void destroyFilterInstance(Filter f)
{
destroy(f);
}

/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.servlet.ServletContextHandler.Decorator#destroyServletInstance(javax.servlet.Servlet)
*/
public void destroyServletInstance(Servlet s)
{
destroy(s);
}

/**
* @see org.eclipse.jetty.servlet.ServletContextHandler.Decorator#destroyListenerInstance(java.util.EventListener)
*/
public void destroyListenerInstance(EventListener l)
{
destroy(l);
}

// Applies, in order: run-as metadata, resource injections, then the
// @PostConstruct callbacks. Each step is skipped if the corresponding
// context attribute is absent. Order matters: injections must complete
// before post-construct callbacks run.
protected void decorate (Object o)
throws ServletException
{
RunAsCollection runAses = (RunAsCollection)_context.getAttribute(RunAsCollection.RUNAS_COLLECTION);
if (runAses != null)
runAses.setRunAs(o);

InjectionCollection injections = (InjectionCollection)_context.getAttribute(InjectionCollection.INJECTION_COLLECTION);
if (injections != null)
injections.inject(o);

LifeCycleCallbackCollection callbacks = (LifeCycleCallbackCollection)_context.getAttribute(LifeCycleCallbackCollection.LIFECYCLE_CALLBACK_COLLECTION);
if (callbacks != null)
{
try
{
callbacks.callPostConstructCallback(o);
}
catch (Exception e)
{
throw new ServletException(e);
}
}
}

// Runs the @PreDestroy callbacks, if any are registered. Failures are
// logged rather than propagated: destruction is best-effort.
protected void destroy (Object o)
{
LifeCycleCallbackCollection callbacks = (LifeCycleCallbackCollection)_context.getAttribute(LifeCycleCallbackCollection.LIFECYCLE_CALLBACK_COLLECTION);
if (callbacks != null)
{
try
{
callbacks.callPreDestroyCallback(o);
}
catch (Exception e)
{
Log.warn("Destroying instance of "+o.getClass(), e);
}
}
}
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-robomaker/src/main/java/com/amazonaws/services/robomaker/model/transform/DescribeSimulationApplicationRequestProtocolMarshaller.java | 2864 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.robomaker.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.robomaker.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;
/**
* DescribeSimulationApplicationRequest Marshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
// Code-generated (see @Generated above) REST-JSON protocol marshaller:
// binds the request to POST /describeSimulationApplication and serializes
// its members via the companion DescribeSimulationApplicationRequestMarshaller.
public class DescribeSimulationApplicationRequestProtocolMarshaller implements
Marshaller<Request<DescribeSimulationApplicationRequest>, DescribeSimulationApplicationRequest> {

// Static operation binding: protocol, URI, HTTP method, payload shape.
private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.REST_JSON)
.requestUri("/describeSimulationApplication").httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true)
.serviceName("AWSRoboMaker").build();

private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;

public DescribeSimulationApplicationRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
this.protocolFactory = protocolFactory;
}

public Request<DescribeSimulationApplicationRequest> marshall(DescribeSimulationApplicationRequest describeSimulationApplicationRequest) {

if (describeSimulationApplicationRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}

try {
final ProtocolRequestMarshaller<DescribeSimulationApplicationRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(
SDK_OPERATION_BINDING, describeSimulationApplicationRequest);

protocolMarshaller.startMarshalling();
DescribeSimulationApplicationRequestMarshaller.getInstance().marshall(describeSimulationApplicationRequest, protocolMarshaller);
return protocolMarshaller.finishMarshalling();
} catch (Exception e) {
// Wrap any failure (serialization, factory) in the SDK's client exception type.
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
}
| apache-2.0 |
czmao/QRCard | qrcard_android/src/com/zxing/LocaleManager.java | 7109 | /*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zxing;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import java.util.Arrays;
import java.util.Collection;
import java.util.Locale;
import java.util.Map;
import java.util.HashMap;
/**
* Handles any locale-specific logic for the client.
*
* @author Sean Owen
*/
/**
 * Handles any locale-specific logic for the client: choosing the correct
 * Google top-level domain for web / product / book search, and selecting
 * the translated help assets.
 *
 * @author Sean Owen
 */
public final class LocaleManager {

  private static final String DEFAULT_TLD = "com";
  private static final String DEFAULT_COUNTRY = "US";
  private static final String DEFAULT_LANGUAGE = "en";

  /**
   * Locales (well, countries) where Google web search is available.
   * These should be kept in sync with our translations.
   */
  private static final Map<String,String> GOOGLE_COUNTRY_TLD;
  static {
    GOOGLE_COUNTRY_TLD = new HashMap<String,String>();
    GOOGLE_COUNTRY_TLD.put("AR", "com.ar"); // ARGENTINA
    GOOGLE_COUNTRY_TLD.put("AU", "com.au"); // AUSTRALIA
    GOOGLE_COUNTRY_TLD.put("BR", "com.br"); // BRAZIL
    GOOGLE_COUNTRY_TLD.put("BG", "bg"); // BULGARIA
    GOOGLE_COUNTRY_TLD.put(Locale.CANADA.getCountry(), "ca");
    GOOGLE_COUNTRY_TLD.put(Locale.CHINA.getCountry(), "cn");
    GOOGLE_COUNTRY_TLD.put("CZ", "cz"); // CZECH REPUBLIC
    GOOGLE_COUNTRY_TLD.put("DK", "dk"); // DENMARK
    GOOGLE_COUNTRY_TLD.put("FI", "fi"); // FINLAND
    GOOGLE_COUNTRY_TLD.put(Locale.FRANCE.getCountry(), "fr");
    GOOGLE_COUNTRY_TLD.put(Locale.GERMANY.getCountry(), "de");
    GOOGLE_COUNTRY_TLD.put("GR", "gr"); // GREECE
    GOOGLE_COUNTRY_TLD.put("HU", "hu"); // HUNGARY
    GOOGLE_COUNTRY_TLD.put("ID", "co.id"); // INDONESIA
    GOOGLE_COUNTRY_TLD.put("IL", "co.il"); // ISRAEL
    GOOGLE_COUNTRY_TLD.put(Locale.ITALY.getCountry(), "it");
    GOOGLE_COUNTRY_TLD.put(Locale.JAPAN.getCountry(), "co.jp");
    GOOGLE_COUNTRY_TLD.put(Locale.KOREA.getCountry(), "co.kr");
    GOOGLE_COUNTRY_TLD.put("NL", "nl"); // NETHERLANDS
    GOOGLE_COUNTRY_TLD.put("PL", "pl"); // POLAND
    GOOGLE_COUNTRY_TLD.put("PT", "pt"); // PORTUGAL
    GOOGLE_COUNTRY_TLD.put("RU", "ru"); // RUSSIA
    GOOGLE_COUNTRY_TLD.put("SK", "sk"); // SLOVAK REPUBLIC
    GOOGLE_COUNTRY_TLD.put("SI", "si"); // SLOVENIA
    GOOGLE_COUNTRY_TLD.put("ES", "es"); // SPAIN
    GOOGLE_COUNTRY_TLD.put("SE", "se"); // SWEDEN
    GOOGLE_COUNTRY_TLD.put("CH", "ch"); // SWITZERLAND
    GOOGLE_COUNTRY_TLD.put(Locale.TAIWAN.getCountry(), "tw");
    GOOGLE_COUNTRY_TLD.put("TR", "com.tr"); // TURKEY
    GOOGLE_COUNTRY_TLD.put(Locale.UK.getCountry(), "co.uk");
    GOOGLE_COUNTRY_TLD.put(Locale.US.getCountry(), "com");
  }

  /**
   * Google Product Search for mobile is available in fewer countries than web search. See here:
   * http://support.google.com/merchants/bin/answer.py?hl=en-GB&answer=160619
   */
  private static final Map<String,String> GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD;
  static {
    GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD = new HashMap<String,String>();
    GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD.put("AU", "com.au"); // AUSTRALIA
    //GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD.put(Locale.CHINA.getCountry(), "cn");
    GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD.put(Locale.FRANCE.getCountry(), "fr");
    GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD.put(Locale.GERMANY.getCountry(), "de");
    GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD.put(Locale.ITALY.getCountry(), "it");
    GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD.put(Locale.JAPAN.getCountry(), "co.jp");
    GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD.put("NL", "nl"); // NETHERLANDS
    GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD.put("ES", "es"); // SPAIN
    GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD.put("CH", "ch"); // SWITZERLAND
    GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD.put(Locale.UK.getCountry(), "co.uk");
    GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD.put(Locale.US.getCountry(), "com");
  }

  /**
   * Book search is offered everywhere that web search is available.
   */
  private static final Map<String,String> GOOGLE_BOOK_SEARCH_COUNTRY_TLD = GOOGLE_COUNTRY_TLD;

  /** Languages for which translated help assets are shipped. */
  private static final Collection<String> TRANSLATED_HELP_ASSET_LANGUAGES =
      Arrays.asList("de", "en", "es", "fr", "it", "ja", "ko", "nl", "pt", "ru", "zh-rCN", "zh-rTW");

  private LocaleManager() {}

  /**
   * @return country-specific TLD suffix appropriate for the current default locale
   *  (e.g. "co.uk" for the United Kingdom)
   */
  public static String getCountryTLD(Context context) {
    return doGetTLD(GOOGLE_COUNTRY_TLD, context);
  }

  /**
   * The same as above, but specifically for Google Product Search.
   * @return The top-level domain to use.
   */
  public static String getProductSearchCountryTLD(Context context) {
    return doGetTLD(GOOGLE_PRODUCT_SEARCH_COUNTRY_TLD, context);
  }

  /**
   * The same as above, but specifically for Google Book Search.
   * @return The top-level domain to use.
   */
  public static String getBookSearchCountryTLD(Context context) {
    return doGetTLD(GOOGLE_BOOK_SEARCH_COUNTRY_TLD, context);
  }

  /**
   * Does a given URL point to Google Book Search, regardless of domain.
   *
   * @param url The address to check.
   * @return True if this is a Book Search URL.
   */
  public static boolean isBookSearchUrl(String url) {
    // Fix: the original check only recognized http:// prefixes, so https://
    // book-search links were silently rejected. Accept both schemes.
    return url.startsWith("http://google.com/books")
        || url.startsWith("https://google.com/books")
        || url.startsWith("http://books.google.")
        || url.startsWith("https://books.google.");
  }

  /** @return ISO country of the default locale, or "US" when no locale is set. */
  private static String getSystemCountry() {
    Locale locale = Locale.getDefault();
    return locale == null ? DEFAULT_COUNTRY : locale.getCountry();
  }

  /**
   * @return language of the default locale ("en" when unset); Chinese is
   *  qualified with the country (e.g. "zh-rCN") to match asset folder names.
   */
  private static String getSystemLanguage() {
    Locale locale = Locale.getDefault();
    if (locale == null) {
      return DEFAULT_LANGUAGE;
    }
    String language = locale.getLanguage();
    // Special case Chinese
    if (Locale.SIMPLIFIED_CHINESE.getLanguage().equals(language)) {
      return language + "-r" + getSystemCountry();
    }
    return language;
  }

  /** @return the system language if help assets are translated for it, else "en". */
  public static String getTranslatedAssetLanguage() {
    String language = getSystemLanguage();
    return TRANSLATED_HELP_ASSET_LANGUAGES.contains(language) ? language : DEFAULT_LANGUAGE;
  }

  /** Looks up the TLD for the effective country, falling back to "com". */
  private static String doGetTLD(Map<String,String> map, Context context) {
    String tld = map.get(getCountry(context));
    return tld == null ? DEFAULT_TLD : tld;
  }

  /**
   * @return the user's preference-overridden search country if set (and not
   *  the "-" placeholder), otherwise the system locale's country.
   */
  public static String getCountry(Context context) {
    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
    String countryOverride = prefs.getString(PreferencesActivity.KEY_SEARCH_COUNTRY, null);
    if (countryOverride != null && countryOverride.length() > 0 && !"-".equals(countryOverride)) {
      return countryOverride;
    }
    return getSystemCountry();
  }
}
| apache-2.0 |
chaupal/jp2p | Workspace/net.jp2p.builder/src/net/jp2p/builder/context/Jp2pServiceManager.java | 9091 | package net.jp2p.builder.context;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.logging.Level;
import java.util.logging.Logger;
import net.jp2p.builder.activator.Jp2pBundleActivator;
import net.jp2p.container.builder.IFactoryBuilder;
import net.jp2p.container.context.IJp2pFactoryCollection;
import net.jp2p.container.context.IJp2pServiceManager;
import net.jp2p.container.context.IServiceManagerListener;
import net.jp2p.container.context.Jp2pLoaderEvent;
import net.jp2p.container.context.IContextLoaderListener;
import net.jp2p.container.context.Jp2pServiceDescriptor;
import net.jp2p.container.context.Jp2pServiceLoader;
import net.jp2p.container.context.ServiceManagerEvent;
import net.jp2p.container.factory.IPropertySourceFactory;
public class Jp2pServiceManager implements IJp2pServiceManager{
public static final String S_CONTEXT_FOUND = "The following context was found and registered: ";
public static final String S_INFO_BUILDING = "\n\nAll the required services have been found. Start to build the container: ";
private Jp2pServiceLoader loader;
private Collection<FactoryContainer> containers;
private Collection<IServiceManagerListener> listeners;
private Jp2pBundleActivator activator;
private Collection<ContextServiceParser> parsers;
private boolean completed;
private IContextLoaderListener listener = new IContextLoaderListener() {
@Override
public void notifyRegisterContext(Jp2pLoaderEvent event) {
logger.info( "Builder registered: " + event.getBuilder().getName() );
switch( event.getType() ){
case REGISTERED:
if( completed )
break;
updateServiceDescriptors( event.getBuilder() );
completeRegistration();
break;
case UNREGISTERED:
updateServiceDescriptors( event.getBuilder() );
break;
}
}
@Override
public void notifyUnregisterContext(Jp2pLoaderEvent event) {
Collection<FactoryContainer> temp = new ArrayList<FactoryContainer>( containers );
for( FactoryContainer container: temp ){
if( !container.containsFactory() )
continue;
Jp2pServiceDescriptor info = container.getDescriptor();
if( event.getBuilder().hasFactory( info ))
container.removeFactory( event.getBuilder().getFactory(info ));
}
}
};
private Logger logger = Logger.getLogger( this.getClass().getName() );
public Jp2pServiceManager( Jp2pBundleActivator activator, Jp2pServiceLoader contextLoader ) {
this.loader = contextLoader;
this.activator = activator;
containers = new ArrayList<FactoryContainer>();
listeners = new ArrayList<IServiceManagerListener>();
parsers = new ArrayList<ContextServiceParser>();
this.completed = false;
}
@Override
public String getName() {
return activator.getBundleId();
}
/**
* Update the service descriptor objects that are needed to build the JP2P container,
* by checking the available services
* @param builder
*/
protected void updateServiceDescriptors( IJp2pFactoryCollection builder ) {
for( FactoryContainer container: containers ){
Jp2pServiceDescriptor info = container.getDescriptor();
if( builder.hasFactory( info ) ){
container.addFactory( builder.getName(), builder.getFactory(info));
this.isCompleted();
}
}
}
/**
* Sets and returns true if the registered builders are able to build all the factories from the
* list of descriptors
*/
private boolean isCompleted() {
Logger log = Logger.getLogger( this.getClass().getName() );
for( FactoryContainer container: containers ){
Jp2pServiceDescriptor info = container.getDescriptor();
if( info.isOptional())
continue;
if( !container.containsFactory() ){
log.log( Level.WARNING, "waiting for: " + info.getName());
this.completed = false;
return completed;
}else{
log.log( Level.FINE, "Service registered: " + info.getName());
}
}
log.log( Level.INFO, "Building completed: " + activator.getBundleId() );
this.completed = true;
return this.completed;
}
/**
* Complete the registration
*/
protected void completeRegistration(){
if( !completed )
return;
logger.info(S_INFO_BUILDING + activator.getBundleId() + "\n");
for( IServiceManagerListener listener: listeners )
listener.notifyContainerBuilt( new ServiceManagerEvent( this ));
}
@Override
public boolean hasFactory(Jp2pServiceDescriptor descriptor) {
for( FactoryContainer container: containers ){
if( !container.containsFactory() )
continue;
if( container.getDescriptor().equals( descriptor ))
return true;
}
return this.loader.hasFactory(descriptor);
}
/**
* Returns true if the manager supports a factory with the given context and name
* @param componentName
* @return
*/
@Override
public IPropertySourceFactory getFactory( Jp2pServiceDescriptor descriptor ){
for( FactoryContainer container: containers ){
if( container.containsFactory() && container.getDescriptor().equals( descriptor ))
return container.getFirst();
}
return this.loader.getFactory( descriptor);
}
public void addListener( IServiceManagerListener listener ){
this.listeners.add( listener );
}
public void removeListener( IServiceManagerListener listener ){
this.listeners.remove( listener );
}
/**
* First we load the service descriptors
* by checking the available services
* @param builder
*/
protected void loadServiceDescriptors() {
//We parse the jp2p xml file to see which services we need, and then include the contexts we find
try {
extendParsers( activator.getClass() );
extendParsers( Jp2pServiceManager.class );
} catch (IOException e) {
e.printStackTrace();
}
//first we parse the xml files to determine which services we need
for(ContextServiceParser parser: parsers ){
Collection<Jp2pServiceDescriptor> descriptors = parser.parse();
for( Jp2pServiceDescriptor descriptor: descriptors )
containers.add( new FactoryContainer( descriptor ));
}
}
/**
* Open the manager
* @return
*/
public boolean open(){
this.loadServiceDescriptors();
this.updateServiceDescriptors( this.loader );
completeRegistration();
loader.addContextLoaderListener(listener);
return true;
}
public void close(){
loader.removeContextLoaderListener(listener);
listener = null;
}
/**
* Allow additional builders to extend the primary builder, by looking at resources with the
* similar name and location, for instance provided by fragments
* @param clss
* @param containerBuilder
* @throws IOException
*/
private void extendParsers( Class<?> clss ) throws IOException{
Enumeration<URL> enm = clss.getClassLoader().getResources( IFactoryBuilder.S_DEFAULT_LOCATION );
while( enm.hasMoreElements()){
URL url = enm.nextElement();
parsers.add( new ContextServiceParser( url, clss ));
}
}
/**
* Lists the correct factories for the given descriptor
* @author Kees
*
*/
private class FactoryContainer{
private Jp2pServiceDescriptor descriptor;
private Collection<IPropertySourceFactory> factories;
public FactoryContainer( Jp2pServiceDescriptor descriptor ) {
super();
this.descriptor = descriptor;
factories = new ArrayList<IPropertySourceFactory>();
}
final Jp2pServiceDescriptor getDescriptor() {
return descriptor;
}
void addFactory( String context, IPropertySourceFactory factory ){
if(( factory == null ) || ( factories.contains( factory )))
return;
descriptor.setContext(context);
this.factories.add( factory );
}
void removeFactory( IPropertySourceFactory factory ){
this.factories.remove( factory );
}
public boolean containsFactory(){
return !this.factories.isEmpty();
}
public IPropertySourceFactory getFirst(){
if( factories.isEmpty())
return null;
IPropertySourceFactory factory = this.factories.iterator().next();
return (IPropertySourceFactory) newFactory( factory );
}
}
/**
* Create a new factory
* @return
*/
public static IPropertySourceFactory newFactory( IPropertySourceFactory factory ){
try {
Constructor<?> constructor = factory.getClass().getConstructor();
IPropertySourceFactory fact = (IPropertySourceFactory) constructor.newInstance();
return fact;
} catch (InstantiationException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (NoSuchMethodException e) {
e.printStackTrace();
} catch (SecurityException e) {
e.printStackTrace();
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
e.printStackTrace();
}
return null;
}
} | apache-2.0 |
motivecodex/Java | Web Hibernate Simple Usersystem/src/main/java/controllers/UserEditController.java | 4380 | package controllers;
import java.io.*;
import java.util.LinkedList;
import java.util.List;
import javax.servlet.*;
import javax.servlet.http.*;
import models.User;
/**
 * Servlet handling the "new user" / "edit user" form: GET renders the form
 * (prefilled when an id is supplied), POST stores the created or updated
 * user in the HTTP session under the "user" attribute.
 * <p>
 * Fixes: raw {@code LinkedList} types replaced with {@code List<User>},
 * NPE in doGet when "id" is present but the session holds no user list,
 * and a missing {@code serialVersionUID}.
 */
public class UserEditController extends HttpServlet {

    private static final long serialVersionUID = 1L;

    private static final String newTitle = "New user";
    private static final String editTitle = "Edit user";

    /* HTTP GET request */
    @Override
    public void doGet(HttpServletRequest request, HttpServletResponse response)
            throws IOException, ServletException {
        if (request.getParameter("id") != null) {
            // If id is given, user data is loaded.
            long id = Long.parseLong(request.getParameter("id"));
            request.setAttribute("id", id);
            // Retrieve session object out of the request.
            HttpSession sessie = request.getSession();
            // The attribute is stored as a List<User> by doPost; the cast is
            // unchecked because HttpSession attributes are untyped.
            @SuppressWarnings("unchecked")
            List<User> user = (List<User>) sessie.getAttribute("user");
            // Guard: with no users stored yet, the original code threw an NPE here.
            if (user != null) {
                for (User userTemp : user) {
                    // If id matches user, form is filled in.
                    if (userTemp.getUserId() == id) {
                        request.setAttribute("firstName", userTemp.getFirstName());
                        request.setAttribute("lastName", userTemp.getLastName());
                        request.setAttribute("email", userTemp.getEmail());
                    }
                }
            }
            redirect(request, response, editTitle); // Redirects to edit page.
        } else {
            redirect(request, response, newTitle); // Else to new page.
        }
    }

    @Override
    public void doPost(HttpServletRequest request, HttpServletResponse response)
            throws IOException, ServletException {
        // Retrieve session object out of request.
        HttpSession sessie = request.getSession();
        // If parameter 'id' is not null, this is an update of an existing user.
        boolean isUserUpdate = request.getParameter("id") != null;
        // Retrieve users (unchecked: HttpSession attributes are untyped).
        @SuppressWarnings("unchecked")
        List<User> user = (List<User>) sessie.getAttribute("user");
        // Check if user list is not null, else create new list for this session.
        if (user == null) {
            user = new LinkedList<User>();
        }
        // Form to user object.
        User formUser = getUserFromRequest(request);
        // If user update, then retrieve user and edit data.
        if (isUserUpdate) {
            for (User userTemp : user) {
                // If userId matches, update the stored user in place.
                if (userTemp.getUserId() == formUser.getUserId()) {
                    userTemp.setFirstName(formUser.getFirstName());
                    userTemp.setLastName(formUser.getLastName());
                    userTemp.setEmail(formUser.getEmail());
                }
            }
        } else {
            // Else add user with new (practically unique) id.
            long uniekId = System.nanoTime();
            formUser.setUserId(uniekId);
            user.add(formUser);
        }
        sessie.setAttribute("user", user);
        // Redirect back to client.
        response.sendRedirect("../user");
    }

    /** Forwards to edit_user.jsp with the given page title. */
    private void redirect(HttpServletRequest request, HttpServletResponse response, String title)
            throws ServletException, IOException {
        request.setAttribute("pageTitle", title);
        // Send the result of edit_user.jsp back to the client.
        String address = "/edit_user.jsp";
        RequestDispatcher dispatcher = request.getRequestDispatcher(address);
        dispatcher.forward(request, response);
    }

    /**
     * Creates a user object with parameters out of the HTTP request; absent
     * parameters leave the corresponding field unset.
     */
    private User getUserFromRequest(HttpServletRequest request) {
        User user = new User();
        if (request.getParameter("id") != null && !request.getParameter("id").isEmpty()) {
            user.setUserId(Long.parseLong(request.getParameter("id")));
        }
        if (request.getParameter("firstName") != null) {
            user.setFirstName(request.getParameter("firstName"));
        }
        if (request.getParameter("lastName") != null) {
            user.setLastName(request.getParameter("lastName"));
        }
        if (request.getParameter("email") != null) {
            user.setEmail(request.getParameter("email"));
        }
        return user;
    }
}
| apache-2.0 |
iVCE/RDFS | src/core/org/apache/hadoop/fs/FsShell.java | 80667 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.GZIPInputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.shell.CommandFormat;
import org.apache.hadoop.fs.shell.Count;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.util.StringUtils;
/** Provide command line access to a FileSystem. */
public class FsShell extends Configured implements Tool {
  /** Lazily initialized default file system; access via {@link #getFS()}. */
  protected FileSystem fs;
  /** Lazily initialized trash facility; access via {@link #getTrash()}. */
  private Trash trash;
  /** Date format used in listing output.
   *  NOTE(review): SimpleDateFormat is not thread-safe; these shared static
   *  instances assume single-threaded use — confirm before reusing elsewhere. */
  public static final SimpleDateFormat dateForm =
    new SimpleDateFormat("yyyy-MM-dd HH:mm");
  /** Timestamp format rendered in UTC (see the static initializer below). */
  protected static final SimpleDateFormat modifFmt =
    new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
  // Layout constant; its use is outside this chunk.
  static final int BORDER = 2;
  static {
    // Render modification times in UTC regardless of the local time zone.
    modifFmt.setTimeZone(TimeZone.getTimeZone("UTC"));
  }
  // Usage strings printed when a command's arguments fail to parse.
  static final String SETREP_SHORT_USAGE="-setrep [-R] [-w] <rep> <path/file ...>";
  static final int SETREP_MAX_PATHS = 1024;
  static final String GET_SHORT_USAGE = "-get [-ignoreCrc] [-crc] [-gencrc] <src> <localdst>";
  static final String COPYTOLOCAL_SHORT_USAGE = GET_SHORT_USAGE.replace(
      "-get", "-copyToLocal");
  static final String HEAD_USAGE="-head <file>";
  static final String TAIL_USAGE="-tail [-f] <file>";
  /**
   * Creates a shell with no configuration; one must be supplied later via
   * {@link Configured#setConf} before file-system operations are used.
   */
  public FsShell() {
    this(null);
  }
  /**
   * Creates a shell bound to the given configuration. The file system and
   * trash helpers are created lazily on first use.
   *
   * @param conf Hadoop configuration (may be null)
   */
  public FsShell(Configuration conf) {
    super(conf);
    fs = null;
    trash = null;
  }
protected FileSystem getFS() throws IOException {
if (this.fs != null) {
return this.fs;
}
synchronized(this) {
if (this.fs == null) {
this.fs = FileSystem.get(getConf());
}
}
return this.fs;
}
protected Trash getTrash() throws IOException{
if (this.trash != null) {
return this.trash;
}
synchronized(this) {
if (this.trash == null) {
this.trash = new Trash(getConf());
}
}
return this.trash;
}
  /**
   * Prepares the shell for use by putting the configuration into quiet mode.
   *
   * @throws IOException declared for subclasses; this implementation does not throw
   */
  public void init() throws IOException {
    getConf().setQuietMode(true);
  }
  /** Options recognized by the ls family of commands. */
  public enum LsOption {
    Recursive,
    WithBlockSize
  };
/**
* Copies from stdin to the indicated file.
*/
private void copyFromStdin(Path dst, FileSystem dstFs) throws IOException {
if (dstFs.isDirectory(dst)) {
throw new IOException("When source is stdin, destination must be a file.");
}
if (dstFs.exists(dst)) {
throw new IOException("Target " + dst.toString() + " already exists.");
}
FSDataOutputStream out = dstFs.create(dst);
try {
IOUtils.copyBytes(System.in, out, getConf(), false);
}
finally {
out.close();
}
}
  /**
   * Streams {@code in} to stdout; the stream is closed even if the copy fails.
   */
  private void printToStdout(InputStream in) throws IOException {
    try {
      IOUtils.copyBytes(in, System.out, getConf(), false);
    } finally {
      in.close();
    }
  }
  /**
   * Streams {@code in} to stdout while accumulating a CRC of the copied
   * bytes, then prints that checksum (labelled with {@code file}) to stderr.
   * The input stream is always closed.
   */
  private void printToStdoutAndCRCToStderr(InputStream in, Path file) throws IOException {
    try {
      long checksum = IOUtils.copyBytesAndGenerateCRC(in, System.out, getConf(), false);
      FileUtil.printChecksumToStderr(checksum, file);
    } finally {
      in.close();
    }
  }
/**
* Add local files to the indicated FileSystem name. src is kept.
*/
void copyFromLocal(Path[] srcs, String dstf) throws IOException {
Path dstPath = new Path(dstf);
FileSystem dstFs = dstPath.getFileSystem(getConf());
if (srcs.length == 1 && srcs[0].toString().equals("-"))
copyFromStdin(dstPath, dstFs);
else
dstFs.copyFromLocalFile(false, false, srcs, dstPath);
}
  /**
   * Add local files to the indicated FileSystem name; the local sources are
   * removed (moved, not copied).
   *
   * @param srcs local source paths
   * @param dstf destination path name in the target file system
   * @throws IOException if the move fails
   */
  void moveFromLocal(Path[] srcs, String dstf) throws IOException {
    Path dstPath = new Path(dstf);
    FileSystem dstFs = dstPath.getFileSystem(getConf());
    dstFs.moveFromLocalFile(srcs, dstPath);
  }
  /** Single-source convenience overload of {@link #moveFromLocal(Path[], String)}. */
  void moveFromLocal(Path src, String dstf) throws IOException {
    moveFromLocal((new Path[]{src}), dstf);
  }
  /**
   * Obtain the indicated files that match the file pattern <i>srcf</i>
   * and copy them to the local name; the sources are kept.
   * When copying multiple files, the destination must be a directory,
   * otherwise an IOException is thrown. A destination of "-" prints the
   * sources to stdout instead. Understands -crc (copy .crc sidecars),
   * -ignoreCrc (skip verification) and -gencrc (print a checksum).
   *
   * @param argv full command-line token array
   * @param pos ignore everything before argv[pos]
   * @throws IOException if the copy fails
   * @see org.apache.hadoop.fs.FileSystem#globStatus
   */
  void copyToLocal(String[]argv, int pos) throws IOException {
    CommandFormat cf = new CommandFormat("copyToLocal", 2,2,"crc","ignoreCrc", "gencrc");
    String srcstr = null;
    String dststr = null;
    try {
      List<String> parameters = cf.parse(argv, pos);
      srcstr = parameters.get(0);
      dststr = parameters.get(1);
    }
    catch(IllegalArgumentException iae) {
      System.err.println("Usage: java FsShell " + GET_SHORT_USAGE);
      throw iae;
    }
    boolean copyCrc = cf.getOpt("crc");
    final boolean genCrc = cf.getOpt("gencrc");
    final boolean verifyChecksum = !cf.getOpt("ignoreCrc");
    if (dststr.equals("-")) {
      // Destination "-": stream to stdout; -crc makes no sense there.
      if (copyCrc) {
        System.err.println("-crc option is not valid when destination is stdout.");
      }
      cat(srcstr, verifyChecksum, genCrc);
    } else {
      File dst = new File(dststr);
      Path srcpath = new Path(srcstr);
      FileSystem srcFS = getSrcFileSystem(srcpath, verifyChecksum);
      // -crc only works when the source keeps .crc sidecar files.
      if (copyCrc && !(srcFS instanceof ChecksumFileSystem)) {
        System.err.println("-crc option is not valid when source file system " +
            "does not have crc files. Automatically turn the option off.");
        copyCrc = false;
      }
      FileStatus[] srcs = srcFS.globStatus(srcpath);
      boolean dstIsDir = dst.isDirectory();
      if (srcs.length > 1 && !dstIsDir) {
        throw new IOException("When copying multiple files, "
                              + "destination should be a directory.");
      }
      for (FileStatus status : srcs) {
        Path p = status.getPath();
        // Into the directory under the source's name, or directly onto dst.
        File f = dstIsDir? new File(dst, p.getName()): dst;
        copyToLocal(srcFS, p, f, copyCrc, genCrc);
      }
    }
  }
  /**
   * Return the {@link FileSystem} specified by src and the conf.
   * If the {@link FileSystem} supports checksums, set verifyChecksum.
   *
   * @param src path whose owning file system is wanted
   * @param verifyChecksum whether reads should verify checksums
   * @return the configured file system
   * @throws IOException if the file system cannot be obtained
   */
  private FileSystem getSrcFileSystem(Path src, boolean verifyChecksum
      ) throws IOException {
    FileSystem srcFs = src.getFileSystem(getConf());
    srcFs.setVerifyChecksum(verifyChecksum);
    return srcFs;
  }
  /**
   * The prefix for the tmp file used in copyToLocal.
   * It must be at least three characters long, required by
   * {@link java.io.File#createTempFile(String, String, File)}.
   */
  static final String COPYTOLOCAL_PREFIX = "_copyToLocal_";
  /**
   * Copy a source file (or, recursively, a directory) from a given file
   * system to a local destination. Files are first copied to a temp file in
   * the destination directory and only renamed into place on success.
   *
   * @param srcFS source file system
   * @param src source path
   * @param dst local destination
   * @param copyCrc also copy the .crc sidecar files?
   * @param genCrc print a generated checksum for each copied file?
   * @throws IOException if some IO failed
   */
  private void copyToLocal(final FileSystem srcFS, final Path src,
                           final File dst, final boolean copyCrc,
                           final boolean genCrc)
    throws IOException {
    /* Keep the structure similar to ChecksumFileSystem.copyToLocal().
     * Ideally these two should just invoke FileUtil.copy() and not repeat
     * the recursion here. Of course, copy() should support two more options:
     * copyCrc and useTmpFile (maybe useTmpFile need not be an option).
     */
    if (!srcFS.getFileStatus(src).isDir()) {
      if (dst.exists()) {
        // match the error message in FileUtil.checkDest():
        throw new IOException("Target " + dst + " already exists");
      }
      // use absolute name so that tmp file is always created under dest dir
      File tmp = FileUtil.createLocalTempFile(dst.getAbsoluteFile(),
                                              COPYTOLOCAL_PREFIX, true);
      if (!FileUtil.copy(srcFS, src, tmp, false, srcFS.getConf(), genCrc)) {
        throw new IOException("Failed to copy " + src + " to " + dst);
      }
      // Only expose dst once the copy fully succeeded.
      if (!tmp.renameTo(dst)) {
        throw new IOException("Failed to rename tmp file " + tmp +
                              " to local destination \"" + dst + "\".");
      }
      if (copyCrc) {
        if (!(srcFS instanceof ChecksumFileSystem)) {
          throw new IOException("Source file system does not have crc files");
        }
        ChecksumFileSystem csfs = (ChecksumFileSystem) srcFS;
        File dstcs = FileSystem.getLocal(srcFS.getConf())
          .pathToFile(csfs.getChecksumFile(new Path(dst.getCanonicalPath())));
        // Recurse once for the .crc sidecar itself (with copyCrc=false).
        copyToLocal(csfs.getRawFileSystem(), csfs.getChecksumFile(src),
                    dstcs, false, genCrc);
      }
    } else {
      // once FileUtil.copy() supports tmp file, we don't need to mkdirs().
      dst.mkdirs();
      for(FileStatus path : srcFS.listStatus(src)) {
        copyToLocal(srcFS, path.getPath(),
                    new File(dst, path.getPath().getName()), copyCrc,
                    genCrc);
      }
    }
  }
  /**
   * Get all the files in the directories that match the source file pattern
   * and merge them into only one file on the local fs; srcf is kept.
   *
   * @param srcf a file pattern specifying source files
   * @param dst a destination local file/directory
   * @throws IOException if the merge fails
   * @see org.apache.hadoop.fs.FileSystem#globStatus
   */
  void copyMergeToLocal(String srcf, Path dst) throws IOException {
    copyMergeToLocal(srcf, dst, false);
  }
/**
* Get all the files in the directories that match the source file pattern
* and merge and sort them to only one file on local fs
* srcf is kept.
*
* Also adds a string between the files (useful for adding \n
* to a text file)
* @param srcf: a file pattern specifying source files
* @param dstf: a destination local file/directory
* @param endline: if an end of line character is added to a text file
* @exception: IOException
* @see org.apache.hadoop.fs.FileSystem.globStatus
*/
void copyMergeToLocal(String srcf, Path dst, boolean endline) throws IOException {
Path srcPath = new Path(srcf);
FileSystem srcFs = srcPath.getFileSystem(getConf());
Path [] srcs = FileUtil.stat2Paths(srcFs.globStatus(srcPath),
srcPath);
for(int i=0; i<srcs.length; i++) {
if (endline) {
FileUtil.copyMerge(srcFs, srcs[i],
FileSystem.getLocal(getConf()), dst, false, getConf(), "\n");
} else {
FileUtil.copyMerge(srcFs, srcs[i],
FileSystem.getLocal(getConf()), dst, false, getConf(), null);
}
}
}
  /**
   * Obtain the indicated file and copy it to the local name, removing srcf.
   * <p>
   * NOTE: this is a stub — it only prints a "not implemented" message to
   * stderr; both parameters are currently unused.
   */
  void moveToLocal(String srcf, Path dst) throws IOException {
    System.err.println("Option '-moveToLocal' is not implemented yet.");
  }
  /**
   * Fetch all files that match the file pattern and display their content
   * on stdout. Non-matching entries produce delayed exceptions (all other
   * matches are still printed first), mimicking Unix cat.
   *
   * @param src a file pattern specifying source files
   * @param verifyChecksum whether reads should verify checksums
   * @param genCrc if true, also print a generated checksum to stderr
   * @throws IOException if a source is a directory or cannot be read
   * @see org.apache.hadoop.fs.FileSystem#globStatus
   */
  void cat(String src, boolean verifyChecksum, final boolean genCrc) throws IOException {
    //cat behavior in Linux
    //  [~/1207]$ ls ?.txt
    //  x.txt  z.txt
    //  [~/1207]$ cat x.txt y.txt z.txt
    //  xxx
    //  cat: y.txt: No such file or directory
    //  zzz
    Path srcPattern = new Path(src);
    new DelayedExceptionThrowing() {
      @Override
      void process(Path p, FileSystem srcFs) throws IOException {
        if (srcFs.getFileStatus(p).isDir()) {
          throw new IOException("Source must be a file.");
        }
        if (genCrc) {
          printToStdoutAndCRCToStderr(srcFs.open(p), p);
        } else {
          printToStdout(srcFs.open(p));
        }
      }
    }.globAndProcess(srcPattern, getSrcFileSystem(srcPattern, verifyChecksum));
  }
  /**
   * Adapts a SequenceFile to an InputStream that yields one
   * "key&lt;TAB&gt;value&lt;NEWLINE&gt;" text line per record.
   * NOTE(review): key/val use raw Writable types and String.getBytes() uses
   * the platform default charset — confirm that is acceptable for this CLI.
   */
  private class TextRecordInputStream extends InputStream {
    SequenceFile.Reader r;    // underlying record reader
    WritableComparable key;   // reusable key holder
    Writable val;             // reusable value holder
    DataInputBuffer inbuf;    // bytes of the current rendered record
    DataOutputBuffer outbuf;  // scratch buffer for rendering a record
    public TextRecordInputStream(FileStatus f) throws IOException {
      r = new SequenceFile.Reader(getFS(), f.getPath(), getConf());
      key = ReflectionUtils.newInstance(r.getKeyClass().asSubclass(WritableComparable.class),
                                        getConf());
      val = ReflectionUtils.newInstance(r.getValueClass().asSubclass(Writable.class),
                                        getConf());
      inbuf = new DataInputBuffer();
      outbuf = new DataOutputBuffer();
    }
    /** Returns the next byte, rendering a new record when the buffer is drained. */
    public int read() throws IOException {
      int ret;
      if (null == inbuf || -1 == (ret = inbuf.read())) {
        if (!r.next(key, val)) {
          return -1;  // end of SequenceFile
        }
        // Render "key<TAB>value<NEWLINE>" into outbuf, then serve from inbuf.
        byte[] tmp = key.toString().getBytes();
        outbuf.write(tmp, 0, tmp.length);
        outbuf.write('\t');
        tmp = val.toString().getBytes();
        outbuf.write(tmp, 0, tmp.length);
        outbuf.write('\n');
        inbuf.reset(outbuf.getData(), outbuf.getLength());
        outbuf.reset();
        ret = inbuf.read();
      }
      return ret;
    }
  }
  /**
   * Opens {@code p} and sniffs its leading bytes to pick a reader: gzip data
   * (magic 0x1f8b, RFC 1952) is wrapped in a GZIPInputStream; SequenceFiles
   * (magic "SEQ") are rendered as tab-separated key/value text; anything
   * else is returned as a plain stream rewound to offset 0.
   */
  private InputStream forMagic(Path p, FileSystem srcFs) throws IOException {
    FSDataInputStream i = srcFs.open(p);
    switch(i.readShort()) {
      case 0x1f8b: // RFC 1952
        i.seek(0);
        return new GZIPInputStream(i);
      case 0x5345: // 'S' 'E'
        if (i.readByte() == 'Q') {
          // Full "SEQ" magic: reopen through the SequenceFile adapter.
          i.close();
          return new TextRecordInputStream(srcFs.getFileStatus(p));
        }
        break;
    }
    i.seek(0);
    return i;
  }
  /**
   * Prints every file matching the pattern to stdout as text, transparently
   * decoding gzip and SequenceFile sources (see {@link #forMagic}).
   *
   * @param srcf a file pattern specifying source files
   * @throws IOException if a source is a directory or cannot be read
   */
  void text(String srcf) throws IOException {
    Path srcPattern = new Path(srcf);
    new DelayedExceptionThrowing() {
      @Override
      void process(Path p, FileSystem srcFs) throws IOException {
        if (srcFs.isDirectory(p)) {
          throw new IOException("Source must be a file.");
        }
        printToStdout(forMagic(p, srcFs));
      }
    }.globAndProcess(srcPattern, srcPattern.getFileSystem(getConf()));
  }
private InputStream decompress(Path p, FileSystem srcFs) throws IOException {
CompressionCodecFactory factory = new CompressionCodecFactory(getConf());
CompressionCodec codec = factory.getCodec(p);
InputStream in = srcFs.open(p);
if (codec == null) {
throw new IOException("Cannot find codec for " + p);
}
return codec.createInputStream(in);
}
void decompress(String srcf) throws IOException {
Path srcPattern = new Path(srcf);
new DelayedExceptionThrowing() {
@Override
void process(Path p, FileSystem srcFs) throws IOException {
if (srcFs.isDirectory(p)) {
throw new IOException("Source must be a file.");
}
printToStdout(decompress(p, srcFs));
}
}.globAndProcess(srcPattern, srcPattern.getFileSystem(getConf()));
}
/**
* Parse the incoming command string
* @param cmd
* @param pos ignore anything before this pos in cmd
* @throws IOException
*/
private void setReplication(String[] cmd, int pos) throws IOException {
final int minArgs = 2; // We need the replication and at least one path.
CommandFormat c =
new CommandFormat("setrep", minArgs, SETREP_MAX_PATHS, "R", "w");
short rep = 0;
List<String> dsts = null;
try {
List<String> parameters = c.parse(cmd, pos);
rep = Short.parseShort(parameters.get(0));
dsts = parameters.subList(1, parameters.size());
} catch (NumberFormatException nfe) {
System.err.println("Illegal replication, a positive integer expected");
throw nfe;
}
catch(IllegalArgumentException iae) {
System.err.println("Usage: java FsShell " + SETREP_SHORT_USAGE);
throw iae;
}
if (rep < 1) {
System.err.println("Cannot set replication to: " + rep);
throw new IllegalArgumentException("replication must be >= 1");
}
List<Path> waitList = c.getOpt("w")? new ArrayList<Path>(): null;
for (String dst: dsts) {
setReplication(rep, dst, c.getOpt("R"), waitList);
}
if (waitList != null) {
waitForReplication(waitList, rep);
}
}
/**
* Wait for all files in waitList to have replication number equal to rep.
* @param waitList The files are waited for.
* @param rep The new replication number.
* @throws IOException IOException
*/
void waitForReplication(List<Path> waitList, int rep) throws IOException {
for(Path f : waitList) {
System.out.print("Waiting for " + f + " ...");
System.out.flush();
boolean printWarning = false;
FileStatus status = getFS().getFileStatus(f);
long len = status.getLen();
for(boolean done = false; !done; ) {
BlockLocation[] locations = getFS().getFileBlockLocations(status, 0, len);
int i = 0;
for(; i < locations.length &&
locations[i].getHosts().length == rep; i++)
if (!printWarning && locations[i].getHosts().length > rep) {
System.out.println("\nWARNING: the waiting time may be long for "
+ "DECREASING the number of replication.");
printWarning = true;
}
done = i == locations.length;
if (!done) {
System.out.print(".");
System.out.flush();
try {Thread.sleep(10000);} catch (InterruptedException e) {}
}
}
System.out.println(" done");
}
}
/**
* Set the replication for files that match file pattern <i>srcf</i>
* if it's a directory and recursive is true,
* set replication for all the subdirs and those files too.
* @param newRep new replication factor
* @param srcf a file pattern specifying source files
* @param recursive if need to set replication factor for files in subdirs
* @throws IOException
* @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
*/
void setReplication(short newRep, String srcf, boolean recursive,
List<Path> waitingList)
throws IOException {
Path srcPath = new Path(srcf);
FileSystem srcFs = srcPath.getFileSystem(getConf());
Path[] srcs = FileUtil.stat2Paths(srcFs.globStatus(srcPath),
srcPath);
for(int i=0; i<srcs.length; i++) {
setReplication(newRep, srcFs, srcs[i], recursive, waitingList);
}
}
private void setReplication(short newRep, FileSystem srcFs,
Path src, boolean recursive,
List<Path> waitingList)
throws IOException {
if (!srcFs.getFileStatus(src).isDir()) {
setFileReplication(src, srcFs, newRep, waitingList);
return;
}
FileStatus items[] = srcFs.listStatus(src);
if (items == null) {
throw new IOException("Could not get listing for " + src);
} else {
for (int i = 0; i < items.length; i++) {
if (!items[i].isDir()) {
setFileReplication(items[i].getPath(), srcFs, newRep, waitingList);
} else if (recursive) {
setReplication(newRep, srcFs, items[i].getPath(), recursive,
waitingList);
}
}
}
}
/**
* Actually set the replication for this file
* If it fails either throw IOException or print an error msg
* @param file: a file/directory
* @param newRep: new replication factor
* @throws IOException
*/
private void setFileReplication(Path file, FileSystem srcFs, short newRep, List<Path> waitList)
throws IOException {
if (srcFs.setReplication(file, newRep)) {
if (waitList != null) {
waitList.add(file);
}
System.out.println("Replication " + newRep + " set: " + file);
} else {
System.err.println("Could not set replication for: " + file);
}
}
/**
* Get a listing of all files in that match the file pattern <i>srcf</i>.
* @param srcf a file pattern specifying source files
* @param flags LS options.
* @throws IOException
* @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
*/
private int ls(String srcf, EnumSet<LsOption> flags) throws IOException {
Path srcPath = new Path(srcf);
FileSystem srcFs = srcPath.getFileSystem(this.getConf());
FileStatus[] srcs = srcFs.globStatus(srcPath);
if (srcs==null || srcs.length==0) {
throw new FileNotFoundException("Cannot access " + srcf +
": No such file or directory.");
}
boolean printHeader = (srcs.length == 1) ? true: false;
int numOfErrors = 0;
for(int i=0; i<srcs.length; i++) {
numOfErrors += ls(srcs[i], srcFs, flags, printHeader);
}
return numOfErrors == 0 ? 0 : -1;
}
  /* list all files under the directory <i>src</i>
   * ideally we should provide "-l" option, that lists like "ls -l".
   * Returns the number of listing errors (0 = success); output format is
   * "perms replication owner group size [blocksize] mtime path" with
   * column widths computed from the widest entry.
   */
  private int ls(FileStatus src, FileSystem srcFs, EnumSet<LsOption> flags,
      boolean printHeader) throws IOException {
    final boolean recursive = flags.contains(LsOption.Recursive);
    final boolean withBlockSize = flags.contains(LsOption.WithBlockSize);
    // Command name only affects error messages from shellListStatus.
    final String cmd = recursive? "lsr": "ls";
    final FileStatus[] items = shellListStatus(cmd, srcFs, src);
    if (items == null) {
      // Listing failed; shellListStatus already printed the error.
      return 1;
    } else {
      int numOfErrors = 0;
      if (!recursive && printHeader) {
        if (items.length != 0) {
          System.out.println("Found " + items.length + " items");
        }
      }
      // First pass: find column widths (with minimums of 3 for
      // replication and 10 for length).
      int maxReplication = 3, maxLen = 10, maxOwner = 0,maxGroup = 0;
      for(int i = 0; i < items.length; i++) {
        FileStatus stat = items[i];
        int replication = String.valueOf(stat.getReplication()).length();
        int len = String.valueOf(stat.getLen()).length();
        int owner = String.valueOf(stat.getOwner()).length();
        int group = String.valueOf(stat.getGroup()).length();
        if (replication > maxReplication) maxReplication = replication;
        if (len > maxLen) maxLen = len;
        if (owner > maxOwner) maxOwner = owner;
        if (group > maxGroup) maxGroup = group;
      }
      // Second pass: print each entry padded to the computed widths.
      for (int i = 0; i < items.length; i++) {
        FileStatus stat = items[i];
        Path cur = stat.getPath();
        String mdate = dateForm.format(new Date(stat.getModificationTime()));
        // Permission string: leading 'd' for directories, '-' for files.
        System.out.print((stat.isDir() ? "d" : "-") +
          stat.getPermission() + " ");
        // Replication is meaningless for directories; print "-" instead.
        System.out.printf("%"+ maxReplication +
          "s ", (!stat.isDir() ? stat.getReplication() : "-"));
        if (maxOwner > 0)
          System.out.printf("%-"+ maxOwner + "s ", stat.getOwner());
        if (maxGroup > 0)
          System.out.printf("%-"+ maxGroup + "s ", stat.getGroup());
        System.out.printf("%"+ maxLen + "d ", stat.getLen());
        if (withBlockSize)
          System.out.printf("%"+ maxLen + "d ", stat.getBlockSize());
        System.out.print(mdate + " ");
        System.out.println(cur.toUri().getPath());
        // Descend into subdirectories when -R was given.
        if (recursive && stat.isDir()) {
          numOfErrors += ls(stat,srcFs, flags, printHeader);
        }
      }
      return numOfErrors;
    }
  }
/**
* Show the size of all files that match the file pattern <i>src</i>
* @param src a file pattern specifying source files
* @throws IOException
* @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
*/
void du(String src) throws IOException {
Path srcPath = new Path(src);
FileSystem srcFs = srcPath.getFileSystem(getConf());
Path[] pathItems = FileUtil.stat2Paths(srcFs.globStatus(srcPath),
srcPath);
FileStatus items[] = srcFs.listStatus(pathItems);
if ((items == null) || ((items.length == 0) &&
(!srcFs.exists(srcPath)))){
throw new FileNotFoundException("Cannot access " + src
+ ": No such file or directory.");
} else {
System.out.println("Found " + items.length + " items");
int maxLength = 10;
long length[] = new long[items.length];
for (int i = 0; i < items.length; i++) {
length[i] = items[i].isDir() ?
srcFs.getContentSummary(items[i].getPath()).getLength() :
items[i].getLen();
int len = String.valueOf(length[i]).length();
if (len > maxLength) maxLength = len;
}
for(int i = 0; i < items.length; i++) {
System.out.printf("%-"+ (maxLength + BORDER) +"d", length[i]);
System.out.println(items[i].getPath());
}
}
}
/**
* Show the summary disk usage of each dir/file
* that matches the file pattern <i>src</i>
* @param src a file pattern specifying source files
* @throws IOException
* @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
*/
void dus(String src) throws IOException {
Path srcPath = new Path(src);
FileSystem srcFs = srcPath.getFileSystem(getConf());
FileStatus status[] = srcFs.globStatus(new Path(src));
if (status==null || status.length==0) {
throw new FileNotFoundException("Cannot access " + src +
": No such file or directory.");
}
for(int i=0; i<status.length; i++) {
long totalSize = srcFs.getContentSummary(status[i].getPath()).getLength();
String pathStr = status[i].getPath().toString();
System.out.println(("".equals(pathStr)?".":pathStr) + "\t" + totalSize);
}
}
/*
* Remove the given dir pattern if it is empty
*/
void rmdir(String src, final boolean ignoreFailOnEmpty,
final boolean skipTrash) throws IOException {
Path fPattern = new Path(src);
new DelayedExceptionThrowing() {
@Override
void process(Path p, FileSystem srcFs) throws IOException {
delete(p, srcFs, false, true, ignoreFailOnEmpty, false);
}
}.globAndProcess(fPattern, fPattern.getFileSystem(getConf()));
}
/**
* Create the given dir
*/
void mkdir(String src) throws IOException {
Path f = new Path(src);
FileSystem srcFs = f.getFileSystem(getConf());
FileStatus fstatus = null;
try {
fstatus = srcFs.getFileStatus(f);
if (fstatus.isDir()) {
throw new IOException("cannot create directory "
+ src + ": File exists");
}
else {
throw new IOException(src + " exists but " +
"is not a directory");
}
} catch(FileNotFoundException e) {
if (!srcFs.mkdirs(f)) {
throw new IOException("failed to create " + src);
}
}
}
/**
* (Re)create zero-length file at the specified path.
* This will be replaced by a more UNIX-like touch when files may be
* modified.
*/
void touchz(String src) throws IOException {
Path f = new Path(src);
FileSystem srcFs = f.getFileSystem(getConf());
FileStatus st;
if (srcFs.exists(f)) {
st = srcFs.getFileStatus(f);
if (st.isDir()) {
// TODO: handle this
throw new IOException(src + " is a directory");
} else if (st.getLen() != 0)
throw new IOException(src + " must be a zero-length file");
}
FSDataOutputStream out = srcFs.create(f);
out.close();
}
/**
* Check file types.
*/
int test(String argv[], int i) throws IOException {
if (!argv[i].startsWith("-") || argv[i].length() > 2)
throw new IOException("Not a flag: " + argv[i]);
char flag = argv[i].toCharArray()[1];
Path f = new Path(argv[++i]);
FileSystem srcFs = f.getFileSystem(getConf());
switch(flag) {
case 'e':
return srcFs.exists(f) ? 0 : 1;
case 'z':
return srcFs.getFileStatus(f).getLen() == 0 ? 0 : 1;
case 'd':
return srcFs.getFileStatus(f).isDir() ? 0 : 1;
default:
throw new IOException("Unknown flag: " + flag);
}
}
/**
* Print statistics about path in specified format.
* Format sequences:
* %b: Size of file in blocks
* %n: Filename
* %o: Block size
* %r: replication
* %y: UTC date as "yyyy-MM-dd HH:mm:ss"
* %Y: Milliseconds since January 1, 1970 UTC
*/
void stat(char[] fmt, String src) throws IOException {
Path srcPath = new Path(src);
FileSystem srcFs = srcPath.getFileSystem(getConf());
FileStatus glob[] = srcFs.globStatus(srcPath);
if (null == glob)
throw new IOException("cannot stat `" + src + "': No such file or directory");
for (FileStatus f : glob) {
StringBuilder buf = new StringBuilder();
for (int i = 0; i < fmt.length; ++i) {
if (fmt[i] != '%') {
buf.append(fmt[i]);
} else {
if (i + 1 == fmt.length) break;
switch(fmt[++i]) {
case 'b':
buf.append(f.getLen());
break;
case 'F':
buf.append(f.isDir() ? "directory" : "regular file");
break;
case 'n':
buf.append(f.getPath().getName());
break;
case 'o':
buf.append(f.getBlockSize());
break;
case 'r':
buf.append(f.getReplication());
break;
case 'y':
buf.append(modifFmt.format(new Date(f.getModificationTime())));
break;
case 'Y':
buf.append(f.getModificationTime());
break;
default:
buf.append(fmt[i]);
break;
}
}
}
System.out.println(buf.toString());
}
}
/**
* Move files that match the file pattern <i>srcf</i>
* to a destination file.
* When moving mutiple files, the destination must be a directory.
* Otherwise, IOException is thrown.
* @param srcf a file pattern specifying source files
* @param dstf a destination local file/directory
* @throws IOException
* @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
*/
void rename(String srcf, String dstf) throws IOException {
Path srcPath = new Path(srcf);
Path dstPath = new Path(dstf);
FileSystem srcFs = srcPath.getFileSystem(getConf());
FileSystem dstFs = dstPath.getFileSystem(getConf());
URI srcURI = srcFs.getUri();
URI dstURI = dstFs.getUri();
if (srcURI.compareTo(dstURI) != 0) {
throw new IOException("src and destination filesystems do not match.");
}
Path[] srcs = FileUtil.stat2Paths(srcFs.globStatus(srcPath), srcPath);
Path dst = new Path(dstf);
if (srcs.length > 1 && !srcFs.isDirectory(dst)) {
throw new IOException("When moving multiple files, "
+ "destination should be a directory.");
}
for(int i=0; i<srcs.length; i++) {
if (!srcFs.rename(srcs[i], dst)) {
FileStatus srcFstatus = null;
FileStatus dstFstatus = null;
try {
srcFstatus = srcFs.getFileStatus(srcs[i]);
} catch(FileNotFoundException e) {
throw new FileNotFoundException(srcs[i] +
": No such file or directory");
}
try {
dstFstatus = dstFs.getFileStatus(dst);
} catch(IOException e) {
}
if((srcFstatus!= null) && (dstFstatus!= null)) {
if (srcFstatus.isDir() && !dstFstatus.isDir()) {
throw new IOException("cannot overwrite non directory "
+ dst + " with directory " + srcs[i]);
}
}
throw new IOException("Failed to rename " + srcs[i] + " to " + dst);
}
}
}
/**
* Move/rename file(s) to a destination file. Multiple source
* files can be specified. The destination is the last element of
* the argvp[] array.
* If multiple source files are specified, then the destination
* must be a directory. Otherwise, IOException is thrown.
* @exception: IOException
*/
private int rename(String argv[], Configuration conf) throws IOException {
int i = 0;
int exitCode = 0;
String cmd = argv[i++];
String dest = argv[argv.length-1];
//
// If the user has specified multiple source files, then
// the destination has to be a directory
//
if (argv.length > 3) {
Path dst = new Path(dest);
FileSystem dstFs = dst.getFileSystem(getConf());
if (!dstFs.isDirectory(dst)) {
throw new IOException("When moving multiple files, "
+ "destination " + dest + " should be a directory.");
}
}
//
// for each source file, issue the rename
//
for (; i < argv.length - 1; i++) {
try {
//
// issue the rename to the fs
//
rename(argv[i], dest);
} catch (RemoteException e) {
//
// This is a error returned by hadoop server. Print
// out the first line of the error mesage.
//
exitCode = -1;
try {
String[] content;
content = e.getLocalizedMessage().split("\n");
System.err.println(cmd.substring(1) + ": " + content[0]);
} catch (Exception ex) {
System.err.println(cmd.substring(1) + ": " +
ex.getLocalizedMessage());
}
} catch (IOException e) {
//
// IO exception encountered locally.
//
exitCode = -1;
System.err.println(cmd.substring(1) + ": " +
e.getLocalizedMessage());
}
}
return exitCode;
}
/**
* Copy files that match the file pattern <i>srcf</i>
* to a destination file.
* When copying mutiple files, the destination must be a directory.
* Otherwise, IOException is thrown.
* @param srcf a file pattern specifying source files
* @param dstf a destination local file/directory
* @throws IOException
* @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
*/
void copy(String srcf, String dstf, Configuration conf) throws IOException {
Path srcPath = new Path(srcf);
FileSystem srcFs = srcPath.getFileSystem(getConf());
Path dstPath = new Path(dstf);
FileSystem dstFs = dstPath.getFileSystem(getConf());
Path [] srcs = FileUtil.stat2Paths(srcFs.globStatus(srcPath), srcPath);
if (srcs.length > 1 && !dstFs.isDirectory(dstPath)) {
throw new IOException("When copying multiple files, "
+ "destination should be a directory.");
}
for(int i=0; i<srcs.length; i++) {
FileUtil.copy(srcFs, srcs[i], dstFs, dstPath, false, conf);
}
}
/**
* Copy file(s) to a destination file. Multiple source
* files can be specified. The destination is the last element of
* the argvp[] array.
* If multiple source files are specified, then the destination
* must be a directory. Otherwise, IOException is thrown.
* @exception: IOException
*/
private int copy(String argv[], Configuration conf) throws IOException {
int i = 0;
int exitCode = 0;
String cmd = argv[i++];
String dest = argv[argv.length-1];
//
// If the user has specified multiple source files, then
// the destination has to be a directory
//
if (argv.length > 3) {
Path dst = new Path(dest);
if (!getFS().isDirectory(dst)) {
throw new IOException("When copying multiple files, "
+ "destination " + dest + " should be a directory.");
}
}
//
// for each source file, issue the copy
//
for (; i < argv.length - 1; i++) {
try {
//
// issue the copy to the fs
//
copy(argv[i], dest, conf);
} catch (RemoteException e) {
//
// This is a error returned by hadoop server. Print
// out the first line of the error mesage.
//
exitCode = -1;
try {
String[] content;
content = e.getLocalizedMessage().split("\n");
System.err.println(cmd.substring(1) + ": " +
content[0]);
} catch (Exception ex) {
System.err.println(cmd.substring(1) + ": " +
ex.getLocalizedMessage());
}
} catch (IOException e) {
//
// IO exception encountered locally.
//
exitCode = -1;
System.err.println(cmd.substring(1) + ": " +
e.getLocalizedMessage());
}
}
return exitCode;
}
/**
* Compress a file.
*/
private int compress(String argv[], Configuration conf) throws IOException {
int i = 0;
String cmd = argv[i++];
String srcf = argv[i++];
String dstf = argv[i++];
Path srcPath = new Path(srcf);
FileSystem srcFs = srcPath.getFileSystem(getConf());
Path dstPath = new Path(dstf);
FileSystem dstFs = dstPath.getFileSystem(getConf());
// Create codec
CompressionCodecFactory factory = new CompressionCodecFactory(conf);
CompressionCodec codec = factory.getCodec(dstPath);
if (codec == null) {
System.err.println(cmd.substring(1) + ": cannot find compression codec for "
+ dstf);
return 1;
}
// open input stream
InputStream in = srcFs.open(srcPath);
// Create compression stream
OutputStream out = dstFs.create(dstPath);
out = codec.createOutputStream(out);
IOUtils.copyBytes(in, out, conf, true);
return 0;
}
/**
* Delete all files that match the file pattern <i>srcf</i>.
* @param srcf a file pattern specifying source files
* @param recursive if need to delete subdirs
* @param skipTrash Should we skip the trash, if it's enabled?
* @throws IOException
* @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
*/
void delete(String srcf, final boolean recursive, final boolean skipTrash)
throws IOException {
//rm behavior in Linux
// [~/1207]$ ls ?.txt
// x.txt z.txt
// [~/1207]$ rm x.txt y.txt z.txt
// rm: cannot remove `y.txt': No such file or directory
Path srcPattern = new Path(srcf);
new DelayedExceptionThrowing() {
@Override
void process(Path p, FileSystem srcFs) throws IOException {
delete(p, srcFs, recursive, skipTrash, false, true);
}
}.globAndProcess(srcPattern, srcPattern.getFileSystem(getConf()));
}
  /**
   * Deletes src with rm/rmr semantics: delegates to the six-argument
   * overload with ignoreNonEmpty=false and rmContext=true.
   */
  public void delete(Path src, FileSystem srcFs, boolean recursive,
                     boolean skipTrash) throws IOException {
    delete(src, srcFs, recursive, skipTrash, false, true);
  }
/* delete a file */
public void delete(Path src, FileSystem srcFs, boolean recursive,
boolean skipTrash, boolean ignoreNonEmpty,
boolean rmContext) throws IOException {
FileStatus fs = null;
try {
fs = srcFs.getFileStatus(src);
} catch (FileNotFoundException fnfe) {
// Have to re-throw so that console output is as expected
throw new FileNotFoundException("cannot remove "
+ src + ": No such file or directory.");
}
if (fs.isDir() && !recursive) {
// We may safely delete empty directories if
// the recursive option is not specified
FileStatus children[] = srcFs.listStatus(src);
if (rmContext || (children != null && children.length != 0)) {
if (ignoreNonEmpty) {
return;
} else {
throw new IOException("Cannot remove directory \"" + src + "\"," +
" use -rmr instead");
}
} else if (children == null) {
throw new IOException(src + " no longer exists");
}
}
if(!skipTrash) {
try {
Trash trashTmp = new Trash(srcFs, getConf());
if (trashTmp.moveToTrash(src)) {
System.err.println("Moved to trash: " + src);
return;
}
} catch (IOException e) {
Exception cause = (Exception) e.getCause();
String msg = "";
if(cause != null) {
msg = cause.getLocalizedMessage();
}
System.err.println("Problem with Trash." + msg +". Consider using -skipTrash option");
throw e;
}
}
if (srcFs.delete(src, recursive)) {
System.err.println("Deleted " + src);
} else {
throw new IOException("Delete failed " + src);
}
}
/** Undelete a file
* @param src path to the file in the trash
* @param srcFs FileSystem instance
* @param userName name of the user whose trash will be searched, or
* null for current user
* @return true if the file was deleted
* @throws IOException
*/
public boolean undelete(String src, FileSystem srcFs, String userName)
throws IOException {
boolean result = srcFs.undelete(new Path(src), userName);
if (result)
System.err.println("Moved from trash: " + src);
return result;
}
  /**
   * Empties the trash: removes already-checkpointed contents and then
   * creates a fresh checkpoint of what remains.
   */
  private void expunge() throws IOException {
    getTrash().expunge();
    getTrash().checkpoint();
  }
  /**
   * Returns the path of the current trash directory used by this shell.
   * (The previous comment claimed a Trash object is returned; the method
   * actually returns a Path.)
   */
  public Path getCurrentTrashDir() throws IOException{
    return getTrash().getCurrentTrashDir();
  }
/**
* Parse the incoming command string
* @param cmd
* @param pos ignore anything before this pos in cmd
* @throws IOException
*/
private void tail(String[] cmd, int pos) throws IOException {
CommandFormat c = new CommandFormat("tail", 1, 1, "f");
String src = null;
Path path = null;
try {
List<String> parameters = c.parse(cmd, pos);
src = parameters.get(0);
} catch(IllegalArgumentException iae) {
System.err.println("Usage: java FsShell " + TAIL_USAGE);
throw iae;
}
boolean foption = c.getOpt("f") ? true: false;
path = new Path(src);
FileSystem srcFs = path.getFileSystem(getConf());
if (srcFs.isDirectory(path)) {
throw new IOException("Source must be a file.");
}
long fileSize = srcFs.getFileStatus(path).getLen();
long offset = (fileSize > 1024) ? fileSize - 1024: 0;
while (true) {
FSDataInputStream in = srcFs.open(path);
in.seek(offset);
IOUtils.copyBytes(in, System.out, 1024, false);
offset = in.getPos();
in.close();
if (!foption) {
break;
}
fileSize = srcFs.getFileStatus(path).getLen();
offset = (fileSize > offset) ? offset: fileSize;
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
break;
}
}
}
/**
* Parse the incoming command string
* @param cmd
* @param pos ignore anything before this pos in cmd
* @throws IOException
*/
private void head(String[] cmd, int pos) throws IOException {
CommandFormat c = new CommandFormat("head", 1, 1);
String src = null;
Path path = null;
try {
List<String> parameters = c.parse(cmd, pos);
src = parameters.get(0);
} catch(IllegalArgumentException iae) {
System.err.println("Usage: java FsShell " + HEAD_USAGE);
throw iae;
}
path = new Path(src);
FileSystem srcFs = path.getFileSystem(getConf());
if (srcFs.isDirectory(path)) {
throw new IOException("Source must be a file.");
}
long fileSize = srcFs.getFileStatus(path).getLen();
int len = (fileSize > 1024) ? 1024 : (int) fileSize;
FSDataInputStream in = srcFs.open(path);
byte buf[] = new byte[len];
IOUtils.readFully(in, buf, 0, len);
System.out.write(buf);
System.out.write('\n');
in.close();
}
  /**
   * This class runs a command on a given FileStatus. This can be used for
   * running various commands like chmod, chown etc.
   */
  static abstract class CmdHandler {
    // Exit status accumulated by subclasses; 0 means no error so far.
    protected int errorCode = 0;
    // Subclasses set this to false to stop recursive traversal early.
    protected boolean okToContinue = true;
    // Command name used in error messages (e.g. "chmod").
    protected String cmdName;
    int getErrorCode() { return errorCode; }
    boolean okToContinue() { return okToContinue; }
    String getName() { return cmdName; }
    // NOTE(review): the FileSystem parameter is unused here; presumably
    // kept for subclass constructor symmetry — confirm before removing.
    protected CmdHandler(String cmdName, FileSystem fs) {
      this.cmdName = cmdName;
    }
    /** Applies the command to a single file/directory entry. */
    public abstract void run(FileStatus file, FileSystem fs) throws IOException;
  }
/** helper returns listStatus() */
private static FileStatus[] shellListStatus(String cmd,
FileSystem srcFs,
FileStatus src) {
if (!src.isDir()) {
FileStatus[] files = { src };
return files;
}
Path path = src.getPath();
try {
FileStatus[] files = srcFs.listStatus(path);
if ( files == null ) {
System.err.println(cmd +
": could not get listing for '" + path + "'");
}
return files;
} catch (IOException e) {
System.err.println(cmd +
": could not get get listing for '" + path + "' : " +
e.getMessage().split("\n")[0]);
}
return null;
}
/**
* Runs the command on a given file with the command handler.
* If recursive is set, command is run recursively.
*/
private static int runCmdHandler(CmdHandler handler, FileStatus stat,
FileSystem srcFs,
boolean recursive) throws IOException {
int errors = 0;
handler.run(stat, srcFs);
if (recursive && stat.isDir() && handler.okToContinue()) {
FileStatus[] files = shellListStatus(handler.getName(), srcFs, stat);
if (files == null) {
return 1;
}
for(FileStatus file : files ) {
errors += runCmdHandler(handler, file, srcFs, recursive);
}
}
return errors;
}
///top level runCmdHandler
int runCmdHandler(CmdHandler handler, String[] args,
int startIndex, boolean recursive)
throws IOException {
int errors = 0;
for (int i=startIndex; i<args.length; i++) {
Path srcPath = new Path(args[i]);
FileSystem srcFs = srcPath.getFileSystem(getConf());
Path[] paths = FileUtil.stat2Paths(srcFs.globStatus(srcPath), srcPath);
for(Path path : paths) {
try {
FileStatus file = srcFs.getFileStatus(path);
if (file == null) {
System.err.println(handler.getName() +
": could not get status for '" + path + "'");
errors++;
} else {
errors += runCmdHandler(handler, file, srcFs, recursive);
}
} catch (IOException e) {
String msg = (e.getMessage() != null ? e.getLocalizedMessage() :
(e.getCause().getMessage() != null ?
e.getCause().getLocalizedMessage() : "null"));
System.err.println(handler.getName() + ": could not get status for '"
+ path + "': " + msg.split("\n")[0]);
}
}
}
return (errors > 0 || handler.getErrorCode() != 0) ? 1 : 0;
}
  /**
   * Return an abbreviated English-language desc of the byte length.
   * Thin delegate kept for source compatibility.
   * @deprecated Consider using {@link org.apache.hadoop.util.StringUtils#byteDesc} instead.
   */
  @Deprecated
  public static String byteDesc(long len) {
    return StringUtils.byteDesc(len);
  }
  /**
   * Thin delegate kept for source compatibility.
   * NOTE(review): `synchronized` predates the delegation and presumably
   * guarded a shared formatter — confirm whether it is still needed.
   * @deprecated Consider using {@link org.apache.hadoop.util.StringUtils#limitDecimalTo2} instead.
   */
  @Deprecated
  public static synchronized String limitDecimalTo2(double d) {
    return StringUtils.limitDecimalTo2(d);
  }
private void printHelp(String cmd) {
String summary = "hadoop fs is the command to execute fs commands. " +
"The full syntax is: \n\n" +
"hadoop fs [-fs <local | file system URI>] [-conf <configuration file>]\n\t" +
"[-D <property=value>] [-ls <path>] [-lsr <path>] [-lsrx <path>] [-du <path>]\n\t" +
"[-dus <path>] [-mv <src> <dst>] [-cp <src> <dst>] [-rm [-skipTrash] <src>]\n\t" +
"[-rmr [-skipTrash] <src>] [-put <localsrc> ... <dst>]\n\t" +
"[-rmdir [-ignore-fail-on-non-empty] <src>]\n\t" +
"[-copyFromLocal <localsrc> ... <dst>]\n\t" +
"[-moveFromLocal <localsrc> ... <dst>] [" +
GET_SHORT_USAGE + "\n\t" +
"[-getmerge <src> <localdst> [addnl]] [-cat <src>]\n\t" +
"[" + COPYTOLOCAL_SHORT_USAGE + "] [-moveToLocal <src> <localdst>]\n\t" +
"[-mkdir <path>] [-report] [" + SETREP_SHORT_USAGE + "]\n\t" +
"[-touchz <path>] [" + FsShellTouch.TOUCH_USAGE + "]\n\t" +
"[-test -[ezd] <path>] [-stat [format] <path>]\n\t" +
"[-head <path>] [-tail [-f] <path>] [-text <path>]\n\t" +
"[-decompress <path>] [-compress <src> <tgt>]\n\t" +
"[-undelete [-u <username>] <path>]\n\t" +
"[" + FsShellPermissions.CHMOD_USAGE + "]\n\t" +
"[" + FsShellPermissions.CHOWN_USAGE + "]\n\t" +
"[" + FsShellPermissions.CHGRP_USAGE + "]\n\t" +
"[" + Count.USAGE + "]\n\t" +
"[-help [cmd]]\n";
String conf ="-conf <configuration file>: Specify an application configuration file.";
String D = "-D <property=value>: Use value for given property.";
String fs = "-fs [local | <file system URI>]: \tSpecify the file system to use.\n" +
"\t\tIf not specified, the current configuration is used, \n" +
"\t\ttaken from the following, in increasing precedence: \n" +
"\t\t\tcore-default.xml inside the hadoop jar file \n" +
"\t\t\tcore-site.xml in $HADOOP_CONF_DIR \n" +
"\t\t'local' means use the local file system as your DFS. \n" +
"\t\t<file system URI> specifies a particular file system to \n" +
"\t\tcontact. This argument is optional but if used must appear\n" +
"\t\tappear first on the command line. Exactly one additional\n" +
"\t\targument must be specified. \n";
String ls = "-ls <path>: \tList the contents that match the specified file pattern. If\n" +
"\t\tpath is not specified, the contents of /user/<currentUser>\n" +
"\t\twill be listed. Directory entries are of the form \n" +
"\t\t\tdirName (full path) <dir> \n" +
"\t\tand file entries are of the form \n" +
"\t\t\tfileName(full path) <r n> size \n" +
"\t\twhere n is the number of replicas specified for the file \n" +
"\t\tand size is the size of the file, in bytes.\n";
String lsr = "-lsr <path>: \tRecursively list the contents that match the specified\n" +
"\t\tfile pattern. Behaves very similarly to hadoop fs -ls,\n" +
"\t\texcept that the data is shown for all the entries in the\n" +
"\t\tsubtree.\n";
String lsrx = "-lsrx <path>: \tRecursively list the contents that match the specified\n" +
"\t\tfile pattern. Behaves very similarly to hadoop fs -lsr,\n" +
"\t\texcept that block size of files is also shown.\n";
String du = "-du <path>: \tShow the amount of space, in bytes, used by the files that \n" +
"\t\tmatch the specified file pattern. Equivalent to the unix\n" +
"\t\tcommand \"du -sb <path>/*\" in case of a directory, \n" +
"\t\tand to \"du -b <path>\" in case of a file.\n" +
"\t\tThe output is in the form \n" +
"\t\t\tname(full path) size (in bytes)\n";
String dus = "-dus <path>: \tShow the amount of space, in bytes, used by the files that \n" +
"\t\tmatch the specified file pattern. Equivalent to the unix\n" +
"\t\tcommand \"du -sb\" The output is in the form \n" +
"\t\t\tname(full path) size (in bytes)\n";
String mv = "-mv <src> <dst>: Move files that match the specified file pattern <src>\n" +
"\t\tto a destination <dst>. When moving multiple files, the \n" +
"\t\tdestination must be a directory. \n";
String cp = "-cp <src> <dst>: Copy files that match the file pattern <src> to a \n" +
"\t\tdestination. When copying multiple files, the destination\n" +
"\t\tmust be a directory. \n";
String rm = "-rm [-skipTrash] <src>: \tDelete all files that match the specified file pattern.\n" +
"\t\tEquivalent to the Unix command \"rm <src>\"\n" +
"\t\t-skipTrash option bypasses trash, if enabled, and immediately\n" +
"deletes <src>";
String rmr = "-rmr [-skipTrash] <src>: \tRemove all directories which match the specified file \n" +
"\t\tpattern. Equivalent to the Unix command \"rm -rf <src>\"\n" +
"\t\t-skipTrash option bypasses trash, if enabled, and immediately\n" +
"deletes <src>";
String rmdir = "-rmdir [-ignore-fail-on-non-empty] <src>: \tRemoves the directory entry \n" +
"\t\tspecified by the directory argument, provided it is empty. \n";
String put = "-put <localsrc> ... <dst>: \tCopy files " +
"from the local file system \n\t\tinto fs. \n";
String copyFromLocal = "-copyFromLocal <localsrc> ... <dst>:" +
" Identical to the -put command.\n";
String moveFromLocal = "-moveFromLocal <localsrc> ... <dst>:" +
" Same as -put, except that the source is\n\t\tdeleted after it's copied.\n";
String get = GET_SHORT_USAGE
+ ": Copy files that match the file pattern <src> \n" +
"\t\tto the local name. <src> is kept. When copying mutiple, \n" +
"\t\tfiles, the destination must be a directory. " +
"\t\twhen -gencrc is given, it will print out CRC32 checksum of the file in stderr\n";
String getmerge = "-getmerge <src> <localdst>: Get all the files in the directories that \n" +
"\t\tmatch the source file pattern and merge and sort them to only\n" +
"\t\tone file on local fs. <src> is kept.\n";
String cat = "-cat <src>: \tFetch all files that match the file pattern <src> \n" +
"\t\tand display their content on stdout.\n";
String text = "-text <src>: \tTakes a source file and outputs the file in text format.\n" +
"\t\tThe allowed formats are zip and TextRecordInputStream.\n";
String decompress = "-decompress <src>: \tTakes a source file and decompress the file based on file name extension.\n" +
"\t\tThe allowed formats are any registered compressed file formats.\n";
String compress = "-compress <src> <tgt>: \tTakes a source file and compress the file to target.\n" +
"\t\tThe compression codec is determined by the target file name extension.";
String undelete = "-undelete [-u <username>] <src>: \tUndelete a file from the trash folder\n" +
"\t\tIf -u <username> is supplied, attempt to undelete from given users\n" +
"\t\ttrash, otherwise use current users trash as default\n";
String copyToLocal = COPYTOLOCAL_SHORT_USAGE
+ ": Identical to the -get command.\n";
String moveToLocal = "-moveToLocal <src> <localdst>: Not implemented yet \n";
String mkdir = "-mkdir <path>: \tCreate a directory in specified location. \n";
String setrep = SETREP_SHORT_USAGE
+ ": Set the replication level of a file. \n"
+ "\t\tThe -R flag requests a recursive change of replication level \n"
+ "\t\tfor an entire tree.\n";
String touchz = "-touchz <path>: Write a timestamp in yyyy-MM-dd HH:mm:ss format\n" +
"\t\tin a file at <path>. An error is returned if the file exists with non-zero length\n";
String touch = FsShellTouch.TOUCH_USAGE + "\n" +
"\t\tUpdate the access and modification times of each PATH to the current time.\n" +
"\t\tIf PATH does not exist, create empty file.\n\n" +
"\t-a\tChange only access time\n" +
"\t-c, --no-create\n" +
"\t\tDo not create any files\n" +
"\t-d, --date=\"yyyy-MM-dd HH:mm:ss\"\n" +
"\t\tUse specified date instead of current time\n" +
"\t-m\tChange only modification time\n" +
"\t-u timestamp\n" +
"\t\tUse specified timestamp instead of date";
String test = "-test -[ezd] <path>: If file { exists, has zero length, is a directory\n" +
"\t\tthen return 0, else return 1.\n";
String stat = "-stat [format] <path>: Print statistics about the file/directory at <path>\n" +
"\t\tin the specified format. Format accepts filesize in blocks (%b), filename (%n),\n" +
"\t\tblock size (%o), replication (%r), modification date (%y, %Y)\n";
String tail = TAIL_USAGE
+ ": Show the last 1KB of the file. \n"
+ "\t\tThe -f option shows apended data as the file grows. \n";
String head = HEAD_USAGE
+ ": Show the first 1KB of the file. \n";
String chmod = FsShellPermissions.CHMOD_USAGE + "\n" +
"\t\tChanges permissions of a file.\n" +
"\t\tThis works similar to shell's chmod with a few exceptions.\n\n" +
"\t-R\tmodifies the files recursively. This is the only option\n" +
"\t\tcurrently supported.\n\n" +
"\tMODE\tMode is same as mode used for chmod shell command.\n" +
"\t\tOnly letters recognized are 'rwxX'. E.g. a+r,g-w,+rwx,o=r\n\n" +
"\tOCTALMODE Mode specifed in 3 digits. Unlike shell command,\n" +
"\t\tthis requires all three digits.\n" +
"\t\tE.g. 754 is same as u=rwx,g=rx,o=r\n\n" +
"\t\tIf none of 'augo' is specified, 'a' is assumed and unlike\n" +
"\t\tshell command, no umask is applied.\n";
String chown = FsShellPermissions.CHOWN_USAGE + "\n" +
"\t\tChanges owner and group of a file.\n" +
"\t\tThis is similar to shell's chown with a few exceptions.\n\n" +
"\t-R\tmodifies the files recursively. This is the only option\n" +
"\t\tcurrently supported.\n\n" +
"\t\tIf only owner or group is specified then only owner or\n" +
"\t\tgroup is modified.\n\n" +
"\t\tThe owner and group names may only cosists of digits, alphabet,\n"+
"\t\tand any of '-_.@/' i.e. [-_.@/a-zA-Z0-9]. The names are case\n" +
"\t\tsensitive.\n\n" +
"\t\tWARNING: Avoid using '.' to separate user name and group though\n" +
"\t\tLinux allows it. If user names have dots in them and you are\n" +
"\t\tusing local file system, you might see surprising results since\n" +
"\t\tshell command 'chown' is used for local files.\n";
String chgrp = FsShellPermissions.CHGRP_USAGE + "\n" +
"\t\tThis is equivalent to -chown ... :GROUP ...\n";
String help = "-help [cmd]: \tDisplays help for given command or all commands if none\n" +
"\t\tis specified.\n";
if ("fs".equals(cmd)) {
System.out.println(fs);
} else if ("conf".equals(cmd)) {
System.out.println(conf);
} else if ("D".equals(cmd)) {
System.out.println(D);
} else if ("ls".equals(cmd)) {
System.out.println(ls);
} else if ("lsr".equals(cmd)) {
System.out.println(lsr);
} else if ("lsrx".equals(cmd)) {
System.out.println(lsrx);
} else if ("du".equals(cmd)) {
System.out.println(du);
} else if ("dus".equals(cmd)) {
System.out.println(dus);
} else if ("rm".equals(cmd)) {
System.out.println(rm);
} else if ("rmr".equals(cmd)) {
System.out.println(rmr);
} else if ("rmdir".equals(cmd)) {
System.out.println(rmdir);
} else if ("mkdir".equals(cmd)) {
System.out.println(mkdir);
} else if ("mv".equals(cmd)) {
System.out.println(mv);
} else if ("cp".equals(cmd)) {
System.out.println(cp);
} else if ("put".equals(cmd)) {
System.out.println(put);
} else if ("copyFromLocal".equals(cmd)) {
System.out.println(copyFromLocal);
} else if ("moveFromLocal".equals(cmd)) {
System.out.println(moveFromLocal);
} else if ("get".equals(cmd)) {
System.out.println(get);
} else if ("getmerge".equals(cmd)) {
System.out.println(getmerge);
} else if ("copyToLocal".equals(cmd)) {
System.out.println(copyToLocal);
} else if ("moveToLocal".equals(cmd)) {
System.out.println(moveToLocal);
} else if ("cat".equals(cmd)) {
System.out.println(cat);
} else if ("get".equals(cmd)) {
System.out.println(get);
} else if ("setrep".equals(cmd)) {
System.out.println(setrep);
} else if ("touchz".equals(cmd)) {
System.out.println(touchz);
} else if ("touch".equals(cmd)) {
System.out.println(touch);
} else if ("test".equals(cmd)) {
System.out.println(test);
} else if ("text".equals(cmd)) {
System.out.println(text);
} else if ("decompress".equals(cmd)) {
System.out.println(decompress);
} else if ("compress".equals(cmd)) {
System.out.println(compress);
} else if ("stat".equals(cmd)) {
System.out.println(stat);
} else if ("tail".equals(cmd)) {
System.out.println(tail);
} else if ("head".equals(cmd)) {
System.out.println(head);
} else if ("chmod".equals(cmd)) {
System.out.println(chmod);
} else if ("chown".equals(cmd)) {
System.out.println(chown);
} else if ("chgrp".equals(cmd)) {
System.out.println(chgrp);
} else if ("undelete".equals(cmd)) {
System.out.println(undelete);
} else if (Count.matches(cmd)) {
System.out.println(Count.DESCRIPTION);
} else if ("help".equals(cmd)) {
System.out.println(help);
} else {
System.out.println(summary);
System.out.println(fs);
System.out.println(ls);
System.out.println(lsr);
System.out.println(lsrx);
System.out.println(du);
System.out.println(dus);
System.out.println(mv);
System.out.println(cp);
System.out.println(rm);
System.out.println(rmr);
System.out.println(rmdir);
System.out.println(put);
System.out.println(copyFromLocal);
System.out.println(moveFromLocal);
System.out.println(get);
System.out.println(getmerge);
System.out.println(cat);
System.out.println(copyToLocal);
System.out.println(moveToLocal);
System.out.println(mkdir);
System.out.println(setrep);
System.out.println(head);
System.out.println(tail);
System.out.println(touchz);
System.out.println(touch);
System.out.println(test);
System.out.println(text);
System.out.println(decompress);
System.out.println(undelete);
System.out.println(compress);
System.out.println(stat);
System.out.println(chmod);
System.out.println(chown);
System.out.println(chgrp);
System.out.println(Count.DESCRIPTION);
System.out.println(help);
}
}
/**
* Apply operation specified by 'cmd' on all parameters
* starting from argv[startindex].
*/
private int doall(String cmd, String argv[], int startindex) {
int exitCode = 0;
int i = startindex;
boolean rmSkipTrash = false;
boolean rmdirIgnoreFail = false;
// Check for -skipTrash option in rm/rmr
if(("-rm".equals(cmd) || "-rmr".equals(cmd))
&& "-skipTrash".equals(argv[i])) {
rmSkipTrash = true;
i++;
}
if ("-rmdir".equals(cmd) && "-ignore-fail-on-non-empty".equals(argv[i])) {
rmdirIgnoreFail = true;
i++;
}
//
// for each source file, issue the command
//
for (; i < argv.length; i++) {
try {
//
// issue the command to the fs
//
if ("-cat".equals(cmd)) {
cat(argv[i], true, false);
} else if ("-mkdir".equals(cmd)) {
mkdir(argv[i]);
} else if ("-rm".equals(cmd)) {
delete(argv[i], false, rmSkipTrash);
} else if ("-rmr".equals(cmd)) {
delete(argv[i], true, rmSkipTrash);
} else if ("-rmdir".equals(cmd)) {
rmdir(argv[i], rmdirIgnoreFail, rmSkipTrash);
} else if ("-du".equals(cmd)) {
du(argv[i]);
} else if ("-dus".equals(cmd)) {
dus(argv[i]);
} else if (Count.matches(cmd)) {
new Count(argv, i, getConf()).runAll();
} else if ("-ls".equals(cmd)) {
exitCode = ls(argv[i], EnumSet.noneOf(LsOption.class));
} else if ("-lsr".equals(cmd)) {
exitCode = ls(argv[i], EnumSet.of(LsOption.Recursive));
} else if ("-lsrx".equals(cmd)) {
exitCode = ls(argv[i],
EnumSet.of(LsOption.Recursive, LsOption.WithBlockSize));
} else if ("-touchz".equals(cmd)) {
touchz(argv[i]);
} else if ("-text".equals(cmd)) {
text(argv[i]);
} else if ("-decompress".equals(cmd)) {
decompress(argv[i]);
} else if ("-undelete".equals(cmd)) {
String userName = null;
if ("-u".equals(argv[i])) {
// make sure there's at least one file path specified
if (argv.length < i+1) {
printUsage(cmd);
return -1;
}
i++;
userName = argv[i++];
}
exitCode = undelete(argv[i], getFS(), userName) ? 0 : -1;
}
} catch (RemoteException e) {
//
// This is a error returned by hadoop server. Print
// out the first line of the error message.
//
exitCode = -1;
try {
String[] content;
content = e.getLocalizedMessage().split("\n");
System.err.println(cmd.substring(1) + ": " +
content[0]);
} catch (Exception ex) {
System.err.println(cmd.substring(1) + ": " +
ex.getLocalizedMessage());
}
} catch (IOException e) {
//
// IO exception encountered locally.
//
exitCode = -1;
String content = e.getLocalizedMessage();
if (content != null) {
content = content.split("\n")[0];
}
System.err.println(cmd.substring(1) + ": " +
content);
}
}
return exitCode;
}
/**
* Displays format of commands.
*
*/
private static void printUsage(String cmd) {
String prefix = "Usage: java " + FsShell.class.getSimpleName();
if ("-fs".equals(cmd)) {
System.err.println("Usage: java FsShell" +
" [-fs <local | file system URI>]");
} else if ("-conf".equals(cmd)) {
System.err.println("Usage: java FsShell" +
" [-conf <configuration file>]");
} else if ("-D".equals(cmd)) {
System.err.println("Usage: java FsShell" +
" [-D <[property=value>]");
} else if ("-ls".equals(cmd) || "-lsr".equals(cmd) || "-lsrx".equals(cmd) ||
"-du".equals(cmd) || "-dus".equals(cmd) ||
"-touchz".equals(cmd) || "-mkdir".equals(cmd) ||
"-text".equals(cmd) || "-decompress".equals(cmd)) {
System.err.println("Usage: java FsShell" +
" [" + cmd + " <path>]");
} else if (Count.matches(cmd)) {
System.err.println(prefix + " [" + Count.USAGE + "]");
} else if ("-rm".equals(cmd) || "-rmr".equals(cmd)) {
System.err.println("Usage: java FsShell [" + cmd +
" [-skipTrash] <src>]");
} else if ("-rmdir".equals(cmd)) {
System.err.println("Usage: java FsShell [" + cmd +
" [-ignore-fail-on-non-empty] <src>]");
} else if ("-mv".equals(cmd) || "-cp".equals(cmd) || "-compress".equals(cmd)) {
System.err.println("Usage: java FsShell" +
" [" + cmd + " <src> <dst>]");
} else if ("-put".equals(cmd) || "-copyFromLocal".equals(cmd) ||
"-moveFromLocal".equals(cmd)) {
System.err.println("Usage: java FsShell" +
" [" + cmd + " <localsrc> ... <dst>]");
} else if ("-get".equals(cmd)) {
System.err.println("Usage: java FsShell [" + GET_SHORT_USAGE + "]");
} else if ("-copyToLocal".equals(cmd)) {
System.err.println("Usage: java FsShell [" + COPYTOLOCAL_SHORT_USAGE+ "]");
} else if ("-moveToLocal".equals(cmd)) {
System.err.println("Usage: java FsShell" +
" [" + cmd + " [-crc] <src> <localdst>]");
} else if ("-cat".equals(cmd)) {
System.err.println("Usage: java FsShell" +
" [" + cmd + " <src>]");
} else if ("-setrep".equals(cmd)) {
System.err.println("Usage: java FsShell [" + SETREP_SHORT_USAGE + "]");
} else if ("-test".equals(cmd)) {
System.err.println("Usage: java FsShell" +
" [-test -[ezd] <path>]");
} else if ("-stat".equals(cmd)) {
System.err.println("Usage: java FsShell" +
" [-stat [format] <path>]");
} else if ("-head".equals(cmd)) {
System.err.println("Usage: java FsShell [" + HEAD_USAGE + "]");
} else if ("-tail".equals(cmd)) {
System.err.println("Usage: java FsShell [" + TAIL_USAGE + "]");
} else if ("-touch".equals(cmd)) {
System.err.println("Usage: java FsShell [" + FsShellTouch.TOUCH_USAGE + "]");
} else {
System.err.println("Usage: java FsShell");
System.err.println(" [-ls <path>]");
System.err.println(" [-lsr <path>]");
System.err.println(" [-lsrx <path>]");
System.err.println(" [-du <path>]");
System.err.println(" [-dus <path>]");
System.err.println(" [" + Count.USAGE + "]");
System.err.println(" [-mv <src> <dst>]");
System.err.println(" [-cp <src> <dst>]");
System.err.println(" [-rm [-skipTrash] <path>]");
System.err.println(" [-rmr [-skipTrash] <path>]");
System.err.println(" [-rmdir [-ignore-fail-on-non-empty] <path>]");
System.err.println(" [-expunge]");
System.err.println(" [-put <localsrc> ... <dst>]");
System.err.println(" [-copyFromLocal <localsrc> ... <dst>]");
System.err.println(" [-moveFromLocal <localsrc> ... <dst>]");
System.err.println(" [" + GET_SHORT_USAGE + "]");
System.err.println(" [-getmerge <src> <localdst> [addnl]]");
System.err.println(" [-cat <src>]");
System.err.println(" [-text <src>]");
System.err.println(" [-decompress <src>]");
System.err.println(" [-compress <src> <tgt>]");
System.err.println(" [-undelete [-u <username>] <src>]");
System.err.println(" [" + COPYTOLOCAL_SHORT_USAGE + "]");
System.err.println(" [-moveToLocal [-crc] <src> <localdst>]");
System.err.println(" [-mkdir <path>]");
System.err.println(" [" + SETREP_SHORT_USAGE + "]");
System.err.println(" [-touchz <path>]");
System.err.println(" [" + FsShellTouch.TOUCH_USAGE + "]");
System.err.println(" [-test -[ezd] <path>]");
System.err.println(" [-stat [format] <path>]");
System.err.println(" [" + HEAD_USAGE + "]");
System.err.println(" [" + TAIL_USAGE + "]");
System.err.println(" [" + FsShellPermissions.CHMOD_USAGE + "]");
System.err.println(" [" + FsShellPermissions.CHOWN_USAGE + "]");
System.err.println(" [" + FsShellPermissions.CHGRP_USAGE + "]");
System.err.println(" [-help [cmd]]");
System.err.println();
ToolRunner.printGenericCommandUsage(System.err);
}
}
  /**
   * Executes one shell command: validates argument counts, initializes the
   * FileSystem connection, dispatches to the matching operation, and maps
   * any failure onto a non-zero exit code.
   *
   * NOTE: the index {@code i} is advanced via side effects (argv[i++]) in
   * several branches, sometimes twice within a single call; Java's
   * left-to-right argument evaluation order is relied upon throughout.
   *
   * @param argv the full command line; argv[0] is the command ("-ls", ...)
   * @return 0 on success, -1 (or the operation's own code) on failure
   */
  public int run(String argv[]) throws Exception {
    if (argv.length < 1) {
      printUsage("");
      return -1;
    }
    int exitCode = -1;
    int i = 0;
    String cmd = argv[i++];
    //
    // verify that we have enough command line parameters
    //
    if ("-put".equals(cmd) || "-test".equals(cmd) ||
        "-copyFromLocal".equals(cmd) || "-moveFromLocal".equals(cmd)) {
      if (argv.length < 3) {
        printUsage(cmd);
        return exitCode;
      }
    } else if ("-get".equals(cmd) ||
               "-copyToLocal".equals(cmd) || "-moveToLocal".equals(cmd)) {
      if (argv.length < 3) {
        printUsage(cmd);
        return exitCode;
      }
    } else if ("-mv".equals(cmd) || "-cp".equals(cmd) || "-compress".equals(cmd)) {
      if (argv.length < 3) {
        printUsage(cmd);
        return exitCode;
      }
    } else if ("-rm".equals(cmd) || "-rmr".equals(cmd) ||
               "-rmdir".equals(cmd) || "-cat".equals(cmd) ||
               "-mkdir".equals(cmd) || "-touchz".equals(cmd) ||
               "-stat".equals(cmd) || "-text".equals(cmd) ||
               "-decompress".equals(cmd) || "-touch".equals(cmd) ||
               "-undelete".equals(cmd)) {
      if (argv.length < 2) {
        printUsage(cmd);
        return exitCode;
      }
    }
    // initialize FsShell
    try {
      init();
    } catch (RPC.VersionMismatch v) {
      System.err.println("Version Mismatch between client and server" +
                         "... command aborted.");
      return exitCode;
    } catch (IOException e) {
      System.err.println("Bad connection to FS. command aborted.");
      return exitCode;
    }
    exitCode = 0;
    try {
      // Dispatch to the operation matching the command name. Multi-path
      // commands delegate to doall(); single-shot commands run inline.
      if ("-put".equals(cmd) || "-copyFromLocal".equals(cmd)) {
        // All arguments but the last are sources; the last is the target.
        Path[] srcs = new Path[argv.length-2];
        for (int j=0 ; i < argv.length-1 ;)
          srcs[j++] = new Path(argv[i++]);
        copyFromLocal(srcs, argv[i++]);
      } else if ("-moveFromLocal".equals(cmd)) {
        Path[] srcs = new Path[argv.length-2];
        for (int j=0 ; i < argv.length-1 ;)
          srcs[j++] = new Path(argv[i++]);
        moveFromLocal(srcs, argv[i++]);
      } else if ("-get".equals(cmd) || "-copyToLocal".equals(cmd)) {
        copyToLocal(argv, i);
      } else if ("-getmerge".equals(cmd)) {
        // Optional trailing boolean controls appending a newline per file.
        if (argv.length>i+2)
          copyMergeToLocal(argv[i++], new Path(argv[i++]), Boolean.parseBoolean(argv[i++]));
        else
          copyMergeToLocal(argv[i++], new Path(argv[i++]));
      } else if ("-cat".equals(cmd)) {
        exitCode = doall(cmd, argv, i);
      } else if ("-text".equals(cmd)) {
        exitCode = doall(cmd, argv, i);
      } else if ("-decompress".equals(cmd)) {
        exitCode = doall(cmd, argv, i);
      } else if ("-moveToLocal".equals(cmd)) {
        moveToLocal(argv[i++], new Path(argv[i++]));
      } else if ("-setrep".equals(cmd)) {
        setReplication(argv, i);
      } else if ("-chmod".equals(cmd) ||
                 "-chown".equals(cmd) ||
                 "-chgrp".equals(cmd)) {
        FsShellPermissions.changePermissions(getFS(), cmd, argv, i, this);
      } else if ("-ls".equals(cmd)) {
        // With no path argument, ls variants default to the current dir.
        if (i < argv.length) {
          exitCode = doall(cmd, argv, i);
        } else {
          exitCode = ls(Path.CUR_DIR, EnumSet.noneOf(LsOption.class));
        }
      } else if ("-lsr".equals(cmd)) {
        if (i < argv.length) {
          exitCode = doall(cmd, argv, i);
        } else {
          exitCode = ls(Path.CUR_DIR, EnumSet.of(LsOption.Recursive));
        }
      } else if ("-lsrx".equals(cmd)) {
        if (i < argv.length) {
          exitCode = doall(cmd, argv, i);
        } else {
          exitCode = ls(Path.CUR_DIR,
              EnumSet.of(LsOption.Recursive, LsOption.WithBlockSize));
        }
      } else if ("-mv".equals(cmd)) {
        exitCode = rename(argv, getConf());
      } else if ("-cp".equals(cmd)) {
        exitCode = copy(argv, getConf());
      } else if ("-compress".equals(cmd)) {
        exitCode = compress(argv, getConf());
      } else if ("-rm".equals(cmd)) {
        exitCode = doall(cmd, argv, i);
      } else if ("-rmr".equals(cmd)) {
        exitCode = doall(cmd, argv, i);
      } else if ("-rmdir".equals(cmd)) {
        exitCode = doall(cmd, argv, i);
      } else if ("-expunge".equals(cmd)) {
        expunge();
      } else if ("-du".equals(cmd)) {
        if (i < argv.length) {
          exitCode = doall(cmd, argv, i);
        } else {
          du(".");
        }
      } else if ("-dus".equals(cmd)) {
        if (i < argv.length) {
          exitCode = doall(cmd, argv, i);
        } else {
          dus(".");
        }
      } else if (Count.matches(cmd)) {
        exitCode = new Count(argv, i, getConf()).runAll();
      } else if ("-mkdir".equals(cmd)) {
        exitCode = doall(cmd, argv, i);
      } else if ("-undelete".equals(cmd)) {
        exitCode = doall(cmd, argv, i);
      } else if ("-touchz".equals(cmd)) {
        exitCode = doall(cmd, argv, i);
      } else if ("-touch".equals(cmd)) {
        FsShellTouch.touchFiles(getFS(), argv, i);
      } else if ("-test".equals(cmd)) {
        exitCode = test(argv, i);
      } else if ("-stat".equals(cmd)) {
        // With no explicit format, default to "%y" (modification date).
        if (i + 1 < argv.length) {
          stat(argv[i++].toCharArray(), argv[i++]);
        } else {
          stat("%y".toCharArray(), argv[i]);
        }
      } else if ("-help".equals(cmd)) {
        if (i < argv.length) {
          printHelp(argv[i]);
        } else {
          printHelp("");
        }
      } else if ("-head".equals(cmd)) {
        head(argv, i);
      } else if ("-tail".equals(cmd)) {
        tail(argv, i);
      } else {
        exitCode = -1;
        System.err.println(cmd.substring(1) + ": Unknown command");
        printUsage("");
      }
    } catch (IllegalArgumentException arge) {
      exitCode = -1;
      System.err.println(cmd.substring(1) + ": " + arge.getLocalizedMessage());
      printUsage(cmd);
    } catch (RemoteException e) {
      //
      // This is a error returned by hadoop server. Print
      // out the first line of the error mesage, ignore the stack trace.
      exitCode = -1;
      try {
        String[] content;
        content = e.getLocalizedMessage().split("\n");
        System.err.println(cmd.substring(1) + ": " +
                           content[0]);
      } catch (Exception ex) {
        System.err.println(cmd.substring(1) + ": " +
                           ex.getLocalizedMessage());
      }
    } catch (IOException e) {
      //
      // IO exception encountered locally.
      //
      exitCode = -1;
      System.err.println(cmd.substring(1) + ": " +
                         e.getLocalizedMessage());
    } catch (Exception re) {
      exitCode = -1;
      System.err.println(cmd.substring(1) + ": " + re.getLocalizedMessage());
    } finally {
      // intentionally empty: the FileSystem handle is released in close()
    }
    return exitCode;
  }
public void close() throws IOException {
if (fs != null) {
fs.close();
fs = null;
}
}
/**
* main() has some simple utility methods
*/
public static void main(String argv[]) throws Exception {
FsShell shell = new FsShell();
int res;
try {
res = ToolRunner.run(shell, argv);
} finally {
shell.close();
}
System.exit(res);
}
/**
* Accumulate exceptions if there is any. Throw them at last.
*/
private abstract class DelayedExceptionThrowing {
abstract void process(Path p, FileSystem srcFs) throws IOException;
final void globAndProcess(Path srcPattern, FileSystem srcFs
) throws IOException {
List<IOException> exceptions = new ArrayList<IOException>();
for(Path p : FileUtil.stat2Paths(srcFs.globStatus(srcPattern),
srcPattern))
try { process(p, srcFs); }
catch(IOException ioe) { exceptions.add(ioe); }
if (!exceptions.isEmpty())
if (exceptions.size() == 1)
throw exceptions.get(0);
else
throw new IOException("Multiple IOExceptions: " + exceptions);
}
}
}
| apache-2.0 |
GoogleCloudPlatform/dataflow-sample-applications | timeseries-streaming/timeseries-java-applications/Adapters/src/main/java/com/google/dataflow/sample/timeseriesflow/adaptors/domain/BlendedIndex.java | 6622 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.dataflow.sample.timeseriesflow.adaptors.domain;
import com.google.auto.value.AutoValue;
import com.google.dataflow.sample.timeseriesflow.TimeSeriesData.Data;
import com.google.dataflow.sample.timeseriesflow.TimeSeriesData.TSDataPoint;
import com.google.dataflow.sample.timeseriesflow.TimeSeriesData.TSKey;
import com.google.dataflow.sample.timeseriesflow.common.CommonUtils;
import com.google.dataflow.sample.timeseriesflow.common.TSDataUtils;
import com.google.protobuf.util.Timestamps;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A domain adaptor that consumes a stream of 1 or more values and produces a new value per window
* based on ratios that are provided.
*
* <p>The values are expected to have come from the Type 1 computations with the last value per
* window used as the value to create the index from.
*
* <p>Ratios must add up to 1
*
* <p>example:
*
* <p>[ timeWindow1, timeWindow2, timeWindow3 ]
*
* <p>Key A [ 10, No Data, 12 ] Key B [ 100, No Data, 110]
*
* <p>Ratios [0.8,0.2]
*
 * <p>Output [ ((10*.8) + (100*.2)),((10*.8) + (100*.2)), ((12*.8) + (110*.2)) ]
*/
@AutoValue
@Experimental
public abstract class BlendedIndex
    extends PTransform<PCollection<TSDataPoint>, PCollection<TSDataPoint>> {

  private static final Logger LOG = LoggerFactory.getLogger(BlendedIndex.class);

  /** Per-key weights used to blend the merged stream; values must sum to 1. */
  abstract PCollectionView<Map<TSKey, Double>> getRatioList();

  /** The full set of keys that must be present before a value is emitted. */
  abstract Set<TSKey> getIndexList();

  abstract Duration getDownSampleWindowLength();

  abstract @Nullable Instant getAbsoluteStopTime();

  abstract @Nullable Duration getTimeToLive();

  /** Key under which the blended output data points are emitted. */
  abstract TSKey getIndexKey();

  @AutoValue.Builder
  public abstract static class Builder {

    public abstract Builder setRatioList(PCollectionView<Map<TSKey, Double>> newRatioList);

    public abstract Builder setIndexList(Set<TSKey> newIndexList);

    public abstract Builder setDownSampleWindowLength(Duration newDownSampleWindowLength);

    public abstract Builder setAbsoluteStopTime(Instant newAbsoluteStopTime);

    public abstract Builder setTimeToLive(Duration newTimeToLive);

    public abstract Builder setIndexKey(TSKey newIndexKey);

    public abstract BlendedIndex build();
  }

  public static Builder builder() {
    return new AutoValue_BlendedIndex.Builder();
  }

  @Override
  public PCollection<TSDataPoint> expand(PCollection<TSDataPoint> input) {

    // First densify the sparse per-key streams into one merged stream per
    // window, then apply the ratio blend with the ratios as a side input.
    MergeSparseStreamsToSingleDenseStream merge =
        MergeSparseStreamsToSingleDenseStream.builder()
            .setAbsoluteStopTime(getAbsoluteStopTime())
            .setDownSampleWindowLength(getDownSampleWindowLength())
            .setIndexList(getIndexList())
            .setTimeToLive(getTimeToLive())
            .setNewIndexKey(getIndexKey())
            .build();

    return input.apply(merge).apply(ParDo.of(Blend.create(this)).withSideInputs(getRatioList()));
  }

  @AutoValue
  abstract static class Blend extends DoFn<Iterable<TSDataPoint>, TSDataPoint> {

    abstract BlendedIndex getBlendedIndex();

    public static Blend create(BlendedIndex newBlendedIndex) {
      return builder().setBlendedIndex(newBlendedIndex).build();
    }

    public static Builder builder() {
      return new AutoValue_BlendedIndex_Blend.Builder();
    }

    @AutoValue.Builder
    public abstract static class Builder {
      public abstract Builder setBlendedIndex(BlendedIndex newBlendedIndex);

      public abstract Blend build();
    }

    @ProcessElement
    public void process(
        ProcessContext pc,
        @Element Iterable<TSDataPoint> element,
        @Timestamp Instant timestamp,
        OutputReceiver<TSDataPoint> o) {

      // Accumulate the weighted sum in BigDecimal to avoid floating-point
      // drift across the blend.
      BigDecimal sum = BigDecimal.ZERO;

      List<TSDataPoint> datapoints = new ArrayList<>();
      Set<TSKey> seenKeys = new HashSet<>();

      for (TSDataPoint value : element) {
        datapoints.add(value);
        BigDecimal itemValue = computeRatio(value, pc.sideInput(getBlendedIndex().getRatioList()));
        sum = sum.add(itemValue);
        seenKeys.add(value.getKey());
      }

      // We must have all values, else we will refuse to output
      if (getBlendedIndex().getIndexList().equals(seenKeys)) {
        o.output(
            TSDataPoint.newBuilder()
                .setKey(getBlendedIndex().getIndexKey())
                .setTimestamp(Timestamps.fromMillis(timestamp.getMillis()))
                .setData(Data.newBuilder().setDoubleVal(sum.doubleValue()))
                .putAllMetadata(CommonUtils.mergeDataPoints(datapoints))
                .build());
      } else {
        LOG.error(
            "Blended Index did not have all values for the expected indexes {}, only had {}, will not output value.",
            getBlendedIndex().getIndexList(),
            seenKeys);
      }
    }

    /** Returns the data point's value scaled by its configured ratio. */
    private BigDecimal computeRatio(TSDataPoint dataPoint, Map<TSKey, Double> ratios) {
      Double ratio = ratios.get(dataPoint.getKey());
      Preconditions.checkNotNull(
          ratio, String.format("There is no ratio provided for key %s", dataPoint.getKey()));
      BigDecimal value = TSDataUtils.getBigDecimalFromData(dataPoint.getData());
      return value.multiply(BigDecimal.valueOf(ratio));
    }
  }
}
| apache-2.0 |
NationalSecurityAgency/ghidra | Ghidra/Debug/Framework-Debugging/src/main/java/ghidra/dbg/util/PathPredicates.java | 9949 | /* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.dbg.util;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import ghidra.async.AsyncFence;
import ghidra.dbg.target.TargetObject;
import ghidra.dbg.util.PathUtils.PathComparator;
/**
 * A predicate over target-object paths (lists of string keys), with support for simple
 * wildcard patterns: {@code []} matches any index key and the empty string matches any name key.
 */
public interface PathPredicates {

    /**
     * Check whether a single key matches a single key pattern.
     *
     * <p>
     * {@code []} matches any index key, the empty pattern matches any name key, and any other
     * pattern must match the key exactly.
     */
    static boolean keyMatches(String pat, String key) {
        if (key.equals(pat)) {
            return true;
        }
        if ("[]".equals(pat)) {
            return PathUtils.isIndex(key);
        }
        if ("".equals(pat)) {
            return PathUtils.isName(key);
        }
        return false;
    }

    /** Check whether any pattern in the given set matches the key. */
    static boolean anyMatches(Set<String> pats, String key) {
        return pats.stream().anyMatch(p -> keyMatches(p, key));
    }

    /** Create a predicate from the given sequence of key patterns. */
    static PathPredicates pattern(String... keyPatterns) {
        return new PathPattern(List.of(keyPatterns));
    }

    /** Create a predicate from the given list of key patterns. */
    static PathPredicates pattern(List<String> keyPatterns) {
        return new PathPattern(keyPatterns);
    }

    /** Parse a textual path pattern into a predicate. */
    static PathPredicates parse(String pattern) {
        return new PathPattern(PathUtils.parse(pattern));
    }

    /** Combine this predicate with another; the result matches if either matches. */
    PathPredicates or(PathPredicates that);

    /**
     * Check if the entire path passes
     *
     * @param path the path to check
     * @return true if it matches, false otherwise
     */
    boolean matches(List<String> path);

    /**
     * Check if the given path <em>could</em> have a matching successor
     *
     * <p>
     * This essentially checks if the given path is a viable prefix to the matcher.
     *
     * @implNote this method could become impractical for culling queries if we allow too
     *           sophisticated of patterns. Notably, to allow an "any number of keys" pattern, e.g.,
     *           akin to {@code /src/**{@literal /}*.c} in file system path matchers. Anything
     *           starting with "src" could have a successor that matches.
     *
     *
     * @param path the path (prefix) to check
     * @param strict true to exclude the case where {@link #matches(List)} would return true
     * @return true if a successor could match, false otherwise
     */
    boolean successorCouldMatch(List<String> path, boolean strict);

    /**
     * Check if the given path has an ancestor that matches
     *
     * @param path the path to check
     * @param strict true to exclude the case where {@link #matches(List)} would return true
     * @return true if an ancestor matches, false otherwise
     */
    boolean ancestorMatches(List<String> path, boolean strict);

    /**
     * Get the patterns for the next possible key
     *
     * <p>
     * If a successor of the given path cannot match this pattern, the empty set is returned.
     *
     * @param path the ancestor path
     * @return a set of patterns where indices are enclosed in brackets ({@code []})
     */
    Set<String> getNextKeys(List<String> path);

    /**
     * Get the patterns for the next possible name
     *
     * <p>
     * If a successor of the given path cannot match this pattern, the empty set is returned. If the
     * pattern could accept a name next, get all patterns describing those names
     *
     * @param path the ancestor path
     * @return a set of patterns
     */
    Set<String> getNextNames(List<String> path);

    /**
     * Assuming a successor of path could match, get the patterns for the next possible index
     *
     * <p>
     * If a successor of the given path cannot match this pattern, the empty set is returned. If the
     * pattern could accept an index next, get all patterns describing those indices
     *
     * @param path the ancestor path
     * @return a set of patterns, without brackets ({@code []})
     */
    Set<String> getNextIndices(List<String> path);

    /**
     * If this predicate is known to match only one path, i.e., no wildcards, get that path
     *
     * @return the singleton path, or {@code null}
     */
    List<String> getSingletonPath();

    /**
     * If this predicate consists of a single pattern, get that pattern
     *
     * @return the singleton pattern, or {@code null}
     */
    PathPattern getSingletonPattern();

    /** Collect all cached values under {@code seed} whose paths match this predicate. */
    default NavigableMap<List<String>, ?> getCachedValues(TargetObject seed) {
        return getCachedValues(List.of(), seed);
    }

    /** Collect matching cached values reachable from {@code val}, rooted at {@code path}. */
    default NavigableMap<List<String>, ?> getCachedValues(List<String> path, Object val) {
        NavigableMap<List<String>, Object> result = new TreeMap<>(PathComparator.KEYED);
        getCachedValues(result, path, val);
        return result;
    }

    /**
     * Recursively accumulate matching cached values into {@code result}.
     *
     * <p>
     * Only descends into {@code val} when it is a {@link TargetObject} and a strict successor
     * of {@code path} could still match, pruning the traversal by the next-name and next-index
     * pattern sets.
     */
    default void getCachedValues(Map<List<String>, Object> result, List<String> path, Object val) {
        if (matches(path)) {
            result.put(path, val);
        }
        if (val instanceof TargetObject && successorCouldMatch(path, true)) {
            TargetObject cur = (TargetObject) val;
            Set<String> nextNames = getNextNames(path);
            if (!nextNames.isEmpty()) {
                for (Map.Entry<String, ?> ent : cur.getCachedAttributes().entrySet()) {
                    Object value = ent.getValue();
                    String name = ent.getKey();
                    if (!anyMatches(nextNames, name)) {
                        continue;
                    }
                    getCachedValues(result, PathUtils.extend(path, name), value);
                }
            }
            Set<String> nextIndices = getNextIndices(path);
            if (!nextIndices.isEmpty()) {
                for (Map.Entry<String, ?> ent : cur.getCachedElements().entrySet()) {
                    Object obj = ent.getValue();
                    String index = ent.getKey();
                    if (!anyMatches(nextIndices, index)) {
                        continue;
                    }
                    getCachedValues(result, PathUtils.index(path, index), obj);
                }
            }
        }
    }

    /** Collect all cached successor objects of {@code seed} whose paths match this predicate. */
    default NavigableMap<List<String>, TargetObject> getCachedSuccessors(TargetObject seed) {
        NavigableMap<List<String>, TargetObject> result = new TreeMap<>(PathComparator.KEYED);
        getCachedSuccessors(result, List.of(), seed);
        return result;
    }

    /**
     * Recursively accumulate matching cached successor objects into {@code result}.
     *
     * <p>
     * Like {@link #getCachedValues(Map, List, Object)}, but only {@link TargetObject} attribute
     * values are considered; plain values are skipped.
     */
    default void getCachedSuccessors(Map<List<String>, TargetObject> result,
            List<String> path, TargetObject cur) {
        if (matches(path)) {
            result.put(path, cur);
        }
        if (successorCouldMatch(path, true)) {
            Set<String> nextNames = getNextNames(path);
            if (!nextNames.isEmpty()) {
                for (Map.Entry<String, ?> ent : cur.getCachedAttributes().entrySet()) {
                    Object value = ent.getValue();
                    if (!(value instanceof TargetObject)) {
                        continue;
                    }
                    String name = ent.getKey();
                    if (!anyMatches(nextNames, name)) {
                        continue;
                    }
                    TargetObject obj = (TargetObject) value;
                    getCachedSuccessors(result, PathUtils.extend(path, name), obj);
                }
            }
            Set<String> nextIndices = getNextIndices(path);
            if (!nextIndices.isEmpty()) {
                for (Map.Entry<String, ? extends TargetObject> ent : cur.getCachedElements()
                        .entrySet()) {
                    TargetObject obj = ent.getValue();
                    String index = ent.getKey();
                    if (!anyMatches(nextIndices, index)) {
                        continue;
                    }
                    getCachedSuccessors(result, PathUtils.index(path, index), obj);
                }
            }
        }
    }

    /** Asynchronously fetch all matching successors of {@code seed}. */
    default CompletableFuture<NavigableMap<List<String>, TargetObject>> fetchSuccessors(
            TargetObject seed) {
        NavigableMap<List<String>, TargetObject> result = new TreeMap<>(PathComparator.KEYED);
        return fetchSuccessors(result, List.of(), seed).thenApply(__ -> result);
    }

    /**
     * Recursively fetch matching successors into {@code result}.
     *
     * <p>
     * Asynchronous analogue of {@link #getCachedSuccessors(Map, List, TargetObject)}: attribute
     * and element fetches for each level are fanned out and joined with {@link AsyncFence}.
     * {@code result} is synchronized because completions may run on different threads.
     */
    default CompletableFuture<Void> fetchSuccessors(Map<List<String>, TargetObject> result,
            List<String> path, TargetObject cur) {
        AsyncFence fence = new AsyncFence();
        if (matches(path)) {
            synchronized (result) {
                result.put(path, cur);
            }
        }
        if (successorCouldMatch(path, true)) {
            Set<String> nextNames = getNextNames(path);
            if (!nextNames.isEmpty()) {
                fence.include(cur.fetchAttributes().thenCompose(attrs -> {
                    AsyncFence aFence = new AsyncFence();
                    for (Map.Entry<String, ?> ent : attrs.entrySet()) {
                        Object value = ent.getValue();
                        if (!(value instanceof TargetObject)) {
                            continue;
                        }
                        String name = ent.getKey();
                        if (!anyMatches(nextNames, name)) {
                            continue;
                        }
                        TargetObject obj = (TargetObject) value;
                        aFence.include(
                            fetchSuccessors(result, PathUtils.extend(path, name), obj));
                    }
                    return aFence.ready();
                }));
            }
            Set<String> nextIndices = getNextIndices(path);
            if (!nextIndices.isEmpty()) {
                fence.include(cur.fetchElements().thenCompose(elems -> {
                    AsyncFence eFence = new AsyncFence();
                    for (Map.Entry<String, ? extends TargetObject> ent : elems.entrySet()) {
                        TargetObject obj = ent.getValue();
                        String index = ent.getKey();
                        if (!anyMatches(nextIndices, index)) {
                            continue;
                        }
                        eFence.include(
                            fetchSuccessors(result, PathUtils.index(path, index), obj));
                    }
                    return eFence.ready();
                }));
            }
        }
        return fence.ready();
    }

    /**
     * Substitute wildcards from left to right for the given list of indices
     *
     * <p>
     * Takes each pattern and substitutes its wildcards for the given indices, starting from the
     * left and working right. This object is unmodified, and the result is returned.
     *
     * <p>
     * If there are fewer wildcards in a pattern than given, only the left-most indices are taken.
     * If there are fewer indices than wildcards in a pattern, then the right-most wildcards are
     * left in the resulting pattern. Note while rare, attribute wildcards are substituted, too.
     *
     * @param indices the indices to substitute
     * @return the pattern or matcher with the applied substitutions
     */
    PathPredicates applyKeys(List<String> indices);

    /** Varargs convenience for {@link #applyKeys(List)}. */
    default PathPredicates applyIndices(String... indices) {
        return applyKeys(List.of(indices));
    }

    /**
     * Test if any patterns are contained here
     *
     * <p>
     * Note that the presence of a pattern does not guarantee the presence of a matching object.
     * However, the absence of any pattern does guarantee no object can match.
     *
     * @return true if no patterns are present, false otherwise
     */
    boolean isEmpty();
}
| apache-2.0 |
kingston-csj/jforgame | jforgame-server/src/main/java/jforgame/server/cross/core/client/CrossTransportManager.java | 4598 | package jforgame.server.cross.core.client;
import jforgame.common.thread.NamedThreadFactory;
import jforgame.server.cross.core.callback.CallBackService;
import jforgame.server.cross.core.callback.CallTimeoutException;
import jforgame.server.cross.core.callback.G2FCallBack;
import jforgame.server.cross.core.callback.RequestCallback;
import jforgame.server.cross.core.callback.RequestResponseFuture;
import jforgame.socket.HostAndPort;
import jforgame.socket.message.Message;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Transport manager for cross-server messaging.
 *
 * <p>Maintains a pool of sessions to remote servers and a fixed set of single-threaded
 * executors so that messages to the same host/port are dispatched in order on one lane.
 */
public class CrossTransportManager {

    private static volatile CrossTransportManager instance;

    /** Number of dispatch lanes; one single-threaded executor per lane. */
    private int defaultCoreSum = Runtime.getRuntime().availableProcessors();

    private ExecutorService[] services;

    private C2SSessionPoolFactory sessionFactory;

    // NOTE(review): currently unused in this class; kept to avoid breaking anything that
    // may reach it reflectively — confirm before removing.
    private AtomicInteger idFactory = new AtomicInteger();

    /**
     * Returns the lazily-initialized singleton. Double-checked locking is safe here
     * because {@code instance} is declared volatile.
     */
    public static CrossTransportManager getInstance() {
        if (instance != null) {
            return instance;
        }
        synchronized (CrossTransportManager.class) {
            if (instance == null) {
                CrossTransportManager obj = new CrossTransportManager();
                obj.init();
                instance = obj;
            }
        }
        return instance;
    }

    /** Creates the dispatch executors and the session pool. */
    private void init() {
        services = new ExecutorService[defaultCoreSum];
        for (int i = 0; i < defaultCoreSum; i++) {
            services[i] = Executors.newSingleThreadExecutor(new NamedThreadFactory("cross-ladder-transport" + i));
        }
        GenericObjectPoolConfig config = new GenericObjectPoolConfig();
        config.setMaxTotal(5);
        config.setMaxWaitMillis(5000);
        sessionFactory = new C2SSessionPoolFactory(config);
    }

    /**
     * Sends a message synchronously on a pooled session.
     *
     * <p>NOTE(review): the borrowed session is never returned to the pool here, unlike
     * {@link #request(HostAndPort, G2FCallBack)} — verify whether this is intentional.
     *
     * @param ip      remote host
     * @param port    remote port
     * @param message message to send
     */
    public void sendMessage(String ip, int port, Message message) {
        CCSession session = sessionFactory.borrowSession(ip, port);
        session.sendMessage(message);
    }

    /**
     * Sends a message asynchronously; messages for the same host/port always run on the
     * same single-threaded executor, preserving per-destination ordering.
     *
     * @param ip      remote host
     * @param port    remote port
     * @param message message to send
     */
    public void sendMessageAsync(String ip, int port, Message message) {
        // Separator avoids key collisions such as ("1.2.3.4", 56) vs ("1.2.3.45", 6).
        String key = ip + ":" + port;
        // BUGFIX: String.hashCode() may be negative, so a plain '%' could produce a
        // negative array index and throw ArrayIndexOutOfBoundsException. floorMod is
        // guaranteed non-negative for a positive divisor.
        int index = Math.floorMod(key.hashCode(), defaultCoreSum);
        services[index].submit(() -> {
            sendMessage(ip, port, message);
        });
    }

    /**
     * Sends a request and waits for its response (RPC-style call with a return value).
     *
     * @param addr    target server address
     * @param request request message carrying a correlation index
     * @return the response message
     * @throws InterruptedException if the calling thread is interrupted while waiting
     * @throws CallTimeoutException if no response arrives within the timeout
     */
    public Message request(HostAndPort addr, G2FCallBack request) throws InterruptedException, CallTimeoutException {
        int timeout = 5000;
        int index = request.getIndex();
        request.serialize();
        CCSession session = C2SSessionPoolFactory.getInstance().borrowSession(addr.getHost(), addr.getPort());
        session.sendMessage(request);
        final RequestResponseFuture future = new RequestResponseFuture(index, timeout, null);
        try {
            CallBackService.getInstance().register(index, future);
            Message responseMessage = future.waitResponseMessage(timeout);
            if (responseMessage == null) {
                CallTimeoutException exception = new CallTimeoutException("send request message failed");
                future.setCause(exception);
                throw exception;
            } else {
                return responseMessage;
            }
        } catch (InterruptedException e) {
            future.setCause(e);
            throw e;
        } finally {
            // Always unregister the callback and return the session to the pool.
            CallBackService.getInstance().remove(index);
            C2SSessionPoolFactory.getInstance().returnSession(session);
        }
    }

    /**
     * Sends a request and registers a callback to be invoked when the response arrives.
     *
     * @param addr     target server address
     * @param request  request message carrying a correlation index
     * @param callBack callback invoked with the eventual response
     */
    public void request(HostAndPort addr, G2FCallBack request, RequestCallback callBack) {
        CCSession session = C2SSessionPoolFactory.getInstance().borrowSession(addr.getHost(), addr.getPort());
        request.serialize();
        int timeout = 5000;
        int index = request.getIndex();
        final RequestResponseFuture requestResponseFuture = new RequestResponseFuture(index, timeout, callBack);
        CallBackService.getInstance().register(index, requestResponseFuture);
        // Return the session to the pool once the write completes.
        session.sendMessage(request, () -> {
            C2SSessionPoolFactory.getInstance().returnSession(session);
        });
    }
}
| apache-2.0 |
StartFlow/easeweather | app/src/main/java/com/example/easeweather/gson/Suggestion.java | 571 | package com.example.easeweather.gson;
import com.google.gson.annotations.SerializedName;
/**
* Created by ZS_PC on 2017/10/12.
*/
/**
 * Gson model for the "suggestion" section of the weather response.
 *
 * <p>Field names are bound to JSON keys via {@code @SerializedName}; each nested class
 * wraps a single "txt" string. Public fields are required for Gson reflection binding.
 */
public class Suggestion {

    // JSON key "comf": comfort-index advice.
    @SerializedName("comf")
    public Comfort comfort;

    // JSON key "cw": car-wash advice.
    @SerializedName("cw")
    public CarWash carWash;

    // JSON key "sport" matches the field name, so no annotation is needed.
    public Sport sport;

    public class Comfort {
        @SerializedName("txt")
        public String info;
    }

    public class CarWash {
        @SerializedName("txt")
        public String info;
    }

    public class Sport {
        @SerializedName("txt")
        public String info;
    }
}
| apache-2.0 |
ZhouKanZ/SweepRobot | app/src/main/java/com/gps/sweeprobot/TestActivity.java | 5012 | package com.gps.sweeprobot;
import android.app.Activity;
import android.os.Bundle;
import android.service.carrier.CarrierIdentifier;
import android.support.annotation.IdRes;
import android.support.annotation.Nullable;
import android.view.View;
import android.widget.Button;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.TextView;
import com.gps.ros.android.RosApplication;
import com.gps.ros.android.RosService;
import com.gps.ros.android.TranslationManager;
import com.gps.ros.message.Message;
import com.gps.ros.rosbridge.operation.Advertise;
import com.gps.ros.rosbridge.operation.Publish;
import com.gps.ros.rosbridge.operation.Subscribe;
import com.gps.ros.rosbridge.rosWebsocketHelper.TopicsManager;
import com.gps.sweeprobot.utils.RosProtrocol;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
* @Author : zhoukan
* @CreateDate : 2017/7/17 0017
* @Descriptiong : xxx
*/
/**
 * Manual test screen for the ROS websocket bridge: connect/disconnect the service and
 * exercise advertise/publish/subscribe operations on the speed and position topics.
 */
public class TestActivity extends Activity implements RadioGroup.OnCheckedChangeListener {

    @BindView(R.id.connect)
    Button connect;
    @BindView(R.id.unconnect)
    Button unconnect;
    @BindView(R.id.rb_speed_adv)
    RadioButton rbSpeedAdv;
    @BindView(R.id.rb_position_adv)
    RadioButton rbPositionAdv;
    @BindView(R.id.rg_advertise)
    RadioGroup rgAdvertise;
    @BindView(R.id.btn_commit_adv)
    Button btnCommitAdv;
    @BindView(R.id.rb_speed_pub)
    RadioButton rbSpeedPub;
    @BindView(R.id.rb_position_pub)
    RadioButton rbPositionPub;
    @BindView(R.id.rg_publish)
    RadioGroup rgPublish;
    @BindView(R.id.btn_commit_publish)
    Button btnCommitPublish;
    @BindView(R.id.rb_speed_sub)
    RadioButton rbSpeedSub;
    @BindView(R.id.rb_position_sub)
    RadioButton rbPositionSub;
    @BindView(R.id.rg_subscribe)
    RadioGroup rgSubscribe;
    @BindView(R.id.btn_commit_sub)
    Button btnCommitSub;
    @BindView(R.id.screen)
    TextView screen;

    // Mutable operation templates; the radio groups switch their topic/type between
    // the Speed and Position protocols before the commit buttons send them.
    Advertise advertise = new Advertise();
    Publish publish = new Publish();
    Subscribe subscribe = new Subscribe();

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_test);
        ButterKnife.bind(this);
        rgAdvertise.setOnCheckedChangeListener(this);
        rgPublish.setOnCheckedChangeListener(this);
        rgSubscribe.setOnCheckedChangeListener(this);
        // Defaults: all three operations start on the Speed topic.
        advertise.id = "zktest";
        advertise.topic = RosProtrocol.Speed.TOPIC;
        advertise.type = RosProtrocol.Speed.TYPE;
        publish.id = "zktest";
        publish.topic = RosProtrocol.Speed.TOPIC;
        // NOTE(review): anonymous Message whose print() only calls super — the override
        // appears to be a no-op; confirm whether a concrete payload was intended here.
        publish.msg = new Message() {
            @Override
            public void print() {
                super.print();
            }
        };
        subscribe.topic = RosProtrocol.Speed.TOPIC;
        subscribe.type = RosProtrocol.Speed.TYPE;
    }

    @OnClick({R.id.connect, R.id.unconnect, R.id.btn_commit_adv, R.id.btn_commit_publish, R.id.btn_commit_sub})
    public void onViewClicked(View view) {
        switch (view.getId()) {
            case R.id.connect:
                // Start the websocket RosService against a hard-coded bridge URI.
                Bundle bundle = new Bundle();
                bundle.putString(RosService.ROS_URI_KEY, "ws://192.168.2.142:9090");
                ((MainApplication) getApplication()).startService(getApplication(), bundle, RosService.class);
                break;
            case R.id.unconnect:
                ((MainApplication) getApplication()).stopService();
                break;
            case R.id.btn_commit_adv:
                TranslationManager.advertiseTopic(advertise);
                break;
            case R.id.btn_commit_publish:
                TranslationManager.publish(publish);
                break;
            case R.id.btn_commit_sub:
                TranslationManager.subscribe(subscribe);
                break;
        }
    }

    /** Retargets the matching operation to the Speed or Position protocol. */
    @Override
    public void onCheckedChanged(RadioGroup radioGroup, @IdRes int i) {
        switch (i) {
            case R.id.rb_position_adv:
                advertise.topic = RosProtrocol.Position.TOPIC;
                advertise.type = RosProtrocol.Position.TYPE;
                break;
            case R.id.rb_position_pub:
                publish.topic = RosProtrocol.Position.TOPIC;
                break;
            case R.id.rb_position_sub:
                subscribe.topic = RosProtrocol.Position.TOPIC;
                subscribe.type = RosProtrocol.Position.TYPE;
                break;
            case R.id.rb_speed_adv:
                advertise.topic = RosProtrocol.Speed.TOPIC;
                advertise.type = RosProtrocol.Speed.TYPE;
                break;
            case R.id.rb_speed_pub:
                publish.topic = RosProtrocol.Speed.TOPIC;
                break;
            case R.id.rb_speed_sub:
                subscribe.topic = RosProtrocol.Speed.TOPIC;
                subscribe.type = RosProtrocol.Speed.TYPE;
                break;
        }
    }
}
| apache-2.0 |
amsa-code/risky | risky-util/src/main/java/au/gov/amsa/util/Pair.java | 757 | package au.gov.amsa.util;
/**
 * An immutable two-element tuple.
 *
 * <p>The components are exposed under two naming schemes: {@link #a()}/{@link #b()} and
 * the synonymous {@link #left()}/{@link #right()}.
 *
 * @param <T> type of the first (left) component
 * @param <S> type of the second (right) component
 */
public class Pair<T, S> {

    private final T left;
    private final S right;

    public Pair(T a, S b) {
        this.left = a;
        this.right = b;
    }

    /** Static factory equivalent to {@code new Pair<>(t, s)}. */
    public static <T, S> Pair<T, S> create(T t, S s) {
        return new Pair<>(t, s);
    }

    /** Returns the first component. */
    public T a() {
        return left;
    }

    /** Returns the second component. */
    public S b() {
        return right;
    }

    /** Synonym for {@link #a()}. */
    public T left() {
        return left;
    }

    /** Synonym for {@link #b()}. */
    public S right() {
        return right;
    }

    @Override
    public String toString() {
        // Same rendering as the original StringBuilder version: "Pair [left=<a>, right=<b>]".
        return "Pair [left=" + left + ", right=" + right + "]";
    }
}
| apache-2.0 |
dagnir/aws-sdk-java | aws-java-sdk-inspector/src/main/java/com/amazonaws/services/inspector/model/AgentsAlreadyRunningAssessmentException.java | 5478 | /*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.inspector.model;
import javax.annotation.Generated;
/**
* <p>
* You started an assessment run, but one of the instances is already participating in another assessment run.
* </p>
*/
/**
 * Thrown when an assessment run is started but one of the instances is already
 * participating in another assessment run.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AgentsAlreadyRunningAssessmentException extends com.amazonaws.services.inspector.model.AmazonInspectorException {
    private static final long serialVersionUID = 1L;

    /** Agents already participating in another run. */
    private java.util.List<AgentAlreadyRunningAssessment> agents;
    /** Whether the agent list was truncated. */
    private Boolean agentsTruncated;
    /** Whether the request can be retried immediately. */
    private Boolean canRetry;

    /**
     * Constructs a new AgentsAlreadyRunningAssessmentException with the specified error message.
     *
     * @param message
     *        Describes the error encountered.
     */
    public AgentsAlreadyRunningAssessmentException(String message) {
        super(message);
    }

    /** @return the agents already running an assessment */
    @com.fasterxml.jackson.annotation.JsonProperty("agents")
    public java.util.List<AgentAlreadyRunningAssessment> getAgents() {
        return agents;
    }

    /** Stores a defensive copy of the given collection (or {@code null}). */
    @com.fasterxml.jackson.annotation.JsonProperty("agents")
    public void setAgents(java.util.Collection<AgentAlreadyRunningAssessment> agents) {
        this.agents = (agents == null)
                ? null
                : new java.util.ArrayList<AgentAlreadyRunningAssessment>(agents);
    }

    /**
     * Appends the given values to the current list (if any). Use
     * {@link #setAgents(java.util.Collection)} or {@link #withAgents(java.util.Collection)} to
     * replace the existing values instead.
     *
     * @return this exception, for call chaining
     */
    public AgentsAlreadyRunningAssessmentException withAgents(AgentAlreadyRunningAssessment... agents) {
        if (this.agents == null) {
            this.agents = new java.util.ArrayList<AgentAlreadyRunningAssessment>(agents.length);
        }
        java.util.Collections.addAll(this.agents, agents);
        return this;
    }

    /**
     * Replaces the agent list, then returns this exception for call chaining.
     */
    public AgentsAlreadyRunningAssessmentException withAgents(java.util.Collection<AgentAlreadyRunningAssessment> agents) {
        setAgents(agents);
        return this;
    }

    @com.fasterxml.jackson.annotation.JsonProperty("agentsTruncated")
    public void setAgentsTruncated(Boolean agentsTruncated) {
        this.agentsTruncated = agentsTruncated;
    }

    @com.fasterxml.jackson.annotation.JsonProperty("agentsTruncated")
    public Boolean getAgentsTruncated() {
        return this.agentsTruncated;
    }

    /** Fluent setter for {@link #setAgentsTruncated(Boolean)}. */
    public AgentsAlreadyRunningAssessmentException withAgentsTruncated(Boolean agentsTruncated) {
        setAgentsTruncated(agentsTruncated);
        return this;
    }

    /** Alias of {@link #getAgentsTruncated()}. */
    public Boolean isAgentsTruncated() {
        return this.agentsTruncated;
    }

    @com.fasterxml.jackson.annotation.JsonProperty("canRetry")
    public void setCanRetry(Boolean canRetry) {
        this.canRetry = canRetry;
    }

    /** @return whether the request can be retried immediately */
    @com.fasterxml.jackson.annotation.JsonProperty("canRetry")
    public Boolean getCanRetry() {
        return this.canRetry;
    }

    /** Fluent setter for {@link #setCanRetry(Boolean)}. */
    public AgentsAlreadyRunningAssessmentException withCanRetry(Boolean canRetry) {
        setCanRetry(canRetry);
        return this;
    }

    /** Alias of {@link #getCanRetry()}. */
    public Boolean isCanRetry() {
        return this.canRetry;
    }
}
| apache-2.0 |
dagnir/aws-sdk-java | aws-java-sdk-lexmodelbuilding/src/main/java/com/amazonaws/services/lexmodelbuilding/model/transform/GetIntentResultJsonUnmarshaller.java | 6351 | /*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lexmodelbuilding.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.lexmodelbuilding.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* GetIntentResult JSON Unmarshaller
*/
/**
 * GetIntentResult JSON Unmarshaller
 *
 * <p>Walks the Jackson token stream one field at a time, dispatching each recognized
 * field name at the target depth to the matching setter/sub-unmarshaller, and stops
 * when the object that started at {@code originalDepth} is closed.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetIntentResultJsonUnmarshaller implements Unmarshaller<GetIntentResult, JsonUnmarshallerContext> {

    public GetIntentResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        GetIntentResult getIntentResult = new GetIntentResult();

        // Fields of this result object live exactly one level below the current depth.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null for the whole object yields an empty result.
        if (token == VALUE_NULL) {
            return getIntentResult;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                // Each branch: advance past the field name, then unmarshal its value.
                if (context.testExpression("name", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setName(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("description", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setDescription(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("slots", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setSlots(new ListUnmarshaller<Slot>(SlotJsonUnmarshaller.getInstance()).unmarshall(context));
                }
                if (context.testExpression("sampleUtterances", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setSampleUtterances(new ListUnmarshaller<String>(context.getUnmarshaller(String.class)).unmarshall(context));
                }
                if (context.testExpression("confirmationPrompt", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setConfirmationPrompt(PromptJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("rejectionStatement", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setRejectionStatement(StatementJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("followUpPrompt", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setFollowUpPrompt(FollowUpPromptJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("conclusionStatement", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setConclusionStatement(StatementJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("dialogCodeHook", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setDialogCodeHook(CodeHookJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("fulfillmentActivity", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setFulfillmentActivity(FulfillmentActivityJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("parentIntentSignature", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setParentIntentSignature(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("lastUpdatedDate", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setLastUpdatedDate(context.getUnmarshaller(java.util.Date.class).unmarshall(context));
                }
                if (context.testExpression("createdDate", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setCreatedDate(context.getUnmarshaller(java.util.Date.class).unmarshall(context));
                }
                if (context.testExpression("version", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setVersion(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("checksum", targetDepth)) {
                    context.nextToken();
                    getIntentResult.setChecksum(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Exit once we have climbed back to (or above) the depth we started at,
                // within the same parent element.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return getIntentResult;
    }

    private static GetIntentResultJsonUnmarshaller instance;

    // Lazy, non-thread-safe singleton; the unmarshaller itself is stateless, so a
    // duplicate instance under a race is harmless.
    public static GetIntentResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new GetIntentResultJsonUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
jexp/idea2 | java/java-impl/src/com/intellij/codeInsight/completion/StaticMembersWeigher.java | 2152 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.psi.*;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.annotations.NotNull;
/**
* @author peter
*/
public class StaticMembersWeigher extends CompletionWeigher {
public Comparable weigh(@NotNull LookupElement element, CompletionLocation loc) {
if (loc.getCompletionType() != CompletionType.BASIC) return 0;
final PsiElement position = loc.getCompletionParameters().getPosition();
if (!position.isValid()) return 0;
if (PsiTreeUtil.getParentOfType(position, PsiDocComment.class) != null) return 0;
if (position.getParent() instanceof PsiReferenceExpression) {
final PsiReferenceExpression refExpr = (PsiReferenceExpression)position.getParent();
final PsiElement qualifier = refExpr.getQualifier();
if (qualifier == null) {
return 0;
}
if (!(qualifier instanceof PsiJavaCodeReferenceElement) || !(((PsiJavaCodeReferenceElement)qualifier).resolve() instanceof PsiClass)) {
return 0;
}
}
final Object o = element.getObject();
if (!(o instanceof PsiMember)) return 0;
if (((PsiMember)o).hasModifierProperty(PsiModifier.STATIC)) {
if (o instanceof PsiMethod) return 5;
if (o instanceof PsiField) return 4;
}
if (o instanceof PsiClass && ((PsiClass) o).getContainingClass() != null) {
return 3;
}
//instance method or field
return 5;
}
}
| apache-2.0 |
google/intellij-protocol-buffer-editor | core/src/main/java/com/google/devtools/intellij/protoeditor/lang/annotation/OptionOccurrenceTracker.java | 19115 | /*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devtools.intellij.protoeditor.lang.annotation;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.devtools.intellij.protoeditor.lang.PbLangBundle;
import com.google.devtools.intellij.protoeditor.lang.psi.PbAggregateValue;
import com.google.devtools.intellij.protoeditor.lang.psi.PbField;
import com.google.devtools.intellij.protoeditor.lang.psi.PbFile;
import com.google.devtools.intellij.protoeditor.lang.psi.PbMessageType;
import com.google.devtools.intellij.protoeditor.lang.psi.PbNamedTypeElement;
import com.google.devtools.intellij.protoeditor.lang.psi.PbOneofDefinition;
import com.google.devtools.intellij.protoeditor.lang.psi.PbOptionExpression;
import com.google.devtools.intellij.protoeditor.lang.psi.PbOptionName;
import com.google.devtools.intellij.protoeditor.lang.psi.PbOptionOwner;
import com.google.devtools.intellij.protoeditor.lang.psi.PbTextElement;
import com.google.devtools.intellij.protoeditor.lang.psi.PbTextExtensionName;
import com.google.devtools.intellij.protoeditor.lang.psi.PbTextField;
import com.google.devtools.intellij.protoeditor.lang.psi.PbTextFieldName;
import com.google.devtools.intellij.protoeditor.lang.psi.PbTextMessage;
import com.google.devtools.intellij.protoeditor.lang.psi.PbTextRootMessage;
import com.google.devtools.intellij.protoeditor.lang.psi.PbTypeName;
import com.google.devtools.intellij.protoeditor.lang.psi.util.PbPsiImplUtil;
import com.google.devtools.intellij.protoeditor.lang.psi.util.PbPsiUtil;
import com.intellij.lang.annotation.AnnotationHolder;
import com.intellij.psi.PsiElement;
import com.intellij.psi.util.CachedValueProvider.Result;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.util.PsiModificationTracker;
import com.intellij.psi.util.PsiTreeUtil;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* A helper class to track option occurrences within some scope and annotate the following problems:
*
* <ul>
* <li>A non-repeated option specified multiple times
* <li>Multiple members of a oneof specified
* <li>A missing required field
* </ul>
*/
public class OptionOccurrenceTracker {

  // Synthetic top-level occurrence; acts as the scope for unqualified option names.
  private final Occurrence root = new Occurrence(null, null, null);

  // Maps PSI name elements / text messages to the occurrence(s) created for them.
  // A Multimap is used because a repeated text field name can yield several occurrences.
  private final Multimap<PsiElement, Occurrence> elementOccurrences = ArrayListMultimap.create();

  // Instances are obtained only via the static factory methods below.
  private OptionOccurrenceTracker() {}

  /**
   * Returns a tracker containing occurrences for all descendants of the given {@link PbOptionOwner
   * owner}.
   *
   * @param owner the owner
   * @return the tracker
   */
  public static OptionOccurrenceTracker forOptionOwner(PbOptionOwner owner) {
    // Cached on the owner and invalidated on any PSI modification.
    return CachedValuesManager.getCachedValue(
        owner,
        () -> {
          OptionOccurrenceTracker tracker = new OptionOccurrenceTracker();
          tracker.addAllOccurrences(owner);
          return Result.create(tracker, PsiModificationTracker.MODIFICATION_COUNT);
        });
  }

  /**
   * Returns the appropriate tracker to use for the given {@link PbTextMessage message}.
   *
   * <p>The tracker will be rooted at one of the following:
   *
   * <ul>
   *   <li>The closest descendant Any value if the given message is within an embedded Any
   *   <li>The top-level message if the file is standalone
   *   <li>The host {@link PbOptionOwner} if the file is injected
   * </ul>
   *
   * <p>The tracker is populated with all descendants of the determined root, except for those under
   * embedded Any values.
   *
   * @param message the message
   * @return the tracker, or {@code null} if no suitable root can be determined
   */
  @Nullable
  public static OptionOccurrenceTracker forMessage(PbTextMessage message) {
    return CachedValuesManager.getCachedValue(
        message,
        () -> Result.create(computeForMessage(message), PsiModificationTracker.MODIFICATION_COUNT));
  }

  // Walks up from the message to find the tracker root described in forMessage()'s javadoc.
  private static OptionOccurrenceTracker computeForMessage(PbTextMessage message) {
    if (message instanceof PbTextRootMessage) {
      if (message.getContainingFile() instanceof PbFile) {
        // Injected text format: delegate to the host option owner's tracker.
        PbOptionOwner owner = PbPsiImplUtil.getOptionOwner(message);
        if (owner == null) {
          return null;
        }
        return OptionOccurrenceTracker.forOptionOwner(owner);
      }
      // Standalone file: the root message itself is the tracker root.
      OptionOccurrenceTracker tracker = new OptionOccurrenceTracker();
      tracker.addAllOccurrences(message);
      return tracker;
    }
    // Any messages are themselves roots.
    if (isAnyBody(message)) {
      OptionOccurrenceTracker tracker = new OptionOccurrenceTracker();
      tracker.addAllOccurrences(message);
      return tracker;
    }
    // Otherwise share the tracker of the enclosing message.
    PbTextMessage parent = PsiTreeUtil.getParentOfType(message, PbTextMessage.class);
    if (parent == null) {
      return null;
    }
    return forMessage(parent);
  }

  // Returns true when the message is the body of an embedded Any value
  // (i.e. its parent field name is an Any type URL extension).
  private static boolean isAnyBody(PbTextMessage message) {
    PbTextField parentField = PsiTreeUtil.getParentOfType(message, PbTextField.class);
    if (parentField == null) {
      return false;
    }
    PbTextExtensionName extensionName = parentField.getFieldName().getExtensionName();
    return extensionName != null && extensionName.isAnyTypeUrl();
  }

  /** Returns the root occurrence. */
  @NotNull
  public Occurrence getRootOccurrence() {
    return root;
  }

  /** Return the occurrence for the given {@link PbOptionName}, or {@code null} if none was recorded. */
  @Nullable
  public Occurrence getOccurrence(PbOptionName name) {
    return elementOccurrences.get(name).stream().findFirst().orElse(null);
  }

  /** Return the occurrences for the given {@link PbTextFieldName}. */
  @NotNull
  public Collection<Occurrence> getOccurrences(PbTextFieldName name) {
    return elementOccurrences.get(name);
  }

  /** Return the occurrence containing the fields within the given {@link PbTextMessage}. */
  @Nullable
  public Occurrence getOccurrence(PbTextMessage message) {
    return elementOccurrences.get(message).stream().findFirst().orElse(null);
  }

  // Records every option expression of the owner, recursing into aggregate (text format) values.
  private void addAllOccurrences(PbOptionOwner optionOwner) {
    for (PbOptionExpression option : optionOwner.getOptions()) {
      PbOptionName name = option.getOptionName();
      Occurrence occurrence = addName(name);
      if (occurrence == null) {
        continue;
      }
      PbAggregateValue aggregateValue = option.getAggregateValue();
      if (aggregateValue != null) {
        addTextMessage(aggregateValue, occurrence);
      }
    }
  }

  // Records every field of a standalone/Any-rooted text message under the root occurrence.
  private void addAllOccurrences(PbTextMessage message) {
    addTextMessage(message, root);
  }

  // Registers a (possibly qualified) option name; the final component is added, not merged.
  private Occurrence addName(PbOptionName name) {
    return addName(name, false);
  }

  // Registers the given option name, first registering its qualifier chain recursively.
  // Returns null when any component fails to resolve to a field.
  private Occurrence addName(PbOptionName name, boolean merge) {
    PbOptionName qualifier = name.getQualifier();
    Occurrence occurrence;
    if (qualifier != null) {
      occurrence = addName(qualifier, true);
      if (occurrence == null) {
        return null;
      }
    } else {
      occurrence = root;
    }
    PbField field = resolveField(name);
    if (field == null) {
      return null;
    }
    // Qualifier names (e.g., "foo" and "bar" in "foo.bar.baz") are merged into the occurrence
    // (added only if they have not previously been added).
    //
    // This is because multiple qualified statements are merged together. e.g.,
    //   option (foo).bar.x = 1;
    //   option (foo).bar.y = 2;
    //   option (foo).baz = 3;
    //
    // In this example, messages are implicitly created for (foo) and (foo).bar, and the values
    // x, y, and baz are set appropriately.
    Occurrence nextOccurrence;
    if (merge) {
      nextOccurrence = occurrence.mergeOccurrence(field);
    } else {
      nextOccurrence = occurrence.addOccurrence(field);
    }
    elementOccurrences.put(name, nextOccurrence);
    return nextOccurrence;
  }

  // Register fields defined in an aggregate value.
  //
  // Note that the only way a sibling option statement can affect annotations within a text format
  // value is if it fulfills a required field. For example:
  //
  //   option (opt) = {
  //     foo: 1
  //   };
  //   option (opt).foo = 2;
  //
  // In this example, the duplicate annotation would be on (opt).foo, as it is the second
  // definition. And, another example:
  //
  //   option (opt).foo = 2;
  //   option (opt) = {
  //     foo: 1
  //   };
  //
  // In this example, the annotation is placed on the second (opt), as the entire opt field
  // re-definition is a duplicate and this invalid.
  //
  // Finally:
  //
  //   option (opt) = {
  //     foo: {
  //       bar: 1
  //     }
  //   };
  //   option (opt).foo.required = 2;
  //
  // In this example, the second option statement prevents a missing required field annotation from
  // being attached to "foo"
  private void addTextMessage(PbTextMessage message, Occurrence occurrence) {
    elementOccurrences.put(message, occurrence);
    for (PbTextField field : message.getFields()) {
      PbTextFieldName fieldName = field.getFieldName();
      PbTextExtensionName extensionName = fieldName.getExtensionName();
      if (extensionName != null && extensionName.isAnyTypeUrl()) {
        // Any fields are handled specially. Instead of adding a single field occurrence, we add
        // occurrences for each of the type_url and value members of the Any type. And we don't
        // recurse into the descendants of the Any.
        AnyType type = AnyType.forElement(message.getDeclaredMessage());
        if (type != null) {
          elementOccurrences.put(fieldName, occurrence.addOccurrence(type.getTypeUrlField()));
          elementOccurrences.put(fieldName, occurrence.addOccurrence(type.getValueField()));
        }
      } else {
        PbField declaredField = fieldName.getDeclaredField();
        if (declaredField == null) {
          continue;
        }
        // One occurrence per value: a repeated field with several values yields several
        // occurrences, each recursed into independently.
        for (PbTextElement element : field.getValues()) {
          Occurrence nextOccurrence = occurrence.addOccurrence(declaredField);
          elementOccurrences.put(fieldName, nextOccurrence);
          if (element instanceof PbTextMessage) {
            addTextMessage((PbTextMessage) element, nextOccurrence);
          }
        }
      }
    }
  }

  // Resolves an option name component to its declared field, or null.
  private static PbField resolveField(PbOptionName name) {
    return PbPsiUtil.resolveRefToType(name.getEffectiveReference(), PbField.class);
  }

  // Resolves the message type of the field, or null for scalar/unresolvable types.
  private static PbMessageType getFieldType(PbField field) {
    PbTypeName typeName = field.getTypeName();
    if (typeName == null) {
      return null;
    }
    return PbPsiUtil.resolveRefToType(typeName.getEffectiveReference(), PbMessageType.class);
  }

  /**
   * An Occurrence represents an instance of a message in an option setting context. An Occurrence
   * maintains a list of option fields and oneof usage that occur within it.
   *
   * <p>Scopes are necessary to support repeated and recursive option scenarios. For example:
   *
   * <pre>
   *   option (repeated) = {
   *     foo: abc
   *   };
   *   option (repeated) = {
   *     foo: abc
   *   };
   * </pre>
   *
   * In this example, the field "foo" is specified multiple times, but within distinct occurrences.
   * This usage is allowed. And a recursive example:
   *
   * <pre>
   *   option (opt) = {
   *     value: 1
   *     recurse {
   *       value: 1
   *       recurse {
   *         value: 1
   *       }
   *     }
   *   };
   * </pre>
   *
   * Each "recurse" instance corresponds to the same declared field, but defines a unique
   * occurrence. Thus, "value" can be set multiple times.
   */
  public class Occurrence {
    // Occurrences created for each field set within this scope, in insertion order.
    private final Multimap<PbField, Occurrence> registeredFields = ArrayListMultimap.create();
    // First field registered for each oneof, used to flag conflicting oneof members.
    private final Map<PbOneofDefinition, PbField> registeredOneofFields = new HashMap<>();
    // The field this occurrence instantiates; null for the tracker's root occurrence.
    private final PbField field;
    private final PsiElement annotationElement;
    // Enclosing scope; null for the root occurrence.
    private final Occurrence parent;

    private Occurrence(PbField field, PsiElement annotationElement, Occurrence parent) {
      this.field = field;
      this.annotationElement = annotationElement;
      this.parent = parent;
    }

    /**
     * Attach errors with this occurrence to the given holder and element.
     *
     * <p>The following cases are annotated:
     *
     * <ul>
     *   <li>non-repeated fields specified multiple times
     *   <li>multiple members of a oneof specified
     *   <li>missing required fields
     *   <li>repeated message fields initialized without an aggregate value
     * </ul>
     *
     * @param holder the {@link AnnotationHolder}
     * @param annotationElement the element to which error annotations should be attached
     */
    public void annotate(AnnotationHolder holder, PsiElement annotationElement) {
      // The Multimap used to store occurrences for each field maintains insertion order. If
      // this occurrence is not the first in the list, then it's a duplicate. Only duplicates are
      // annotated.
      Occurrence first = parent.firstOccurrence(field);
      if (first != null && !first.equals(this) && !field.isRepeated()) {
        // Already set.
        holder.createErrorAnnotation(
            annotationElement,
            PbLangBundle.message("non.repeated.field.specified.multiple.times", field.getName()));
      } else {
        PbOneofDefinition oneof = field.getOneof();
        PbField previousOneofField = oneof != null ? parent.registeredOneofFields.get(oneof) : null;
        if (previousOneofField != null && !previousOneofField.equals(field)) {
          // Another field in the same oneof was already set.
          holder.createErrorAnnotation(
              annotationElement,
              PbLangBundle.message(
                  "multiple.oneof.fields.specified",
                  field.getName(),
                  previousOneofField.getName(),
                  oneof.getName()));
        }
      }
      // Annotate missing required fields.
      annotateMissingRequiredFields(holder, annotationElement);
    }

    /**
     * Returns <code>true</code> if the given field can be used again within the scope of this
     * occurrence.
     *
     * @param field the field to test
     * @return <code>true</code> if the field can be used.
     */
    public boolean canFieldBeUsed(PbField field) {
      PbOneofDefinition oneof = field.getOneof();
      if (oneof != null && registeredOneofFields.containsKey(oneof)) {
        return false;
      }
      if (registeredFields.containsKey(field) && !field.isRepeated()) {
        return false;
      }
      return true;
    }

    /**
     * Returns <code>true</code> if the given field can be used or merged into an existing usage
     * within the scope of this occurrence.
     *
     * @param field the field to test
     * @return <code>true</code> if the field can be used or merged.
     */
    public boolean canFieldBeUsedOrMerged(PbField field) {
      PbOneofDefinition oneof = field.getOneof();
      if (oneof != null) {
        // The field can be merged if its the already-specified oneof field.
        PbField oneofField = registeredOneofFields.get(oneof);
        if (oneofField != null && !field.equals(oneofField)) {
          return false;
        }
      }
      if (registeredFields.containsKey(field) && !field.isRepeated()) {
        // We always return true for message fields, since they might be used as a namespace rather
        // than a type.
        PbTypeName typeName = field.getTypeName();
        if (typeName == null) {
          return false;
        }
        PbNamedTypeElement namedTypeElement =
            PbPsiUtil.resolveRefToType(typeName.getEffectiveReference(), PbNamedTypeElement.class);
        return namedTypeElement instanceof PbMessageType;
      }
      return true;
    }

    /**
     * Registers usage of the given field within this occurrence.
     *
     * <p>Annotations will be created at this time if:
     *
     * <ul>
     *   <li>the field is non-repeated and has already been used
     *   <li> the field is part of a oneof definition and another field from the same oneof has
     *       already been used
     * </ul>
     *
     * @param field the field to register
     * @return the newly-created {@link Occurrence} instance.
     */
    private Occurrence addOccurrence(PbField field) {
      PbOneofDefinition oneof = field.getOneof();
      if (oneof != null && !registeredOneofFields.containsKey(oneof)) {
        // Only the first member of a oneof is remembered; later members are the conflicts.
        registeredOneofFields.put(oneof, field);
      }
      Occurrence occurrence = new Occurrence(field, annotationElement, this);
      registeredFields.put(field, occurrence);
      return occurrence;
    }

    /**
     * Merges usage of this field with any pre-existing occurrence.
     *
     * <p>If an occurrence already exists for the given field, it is returned. Else, a new
     * occurrence is created via {@link #addOccurrence(PbField)}.
     *
     * @param field the field to merge or register
     * @return the resulting occurrence, either already-existing or newly-created
     */
    private Occurrence mergeOccurrence(PbField field) {
      Occurrence occurrence = firstOccurrence(field);
      if (occurrence != null) {
        // We already have a occurrence for this field. Reuse it.
        return occurrence;
      } else {
        // Create a new occurrence.
        return addOccurrence(field);
      }
    }

    // Annotates the given element when the occurrence's message type has required fields
    // that were never registered in this scope.
    private void annotateMissingRequiredFields(
        AnnotationHolder holder, PsiElement annotationElement) {
      PbMessageType message = getFieldType(field);
      if (message == null) {
        return;
      }
      // Iterate through the message type's fields looking for required fields. If a required field
      // is not present in the occurrence's registeredFields collection, record it for an error
      // annotation.
      List<String> missingFieldNames =
          message
              .getSymbols(PbField.class)
              .stream()
              .filter(f -> f.isRequired() && !registeredFields.containsKey(f))
              .map(PbField::getName)
              .collect(Collectors.toList());
      if (!missingFieldNames.isEmpty()) {
        // Required field is missing.
        holder.createErrorAnnotation(
            annotationElement,
            PbLangBundle.message(
                "missing.required.fields",
                message.getName(),
                String.join(", ", missingFieldNames)));
      }
    }

    // Returns the first occurrence registered for the field in this scope, or null.
    private Occurrence firstOccurrence(PbField field) {
      return registeredFields.get(field).stream().findFirst().orElse(null);
    }
  }
}
| apache-2.0 |
z123/datacollector | spark-executor-protolib/src/main/java/com/streamsets/datacollector/pipeline/executor/spark/ClusterManagerChooserValues.java | 896 | /*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.pipeline.executor.spark;
import com.streamsets.pipeline.api.base.BaseEnumChooserValues;
/**
 * Chooser values backed by the {@link ClusterManager} enum, exposing its constants
 * for selection in stage configuration.
 */
public class ClusterManagerChooserValues extends BaseEnumChooserValues<ClusterManager> {

  /** Builds the chooser from all constants of {@link ClusterManager}. */
  public ClusterManagerChooserValues() {
    super(ClusterManager.class);
  }
}
| apache-2.0 |
topoos/topoos_sdk_android | src/topoos/APIAccess/Results/SocialRelationshipCounterResult.java | 3213 | /**
* Copyright 2014-present topoos
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package topoos.APIAccess.Results;
import org.json.JSONObject;
import org.json.JSONTokener;
import android.util.Log;
import topoos.Constants;
import topoos.Messages;
import topoos.Exception.TopoosException;
import topoos.Objects.*;
/**
 * The Class SocialRelationshipCounterResult: API call result carrying the
 * follower/following counters of a user.
 *
 * @see APICallResult
 * @author topoos
 */
public class SocialRelationshipCounterResult extends APICallResult {

	/** The Relationship counter parsed from the API response, or {@code null} if unset. */
	private RelationshipCounter counters = null;

	/**
	 * Instantiates a new Relationship counter result.
	 */
	public SocialRelationshipCounterResult() {
		super();
	}

	/**
	 * Instantiates a new Relationship counter result.
	 *
	 * @param error
	 *            the error
	 * @param result
	 *            the result
	 */
	public SocialRelationshipCounterResult(String error, String result) {
		super(error, result);
	}

	/**
	 * Instantiates a new Relationship counter result.
	 *
	 * @param error
	 *            the error
	 * @param result
	 *            the result
	 * @param counters
	 *            the counters
	 */
	public SocialRelationshipCounterResult(String error, String result, RelationshipCounter counters) {
		super(error, result);
		this.counters = counters;
	}

	/*
	 * (non-Javadoc)
	 *
	 * Parses the raw JSON payload in Result into a RelationshipCounter.
	 * Any parsing failure is rethrown as a TopoosException(ERROR_PARSE).
	 *
	 * @see topoos.APIAccess.Results.APICallResult#setParameters()
	 */
	@Override
	public void setParameters() throws TopoosException {
		String user_id = null;
		Integer following = 0;
		Integer followedBy = 0;
		if (APIUtils.getcorrectJSONstring(Result) != null) {
			try {
				JSONObject jObject = (JSONObject) new JSONTokener(
						APIUtils.getcorrectJSONstring(Result)).nextValue();
				// The counters live under the "counters" object of the response.
				JSONObject jCounter = jObject.getJSONObject("counters");
				// Extracting content
				user_id = APIUtils.getStringorNull(jCounter, "user_id");
				following = APIUtils.getIntegerorNull(jCounter, "following");
				followedBy = APIUtils.getIntegerorNull(jCounter, "followed_by");
				this.counters = new RelationshipCounter(user_id, following, followedBy);
			} catch (Exception e) {
				if (Constants.DEBUG) {
					e.printStackTrace();
				}
				throw new TopoosException(TopoosException.ERROR_PARSE);
			}
		} else {
			// No payload: leave counters unset and log when debugging.
			if (Constants.DEBUG) {
				Log.i(Constants.TAG, Messages.TOPOOS_NORESULT);
			}
		}
	}

	/**
	 * Gets the Relationship counter.
	 *
	 * @return the Relationship counter, or {@code null} if not parsed yet
	 */
	public RelationshipCounter getRelationshipCounters() {
		return counters;
	}

	/**
	 * Sets the Relationship counter.
	 *
	 * @param counters
	 *            the Relationship counter to set
	 */
	public void setRelationshipCounters(RelationshipCounter counters) {
		this.counters = counters;
	}
}
baquiax/Compiler | cc4/compiler/ast/HexLiteral.java | 375 | package compiler.ast;
public class HexLiteral extends Node {
private String value;
public HexLiteral(String v) {
this.value = v;
}
public String getType() {
return "hex";
}
public void print() {
this.print("");
}
public void print(String padding) {
System.out.println(padding + "Hexadecimal Value: " + this.value);
}
} | apache-2.0 |
ok2c/httpclient | httpclient5-cache/src/main/java/org/apache/hc/client5/http/impl/cache/FileResource.java | 3665 | /*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.client5.http.impl.cache;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.hc.client5.http.cache.Resource;
import org.apache.hc.client5.http.cache.ResourceIOException;
import org.apache.hc.core5.annotation.Contract;
import org.apache.hc.core5.annotation.ThreadingBehavior;
import org.apache.hc.core5.util.Args;
import org.apache.hc.core5.util.ByteArrayBuffer;
/**
* Cache resource backed by a file.
*
* @since 4.1
*/
@Contract(threading = ThreadingBehavior.SAFE)
public class FileResource extends Resource {

    private static final long serialVersionUID = 4132244415919043397L;

    // Holds the backing file; atomically cleared by dispose() so concurrent
    // callers observe the disposed state and the file is deleted at most once.
    private final AtomicReference<File> fileRef;

    // File length captured at construction time.
    private final long len;

    /**
     * Creates a cache resource backed by the given file.
     *
     * @param file the backing file; must not be {@code null}
     */
    public FileResource(final File file) {
        super();
        Args.notNull(file, "File");
        this.fileRef = new AtomicReference<>(file);
        this.len = file.length();
    }

    File getFile() {
        return this.fileRef.get();
    }

    /**
     * Reads the entire backing file into a byte array.
     *
     * @throws ResourceIOException if the resource has already been disposed
     *         or the file cannot be read
     */
    @Override
    public byte[] get() throws ResourceIOException {
        final File file = this.fileRef.get();
        if (file == null) {
            // Fixed typo in message (was "Resouce already dispoased").
            throw new ResourceIOException("Resource already disposed");
        }
        try (final InputStream in = new FileInputStream(file)) {
            final ByteArrayBuffer buf = new ByteArrayBuffer(1024);
            final byte[] tmp = new byte[2048];
            // Renamed from "len" to avoid shadowing the length field above.
            int bytesRead;
            while ((bytesRead = in.read(tmp)) != -1) {
                buf.append(tmp, 0, bytesRead);
            }
            return buf.toByteArray();
        } catch (final IOException ex) {
            throw new ResourceIOException(ex.getMessage(), ex);
        }
    }

    /**
     * Opens a fresh stream over the backing file.
     *
     * @throws ResourceIOException if the resource has already been disposed
     *         or the file no longer exists
     */
    @Override
    public InputStream getInputStream() throws ResourceIOException {
        final File file = this.fileRef.get();
        if (file != null) {
            try {
                return new FileInputStream(file);
            } catch (final FileNotFoundException ex) {
                throw new ResourceIOException(ex.getMessage(), ex);
            }
        }
        throw new ResourceIOException("Resource already disposed");
    }

    /** Returns the file length captured when this resource was created. */
    @Override
    public long length() {
        return len;
    }

    /** Releases the resource, deleting the backing file on first invocation. */
    @Override
    public void dispose() {
        // getAndSet makes disposal idempotent under concurrent calls.
        final File file = this.fileRef.getAndSet(null);
        if (file != null) {
            // Best-effort cleanup; a failed delete cannot be reported from here.
            file.delete();
        }
    }
}
| apache-2.0 |
everttigchelaar/camel-svn | components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/model/complex/twoclassesandonelink/Client.java | 1953 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.dataformat.bindy.model.complex.twoclassesandonelink;
import org.apache.camel.dataformat.bindy.annotation.DataField;
import org.apache.camel.dataformat.bindy.annotation.Link;
@Link
public class Client {

    // Position 2 of the fixed-length record: client number.
    @DataField(pos = 2)
    private String clientNr;

    // Position 3: first name.
    @DataField(pos = 3)
    private String firstName;

    // Position 4: last name.
    @DataField(pos = 4)
    private String lastName;

    public String getClientNr() {
        return this.clientNr;
    }

    public void setClientNr(String clientNr) {
        this.clientNr = clientNr;
    }

    public String getFirstName() {
        return this.firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public String getLastName() {
        return this.lastName;
    }

    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    /** Renders the model fields; null fields print as "null", as before. */
    @Override
    public String toString() {
        return String.format("Model : %s : %s, %s, %s",
                Client.class.getName(), this.clientNr, this.firstName, this.lastName);
    }
}
| apache-2.0 |
magenm/bsming | bsming/common/wx-lib/src/main/java/com/qding/community/common/weixin/vo/template/message/CSNotificationsChanges.java | 1868 | package com.qding.community.common.weixin.vo.template.message;
import java.io.Serializable;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import com.qding.community.common.weixin.vo.template.CommonTemplate;
import com.qding.community.common.weixin.vo.template.TemplateEntry;
/**
 * Template-message payload for a customer-service notification change
 * (presumably a WeChat "weixin" template — package name suggests this; verify
 * against the template registered with the messaging platform).
 * Each field is one named entry of the template's data section.
 */
public class CSNotificationsChanges implements Serializable {

	/**
	 *
	 */
	private static final long serialVersionUID = 6641720966460803041L;

	// Template entries, one per placeholder in the message template.
	private TemplateEntry title;
	private TemplateEntry service;
	private TemplateEntry detail;
	private TemplateEntry time;
	private TemplateEntry location;
	private TemplateEntry reason;
	private TemplateEntry remark;

	public TemplateEntry getTitle() {
		return title;
	}

	public void setTitle(TemplateEntry title) {
		this.title = title;
	}

	public TemplateEntry getService() {
		return service;
	}

	public void setService(TemplateEntry service) {
		this.service = service;
	}

	public TemplateEntry getDetail() {
		return detail;
	}

	public void setDetail(TemplateEntry detail) {
		this.detail = detail;
	}

	public TemplateEntry getTime() {
		return time;
	}

	public void setTime(TemplateEntry time) {
		this.time = time;
	}

	public TemplateEntry getLocation() {
		return location;
	}

	public void setLocation(TemplateEntry location) {
		this.location = location;
	}

	public TemplateEntry getReason() {
		return reason;
	}

	public void setReason(TemplateEntry reason) {
		this.reason = reason;
	}

	public TemplateEntry getRemark() {
		return remark;
	}

	public void setRemark(TemplateEntry remark) {
		this.remark = remark;
	}

	// Reflective multi-line dump of all fields, for logging/debugging.
	public String toString() {
		return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE);
	}
}
| apache-2.0 |
dorzey/assertj-core | src/test/java/org/assertj/core/internal/shorts/Shorts_assertLessThanOrEqualTo_Test.java | 3358 | /**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2016 the original author or authors.
*/
package org.assertj.core.internal.shorts;
import static org.assertj.core.error.ShouldBeLessOrEqual.shouldBeLessOrEqual;
import static org.assertj.core.test.TestData.someInfo;
import static org.assertj.core.test.TestFailures.failBecauseExpectedAssertionErrorWasNotThrown;
import static org.assertj.core.util.FailureMessages.actualIsNull;
import static org.mockito.Mockito.verify;
import org.assertj.core.api.AssertionInfo;
import org.assertj.core.internal.Shorts;
import org.assertj.core.internal.ShortsBaseTest;
import org.junit.Test;
/**
* Tests for <code>{@link Shorts#assertLessThanOrEqualTo(AssertionInfo, Short, short)}</code>.
*
* @author Alex Ruiz
* @author Joel Costigliola
*/
public class Shorts_assertLessThanOrEqualTo_Test extends ShortsBaseTest {

  @Test
  public void should_fail_if_actual_is_null() {
    // Null actuals must be rejected before any comparison takes place.
    thrown.expectAssertionError(actualIsNull());
    shorts.assertLessThanOrEqualTo(someInfo(), null, (short) 8);
  }

  @Test
  public void should_pass_if_actual_is_less_than_other() {
    shorts.assertLessThanOrEqualTo(someInfo(), (short) 6, (short) 8);
  }

  @Test
  public void should_pass_if_actual_is_equal_to_other() {
    shorts.assertLessThanOrEqualTo(someInfo(), (short) 6, (short) 6);
  }

  @Test
  public void should_fail_if_actual_is_greater_than_other() {
    AssertionInfo info = someInfo();
    try {
      shorts.assertLessThanOrEqualTo(info, (short) 8, (short) 6);
    } catch (AssertionError e) {
      // Verify the failure was reported with the expected error description.
      verify(failures).failure(info, shouldBeLessOrEqual((short) 8, (short) 6));
      return;
    }
    // Reaching this point means no AssertionError was thrown at all.
    failBecauseExpectedAssertionErrorWasNotThrown();
  }

  // The remaining tests repeat the scenarios above under a custom comparison
  // strategy that compares shorts by absolute value (see ShortsBaseTest).

  @Test
  public void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
    thrown.expectAssertionError(actualIsNull());
    shortsWithAbsValueComparisonStrategy.assertLessThanOrEqualTo(someInfo(), null, (short) 8);
  }

  @Test
  public void should_pass_if_actual_is_less_than_other_according_to_custom_comparison_strategy() {
    // |6| <= |-8| under the absolute-value strategy.
    shortsWithAbsValueComparisonStrategy.assertLessThanOrEqualTo(someInfo(), (short) 6, (short) -8);
  }

  @Test
  public void should_pass_if_actual_is_equal_to_other_according_to_custom_comparison_strategy() {
    // |6| == |-6| under the absolute-value strategy.
    shortsWithAbsValueComparisonStrategy.assertLessThanOrEqualTo(someInfo(), (short) 6, (short) -6);
  }

  @Test
  public void should_fail_if_actual_is_greater_than_other_according_to_custom_comparison_strategy() {
    AssertionInfo info = someInfo();
    try {
      shortsWithAbsValueComparisonStrategy.assertLessThanOrEqualTo(info, (short) -8, (short) 6);
    } catch (AssertionError e) {
      // The error description must mention the comparison strategy in use.
      verify(failures).failure(info, shouldBeLessOrEqual((short) -8, (short) 6, absValueComparisonStrategy));
      return;
    }
    failBecauseExpectedAssertionErrorWasNotThrown();
  }
}
| apache-2.0 |
sody/greatage | labs/greatage-common/src/main/java/org/greatage/util/PathUtils.java | 8144 | /*
* Copyright (c) 2008-2011 Ivan Khalopik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.greatage.util;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* This interface represents utility methods for working with resource paths.
*
* @author Ivan Khalopik
* @since 1.0
*/
public abstract class PathUtils {

    /** Standard separator used between path segments in normalized paths. */
    public static final String PATH_SEPARATOR = "/";

    /** Separator between a resource name and its type (file extension). */
    public static final String TYPE_SEPARATOR = ".";

    /** Separator between a resource name and its locale suffix. */
    public static final String LOCALE_SEPARATOR = "_";

    // Matches a single backslash so Windows-style separators can be replaced.
    private static final Pattern BACK_SLASH_PATTERN = Pattern.compile("\\\\");
    // Matches repeated slashes, excluding scheme prefixes such as "http://"
    // (the character before the first slash must not be ':' or '/').
    private static final Pattern DOUBLE_SLASH_PATTERN = Pattern.compile("([^:/]/)/+");
    // Matches a single trailing slash, again excluding a "://"-style ending.
    private static final Pattern END_SLASH_PATTERN = Pattern.compile("([^:/])/$");
    // Matches the per-segment ant wildcards '?' and '*'.
    private static final Pattern ANT_TEXT_PATTERN = Pattern.compile("\\?|\\*");

    // '**' matches any number of path segments.
    private static final String ANT_ANY_PATH_MARKER = "**";
    // '*' matches zero or more characters within one segment.
    private static final String ANT_ANY_STRING_MARKER = "*";
    // '?' matches exactly one character within one segment.
    private static final String ANT_ANY_CHARACTER_MARKER = "?";

    /**
     * Calculates resource path from given absolute location, resource name, type and locale using
     * formula {@code location/name_locale.type}.
     *
     * @param location absolute resource location, can be {@code null}
     * @param name     resource name, not {@code null}
     * @param type     resource type, can be {@code null} or empty (then omitted)
     * @param locale   resource locale, can be {@code null} or empty (then omitted)
     * @return calculated full resource path, not {@code null}
     */
    public static String calculatePath(final String location, final String name, final String type, final Locale locale) {
        final StringBuilder builder = new StringBuilder();
        if (location != null) {
            builder.append(location);
            // Ensure exactly one separator between location and name.
            if (!location.endsWith(PATH_SEPARATOR)) {
                builder.append(PATH_SEPARATOR);
            }
        }
        builder.append(name);
        // An empty locale string (e.g. new Locale("")) is treated as absent.
        if (locale != null && !StringUtils.isEmpty(locale.toString())) {
            builder.append(LOCALE_SEPARATOR).append(locale);
        }
        if (!StringUtils.isEmpty(type)) {
            builder.append(TYPE_SEPARATOR).append(type);
        }
        return builder.toString();
    }

    /**
     * Normalizes given path by replacing file-dependent path separators like {@code '\'} with
     * standard {@code '/'}. It also removes double slashes and the ending slash, while leaving
     * scheme prefixes such as {@code "http://"} intact.
     *
     * @param path path which must be normalized, can be {@code null}
     * @return normalized path, or {@code null} if the given path was {@code null}
     */
    public static String normalizePath(final String path) {
        if (path == null) {
            return null;
        }
        String normalizedPath = BACK_SLASH_PATTERN.matcher(path).replaceAll(PATH_SEPARATOR);
        // Collapse runs of slashes to one (keeping "://" untouched) …
        normalizedPath = DOUBLE_SLASH_PATTERN.matcher(normalizedPath).replaceAll("$1");
        // … and strip a single trailing slash.
        return END_SLASH_PATTERN.matcher(normalizedPath).replaceAll("$1");
    }

    /**
     * Tests whether a given path matches against ant-style pattern. The pattern may contain some
     * special characters: <br/> {@code '**'} means any path <br/> {@code '*'} means zero or more
     * characters <br/> {@code '?'} means one and only one character.
     *
     * @param path    path which must be matched against the pattern, not {@code null}
     * @param pattern ant-style pattern to match against, not {@code null}
     * @return {@code true} if path matches against the pattern, {@code false} otherwise
     */
    public static boolean matchAntPath(final String path, final String pattern) {
        // When path starts with a /, pattern also has to start with a / and vice versa
        if (path.startsWith(PATH_SEPARATOR) != pattern.startsWith(PATH_SEPARATOR)) {
            return false;
        }
        final String[] patternDirs = pattern.split(PATH_SEPARATOR);
        final String[] pathDirs = path.split(PATH_SEPARATOR);
        int patternStart = 0;
        int patternEnd = patternDirs.length - 1;
        int pathStart = 0;
        int pathEnd = pathDirs.length - 1;
        // Phase 1: consume matching segments from the front, up to first '**'.
        while (patternStart <= patternEnd && pathStart <= pathEnd) {
            if (patternDirs[patternStart].equals(ANT_ANY_PATH_MARKER)) {
                break;
            }
            if (!matchAntPattern(pathDirs[pathStart], patternDirs[patternStart])) {
                return false;
            }
            patternStart++;
            pathStart++;
        }
        if (pathStart > pathEnd) {
            // String is exhausted: remaining pattern segments may only be '**'.
            for (int i = patternStart; i <= patternEnd; i++) {
                if (!patternDirs[i].equals(ANT_ANY_PATH_MARKER)) {
                    return false;
                }
            }
            return true;
        } else if (patternStart > patternEnd) {
            // String not exhausted, but pattern is. Failure.
            return false;
        }
        // Phase 2: consume matching segments from the back, up to last '**'.
        while (patternStart <= patternEnd && pathStart <= pathEnd) {
            if (patternDirs[patternEnd].equals(ANT_ANY_PATH_MARKER)) {
                break;
            }
            if (!matchAntPattern(pathDirs[pathEnd], patternDirs[patternEnd])) {
                return false;
            }
            patternEnd--;
            pathEnd--;
        }
        if (pathStart > pathEnd) {
            // String is exhausted: remaining pattern segments may only be '**'.
            for (int i = patternStart; i <= patternEnd; i++) {
                if (!patternDirs[i].equals(ANT_ANY_PATH_MARKER)) {
                    return false;
                }
            }
            return true;
        }
        // Phase 3: the remaining pattern is bounded by '**' on both sides.
        // Slide each fixed sub-pattern between consecutive '**' markers along
        // the remaining path segments until it fits.
        while (patternStart != patternEnd && pathStart <= pathEnd) {
            int subPatternEnd = -1;
            for (int i = patternStart + 1; i <= patternEnd; i++) {
                if (patternDirs[i].equals(ANT_ANY_PATH_MARKER)) {
                    subPatternEnd = i;
                    break;
                }
            }
            if (subPatternEnd == patternStart + 1) {
                // '**/**' situation, so skip one
                patternStart++;
                continue;
            }
            // Find the pattern between padIdxStart & padIdxTmp in path between
            // pathStart & pathEnd
            final int patternLength = (subPatternEnd - patternStart - 1);
            final int pathLength = (pathEnd - pathStart + 1);
            int foundPosition = -1;
            strLoop:
            for (int i = 0; i <= pathLength - patternLength; i++) {
                for (int j = 0; j < patternLength; j++) {
                    if (!matchAntPattern(pathDirs[pathStart + i + j], patternDirs[patternStart + j + 1])) {
                        continue strLoop;
                    }
                }
                foundPosition = pathStart + i;
                break;
            }
            if (foundPosition == -1) {
                return false;
            }
            patternStart = subPatternEnd;
            pathStart = foundPosition + patternLength;
        }
        // Whatever pattern remains must be all '**' markers.
        for (int i = patternStart; i <= patternEnd; i++) {
            if (!patternDirs[i].equals(ANT_ANY_PATH_MARKER)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Tests whether a given string matches against a pattern. The pattern may contain two special
     * characters: <br/> {@code '*'} means zero or more characters <br/> {@code '?'} means one and
     * only one character
     *
     * @param text    string which must be matched against the pattern, not {@code null}
     * @param pattern pattern to match against, not {@code null}
     * @return {@code true} if the string matches against the pattern, {@code false} otherwise
     */
    public static boolean matchAntPattern(final String text, final String pattern) {
        // Translate the ant pattern into a regular expression: literal runs
        // are quoted, '?' becomes '.' and '*' becomes '.*'.
        final StringBuilder patternBuilder = new StringBuilder();
        final Matcher matcher = ANT_TEXT_PATTERN.matcher(pattern);
        int position = 0;
        while (matcher.find()) {
            patternBuilder.append(quote(pattern, position, matcher.start()));
            final String match = matcher.group();
            if (ANT_ANY_CHARACTER_MARKER.equals(match)) {
                patternBuilder.append('.');
            } else if (ANT_ANY_STRING_MARKER.equals(match)) {
                patternBuilder.append(".*");
            }
            position = matcher.end();
        }
        // Append the trailing literal run after the last wildcard.
        patternBuilder.append(quote(pattern, position, pattern.length()));
        return Pattern.compile(patternBuilder.toString()).matcher(text).matches();
    }

    /**
     * Quotes text in regexp pattern from specified start to end positions.
     *
     * @param text  text to be quoted
     * @param start start position
     * @param end   end position
     * @return regexp pattern with quoted text or empty string
     */
    private static String quote(final String text, final int start, final int end) {
        if (start >= end) {
            return "";
        }
        return Pattern.quote(text.substring(start, end));
    }
}
| apache-2.0 |
harfalm/Sakai-10.1 | jsf/jsf-widgets/src/java/org/sakaiproject/jsf/tag/PanelEditTag.java | 1436 | /**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/jsf/tags/sakai-10.1/jsf-widgets/src/java/org/sakaiproject/jsf/tag/PanelEditTag.java $
* $Id: PanelEditTag.java 105077 2012-02-24 22:54:29Z ottenhoff@longsight.com $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.jsf.tag;
import org.sakaiproject.jsf.util.JSFDepends;
/**
* A two-column layout for when the user is editing something
* (defined by the style guide).
*/
public class PanelEditTag extends JSFDepends.PanelGridTag
{
  /** Component type registered for the Sakai two-column edit panel. */
  private static final String COMPONENT_TYPE = "org.sakaiproject.PanelEdit";

  /**
   * @return the JSF component type rendered by this tag.
   */
  public String getComponentType()
  {
    return COMPONENT_TYPE;
  }
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-api-gateway/src/main/java/com/amazonaws/services/apigateway/model/FlushStageAuthorizersCacheRequest.java | 5089 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.apigateway.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Request to flush authorizer cache entries on a specified stage.
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class FlushStageAuthorizersCacheRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The string identifier of the associated <a>RestApi</a>.
     * </p>
     */
    private String restApiId;

    /**
     * <p>
     * The name of the stage to flush.
     * </p>
     */
    private String stageName;

    /**
     * <p>
     * The string identifier of the associated <a>RestApi</a>.
     * </p>
     *
     * @param restApiId
     *        The string identifier of the associated <a>RestApi</a>.
     */
    public void setRestApiId(String restApiId) {
        this.restApiId = restApiId;
    }

    /**
     * <p>
     * The string identifier of the associated <a>RestApi</a>.
     * </p>
     *
     * @return The string identifier of the associated <a>RestApi</a>.
     */
    public String getRestApiId() {
        return restApiId;
    }

    /**
     * <p>
     * The string identifier of the associated <a>RestApi</a>.
     * </p>
     *
     * @param restApiId
     *        The string identifier of the associated <a>RestApi</a>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public FlushStageAuthorizersCacheRequest withRestApiId(String restApiId) {
        this.restApiId = restApiId;
        return this;
    }

    /**
     * <p>
     * The name of the stage to flush.
     * </p>
     *
     * @param stageName
     *        The name of the stage to flush.
     */
    public void setStageName(String stageName) {
        this.stageName = stageName;
    }

    /**
     * <p>
     * The name of the stage to flush.
     * </p>
     *
     * @return The name of the stage to flush.
     */
    public String getStageName() {
        return stageName;
    }

    /**
     * <p>
     * The name of the stage to flush.
     * </p>
     *
     * @param stageName
     *        The name of the stage to flush.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public FlushStageAuthorizersCacheRequest withStageName(String stageName) {
        this.stageName = stageName;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format matches the generated SDK style: "{Field: value,...}".
        StringBuilder text = new StringBuilder("{");
        if (getRestApiId() != null) {
            text.append("RestApiId: ").append(getRestApiId()).append(",");
        }
        if (getStageName() != null) {
            text.append("StageName: ").append(getStageName());
        }
        return text.append("}").toString();
    }

    // Null-safe equality helper: true when both are null or equal.
    private static boolean fieldsEqual(Object left, Object right) {
        return left == null ? right == null : left.equals(right);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof also rejects null references.
        if (!(obj instanceof FlushStageAuthorizersCacheRequest)) {
            return false;
        }
        FlushStageAuthorizersCacheRequest that = (FlushStageAuthorizersCacheRequest) obj;
        return fieldsEqual(getRestApiId(), that.getRestApiId())
                && fieldsEqual(getStageName(), that.getStageName());
    }

    @Override
    public int hashCode() {
        // Equivalent to the generated 31-based accumulation with null -> 0.
        int result = 31 + (getRestApiId() == null ? 0 : getRestApiId().hashCode());
        return 31 * result + (getStageName() == null ? 0 : getStageName().hashCode());
    }

    @Override
    public FlushStageAuthorizersCacheRequest clone() {
        return (FlushStageAuthorizersCacheRequest) super.clone();
    }
}
| apache-2.0 |
mhus/mhus-inka | de.mhus.app.inka.morse.server/src/de/mhu/com/morse/channel/sql/FindResult.java | 3731 | package de.mhu.com.morse.channel.sql;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.util.Date;
import java.util.Hashtable;
import java.util.Iterator;
import de.mhu.lib.log.AL;
import de.mhu.com.morse.channel.AbstractSelectResult;
import de.mhu.com.morse.channel.IConnection;
import de.mhu.com.morse.channel.IQueryFunction;
import de.mhu.com.morse.mql.IQueryResult;
import de.mhu.com.morse.obj.AbstractObjectRead;
import de.mhu.com.morse.obj.ITableRead;
import de.mhu.com.morse.types.IAttribute;
import de.mhu.com.morse.utils.MorseException;
import de.mhu.com.morse.utils.ObjectUtil;
public class FindResult extends AbstractSelectResult {
private static AL log = new AL( FindResult.class );
private IAttribute[] attributeTypes;
private String[] values;
private String[] names;
private boolean canNext = true;
private Hashtable<String, Integer> index;
public FindResult(IConnection con, Descriptor desc, IQueryResult result) {
try {
// execute
while ( result.next() ) {
for ( int i = 0; i < desc.attrSize; i++ ) {
if ( desc.attrs[ i ].functionObject != null ) {
SqlUtils.executeFunction( desc.attrs[ i ], result );
}
}
}
attributeTypes = new IAttribute[ desc.attrSize ];
values = new String[ desc.attrSize ];
names = new String[ desc.attrSize ];
index = new Hashtable<String, Integer>();
for ( int i = 0; i < desc.attrSize; i++ ) {
attributeTypes[ i ] = desc.attrs[ i ].functionObject.getType();
values[ i ] = desc.attrs[ i ].functionObject.getResult();
names[ i ] = desc.attrs[ i ].alias;
if ( names[ i ] == null )
names[ i ] = desc.attrs[ i ].name;
index.put( names[ i ], i );
}
returnCode = 1;
} catch (MorseException e) {
log.error( e );
errorCode = -1;
errorInfo = e.toString();
returnCode = 0;
canNext = false;
} catch ( Throwable e ) {
log.error( e );
errorCode = -1;
errorInfo = e.toString();
returnCode = 0;
canNext = false;
}
result.close();
}
@Override
public void close() {
}
@Override
public IAttribute getAttribute(String name) throws MorseException {
return attributeTypes[ getIndex( name ) ];
}
private int getIndex(String name) {
return index.get( name );
}
@Override
public IAttribute getAttribute(int index) throws MorseException {
return attributeTypes[ index ];
}
@Override
public int getAttributeCount() {
return attributeTypes.length;
}
@Override
public String[] getColumns() throws MorseException {
return names;
}
@Override
public String getRawString(String name) throws MorseException {
return getRawString( getIndex( name ) );
}
@Override
public String getRawString(int index) throws MorseException {
return values[ index ];
}
@Override
public ITableRead getTable(String name) throws MorseException {
return null;
}
@Override
public ITableRead getTable(int index) throws MorseException {
return null;
}
@Override
public boolean next() throws MorseException {
if ( canNext ) {
canNext = false;
return true;
}
return false;
}
public boolean reset() throws MorseException {
if ( errorCode != 0 )
return false;
canNext = true;
return true;
}
@Override
public int getPreferedQuereType() {
return IQueryResult.QUEUE_ONE_PACKAGE;
}
public InputStream getInputStream() throws MorseException {
return null;
}
public OutputStream getOutputStream() throws MorseException {
return null;
}
}
| apache-2.0 |
CloudScale-Project/StaticSpotter | plugins/org.reclipse.structure.specification.ui/src/org/reclipse/structure/specification/ui/utils/MathFunctionHelper.java | 2329 | package org.reclipse.structure.specification.ui.utils;
import org.reclipse.math.functions.FunctionParameter;
import org.reclipse.math.functions.MathematicalFunction;
import org.reclipse.structure.specification.PSFunctionParameter;
import org.reclipse.structure.specification.PSFuzzyConstraint;
import org.reclipse.structure.specification.ui.PSPlugin;
/**
* Simply helps to recreate some function corresponding to a fuzzy constraint.
*
* @author harka
* @author Last editor: $Author$
* @version $Revision$ $Date$
*/
public class MathFunctionHelper
{
   /**
    * Re-creates the mathematical function configured on the given fuzzy
    * constraint and populates it with the constraint's parameters.
    *
    * @param constraint the fuzzy constraint, may be {@code null}
    * @return the configured function, or {@code null} when the constraint is
    *         {@code null}, carries no function id, or the function class
    *         could not be loaded or instantiated
    */
   public static MathematicalFunction getMathematicalFunction(
         PSFuzzyConstraint constraint)
   {
      if (constraint == null || constraint.getMathFunctionID() == null
            || constraint.getMathFunctionID().length() < 1)
      {
         return null;
      }
      MathematicalFunction function = null;
      String fClassName = constraint.getMathFunctionID();
      try
      {
         Class<?> fClass = Class.forName(fClassName);
         // Class.newInstance() is deprecated and propagates checked
         // constructor exceptions unwrapped; invoke the no-arg constructor
         // explicitly instead. instanceof also handles the (impossible)
         // null case, so no separate null check is needed.
         Object obj = fClass.getDeclaredConstructor().newInstance();
         if (obj instanceof MathematicalFunction)
         {
            function = (MathematicalFunction) obj;
         }
      }
      catch (ClassNotFoundException e)
      {
         PSPlugin.getDefault()
               .logError(
                     "Could not load mathematical function class " + fClassName
                           + ".", e);
      }
      catch (IllegalAccessException e)
      {
         PSPlugin.getDefault().logError(
               "Access denied during instantiation of mathematical function class "
                     + fClassName + ".", e);
      }
      catch (ReflectiveOperationException e)
      {
         // Covers InstantiationException, NoSuchMethodException and
         // InvocationTargetException (i.e. the constructor itself threw).
         PSPlugin.getDefault().logError(
               "Could not instantiate mathematical function class "
                     + fClassName + ".", e);
      }
      if (function != null)
      {
         // Copy the constraint's parameter set onto the function.
         for (PSFunctionParameter param : constraint.getParameters())
         {
            FunctionParameter p = new FunctionParameter();
            p.setName(param.getName());
            p.setValue(param.getValue());
            function.addToParams(param.getName(), p);
         }
      }
      return function;
   }
}
| apache-2.0 |
indeedeng/ttorrent | ttorrent-protocol/src/main/java/com/turn/ttorrent/protocol/SuppressWarnings.java | 354 | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.turn.ttorrent.protocol;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Project-local warning-suppression marker. Unlike
 * {@code java.lang.SuppressWarnings}, this annotation is retained at runtime
 * and takes a single (non-array) warning name. Note that it shadows the
 * standard annotation when imported, so use fully qualified names with care.
 *
 * @author shevek
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface SuppressWarnings {
    public String value();
}
| apache-2.0 |
i0712326/bcnx | BCNXST/src/com/bcnx/ui/acquirer/action/BalMsgSaveAction.java | 3913 | package com.bcnx.ui.acquirer.action;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JComboBox;
import javax.swing.JOptionPane;
import javax.swing.JTextField;
import org.springframework.context.ApplicationContext;
import com.bcnx.application.context.BcnxApplicationContext;
import com.bcnx.data.entity.BalMsg;
import com.bcnx.data.service.BalMsgService;
public class BalMsgSaveAction implements ActionListener {
private JComboBox<String> de02Box;
private JComboBox<String> de03Box;
private JTextField de28Field;
private JComboBox<String> de32Box;
private JTextField de41Field;
private JTextField de42Field;
private JTextField de43Field;
private JTextField de52Field;
public BalMsgSaveAction(JComboBox<String> de02Box, JComboBox<String> de03Box,
JTextField de28Field, JComboBox<String> de32Box, JTextField de41Field,
JTextField de42Field, JTextField de43Field, JTextField de52Field) {
this.de02Box = de02Box;
this.de03Box = de03Box;
this.de28Field = de28Field;
this.de32Box = de32Box;
this.de41Field = de41Field;
this.de42Field = de42Field;
this.de43Field = de43Field;
this.de52Field = de52Field;
}
@Override
public void actionPerformed(ActionEvent e) {
String de02 = (String) de02Box.getSelectedItem();
String de03 = (String) de03Box.getSelectedItem();
String de28 = de28Field.getText().trim();
String de32 = (String) de32Box.getSelectedItem();
String de41 = de41Field.getText().trim();
String de42 = de42Field.getText().trim();
String de43 = de43Field.getText().trim();
String de52 = de52Field.getText().trim();
if(de02 == null || de02.isEmpty()){
JOptionPane.showMessageDialog(null, "CARD NUMBER is Empty", "ERROR MESSSAGE", JOptionPane.ERROR_MESSAGE);
return ;
}
if(de03 == null || de03.isEmpty()){
JOptionPane.showMessageDialog(null, "PROCESSING CODE is Empty", "ERROR MESSAGE", JOptionPane.ERROR_MESSAGE);
return ;
}
if(de28==null||de28.isEmpty()){
JOptionPane.showMessageDialog(null, "FEE is Empty", "ERROR MESSAGE", JOptionPane.ERROR_MESSAGE);
return;
}
if(de32 == null || de32.isEmpty()){
JOptionPane.showMessageDialog(null, "Acquirer ID is empty", "ERROR MESSAGE", JOptionPane.ERROR_MESSAGE);
return;
}
if(de41 == null || de41.isEmpty()){
JOptionPane.showMessageDialog(null, "ATM ID is Empty", "ERROR MESSAGE", JOptionPane.ERROR_MESSAGE);
return;
}
if(de42 == null || de42.isEmpty()){
JOptionPane.showMessageDialog(null, "ATM Name is Empty", "ERROR MESSAGE", JOptionPane.ERROR_MESSAGE);
return;
}
if(de43 == null || de43.isEmpty()){
JOptionPane.showMessageDialog(null, "ATM Location is Empty", "ERROR MESSAGE", JOptionPane.ERROR_MESSAGE);
return;
}
if(de52==null || de52.isEmpty()){
JOptionPane.showMessageDialog(null, "PIN Block is Empty", "ERROR MESSAGE", JOptionPane.ERROR_MESSAGE);
return;
}
ApplicationContext context = BcnxApplicationContext.getApplicationContext();
BalMsgService service = (BalMsgService) context.getBean("balMsgService");
BalMsg balMsg = new BalMsg();
String de02Val = String.format("%-16s", de02);
String de03Val = String.format("%-6s", de03);
String de28Val = String.format("%-9s", de28);
String de32Val = String.format("%-6s", de32);
String de41Val = String.format("%-8s", de41);
String de42Val = String.format("%-15s", de42);
String de43Val = String.format("%-40s", de43);
String de52Val = String.format("%-16s", de52);
balMsg.setDe02(de02Val);
balMsg.setDe03(de03Val);
balMsg.setDe28(de28Val);
balMsg.setDe32(de32Val);
balMsg.setDe41(de41Val);
balMsg.setDe42(de42Val);
balMsg.setDe43(de43Val);
balMsg.setDe52(de52Val);
int chk = service.checkRecord();
if(chk > 0)
service.update(balMsg);
else
service.save(balMsg);
}
}
| apache-2.0 |
lijunguan/AlbumSelector | imageseletor/src/main/java/io/github/lijunguan/imgselector/base/BaseFragment.java | 927 | package io.github.lijunguan.imgselector.base;
import android.content.Context;
import android.support.v4.app.Fragment;
import java.lang.ref.WeakReference;
import io.github.lijunguan.imgselector.album.AlbumActivity;
/**
* Created by lijunguan on 2016/4/8
* email: lijunguan199210@gmail.com
* blog : https://lijunguan.github.io
*/
public class BaseFragment extends Fragment {

    // Hosting activity, cached on attach so subclasses can use it as a Context.
    protected AlbumActivity mContext;

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        // Cache the hosting AlbumActivity for convenient Context access.
        // The original wrapped getActivity() in a WeakReference and
        // dereferenced it immediately, which provided no protection at all
        // (the strong field reference defeats it); the dead indirection is
        // removed. Also fixes the "unexcepted" typo in the error message.
        if (context instanceof AlbumActivity) {
            mContext = (AlbumActivity) getActivity();
        } else {
            throw new IllegalArgumentException("unexpected context ");
        }
    }
}
| apache-2.0 |
dierobotsdie/hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceManager.java | 14352 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.service;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.service.api.records.ComponentState;
import org.apache.hadoop.yarn.service.api.records.Service;
import org.apache.hadoop.yarn.service.api.records.ServiceState;
import org.apache.hadoop.yarn.service.component.Component;
import org.apache.hadoop.yarn.service.component.ComponentEvent;
import org.apache.hadoop.yarn.service.component.ComponentEventType;
import org.apache.hadoop.yarn.service.component.ComponentRestartPolicy;
import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
import org.apache.hadoop.yarn.state.InvalidStateTransitionException;
import org.apache.hadoop.yarn.state.MultipleArcTransition;
import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import static org.apache.hadoop.yarn.service.utils.ServiceApiUtil.jsonSerDeser;
/**
* Manages the state of Service.
*/
public class ServiceManager implements EventHandler<ServiceEvent> {
private static final Logger LOG = LoggerFactory.getLogger(
ServiceManager.class);
private final Service serviceSpec;
private final ServiceContext context;
private final ServiceScheduler scheduler;
private final ReentrantReadWriteLock.ReadLock readLock;
private final ReentrantReadWriteLock.WriteLock writeLock;
private final StateMachine<State, ServiceEventType, ServiceEvent>
stateMachine;
private final UpgradeComponentsFinder componentsFinder;
private final AsyncDispatcher dispatcher;
private final SliderFileSystem fs;
private String upgradeVersion;
private Queue<org.apache.hadoop.yarn.service.api.records
.Component> compsToUpgradeInOrder;
private static final StateMachineFactory<ServiceManager, State,
ServiceEventType, ServiceEvent> STATE_MACHINE_FACTORY =
new StateMachineFactory<ServiceManager, State,
ServiceEventType, ServiceEvent>(State.STABLE)
.addTransition(State.STABLE, EnumSet.of(State.STABLE,
State.UPGRADING), ServiceEventType.UPGRADE,
new StartUpgradeTransition())
.addTransition(State.STABLE, EnumSet.of(State.STABLE),
ServiceEventType.CHECK_STABLE, new CheckStableTransition())
.addTransition(State.UPGRADING, EnumSet.of(State.STABLE,
State.UPGRADING), ServiceEventType.START,
new CheckStableTransition())
.addTransition(State.UPGRADING, EnumSet.of(State.STABLE,
State.UPGRADING), ServiceEventType.CHECK_STABLE,
new CheckStableTransition())
.installTopology();
public ServiceManager(ServiceContext context) {
Preconditions.checkNotNull(context);
this.context = context;
serviceSpec = context.service;
scheduler = context.scheduler;
stateMachine = STATE_MACHINE_FACTORY.make(this);
dispatcher = scheduler.getDispatcher();
ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
readLock = lock.readLock();
writeLock = lock.writeLock();
fs = context.fs;
componentsFinder = new UpgradeComponentsFinder
.DefaultUpgradeComponentsFinder();
}
@Override
public void handle(ServiceEvent event) {
try {
writeLock.lock();
State oldState = getState();
try {
stateMachine.doTransition(event.getType(), event);
} catch (InvalidStateTransitionException e) {
LOG.error(MessageFormat.format(
"[SERVICE]: Invalid event {1} at {2}.", event.getType(),
oldState), e);
}
if (oldState != getState()) {
LOG.info("[SERVICE] Transitioned from {} to {} on {} event.",
oldState, getState(), event.getType());
}
} finally {
writeLock.unlock();
}
}
private State getState() {
this.readLock.lock();
try {
return this.stateMachine.getCurrentState();
} finally {
this.readLock.unlock();
}
}
  /**
   * Handles the UPGRADE event from STABLE: records the target version, sets
   * the service spec state for the requested upgrade flavor (express,
   * auto-finalize or plain), and for express upgrades immediately kicks off
   * the first component upgrade. Any failure keeps the manager STABLE.
   */
  private static class StartUpgradeTransition implements
      MultipleArcTransition<ServiceManager, ServiceEvent, State> {

    @Override
    public State transition(ServiceManager serviceManager,
        ServiceEvent event) {
      serviceManager.upgradeVersion = event.getVersion();
      try {
        if (event.isExpressUpgrade()) {
          // Express upgrades proceed component-by-component in order.
          serviceManager.serviceSpec.setState(ServiceState.EXPRESS_UPGRADING);
          serviceManager.compsToUpgradeInOrder = event
              .getCompsToUpgradeInOrder();
          serviceManager.upgradeNextCompIfAny();
        } else if (event.isAutoFinalize()) {
          // Finalization will happen automatically once components stabilize.
          serviceManager.serviceSpec.setState(ServiceState
              .UPGRADING_AUTO_FINALIZE);
        } else {
          serviceManager.serviceSpec.setState(
              ServiceState.UPGRADING);
        }
        return State.UPGRADING;
      } catch (Throwable e) {
        // Any failure aborts the upgrade attempt and stays STABLE.
        LOG.error("[SERVICE]: Upgrade to version {} failed", event.getVersion(),
            e);
        return State.STABLE;
      }
    }
  }
  /**
   * Re-evaluates whether the service has stabilized. For express upgrades it
   * also advances the per-component upgrade queue. When every component is
   * stable and finalization succeeds, the manager returns to STABLE;
   * otherwise it remains UPGRADING.
   */
  private static class CheckStableTransition implements
      MultipleArcTransition<ServiceManager, ServiceEvent, State> {

    @Override
    public State transition(ServiceManager serviceManager,
        ServiceEvent event) {
      //trigger check of service state
      ServiceState currState = serviceManager.serviceSpec.getState();
      if (currState.equals(ServiceState.STABLE)) {
        return State.STABLE;
      }
      if (currState.equals(ServiceState.EXPRESS_UPGRADING)) {
        // Pop the head component once it has finished upgrading, then kick
        // off the next component (if any) in the ordered queue.
        org.apache.hadoop.yarn.service.api.records.Component component =
            serviceManager.compsToUpgradeInOrder.peek();
        if (!component.getState().equals(ComponentState.NEEDS_UPGRADE) &&
            !component.getState().equals(ComponentState.UPGRADING)) {
          serviceManager.compsToUpgradeInOrder.remove();
        }
        serviceManager.upgradeNextCompIfAny();
      }
      // Finalize when auto-finalize was requested, an explicit START arrived,
      // or an express upgrade has drained its component queue.
      if (currState.equals(ServiceState.UPGRADING_AUTO_FINALIZE) ||
          event.getType().equals(ServiceEventType.START) ||
          (currState.equals(ServiceState.EXPRESS_UPGRADING) &&
              serviceManager.compsToUpgradeInOrder.isEmpty())) {
        ServiceState targetState = checkIfStable(serviceManager.serviceSpec);
        if (targetState.equals(ServiceState.STABLE)) {
          if (serviceManager.finalizeUpgrade()) {
            LOG.info("Service def state changed from {} -> {}", currState,
                serviceManager.serviceSpec.getState());
            return State.STABLE;
          }
        }
      }
      return State.UPGRADING;
    }
  }
private void upgradeNextCompIfAny() {
if (!compsToUpgradeInOrder.isEmpty()) {
org.apache.hadoop.yarn.service.api.records.Component component =
compsToUpgradeInOrder.peek();
ComponentEvent needUpgradeEvent = new ComponentEvent(
component.getName(), ComponentEventType.UPGRADE).setTargetSpec(
component).setUpgradeVersion(upgradeVersion).setExpressUpgrade(true);
context.scheduler.getDispatcher().getEventHandler().handle(
needUpgradeEvent);
}
}
/**
* @return whether finalization of upgrade was successful.
*/
private boolean finalizeUpgrade() {
try {
// save the application id and state to
Service targetSpec = ServiceApiUtil.loadServiceUpgrade(
fs, getName(), upgradeVersion);
targetSpec.setId(serviceSpec.getId());
targetSpec.setState(ServiceState.STABLE);
Map<String, Component> allComps = scheduler.getAllComponents();
targetSpec.getComponents().forEach(compSpec -> {
Component comp = allComps.get(compSpec.getName());
compSpec.setState(comp.getComponentSpec().getState());
});
jsonSerDeser.save(fs.getFileSystem(),
ServiceApiUtil.getServiceJsonPath(fs, getName()), targetSpec, true);
fs.deleteClusterUpgradeDir(getName(), upgradeVersion);
} catch (IOException e) {
LOG.error("Upgrade did not complete because unable to re-write the" +
" service definition", e);
return false;
}
try {
fs.deleteClusterUpgradeDir(getName(), upgradeVersion);
} catch (IOException e) {
LOG.warn("Unable to delete upgrade definition for service {} " +
"version {}", getName(), upgradeVersion);
}
serviceSpec.setState(ServiceState.STABLE);
serviceSpec.setVersion(upgradeVersion);
upgradeVersion = null;
return true;
}
private static ServiceState checkIfStable(Service service) {
// if desired == running
for (org.apache.hadoop.yarn.service.api.records.Component comp :
service.getComponents()) {
if (!comp.getState().equals(
org.apache.hadoop.yarn.service.api.records.ComponentState.STABLE)) {
return service.getState();
}
}
return ServiceState.STABLE;
}
/**
* Service state gets directly modified by ServiceMaster and Component.
* This is a problem for upgrade and flexing. For now, invoking
* ServiceMaster.checkAndUpdateServiceState here to make it easy to fix
* this in future.
*/
public void checkAndUpdateServiceState() {
writeLock.lock();
try {
if (!getState().equals(State.UPGRADING)) {
ServiceMaster.checkAndUpdateServiceState(this.scheduler);
}
} finally {
writeLock.unlock();
}
}
  /**
   * Handles an upgrade request for the service.
   * <p>
   * Loads the target service definition for {@code upgradeVersion}, works
   * out which components need upgrading, and dispatches the corresponding
   * service and component events.
   *
   * @param upgradeVersion version of the service definition to upgrade to
   * @param autoFinalize   whether to trigger a stability check immediately
   *                       when there is nothing to upgrade
   * @param expressUpgrade whether components must be upgraded one at a time
   *                       in dependency order
   * @throws IOException if the target service definition cannot be loaded
   */
  void processUpgradeRequest(String upgradeVersion,
      boolean autoFinalize, boolean expressUpgrade) throws IOException {
    Service targetSpec = ServiceApiUtil.loadServiceUpgrade(
        context.fs, context.service.getName(), upgradeVersion);
    List<org.apache.hadoop.yarn.service.api.records.Component>
        compsNeedUpgradeList = componentsFinder.
        findTargetComponentSpecs(context.service, targetSpec);
    // Remove components from the need-upgrade list when their restart
    // policy doesn't allow upgrade.
    if (compsNeedUpgradeList != null) {
      compsNeedUpgradeList.removeIf(component -> {
        org.apache.hadoop.yarn.service.api.records.Component.RestartPolicyEnum
            restartPolicy = component.getRestartPolicy();
        final ComponentRestartPolicy restartPolicyHandler =
            Component.getRestartPolicyHandler(restartPolicy);
        // Do not allow upgrades for components which have NEVER/ON_FAILURE
        // restart policy
        if (!restartPolicyHandler.allowUpgrades()) {
          LOG.info("The component {} has a restart policy that doesnt " +
              "allow upgrades {} ", component.getName(),
              component.getRestartPolicy().toString());
          return true;
        }
        return false;
      });
    }
    ServiceEvent event = new ServiceEvent(ServiceEventType.UPGRADE)
        .setVersion(upgradeVersion)
        .setAutoFinalize(autoFinalize)
        .setExpressUpgrade(expressUpgrade);
    if (expressUpgrade) {
      // In case of express upgrade components need to be upgraded in order.
      // Once the service manager gets notified that a component finished
      // upgrading, it then issues event to upgrade the next component.
      Map<String, org.apache.hadoop.yarn.service.api.records.Component>
          compsNeedUpgradeByName = new HashMap<>();
      if (compsNeedUpgradeList != null) {
        compsNeedUpgradeList.forEach(component ->
            compsNeedUpgradeByName.put(component.getName(), component));
      }
      // Resolve the dependency order of all components in the target spec,
      // then queue only the ones that need upgrading, in that order.
      List<String> resolvedComps = ServiceApiUtil
          .resolveCompsDependency(targetSpec);
      Queue<org.apache.hadoop.yarn.service.api.records.Component>
          orderedCompUpgrade = new LinkedList<>();
      resolvedComps.forEach(compName -> {
        org.apache.hadoop.yarn.service.api.records.Component component =
            compsNeedUpgradeByName.get(compName);
        if (component != null ) {
          orderedCompUpgrade.add(component);
        }
      });
      event.setCompsToUpgradeInOrder(orderedCompUpgrade);
    }
    context.scheduler.getDispatcher().getEventHandler().handle(event);
    if (compsNeedUpgradeList != null && !compsNeedUpgradeList.isEmpty()) {
      // Non-express upgrade: all components can upgrade in parallel, so fire
      // an upgrade event for each of them right away.
      if (!expressUpgrade) {
        compsNeedUpgradeList.forEach(component -> {
          ComponentEvent needUpgradeEvent = new ComponentEvent(
              component.getName(), ComponentEventType.UPGRADE).setTargetSpec(
              component).setUpgradeVersion(event.getVersion());
          context.scheduler.getDispatcher().getEventHandler().handle(
              needUpgradeEvent);
        });
      }
    } else if (autoFinalize) {
      // nothing to upgrade if upgrade auto finalize is requested, trigger a
      // state check.
      context.scheduler.getDispatcher().getEventHandler().handle(
          new ServiceEvent(ServiceEventType.CHECK_STABLE));
    }
  }
  /**
   * Returns the name of the service.
   *
   * @return the name from the current service specification
   */
  public String getName() {
    return serviceSpec.getName();
  }
  /**
   * State of {@link ServiceManager}.
   */
  public enum State {
    // STABLE: no upgrade in progress. UPGRADING: an upgrade was requested
    // and has not yet been finalized.
    STABLE, UPGRADING
  }
  @VisibleForTesting
  Service getServiceSpec() {
    // Package-private accessor exposed for tests only.
    return serviceSpec;
  }
}
| apache-2.0 |
dkpro/dkpro-lab | dkpro-lab-core/src/main/java/org/dkpro/lab/engine/reporting/ReportingEngine.java | 2495 | /*******************************************************************************
* Copyright 2011
* Ubiquitous Knowledge Processing (UKP) Lab
* Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.dkpro.lab.engine.reporting;
import org.dkpro.lab.engine.ExecutionException;
import org.dkpro.lab.engine.LifeCycleException;
import org.dkpro.lab.engine.TaskContext;
import org.dkpro.lab.engine.TaskContextFactory;
import org.dkpro.lab.engine.TaskExecutionEngine;
import org.dkpro.lab.task.ReportingTask;
import org.dkpro.lab.task.Task;
/**
 * Task execution engine which skips the main execution steps and only executes reports.
 */
public class ReportingEngine
    implements TaskExecutionEngine
{
    private TaskContextFactory contextFactory;

    @Override
    public String run(Task aConfiguration)
        throws ExecutionException, LifeCycleException
    {
        // Only reporting tasks are supported by this engine.
        if (!(aConfiguration instanceof ReportingTask)) {
            throw new ExecutionException("This engine can only execute ["
                    + ReportingTask.class.getName() + "]");
        }

        TaskContext taskContext = null;
        try {
            // Set up a fresh context for this task configuration.
            taskContext = contextFactory.createContext(aConfiguration);

            // Drive the reduced life-cycle: initialize, begin, complete.
            // The complete step is where the reports get executed.
            taskContext.getLifeCycleManager().initialize(taskContext, aConfiguration);
            taskContext.getLifeCycleManager().begin(taskContext, aConfiguration);
            taskContext.getLifeCycleManager().complete(taskContext, aConfiguration);

            return taskContext.getId();
        }
        finally {
            // Tear the context down even if a life-cycle step failed.
            if (taskContext != null) {
                taskContext.getLifeCycleManager().destroy(taskContext, aConfiguration);
            }
        }
    }

    @Override
    public void setContextFactory(TaskContextFactory aContextFactory)
    {
        this.contextFactory = aContextFactory;
    }

    @Override
    public TaskContextFactory getContextFactory()
    {
        return contextFactory;
    }
}
| apache-2.0 |
bazelbuild/intellij | base/src/com/google/idea/blaze/base/ideinfo/CToolchainIdeInfo.java | 6032 | /*
* Copyright 2016 The Bazel Authors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.idea.blaze.base.ideinfo;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import com.google.devtools.intellij.ideinfo.IntellijIdeInfo;
import com.google.idea.blaze.base.model.primitives.ExecutionRootPath;
/** Represents toolchain info from a cc_toolchain or cc_toolchain_suite */
public final class CToolchainIdeInfo implements ProtoWrapper<IntellijIdeInfo.CToolchainIdeInfo> {
  // Options for the C compiler (proto field c_option).
  private final ImmutableList<String> cCompilerOptions;
  // Options for the C++ compiler (proto field cpp_option).
  private final ImmutableList<String> cppCompilerOptions;
  // Built-in include directories of the toolchain.
  private final ImmutableList<ExecutionRootPath> builtInIncludeDirectories;
  // Path of the C++ compiler executable.
  private final ExecutionRootPath cppExecutable;
  // Name of the toolchain target.
  private final String targetName;
  // Instances are created via fromProto(...) or builder().
  private CToolchainIdeInfo(
      ImmutableList<String> cCompilerOptions,
      ImmutableList<String> cppCompilerOptions,
      ImmutableList<ExecutionRootPath> builtInIncludeDirectories,
      ExecutionRootPath cppExecutable,
      String targetName) {
    this.cCompilerOptions = cCompilerOptions;
    this.cppCompilerOptions = cppCompilerOptions;
    this.builtInIncludeDirectories = builtInIncludeDirectories;
    this.cppExecutable = cppExecutable;
    this.targetName = targetName;
  }
  /** Deserializes toolchain info from its protobuf representation. */
  static CToolchainIdeInfo fromProto(IntellijIdeInfo.CToolchainIdeInfo proto) {
    return new CToolchainIdeInfo(
        ImmutableList.copyOf(proto.getCOptionList()),
        ImmutableList.copyOf(proto.getCppOptionList()),
        ProtoWrapper.map(proto.getBuiltInIncludeDirectoryList(), ExecutionRootPath::fromProto),
        ExecutionRootPath.fromProto(proto.getCppExecutable()),
        proto.getTargetName());
  }
  /** Serializes this toolchain info back to its protobuf representation. */
  @Override
  public IntellijIdeInfo.CToolchainIdeInfo toProto() {
    return IntellijIdeInfo.CToolchainIdeInfo.newBuilder()
        .addAllCOption(cCompilerOptions)
        .addAllCppOption(cppCompilerOptions)
        .addAllBuiltInIncludeDirectory(ProtoWrapper.mapToProtos(builtInIncludeDirectories))
        .setCppExecutable(cppExecutable.toProto())
        .setTargetName(targetName)
        .build();
  }
  public ImmutableList<String> getcCompilerOptions() {
    return cCompilerOptions;
  }
  public ImmutableList<String> getCppCompilerOptions() {
    return cppCompilerOptions;
  }
  public ImmutableList<ExecutionRootPath> getBuiltInIncludeDirectories() {
    return builtInIncludeDirectories;
  }
  public ExecutionRootPath getCppExecutable() {
    return cppExecutable;
  }
  public String getTargetName() {
    return targetName;
  }
  /** Returns a new, empty builder. */
  public static Builder builder() {
    return new Builder();
  }
  /** Builder for c toolchain */
  public static class Builder {
    private final ImmutableList.Builder<String> cCompilerOptions = ImmutableList.builder();
    private final ImmutableList.Builder<String> cppCompilerOptions = ImmutableList.builder();
    private final ImmutableList.Builder<ExecutionRootPath> builtInIncludeDirectories =
        ImmutableList.builder();
    ExecutionRootPath cppExecutable;
    String targetName = "";
    public Builder addCCompilerOptions(Iterable<String> cCompilerOptions) {
      this.cCompilerOptions.addAll(cCompilerOptions);
      return this;
    }
    public Builder addCppCompilerOptions(Iterable<String> cppCompilerOptions) {
      this.cppCompilerOptions.addAll(cppCompilerOptions);
      return this;
    }
    public Builder addBuiltInIncludeDirectories(
        Iterable<ExecutionRootPath> builtInIncludeDirectories) {
      this.builtInIncludeDirectories.addAll(builtInIncludeDirectories);
      return this;
    }
    public Builder setCppExecutable(ExecutionRootPath cppExecutable) {
      this.cppExecutable = cppExecutable;
      return this;
    }
    public Builder setTargetName(String targetName) {
      this.targetName = targetName;
      return this;
    }
    // NOTE(review): cppExecutable may still be null here if never set —
    // confirm callers always call setCppExecutable before build.
    public CToolchainIdeInfo build() {
      return new CToolchainIdeInfo(
          cCompilerOptions.build(),
          cppCompilerOptions.build(),
          builtInIncludeDirectories.build(),
          cppExecutable,
          targetName);
    }
  }
  @Override
  public String toString() {
    return "CToolchainIdeInfo{"
        + "\n"
        + "  cCompilerOptions="
        + getcCompilerOptions()
        + "\n"
        + "  cppCompilerOptions="
        + getCppCompilerOptions()
        + "\n"
        + "  builtInIncludeDirectories="
        + getBuiltInIncludeDirectories()
        + "\n"
        + "  cppExecutable='"
        + getCppExecutable()
        + '\''
        + "\n"
        + "  targetName='"
        + getTargetName()
        + '\''
        + "\n"
        + '}';
  }
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    CToolchainIdeInfo that = (CToolchainIdeInfo) o;
    return Objects.equal(getcCompilerOptions(), that.getcCompilerOptions())
        && Objects.equal(getCppCompilerOptions(), that.getCppCompilerOptions())
        && Objects.equal(getBuiltInIncludeDirectories(), that.getBuiltInIncludeDirectories())
        && Objects.equal(getCppExecutable(), that.getCppExecutable())
        && Objects.equal(getTargetName(), that.getTargetName());
  }
  @Override
  public int hashCode() {
    return Objects.hashCode(
        getcCompilerOptions(),
        getCppCompilerOptions(),
        getBuiltInIncludeDirectories(),
        getCppExecutable(),
        getTargetName());
  }
}
| apache-2.0 |
webfirmframework/wff | wffweb/src/main/java/com/webfirmframework/wffweb/tag/html/DocType.java | 16444 | /*
* Copyright 2014-2022 Web Firm Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webfirmframework.wffweb.tag.html;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import com.webfirmframework.wffweb.internal.constants.CommonConstants;
import com.webfirmframework.wffweb.tag.html.attribute.core.AbstractAttribute;
import com.webfirmframework.wffweb.tag.html.core.PreIndexedTagName;
/**
 * Base type for tags which can optionally prepend a document type declaration
 * (by default {@code <!DOCTYPE html>}) to their generated output.
 * The previously copy-pasted prepend/write logic is factored into two private
 * helpers so each overload is a single delegation.
 *
 * @author WFF
 * @since 1.0.0
 * @version 1.0.0
 *
 */
public abstract class DocType extends AbstractHtml {

    private static final long serialVersionUID = 1_0_0L;

    // whether the doc type declaration is prepended to generated output
    private boolean prependDocType;

    // the declaration to prepend; defaults to the HTML5 doc type
    private String docTypeTag = "<!DOCTYPE html>";

    /**
     * should be invoked to generate opening and closing tag
     *
     * @param tagName
     * @param attributes
     */
    public DocType(final String tagName, final AbstractHtml base, final AbstractAttribute[] attributes) {
        super(tagName, base, attributes);
    }

    /**
     * should be invoked to generate opening and closing tag
     *
     * @param preIndexedTagName
     * @param attributes
     * @since 3.0.3
     */
    protected DocType(final PreIndexedTagName preIndexedTagName, final AbstractHtml base,
            final AbstractAttribute[] attributes) {
        super(preIndexedTagName, base, attributes);
    }

    /**
     * Prepends the doc type tag (followed by a newline) to the given html when
     * {@code prependDocType} is enabled; otherwise returns the html unchanged.
     *
     * @param html the html produced by the super implementation
     * @return the html, possibly with the doc type prepended
     */
    private String prependDocTypeIfEnabled(final String html) {
        if (prependDocType) {
            return new StringBuilder(docTypeTag).append('\n').append(html).toString();
        }
        return html;
    }

    /**
     * Writes the doc type tag (followed by a newline) to the given stream when
     * {@code prependDocType} is enabled.
     *
     * @param os           the stream to write to
     * @param charset      the charset used to encode the doc type tag
     * @param flushOnWrite true to flush the stream after writing the tag
     * @return the number of bytes written, 0 if nothing was written
     * @throws IOException if writing to the stream fails
     */
    private int writeDocTypeIfEnabled(final OutputStream os, final Charset charset, final boolean flushOnWrite)
            throws IOException {
        if (!prependDocType) {
            return 0;
        }
        final byte[] docTypeTagBytes = (docTypeTag + "\n").getBytes(charset);
        os.write(docTypeTagBytes);
        if (flushOnWrite) {
            os.flush();
        }
        return docTypeTagBytes.length;
    }

    @Override
    public String toHtmlString() {
        return prependDocTypeIfEnabled(super.toHtmlString());
    }

    @Override
    public String toBigHtmlString() {
        return prependDocTypeIfEnabled(super.toBigHtmlString());
    }

    @Override
    public String toBigHtmlString(final boolean rebuild) {
        return prependDocTypeIfEnabled(super.toBigHtmlString(rebuild));
    }

    @Override
    public String toHtmlString(final boolean rebuild) {
        return prependDocTypeIfEnabled(super.toHtmlString(rebuild));
    }

    @Override
    public int toOutputStream(final OutputStream os) throws IOException {
        final int docTypeTagLength = writeDocTypeIfEnabled(os, CommonConstants.DEFAULT_CHARSET, false);
        return docTypeTagLength + super.toOutputStream(os);
    }

    @Override
    public int toOutputStream(final OutputStream os, final boolean rebuild) throws IOException {
        final int docTypeTagLength = writeDocTypeIfEnabled(os, CommonConstants.DEFAULT_CHARSET, false);
        return docTypeTagLength + super.toOutputStream(os, rebuild);
    }

    @Override
    public int toOutputStream(final OutputStream os, final boolean rebuild, final boolean flushOnWrite)
            throws IOException {
        // as before, the flush after the doc type happens only when the doc
        // type was actually written
        final int docTypeTagLength = writeDocTypeIfEnabled(os, CommonConstants.DEFAULT_CHARSET, flushOnWrite);
        return docTypeTagLength + super.toOutputStream(os, rebuild, flushOnWrite);
    }

    /**
     * @param os object of OutputStream to which the bytes to be written
     * @param charset the charset to encode for the bytes
     *
     * @return the total number of bytes written
     * @throws IOException
     *
     * @since 1.0.0
     * @author WFF
     */
    @Override
    public int toOutputStream(final OutputStream os, final Charset charset) throws IOException {
        final int docTypeTagLength = writeDocTypeIfEnabled(os, charset, false);
        return docTypeTagLength + super.toOutputStream(os, charset);
    }

    /**
     * @param os object of OutputStream to which the bytes to be written
     * @param charset the charset to encode for the bytes
     * @param flushOnWrite true to flush on each write to OutputStream
     *
     * @return the total number of bytes written
     * @throws IOException
     *
     * @since 3.0.2
     * @author WFF
     */
    @Override
    public int toOutputStream(final OutputStream os, final Charset charset, final boolean flushOnWrite)
            throws IOException {
        final int docTypeTagLength = writeDocTypeIfEnabled(os, charset, flushOnWrite);
        return docTypeTagLength + super.toOutputStream(os, charset, flushOnWrite);
    }

    /**
     * @param os object of OutputStream to which the bytes to be written
     * @param charset the name of the charset to encode for the bytes
     * @return the total number of bytes written
     * @throws IOException
     * @since 1.0.0
     * @author WFF
     */
    @Override
    public int toOutputStream(final OutputStream os, final String charset) throws IOException {
        // resolve the charset before any writing so an invalid charset name
        // fails fast, matching the original behavior
        final Charset cs = Charset.forName(charset);
        final int docTypeTagLength = writeDocTypeIfEnabled(os, cs, false);
        return docTypeTagLength + super.toOutputStream(os, cs);
    }

    /**
     * @param os object of OutputStream to which the bytes to be written
     * @param rebuild true to rebuild the tags
     * @param charset the charset to encode for the bytes
     * @return the total number of bytes written
     * @throws IOException
     * @since 1.0.0
     * @author WFF
     */
    @Override
    public int toOutputStream(final OutputStream os, final boolean rebuild, final Charset charset) throws IOException {
        final int docTypeTagLength = writeDocTypeIfEnabled(os, charset, false);
        return docTypeTagLength + super.toOutputStream(os, rebuild, charset);
    }

    /**
     * @param os object of OutputStream to which the bytes to be written
     * @param rebuild true to rebuild the tags
     * @param charset the charset to encode for the bytes
     * @param flushOnWrite true to flush on each write to OutputStream
     * @return the total number of bytes written
     * @throws IOException
     * @since 3.0.2
     * @author WFF
     */
    @Override
    public int toOutputStream(final OutputStream os, final boolean rebuild, final Charset charset,
            final boolean flushOnWrite) throws IOException {
        final int docTypeTagLength = writeDocTypeIfEnabled(os, charset, flushOnWrite);
        return docTypeTagLength + super.toOutputStream(os, rebuild, charset, flushOnWrite);
    }

    /**
     * @param os object of OutputStream to which the bytes to be written
     * @param rebuild true to rebuild the tags
     * @param charset the name of the charset to encode for the bytes
     * @return the total number of bytes written
     * @throws IOException
     * @since 1.0.0
     * @author WFF
     */
    @Override
    public int toOutputStream(final OutputStream os, final boolean rebuild, final String charset) throws IOException {
        final Charset cs = Charset.forName(charset);
        final int docTypeTagLength = writeDocTypeIfEnabled(os, cs, false);
        return docTypeTagLength + super.toOutputStream(os, rebuild, cs);
    }

    @Override
    public int toBigOutputStream(final OutputStream os) throws IOException {
        final int docTypeTagLength = writeDocTypeIfEnabled(os, CommonConstants.DEFAULT_CHARSET, false);
        return docTypeTagLength + super.toBigOutputStream(os);
    }

    @Override
    public int toBigOutputStream(final OutputStream os, final boolean rebuild) throws IOException {
        final int docTypeTagLength = writeDocTypeIfEnabled(os, CommonConstants.DEFAULT_CHARSET, false);
        return docTypeTagLength + super.toBigOutputStream(os, rebuild);
    }

    @Override
    public int toBigOutputStream(final OutputStream os, final Charset charset) throws IOException {
        final int docTypeTagLength = writeDocTypeIfEnabled(os, charset, false);
        return docTypeTagLength + super.toBigOutputStream(os, charset);
    }

    @Override
    public int toBigOutputStream(final OutputStream os, final String charset) throws IOException {
        final Charset cs = Charset.forName(charset);
        final int docTypeTagLength = writeDocTypeIfEnabled(os, cs, false);
        return docTypeTagLength + super.toBigOutputStream(os, cs);
    }

    @Override
    public int toBigOutputStream(final OutputStream os, final boolean rebuild, final Charset charset)
            throws IOException {
        final int docTypeTagLength = writeDocTypeIfEnabled(os, charset, false);
        return docTypeTagLength + super.toBigOutputStream(os, rebuild, charset);
    }

    @Override
    public int toBigOutputStream(final OutputStream os, final boolean rebuild, final String charset)
            throws IOException {
        final Charset cs = Charset.forName(charset);
        final int docTypeTagLength = writeDocTypeIfEnabled(os, cs, false);
        return docTypeTagLength + super.toBigOutputStream(os, rebuild, cs);
    }

    @Override
    public int toBigOutputStream(final OutputStream os, final boolean rebuild, final Charset charset,
            final boolean flushOnWrite) throws IOException {
        final int docTypeTagLength = writeDocTypeIfEnabled(os, charset, flushOnWrite);
        return docTypeTagLength + super.toBigOutputStream(os, rebuild, charset, flushOnWrite);
    }

    // it is not a best practice to print html string by this method because if
    // it is used in ThreadLocal class it may cause memory leak.
    @Override
    public String toString() {
        return super.toString();
    }

    /**
     * @return the prependDocType
     * @author WFF
     * @since 1.0.0
     */
    public boolean isPrependDocType() {
        return prependDocType;
    }

    /**
     * @param prependDocType the prependDocType to set
     * @author WFF
     * @since 1.0.0
     */
    public void setPrependDocType(final boolean prependDocType) {
        this.prependDocType = prependDocType;
    }

    /**
     * @return the docTypeTag
     * @author WFF
     * @since 1.0.0
     */
    public String getDocTypeTag() {
        return docTypeTag;
    }

    /**
     * the default doc type is <code>&lt;!DOCTYPE html&gt;</code>
     *
     * @param docTypeTag the docTypeTag to set
     * @author WFF
     * @since 1.0.0
     */
    public void setDocTypeTag(final String docTypeTag) {
        this.docTypeTag = docTypeTag;
    }
}
| apache-2.0 |
helloShen/thinkinginjava | chapter11/Exercise4.java | 2631 | /**
* Exercise 4
*/
package com.ciaoshen.thinkinjava.chapter11;
import java.util.*;
public class Exercise4 {

    /** Minimal generator abstraction: produces the next value on demand. */
    public static interface Generator<T> {
        public T next();
    }

    /** Generates film names, cycling endlessly over a fixed catalogue. */
    public static class FilmGenerator implements Generator<String> {
        private static final String[] FILMNAME={"肖申克的救赎", "这个杀手不太冷", "阿甘正传", "霸王别姬", "美丽人生",
                                        "千与千寻", "辛德勒的名单", "海上钢琴师", "机器人总动员", "盗梦空间", "泰坦尼克号",
                                        "三傻大闹宝莱坞", "放牛班的春天", "忠犬八公的故事", "大话西游", "龙猫", "教父",
                                        "乱世佳人", "天堂电影院", "当幸福来敲门", "搏击俱乐部", "楚门的世界", "触不可及",
                                        "指环王3","罗马假日"};
        private static final int LENGTH = FILMNAME.length;

        // Index of the next film to return; wraps back to 0 after the last.
        // (The unused instance-id bookkeeping of the original was removed.)
        private int cursor = 0;

        /** @return the next film name, cycling after the last one. */
        public String next() {
            if (cursor == LENGTH) {
                cursor = 0;
            }
            return FILMNAME[cursor++];
        }
    }

    /** @return a fresh generator starting from the first film. */
    public static Generator<String> getFilmGenerator(){
        return new FilmGenerator();
    }

    /**
     * Fills the given array with film names, starting from the first.
     *
     * @param array the array to fill; must not be null
     * @return the same array, for call chaining
     */
    public static String[] getFilms(String[] array) {
        Generator<String> gen = getFilmGenerator();
        for (int i = 0; i < array.length; i++) {
            array[i] = gen.next();
        }
        return array;
    }

    /**
     * Adds {@code filmNum} film names to the given collection. A
     * duplicate-rejecting collection (e.g. a Set) may end up smaller than
     * {@code filmNum} once the generator cycles past its catalogue.
     *
     * @param c       the collection to fill
     * @param filmNum the number of names to generate
     * @return the same collection, for call chaining
     */
    public static Collection<String> getFilms(Collection<String> c, int filmNum) {
        Generator<String> gen = getFilmGenerator();
        for (int i = 0; i < filmNum; i++) {
            c.add(gen.next());
        }
        return c;
    }

    /**
     * Demonstrates filling an array and the standard collection types.
     * Proper generics are used instead of the former raw types, so no
     * {@code @SuppressWarnings} is needed.
     */
    public static void main(String[] args){
        int size = 10;
        // fill array
        System.out.println(">>>Array: ");
        System.out.println(Arrays.toString(getFilms(new String[size])));
        // fill list types (insertion order preserved)
        System.out.println(">>>ArrayList: ");
        System.out.println(getFilms(new ArrayList<String>(), size));
        System.out.println(">>>LinkedList: ");
        System.out.println(getFilms(new LinkedList<String>(), size));
        // fill set types (HashSet unordered, LinkedHashSet insertion order,
        // TreeSet sorted)
        System.out.println(">>>HashSet: ");
        System.out.println(getFilms(new HashSet<String>(), size));
        System.out.println(">>>LinkedHashSet: ");
        System.out.println(getFilms(new LinkedHashSet<String>(), size));
        System.out.println(">>>TreeSet: ");
        System.out.println(getFilms(new TreeSet<String>(), size));
    }
}
| apache-2.0 |
paulnguyen/cmpe279 | eclipse/Roller/src/org/apache/roller/ui/authoring/struts/formbeans/YourWebsitesForm.java | 1439 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. The ASF licenses this file to You
* under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. For additional information regarding
* copyright in this work, please see the NOTICE file in the top level
* directory of this distribution.
*/
package org.apache.roller.ui.authoring.struts.formbeans;
import org.apache.struts.action.ActionForm;
/**
 * Struts form bean holding the invite and website ids submitted by the
 * "your websites" page.
 *
 * @struts.form name="yourWebsitesForm"
 * @author Dave M Johnson
 */
public class YourWebsitesForm extends ActionForm
{
    // invite id form field, populated from the request
    private String inviteId;

    // website id form field, populated from the request
    private String websiteId;

    public String getInviteId()
    {
        return inviteId;
    }

    public void setInviteId(String inviteId)
    {
        this.inviteId = inviteId;
    }

    public String getWebsiteId()
    {
        return websiteId;
    }

    public void setWebsiteId(String websiteId)
    {
        this.websiteId = websiteId;
    }
}
| apache-2.0 |
real-logic/Aeron | aeron-samples/src/main/java/io/aeron/samples/raw/WriteReceiveUdpPing.java | 4290 | /*
* Copyright 2014-2020 Real Logic Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.samples.raw;
import org.HdrHistogram.Histogram;
import org.agrona.concurrent.SigInt;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.DatagramChannel;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.LockSupport;
import static io.aeron.driver.Configuration.MTU_LENGTH_DEFAULT;
import static io.aeron.samples.raw.Common.PING_PORT;
import static io.aeron.samples.raw.Common.PONG_PORT;
import static io.aeron.samples.raw.Common.init;
import static org.agrona.BitUtil.SIZE_OF_LONG;
/**
 * Benchmark used to calculate latency of underlying system.
 * <p>
 * Writes timestamped UDP datagrams to a ping address and records the round
 * trip time of the echoed responses in a histogram, round after round, until
 * interrupted.
 *
 * @see ReceiveWriteUdpPong
 */
public class WriteReceiveUdpPing
{
    /**
     * Runs the benchmark.
     *
     * @param args optional: args[0] is the number of receive channels
     *             (defaults to 1).
     * @throws IOException if the datagram channels cannot be opened or bound.
     */
    public static void main(final String[] args) throws IOException
    {
        int numChannels = 1;
        if (1 == args.length)
        {
            numChannels = Integer.parseInt(args[0]);
        }

        // Histogram covering up to 10s at nanosecond resolution,
        // 3 significant digits.
        final Histogram histogram = new Histogram(TimeUnit.SECONDS.toNanos(10), 3);

        final ByteBuffer buffer = ByteBuffer.allocateDirect(MTU_LENGTH_DEFAULT);

        // One receive channel per port, bound starting at PONG_PORT.
        final DatagramChannel[] receiveChannels = new DatagramChannel[numChannels];
        for (int i = 0; i < receiveChannels.length; i++)
        {
            receiveChannels[i] = DatagramChannel.open();
            init(receiveChannels[i]);
            receiveChannels[i].bind(new InetSocketAddress("localhost", PONG_PORT + i));
        }

        final InetSocketAddress writeAddress = new InetSocketAddress("localhost", PING_PORT);
        final DatagramChannel writeChannel = DatagramChannel.open();
        init(writeChannel, writeAddress);

        // Flip the flag on Ctrl-C so the measurement loop exits cleanly.
        final AtomicBoolean running = new AtomicBoolean(true);
        SigInt.register(() -> running.set(false));

        while (running.get())
        {
            measureRoundTrip(histogram, buffer, receiveChannels, writeChannel, running);

            // Reset between rounds so each report covers a single run, and
            // pause ~1s to let the system settle.
            histogram.reset();
            System.gc();
            LockSupport.parkNanos(1000_000_000);
        }
    }

    /**
     * Sends NUM_MESSAGES sequenced, timestamped datagrams and records each
     * round-trip latency, then prints the percentile distribution.
     */
    private static void measureRoundTrip(
        final Histogram histogram,
        final ByteBuffer buffer,
        final DatagramChannel[] receiveChannels,
        final DatagramChannel writeChannel,
        final AtomicBoolean running)
        throws IOException
    {
        for (int sequenceNumber = 0; sequenceNumber < Common.NUM_MESSAGES; sequenceNumber++)
        {
            final long timestampNs = System.nanoTime();

            // Payload: [sequence number][send timestamp].
            buffer.clear();
            buffer.putLong(sequenceNumber);
            buffer.putLong(timestampNs);
            buffer.flip();

            writeChannel.write(buffer);

            buffer.clear();
            // Poll all receive channels until any one yields the echo.
            boolean available = false;
            while (!available)
            {
                if (!running.get())
                {
                    return;
                }

                for (int i = receiveChannels.length - 1; i >= 0; i--)
                {
                    if (null != receiveChannels[i].receive(buffer))
                    {
                        available = true;
                        break;
                    }
                }
            }

            // A sequence mismatch means a datagram was dropped or reordered.
            final long receivedSequenceNumber = buffer.getLong(0);
            if (receivedSequenceNumber != sequenceNumber)
            {
                throw new IllegalStateException("Data Loss:" + sequenceNumber + " to " + receivedSequenceNumber);
            }

            final long durationNs = System.nanoTime() - buffer.getLong(SIZE_OF_LONG);
            histogram.recordValue(durationNs);
        }

        histogram.outputPercentileDistribution(System.out, 1000.0);
    }
}
| apache-2.0 |
inbloom/secure-data-service | sli/domain/src/test/java/org/slc/sli/validation/schema/ApiNeutralSchemaValidatorTest.java | 4440 | /*
* Copyright 2012-2013 inBloom, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.slc.sli.validation.schema;
import static junit.framework.Assert.assertEquals;
import static org.mockito.Mockito.when;
import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.slc.sli.domain.Entity;
import org.slc.sli.domain.Repository;
import org.slc.sli.validation.NaturalKeyValidationException;
import org.slc.sli.validation.NoNaturalKeysDefinedException;
import org.slc.sli.validation.SchemaRepository;
import java.util.HashMap;
import java.util.Map;
/**
 * Unit tests for {@code ApiNeutralSchemaValidator}, covering tolerance of
 * natural-key extraction failures during validation and dotted-path value
 * lookup over flat and nested maps.
 */
public class ApiNeutralSchemaValidatorTest {

    /** Subject under test; mocks below are injected by Mockito. */
    @InjectMocks
    ApiNeutralSchemaValidator apiNeutralSchemaValidator = new ApiNeutralSchemaValidator();

    @Mock
    INaturalKeyExtractor mockNaturalKeyExtractor;

    @Mock
    SchemaRepository mockSchemaRepository;

    @Mock
    Repository<Entity> mockRepository;

    @Before
    public void initMocks() {
        MockitoAnnotations.initMocks(this);
    }

    /**
     * Validation must still succeed when natural-key extraction throws a
     * {@link NaturalKeyValidationException} and no schema is registered for
     * the entity type.
     */
    @Test
    public void validateNaturalKeyValidationExceptionFromMissingFields() throws NoNaturalKeysDefinedException {
        // setup
        Entity mockEntity = Mockito.mock(Entity.class);
        NaturalKeyValidationException e = new NaturalKeyValidationException(null, null);
        String mockEntityType = "MockEntityType";
        when(mockEntity.getType()).thenReturn(mockEntityType);
        when(mockSchemaRepository.getSchema(mockEntityType)).thenReturn(null);
        when(mockNaturalKeyExtractor.getNaturalKeyFields(mockEntity)).thenThrow(e);

        // test
        boolean result = apiNeutralSchemaValidator.validate(mockEntity);

        // validate: assertTrue is the idiomatic form of assertEquals(true, ...)
        Assert.assertTrue(result);
    }

    /** getValue should resolve simple keys and return null for absent keys. */
    @Test
    public void testGetValueOneLevel() {
        Map<String, Object> data = new HashMap<String, Object>();
        data.put("key1", "value1");
        data.put("key2", "value2");
        data.put("key3", "value3");

        assertEquals("Should match", "value1", apiNeutralSchemaValidator.getValue("key1", data));
        assertEquals("Should match", "value2", apiNeutralSchemaValidator.getValue("key2", data));
        assertEquals("Should match", "value3", apiNeutralSchemaValidator.getValue("key3", data));
        // assertNull expresses the absent-key expectation directly
        Assert.assertNull("Should match", apiNeutralSchemaValidator.getValue("key4", data));
    }

    /** getValue should traverse dotted paths through nested maps. */
    @Test
    public void testGetValueMultiLevel() {
        Map<String, Object> inner2 = new HashMap<String, Object>();
        inner2.put("key7", "value7");
        inner2.put("key8", "value8");

        Map<String, Object> inner1 = new HashMap<String, Object>();
        inner1.put("key4", "value4");
        inner1.put("key5", "value5");
        inner1.put("key6", inner2);

        Map<String, Object> data = new HashMap<String, Object>();
        data.put("key1", "value1");
        data.put("key2", "value2");
        data.put("key3", inner1);

        assertEquals("Should match", "value1", apiNeutralSchemaValidator.getValue("key1", data));
        assertEquals("Should match", "value2", apiNeutralSchemaValidator.getValue("key2", data));
        assertEquals("Should match", inner1, apiNeutralSchemaValidator.getValue("key3", data));
        assertEquals("Should match", "value4", apiNeutralSchemaValidator.getValue("key3.key4", data));
        assertEquals("Should match", "value5", apiNeutralSchemaValidator.getValue("key3.key5", data));
        assertEquals("Should match", inner2, apiNeutralSchemaValidator.getValue("key3.key6", data));
        assertEquals("Should match", "value7", apiNeutralSchemaValidator.getValue("key3.key6.key7", data));
        assertEquals("Should match", "value8", apiNeutralSchemaValidator.getValue("key3.key6.key8", data));
    }
}
| apache-2.0 |
Nickname0806/Test_Q4 | java/org/apache/catalina/loader/WebappLoader.java | 20467 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.loader;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.File;
import java.io.FilePermission;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URLDecoder;
import javax.management.ObjectName;
import javax.servlet.ServletContext;
import org.apache.catalina.Context;
import org.apache.catalina.Globals;
import org.apache.catalina.Lifecycle;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.LifecycleState;
import org.apache.catalina.Loader;
import org.apache.catalina.util.LifecycleMBeanBase;
import org.apache.catalina.util.ToStringUtil;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.ExceptionUtils;
import org.apache.tomcat.util.modeler.Registry;
import org.apache.tomcat.util.res.StringManager;
/**
 * Loader implementation which is specialized for handling web
 * applications in the most efficient way, while being Catalina aware (all
 * accesses to resources are made through
 * {@link org.apache.catalina.WebResourceRoot}).
 * The class loader this Loader creates and manages supports detection of
 * modified Java classes, which can be used to implement auto-reload support.
 * <p>
 * This Loader is configured via the Resources children of its Context
 * prior to calling <code>start()</code>. When a new class is required,
 * these Resources will be consulted first to locate the class. If it
 * is not present, the system class loader will be used instead.
 *
 * @author Craig R. McClanahan
 * @author Remy Maucherat
 */
public class WebappLoader extends LifecycleMBeanBase
        implements Loader, PropertyChangeListener {

    // ----------------------------------------------------------- Constructors

    /**
     * Construct a new WebappLoader with no defined parent class loader
     * (so that the actual parent will be the system class loader).
     */
    public WebappLoader() {
        this(null);
    }

    /**
     * Construct a new WebappLoader with the specified class loader
     * to be defined as the parent of the ClassLoader we ultimately create.
     *
     * @param parent The parent class loader
     */
    public WebappLoader(ClassLoader parent) {
        super();
        this.parentClassLoader = parent;
    }

    // ----------------------------------------------------- Instance Variables

    /**
     * The class loader being managed by this Loader component.
     */
    private WebappClassLoaderBase classLoader = null;

    /**
     * The Context with which this Loader has been associated.
     */
    private Context context = null;

    /**
     * The "follow standard delegation model" flag that will be used to
     * configure our ClassLoader.
     */
    private boolean delegate = false;

    /**
     * The Java class name of the ClassLoader implementation to be used.
     * This class should extend WebappClassLoaderBase, otherwise, a different
     * loader implementation must be used.
     */
    private String loaderClass = ParallelWebappClassLoader.class.getName();

    /**
     * The parent class loader of the class loader we will create.
     */
    private ClassLoader parentClassLoader = null;

    /**
     * The reloadable flag for this Loader.
     */
    private boolean reloadable = false;

    /**
     * The string manager for this package.
     */
    protected static final StringManager sm =
        StringManager.getManager(Constants.Package);

    /**
     * The property change support for this component.
     */
    protected final PropertyChangeSupport support = new PropertyChangeSupport(this);

    /**
     * Classpath set in the loader.
     */
    private String classpath = null;

    // ------------------------------------------------------------- Properties

    /**
     * Return the Java class loader to be used by this Container.
     */
    @Override
    public ClassLoader getClassLoader() {
        return classLoader;
    }

    @Override
    public Context getContext() {
        return context;
    }

    @Override
    public void setContext(Context context) {
        if (this.context == context) {
            return;
        }

        if (getState().isAvailable()) {
            throw new IllegalStateException(
                    sm.getString("webappLoader.setContext.ise"));
        }

        // Deregister from the old Context (if any)
        if (this.context != null) {
            this.context.removePropertyChangeListener(this);
        }

        // Process this property change
        Context oldContext = this.context;
        this.context = context;
        support.firePropertyChange("context", oldContext, this.context);

        // Register with the new Container (if any)
        if (this.context != null) {
            setReloadable(this.context.getReloadable());
            this.context.addPropertyChangeListener(this);
        }
    }

    /**
     * Return the "follow standard delegation model" flag used to configure
     * our ClassLoader.
     */
    @Override
    public boolean getDelegate() {
        return this.delegate;
    }

    /**
     * Set the "follow standard delegation model" flag used to configure
     * our ClassLoader.
     *
     * @param delegate The new flag
     */
    @Override
    public void setDelegate(boolean delegate) {
        boolean oldDelegate = this.delegate;
        this.delegate = delegate;
        support.firePropertyChange("delegate", Boolean.valueOf(oldDelegate),
                Boolean.valueOf(this.delegate));
    }

    /**
     * @return the ClassLoader class name.
     */
    public String getLoaderClass() {
        return this.loaderClass;
    }

    /**
     * Set the ClassLoader class name.
     *
     * @param loaderClass The new ClassLoader class name
     */
    public void setLoaderClass(String loaderClass) {
        this.loaderClass = loaderClass;
    }

    /**
     * Return the reloadable flag for this Loader.
     */
    @Override
    public boolean getReloadable() {
        return this.reloadable;
    }

    /**
     * Set the reloadable flag for this Loader.
     *
     * @param reloadable The new reloadable flag
     */
    @Override
    public void setReloadable(boolean reloadable) {
        // Process this property change
        boolean oldReloadable = this.reloadable;
        this.reloadable = reloadable;
        support.firePropertyChange("reloadable",
                Boolean.valueOf(oldReloadable),
                Boolean.valueOf(this.reloadable));
    }

    // --------------------------------------------------------- Public Methods

    /**
     * Add a property change listener to this component.
     *
     * @param listener The listener to add
     */
    @Override
    public void addPropertyChangeListener(PropertyChangeListener listener) {
        support.addPropertyChangeListener(listener);
    }

    /**
     * Execute a periodic task, such as reloading, etc. This method will be
     * invoked inside the classloading context of this container. Unexpected
     * throwables will be caught and logged.
     */
    @Override
    public void backgroundProcess() {
        if (reloadable && modified()) {
            try {
                // Reload with the container's own class loader active so the
                // webapp class loader being discarded is not the TCCL.
                Thread.currentThread().setContextClassLoader
                    (WebappLoader.class.getClassLoader());
                if (context != null) {
                    context.reload();
                }
            } finally {
                // Restore the (possibly new) webapp class loader as the TCCL
                if (context != null && context.getLoader() != null) {
                    Thread.currentThread().setContextClassLoader
                        (context.getLoader().getClassLoader());
                }
            }
        }
    }

    /**
     * @return the URLs of the managed class loader as external-form strings,
     *         or an empty array if no class loader has been created yet.
     */
    public String[] getLoaderRepositories() {
        if (classLoader == null) {
            return new String[0];
        }
        URL[] urls = classLoader.getURLs();
        String[] result = new String[urls.length];
        for (int i = 0; i < urls.length; i++) {
            result[i] = urls[i].toExternalForm();
        }
        return result;
    }

    /**
     * @return the loader repositories joined with ":" (legacy JMX format;
     *         intentionally not {@code File.pathSeparator}).
     */
    public String getLoaderRepositoriesString() {
        String repositories[] = getLoaderRepositories();
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < repositories.length; i++) {
            sb.append(repositories[i]).append(":");
        }
        return sb.toString();
    }

    /**
     * Classpath, as set in org.apache.catalina.jsp_classpath context
     * property
     *
     * @return The classpath
     */
    public String getClasspath() {
        return classpath;
    }

    /**
     * Has the internal repository associated with this Loader been modified,
     * such that the loaded classes should be reloaded?
     */
    @Override
    public boolean modified() {
        return classLoader != null ? classLoader.modified() : false;
    }

    /**
     * Remove a property change listener from this component.
     *
     * @param listener The listener to remove
     */
    @Override
    public void removePropertyChangeListener(PropertyChangeListener listener) {
        support.removePropertyChangeListener(listener);
    }

    /**
     * Return a String representation of this component.
     */
    @Override
    public String toString() {
        return ToStringUtil.toString(this, context);
    }

    /**
     * Start associated {@link ClassLoader} and implement the requirements
     * of {@link org.apache.catalina.util.LifecycleBase#startInternal()}.
     *
     * @exception LifecycleException if this component detects a fatal error
     *  that prevents this component from being used
     */
    @Override
    protected void startInternal() throws LifecycleException {

        if (log.isDebugEnabled()) {
            log.debug(sm.getString("webappLoader.starting"));
        }

        if (context.getResources() == null) {
            log.info("No resources for " + context);
            setState(LifecycleState.STARTING);
            return;
        }

        // Construct a class loader based on our current repositories list
        try {

            classLoader = createClassLoader();
            classLoader.setResources(context.getResources());
            classLoader.setDelegate(this.delegate);

            // Configure our repositories
            setClassPath();

            setPermissions();

            ((Lifecycle) classLoader).start();

            String contextName = context.getName();
            if (!contextName.startsWith("/")) {
                contextName = "/" + contextName;
            }
            ObjectName cloname = new ObjectName(context.getDomain() + ":type=" +
                    classLoader.getClass().getSimpleName() + ",host=" +
                    context.getParent().getName() + ",context=" + contextName);
            Registry.getRegistry(null, null)
                .registerComponent(classLoader, cloname, null);

        } catch (Throwable t) {
            t = ExceptionUtils.unwrapInvocationTargetException(t);
            ExceptionUtils.handleThrowable(t);
            log.error( "LifecycleException ", t );
            throw new LifecycleException("start: ", t);
        }

        setState(LifecycleState.STARTING);
    }

    /**
     * Stop associated {@link ClassLoader} and implement the requirements
     * of {@link org.apache.catalina.util.LifecycleBase#stopInternal()}.
     *
     * @exception LifecycleException if this component detects a fatal error
     *  that prevents this component from being used
     */
    @Override
    protected void stopInternal() throws LifecycleException {

        if (log.isDebugEnabled()) {
            log.debug(sm.getString("webappLoader.stopping"));
        }

        setState(LifecycleState.STOPPING);

        // Remove context attributes as appropriate
        ServletContext servletContext = context.getServletContext();
        servletContext.removeAttribute(Globals.CLASS_PATH_ATTR);

        // Throw away our current class loader if any
        if (classLoader != null) {
            try {
                classLoader.stop();
            } finally {
                // Ensure destroy() runs even if stop() throws
                classLoader.destroy();
            }

            // classLoader must be non-null to have been registered
            try {
                String contextName = context.getName();
                if (!contextName.startsWith("/")) {
                    contextName = "/" + contextName;
                }
                ObjectName cloname = new ObjectName(context.getDomain() + ":type=" +
                        classLoader.getClass().getSimpleName() + ",host=" +
                        context.getParent().getName() + ",context=" + contextName);
                Registry.getRegistry(null, null).unregisterComponent(cloname);
            } catch (Exception e) {
                log.error("LifecycleException ", e);
            }
        }

        classLoader = null;
    }

    // ----------------------------------------- PropertyChangeListener Methods

    /**
     * Process property change events from our associated Context.
     *
     * @param event The property change event that has occurred
     */
    @Override
    public void propertyChange(PropertyChangeEvent event) {

        // Validate the source of this event
        if (!(event.getSource() instanceof Context)) {
            return;
        }

        // Process a relevant property change
        if (event.getPropertyName().equals("reloadable")) {
            try {
                setReloadable
                    ( ((Boolean) event.getNewValue()).booleanValue() );
            } catch (NumberFormatException | ClassCastException e) {
                // BUG FIX: the cast above throws ClassCastException (not
                // NumberFormatException) for a non-Boolean value, so the old
                // catch could never fire and bad values escaped uncaught.
                log.error(sm.getString("webappLoader.reloadable",
                        event.getNewValue().toString()));
            }
        }
    }

    // ------------------------------------------------------- Private Methods

    /**
     * Create associated classLoader.
     */
    private WebappClassLoaderBase createClassLoader()
        throws Exception {

        Class<?> clazz = Class.forName(loaderClass);
        WebappClassLoaderBase classLoader = null;

        if (parentClassLoader == null) {
            parentClassLoader = context.getParentClassLoader();
        }
        // The configured loader class must expose a (ClassLoader) constructor
        Class<?>[] argTypes = { ClassLoader.class };
        Object[] args = { parentClassLoader };
        Constructor<?> constr = clazz.getConstructor(argTypes);
        classLoader = (WebappClassLoaderBase) constr.newInstance(args);

        return classLoader;
    }

    /**
     * Configure associated class loader permissions.
     */
    private void setPermissions() {

        if (!Globals.IS_SECURITY_ENABLED) {
            return;
        }
        if (context == null) {
            return;
        }

        // Tell the class loader the root of the context
        ServletContext servletContext = context.getServletContext();

        // Assigning permissions for the work directory
        File workDir =
            (File) servletContext.getAttribute(ServletContext.TEMPDIR);
        if (workDir != null) {
            try {
                String workDirPath = workDir.getCanonicalPath();
                classLoader.addPermission
                    (new FilePermission(workDirPath, "read,write"));
                classLoader.addPermission
                    (new FilePermission(workDirPath + File.separator + "-",
                                        "read,write,delete"));
            } catch (IOException e) {
                // Ignore
            }
        }

        for (URL url : context.getResources().getBaseUrls()) {
           classLoader.addPermission(url);
        }
    }

    /**
     * Set the appropriate context attribute for our class path. This
     * is required only because Jasper depends on it.
     */
    private void setClassPath() {

        // Validate our current state information
        if (context == null) {
            return;
        }
        ServletContext servletContext = context.getServletContext();
        if (servletContext == null) {
            return;
        }

        StringBuilder classpath = new StringBuilder();

        // Assemble the class path information from our class loader chain
        ClassLoader loader = getClassLoader();

        if (delegate && loader != null) {
            // Skip the webapp loader for now as delegation is enabled
            loader = loader.getParent();
        }

        while (loader != null) {
            if (!buildClassPath(classpath, loader)) {
                break;
            }
            loader = loader.getParent();
        }

        if (delegate) {
            // Delegation was enabled, go back and add the webapp paths
            loader = getClassLoader();
            if (loader != null) {
                buildClassPath(classpath, loader);
            }
        }

        this.classpath = classpath.toString();

        // Store the assembled class path as a servlet context attribute
        servletContext.setAttribute(Globals.CLASS_PATH_ATTR, this.classpath);
    }

    /**
     * Append the repositories of the given class loader to the class path.
     *
     * @return {@code true} if the caller should continue walking up the
     *         parent chain, {@code false} to stop
     */
    private boolean buildClassPath(StringBuilder classpath, ClassLoader loader) {
        if (loader instanceof URLClassLoader) {
            URL repositories[] = ((URLClassLoader) loader).getURLs();
            for (int i = 0; i < repositories.length; i++) {
                String repository = repositories[i].toString();
                if (repository.startsWith("file://")) {
                    repository = utf8Decode(repository.substring(7));
                } else if (repository.startsWith("file:")) {
                    repository = utf8Decode(repository.substring(5));
                } else {
                    continue;
                }
                if (repository == null) {
                    continue;
                }
                if (classpath.length() > 0) {
                    classpath.append(File.pathSeparator);
                }
                classpath.append(repository);
            }
        } else if (loader == ClassLoader.getSystemClassLoader()){
            // Java 9 onwards. The internal class loaders no longer extend
            // URLClassLoader
            String cp = System.getProperty("java.class.path");
            if (cp != null && cp.length() > 0) {
                if (classpath.length() > 0) {
                    classpath.append(File.pathSeparator);
                }
                classpath.append(cp);
            }
            return false;
        } else {
            log.info( "Unknown loader " + loader + " " + loader.getClass());
            return false;
        }
        return true;
    }

    /**
     * URL-decode the given string as UTF-8.
     */
    private String utf8Decode(String input) {
        String result = null;
        try {
            result = URLDecoder.decode(input, "UTF-8");
        } catch (UnsupportedEncodingException uee) {
            // Impossible. All JVMs are required to support UTF-8.
        }
        return result;
    }

    private static final Log log = LogFactory.getLog(WebappLoader.class);

    @Override
    protected String getDomainInternal() {
        return context.getDomain();
    }

    @Override
    protected String getObjectNameKeyProperties() {

        StringBuilder name = new StringBuilder("type=Loader");

        name.append(",host=");
        name.append(context.getParent().getName());

        name.append(",context=");

        String contextName = context.getName();
        if (!contextName.startsWith("/")) {
            name.append("/");
        }
        name.append(contextName);

        return name.toString();
    }
}
| apache-2.0 |