repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
roberthafner/flowable-engine | modules/flowable5-engine/src/main/java/org/activiti5/engine/query/QueryProperty.java | 802 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti5.engine.query;
import java.io.Serializable;
/**
* Describes a property that can be used in a Query.
*
* @author Frederik Heremans
*/
public interface QueryProperty extends Serializable {

    /**
     * Returns the name of this query property.
     * <p>
     * NOTE(review): the exact semantics are not visible from this interface alone;
     * presumably implementations return the column/property name used when building
     * the query (e.g. for ordering) -- confirm against the implementing enums.
     *
     * @return the name of this property
     */
    String getName();
}
| apache-2.0 |
georgecodes/betamax | betamax-core/src/main/java/co/freeside/betamax/util/Network.java | 1151 | /*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Converted from Groovy to Java by Sean Freitag
*/
package co.freeside.betamax.util;
import java.net.*;
import java.util.*;
import com.google.common.collect.*;
public class Network {

    /**
     * Returns the names and addresses by which the local machine is known: the
     * resolved host name, its resolved IP address, and the conventional IPv4
     * loopback aliases {@code "localhost"} and {@code "127.0.0.1"}.
     * <p>
     * NOTE(review): the IPv6 loopback ({@code "::1"}) is not included -- confirm
     * whether callers need it before adding, since that would change the
     * collection's contents.
     *
     * @return an unmodifiable collection of local host names/addresses
     * @throws RuntimeException if the local host cannot be resolved
     */
    public static Collection<String> getLocalAddresses() {
        try {
            InetAddress local = InetAddress.getLocalHost();
            // Plain JDK collections suffice here; the Guava ImmutableList added a
            // third-party dependency for a fixed four-element list.
            return Collections.unmodifiableList(Arrays.asList(
                local.getHostName(), local.getHostAddress(), "localhost", "127.0.0.1"));
        } catch (UnknownHostException e) {
            // Resolution failure is unrecoverable for callers; surface as unchecked.
            throw new RuntimeException(e);
        }
    }
}
| apache-2.0 |
mosoft521/wicket | wicket-core/src/test/java/org/apache/wicket/core/util/tester/WicketTesterCookieTest.java | 15710 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.core.util.tester;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import jakarta.servlet.http.Cookie;
import org.apache.wicket.core.util.tester.apps_1.CreateBook;
import org.apache.wicket.core.util.tester.cookies.CollectAllRequestCookiesPage;
import org.apache.wicket.core.util.tester.cookies.EndPage;
import org.apache.wicket.core.util.tester.cookies.SetCookiePage;
import org.apache.wicket.protocol.http.mock.Cookies;
import org.apache.wicket.util.tester.WicketTestCase;
import org.apache.wicket.util.tester.WicketTester;
import org.junit.jupiter.api.Test;
/**
* test code for wicket tester cookie handling
*
* @author mosmann
*/
class WicketTesterCookieTest extends WicketTestCase
{
    /**
     * creates a new cookie with maxAge set
     *
     * @param name
     *            name
     * @param value
     *            value
     * @param maxAge
     *            maxAge
     * @return a cookie
     */
    private static Cookie newCookie(String name, String value, int maxAge)
    {
        Cookie cookie = new Cookie(name, value);
        cookie.setMaxAge(maxAge);
        return cookie;
    }

    /**
     * make cookie map more readable
     *
     * @param cookieMap
     *            cookie map
     * @return string
     */
    private static String asString(Map<String, Cookie> cookieMap)
    {
        StringBuilder sb = new StringBuilder();
        sb.append('{');
        for (Map.Entry<String, Cookie> e : cookieMap.entrySet())
        {
            sb.append(e.getKey()).append('=').append(asString(e.getValue()));
            sb.append(",");
        }
        sb.append('}');
        return sb.toString();
    }

    /**
     * make cookie more readable
     *
     * @param c
     *            cookie
     * @return string
     */
    private static String asString(Cookie c)
    {
        StringBuilder sb = new StringBuilder();
        sb.append('[');
        sb.append("name=").append(c.getName()).append(',');
        sb.append("value=").append(c.getValue()).append(',');
        sb.append("maxAge=").append(c.getMaxAge());
        sb.append(']');
        return sb.toString();
    }

    /**
     * create a cookie map based on cookie name
     *
     * @param cookies
     *            cookie list
     * @return as map
     * @throws RuntimeException
     *             if more than one cookie with the same name
     */
    private static Map<String, Cookie> cookiesFromList(List<Cookie> cookies)
    {
        Map<String, Cookie> ret = new LinkedHashMap<String, Cookie>();
        for (Cookie cookie : cookies)
        {
            Cookie oldValue = ret.put(cookie.getName(), cookie);
            if (oldValue != null)
            {
                throw new RuntimeException(
                    String.format("Cookie with name '%s' ('%s') already in map %s",
                        cookie.getName(), asString(oldValue), asString(ret)));
            }
        }
        return ret;
    }

    /**
     * A cookie added directly to the mock request is visible on that same request.
     */
    @Test
    void cookieIsFoundWhenAddedToRequest()
    {
        tester.getRequest().addCookie(new Cookie("name", "value"));
        assertEquals("value", tester.getRequest().getCookie("name").getValue());
    }

    /**
     * A cookie added directly to the last response is visible in that response's cookie list.
     */
    @Test
    void cookieIsFoundWhenAddedToResponse()
    {
        tester.startPage(CreateBook.class);
        tester.getLastResponse().addCookie(new Cookie("name", "value"));
        Collection<Cookie> cookies = tester.getLastResponse().getCookies();
        // NOTE(review): arguments are reversed relative to the assertEquals(expected, actual)
        // convention used elsewhere in this class.
        assertEquals(cookies.iterator().next().getValue(), "value");
    }

    /**
     * Tests that setting a cookie with age > 0 before creating the page will survive after the
     * rendering of the page and it will be used for the next request cycle.
     */
    @Test
    void transferCookies()
    {
        String cookieName = "wicket4289Name";
        String cookieValue = "wicket4289Value";
        int cookieAge = 1; // age > 0 => the cookie will be preserved for the next request cycle
        Cookie cookie = new Cookie(cookieName, cookieValue);
        cookie.setMaxAge(cookieAge);
        tester.getRequest().addCookie(cookie);
        CookiePage page = new CookiePage(cookieName, cookieValue);
        tester.startPage(page);
        // assert that the cookie was in the response
        List<Cookie> cookies = tester.getLastResponse().getCookies();
        assertEquals(1, cookies.size());
        Cookie cookie2 = cookies.get(0);
        assertEquals(cookieName, cookie2.getName());
        assertEquals(cookieValue, cookie2.getValue());
        assertEquals(cookieAge, cookie2.getMaxAge());
        // assert that the cookie will be preserved for the next request
        assertEquals(cookieValue, tester.getRequest().getCookie(cookieName).getValue());
    }

    /**
     * Tests that setting a cookie with age == 0 will not be stored after the request cycle.
     * <p>
     * NOTE(review): despite the method name, the body uses {@code maxAge == 0}, making this
     * test identical to {@link #dontTransferCookiesWithZeroAge()}. Presumably it was meant to
     * use a negative age ({@code setMaxAge(-1)}) -- confirm the intent and adjust either the
     * name or the value. (The Javadocs of these two tests also appear to have been swapped.)
     */
    @Test
    void dontTransferCookiesWithNegativeAge()
    {
        String cookieName = "wicket4289Name";
        String cookieValue = "wicket4289Value";
        int cookieAge = 0; // age = 0 => do not store it
        Cookie cookie = new Cookie(cookieName, cookieValue);
        cookie.setMaxAge(cookieAge);
        tester.getRequest().addCookie(cookie);
        CookiePage page = new CookiePage(cookieName, cookieValue);
        tester.startPage(page);
        // assert that the cookie is not preserved for the next request cycle
        assertNull(tester.getRequest().getCookies());
    }

    /**
     * Tests that setting a cookie with age == 0 will not be stored after the request cycle.
     */
    @Test
    void dontTransferCookiesWithZeroAge()
    {
        String cookieName = "wicket4289Name";
        String cookieValue = "wicket4289Value";
        int cookieAge = 0; // age == 0 => delete the cookie
        Cookie cookie = new Cookie(cookieName, cookieValue);
        cookie.setMaxAge(cookieAge);
        tester.getRequest().addCookie(cookie);
        CookiePage page = new CookiePage(cookieName, cookieValue);
        tester.startPage(page);
        // assert that the cookie is not preserved for the next request cycle
        assertNull(tester.getRequest().getCookies());
    }

    /**
     * A cookie set in the request headers should not be expected in the response headers unless the
     * page sets it explicitly.
     *
     * https://issues.apache.org/jira/browse/WICKET-4989
     */
    @Test
    void cookieSetInRequestShouldNotBeInResponse()
    {
        // start and render the test page
        tester.getRequest().addCookie(new Cookie("dummy", "sample"));
        tester.startPage(tester.getApplication().getHomePage());
        // assert rendered page class
        tester.assertRenderedPage(tester.getApplication().getHomePage());
        assertEquals(0, tester.getLastResponse().getCookies().size(),
            "The cookie should not be in the response unless explicitly set");
        // The cookie should be in each following request unless the server code
        // schedules it for removal with cookie.setMaxAge(0)
        assertEquals(1, tester.getRequest().getCookies().length,
            "The cookie should be in each following request");
    }

    /**
     * The response cookie should not be the same instance as the request cookie.
     *
     * https://issues.apache.org/jira/browse/WICKET-4989
     */
    @Test
    void doNotReuseTheSameInstanceOfTheCookieForRequestAndResponse()
    {
        // start and render the test page
        String cookieName = "cookieName";
        String cookieValue = "cookieValue";
        Cookie requestCookie = new Cookie(cookieName, cookieValue);
        tester.getRequest().addCookie(requestCookie);
        tester.startPage(new CookiePage(cookieName, cookieValue));
        // assert rendered page class
        tester.assertRenderedPage(CookiePage.class);
        Cookie responseCookie = tester.getLastResponse().getCookies().get(0);
        // mutating the request cookie must not affect the response cookie instance
        requestCookie.setValue("valueChanged");
        assertEquals(cookieValue, responseCookie.getValue());
    }

    /**
     * @see WicketTester
     *
     * TODO add a cookie to request, which should override cookie from last response and last
     * request https://issues.apache.org/jira/browse/WICKET-5147
     */
    @Test
    void wicketTesterCookieHandlingWithoutRedirect()
    {
        // no cookies set
        CollectAllRequestCookiesPage collectingPage = collectAllRequestCookiesOnThisPage();
        assertTrue(collectingPage.getCookies().isEmpty(), "no cookie in first request");
        lastResponseDoesNotHaveAnyCookies();
        responseDoesNotHaveAnyCookies();
        requestDoesNotHaveAnyCookies();
        // set cookie on request
        Cookie firstCookie = newCookie("a", "firstValue", 1);
        tester.getRequest().addCookie(firstCookie);
        collectingPage = collectAllRequestCookiesOnThisPage();
        requestOnPageShouldHaveTheseCookies(collectingPage, firstCookie);
        lastResponseDoesNotHaveAnyCookies();
        requestShouldHaveTheseCookies(firstCookie);
        responseDoesNotHaveAnyCookies();
        // cookies from last request should appear on following requests
        collectingPage = collectAllRequestCookiesOnThisPage();
        requestOnPageShouldHaveTheseCookies(collectingPage, firstCookie);
        lastResponseDoesNotHaveAnyCookies();
        requestShouldHaveTheseCookies(firstCookie);
        responseDoesNotHaveAnyCookies();
        // cookie will be overwritten if response will do so
        Cookie cookieSetInResponse = newCookie("a", "overwriteWithNewValue", 1);
        setCookieInResponse(cookieSetInResponse);
        lastResponseShouldHaveTheseCookies(cookieSetInResponse);
        requestShouldHaveTheseCookies(cookieSetInResponse);
        // cookies from last response then should appear on following requests
        collectingPage = collectAllRequestCookiesOnThisPage();
        requestOnPageShouldHaveTheseCookies(collectingPage, cookieSetInResponse);
        lastResponseDoesNotHaveAnyCookies();
        requestShouldHaveTheseCookies(cookieSetInResponse);
        // cookies from requests will be deleted if the response will do so
        Cookie expiredCookieSetInResponse = newCookie("a", "removeMe", 0);
        setCookieInResponse(expiredCookieSetInResponse);
        lastResponseShouldHaveTheseCookies(expiredCookieSetInResponse);
        responseDoesNotHaveAnyCookies();
        requestDoesNotHaveAnyCookies();
        // no cookies in next request while last cookie was deleted
        collectingPage = collectAllRequestCookiesOnThisPage();
        requestOnPageShouldHaveTheseCookies(collectingPage);
        lastResponseDoesNotHaveAnyCookies();
        requestDoesNotHaveAnyCookies();
        responseDoesNotHaveAnyCookies();
    }

    /**
     * @see WicketTesterCookieTest#wicketTesterCookieHandlingWithoutRedirect()
     *
     * https://issues.apache.org/jira/browse/WICKET-5147
     */
    @Test
    void wicketTesterCookieHandlingWithRedirect()
    {
        // set cookie in response then redirect to other page
        Cookie firstCookie = newCookie("a", "firstValue", 1);
        setCookieInResponseAndRedirect(firstCookie);
        lastResponseShouldHaveTheseCookies(firstCookie);
        requestShouldHaveTheseCookies(firstCookie);
        // cookie in response after redirect should appear in next request
        CollectAllRequestCookiesPage collectingPage = collectAllRequestCookiesOnThisPage();
        requestOnPageShouldHaveTheseCookies(collectingPage, firstCookie);
        lastResponseDoesNotHaveAnyCookies();
        requestShouldHaveTheseCookies(firstCookie);
        responseDoesNotHaveAnyCookies();
        // set cookie on request and overwrite in response then redirect to other page
        Cookie cookieSetInRequest = newCookie("a", "valueFromRequest", 1);
        Cookie cookieSetInResponse = newCookie("a", "overwriteInResponse", 1);
        tester.getRequest().addCookie(cookieSetInRequest);
        setCookieInResponseAndRedirect(cookieSetInResponse);
        lastResponseShouldHaveTheseCookies(cookieSetInResponse);
        requestShouldHaveTheseCookies(cookieSetInResponse);
        // cookie in response after redirect should appear in next request
        collectingPage = collectAllRequestCookiesOnThisPage();
        requestOnPageShouldHaveTheseCookies(collectingPage, cookieSetInResponse);
        lastResponseDoesNotHaveAnyCookies();
        requestShouldHaveTheseCookies(cookieSetInResponse);
        responseDoesNotHaveAnyCookies();
        // set cookie on request and remove it in response then redirect to other page
        Cookie nextCookieSetInRequest = newCookie("a", "nextValueFromRequest", 1);
        Cookie nextCookieSetInResponse = newCookie("a", "newValue", 0);
        tester.getRequest().addCookie(nextCookieSetInRequest);
        setCookieInResponseAndRedirect(nextCookieSetInResponse);
        lastResponseShouldHaveTheseCookies(nextCookieSetInResponse);
        requestDoesNotHaveAnyCookies();
        responseDoesNotHaveAnyCookies();
        // no cookies left
        collectingPage = collectAllRequestCookiesOnThisPage();
        requestOnPageShouldHaveTheseCookies(collectingPage);
        lastResponseDoesNotHaveAnyCookies();
        requestDoesNotHaveAnyCookies();
        responseDoesNotHaveAnyCookies();
    }

    /**
     * start a page which collects all cookies from request
     *
     * @return the page
     */
    private CollectAllRequestCookiesPage collectAllRequestCookiesOnThisPage()
    {
        return tester.startPage(CollectAllRequestCookiesPage.class);
    }

    /**
     * start a page which set a cookie in response
     *
     * @param cookie
     *            cookie
     */
    private void setCookieInResponse(Cookie cookie)
    {
        tester.startPage(new SetCookiePage(cookie));
    }

    /**
     * start a page which set a cookie in response and then redirect to different page
     *
     * @param cookie
     *            cookie
     */
    private void setCookieInResponseAndRedirect(Cookie cookie)
    {
        tester.startPage(new SetCookiePage(cookie, EndPage.class));
    }

    /**
     * check cookies collected by page
     *
     * @param page
     *            page
     * @param cookies
     *            cookies
     */
    private void requestOnPageShouldHaveTheseCookies(CollectAllRequestCookiesPage page,
        Cookie... cookies)
    {
        listShouldMatchAll(page.getCookies(), cookies);
    }

    /**
     * check cookies in current request
     *
     * @param cookies
     *            cookies
     */
    private void requestShouldHaveTheseCookies(Cookie... cookies)
    {
        Cookie[] cookieFromRequest = tester.getRequest().getCookies();
        listShouldMatchAll(
            cookieFromRequest != null ? Arrays.asList(cookieFromRequest) : new ArrayList<Cookie>(),
            cookies);
    }

    /**
     * check if every cookie is found in the list and no cookie is left
     *
     * @param cookieList
     *            cookie list
     * @param cookies
     *            cookies to check
     */
    private void listShouldMatchAll(List<Cookie> cookieList, Cookie... cookies)
    {
        Map<String, Cookie> cookieMap = cookiesFromList(cookieList);
        for (Cookie cookie : cookies)
        {
            Cookie removed = cookieMap.remove(cookie.getName());
            assertNotNull(removed, "Cookie " + cookie.getName());
            assertTrue(Cookies.isEqual(cookie, removed), "Cookie " + cookie.getName() + " matches");
        }
        assertTrue(cookieMap.isEmpty(), "no cookies left " + asString(cookieMap));
    }

    /**
     * check last response cookies
     *
     * @param cookies
     *            cookies
     */
    private void lastResponseShouldHaveTheseCookies(Cookie... cookies)
    {
        listShouldMatchAll(tester.getLastResponse().getCookies(), cookies);
    }

    /**
     * last response should not have any cookies
     */
    private void lastResponseDoesNotHaveAnyCookies()
    {
        listShouldMatchAll(tester.getLastResponse().getCookies());
    }

    /**
     * current response should not have any cookies
     */
    private void responseDoesNotHaveAnyCookies()
    {
        listShouldMatchAll(tester.getResponse().getCookies());
    }

    /**
     * request should not have any cookies
     */
    private void requestDoesNotHaveAnyCookies()
    {
        requestShouldHaveTheseCookies();
    }
}
| apache-2.0 |
rpinzon/kiji-schema | kiji-schema/src/main/java/org/kiji/schema/impl/hbase/HBaseMetaTable.java | 18178 | /**
* (c) Copyright 2012 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.schema.impl.hbase;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.kiji.annotations.ApiAudience;
import org.kiji.schema.KijiMetaTable;
import org.kiji.schema.KijiSchemaTable;
import org.kiji.schema.KijiTableKeyValueDatabase;
import org.kiji.schema.KijiURI;
import org.kiji.schema.avro.KeyValueBackup;
import org.kiji.schema.avro.MetaTableBackup;
import org.kiji.schema.avro.TableBackup;
import org.kiji.schema.avro.TableLayoutDesc;
import org.kiji.schema.avro.TableLayoutsBackup;
import org.kiji.schema.hbase.KijiManagedHBaseTableName;
import org.kiji.schema.impl.HTableInterfaceFactory;
import org.kiji.schema.layout.KijiTableLayout;
import org.kiji.schema.layout.KijiTableLayoutDatabase;
import org.kiji.schema.layout.impl.HBaseTableLayoutDatabase;
import org.kiji.schema.util.DebugResourceTracker;
/**
* An implementation of the KijiMetaTable that uses the 'kiji-meta' HBase table as the backing
* store.
*/
@ApiAudience.Private
public final class HBaseMetaTable implements KijiMetaTable {
private static final Logger LOG = LoggerFactory.getLogger(HBaseMetaTable.class);
/** The HBase column family that will store table layout specific metadata. */
private static final String LAYOUT_COLUMN_FAMILY = "layout";
/** The HBase column family that will store user defined metadata. */
private static final String META_COLUMN_FAMILY = "meta";
/** URI of the Kiji instance this meta-table belongs to. */
private final KijiURI mKijiURI;
/** The HBase table that stores Kiji metadata. */
private final HTableInterface mTable;
/** States of a SchemaTable instance. */
private static enum State {
UNINITIALIZED,
OPEN,
CLOSED
}
/** Tracks the state of this SchemaTable instance. */
private AtomicReference<State> mState = new AtomicReference<State>(State.UNINITIALIZED);
/** The layout table that we delegate the work of storing table layout metadata to. */
private final KijiTableLayoutDatabase mTableLayoutDatabase;
/** The table we delegate storing per table meta data, in the form of key value pairs. */
private final KijiTableKeyValueDatabase<?> mTableKeyValueDatabase;
// TODO: Make KijiTableLayoutDatabase thread-safe,
// so we can call HBaseMetaTable thread-safe, too.
/**
* Creates an HTableInterface for the specified table.
*
* @param kijiURI the KijiURI.
* @param conf Hadoop configuration.
* @param factory HTableInterface factory to use.
* @return a new HTableInterface for the specified table.
* @throws IOException on I/O error.
*/
public static HTableInterface newMetaTable(
KijiURI kijiURI,
Configuration conf,
HTableInterfaceFactory factory)
throws IOException {
final String hbaseTableName =
KijiManagedHBaseTableName.getMetaTableName(kijiURI.getInstance()).toString();
return factory.create(conf, hbaseTableName);
}
/**
* Create a connection to a Kiji meta table backed by an HTable within HBase.
*
* @param kijiURI URI of the Kiji instance this meta-table belongs to.
* @param conf The Hadoop configuration.
* @param schemaTable The Kiji schema table.
* @param factory HTableInterface factory.
* @throws IOException If there is an error.
*/
HBaseMetaTable(
KijiURI kijiURI,
Configuration conf,
KijiSchemaTable schemaTable,
HTableInterfaceFactory factory)
throws IOException {
this(kijiURI, newMetaTable(kijiURI, conf, factory), schemaTable);
}
/**
* Create a connection to a Kiji meta table backed by an HTable within HBase.
*
* <p>This class takes ownership of the HTable. It will be closed when this instance is
* closed.</p>
*
* @param kijiURI URI of the Kiji instance this meta-table belongs to.
* @param htable The HTable to use for storing Kiji meta data.
* @param schemaTable The Kiji schema table.
* @throws IOException If there is an error.
*/
private HBaseMetaTable(
KijiURI kijiURI,
HTableInterface htable,
KijiSchemaTable schemaTable)
throws IOException {
this(
kijiURI,
htable,
new HBaseTableLayoutDatabase(kijiURI, htable, LAYOUT_COLUMN_FAMILY, schemaTable),
new HBaseTableKeyValueDatabase(htable, META_COLUMN_FAMILY));
}
/**
* Create a connection to a Kiji meta table backed by an HTable within HBase.
*
* <p>This class takes ownership of the HTable. It will be closed when this instance is
* closed.</p>
*
* @param kijiURI URI of the Kiji instance this meta-table belongs to.
* @param htable The HTable to use for storing Kiji meta data.
* @param tableLayoutDatabase A database of table layouts to delegate layout storage to.
* @param tableKeyValueDatabase A database of key-value pairs to delegate metadata storage to.
*/
private HBaseMetaTable(
KijiURI kijiURI,
HTableInterface htable,
KijiTableLayoutDatabase tableLayoutDatabase,
KijiTableKeyValueDatabase<?> tableKeyValueDatabase) {
mKijiURI = kijiURI;
mTable = htable;
mTableLayoutDatabase = tableLayoutDatabase;
mTableKeyValueDatabase = tableKeyValueDatabase;
final State oldState = mState.getAndSet(State.OPEN);
Preconditions.checkState(oldState == State.UNINITIALIZED,
"Cannot open MetaTable instance in state %s.", oldState);
DebugResourceTracker.get().registerResource(this);
}
/** {@inheritDoc} */
@Override
public synchronized void deleteTable(String table) throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot delete table from MetaTable instance in state %s.", state);
mTableLayoutDatabase.removeAllTableLayoutVersions(table);
mTableKeyValueDatabase.removeAllValues(table);
}
/** {@inheritDoc} */
@Override
public synchronized KijiTableLayout updateTableLayout(String table, TableLayoutDesc layoutUpdate)
throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot update table layout in MetaTable instance in state %s.", state);
return mTableLayoutDatabase.updateTableLayout(table, layoutUpdate);
}
/** {@inheritDoc} */
@Override
public synchronized KijiTableLayout getTableLayout(String table) throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot get table layout from MetaTable instance in state %s.", state);
return mTableLayoutDatabase.getTableLayout(table);
}
/** {@inheritDoc} */
@Override
public synchronized List<KijiTableLayout> getTableLayoutVersions(String table, int numVersions)
throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot get table layout versions from MetaTable instance in state %s.", state);
return mTableLayoutDatabase.getTableLayoutVersions(table, numVersions);
}
/** {@inheritDoc} */
@Override
public synchronized NavigableMap<Long, KijiTableLayout> getTimedTableLayoutVersions(String table,
int numVersions) throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot get timed table layout versions from MetaTable instance in state %s.", state);
return mTableLayoutDatabase.getTimedTableLayoutVersions(table, numVersions);
}
/** {@inheritDoc} */
@Override
public synchronized void removeAllTableLayoutVersions(String table) throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot remove all table layout versions from MetaTable instance in state %s.", state);
mTableLayoutDatabase.removeAllTableLayoutVersions(table);
}
/** {@inheritDoc} */
@Override
public synchronized void removeRecentTableLayoutVersions(String table, int numVersions)
throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot remove recent table layout versions from MetaTable instance in state %s.", state);
mTableLayoutDatabase.removeRecentTableLayoutVersions(table, numVersions);
}
/** {@inheritDoc} */
@Override
public synchronized List<String> listTables() throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot list tables in MetaTable instance in state %s.", state);
return mTableLayoutDatabase.listTables();
}
/** {@inheritDoc} */
@Override
public synchronized boolean tableExists(String tableName) throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot check if table exists in MetaTable instance in state %s.", state);
return mTableLayoutDatabase.tableExists(tableName);
}
/** {@inheritDoc} */
@Override
public synchronized void close() throws IOException {
final State oldState = mState.getAndSet(State.CLOSED);
Preconditions.checkState(oldState == State.OPEN,
"Cannot close MetaTable instance in state %s.", oldState);
DebugResourceTracker.get().unregisterResource(this);
mTable.close();
}
/** {@inheritDoc} */
@Override
public synchronized byte[] getValue(String table, String key) throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot get value from MetaTable instance in state %s.", state);
return mTableKeyValueDatabase.getValue(table, key);
}
/** {@inheritDoc} */
@Override
public synchronized KijiMetaTable putValue(String table, String key, byte[] value)
throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot put value into MetaTable instance in state %s.", state);
mTableKeyValueDatabase.putValue(table, key, value);
return this; // Don't expose the delegate object.
}
/** {@inheritDoc} */
@Override
public synchronized void removeValues(String table, String key) throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot get removed values from MetaTable instance in state %s.", state);
mTableKeyValueDatabase.removeValues(table, key);
}
/** {@inheritDoc} */
@Override
public Set<String> tableSet() throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot get table set from MetaTable instance in state %s.", state);
return mTableKeyValueDatabase.tableSet();
}
/** {@inheritDoc} */
@Override
public Set<String> keySet(String table) throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot get key set from MetaTable instance in state %s.", state);
return mTableKeyValueDatabase.keySet(table);
}
/** {@inheritDoc} */
@Override
public void removeAllValues(String table) throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot remove all values from MetaTable instance in state %s.", state);
mTableKeyValueDatabase.removeAllValues(table);
}
/**
* Install the meta table into a Kiji instance.
*
* @param admin The HBase Admin interface for the HBase cluster to install into.
* @param uri The uri of the Kiji instance to install.
* @throws IOException If there is an error.
*/
public static void install(HBaseAdmin admin, KijiURI uri)
throws IOException {
HTableDescriptor tableDescriptor = new HTableDescriptor(
KijiManagedHBaseTableName.getMetaTableName(uri.getInstance()).toString());
tableDescriptor.addFamily(
HBaseTableLayoutDatabase.getHColumnDescriptor(LAYOUT_COLUMN_FAMILY));
tableDescriptor.addFamily(
HBaseTableLayoutDatabase.getHColumnDescriptor(META_COLUMN_FAMILY));
admin.createTable(tableDescriptor);
}
/**
* Removes the meta table from HBase.
*
* @param admin The HBase admin object.
* @param uri The uri of the Kiji instance to uninstall.
* @throws IOException If there is an error.
*/
public static void uninstall(HBaseAdmin admin, KijiURI uri)
throws IOException {
String tableName = KijiManagedHBaseTableName.getMetaTableName(uri.getInstance()).toString();
admin.disableTable(tableName);
admin.deleteTable(tableName);
}
/** {@inheritDoc} */
@Override
public MetaTableBackup toBackup() throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot backup MetaTable instance in state %s.", state);
Map<String, TableBackup> backupEntries = new HashMap<String, TableBackup>();
List<String> tables = listTables();
for (String table : tables) {
TableLayoutsBackup layouts = mTableLayoutDatabase.layoutsToBackup(table);
KeyValueBackup keyValues = mTableKeyValueDatabase.keyValuesToBackup(table);
final TableBackup tableBackup = TableBackup.newBuilder()
.setName(table)
.setTableLayoutsBackup(layouts)
.setKeyValueBackup(keyValues)
.build();
backupEntries.put(table, tableBackup);
}
return MetaTableBackup.newBuilder().setTables(backupEntries).build();
}
/** {@inheritDoc} */
@Override
public void fromBackup(MetaTableBackup backup) throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot restore backup to MetaTable instance in state %s.", state);
LOG.info(String.format("Restoring meta table from backup with %d entries.",
backup.getTables().size()));
for (Map.Entry<String, TableBackup> tableEntry: backup.getTables().entrySet()) {
final String tableName = tableEntry.getKey();
final TableBackup tableBackup = tableEntry.getValue();
Preconditions.checkState(tableName.equals(tableBackup.getName()), String.format(
"Inconsistent table backup: entry '%s' does not match table name '%s'.",
tableName, tableBackup.getName()));
restoreLayoutsFromBackup(tableName, tableBackup.getTableLayoutsBackup());
restoreKeyValuesFromBackup(tableName, tableBackup.getKeyValueBackup());
}
mTable.flushCommits();
LOG.info("Flushing commits to table '{}'", Bytes.toString(mTable.getTableName()));
}
/** {@inheritDoc} */
@Override
public TableLayoutsBackup layoutsToBackup(String table) throws IOException {
final State state = mState.get();
Preconditions.checkState(state == State.OPEN,
"Cannot get layouts to backup from MetaTable instance in state %s.", state);
return mTableLayoutDatabase.layoutsToBackup(table);
}
/** {@inheritDoc} */
@Override
public List<byte[]> getValues(String table, String key, int numVersions) throws IOException {
    // Only an OPEN table may serve reads; delegate to the key/value store.
    final State snapshot = mState.get();
    Preconditions.checkState(snapshot == State.OPEN,
        "Cannot get values from MetaTable instance in state %s.", snapshot);
    return mTableKeyValueDatabase.getValues(table, key, numVersions);
}
/** {@inheritDoc} */
@Override
public NavigableMap<Long, byte[]> getTimedValues(String table, String key, int numVersions)
    throws IOException {
    // Same open-state contract as getValues, but returns timestamped versions.
    final State snapshot = mState.get();
    Preconditions.checkState(snapshot == State.OPEN,
        "Cannot get timed values from MetaTable instance in state %s.", snapshot);
    return mTableKeyValueDatabase.getTimedValues(table, key, numVersions);
}
/** {@inheritDoc} */
@Override
public KeyValueBackup keyValuesToBackup(String table) throws IOException {
    // Snapshot state once, then delegate to the key/value database.
    final State currentState = mState.get();
    Preconditions.checkState(currentState == State.OPEN,
        "Cannot get key values to backup from MetaTable instance in state %s.", currentState);
    return mTableKeyValueDatabase.keyValuesToBackup(table);
}
/** {@inheritDoc} */
@Override
public void restoreKeyValuesFromBackup(String table, KeyValueBackup tableBackup) throws
    IOException {
    // Writes require an OPEN table; the key/value database does the actual restore.
    final State currentState = mState.get();
    Preconditions.checkState(currentState == State.OPEN,
        "Cannot restore key values from backup from MetaTable instance in state %s.", currentState);
    mTableKeyValueDatabase.restoreKeyValuesFromBackup(table, tableBackup);
}
/** {@inheritDoc} */
@Override
public void restoreLayoutsFromBackup(String tableName, TableLayoutsBackup tableBackup) throws
    IOException {
    final State state = mState.get();
    // Restores may only run against an OPEN meta table.
    Preconditions.checkState(state == State.OPEN,
        "Cannot restore layouts from backup from MetaTable instance in state %s.", state);
    mTableLayoutDatabase.restoreLayoutsFromBackup(tableName, tableBackup);
}
/** {@inheritDoc} */
@Override
public String toString() {
    // Build the debug representation step by step (same output as the chained form).
    final Objects.ToStringHelper helper = Objects.toStringHelper(HBaseMetaTable.class);
    helper.add("uri", mKijiURI);
    helper.add("state", mState.get());
    return helper.toString();
}
}
| apache-2.0 |
chaostrigger/rl-library | projects/environments/experimental/mario/src/com/mojang/mario/sprites/Mario.java | 18645 | package com.mojang.mario.sprites;
import com.mojang.mario.Art;
import com.mojang.mario.Scene;
import com.mojang.mario.level.*;
import com.mojang.mario.LevelScene;
import com.mojang.sonar.FixedSoundSource;
import edu.rutgers.rl3.comp.mario.GlueMario;
public class Mario extends Sprite
{
public static boolean large = false;
public static boolean fire = false;
public static int coins = 0;
public static int kills = 0;
public static int lives = 3;
public static String levelString = "none";
/**
 * Resets all of Mario's static, cross-level session state to its initial
 * values. Called when a fresh game session starts.
 */
public static void resetStatic()
{
    large = false;
    fire = false;
    coins = 0;
    kills = 0; // previously omitted: the kill counter leaked across sessions
    lives = 3;
    levelString = "none";
}
public static final int KEY_LEFT = 0;
public static final int KEY_RIGHT = 1;
public static final int KEY_DOWN = 2;
public static final int KEY_UP = 3;
public static final int KEY_JUMP = 4;
public static final int KEY_SPEED = 5;
private static float GROUND_INERTIA = 0.89f;
private static float AIR_INERTIA = 0.89f;
public boolean[] keys;
private float runTime;
boolean wasOnGround = false;
boolean onGround = false;
private boolean mayJump = false;
private boolean ducking = false;
private boolean sliding = false;
private int jumpTime = 0;
private float xJumpSpeed;
private float yJumpSpeed;
private boolean canShoot = false;
int width = 4;
int height = 24;
private LevelScene world;
public int facing;
private int powerUpTime = 0;
public int xDeathPos, yDeathPos;
public int deathTime = 0;
public int winTime = 0;
private int invulnerableTime = 0;
public Sprite carried = null;
public static Mario instance;
public Mario(LevelScene world)
{
Mario.instance = this;
this.world = world;
keys = Scene.keys;
x = 32;
y = 0;
facing = 1;
setLarge(Mario.large, Mario.fire);
}
private boolean lastLarge;
private boolean lastFire;
private boolean newLarge;
private boolean newFire;
/**
 * Flips the visible power-up state between the new (target) and previous
 * one, producing the flicker effect during power-up/power-down.
 *
 * @param on true shows the new state, false shows the previous state
 */
private void blink(boolean on)
{
    Mario.large = on?newLarge:lastLarge;
    Mario.fire = on?newFire:lastFire;
    if (large)
    {
        // Large Mario: 32x32 sprite sheet, fire variant when applicable.
        sheet = Art.mario;
        if (fire)
            sheet = Art.fireMario;
        xPicO = 16;
        yPicO = 31;
        wPic = hPic = 32;
    }
    else
    {
        // Small Mario: 16x16 sprite sheet.
        sheet = Art.smallMario;
        xPicO = 8;
        yPicO = 15;
        wPic = hPic = 16;
    }
    calcPic();
}
/**
 * Sets Mario's power-up state. Fire implies large, and small implies no
 * fire. The outgoing state is remembered so blink() can flicker between
 * old and new while a power-up/down animation runs.
 */
public void setLarge(boolean large, boolean fire)
{
    // Normalize: fire requires large; small cannot be fire.
    if (fire) large = true;
    if (!large) fire = false;
    // Capture the outgoing state before it is overwritten below.
    lastLarge = Mario.large;
    lastFire = Mario.fire;
    Mario.large = large;
    Mario.fire = fire;
    newLarge = Mario.large;
    newFire = Mario.fire;
    blink(true);
}
public void move()
{
if (winTime > 0)
{
winTime++;
xa = 0;
ya = 0;
return;
}
if (deathTime > 0)
{
deathTime++;
if (deathTime < 11)
{
xa = 0;
ya = 0;
}
else if (deathTime == 11)
{
ya = -15;
}
else
{
ya += 2;
}
x += xa;
y += ya;
return;
}
if (powerUpTime != 0)
{
if (powerUpTime > 0)
{
powerUpTime--;
blink(((powerUpTime / 3) & 1) == 0);
}
else
{
powerUpTime++;
blink(((-powerUpTime / 3) & 1) == 0);
}
if (powerUpTime == 0) world.paused = false;
calcPic();
return;
}
if (invulnerableTime > 0) invulnerableTime--;
visible = ((invulnerableTime / 2) & 1) == 0;
wasOnGround = onGround;
float sideWaysSpeed = (float)(keys[KEY_SPEED] ? GlueMario.param.speed_run : GlueMario.param.speed_walk);
// float sideWaysSpeed = onGround ? 2.5f : 1.2f;
if (onGround)
{
if (keys[KEY_DOWN] && large)
{
ducking = true;
}
else
{
ducking = false;
}
}
if (xa > 2)
{
facing = 1;
}
if (xa < -2)
{
facing = -1;
}
if (keys[KEY_JUMP] || (jumpTime < 0 && !onGround && !sliding))
{
if (jumpTime < 0)
{
xa = xJumpSpeed;
ya = -jumpTime * yJumpSpeed;
jumpTime++;
}
else if (onGround && mayJump)
{
world.sound.play(Art.samples[Art.SAMPLE_MARIO_JUMP], this, 1, 1, 1);
xJumpSpeed = 0;
yJumpSpeed = (float)GlueMario.param.speed_jump;
jumpTime = GlueMario.param.jump_time;
ya = jumpTime * yJumpSpeed;
onGround = false;
sliding = false;
}
else if (sliding && mayJump)
{
world.sound.play(Art.samples[Art.SAMPLE_MARIO_JUMP], this, 1, 1, 1);
xJumpSpeed = -facing * 6.0f;
yJumpSpeed = (float)GlueMario.param.speed_jump_sliding;
jumpTime = GlueMario.param.jump_time_sliding;
xa = xJumpSpeed;
ya = -jumpTime * yJumpSpeed;
onGround = false;
sliding = false;
facing = -facing;
}
else if (jumpTime > 0)
{
xa += xJumpSpeed;
ya = jumpTime * yJumpSpeed;
jumpTime--;
}
}
else
{
jumpTime = 0;
}
if (keys[KEY_LEFT] && !ducking)
{
if (facing == 1) sliding = false;
xa -= sideWaysSpeed;
if (jumpTime >= 0) facing = -1;
}
if (keys[KEY_RIGHT] && !ducking)
{
if (facing == -1) sliding = false;
xa += sideWaysSpeed;
if (jumpTime >= 0) facing = 1;
}
if ((!keys[KEY_LEFT] && !keys[KEY_RIGHT]) || ducking || ya < 0 || onGround)
{
sliding = false;
}
if (keys[KEY_SPEED] && canShoot && Mario.fire && world.fireballsOnScreen<2)
{
world.sound.play(Art.samples[Art.SAMPLE_MARIO_FIREBALL], this, 1, 1, 1);
world.addSprite(new Fireball(world, x+facing*6, y-20, facing));
}
canShoot = !keys[KEY_SPEED];
mayJump = (onGround || sliding) && !keys[KEY_JUMP];
xFlipPic = facing == -1;
runTime += (Math.abs(xa)) + 5;
if (Math.abs(xa) < 0.5f)
{
runTime = 0;
xa = 0;
}
calcPic();
if (sliding)
{
for (int i = 0; i < 1; i++)
{
world.addSprite(new Sparkle((int) (x + Math.random() * 4 - 2) + facing * 8, (int) (y + Math.random() * 4) - 24, (float) (Math.random() * 2 - 1), (float) Math.random() * 1, 0, 1, 5));
}
ya *= 0.5f;
}
onGround = false;
move(xa, 0);
move(0, ya);
if (y > world.level.height * 16 + 16)
{
die();
}
if (x < 0)
{
x = 0;
xa = 0;
}
if (x > world.level.xExit * 16)
{
win();
}
if (x > world.level.width * 16)
{
x = world.level.width * 16;
xa = 0;
}
ya *= GlueMario.param.accel_gravity;
if (onGround)
{
xa *= GROUND_INERTIA;
}
else
{
xa *= AIR_INERTIA;
}
if (!onGround)
{
ya += 3;
}
if (carried != null)
{
carried.x = x + facing * 8;
carried.y = y - 2;
if (!keys[KEY_SPEED])
{
carried.release(this);
carried = null;
}
}
}
private void calcPic()
{
int runFrame = 0;
if (large)
{
runFrame = ((int) (runTime / 20)) % 4;
if (runFrame == 3) runFrame = 1;
if (carried == null && Math.abs(xa) > 10) runFrame += 3;
if (carried != null) runFrame += 10;
if (!onGround)
{
if (carried != null) runFrame = 12;
else if (Math.abs(xa) > 10) runFrame = 7;
else runFrame = 6;
}
}
else
{
runFrame = ((int) (runTime / 20)) % 2;
if (carried == null && Math.abs(xa) > 10) runFrame += 2;
if (carried != null) runFrame += 8;
if (!onGround)
{
if (carried != null) runFrame = 9;
else if (Math.abs(xa) > 10) runFrame = 5;
else runFrame = 4;
}
}
if (onGround && ((facing == -1 && xa > 0) || (facing == 1 && xa < 0)))
{
if (xa > 1 || xa < -1) runFrame = large ? 9 : 7;
if (xa > 3 || xa < -3)
{
for (int i = 0; i < 3; i++)
{
world.addSprite(new Sparkle((int) (x + Math.random() * 8 - 4), (int) (y + Math.random() * 4), (float) (Math.random() * 2 - 1), (float) Math.random() * -1, 0, 1, 5));
}
}
}
if (large)
{
if (ducking) runFrame = 14;
height = ducking ? 12 : 24;
}
else
{
height = 12;
}
xPic = runFrame;
}
private boolean move(float xa, float ya)
{
while (xa > 8)
{
if (!move(8, 0)) return false;
xa -= 8;
}
while (xa < -8)
{
if (!move(-8, 0)) return false;
xa += 8;
}
while (ya > 8)
{
if (!move(0, 8)) return false;
ya -= 8;
}
while (ya < -8)
{
if (!move(0, -8)) return false;
ya += 8;
}
boolean collide = false;
if (ya > 0)
{
if (isBlocking(x + xa - width, y + ya, xa, 0)) collide = true;
else if (isBlocking(x + xa + width, y + ya, xa, 0)) collide = true;
else if (isBlocking(x + xa - width, y + ya + 1, xa, ya)) collide = true;
else if (isBlocking(x + xa + width, y + ya + 1, xa, ya)) collide = true;
}
if (ya < 0)
{
if (isBlocking(x + xa, y + ya - height, xa, ya)) collide = true;
else if (collide || isBlocking(x + xa - width, y + ya - height, xa, ya)) collide = true;
else if (collide || isBlocking(x + xa + width, y + ya - height, xa, ya)) collide = true;
}
if (xa > 0)
{
sliding = true;
if (isBlocking(x + xa + width, y + ya - height, xa, ya)) collide = true;
else sliding = false;
if (isBlocking(x + xa + width, y + ya - height / 2, xa, ya)) collide = true;
else sliding = false;
if (isBlocking(x + xa + width, y + ya, xa, ya)) collide = true;
else sliding = false;
}
if (xa < 0)
{
sliding = true;
if (isBlocking(x + xa - width, y + ya - height, xa, ya)) collide = true;
else sliding = false;
if (isBlocking(x + xa - width, y + ya - height / 2, xa, ya)) collide = true;
else sliding = false;
if (isBlocking(x + xa - width, y + ya, xa, ya)) collide = true;
else sliding = false;
}
if (collide)
{
if (xa < 0)
{
x = (int) ((x - width) / 16) * 16 + width;
this.xa = 0;
}
if (xa > 0)
{
x = (int) ((x + width) / 16 + 1) * 16 - width - 1;
this.xa = 0;
}
if (ya < 0)
{
y = (int) ((y - height) / 16) * 16 + height;
jumpTime = 0;
this.ya = 0;
}
if (ya > 0)
{
y = (int) ((y - 1) / 16 + 1) * 16 - 1;
onGround = true;
}
return false;
}
else
{
x += xa;
y += ya;
return true;
}
}
/**
 * Tests whether the level tile under pixel position (_x, _y) blocks
 * movement, applying side effects on the way: pickupable tiles (coins)
 * are collected, and blocks hit from below are bumped.
 *
 * @param _x probe x in pixels
 * @param _y probe y in pixels
 * @param xa current horizontal velocity (forwarded to the level check)
 * @param ya current vertical velocity (negative = moving up, triggers bump)
 * @return true if the tile blocks Mario
 */
private boolean isBlocking(float _x, float _y, float xa, float ya)
{
    // Convert pixel coordinates to 16px tile coordinates.
    int x = (int) (_x / 16);
    int y = (int) (_y / 16);
    // The tile Mario already occupies never blocks him.
    if (x == (int) (this.x / 16) && y == (int) (this.y / 16)) return false;
    boolean blocking = world.level.isBlocking(x, y, xa, ya);
    byte block = world.level.getBlock(x, y);
    if (((Level.TILE_BEHAVIORS[block & 0xff]) & Level.BIT_PICKUPABLE) > 0)
    {
        // Passing through a pickupable tile: collect the coin, clear the
        // tile and spawn sparkles at its four quadrants.
        Mario.getCoin();
        world.sound.play(Art.samples[Art.SAMPLE_GET_COIN], new FixedSoundSource(x * 16 + 8, y * 16 + 8), 1, 1, 1);
        world.level.setBlock(x, y, (byte) 0);
        for (int xx = 0; xx < 2; xx++)
            for (int yy = 0; yy < 2; yy++)
                world.addSprite(new Sparkle(x * 16 + xx * 8 + (int) (Math.random() * 8), y * 16 + yy * 8 + (int) (Math.random() * 8), 0, 0, 0, 2, 5));
    }
    // Hitting a solid tile while moving upward bumps it (bricks, ? blocks).
    if (blocking && ya < 0)
    {
        world.bump(x, y, large);
    }
    return blocking;
}
/**
 * Handles landing on top of an enemy: counts the kill (wingless enemies
 * only), snaps Mario onto the enemy and gives a small fixed bounce.
 */
public void stomp(Enemy enemy)
{
    if (deathTime > 0 || world.paused) return;
    if (!enemy.winged)
        Mario.kills++;
    // Snap Mario to the enemy's top edge before bouncing off it.
    float targetY = enemy.y - enemy.height / 2;
    move(0, targetY - y);
    world.sound.play(Art.samples[Art.SAMPLE_MARIO_KICK], this, 1, 1, 1);
    // Fixed stomp bounce: straight up, independent of the jump key.
    xJumpSpeed = 0;
    yJumpSpeed = -1.9f;
    jumpTime = 8;
    ya = jumpTime * yJumpSpeed;
    onGround = false;
    sliding = false;
    invulnerableTime = 1; // one-tick grace so the same contact can't hurt
}
/**
 * Handles landing on a shell: picks it up when the speed key is held and
 * the shell is idle, otherwise bounces off it like a stomped enemy.
 */
public void stomp(Shell shell)
{
    if (deathTime > 0 || world.paused) return;
    if (keys[KEY_SPEED] && shell.facing == 0)
    {
        // Idle shell + speed key held: carry it instead of kicking.
        carried = shell;
        shell.carried = true;
    }
    else
    {
        // Otherwise behave like a regular stomp: snap on top and bounce.
        float targetY = shell.y - shell.height / 2;
        move(0, targetY - y);
        world.sound.play(Art.samples[Art.SAMPLE_MARIO_KICK], this, 1, 1, 1);
        xJumpSpeed = 0;
        yJumpSpeed = -1.9f;
        jumpTime = 8;
        ya = jumpTime * yJumpSpeed;
        onGround = false;
        sliding = false;
        invulnerableTime = 1; // brief grace period after the bounce
    }
}
/**
 * Applies damage to Mario: large Mario powers down one step (fire -> large
 * -> small) with a flicker and brief invulnerability; small Mario dies.
 * No-op while already dead, paused, or invulnerable.
 */
public void getHurt()
{
    if (deathTime > 0 || world.paused) return;
    if (invulnerableTime > 0) return;
    if (large)
    {
        world.paused = true;
        powerUpTime = -3 * 6; // negative: counts up to 0 during power-down
        world.sound.play(Art.samples[Art.SAMPLE_MARIO_POWER_DOWN], this, 1, 1, 1);
        if (fire)
        {
            // Fire Mario drops to plain large.
            world.mario.setLarge(true, false);
        }
        else
        {
            // Large Mario drops to small.
            world.mario.setLarge(false, false);
        }
        invulnerableTime = 32;
    }
    else
    {
        die();
    }
}
/**
 * Marks the level as won: records Mario's final position, freezes the
 * scene, stops the music and plays the level-exit fanfare.
 */
private void win()
{
    xDeathPos = (int) x;
    yDeathPos = (int) y;
    winTime = 1;
    world.paused = true;
    Art.stopMusic();
    world.sound.play(Art.samples[Art.SAMPLE_LEVEL_EXIT], this, 1, 1, 1);
}
/**
 * Kills Mario: records where he died, freezes the scene, stops the music
 * and plays the death jingle. The death animation is driven by deathTime.
 */
public void die()
{
    xDeathPos = (int) x;
    yDeathPos = (int) y;
    deathTime = 1;
    world.paused = true;
    Art.stopMusic();
    world.sound.play(Art.samples[Art.SAMPLE_MARIO_DEATH], this, 1, 1, 1);
}
/**
 * Applies a fire-flower pickup: upgrades Mario to fire form, or pays out
 * a coin if he already has fire power. Ignored while dead or paused.
 */
public void getFlower()
{
    if (deathTime > 0 || world.paused) return;
    if (fire)
    {
        // Already fire Mario: the flower is just worth a coin.
        Mario.getCoin();
        world.sound.play(Art.samples[Art.SAMPLE_GET_COIN], this, 1, 1, 1);
    }
    else
    {
        // Start the power-up animation and switch to fire form.
        world.paused = true;
        powerUpTime = 3 * 6;
        world.sound.play(Art.samples[Art.SAMPLE_MARIO_POWER_UP], this, 1, 1, 1);
        world.mario.setLarge(true, true);
    }
}
/**
 * Applies a mushroom pickup: grows small Mario to large, or pays out a
 * coin if he is already large. Ignored while dead or paused.
 */
public void getMushroom()
{
    if (deathTime > 0 || world.paused) return;
    if (large)
    {
        // Already large: the mushroom is just worth a coin.
        Mario.getCoin();
        world.sound.play(Art.samples[Art.SAMPLE_GET_COIN], this, 1, 1, 1);
    }
    else
    {
        // Start the power-up animation and grow to large form.
        world.paused = true;
        powerUpTime = 3 * 6;
        world.sound.play(Art.samples[Art.SAMPLE_MARIO_POWER_UP], this, 1, 1, 1);
        world.mario.setLarge(true, false);
    }
}
/**
 * Handles touching a shell from the side: pick it up while the speed key
 * is held, otherwise kick it away. Ignored while dead or paused.
 */
public void kick(Shell shell)
{
    if (deathTime > 0 || world.paused) return;
    if (!keys[KEY_SPEED])
    {
        // No carry intent: kick the shell and take a one-tick grace period.
        world.sound.play(Art.samples[Art.SAMPLE_MARIO_KICK], this, 1, 1, 1);
        invulnerableTime = 1;
    }
    else
    {
        // Speed key held: pick the shell up instead.
        carried = shell;
        shell.carried = true;
    }
}
/**
 * Handles landing on a Bullet Bill: snaps Mario onto it and gives the
 * standard fixed stomp bounce. Ignored while dead or paused.
 */
public void stomp(BulletBill bill)
{
    if (deathTime > 0 || world.paused) return;
    // Snap to the bullet's top edge, then bounce straight up.
    move(0, (bill.y - bill.height / 2) - y);
    world.sound.play(Art.samples[Art.SAMPLE_MARIO_KICK], this, 1, 1, 1);
    xJumpSpeed = 0;
    yJumpSpeed = -1.9f;
    jumpTime = 8;
    ya = jumpTime * yJumpSpeed;
    sliding = false;
    onGround = false;
    invulnerableTime = 1;
}
/**
 * Packs the current key states into a bitmask (bit i set iff keys[i]).
 * NOTE(review): iterates 7 slots although only 6 KEY_* constants are
 * declared here — assumes Scene.keys has at least 7 entries; confirm.
 */
public byte getKeyMask()
{
    int mask = 0;
    for (int bit = 0; bit < 7; bit++)
    {
        mask |= keys[bit] ? (1 << bit) : 0;
    }
    return (byte) mask;
}
/**
 * Restores key states from a bitmask previously produced by getKeyMask().
 */
public void setKeys(byte mask)
{
    for (int bit = 0; bit < 7; bit++)
    {
        int flag = 1 << bit;
        keys[bit] = (mask & flag) > 0;
    }
}
/**
 * Awards an extra life (capped at 99) and plays the 1-up jingle. No life
 * is awarded while the RL glue is driving the game.
 */
public static void get1Up()
{
    instance.world.sound.play(Art.samples[Art.SAMPLE_MARIO_1UP], instance, 1, 1, 1);
    if (GlueMario.glue_running)
        return;
    lives++;
    // Clamp at 99. The original "if (lives==99) lives = 99;" was a no-op
    // that never enforced the intended cap.
    if (lives > 99)
    {
        lives = 99;
    }
}
/**
 * Collects one coin; every 100 coins converts into an extra life, unless
 * the RL glue is driving the game.
 */
public static void getCoin()
{
    coins++;
    boolean rolledOver = (coins == 100);
    if (rolledOver && !GlueMario.glue_running)
    {
        coins = 0;
        get1Up();
    }
}
} | apache-2.0 |
bither/bitherj | bitherj/src/main/java/net/bither/bitherj/core/DesktopHDMKeychain.java | 36554 | /*
*
* Copyright 2014 http://Bither.net
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* /
*/
package net.bither.bitherj.core;
import net.bither.bitherj.AbstractApp;
import net.bither.bitherj.api.CreateHDMAddressApi;
import net.bither.bitherj.crypto.ECKey;
import net.bither.bitherj.crypto.EncryptedData;
import net.bither.bitherj.crypto.TransactionSignature;
import net.bither.bitherj.crypto.hd.DeterministicKey;
import net.bither.bitherj.crypto.hd.HDKeyDerivation;
import net.bither.bitherj.crypto.mnemonic.MnemonicCode;
import net.bither.bitherj.crypto.mnemonic.MnemonicException;
import net.bither.bitherj.db.AbstractDb;
import net.bither.bitherj.exception.TxBuilderException;
import net.bither.bitherj.qrcode.QRCodeUtil;
import net.bither.bitherj.script.ScriptBuilder;
import net.bither.bitherj.utils.Base58;
import net.bither.bitherj.utils.PrivateKeyUtil;
import net.bither.bitherj.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.math.BigInteger;
import java.security.SecureRandom;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
public class DesktopHDMKeychain extends AbstractHD {
public static final String DesktopHDMKeychainPlaceHolder = "DesktopHDMKeychain";
private long balance = 0;
private static final int LOOK_AHEAD_SIZE = 100;
private LinkedBlockingQueue<HashMap<String, Long>> sendRequestList = new LinkedBlockingQueue<HashMap<String, Long>>();
public static interface DesktopHDMFetchOtherSignatureDelegate {
List<TransactionSignature> getOtherSignature(Tx tx,
List<byte[]> unsignHash, List<PathTypeIndex> pathTypeIndexLsit);
}
private static final Logger log = LoggerFactory.getLogger(DesktopHDMKeychain.class);
public DesktopHDMKeychain(byte[] mnemonicSeed, CharSequence password) throws MnemonicException
.MnemonicLengthException {
this.mnemonicSeed = mnemonicSeed;
String firstAddress = null;
EncryptedData encryptedMnemonicSeed = null;
EncryptedData encryptedHDSeed = null;
ECKey k = new ECKey(mnemonicSeed, null);
String address = k.toAddress();
k.clearPrivateKey();
hdSeed = seedFromMnemonic(mnemonicSeed);
encryptedHDSeed = new EncryptedData(hdSeed, password, isFromXRandom);
encryptedMnemonicSeed = new EncryptedData(mnemonicSeed, password, isFromXRandom);
DeterministicKey master = HDKeyDerivation.createMasterPrivateKey(hdSeed);
initHDAccount(master, encryptedMnemonicSeed, encryptedHDSeed, true);
}
// Create With Random
public DesktopHDMKeychain(SecureRandom random, CharSequence password) {
isFromXRandom = random.getClass().getCanonicalName().indexOf("XRandom") >= 0;
mnemonicSeed = new byte[32];
EncryptedData encryptedMnemonicSeed = null;
EncryptedData encryptedHDSeed = null;
try {
random.nextBytes(mnemonicSeed);
hdSeed = seedFromMnemonic(mnemonicSeed);
encryptedHDSeed = new EncryptedData(hdSeed, password, isFromXRandom);
encryptedMnemonicSeed = new EncryptedData(mnemonicSeed, password, isFromXRandom);
} catch (Exception e) {
e.printStackTrace();
}
DeterministicKey master = HDKeyDerivation.createMasterPrivateKey(hdSeed);
initHDAccount(master, encryptedMnemonicSeed, encryptedHDSeed, true);
}
// From DB
public DesktopHDMKeychain(int seedId) {
this.hdSeedId = seedId;
isFromXRandom = AbstractDb.desktopAddressProvider.isHDSeedFromXRandom(getHdSeedId());
updateBalance();
}
// Import
public DesktopHDMKeychain(EncryptedData encryptedMnemonicSeed, CharSequence password) throws
HDMBitherIdNotMatchException, MnemonicException.MnemonicLengthException {
mnemonicSeed = encryptedMnemonicSeed.decrypt(password);
hdSeed = seedFromMnemonic(mnemonicSeed);
isFromXRandom = encryptedMnemonicSeed.isXRandom();
EncryptedData encryptedHDSeed = new EncryptedData(hdSeed, password, isFromXRandom);
ArrayList<DesktopHDMAddress> as = new ArrayList<DesktopHDMAddress>();
ArrayList<HDMAddress.Pubs> uncompPubs = new ArrayList<HDMAddress.Pubs>();
ECKey k = new ECKey(mnemonicSeed, null);
String address = k.toAddress();
k.clearPrivateKey();
String firstAddress = getFirstAddressFromSeed(password);
wipeMnemonicSeed();
wipeHDSeed();
this.hdSeedId = AbstractDb.desktopAddressProvider.addHDKey(encryptedMnemonicSeed
.toEncryptedString(), encryptedHDSeed.toEncryptedString(), firstAddress,
isFromXRandom, address, null, null);
if (as.size() > 0) {
// EnDesktopAddressProvider.getInstance().completeHDMAddresses(getHdSeedId(), as);
if (uncompPubs.size() > 0) {
// EnDesktopAddressProvider.getInstance().prepareHDMAddresses(getHdSeedId(), uncompPubs);
for (HDMAddress.Pubs p : uncompPubs) {
AbstractDb.addressProvider.setHDMPubsRemote(getHdSeedId(), p.index, p.remote);
}
}
}
}
private void initHDAccount(DeterministicKey master, EncryptedData encryptedMnemonicSeed,
EncryptedData encryptedHDSeed, boolean isSyncedComplete) {
String firstAddress;
ECKey k = new ECKey(mnemonicSeed, null);
String address = k.toAddress();
k.clearPrivateKey();
DeterministicKey accountKey = getAccount(master);
DeterministicKey internalKey = getChainRootKey(accountKey, AbstractHD.PathType.INTERNAL_ROOT_PATH);
DeterministicKey externalKey = getChainRootKey(accountKey, AbstractHD.PathType.EXTERNAL_ROOT_PATH);
DeterministicKey key = externalKey.deriveSoftened(0);
firstAddress = key.toAddress();
accountKey.wipe();
master.wipe();
wipeHDSeed();
wipeMnemonicSeed();
hdSeedId = AbstractDb.desktopAddressProvider.addHDKey(encryptedMnemonicSeed.toEncryptedString(),
encryptedHDSeed.toEncryptedString(), firstAddress, isFromXRandom, address, externalKey.getPubKeyExtended(), internalKey
.getPubKeyExtended());
internalKey.wipe();
externalKey.wipe();
}
public void addAccountKey(byte[] firstByte, byte[] secondByte) {
if (new BigInteger(1, firstByte).compareTo(new BigInteger(1, secondByte)) > 0) {
byte[] temp = firstByte;
firstByte = secondByte;
secondByte = temp;
}
DeterministicKey firstAccountKey = HDKeyDerivation.createMasterPubKeyFromExtendedBytes
(firstByte);
DeterministicKey secondAccountKey = HDKeyDerivation.createMasterPubKeyFromExtendedBytes
(secondByte);
DeterministicKey firestInternalKey = getChainRootKey(firstAccountKey, AbstractHD.PathType.INTERNAL_ROOT_PATH);
DeterministicKey firestExternalKey = getChainRootKey(firstAccountKey, AbstractHD.PathType.EXTERNAL_ROOT_PATH);
DeterministicKey secondInternalKey = getChainRootKey(secondAccountKey, AbstractHD.PathType.INTERNAL_ROOT_PATH);
DeterministicKey secondExternalKey = getChainRootKey(secondAccountKey, AbstractHD.PathType.EXTERNAL_ROOT_PATH);
List<byte[]> externalPubs = new ArrayList<byte[]>();
List<byte[]> internalPubs = new ArrayList<byte[]>();
externalPubs.add(firestExternalKey.getPubKeyExtended());
externalPubs.add(secondExternalKey.getPubKeyExtended());
internalPubs.add(firestInternalKey.getPubKeyExtended());
internalPubs.add(secondInternalKey.getPubKeyExtended());
AbstractDb.desktopAddressProvider.addHDMPub(externalPubs, internalPubs);
addDesktopAddress(PathType.EXTERNAL_ROOT_PATH, LOOK_AHEAD_SIZE);
addDesktopAddress(PathType.INTERNAL_ROOT_PATH, LOOK_AHEAD_SIZE);
}
/**
 * Derives and persists the first {@code count} 2-of-3 multisig addresses
 * on the given chain (external = receive, internal = change), starting at
 * index 0.
 *
 * <p>NOTE(review): this reads three extended pubs per chain (indexes
 * 0..2) while {@link #addAccountKey(byte[], byte[])} only stores two per
 * chain — confirm the provider pre-seeds a third key, otherwise
 * {@code pubs.get(2)} throws.</p>
 *
 * @param pathType which chain to populate
 * @param count    number of addresses to derive
 */
private void addDesktopAddress(PathType pathType, int count) {
    // The two branches of the original were line-for-line identical except
    // for the pub source; select the source once and share the loop.
    List<byte[]> extendedPubs = (pathType == PathType.EXTERNAL_ROOT_PATH)
            ? AbstractDb.desktopAddressProvider.getExternalPubs()
            : AbstractDb.desktopAddressProvider.getInternalPubs();
    DeterministicKey chainKey1 = HDKeyDerivation.createMasterPubKeyFromExtendedBytes
            (extendedPubs.get(0));
    DeterministicKey chainKey2 = HDKeyDerivation.createMasterPubKeyFromExtendedBytes
            (extendedPubs.get(1));
    DeterministicKey chainKey3 = HDKeyDerivation.createMasterPubKeyFromExtendedBytes
            (extendedPubs.get(2));
    List<DesktopHDMAddress> desktopHDMAddresses = new ArrayList<DesktopHDMAddress>();
    for (int i = 0; i < count; i++) {
        HDMAddress.Pubs pubs = new HDMAddress.Pubs();
        pubs.hot = chainKey1.deriveSoftened(i).getPubKey();
        pubs.cold = chainKey2.deriveSoftened(i).getPubKey();
        pubs.remote = chainKey3.deriveSoftened(i).getPubKey();
        pubs.index = i;
        desktopHDMAddresses.add(new DesktopHDMAddress(pubs, pathType,
                DesktopHDMKeychain.this, false));
    }
    AbstractDb.desktopTxProvider.addAddress(desktopHDMAddresses);
}
/**
 * Derives {@code count} fresh change (internal-chain) addresses beyond
 * those already generated and persists them.
 *
 * @param count            number of new addresses to derive
 * @param isSyncedComplete whether the new addresses are created as already synced
 */
private void supplyNewInternalKey(int count, boolean isSyncedComplete) {
    List<DesktopHDMAddress> desktopHDMAddresses = new ArrayList<DesktopHDMAddress>();
    List<byte[]> internalPubs = AbstractDb.desktopAddressProvider.getInternalPubs();
    DeterministicKey internalKey1 = HDKeyDerivation.createMasterPubKeyFromExtendedBytes
            (internalPubs.get(0));
    DeterministicKey internalKey2 = HDKeyDerivation.createMasterPubKeyFromExtendedBytes
            (internalPubs.get(1));
    DeterministicKey internalKey3 = HDKeyDerivation.createMasterPubKeyFromExtendedBytes
            (internalPubs.get(2));
    // Continue numbering from the last generated internal address.
    int firstIndex = allGeneratedInternalAddressCount();
    for (int i = firstIndex; i < count + firstIndex; i++) {
        HDMAddress.Pubs pubs = new HDMAddress.Pubs();
        pubs.hot = internalKey1.deriveSoftened(i).getPubKey();
        pubs.cold = internalKey2.deriveSoftened(i).getPubKey();
        pubs.remote = internalKey3.deriveSoftened(i).getPubKey();
        pubs.index = i;
        desktopHDMAddresses.add(new DesktopHDMAddress(pubs, PathType.INTERNAL_ROOT_PATH,
                DesktopHDMKeychain.this, isSyncedComplete));
    }
    AbstractDb.desktopTxProvider.addAddress(desktopHDMAddresses);
    // Mirrors the logging done by supplyNewExternalKey for symmetric diagnostics.
    log.info("HD supplied {} internal addresses", desktopHDMAddresses.size());
}
/**
 * Derives {@code count} fresh receive (external-chain) addresses beyond
 * those already generated and persists them.
 *
 * @param count            number of new addresses to derive
 * @param isSyncedComplete whether the new addresses are created as already synced
 */
private void supplyNewExternalKey(int count, boolean isSyncedComplete) {
    List<byte[]> externalPubs = AbstractDb.desktopAddressProvider.getExternalPubs();
    DeterministicKey externalKey1 = HDKeyDerivation.createMasterPubKeyFromExtendedBytes
            (externalPubs.get(0));
    DeterministicKey externalKey2 = HDKeyDerivation.createMasterPubKeyFromExtendedBytes
            (externalPubs.get(1));
    DeterministicKey externalKey3 = HDKeyDerivation.createMasterPubKeyFromExtendedBytes
            (externalPubs.get(2));
    List<DesktopHDMAddress> desktopHDMAddresses = new ArrayList<DesktopHDMAddress>();
    // Continue numbering from the last generated external address.
    int firstIndex = allGeneratedExternalAddressCount();
    for (int i = firstIndex; i < count + firstIndex; i++) {
        HDMAddress.Pubs pubs = new HDMAddress.Pubs();
        pubs.hot = externalKey1.deriveSoftened(i).getPubKey();
        pubs.cold = externalKey2.deriveSoftened(i).getPubKey();
        pubs.remote = externalKey3.deriveSoftened(i).getPubKey();
        pubs.index = i;
        desktopHDMAddresses.add(new DesktopHDMAddress(pubs, PathType.EXTERNAL_ROOT_PATH,
                DesktopHDMKeychain.this, isSyncedComplete));
    }
    AbstractDb.desktopTxProvider.addAddress(desktopHDMAddresses);
    // Fixed: the original message said "internal addresses" although this
    // method supplies external ones.
    log.info("HD supplied {} external addresses", desktopHDMAddresses.size());
}
/**
 * Seeds the wallet database with transactions fetched from the API and
 * broadcasts a notification so observers refresh balances.
 *
 * @param txs transactions to persist
 * @return always true
 */
public boolean initTxs(List<Tx> txs) {
    AbstractDb.txProvider.addTxs(txs);
    notificatTx(null, Tx.TxNotificationType.txFromApi);
    return true;
}
/**
 * Notifies listeners that a tx touched this keychain, along with the
 * balance delta since the last notification.
 * (The misspelled name "notificatTx" is kept: it is part of the public API.)
 */
public void notificatTx(Tx tx, Tx.TxNotificationType txNotificationType) {
    long deltaBalance = getDeltaBalance();
    AbstractApp.notificationService.notificatTx(DesktopHDMKeychainPlaceHolder
            , tx, txNotificationType, deltaBalance);
}
/**
 * Returns true if any desktop HDM address has already been generated and
 * persisted for this keychain.
 */
public boolean hasDesktopHDMAddress() {
    return AbstractDb.desktopTxProvider.hasAddress();
}
/**
 * Persists the sync-complete flag for the given address.
 */
public void updateSyncComplete(DesktopHDMAddress accountAddress) {
    AbstractDb.desktopTxProvider.updateSyncdComplete(accountAddress);
}
/**
 * Derives the external-chain root key from the encrypted seed, wiping the
 * intermediate master and account keys before returning.
 */
private DeterministicKey externalChainRoot(CharSequence password) throws MnemonicException.MnemonicLengthException {
    DeterministicKey masterRoot = masterKey(password);
    DeterministicKey account = getAccount(masterRoot);
    DeterministicKey externalRoot = getChainRootKey(account, PathType.EXTERNAL_ROOT_PATH);
    // Scrub intermediate private key material as soon as it is no longer needed.
    masterRoot.wipe();
    account.wipe();
    return externalRoot;
}
/**
 * Returns the extended public key bytes of the external chain root; the
 * derived key is wiped before returning.
 */
public byte[] getExternalChainRootPubExtended(CharSequence password) throws MnemonicException
        .MnemonicLengthException {
    DeterministicKey rootKey = externalChainRoot(password);
    byte[] extendedPub = rootKey.getPubKeyExtended();
    rootKey.wipe();
    return extendedPub;
}
/**
 * Upper-case hex form of {@link #getExternalChainRootPubExtended(CharSequence)}.
 */
public String getExternalChainRootPubExtendedAsHex(CharSequence password) throws
        MnemonicException.MnemonicLengthException {
    byte[] extendedPub = getExternalChainRootPubExtended(password);
    return Utils.bytesToHexString(extendedPub).toUpperCase();
}
/**
 * Returns true if the given tx touches any address of this keychain.
 */
public boolean isTxRelated(Tx tx, List<String> inAddresses) {
    return !getRelatedAddressesForTx(tx, inAddresses).isEmpty();
}
public Tx newTx(String toAddress, Long amount) throws
TxBuilderException, MnemonicException.MnemonicLengthException {
return newTx(new String[]{toAddress}, new Long[]{amount});
}
public Tx newTx(String[] toAddresses, Long[] amounts) throws
TxBuilderException, MnemonicException.MnemonicLengthException {
List<Out> outs = AbstractDb.desktopTxProvider.getUnspendOutByHDAccount(hdSeedId);
Tx tx = TxBuilder.getInstance().buildTxFromAllAddress(outs, getNewChangeAddress(), Arrays
.asList(amounts), Arrays.asList(toAddresses));
List<DesktopHDMAddress> signingAddresses = getSigningAddressesForInputs(tx.getIns());
assert signingAddresses.size() == tx.getIns().size();
List<byte[]> unsignedHashes = tx.getUnsignedInHashes();
assert unsignedHashes.size() == signingAddresses.size();
// DeterministicKey master = masterKey(password);
// if (master == null) {
// return null;
// }
// DeterministicKey accountKey = getAccount(master);
// DeterministicKey external = getChainRootKey(accountKey, AbstractHD.PathType.EXTERNAL_ROOT_PATH);
// DeterministicKey internal = getChainRootKey(accountKey, AbstractHD.PathType.INTERNAL_ROOT_PATH);
// accountKey.wipe();
// master.wipe();
// ArrayList<byte[]> signatures = new ArrayList<byte[]>();
// HashMap<String, DeterministicKey> addressToKeyMap = new HashMap<String, DeterministicKey>
// (signingAddresses.size());
//
// for (int i = 0;
// i < signingAddresses.size();
// i++) {
// DesktopHDMAddress a = signingAddresses.get(i);
// byte[] unsigned = unsignedHashes.get(i);
//
// if (!addressToKeyMap.containsKey(a.getAddress())) {
// if (a.getPathType() == AbstractHD.PathType.EXTERNAL_ROOT_PATH) {
// addressToKeyMap.put(a.getAddress(), external.deriveSoftened(a.getIndex()));
// } else {
// addressToKeyMap.put(a.getAddress(), internal.deriveSoftened(a.getIndex()));
// }
// }
//
// DeterministicKey key = addressToKeyMap.get(a.getAddress());
// assert key != null;
//
// TransactionSignature signature = new TransactionSignature(key.sign(unsigned, null),
// TransactionSignature.SigHash.ALL, false);
// signatures.add(ScriptBuilder.createInputScript(signature, key).getProgram());
// }
//
// tx.signWithSignatures(signatures);
assert tx.verifySignatures();
// external.wipe();
// internal.wipe();
// for (DeterministicKey key : addressToKeyMap.values()) {
// key.wipe();
// }
return tx;
}
/**
 * Signs the tx in place: collects this side's signatures plus the remote
 * co-signer's (obtained through the delegate) and installs the combined
 * multisig input scripts.
 *
 * @param tx                    transaction to sign
 * @param unSignHash            one sighash per input, in input order
 * @param passphrase            password decrypting the local seed
 * @param desktopHDMAddresslist signing address per input, parallel to unSignHash
 * @param delegate              supplies the co-signer's signatures
 */
public void signTx(Tx tx, List<byte[]> unSignHash, CharSequence passphrase, List<DesktopHDMAddress> desktopHDMAddresslist,
                   DesktopHDMFetchOtherSignatureDelegate delegate) {
    tx.signWithSignatures(this.signWithOther(unSignHash,
            passphrase, tx, desktopHDMAddresslist, delegate));
}
/**
 * Builds the full input scripts: signs locally with this keychain's keys
 * and merges with the co-signer's signatures fetched via the delegate.
 *
 * @return one P2SH multisig input script per input, in input order
 */
public List<byte[]> signWithOther(List<byte[]> unsignHash, CharSequence password, Tx tx, List<DesktopHDMAddress> desktopHDMAddresslist,
                                  DesktopHDMFetchOtherSignatureDelegate delegate
) {
    // Translate each signing address into its (chain, index) derivation path.
    List<PathTypeIndex> pathTypeIndexList = new ArrayList<PathTypeIndex>();
    for (DesktopHDMAddress desktopHDMAddress : desktopHDMAddresslist) {
        PathTypeIndex pathTypeIndex = new PathTypeIndex();
        pathTypeIndex.index = desktopHDMAddress.getIndex();
        pathTypeIndex.pathType = desktopHDMAddress.getPathType();
        pathTypeIndexList.add(pathTypeIndex);
    }
    ArrayList<TransactionSignature> hotSigs = signMyPart(unsignHash, password, pathTypeIndexList);
    List<TransactionSignature> otherSigs = delegate.getOtherSignature(
            tx, unsignHash, pathTypeIndexList);
    // All three lists must be parallel (one entry per input).
    assert hotSigs.size() == otherSigs.size() && hotSigs.size() == unsignHash.size();
    return formatInScript(hotSigs, otherSigs, desktopHDMAddresslist);
}
/**
 * Signs each hash with the cold-side key for the corresponding path/index
 * and returns the DER-encoded signatures in the same order. Each key is
 * decrypted with {@code password} and wiped immediately after signing.
 *
 * Fix: removed a leftover debug {@code System.out.println} that dumped the
 * derived public key (Base58) to stdout for every external-path signature —
 * key material must not be written to the console in a signing path.
 */
public ArrayList<byte[]> signWithCold(List<byte[]> unsignedHashes,
                                      CharSequence password,
                                      List<PathTypeIndex> pathTypeIndexList) {
    ArrayList<byte[]> sigs = new ArrayList<byte[]>();
    for (int i = 0; i < unsignedHashes.size(); i++) {
        PathTypeIndex pathTypeIndex = pathTypeIndexList.get(i);
        // Pick the key from the external (receive) or internal (change) chain.
        DeterministicKey key;
        if (pathTypeIndex.pathType == PathType.EXTERNAL_ROOT_PATH) {
            key = getExternalKey(pathTypeIndex.index, password);
        } else {
            key = getInternalKey(pathTypeIndex.index, password);
        }
        ECKey.ECDSASignature signed = key.sign(unsignedHashes.get(i));
        sigs.add(signed.encodeToDER());
        key.wipe(); // never keep decrypted private-key material around
    }
    return sigs;
}
/**
 * Signs each unsigned hash with this keychain's key for the matching
 * path/index and wraps each signature as SIGHASH_ALL. Keys are wiped right
 * after use.
 *
 * @return one {@code TransactionSignature} per unsigned hash, in order
 */
public ArrayList<TransactionSignature> signMyPart(List<byte[]> unsignedHashes,
                                                  CharSequence password,
                                                  List<PathTypeIndex> pathTypeIndexList) {
    ArrayList<TransactionSignature> result = new ArrayList<TransactionSignature>();
    int count = unsignedHashes.size();
    for (int i = 0; i < count; i++) {
        PathTypeIndex path = pathTypeIndexList.get(i);
        DeterministicKey signingKey = (path.pathType == PathType.EXTERNAL_ROOT_PATH)
                ? getExternalKey(path.index, password)
                : getInternalKey(path.index, password);
        result.add(new TransactionSignature(signingKey.sign(unsignedHashes.get(i)),
                TransactionSignature.SigHash.ALL, false));
        signingKey.wipe(); // discard decrypted key material immediately
    }
    return result;
}
/**
 * Combines the two signature lists pairwise into P2SH 2-of-2 multisig input
 * scripts, using each address's redeem-script public keys.
 *
 * @return serialized input-script programs, one per input
 */
public static List<byte[]> formatInScript(List<TransactionSignature> signs1,
                                          List<TransactionSignature> signs2,
                                          List<DesktopHDMAddress> addressList) {
    List<byte[]> scripts = new ArrayList<byte[]>();
    for (int i = 0; i < signs1.size(); i++) {
        List<TransactionSignature> pair = new ArrayList<TransactionSignature>(2);
        pair.add(signs1.get(i));
        pair.add(signs2.get(i));
        DesktopHDMAddress address = addressList.get(i);
        scripts.add(ScriptBuilder.createP2SHMultiSigInputScript(pair,
                address.getPubKey()).getProgram());
    }
    return scripts;
}
/**
 * Returns every keychain address touched by {@code tx}: the output addresses
 * that belong to this account plus the input addresses that belong to it.
 */
public List<DesktopHDMAddress> getRelatedAddressesForTx(Tx tx, List<String> inAddresses) {
    List<DesktopHDMAddress> related = new ArrayList<DesktopHDMAddress>();
    // Gather all destination addresses, then keep those owned by this account.
    List<String> outAddresses = new ArrayList<String>();
    for (Out out : tx.getOuts()) {
        outAddresses.add(out.getOutAddress());
    }
    List<DesktopHDMAddress> ownedOuts =
            AbstractDb.desktopTxProvider.belongAccount(DesktopHDMKeychain.this, outAddresses);
    if (ownedOuts != null && !ownedOuts.isEmpty()) {
        related.addAll(ownedOuts);
    }
    // Same check for the spending side.
    List<DesktopHDMAddress> ownedIns = getAddressFromIn(inAddresses);
    if (ownedIns != null && !ownedIns.isEmpty()) {
        related.addAll(ownedIns);
    }
    return related;
}
/** Returns the subset of {@code addresses} that belongs to this keychain. */
private List<DesktopHDMAddress> getAddressFromIn(List<String> addresses) {
    return AbstractDb.desktopTxProvider.belongAccount(DesktopHDMKeychain.this, addresses);
}
/** Returns the next unissued change (internal-chain) address. */
public String getNewChangeAddress() {
    int nextIndex = issuedInternalIndex() + 1;
    return addressForPath(AbstractHD.PathType.INTERNAL_ROOT_PATH, nextIndex).getAddress();
}
/**
 * Looks up the persisted address at {@code index} on the given chain.
 * The index must already have been generated (see supplyEnoughKeys).
 */
private DesktopHDMAddress addressForPath(AbstractHD.PathType type, int index) {
    int generatedCount = (type == AbstractHD.PathType.EXTERNAL_ROOT_PATH)
            ? allGeneratedExternalAddressCount()
            : allGeneratedInternalAddressCount();
    assert index < generatedCount;
    return AbstractDb.desktopTxProvider.addressForPath(DesktopHDMKeychain.this, type, index);
}
/** Whether this keychain's entropy came from the XRandom source. */
public boolean isFromXRandom() {
    return this.isFromXRandom;
}
/**
 * Returns the encrypted mnemonic seed wrapped in the full HDM key-chain
 * export format (including the XRandom flag).
 */
public String getFullEncryptPrivKey() {
    return PrivateKeyUtil.getFullencryptHDMKeyChain(isFromXRandom, getEncryptedMnemonicSeed());
}
/**
 * Looks up, via the persistence layer, the keychain address that must sign
 * each of the given transaction inputs.
 */
public List<DesktopHDMAddress> getSigningAddressesForInputs(List<In> inputs) {
    return AbstractDb.desktopTxProvider.getSigningAddressesForInputs(DesktopHDMKeychain.this, inputs);
}
/** Full encrypted private-key export prefixed with the HDM QR-code marker. */
public String getQRCodeFullEncryptPrivKey() {
    return QRCodeUtil.HDM_QR_CODE_FLAG + getFullEncryptPrivKey();
}
/**
 * Returns the stored encrypted HD seed (upper-cased), or {@code null} when
 * none is persisted for this seed id.
 */
@Override
protected String getEncryptedHDSeed() {
    String encrypted = AbstractDb.desktopAddressProvider.getEncryptHDSeed(hdSeedId);
    return encrypted == null ? null : encrypted.toUpperCase();
}
/** Returns the stored encrypted mnemonic seed, upper-cased. */
@Override
public String getEncryptedMnemonicSeed() {
    String encrypted = AbstractDb.desktopAddressProvider.getEncryptMnemonicSeed(hdSeedId);
    return encrypted.toUpperCase();
}
/**
 * Returns the first address recorded for this keychain; used by
 * {@code checkWithPassword} as an integrity reference for the stored seed.
 */
public String getFirstAddressFromDb() {
    return AbstractDb.desktopAddressProvider.getHDMFristAddress(hdSeedId);
}
/**
 * Verifies {@code password} by decrypting both stored seeds and checking
 * their internal consistency:
 * <ul>
 *   <li>the address derived from the decrypted HD seed must match the
 *       first address recorded at creation time, and</li>
 *   <li>the seed regenerated from the decrypted mnemonic must equal the
 *       decrypted HD seed.</li>
 * </ul>
 * All decrypted material is wiped before returning. Any failure (including
 * a wrong password causing a decryption exception) yields {@code false}.
 */
public boolean checkWithPassword(CharSequence password) {
    try {
        decryptHDSeed(password);
        decryptMnemonicSeed(password);
        // Copy hdSeed before wiping so we can compare it with the
        // mnemonic-derived seed below.
        byte[] hdCopy = Arrays.copyOf(hdSeed, hdSeed.length);
        boolean hdSeedSafe = Utils.compareString(getFirstAddressFromDb(),
                getFirstAddressFromSeed(null));
        boolean mnemonicSeedSafe = Arrays.equals(seedFromMnemonic(mnemonicSeed), hdCopy);
        // Wipe every piece of decrypted secret material before returning.
        Utils.wipeBytes(hdCopy);
        wipeHDSeed();
        wipeMnemonicSeed();
        return hdSeedSafe && mnemonicSeedSafe;
    } catch (Exception e) {
        // Wrong password (or corrupt data) surfaces here as a decrypt failure.
        e.printStackTrace();
        return false;
    }
}
/**
 * Requests the server-side public keys for the given partial pub sets and
 * fills each entry's {@code remote} field in place (same order as the
 * request).
 *
 * @throws Exception if the HTTP call or password decryption fails
 */
public static void getRemotePublicKeys(HDMBId hdmBId, CharSequence password,
                                       List<HDMAddress.Pubs> partialPubs) throws Exception {
    byte[] decryptedPassword = hdmBId.decryptHDMBIdPassword(password);
    CreateHDMAddressApi api =
            new CreateHDMAddressApi(hdmBId.getAddress(), partialPubs, decryptedPassword);
    api.handleHttpPost();
    List<byte[]> remotePubs = api.getResult();
    for (int i = 0; i < partialPubs.size(); i++) {
        partialPubs.get(i).remote = remotePubs.get(i);
    }
}
/**
 * Unchecked exception carrying the fixed message {@link #msg}, raised when
 * an HDM Bither id comparison fails.
 */
public static final class HDMBitherIdNotMatchException extends RuntimeException {
    public static final String msg = "HDM Bid Not Match";
    public HDMBitherIdNotMatchException() {
        super(msg);
    }
}
/**
 * Verifies {@code password} against an exported password-seed QR payload.
 * The payload is split into an address (Base58) and an encrypted seed; the
 * seed is decrypted, re-derived along the hardened path m/44'/0'/0' then
 * soft path /0/0, and the resulting key's address is compared with the
 * embedded address.
 *
 * @return true iff the derived address matches the one in the payload
 * @throws MnemonicException.MnemonicLengthException if the decrypted seed
 *         has an invalid mnemonic length
 */
public static boolean checkPassword(String keysString, CharSequence password) throws
        MnemonicException.MnemonicLengthException {
    String[] passwordSeeds = QRCodeUtil.splitOfPasswordSeed(keysString);
    // Field 0 is the hex-encoded address; fields 1-3 form the encrypted seed.
    String address = Base58.hexToBase58WithAddress(passwordSeeds[0]);
    String encreyptString = Utils.joinString(new String[]{passwordSeeds[1], passwordSeeds[2],
            passwordSeeds[3]}, QRCodeUtil.QR_CODE_SPLIT);
    byte[] seed = new EncryptedData(encreyptString).decrypt(password);
    MnemonicCode mnemonic = MnemonicCode.instance();
    // Round-trip through the mnemonic words with an empty passphrase.
    byte[] s = mnemonic.toSeed(mnemonic.toMnemonic(seed), "");
    DeterministicKey master = HDKeyDerivation.createMasterPrivateKey(s);
    DeterministicKey purpose = master.deriveHardened(44);
    DeterministicKey coinType = purpose.deriveHardened(0);
    DeterministicKey account = coinType.deriveHardened(0);
    DeterministicKey external = account.deriveSoftened(0);
    // Drop the private part before the final derivation; only the public
    // key hash is needed for the address comparison.
    external.clearPrivateKey();
    DeterministicKey key = external.deriveSoftened(0);
    boolean result = Utils.compareString(address, Utils.toAddress(key.getPubKeyHash()));
    key.wipe();
    return result;
}
/**
 * Tops up both chains so that LOOK_AHEAD_SIZE unissued addresses always
 * exist beyond the highest issued index.
 */
public void supplyEnoughKeys(boolean isSyncedComplete) {
    int neededExternal = issuedExternalIndex() + 1 + LOOK_AHEAD_SIZE
            - allGeneratedExternalAddressCount();
    if (neededExternal > 0) {
        supplyNewExternalKey(neededExternal, isSyncedComplete);
    }
    int neededInternal = issuedInternalIndex() + 1 + LOOK_AHEAD_SIZE
            - allGeneratedInternalAddressCount();
    if (neededInternal > 0) {
        supplyNewInternalKey(neededInternal, isSyncedComplete);
    }
}
/**
 * Reacts to a new transaction touching this keychain: advances the issued
 * index on each chain to the highest index seen, replenishes the look-ahead
 * window, recomputes the balance, and notifies listeners with the delta.
 */
public void onNewTx(Tx tx, List<DesktopHDMAddress> relatedAddresses, Tx.TxNotificationType txNotificationType) {
    if (relatedAddresses == null || relatedAddresses.isEmpty()) {
        return;
    }
    int maxExternal = -1;
    int maxInternal = -1;
    for (DesktopHDMAddress address : relatedAddresses) {
        if (address.getPathType() == AbstractHD.PathType.EXTERNAL_ROOT_PATH) {
            maxExternal = Math.max(maxExternal, address.getIndex());
        } else {
            maxInternal = Math.max(maxInternal, address.getIndex());
        }
    }
    log.info("HD on new tx issued ex {}, issued in {}", maxExternal, maxInternal);
    // Only advance the issued markers, never move them backwards.
    if (maxExternal >= 0 && maxExternal > issuedExternalIndex()) {
        updateIssuedExternalIndex(maxExternal);
    }
    if (maxInternal >= 0 && maxInternal > issuedInternalIndex()) {
        updateIssuedInternalIndex(maxInternal);
    }
    supplyEnoughKeys(true);
    long deltaBalance = getDeltaBalance();
    AbstractApp.notificationService.notificatTx(DesktopHDMKeychainPlaceHolder, tx, txNotificationType,
            deltaBalance);
}
/**
 * Number of elements this keychain will insert into a bloom filter: two per
 * generated external address plus one per unspent internal-chain output.
 */
public int elementCountForBloomFilter() {
    int externalElements = allGeneratedExternalAddressCount() * 2;
    int internalOutCount = AbstractDb.desktopTxProvider
            .getUnspendOutCountByHDAccountWithPath(getHdSeedId(),
                    AbstractHD.PathType.INTERNAL_ROOT_PATH);
    return externalElements + internalOutCount;
}
/**
 * Inserts this keychain's watch data into {@code filter}: for each external
 * address both its multisig script and the script's hash160, and for each
 * unspent internal-chain output its outpoint bytes.
 */
public void addElementsForBloomFilter(BloomFilter filter) {
    for (HDMAddress.Pubs pub
            : AbstractDb.desktopTxProvider.getPubs(AbstractHD.PathType.EXTERNAL_ROOT_PATH)) {
        byte[] script = pub.getMultiSigScript().getProgram();
        filter.insert(script);
        filter.insert(Utils.sha256hash160(script));
    }
    for (Out out : AbstractDb.desktopTxProvider.getUnspendOutByHDAccountWithPath(
            getHdSeedId(), AbstractHD.PathType.INTERNAL_ROOT_PATH)) {
        filter.insert(out.getOutpointData());
    }
}
/**
 * Computes the net balance contribution of unconfirmed transactions.
 *
 * Walks the account's unconfirmed transactions (newest-sorted list iterated
 * from the end, i.e. oldest first), skipping double-spends and descendants
 * of skipped transactions, crediting outputs paid to this account and
 * debiting previously-credited outputs that a later transaction spends.
 *
 * NOTE(review): {@code invalidTx} and {@code inHashes} are {@code
 * Set<byte[]>}; Java arrays use identity equals/hashCode, so membership
 * tests only match when the exact same array instance is reused. Whether
 * {@code Utils.isIntersects} compensates for this cannot be seen from here
 * — verify against Utils, otherwise the double-spend-descendant check may
 * never fire.
 */
private long calculateUnconfirmedBalance() {
    long balance = 0;
    List<Tx> txs = AbstractDb.desktopTxProvider.getHDAccountUnconfirmedTx();
    Collections.sort(txs);
    Set<byte[]> invalidTx = new HashSet<byte[]>();
    Set<OutPoint> spentOut = new HashSet<OutPoint>();
    Set<OutPoint> unspendOut = new HashSet<OutPoint>();
    // Iterate from the end of the sorted list (oldest transactions first).
    for (int i = txs.size() - 1; i >= 0; i--) {
        Set<OutPoint> spent = new HashSet<OutPoint>();
        Tx tx = txs.get(i);
        Set<byte[]> inHashes = new HashSet<byte[]>();
        for (In in : tx.getIns()) {
            spent.add(new OutPoint(in.getPrevTxHash(), in.getPrevOutSn()));
            inHashes.add(in.getPrevTxHash());
        }
        // Drop unconfirmed txs that double-spend an already-seen outpoint or
        // descend from an already-invalidated tx.
        if (tx.getBlockNo() == Tx.TX_UNCONFIRMED
                && (Utils.isIntersects(spent, spentOut) || Utils.isIntersects(inHashes, invalidTx))) {
            invalidTx.add(tx.getTxHash());
            continue;
        }
        spentOut.addAll(spent);
        // Credit every output paid to an address of this account.
        HashSet<String> addressSet = getBelongAccountAddresses(tx.getOutAddressList());
        for (Out out : tx.getOuts()) {
            if (addressSet.contains(out.getOutAddress())) {
                unspendOut.add(new OutPoint(tx.getTxHash(), out.getOutSn()));
                balance += out.getOutValue();
            }
        }
        // Debit any previously-credited output that is now spent
        // (spent := unspendOut ∩ spentOut, reusing the temp set).
        spent.clear();
        spent.addAll(unspendOut);
        spent.retainAll(spentOut);
        for (OutPoint o : spent) {
            Tx tx1 = AbstractDb.txProvider.getTxDetailByTxHash(o.getTxHash());
            unspendOut.remove(o);
            for (Out out : tx1.getOuts()) {
                if (out.getOutSn() == o.getOutSn()) {
                    balance -= out.getOutValue();
                }
            }
        }
    }
    return balance;
}
/** Recomputes the balance and returns the change relative to the old value. */
private long getDeltaBalance() {
    long previousBalance = this.balance;
    updateBalance();
    return this.balance - previousBalance;
}
/** Refreshes {@code balance}: confirmed (from DB) plus unconfirmed delta. */
public void updateBalance() {
    long confirmed = AbstractDb.desktopTxProvider.getHDAccountConfirmedBanlance(hdSeedId);
    this.balance = confirmed + calculateUnconfirmedBalance();
}
/** Last computed balance in satoshis; call updateBalance() to refresh. */
public long getBalance() {
    return balance;
}
/**
 * Returns the subset of {@code addressList} that the persistence layer
 * reports as belonging to this account.
 */
public HashSet<String> getBelongAccountAddresses(List<String> addressList) {
    return AbstractDb.desktopTxProvider.getBelongAccountAddresses(addressList);
}
/** Persists the highest internal (change) chain index issued so far. */
public void updateIssuedInternalIndex(int index) {
    AbstractDb.desktopTxProvider.updateIssuedIndex(PathType.INTERNAL_ROOT_PATH, index);
}

/** Persists the highest external (receive) chain index issued so far. */
public void updateIssuedExternalIndex(int index) {
    AbstractDb.desktopTxProvider.updateIssuedIndex(PathType.EXTERNAL_ROOT_PATH, index);
}

public byte[] getInternalPub() {
    // Stubbed: original lookup was
    // return AbstractDb.addressProvider.getInternalPub(hdSeedId);
    // Currently always returns an empty array.
    return new byte[]{};
}

public byte[] getExternalPub() {
    // Stubbed, same as getInternalPub():
    //return AbstractDb.addressProvider.getExternalPub(hdSeedId);
    return new byte[]{};
}

/** Highest internal-chain index issued, per the persistence layer. */
public int issuedInternalIndex() {
    return AbstractDb.desktopTxProvider.issuedIndex(PathType.INTERNAL_ROOT_PATH);
}

/** Highest external-chain index issued, per the persistence layer. */
public int issuedExternalIndex() {
    return AbstractDb.desktopTxProvider.issuedIndex(PathType.EXTERNAL_ROOT_PATH);
}

/** Count of internal-chain addresses generated so far. */
private int allGeneratedInternalAddressCount() {
    return AbstractDb.desktopTxProvider.allGeneratedAddressCount(PathType
            .INTERNAL_ROOT_PATH);
}

/** Count of external-chain addresses generated so far. */
private int allGeneratedExternalAddressCount() {
    return AbstractDb.desktopTxProvider.allGeneratedAddressCount(PathType
            .EXTERNAL_ROOT_PATH);
}
/** Extended master public key as an upper-case hex string. */
public String getMasterPubKeyExtendedStr(CharSequence password) {
    return Utils.bytesToHexString(getMasterPubKeyExtended(password)).toUpperCase(Locale.US);
}
/** True when every generated address has been synced. */
public boolean isSyncComplete() {
    return AbstractDb.desktopTxProvider.unSyncedAddressCount() == 0;
}
/** Returns an external (receive) address supplied by the persistence layer. */
public String externalAddress() {
    return AbstractDb.desktopTxProvider.externalAddress();
}

// Queue of pending send requests; each map presumably keys an address to an
// amount — TODO confirm against the producers/consumers of this queue.
public LinkedBlockingQueue<HashMap<String, Long>> getSendRequestList() {
    return this.sendRequestList;
}
}
| apache-2.0 |
flofreud/aws-sdk-java | aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/transform/UserBucketStaxUnmarshaller.java | 2671 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model.transform;
import java.util.Map;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Map.Entry;
import javax.xml.stream.events.XMLEvent;
import com.amazonaws.services.ec2.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.MapEntry;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
 * UserBucket StAX Unmarshaller.
 *
 * <p>Streams through XML events and populates a {@link UserBucket} from the
 * {@code S3Bucket} and {@code S3Key} elements found one level below the
 * element at which unmarshalling began. (Generated AWS SDK code.)
 */
public class UserBucketStaxUnmarshaller implements
        Unmarshaller<UserBucket, StaxUnmarshallerContext> {

    public UserBucket unmarshall(StaxUnmarshallerContext context)
            throws Exception {
        UserBucket userBucket = new UserBucket();
        // Depth bookkeeping: only elements nested directly under the element
        // current at entry are consumed (one extra level at document start).
        int originalDepth = context.getCurrentDepth();
        int targetDepth = originalDepth + 1;
        if (context.isStartOfDocument())
            targetDepth += 1;
        while (true) {
            XMLEvent xmlEvent = context.nextEvent();
            if (xmlEvent.isEndDocument())
                return userBucket;
            if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
                if (context.testExpression("S3Bucket", targetDepth)) {
                    userBucket.setS3Bucket(StringStaxUnmarshaller.getInstance()
                            .unmarshall(context));
                    continue;
                }
                if (context.testExpression("S3Key", targetDepth)) {
                    userBucket.setS3Key(StringStaxUnmarshaller.getInstance()
                            .unmarshall(context));
                    continue;
                }
            } else if (xmlEvent.isEndElement()) {
                if (context.getCurrentDepth() < originalDepth) {
                    // Left the enclosing element: unmarshalling is complete.
                    return userBucket;
                }
            }
        }
    }

    private static UserBucketStaxUnmarshaller instance;

    // Lazily-created singleton. NOTE(review): not synchronized; the race is
    // benign only because construction is cheap and stateless — confirm this
    // matches the SDK's threading assumptions.
    public static UserBucketStaxUnmarshaller getInstance() {
        if (instance == null)
            instance = new UserBucketStaxUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
AndroidX/androidx | leanback/leanback/src/androidTest/java/androidx/leanback/media/PlaybackTransportControlGlueTest.java | 13527 | /*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.leanback.media;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.when;
import android.content.Context;
import android.view.ContextThemeWrapper;
import android.view.KeyEvent;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import androidx.leanback.widget.PlaybackControlsRow;
import androidx.leanback.widget.PlaybackRowPresenter;
import androidx.leanback.widget.PlaybackTransportRowPresenter;
import androidx.leanback.widget.RowPresenter;
import androidx.test.filters.MediumTest;
import androidx.test.platform.app.InstrumentationRegistry;
import org.junit.Test;
import org.mockito.Mockito;
/**
 * Tests for {@code PlaybackTransportControlGlue}: host/row/presenter wiring,
 * PlayerAdapter delegation, event buffering across host attach/detach, and
 * media-key handling in play vs. pause state.
 */
@MediumTest
public class PlaybackTransportControlGlueTest {

    /** Minimal concrete PlayerAdapter whose play/pause are no-ops. */
    public static class PlayerAdapterSample extends PlayerAdapter {
        @Override
        public void play() {
        }
        @Override
        public void pause() {
        }
    }

    /** Concrete glue under test; second ctor allows injecting a mock adapter. */
    @SuppressWarnings("unchecked")
    public static class PlaybackTransportControlGlueImpl
            extends PlaybackTransportControlGlue {
        public PlaybackTransportControlGlueImpl(Context context) {
            super(context, new PlayerAdapterSample());
        }
        public PlaybackTransportControlGlueImpl(Context context, PlayerAdapter impl) {
            super(context, impl);
        }
    }

    Context mContext;
    PlaybackTransportControlGlueImpl mGlue;
    PlaybackTransportRowPresenter.ViewHolder mViewHolder;
    PlayerAdapter mAdapter;

    /**
     * Builds a glue around a mocked, "prepared" adapter (position 123,
     * duration 20000, buffered 321), attaches it to a host, and binds a row
     * view holder — all on the main thread.
     */
    void setupWithMockAdapterAndViewHolder() {
        mContext = new ContextThemeWrapper(
                InstrumentationRegistry.getInstrumentation().getTargetContext(),
                androidx.leanback.test.R.style.Theme_Leanback);
        mAdapter = Mockito.mock(PlayerAdapter.class);
        when(mAdapter.isPrepared()).thenReturn(true);
        when(mAdapter.getCurrentPosition()).thenReturn(123L);
        when(mAdapter.getDuration()).thenReturn(20000L);
        when(mAdapter.getBufferedPosition()).thenReturn(321L);
        InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mGlue = new PlaybackTransportControlGlueImpl(mContext, mAdapter);
                PlaybackGlueHostImpl host = new PlaybackGlueHostImpl();
                mGlue.setHost(host);
                PlaybackTransportRowPresenter presenter = (PlaybackTransportRowPresenter)
                        mGlue.getPlaybackRowPresenter();
                FrameLayout parent = new FrameLayout(mContext);
                mViewHolder = (PlaybackTransportRowPresenter.ViewHolder)
                        presenter.onCreateViewHolder(parent);
                presenter.onBindViewHolder(mViewHolder, mGlue.getControlsRow());
            }
        });
    }

    /** Puts the mocked adapter into the playing state and notifies the glue. */
    void playMockAdapter() {
        mGlue.play();
        Mockito.verify(mAdapter, times(1)).play();
        when(mAdapter.isPlaying()).thenReturn(true);
        mAdapter.getCallback().onPlayStateChanged(mAdapter);
    }

    // With no customization, setHost installs the default transport presenter
    // and a default PlaybackControlsRow.
    @Test
    public void usingDefaultRowAndPresenter() {
        mContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
        InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mGlue = new PlaybackTransportControlGlueImpl(mContext);
            }
        });
        PlaybackGlueHostImpl host = new PlaybackGlueHostImpl();
        mGlue.setHost(host);
        assertSame(mGlue, host.mGlue);
        assertSame(host, mGlue.getHost());
        assertTrue(host.mPlaybackRowPresenter instanceof PlaybackTransportRowPresenter);
        assertTrue(host.mRow instanceof PlaybackControlsRow);
    }

    // A presenter set before setHost must be propagated to the host unchanged.
    @Test
    public void customRowPresenter() {
        mContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
        InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mGlue = new PlaybackTransportControlGlueImpl(mContext);
            }
        });
        PlaybackRowPresenter presenter = new PlaybackRowPresenter() {
            @Override
            protected RowPresenter.ViewHolder createRowViewHolder(ViewGroup parent) {
                return new RowPresenter.ViewHolder(new LinearLayout(parent.getContext()));
            }
        };
        mGlue.setPlaybackRowPresenter(presenter);
        PlaybackGlueHostImpl host = new PlaybackGlueHostImpl();
        mGlue.setHost(host);
        assertSame(mGlue, host.mGlue);
        assertSame(host, mGlue.getHost());
        assertSame(host.mPlaybackRowPresenter, presenter);
        assertTrue(host.mRow instanceof PlaybackControlsRow);
    }

    // A controls row set before setHost must be propagated to the host.
    @Test
    public void customControlsRow() {
        mContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
        InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mGlue = new PlaybackTransportControlGlueImpl(mContext);
            }
        });
        PlaybackControlsRow row = new PlaybackControlsRow(mContext);
        mGlue.setControlsRow(row);
        PlaybackGlueHostImpl host = new PlaybackGlueHostImpl();
        mGlue.setHost(host);
        assertSame(mGlue, host.mGlue);
        assertSame(host, mGlue.getHost());
        assertTrue(host.mPlaybackRowPresenter instanceof PlaybackTransportRowPresenter);
        assertSame(host.mRow, row);
    }

    // Both row and presenter customized together.
    @Test
    public void customRowAndPresenter() {
        mContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
        InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mGlue = new PlaybackTransportControlGlueImpl(mContext);
            }
        });
        PlaybackControlsRow row = new PlaybackControlsRow(mContext);
        mGlue.setControlsRow(row);
        PlaybackRowPresenter presenter = new PlaybackRowPresenter() {
            @Override
            protected RowPresenter.ViewHolder createRowViewHolder(ViewGroup parent) {
                return new RowPresenter.ViewHolder(new LinearLayout(parent.getContext()));
            }
        };
        mGlue.setPlaybackRowPresenter(presenter);
        PlaybackGlueHostImpl host = new PlaybackGlueHostImpl();
        mGlue.setHost(host);
        assertSame(mGlue, host.mGlue);
        assertSame(host, mGlue.getHost());
        assertSame(host.mPlaybackRowPresenter, presenter);
        assertSame(host.mRow, row);
    }

    // Glue calls delegate to the adapter; adapter callbacks update the
    // controls row, including durations wider than int.
    @Test
    public void playerAdapterTest() {
        setupWithMockAdapterAndViewHolder();
        mGlue.play();
        Mockito.verify(mAdapter, times(1)).play();
        mGlue.pause();
        Mockito.verify(mAdapter, times(1)).pause();
        mGlue.seekTo(123L);
        Mockito.verify(mAdapter, times(1)).seekTo(123L);
        assertEquals(123L, mGlue.getCurrentPosition());
        assertEquals(20000L, mGlue.getDuration());
        assertEquals(321L, mGlue.getBufferedPosition());
        assertSame(mGlue.mAdapterCallback, mAdapter.getCallback());
        when(mAdapter.getCurrentPosition()).thenReturn(124L);
        mAdapter.getCallback().onCurrentPositionChanged(mAdapter);
        assertEquals(124L, mGlue.getControlsRow().getCurrentPosition());
        when(mAdapter.getBufferedPosition()).thenReturn(333L);
        mAdapter.getCallback().onBufferedPositionChanged(mAdapter);
        assertEquals(333L, mGlue.getControlsRow().getBufferedPosition());
        when(mAdapter.getDuration()).thenReturn((long) (Integer.MAX_VALUE) * 2);
        mAdapter.getCallback().onDurationChanged(mAdapter);
        assertEquals((long) (Integer.MAX_VALUE) * 2, mGlue.getControlsRow().getDuration());
    }

    // Adapter events fired before a host is attached must be replayed to the
    // first host; buffering/video-size persist across host switches, but a
    // one-shot error is delivered only once.
    @Test
    public void savePlayerAdapterEventBeforeAttachToHost() {
        mContext = new ContextThemeWrapper(
                InstrumentationRegistry.getInstrumentation().getTargetContext(),
                androidx.leanback.test.R.style.Theme_Leanback);
        final PlayerAdapter impl = Mockito.mock(PlayerAdapter.class);
        when(impl.isPrepared()).thenReturn(true);
        when(impl.getCurrentPosition()).thenReturn(123L);
        when(impl.getDuration()).thenReturn(20000L);
        when(impl.getBufferedPosition()).thenReturn(321L);
        final PlaybackGlueHost.PlayerCallback hostCallback = Mockito.mock(
                PlaybackGlueHost.PlayerCallback.class);
        InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mGlue = new PlaybackTransportControlGlueImpl(mContext, impl);
                // fire events before attach to host.
                impl.getCallback().onBufferingStateChanged(impl, true);
                impl.getCallback().onVideoSizeChanged(impl, 200, 150);
                impl.getCallback().onError(impl, 12, "abc");
                PlaybackGlueHostImpl host = new PlaybackGlueHostImpl();
                host.setPlayerCallback(hostCallback);
                mGlue.setHost(host);
            }
        });
        // when attach to host, should pass the buffering state, video size and last error message
        // to the host.
        Mockito.verify(hostCallback, times(1)).onBufferingStateChanged(true);
        Mockito.verify(hostCallback, times(1)).onVideoSizeChanged(200, 150);
        Mockito.verify(hostCallback, times(1)).onError(12, "abc");
        Mockito.reset(hostCallback);
        final PlaybackGlueHost.PlayerCallback hostCallback2 = Mockito.mock(
                PlaybackGlueHost.PlayerCallback.class);
        InstrumentationRegistry.getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                PlaybackGlueHostImpl host = new PlaybackGlueHostImpl();
                host.setPlayerCallback(hostCallback2);
                mGlue.setHost(host);
            }
        });
        // when detach from host, should have host stop buffering.
        Mockito.verify(hostCallback, times(1)).onBufferingStateChanged(false);
        Mockito.verify(hostCallback, times(0)).onVideoSizeChanged(anyInt(), anyInt());
        Mockito.verify(hostCallback, times(0)).onError(anyInt(), anyString());
        // attach to a different host, buffering state and video size should be saved, one time
        // error state is not saved.
        Mockito.verify(hostCallback2, times(1)).onBufferingStateChanged(true);
        Mockito.verify(hostCallback2, times(1)).onVideoSizeChanged(200, 150);
        Mockito.verify(hostCallback2, times(0)).onError(anyInt(), anyString());
    }

    // While playing: PLAY_PAUSE and PAUSE pause playback; PLAY is a no-op.
    @Test
    public void playStateReceivePlayPause() {
        setupWithMockAdapterAndViewHolder();
        playMockAdapter();
        mGlue.onKey(null, KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE,
                new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE));
        Mockito.verify(mAdapter, times(1)).pause();
    }

    @Test
    public void playStateReceivePause() {
        setupWithMockAdapterAndViewHolder();
        playMockAdapter();
        mGlue.onKey(null, KeyEvent.KEYCODE_MEDIA_PAUSE,
                new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_MEDIA_PAUSE));
        Mockito.verify(mAdapter, times(1)).pause();
    }

    @Test
    public void playStateReceivePlay() {
        setupWithMockAdapterAndViewHolder();
        playMockAdapter();
        mGlue.onKey(null, KeyEvent.KEYCODE_MEDIA_PLAY,
                new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_MEDIA_PLAY));
        Mockito.verify(mAdapter, never()).pause();
    }

    // While paused: PLAY_PAUSE and PLAY start playback; PAUSE is a no-op.
    @Test
    public void pauseStateReceivePlayPause() {
        setupWithMockAdapterAndViewHolder();
        mGlue.onKey(null, KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE,
                new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE));
        Mockito.verify(mAdapter, times(1)).play();
    }

    @Test
    public void pauseStateReceivePause() {
        setupWithMockAdapterAndViewHolder();
        mGlue.onKey(null, KeyEvent.KEYCODE_MEDIA_PAUSE,
                new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_MEDIA_PAUSE));
        Mockito.verify(mAdapter, never()).play();
    }

    @Test
    public void pauseStateReceivePlay() {
        setupWithMockAdapterAndViewHolder();
        mGlue.onKey(null, KeyEvent.KEYCODE_MEDIA_PLAY,
                new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_MEDIA_PLAY));
        Mockito.verify(mAdapter, times(1)).play();
    }
}
| apache-2.0 |
AndroidX/androidx | car/app/app-projected/src/test/java/androidx/car/app/hardware/common/CarHardwareHostDispatcherTest.java | 5196 | /*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.car.app.hardware.common;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import android.os.IBinder;
import android.os.RemoteException;
import androidx.car.app.CarContext;
import androidx.car.app.HostDispatcher;
import androidx.car.app.ICarHost;
import androidx.car.app.hardware.ICarHardwareHost;
import androidx.car.app.hardware.ICarHardwareResult;
import androidx.car.app.hardware.ICarHardwareResultTypes;
import androidx.car.app.serialization.Bundleable;
import androidx.car.app.serialization.BundlerException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.internal.DoNotInstrument;
@RunWith(RobolectricTestRunner.class)
@DoNotInstrument
public class CarHardwareHostDispatcherTest {
@Mock
private ICarHost mMockCarHost;
@Mock
private ICarHardwareHost.Stub mMockCarHardwareHost;
private HostDispatcher mHostDispatcher = new HostDispatcher();
private CarHardwareHostDispatcher mCarHardwareHostDispatcher =
new CarHardwareHostDispatcher(mHostDispatcher);
private TestCarHardwareHostStub mCarHardwareHost;
@Before
public void setUp() throws RemoteException {
MockitoAnnotations.initMocks(this);
// Perform after mocks initialized.
mCarHardwareHost = new TestCarHardwareHostStub(mMockCarHardwareHost);
when(mMockCarHost.getHost(CarContext.HARDWARE_SERVICE))
.thenReturn(mCarHardwareHost.asBinder());
mHostDispatcher.setCarHost(mMockCarHost);
}
@Test
public void dispatchGetCarHardwareResult() throws BundlerException,
RemoteException {
Integer desiredResult = 5;
Bundleable desiredBundleable = Bundleable.create(desiredResult);
int desiredResultType = ICarHardwareResultTypes.TYPE_INFO_MODEL;
String param = "param";
Bundleable paramBundle = Bundleable.create(param);
mCarHardwareHostDispatcher.dispatchGetCarHardwareResult(desiredResultType, paramBundle,
new ICarHardwareResult.Stub() {
@Override
public void onCarHardwareResult(int resultType, boolean isSupported,
Bundleable result, IBinder callback) throws RemoteException {
assertThat(resultType).isEqualTo(desiredResultType);
assertThat(isSupported).isTrue();
assertThat(result).isEqualTo(desiredBundleable);
}
});
verify(mMockCarHardwareHost).getCarHardwareResult(eq(desiredResultType),
eq(paramBundle), any());
}
@Test
public void dispatchSubscribeCarHardwareResult() throws BundlerException, RemoteException {
Integer desiredResult = 5;
Bundleable desiredBundleable = Bundleable.create(desiredResult);
int desiredResultType = ICarHardwareResultTypes.TYPE_SENSOR_ACCELEROMETER;
String param = "param";
Bundleable paramBundle = Bundleable.create(param);
mCarHardwareHostDispatcher.dispatchSubscribeCarHardwareResult(desiredResultType,
paramBundle,
new ICarHardwareResult.Stub() {
@Override
public void onCarHardwareResult(int resultType, boolean isSupported,
Bundleable result, IBinder callback) throws RemoteException {
assertThat(resultType).isEqualTo(desiredResultType);
assertThat(isSupported).isTrue();
assertThat(result).isEqualTo(desiredBundleable);
}
});
verify(mMockCarHardwareHost).subscribeCarHardwareResult(eq(desiredResultType),
eq(paramBundle), any());
}
@Test
public void dispatchUnsubscribeCarHardwareResult() throws RemoteException, BundlerException {
int desiredResultType = ICarHardwareResultTypes.TYPE_SENSOR_ACCELEROMETER;
Bundleable bundle = Bundleable.create(10);
mCarHardwareHostDispatcher.dispatchUnsubscribeCarHardwareResult(desiredResultType, bundle);
verify(mMockCarHardwareHost).unsubscribeCarHardwareResult(eq(desiredResultType),
eq(bundle));
}
}
| apache-2.0 |
qixiaobo/glu | orchestration/org.linkedin.glu.orchestration-engine/src/main/java/org/linkedin/glu/orchestration/engine/delta/impl/RedeployDeltaSystemModelFilter.java | 1411 | /*
* Copyright (c) 2011 Yan Pujante
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.linkedin.glu.orchestration.engine.delta.impl;
import org.linkedin.glu.orchestration.engine.delta.DeltaSystemModelFilter;
import org.linkedin.glu.provisioner.core.model.SystemEntry;
import org.linkedin.glu.provisioner.core.model.SystemFilter;
/**
 * Delta filter for redeploys: keeps an entry pair when the expected entry
 * exists and (if an expected-system filter was supplied) that filter accepts
 * it. The current entry is never consulted.
 *
 * @author yan@pongasoft.com
 */
public class RedeployDeltaSystemModelFilter implements DeltaSystemModelFilter
{
  private final SystemFilter _expectedSystemFilter;

  /**
   * Constructor
   *
   * @param expectedSystemFilter optional filter applied to expected entries;
   *                             {@code null} means "accept all"
   */
  public RedeployDeltaSystemModelFilter(SystemFilter expectedSystemFilter)
  {
    _expectedSystemFilter = expectedSystemFilter;
  }

  @Override
  public boolean filter(SystemEntry expectedEntry, SystemEntry currentEntry)
  {
    if(expectedEntry == null)
      return false;

    return _expectedSystemFilter == null || _expectedSystemFilter.filter(expectedEntry);
  }
}
| apache-2.0 |
snorrees/artemis-odb | artemis-weaver/src/test/java/com/artemis/system/IteratingPoorFellowSystem.java | 332 | package com.artemis.system;
import com.artemis.Aspect;
import com.artemis.systems.IteratingSystem;
/**
 * Fixture system used by the artemis-weaver tests.
 *
 * NOTE(review): the constructor parameter is ignored and the system is always
 * registered with {@code Aspect.all()} — presumably intentional for this
 * weaving fixture; confirm before reusing this class elsewhere.
 */
public final class IteratingPoorFellowSystem extends IteratingSystem {
    public IteratingPoorFellowSystem(Aspect aspect) {
        // Deliberately discards 'aspect'; see class note above.
        super(Aspect.all());
    }

    @Override
    protected void process(int e) {
        // Side effect only — the weaver tests inspect the transformed
        // bytecode of this method, not its output.
        System.out.println("hello!");
    }
}
| apache-2.0 |
jbank/aws-ant-tasks | src/main/java/com/amazonaws/ant/opsworks/CreateLayerTask.java | 19325 | /*
* Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.ant.opsworks;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.tools.ant.BuildException;
import com.amazonaws.ant.AWSAntTask;
import com.amazonaws.ant.KeyValueNestedElement;
import com.amazonaws.ant.SimpleNestedElement;
import com.amazonaws.services.opsworks.AWSOpsWorksClient;
import com.amazonaws.services.opsworks.model.CreateLayerRequest;
import com.amazonaws.services.opsworks.model.Recipes;
import com.amazonaws.services.opsworks.model.VolumeConfiguration;
/**
 * Ant task that creates an AWS OpsWorks layer in an existing stack.
 *
 * Nested elements let the build file supply layer attributes, custom security
 * group IDs, packages, EBS volume configurations and custom Chef recipes.
 * After execution the new layer's ID is appended (comma separated) to the
 * {@code Constants.LAYER_IDS_PROPERTY} project property and, if
 * {@code propertyNameForLayerId} was set, assigned to that property as well.
 */
public class CreateLayerTask extends AWSAntTask {
    // User-defined key/value attributes to attach to the layer.
    private Map<String, String> attributes = new HashMap<String, String>();
    private List<String> customSecurityGroupIds = new LinkedList<String>();
    private List<String> packages = new LinkedList<String>();
    private List<VolumeConfiguration> volumeConfigurations = new LinkedList<VolumeConfiguration>();
    // Custom recipe names, bucketed by lifecycle phase.
    private List<String> configureRecipes = new LinkedList<String>();
    private List<String> deployRecipes = new LinkedList<String>();
    private List<String> setupRecipes = new LinkedList<String>();
    private List<String> shutdownRecipes = new LinkedList<String>();
    private List<String> undeployRecipes = new LinkedList<String>();
    private String stackId;
    private String type;
    private String name;
    private String shortname;
    private String customInstanceProfileArn;
    private String propertyNameForLayerId;
    private boolean enableAutoHealing = true;
    private boolean autoAssignElasticIps;
    private boolean autoAssignPublicIps = true;
    private boolean installUpdatesOnBoot = true;
    private boolean useEbsOptimizedInstances = true;
    // True once at least one <layerRecipe> nested element was seen; gates
    // whether a Recipes object is attached to the request in execute().
    private boolean isSetCustomRecipe;

    /**
     * Allows you to add any number of preconfigured LayerAttribute nested
     * elements. LayerAttributes are simply user-defined key/value pairs to
     * associate with this layer.
     *
     * @param attribute
     *            a preconfigured LayerAttribute object
     */
    public void addConfiguredLayerAttribute(LayerAttribute attribute) {
        attributes.put(attribute.getKey(), attribute.getValue());
    }

    /**
     * Allows you to add any number of preconfigured CustomSecurityGroupId
     * nested elements.
     *
     * @param customSecurityGroupId
     *            A preconfigured CustomSecurityGroupId object
     */
    public void addCustomSecurityGroupId(
            CustomSecurityGroupId customSecurityGroupId) {
        customSecurityGroupIds.add(customSecurityGroupId.getValue());
    }

    /**
     * Allows you to add any number of preconfigured LayerPackage nested
     * elements
     *
     * @param layerPackage
     *            a preconfigured LayerPackage object
     */
    public void addLayerPackage(LayerPackage layerPackage) {
        packages.add(layerPackage.getValue());
    }

    /**
     * Allows you to add any number of preconfigured LayerVolumeConfiguration
     * nested elements. Each nested element is converted to an SDK
     * {@link VolumeConfiguration}.
     *
     * @param layerVolumeConfiguration
     *            a preconfigured LayerVolumeConfiguration object
     */
    public void addConfiguredLayerVolumeConfiguration(
            LayerVolumeConfiguration layerVolumeConfiguration) {
        volumeConfigurations.add(new VolumeConfiguration()
                .withIops(layerVolumeConfiguration.getIops())
                .withMountPoint(layerVolumeConfiguration.getMountPoint())
                .withNumberOfDisks(layerVolumeConfiguration.getNumberOfDisks())
                .withRaidLevel(layerVolumeConfiguration.getRaidLevel())
                .withSize(layerVolumeConfiguration.getSize())
                .withVolumeType(layerVolumeConfiguration.getVolumeType()));
    }

    /**
     * Allows you to add any number of preconfigured LayerRecipe nested
     * elements. Each recipe is filed under its lifecycle phase; an unknown
     * phase fails the build immediately.
     *
     * @param layerRecipe
     *            a preconfigured LayerRecipe object
     * @throws BuildException
     *             if the recipe's phase is not one of configure, deploy,
     *             setup, shutdown, undeploy
     */
    public void addConfiguredLayerRecipe(LayerRecipe layerRecipe) {
        isSetCustomRecipe = true;
        String phase = layerRecipe.getPhase();
        if ("configure".equalsIgnoreCase(phase)) {
            configureRecipes.add(layerRecipe.getName());
        } else if ("deploy".equalsIgnoreCase(phase)) {
            deployRecipes.add(layerRecipe.getName());
        } else if ("setup".equalsIgnoreCase(phase)) {
            setupRecipes.add(layerRecipe.getName());
        } else if ("shutdown".equalsIgnoreCase(phase)) {
            shutdownRecipes.add(layerRecipe.getName());
        } else if ("undeploy".equalsIgnoreCase(phase)) {
            undeployRecipes.add(layerRecipe.getName());
        } else {
            throw new BuildException(
                    "The specified phase "
                            + phase
                            + " was not a valid phase. Valid phases are: configure, deploy, setup, shutdown, undeploy.");
        }
    }

    /**
     * Set the opsworks ID of the stack for this layer to reside in. You can
     * find the ID of your stack in the opsworks console. If you create a stack
     * earlier in this task, it will be assigned to the "stackId" property. If
     * you have already set the "stackId" property, you do not need to set this
     * attribute--it will automatically search for the "stackId" attribute. You
     * are required to either set the "stackId" attribute or this parameter.
     *
     * @param stackId
     *            The ID of the stack for this app to reside in.
     */
    public void setStackId(String stackId) {
        this.stackId = stackId;
    }

    /**
     * Set the layer type. Note that a stack can only have one built-in layer of
     * the same type, but can have any number of custom layers. This parameter
     * is required. This parameter must be set to one of:
     * custom: A custom layer
     * db-master: A MySQL layer
     * java-app: A Java App Server layer
     * rails-app: A Rails App Server layer
     * lb: An HAProxy layer
     * memcached: A Memcached layer
     * monitoring-master: A Ganglia layer
     * nodejs-app: A Node.js App Server layer
     * php-app: A PHP App Server layer
     * web: A Static Web Server layer
     *
     * @param type
     *            The layer type
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * Set the name of this layer. Required.
     *
     * @param name
     *            The name of this layer.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Set the shortname of this layer. Must have up to 200 characters,
     * consisting only of alphanumeric characters, "-", "_", and ".". Required.
     *
     * @param shortname
     *            The shortname of this layer
     */
    public void setShortname(String shortname) {
        this.shortname = shortname;
    }

    /**
     * Set the ARN of an IAM profile to use for this layer's EC2 instances. Not
     * required.
     *
     * @param customInstanceProfileArn
     *            The ARN of an IAM profile to use for this layer's EC2
     *            instances.
     */
    public void setCustomInstanceProfileArn(String customInstanceProfileArn) {
        this.customInstanceProfileArn = customInstanceProfileArn;
    }

    /**
     * Set whether to enable auto healing for this layer. Not required, default
     * is true.
     *
     * @param enableAutoHealing
     *            Whether to enable auto healing for this layer.
     */
    public void setEnableAutoHealing(boolean enableAutoHealing) {
        this.enableAutoHealing = enableAutoHealing;
    }

    /**
     * Set whether to automatically assign an elastic IP address to this layer's
     * instances. Not required, default is false.
     *
     * @param autoAssignElasticIps
     *            Whether to automatically assign an elastic IP address to this
     *            layer's instances.
     */
    public void setAutoAssignElasticIps(boolean autoAssignElasticIps) {
        this.autoAssignElasticIps = autoAssignElasticIps;
    }

    /**
     * Set whether to automatically assign a public IP address to the layer's
     * instances, for stacks that are running in a VPC. Not required, default is
     * true.
     *
     * @param autoAssignPublicIps
     *            Whether to automatically assign a public IP address to the
     *            layer's instances
     */
    public void setAutoAssignPublicIps(boolean autoAssignPublicIps) {
        this.autoAssignPublicIps = autoAssignPublicIps;
    }

    /**
     * Set whether to install operating system and package updates on boot. Not
     * required, default is true. It is highly recommended that you leave this
     * as true.
     *
     * @param installUpdatesOnBoot
     *            Whether to install operating system and package updates on
     *            boot.
     */
    public void setInstallUpdatesOnBoot(boolean installUpdatesOnBoot) {
        this.installUpdatesOnBoot = installUpdatesOnBoot;
    }

    /**
     * Set whether to use Amazon EBS-Optimized instances. Not required, default
     * is true.
     *
     * @param useEbsOptimizedInstances
     *            Whether to use Amazon EBS-Optimized instances.
     */
    public void setUseEbsOptimizedInstances(boolean useEbsOptimizedInstances) {
        this.useEbsOptimizedInstances = useEbsOptimizedInstances;
    }

    /**
     * Set which property to assign the ID of this layer to
     *
     * @param propertyToSet
     *            The property to assign the ID of this layer to
     */
    public void setPropertyNameForLayerId(String propertyToSet) {
        this.propertyNameForLayerId = propertyToSet;
    }

    /**
     * Validates required parameters, resolving stackId from the project
     * property set by an earlier stack-creation task when possible. Collects
     * every problem before failing so the user sees all of them at once.
     *
     * @throws BuildException
     *             listing every missing required parameter
     */
    private void checkParams() {
        boolean areMalformedParams = false;
        StringBuilder errors = new StringBuilder("");
        if (stackId == null) {
            // Fall back to the stackId property set earlier in this build --
            // but only when the reference flag is not TRUE (presumably that
            // flag marks the property as ambiguous, e.g. several stacks were
            // created; confirm against Constants' documentation).
            if (!Boolean.TRUE.equals(getProject().getReference(Constants.STACK_ID_REFERENCE))) {
                stackId = getProject().getProperty(Constants.STACK_ID_PROPERTY);
            }
            if (stackId == null) {
                areMalformedParams = true;
                errors.append("Missing parameter: stackId is required \n");
            } else {
                System.out.println("Using " + Constants.STACK_ID_PROPERTY
                        + " property as stackId");
            }
        }
        if (type == null) {
            areMalformedParams = true;
            errors.append("Missing parameter: type is required \n");
        }
        if (name == null) {
            areMalformedParams = true;
            errors.append("Missing parameter: name is required \n");
        }
        if (shortname == null) {
            areMalformedParams = true;
            errors.append("Missing parameter: shortName is required \n");
        }
        if (areMalformedParams) {
            throw new BuildException(errors.toString());
        }
    }

    /**
     * Creates a layer according to the set parameters. Also sets a layerId
     * property. Which property will be set depends on what order this layer is
     * created in the project. If it is the first layer created in this Ant
     * build, layerId1 is set. If it's the second, layerId2 is set, etc. The ID
     * is also printed for you to set to your own property for later use.
     */
    public void execute() {
        checkParams();
        AWSOpsWorksClient client = getOrCreateClient(AWSOpsWorksClient.class);
        CreateLayerRequest createLayerRequest = new CreateLayerRequest()
                .withStackId(stackId).withType(type).withName(name)
                .withShortname(shortname)
                .withEnableAutoHealing(enableAutoHealing)
                .withAutoAssignElasticIps(autoAssignElasticIps)
                .withAutoAssignPublicIps(autoAssignPublicIps)
                .withInstallUpdatesOnBoot(installUpdatesOnBoot)
                .withUseEbsOptimizedInstances(useEbsOptimizedInstances)
                .withCustomInstanceProfileArn(customInstanceProfileArn);
        // Only attach the optional collections that were actually populated,
        // so empty nested elements don't override service-side defaults.
        if (attributes.size() > 0) {
            createLayerRequest.setAttributes(attributes);
        }
        if (packages.size() > 0) {
            createLayerRequest.setPackages(packages);
        }
        if (volumeConfigurations.size() > 0) {
            createLayerRequest.setVolumeConfigurations(volumeConfigurations);
        }
        if (customSecurityGroupIds.size() > 0) {
            createLayerRequest
                    .setCustomSecurityGroupIds(customSecurityGroupIds);
        }
        if (isSetCustomRecipe) {
            Recipes customRecipes = new Recipes()
                    .withConfigure(configureRecipes).withDeploy(deployRecipes)
                    .withSetup(setupRecipes).withShutdown(shutdownRecipes)
                    .withUndeploy(undeployRecipes);
            createLayerRequest.setCustomRecipes(customRecipes);
        }
        String layerId;
        try {
            layerId = client.createLayer(createLayerRequest).getLayerId();
        } catch (Exception e) {
            throw new BuildException("Could not create layer: "
                    + e.getMessage(), e);
        }
        System.out.println("Created layer with ID " + layerId);
        if (layerId != null) {
            // Append this layer's ID to the comma-separated list property so
            // later tasks in the same build can find every layer created.
            if (getProject().getProperty(Constants.LAYER_IDS_PROPERTY) == null) {
                getProject().setProperty(Constants.LAYER_IDS_PROPERTY, layerId);
            } else {
                getProject().setProperty(
                        Constants.LAYER_IDS_PROPERTY,
                        getProject().getProperty(Constants.LAYER_IDS_PROPERTY) + ","
                                + layerId);
            }
            if (propertyNameForLayerId != null) {
                getProject().setProperty(propertyNameForLayerId, layerId);
            }
        }
    }

    /**
     * A class to be used as a nested element. Use to make attributes (Key-value
     * pairs) to associate with this instance.
     */
    public static class LayerAttribute extends KeyValueNestedElement {
    }

    /**
     * A class to be used as a nested element. Use to add any number of custom
     * security group IDs to use in this layer.
     *
     * NOTE(review): unlike the other nested-element classes this one is a
     * non-static inner class — presumably deliberate so Ant instantiates it
     * against the enclosing task; confirm before changing.
     */
    public class CustomSecurityGroupId extends SimpleNestedElement {
    }

    /**
     * A class to use as a nested element. Use to create objects that describe
     * the layer packages.
     */
    public static class LayerPackage extends SimpleNestedElement {
    }

    /**
     * A class to be used as a nested element. Use to make volume configuration
     * objects that describe the layer's EBS volumes.
     */
    public static class LayerVolumeConfiguration {
        private int iops;
        private int numberOfDisks;
        private int raidLevel;
        private int size;
        private String volumeType;
        private String mountPoint;

        /**
         * Get the IOPS per disk (For PIOPS volumes)
         *
         * @return The IOPS per disk
         */
        public int getIops() {
            return iops;
        }

        /**
         * Set the IOPS per disk (For PIOPS volumes)
         *
         * @param iops
         *            The IOPS per disk
         */
        public void setIops(int iops) {
            this.iops = iops;
        }

        /**
         * Get the number of disks in this volume.
         *
         * @return The number of disks in this volume.
         */
        public int getNumberOfDisks() {
            return numberOfDisks;
        }

        /**
         * Set the number of disks in this volume.
         *
         * @param numberOfDisks
         *            The number of disks in this volume.
         */
        public void setNumberOfDisks(int numberOfDisks) {
            this.numberOfDisks = numberOfDisks;
        }

        /**
         * Get the volume RAID level
         *
         * @return The volume RAID level
         */
        public int getRaidLevel() {
            return raidLevel;
        }

        /**
         * Set the volume RAID level
         *
         * @param raidLevel
         *            The volume RAID level
         */
        public void setRaidLevel(int raidLevel) {
            this.raidLevel = raidLevel;
        }

        /**
         * Get the size of this volume
         *
         * @return The size of this volume
         */
        public int getSize() {
            return size;
        }

        /**
         * Set the size of this volume
         *
         * @param size
         *            The size of this volume
         */
        public void setSize(int size) {
            this.size = size;
        }

        /**
         * Get the volume type
         *
         * @return The volume type
         */
        public String getVolumeType() {
            return volumeType;
        }

        /**
         * Set the volume type, must be standard or PIOPS
         *
         * @param volumeType
         *            The volume type
         */
        public void setVolumeType(String volumeType) {
            this.volumeType = volumeType;
        }

        /**
         * Get the volume mount point
         *
         * @return The volume mount point
         */
        public String getMountPoint() {
            return mountPoint;
        }

        /**
         * Set the volume mount point
         *
         * @param mountPoint
         *            The volume mount point
         */
        public void setMountPoint(String mountPoint) {
            this.mountPoint = mountPoint;
        }

        public LayerVolumeConfiguration() {
            // required by Ant
        }
    }

    /**
     * A class to be used as a nested element. Use to describe recipes to use in
     * the layer configuration, and what phase to use them in.
     */
    public static class LayerRecipe {
        private String name;
        private String phase;

        /**
         * Get the name of this recipe
         *
         * @return The name of this recipe
         */
        public String getName() {
            return name;
        }

        /**
         * Set the name of this recipe
         *
         * @param name
         *            The name of this recipe
         */
        public void setName(String name) {
            this.name = name;
        }

        /**
         * Get the phase this recipe will execute in
         *
         * @return The phase this recipe will execute in
         */
        public String getPhase() {
            return phase;
        }

        /**
         * Set the phase this recipe will execute in
         *
         * @param phase
         *            The phase this recipe will execute in
         */
        public void setPhase(String phase) {
            this.phase = phase;
        }
    }
}
| apache-2.0 |
AndroidX/androidx | mediarouter/mediarouter/src/main/java/androidx/mediarouter/media/MediaRouteProviderProtocol.java | 10793 | /*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.mediarouter.media;
import android.content.Intent;
import android.os.Messenger;
/**
 * Defines the communication protocol for media route provider services.
 *
 * Pure constants holder (plus one messenger-validation helper); never
 * instantiated. Message numbers are wire protocol — as the inline comments
 * say, DO NOT RENUMBER.
 */
abstract class MediaRouteProviderProtocol {
    /**
     * The {@link Intent} that must be declared as handled by the service.
     * Put this in your manifest.
     */
    public static final String SERVICE_INTERFACE =
            "android.media.MediaRouteProviderService";

    /*
     * Messages sent from the client to the service.
     * DO NOT RENUMBER THESE!
     */

    /** (client v1)
     * Register client.
     * - replyTo : client messenger
     * - arg1    : request id
     * - arg2    : client version
     */
    public static final int CLIENT_MSG_REGISTER = 1;

    /** (client v1)
     * Unregister client.
     * - replyTo : client messenger
     * - arg1    : request id
     */
    public static final int CLIENT_MSG_UNREGISTER = 2;

    /** (client v1)
     * Create route controller.
     * - replyTo : client messenger
     * - arg1    : request id
     * - arg2    : route controller id
     * - CLIENT_DATA_ROUTE_ID : route id string
     */
    public static final int CLIENT_MSG_CREATE_ROUTE_CONTROLLER = 3;

    /** (client v1)
     * Release route controller.
     * - replyTo : client messenger
     * - arg1    : request id
     * - arg2    : route controller id
     */
    public static final int CLIENT_MSG_RELEASE_ROUTE_CONTROLLER = 4;

    /** (client v1)
     * Select route.
     * - replyTo : client messenger
     * - arg1    : request id
     * - arg2    : route controller id
     */
    public static final int CLIENT_MSG_SELECT_ROUTE = 5;

    /** (client v1)
     * Unselect route.
     * - replyTo : client messenger
     * - arg1    : request id
     * - arg2    : route controller id
     */
    public static final int CLIENT_MSG_UNSELECT_ROUTE = 6;

    /** (client v1)
     * Set route volume.
     * - replyTo : client messenger
     * - arg1    : request id
     * - arg2    : route controller id
     * - CLIENT_DATA_VOLUME : volume integer
     * - CLIENT_DATA_ROUTE_ID : (client v4, only used for MediaRouter2) original route ID
     */
    public static final int CLIENT_MSG_SET_ROUTE_VOLUME = 7;

    /** (client v1)
     * Update route volume.
     * - replyTo : client messenger
     * - arg1    : request id
     * - arg2    : route controller id
     * - CLIENT_DATA_VOLUME : volume delta integer
     * - CLIENT_DATA_ROUTE_ID : (client v4, only used for MediaRouter2) original route ID
     */
    public static final int CLIENT_MSG_UPDATE_ROUTE_VOLUME = 8;

    /** (client v1)
     * Route control request.
     * - replyTo : client messenger
     * - arg1    : request id
     * - arg2    : route controller id
     * - obj     : media control intent
     */
    public static final int CLIENT_MSG_ROUTE_CONTROL_REQUEST = 9;

    /** (client v1)
     * Sets the discovery request.
     * - replyTo : client messenger
     * - arg1    : request id
     * - obj     : discovery request bundle, or null if none
     */
    public static final int CLIENT_MSG_SET_DISCOVERY_REQUEST = 10;

    /** (client v3)
     * Create dynamic group route controller.
     * - replyTo : client messenger
     * - arg1    : request id
     * - arg2    : route controller id
     * - CLIENT_DATA_MEMBER_ROUTE_ID : initial member route id string
     */
    public static final int CLIENT_MSG_CREATE_DYNAMIC_GROUP_ROUTE_CONTROLLER = 11;

    /** (client v3)
     * Adds a member route to a dynamic group route.
     * - replyTo : client messenger
     * - arg1    : request id
     * - arg2    : route controller id
     * - CLIENT_DATA_MEMBER_ROUTE_ID : member route id to be added
     */
    public static final int CLIENT_MSG_ADD_MEMBER_ROUTE = 12;

    /** (client v3)
     * Removes a member route from a dynamic group route.
     * - replyTo : client messenger
     * - arg1    : request id
     * - arg2    : route controller id
     * - CLIENT_DATA_MEMBER_ROUTE_ID : member route id to be removed
     */
    public static final int CLIENT_MSG_REMOVE_MEMBER_ROUTE = 13;

    /** (client v3)
     * Updates member routes of a dynamic group route.
     * - replyTo : client messenger
     * - arg1    : request id
     * - arg2    : route controller id
     * - CLIENT_DATA_MEMBER_ROUTE_IDS : array list of member route ids
     */
    public static final int CLIENT_MSG_UPDATE_MEMBER_ROUTES = 14;

    // Bundle keys for client->service messages.
    public static final String CLIENT_DATA_ROUTE_ID = "routeId";
    public static final String CLIENT_DATA_ROUTE_LIBRARY_GROUP = "routeGroupId";
    public static final String CLIENT_DATA_VOLUME = "volume";
    public static final String CLIENT_DATA_UNSELECT_REASON = "unselectReason";
    public static final String CLIENT_DATA_MEMBER_ROUTE_IDS = "memberRouteIds";
    public static final String CLIENT_DATA_MEMBER_ROUTE_ID = "memberRouteId";
    // NOTE(review): "SECION" is a typo for "SECTION", but both the constant
    // name and its "groupableTitle" value are shipped protocol — do not fix.
    public static final String DATA_KEY_GROUPABLE_SECION_TITLE = "groupableTitle";
    public static final String DATA_KEY_TRANSFERABLE_SECTION_TITLE = "transferableTitle";
    public static final String DATA_KEY_GROUP_ROUTE_DESCRIPTOR = "groupRoute";
    public static final String DATA_KEY_DYNAMIC_ROUTE_DESCRIPTORS = "dynamicRoutes";

    /*
     * Messages sent from the service to the client.
     * DO NOT RENUMBER THESE!
     */

    /** (service v1)
     * Generic failure sent in response to any unrecognized or malformed request.
     * - arg1 : request id
     */
    public static final int SERVICE_MSG_GENERIC_FAILURE = 0;

    /** (service v1)
     * Generic failure sent in response to a successful message.
     * - arg1 : request id
     */
    public static final int SERVICE_MSG_GENERIC_SUCCESS = 1;

    /** (service v1)
     * Registration succeeded.
     * - arg1 : request id
     * - arg2 : server version
     * - obj  : route provider descriptor bundle, or null
     */
    public static final int SERVICE_MSG_REGISTERED = 2;

    /** (service v1)
     * Route control request success result.
     * - arg1 : request id
     * - obj  : result data bundle, or null
     */
    public static final int SERVICE_MSG_CONTROL_REQUEST_SUCCEEDED = 3;

    /** (service v1)
     * Route control request failure result.
     * - arg1 : request id
     * - obj  : result data bundle, or null
     * - SERVICE_DATA_ERROR: error message
     */
    public static final int SERVICE_MSG_CONTROL_REQUEST_FAILED = 4;

    /** (service v1)
     * Route provider descriptor changed. (unsolicited event)
     * - arg1 : reserved (0)
     * - obj  : route provider descriptor bundle, or null
     */
    public static final int SERVICE_MSG_DESCRIPTOR_CHANGED = 5;

    /** (service v2)
     * Dynamic route controller created. Sends back related data.
     * - arg1 : request id
     * - arg2 : service version
     * - obj  : bundle
     *   - CLIENT_DATA_ROUTE_ID: (string) dynamic group route id
     *   - DATA_KEY_GROUPABLE_SECION_TITLE: (string) groupable section title
     *   - DATA_KEY_TRANSFERABLE_SECTION_TITLE: (string) transferable section title
     */
    public static final int SERVICE_MSG_DYNAMIC_ROUTE_CREATED = 6;

    /** (service v2)
     * Dynamic route descriptors changed. (unsolicited event)
     * - arg1 : reserved (0)
     * - arg2 : controllerId
     * - obj  : bundle
     *   - DATA_KEY_DYNAMIC_ROUTE_DESCRIPTORS: (list of bundle)
     */
    public static final int SERVICE_MSG_DYNAMIC_ROUTE_DESCRIPTORS_CHANGED = 7;

    /** (service v3) / (client v4)
     * Route controller released by the provider. (unsolicited event)
     * - arg1 : reserved(0)
     * - arg2 : controllerId
     */
    public static final int SERVICE_MSG_CONTROLLER_RELEASED = 8;

    public static final String SERVICE_DATA_ERROR = "error";

    /*
     * Recognized client version numbers. (Reserved for future use.)
     * DO NOT RENUMBER THESE!
     */

    /**
     * The client version used from the beginning.
     */
    public static final int CLIENT_VERSION_1 = 1;

    /**
     * The client version used from support library v24.1.0.
     */
    public static final int CLIENT_VERSION_2 = 2;

    /**
     * The client version used from androidx 1.0.0.
     */
    public static final int CLIENT_VERSION_3 = 3;

    /**
     * The client version used from androidx 1.2.0.
     * Media transfer feature is added in this version.
     */
    public static final int CLIENT_VERSION_4 = 4;

    /**
     * The current client version.
     */
    public static final int CLIENT_VERSION_CURRENT = CLIENT_VERSION_4;

    /*
     * Recognized server version numbers. (Reserved for future use.)
     * DO NOT RENUMBER THESE!
     */

    /**
     * The service version used from the beginning.
     */
    public static final int SERVICE_VERSION_1 = 1;

    /**
     * The service version used from androidx 1.0.0.
     */
    public static final int SERVICE_VERSION_2 = 2;

    /**
     * The service version used from androidx 1.2.0.
     * Media transfer feature is added in this version.
     */
    public static final int SERVICE_VERSION_3 = 3;

    /**
     * The current service version.
     */
    public static final int SERVICE_VERSION_CURRENT = SERVICE_VERSION_3;

    // Oldest client version the service will talk to.
    static final int CLIENT_VERSION_START = CLIENT_VERSION_1;

    /**
     * Returns true if the messenger object is valid.
     * <p>
     * The messenger constructor and unparceling code does not check whether the
     * provided IBinder is a valid IMessenger object. As a result, it's possible
     * for a peer to send an invalid IBinder that will result in crashes downstream.
     * This method checks that the messenger is in a valid state.
     * </p>
     */
    public static boolean isValidRemoteMessenger(Messenger messenger) {
        try {
            return messenger != null && messenger.getBinder() != null;
        } catch (NullPointerException ex) {
            // If the messenger was constructed with a binder interface other than
            // IMessenger then the call to getBinder() will crash with an NPE.
            return false;
        }
    }

    // Constants holder — never instantiated.
    private MediaRouteProviderProtocol() {
    }
}
| apache-2.0 |
mozilla/mentat | sdks/android/Mentat/library/src/main/java/org/mozilla/mentat/ScalarResultHandler.java | 869 | /* -*- Mode: Java; c-basic-offset: 4; tab-width: 20; indent-tabs-mode: nil; -*-
* Copyright 2018 Mozilla
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License. */
package org.mozilla.mentat;
/**
 * Interface defining the structure of a callback from a query returning a single {@link TypedValue}.
 */
public interface ScalarResultHandler {
    /**
     * Receives the scalar result of the query.
     *
     * @param value the single {@link TypedValue} produced by the query
     *              (NOTE(review): whether {@code null} is passed when the query
     *              matches nothing is not visible here — confirm at call sites)
     */
    void handleValue(TypedValue value);
}
| apache-2.0 |
mxm/incubator-beam | runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/logging/BeamFnLoggingServiceTest.java | 9466 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.dataflow.worker.fn.logging;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertThat;
import com.google.common.net.HostAndPort;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.beam.model.fnexecution.v1.BeamFnApi;
import org.apache.beam.model.fnexecution.v1.BeamFnLoggingGrpc;
import org.apache.beam.model.pipeline.v1.Endpoints;
import org.apache.beam.runners.dataflow.harness.test.TestStreams;
import org.apache.beam.runners.dataflow.worker.fn.stream.ServerStreamObserverFactory;
import org.apache.beam.runners.fnexecution.GrpcContextHeaderAccessorProvider;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.vendor.grpc.v1_13_1.io.grpc.BindableService;
import org.apache.beam.vendor.grpc.v1_13_1.io.grpc.ManagedChannel;
import org.apache.beam.vendor.grpc.v1_13_1.io.grpc.Server;
import org.apache.beam.vendor.grpc.v1_13_1.io.grpc.inprocess.InProcessChannelBuilder;
import org.apache.beam.vendor.grpc.v1_13_1.io.grpc.inprocess.InProcessServerBuilder;
import org.apache.beam.vendor.grpc.v1_13_1.io.grpc.stub.StreamObserver;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link BeamFnLoggingService}. */
@RunWith(JUnit4.class)
public class BeamFnLoggingServiceTest {
private Server server;
/** Grabs an ephemeral loopback port and packages it as a service descriptor. */
private Endpoints.ApiServiceDescriptor findOpenPort() throws Exception {
    InetAddress loopback = InetAddress.getLoopbackAddress();
    // Port 0 asks the OS for any free port; the probe socket is closed again
    // on exit so the port is available for the service under test.
    try (ServerSocket probe = new ServerSocket(0, -1, loopback)) {
        String url =
                HostAndPort.fromParts(loopback.getHostAddress(), probe.getLocalPort()).toString();
        return Endpoints.ApiServiceDescriptor.newBuilder().setUrl(url).build();
    }
}
/** Force-stops the gRPC server created by the test so its resources are released. */
@After
public void tearDown() {
    server.shutdownNow();
}
/**
 * Three clients connect concurrently; each sends a batch with log ids {@code i}
 * and {@code -i}, completes its stream, and waits for the server to hang up.
 * All six entries must reach the consumer, in any order.
 */
@Test
public void testMultipleClientsSuccessfullyProcessed() throws Exception {
    // Thread-safe sink shared by all client streams.
    ConcurrentLinkedQueue<BeamFnApi.LogEntry> logs = new ConcurrentLinkedQueue<>();
    try (BeamFnLoggingService service =
            new BeamFnLoggingService(
                    findOpenPort(),
                    logs::add,
                    ServerStreamObserverFactory.fromOptions(PipelineOptionsFactory.create())::from,
                    GrpcContextHeaderAccessorProvider.getHeaderAccessor())) {
        server = createServer(service, service.getApiServiceDescriptor());
        Collection<Callable<Void>> tasks = new ArrayList<>();
        for (int i = 1; i <= 3; ++i) {
            // Effectively-final copy for capture by the lambda below.
            int instructionReference = i;
            tasks.add(
                    () -> {
                        // Released by the inbound observer's onCompleted, i.e.
                        // when the server closes its side of the stream.
                        CountDownLatch waitForServerHangup = new CountDownLatch(1);
                        ManagedChannel channel =
                                InProcessChannelBuilder.forName(service.getApiServiceDescriptor().getUrl())
                                        .build();
                        StreamObserver<BeamFnApi.LogEntry.List> outboundObserver =
                                BeamFnLoggingGrpc.newStub(channel)
                                        .logging(
                                                TestStreams.withOnNext(BeamFnLoggingServiceTest::discardMessage)
                                                        .withOnCompleted(waitForServerHangup::countDown)
                                                        .build());
                        outboundObserver.onNext(
                                createLogsWithIds(instructionReference, -instructionReference));
                        outboundObserver.onCompleted();
                        waitForServerHangup.await();
                        return null;
                    });
        }
        ExecutorService executorService = Executors.newCachedThreadPool();
        // invokeAll blocks until every client has finished its exchange.
        executorService.invokeAll(tasks);
        assertThat(
                logs,
                containsInAnyOrder(
                        createLogWithId(1L),
                        createLogWithId(2L),
                        createLogWithId(3L),
                        createLogWithId(-1L),
                        createLogWithId(-2L),
                        createLogWithId(-3L)));
    }
}
/**
 * Three clients each send a batch and then abort their stream with
 * {@code onError}; the test passes if every client observes the server's
 * error-side teardown and nothing hangs — i.e. the service survives client
 * failures. (No assertion on {@code logs}; graceful completion is the check.)
 */
@Test
public void testMultipleClientsFailingIsHandledGracefullyByServer() throws Exception {
    Collection<Callable<Void>> tasks = new ArrayList<>();
    ConcurrentLinkedQueue<BeamFnApi.LogEntry> logs = new ConcurrentLinkedQueue<>();
    try (BeamFnLoggingService service =
            new BeamFnLoggingService(
                    findOpenPort(),
                    logs::add,
                    ServerStreamObserverFactory.fromOptions(PipelineOptionsFactory.create())::from,
                    GrpcContextHeaderAccessorProvider.getHeaderAccessor())) {
        server = createServer(service, service.getApiServiceDescriptor());
        for (int i = 1; i <= 3; ++i) {
            int instructionReference = i;
            tasks.add(
                    () -> {
                        // Released by the inbound observer's onError callback.
                        CountDownLatch waitForTermination = new CountDownLatch(1);
                        ManagedChannel channel =
                                InProcessChannelBuilder.forName(service.getApiServiceDescriptor().getUrl())
                                        .build();
                        StreamObserver<BeamFnApi.LogEntry.List> outboundObserver =
                                BeamFnLoggingGrpc.newStub(channel)
                                        .logging(
                                                TestStreams.withOnNext(BeamFnLoggingServiceTest::discardMessage)
                                                        .withOnError(waitForTermination::countDown)
                                                        .build());
                        outboundObserver.onNext(
                                createLogsWithIds(instructionReference, -instructionReference));
                        // Simulate a client crash mid-stream.
                        outboundObserver.onError(new RuntimeException("Client " + instructionReference));
                        waitForTermination.await();
                        return null;
                    });
        }
        ExecutorService executorService = Executors.newCachedThreadPool();
        executorService.invokeAll(tasks);
    }
}
@Test
public void testServerCloseHangsUpClients() throws Exception {
LinkedBlockingQueue<BeamFnApi.LogEntry> logs = new LinkedBlockingQueue<>();
ExecutorService executorService = Executors.newCachedThreadPool();
Collection<Future<Void>> futures = new ArrayList<>();
try (BeamFnLoggingService service =
new BeamFnLoggingService(
findOpenPort(),
logs::add,
ServerStreamObserverFactory.fromOptions(PipelineOptionsFactory.create())::from,
GrpcContextHeaderAccessorProvider.getHeaderAccessor())) {
server = createServer(service, service.getApiServiceDescriptor());
for (int i = 1; i <= 3; ++i) {
long instructionReference = i;
futures.add(
executorService.submit(
() -> {
CountDownLatch waitForServerHangup = new CountDownLatch(1);
ManagedChannel channel =
InProcessChannelBuilder.forName(service.getApiServiceDescriptor().getUrl())
.build();
StreamObserver<BeamFnApi.LogEntry.List> outboundObserver =
BeamFnLoggingGrpc.newStub(channel)
.logging(
TestStreams.withOnNext(BeamFnLoggingServiceTest::discardMessage)
.withOnCompleted(waitForServerHangup::countDown)
.build());
outboundObserver.onNext(createLogsWithIds(instructionReference));
waitForServerHangup.await();
return null;
}));
}
// Wait till each client has sent their message showing that they have connected.
for (int i = 1; i <= 3; ++i) {
logs.take();
}
service.close();
server.shutdownNow();
}
for (Future<Void> future : futures) {
future.get();
}
}
private static void discardMessage(BeamFnApi.LogControl ignored) {}
private BeamFnApi.LogEntry.List createLogsWithIds(long... ids) {
BeamFnApi.LogEntry.List.Builder builder = BeamFnApi.LogEntry.List.newBuilder();
for (long id : ids) {
builder.addLogEntries(createLogWithId(id));
}
return builder.build();
}
private BeamFnApi.LogEntry createLogWithId(long id) {
return BeamFnApi.LogEntry.newBuilder().setInstructionReference(Long.toString(id)).build();
}
private Server createServer(
BindableService service, Endpoints.ApiServiceDescriptor serviceDescriptor) throws Exception {
Server server =
InProcessServerBuilder.forName(serviceDescriptor.getUrl()).addService(service).build();
server.start();
return server;
}
}
| apache-2.0 |
imasahiro/armeria | core/src/main/java/com/linecorp/armeria/server/GracefulShutdownSupport.java | 5198 | /*
* Copyright 2017 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.server;
import java.time.Duration;
import java.util.concurrent.Executor;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicInteger;
import com.google.common.base.Ticker;
/**
 * Keeps track of pending requests to allow shutdown to happen after a fixed quiet period passes
 * after the last pending request.
 */
public abstract class GracefulShutdownSupport {
    // Shared no-op instance handed out when graceful shutdown is not configured.
    private static final GracefulShutdownSupport DISABLED =
            new DisabledGracefulShutdownSupport();
    static GracefulShutdownSupport create(Duration quietPeriod, Executor blockingTaskExecutor) {
        return create(quietPeriod, blockingTaskExecutor, Ticker.systemTicker());
    }
    // Overload taking an explicit Ticker so tests can control the flow of time.
    static GracefulShutdownSupport create(Duration quietPeriod, Executor blockingTaskExecutor, Ticker ticker) {
        return new DefaultGracefulShutdownSupport(quietPeriod, blockingTaskExecutor, ticker);
    }
    static GracefulShutdownSupport disabled() {
        return DISABLED;
    }
    GracefulShutdownSupport() {}
    /**
     * Increases the number of pending responses.
     */
    public abstract void inc();
    /**
     * Decreases the number of pending responses.
     */
    public abstract void dec();
    /**
     * Returns the number of pending responses.
     */
    public abstract int pendingResponses();
    /**
     * Indicates the quiet period duration has passed since the last request.
     */
    abstract boolean completedQuietPeriod();
    // Implementation used when graceful shutdown is disabled: reports no pending work and an
    // already-completed quiet period so shutdown may proceed immediately.
    private static final class DisabledGracefulShutdownSupport extends GracefulShutdownSupport {
        @Override
        public void inc() {}
        @Override
        public void dec() {}
        @Override
        public int pendingResponses() {
            return 0;
        }
        @Override
        boolean completedQuietPeriod() {
            return true;
        }
    }
    private static final class DefaultGracefulShutdownSupport extends GracefulShutdownSupport {
        private final long quietPeriodNanos;
        private final Ticker ticker;
        private final Executor blockingTaskExecutor;
        /**
         * NOTE: {@link #updatedLastResTimeNanos} and {@link #lastResTimeNanos} are declared as non-volatile
         * while using this field as a memory barrier.
         */
        private final AtomicInteger pendingResponses = new AtomicInteger();
        private boolean updatedLastResTimeNanos;
        private long lastResTimeNanos;
        // Written only from completedQuietPeriod(); latches the time shutdown was first checked.
        private boolean setShutdownStartTimeNanos;
        private long shutdownStartTimeNanos;
        DefaultGracefulShutdownSupport(Duration quietPeriod, Executor blockingTaskExecutor, Ticker ticker) {
            quietPeriodNanos = quietPeriod.toNanos();
            this.blockingTaskExecutor = blockingTaskExecutor;
            this.ticker = ticker;
        }
        @Override
        public void inc() {
            pendingResponses.incrementAndGet();
        }
        @Override
        public void dec() {
            // Ordering matters: the plain writes below are published to other threads by the
            // subsequent atomic decrement (see the note on the pendingResponses field), so they
            // must stay before it.
            lastResTimeNanos = ticker.read();
            updatedLastResTimeNanos = true;
            pendingResponses.decrementAndGet();
        }
        @Override
        public int pendingResponses() {
            return pendingResponses.get();
        }
        @Override
        boolean completedQuietPeriod() {
            // Remember when shutdown was first requested so the quiet period has a lower bound
            // even when no response ever completed.
            if (!setShutdownStartTimeNanos) {
                shutdownStartTimeNanos = ticker.read();
                setShutdownStartTimeNanos = true;
            }
            // Still busy — the quiet period cannot have elapsed yet.
            if (pendingResponses.get() != 0 || !completedBlockingTasks()) {
                return false;
            }
            final long shutdownStartTimeNanos = this.shutdownStartTimeNanos;
            final long currentTimeNanos = ticker.read();
            final long duration;
            // Measure the quiet period from the later of shutdown start and the last response.
            if (updatedLastResTimeNanos) {
                duration = Math.min(currentTimeNanos - shutdownStartTimeNanos,
                        currentTimeNanos - lastResTimeNanos);
            } else {
                duration = currentTimeNanos - shutdownStartTimeNanos;
            }
            return duration >= quietPeriodNanos;
        }
        // Returns true when the blocking-task executor has nothing queued or running.
        private boolean completedBlockingTasks() {
            if (!(blockingTaskExecutor instanceof ThreadPoolExecutor)) {
                // Cannot determine if there's a blocking task.
                return true;
            }
            final ThreadPoolExecutor threadPool = (ThreadPoolExecutor) blockingTaskExecutor;
            return threadPool.getQueue().isEmpty() && threadPool.getActiveCount() == 0;
        }
    }
}
| apache-2.0 |
vineetgarg02/hive | service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java | 2894 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.service.auth;
import javax.security.sasl.AuthenticationException;
import org.apache.hadoop.hive.conf.HiveConf;
/**
 * This class helps select a {@link PasswdAuthenticationProvider} for a given {@code AuthMethod}.
 */
public final class AuthenticationProviderFactory {

  /**
   * Supported password-based authentication methods. Each constant owns a default
   * {@link HiveConf}, created when the enum class initializes, used when a caller
   * does not supply a configuration of its own.
   */
  public enum AuthMethods {
    LDAP("LDAP"),
    PAM("PAM"),
    CUSTOM("CUSTOM"),
    NONE("NONE");

    private final String authMethod;
    // Per-constant default configuration.
    private final HiveConf conf = new HiveConf();

    AuthMethods(String authMethod) {
      this.authMethod = authMethod;
    }

    public String getAuthMethod() {
      return authMethod;
    }

    public HiveConf getConf() {
      return conf;
    }

    /**
     * Resolves the enum constant whose name equals {@code authMethodStr}.
     *
     * @throws AuthenticationException if the string matches no supported method
     */
    public static AuthMethods getValidAuthMethod(String authMethodStr)
        throws AuthenticationException {
      for (AuthMethods auth : AuthMethods.values()) {
        if (authMethodStr.equals(auth.getAuthMethod())) {
          return auth;
        }
      }
      throw new AuthenticationException("Not a valid authentication method");
    }
  }

  private AuthenticationProviderFactory() {
  }

  public static PasswdAuthenticationProvider getAuthenticationProvider(AuthMethods authMethod)
      throws AuthenticationException {
    return getAuthenticationProvider(authMethod, null);
  }

  /**
   * Returns the {@link PasswdAuthenticationProvider} for {@code authMethod}, configured with
   * {@code conf} or, when {@code conf} is null, with the method's default configuration.
   *
   * @throws AuthenticationException if the method is null or unsupported
   */
  public static PasswdAuthenticationProvider getAuthenticationProvider(AuthMethods authMethod, HiveConf conf)
      throws AuthenticationException {
    // Preserve historical behavior: a null method is reported as unsupported. (The original
    // if/else chain fell through to this exception; a switch on null would throw NPE instead.)
    if (authMethod == null) {
      throw new AuthenticationException("Unsupported authentication method");
    }
    switch (authMethod) {
      case LDAP:
        return new LdapAuthenticationProviderImpl(resolveConf(authMethod, conf));
      case PAM:
        return new PamAuthenticationProviderImpl(resolveConf(authMethod, conf));
      case CUSTOM:
        return new CustomAuthenticationProviderImpl(resolveConf(authMethod, conf));
      case NONE:
        return new AnonymousAuthenticationProviderImpl();
      default:
        throw new AuthenticationException("Unsupported authentication method");
    }
  }

  /** Falls back to the method's default configuration when the caller supplied none. */
  private static HiveConf resolveConf(AuthMethods authMethod, HiveConf conf) {
    return conf == null ? authMethod.getConf() : conf;
  }
}
| apache-2.0 |
52nlp/webanno | webanno-webapp/src/main/java/de/tudarmstadt/ukp/clarin/webanno/webapp/page/annotation/component/DocumentNamePanel.java | 2969 | /*******************************************************************************
* Copyright 2012
* Ubiquitous Knowledge Processing (UKP) Lab and FG Language Technology
* Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package de.tudarmstadt.ukp.clarin.webanno.webapp.page.annotation.component;
import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.LoadableDetachableModel;
import org.apache.wicket.spring.injection.annot.SpringBean;
import de.tudarmstadt.ukp.clarin.webanno.api.RepositoryService;
import de.tudarmstadt.ukp.clarin.webanno.brat.annotation.BratAnnotatorModel;
import de.tudarmstadt.ukp.clarin.webanno.model.export.Project;
import de.tudarmstadt.ukp.clarin.webanno.model.export.SourceDocument;
/**
 * A {@link Panel} holding a {@link Label} that shows the current document name as the
 * concatenation of {@link Project#getName()} and {@link SourceDocument#getName()}.
 *
 * @author Seid Muhie Yimam
 */
public class DocumentNamePanel
    extends Panel
{
    private static final long serialVersionUID = 3584950105138069924L;

    @SpringBean(name = "documentRepository")
    private RepositoryService repository;

    ModalWindow yesNoModal;

    public DocumentNamePanel(String id, final IModel<BratAnnotatorModel> aModel)
    {
        super(id, aModel);
        // NOTE(review): the wicket id "doumentName" (sic) must match the id used in the
        // corresponding HTML markup, so the misspelling is intentionally preserved here.
        add(new Label("doumentName", new LoadableDetachableModel<String>()
        {
            private static final long serialVersionUID = 1L;

            @Override
            protected String load()
            {
                // Project prefix defaults to "/" when no project is selected; the document
                // part is empty when no document is open.
                String projectName = aModel.getObject().getProject() == null
                        ? "/"
                        : aModel.getObject().getProject().getName() + "/";
                String documentName = aModel.getObject().getDocument() == null
                        ? ""
                        : aModel.getObject().getDocument().getName();
                return projectName + documentName;
            }
        }).setOutputMarkupId(true));
    }
}
| apache-2.0 |
DavidHerzogTU-Berlin/cassandraToRun | interface/thrift/gen-java/org/apache/cassandra/thrift/SlicePredicate.java | 18424 | /**
* Autogenerated by Thrift Compiler (0.9.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.cassandra.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A SlicePredicate is similar to a mathematic predicate (see http://en.wikipedia.org/wiki/Predicate_(mathematical_logic)),
* which is described as "a property that the elements of a set have in common."
*
* SlicePredicate's in Cassandra are described with either a list of column_names or a SliceRange. If column_names is
* specified, slice_range is ignored.
*
* @param column_name. A list of column names to retrieve. This can be used similar to Memcached's "multi-get" feature
* to fetch N known column names. For instance, if you know you wish to fetch columns 'Joe', 'Jack',
* and 'Jim' you can pass those column names as a list to fetch all three at once.
* @param slice_range. A SliceRange describing how to range, order, and/or limit the slice.
*/
public class SlicePredicate implements org.apache.thrift.TBase<SlicePredicate, SlicePredicate._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SlicePredicate");
  private static final org.apache.thrift.protocol.TField COLUMN_NAMES_FIELD_DESC = new org.apache.thrift.protocol.TField("column_names", org.apache.thrift.protocol.TType.LIST, (short)1);
  private static final org.apache.thrift.protocol.TField SLICE_RANGE_FIELD_DESC = new org.apache.thrift.protocol.TField("slice_range", org.apache.thrift.protocol.TType.STRUCT, (short)2);
  // Maps each Thrift serialization scheme to the factory producing its reader/writer.
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new SlicePredicateStandardSchemeFactory());
    schemes.put(TupleScheme.class, new SlicePredicateTupleSchemeFactory());
  }
  public List<ByteBuffer> column_names; // optional
  public SliceRange slice_range; // optional
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    COLUMN_NAMES((short)1, "column_names"),
    SLICE_RANGE((short)2, "slice_range");
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // COLUMN_NAMES
          return COLUMN_NAMES;
        case 2: // SLICE_RANGE
          return SLICE_RANGE;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  // Both fields are optional; this list drives the tuple scheme's presence bitset.
  private _Fields optionals[] = {_Fields.COLUMN_NAMES,_Fields.SLICE_RANGE};
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.COLUMN_NAMES, new org.apache.thrift.meta_data.FieldMetaData("column_names", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING            , true))));
    tmpMap.put(_Fields.SLICE_RANGE, new org.apache.thrift.meta_data.FieldMetaData("slice_range", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, SliceRange.class)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(SlicePredicate.class, metaDataMap);
  }
  public SlicePredicate() {
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public SlicePredicate(SlicePredicate other) {
    if (other.isSetColumn_names()) {
      List<ByteBuffer> __this__column_names = new ArrayList<ByteBuffer>();
      for (ByteBuffer other_element : other.column_names) {
        ByteBuffer temp_binary_element = org.apache.thrift.TBaseHelper.copyBinary(other_element);
        ;
        __this__column_names.add(temp_binary_element);
      }
      this.column_names = __this__column_names;
    }
    if (other.isSetSlice_range()) {
      this.slice_range = new SliceRange(other.slice_range);
    }
  }
  public SlicePredicate deepCopy() {
    return new SlicePredicate(this);
  }
  @Override
  public void clear() {
    this.column_names = null;
    this.slice_range = null;
  }
  public int getColumn_namesSize() {
    return (this.column_names == null) ? 0 : this.column_names.size();
  }
  public java.util.Iterator<ByteBuffer> getColumn_namesIterator() {
    return (this.column_names == null) ? null : this.column_names.iterator();
  }
  public void addToColumn_names(ByteBuffer elem) {
    if (this.column_names == null) {
      this.column_names = new ArrayList<ByteBuffer>();
    }
    this.column_names.add(elem);
  }
  public List<ByteBuffer> getColumn_names() {
    return this.column_names;
  }
  public SlicePredicate setColumn_names(List<ByteBuffer> column_names) {
    this.column_names = column_names;
    return this;
  }
  public void unsetColumn_names() {
    this.column_names = null;
  }
  /** Returns true if field column_names is set (has been assigned a value) and false otherwise */
  public boolean isSetColumn_names() {
    return this.column_names != null;
  }
  public void setColumn_namesIsSet(boolean value) {
    if (!value) {
      this.column_names = null;
    }
  }
  public SliceRange getSlice_range() {
    return this.slice_range;
  }
  public SlicePredicate setSlice_range(SliceRange slice_range) {
    this.slice_range = slice_range;
    return this;
  }
  public void unsetSlice_range() {
    this.slice_range = null;
  }
  /** Returns true if field slice_range is set (has been assigned a value) and false otherwise */
  public boolean isSetSlice_range() {
    return this.slice_range != null;
  }
  public void setSlice_rangeIsSet(boolean value) {
    if (!value) {
      this.slice_range = null;
    }
  }
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case COLUMN_NAMES:
      if (value == null) {
        unsetColumn_names();
      } else {
        setColumn_names((List<ByteBuffer>)value);
      }
      break;
    case SLICE_RANGE:
      if (value == null) {
        unsetSlice_range();
      } else {
        setSlice_range((SliceRange)value);
      }
      break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case COLUMN_NAMES:
      return getColumn_names();
    case SLICE_RANGE:
      return getSlice_range();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case COLUMN_NAMES:
      return isSetColumn_names();
    case SLICE_RANGE:
      return isSetSlice_range();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof SlicePredicate)
      return this.equals((SlicePredicate)that);
    return false;
  }
  public boolean equals(SlicePredicate that) {
    if (that == null)
      return false;
    boolean this_present_column_names = true && this.isSetColumn_names();
    boolean that_present_column_names = true && that.isSetColumn_names();
    if (this_present_column_names || that_present_column_names) {
      if (!(this_present_column_names && that_present_column_names))
        return false;
      if (!this.column_names.equals(that.column_names))
        return false;
    }
    boolean this_present_slice_range = true && this.isSetSlice_range();
    boolean that_present_slice_range = true && that.isSetSlice_range();
    if (this_present_slice_range || that_present_slice_range) {
      if (!(this_present_slice_range && that_present_slice_range))
        return false;
      if (!this.slice_range.equals(that.slice_range))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    // NOTE(review): the Thrift 0.9.0 generator emits a constant hash code here. It satisfies
    // the equals/hashCode contract (equal objects share a hash) but degrades hash-based
    // collections to linear scans. Fix by regenerating with a newer Thrift compiler rather
    // than hand-editing this generated file.
    return 0;
  }
  public int compareTo(SlicePredicate other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    SlicePredicate typedOther = (SlicePredicate)other;
    lastComparison = Boolean.valueOf(isSetColumn_names()).compareTo(typedOther.isSetColumn_names());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetColumn_names()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.column_names, typedOther.column_names);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetSlice_range()).compareTo(typedOther.isSetSlice_range());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetSlice_range()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.slice_range, typedOther.slice_range);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("SlicePredicate(");
    boolean first = true;
    if (isSetColumn_names()) {
      sb.append("column_names:");
      if (this.column_names == null) {
        sb.append("null");
      } else {
        sb.append(this.column_names);
      }
      first = false;
    }
    if (isSetSlice_range()) {
      if (!first) sb.append(", ");
      sb.append("slice_range:");
      if (this.slice_range == null) {
        sb.append("null");
      } else {
        sb.append(this.slice_range);
      }
      first = false;
    }
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
    if (slice_range != null) {
      slice_range.validate();
    }
  }
  // Java serialization is delegated to the Thrift compact protocol over the object stream.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class SlicePredicateStandardSchemeFactory implements SchemeFactory {
    public SlicePredicateStandardScheme getScheme() {
      return new SlicePredicateStandardScheme();
    }
  }
  // Reader/writer for the standard (field-header based) Thrift protocol encoding.
  private static class SlicePredicateStandardScheme extends StandardScheme<SlicePredicate> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, SlicePredicate struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // COLUMN_NAMES
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list16 = iprot.readListBegin();
                struct.column_names = new ArrayList<ByteBuffer>(_list16.size);
                for (int _i17 = 0; _i17 < _list16.size; ++_i17)
                {
                  ByteBuffer _elem18; // required
                  _elem18 = iprot.readBinary();
                  struct.column_names.add(_elem18);
                }
                iprot.readListEnd();
              }
              struct.setColumn_namesIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // SLICE_RANGE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.slice_range = new SliceRange();
              struct.slice_range.read(iprot);
              struct.setSlice_rangeIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, SlicePredicate struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.column_names != null) {
        if (struct.isSetColumn_names()) {
          oprot.writeFieldBegin(COLUMN_NAMES_FIELD_DESC);
          {
            oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.column_names.size()));
            for (ByteBuffer _iter19 : struct.column_names)
            {
              oprot.writeBinary(_iter19);
            }
            oprot.writeListEnd();
          }
          oprot.writeFieldEnd();
        }
      }
      if (struct.slice_range != null) {
        if (struct.isSetSlice_range()) {
          oprot.writeFieldBegin(SLICE_RANGE_FIELD_DESC);
          struct.slice_range.write(oprot);
          oprot.writeFieldEnd();
        }
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class SlicePredicateTupleSchemeFactory implements SchemeFactory {
    public SlicePredicateTupleScheme getScheme() {
      return new SlicePredicateTupleScheme();
    }
  }
  // Reader/writer for the compact tuple encoding: a presence bitset followed by set values only.
  private static class SlicePredicateTupleScheme extends TupleScheme<SlicePredicate> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, SlicePredicate struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetColumn_names()) {
        optionals.set(0);
      }
      if (struct.isSetSlice_range()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.isSetColumn_names()) {
        {
          oprot.writeI32(struct.column_names.size());
          for (ByteBuffer _iter20 : struct.column_names)
          {
            oprot.writeBinary(_iter20);
          }
        }
      }
      if (struct.isSetSlice_range()) {
        struct.slice_range.write(oprot);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, SlicePredicate struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(2);
      if (incoming.get(0)) {
        {
          org.apache.thrift.protocol.TList _list21 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
          struct.column_names = new ArrayList<ByteBuffer>(_list21.size);
          for (int _i22 = 0; _i22 < _list21.size; ++_i22)
          {
            ByteBuffer _elem23; // required
            _elem23 = iprot.readBinary();
            struct.column_names.add(_elem23);
          }
        }
        struct.setColumn_namesIsSet(true);
      }
      if (incoming.get(1)) {
        struct.slice_range = new SliceRange();
        struct.slice_range.read(iprot);
        struct.setSlice_rangeIsSet(true);
      }
    }
  }
}
| apache-2.0 |
meitar/zaproxy | zap/src/main/java/org/parosproxy/paros/core/scanner/MultipartFormParameter.java | 1709 | /*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2017 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.parosproxy.paros.core.scanner;
/**
 * Immutable holder for a single parameter found in a {@code multipart/form-data} request body,
 * together with its location markers and its role within the part.
 */
public class MultipartFormParameter {

    /** The role a value plays in a multipart part: a regular field, a file name, or a file content type. */
    public enum Type {
        GENERAL,
        FILE_NAME,
        FILE_CONTENT_TYPE
    }

    // All fields are set once in the constructor and never mutated, so they are final.
    private final String name;
    private final String value;
    // Start/end markers as supplied by the parser — assumed to be offsets into the
    // multipart message body; confirm against the caller that populates them.
    private final int start;
    private final int end;
    // Ordinal of this parameter within the message, as supplied by the parser.
    private final int position;
    private final Type type;

    /**
     * Constructs an immutable multipart form parameter.
     *
     * @param name the parameter name
     * @param value the parameter value
     * @param start the start marker of the parameter
     * @param end the end marker of the parameter
     * @param position the ordinal of the parameter within the message
     * @param type the role of the parameter
     */
    public MultipartFormParameter(
            String name, String value, int start, int end, int position, Type type) {
        this.name = name;
        this.value = value;
        this.start = start;
        this.end = end;
        this.position = position;
        this.type = type;
    }

    public String getName() {
        return name;
    }

    public String getValue() {
        return value;
    }

    public int getStart() {
        return start;
    }

    public int getEnd() {
        return end;
    }

    public int getPosition() {
        return position;
    }

    public Type getType() {
        return type;
    }
}
| apache-2.0 |
apache/jackrabbit | jackrabbit-spi-commons/src/main/java/org/apache/jackrabbit/spi/commons/query/ExactQueryNode.java | 2996 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.spi.commons.query;
import javax.jcr.RepositoryException;
import org.apache.jackrabbit.spi.Name;
/**
 * Implements a query node that defines an exact match of a property and a
 * value.
 */
public class ExactQueryNode extends QueryNode {

    /**
     * The name of the property to match
     */
    private final Name property;

    /**
     * The value of the property to match
     */
    private final Name value;

    /**
     * Creates a new <code>ExactQueryNode</code> instance.
     *
     * @param parent the parent node for this <code>ExactQueryNode</code>.
     * @param property the name of the property to match.
     * @param value the value of the property to match.
     * @throws NullPointerException if <code>parent</code> is <code>null</code>.
     */
    public ExactQueryNode(QueryNode parent, Name property, Name value) {
        super(parent);
        if (parent == null) {
            throw new NullPointerException("parent");
        }
        this.property = property;
        this.value = value;
    }

    /**
     * {@inheritDoc}
     * @throws RepositoryException
     */
    public Object accept(QueryNodeVisitor visitor, Object data) throws RepositoryException {
        return visitor.visit(this, data);
    }

    /**
     * {@inheritDoc}
     */
    public int getType() {
        return QueryNode.TYPE_EXACT;
    }

    /**
     * Returns the name of the property to match.
     *
     * @return the name of the property to match.
     */
    public Name getPropertyName() {
        return property;
    }

    /**
     * Returns the value of the property to match.
     *
     * @return the value of the property to match.
     */
    public Name getValue() {
        return value;
    }

    /**
     * {@inheritDoc}
     */
    public boolean equals(Object obj) {
        if (obj instanceof ExactQueryNode) {
            ExactQueryNode other = (ExactQueryNode) obj;
            return (value == null ? other.value == null : value.equals(other.value))
                    && (property == null ? other.property == null : property.equals(other.property));
        }
        return false;
    }

    /**
     * Returns a hash code consistent with {@link #equals(Object)}: equal nodes (same
     * property and value) produce equal hashes. Previously {@code equals} was overridden
     * without {@code hashCode}, breaking the {@link Object#hashCode()} contract for
     * hash-based collections.
     */
    public int hashCode() {
        int result = property == null ? 0 : property.hashCode();
        return 31 * result + (value == null ? 0 : value.hashCode());
    }

    /**
     * {@inheritDoc}
     */
    public boolean needsSystemTree() {
        return false;
    }
}
| apache-2.0 |
nybbs2003/jsyn | tests/com/jsyn/examples/SawFaders.java | 3373 | /*
* Copyright 2010 Phil Burk, Mobileer Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jsyn.examples;
import java.awt.GridLayout;
import javax.swing.JApplet;
import javax.swing.JPanel;
import com.jsyn.JSyn;
import com.jsyn.Synthesizer;
import com.jsyn.swing.ExponentialRangeModel;
import com.jsyn.swing.JAppletFrame;
import com.jsyn.swing.PortControllerFactory;
import com.jsyn.swing.PortModelFactory;
import com.jsyn.swing.RotaryTextController;
import com.jsyn.unitgen.LineOut;
import com.jsyn.unitgen.LinearRamp;
import com.jsyn.unitgen.SawtoothOscillatorBL;
import com.jsyn.unitgen.UnitOscillator;
/**
* Play a sawtooth using a JSyn oscillator and some knobs.
*
* @author Phil Burk (C) 2010 Mobileer Inc
*/
public class SawFaders extends JApplet {
    private static final long serialVersionUID = -2704222221111608377L;

    private Synthesizer synth;
    private UnitOscillator osc;
    private LinearRamp lag;
    private LineOut lineOut;

    /**
     * Builds the synthesis graph -- a band-limited sawtooth oscillator whose
     * amplitude is smoothed through a linear ramp into a stereo line output --
     * and lays out the Swing amplitude knob and frequency slider.
     */
    @Override
    public void init() {
        synth = JSyn.createSynthesizer();
        // Band-limited sawtooth as the tone source.
        osc = new SawtoothOscillatorBL();
        synth.add(osc);
        // Linear ramp smooths amplitude changes so they do not pop.
        lag = new LinearRamp();
        synth.add(lag);
        // Stereo output mixer.
        lineOut = new LineOut();
        synth.add(lineOut);
        // Send the mono oscillator output to both stereo channels.
        osc.output.connect(0, lineOut.input, 0);
        osc.output.connect(0, lineOut.input, 1);
        // Route the ramp into the oscillator amplitude; configure the ramp
        // input with its minimum, current and maximum values.
        lag.output.connect(osc.amplitude);
        lag.input.setup(0.0, 0.5, 1.0);
        lag.time.set(0.2);
        // Stack the controls vertically.
        setLayout(new GridLayout(0, 1));
        ExponentialRangeModel ampModel = PortModelFactory.createExponentialModel(lag.input);
        RotaryTextController ampKnob = new RotaryTextController(ampModel, 5);
        JPanel ampPanel = new JPanel();
        ampPanel.add(ampKnob);
        add(ampPanel);
        osc.frequency.setup(50.0, 300.0, 10000.0);
        add(PortControllerFactory.createExponentialPortSlider(osc.frequency));
        validate();
    }

    /**
     * Starts the synthesizer (default stereo output at 44100 Hz). Only the
     * LineOut needs an explicit start; it pulls data from the oscillator.
     */
    @Override
    public void start() {
        synth.start();
        lineOut.start();
    }

    /** Stops the synthesizer. */
    @Override
    public void stop() {
        synth.stop();
    }

    /** Entry point so this runs as an application as well as an applet. */
    public static void main(String[] args) {
        SawFaders app = new SawFaders();
        JAppletFrame appFrame = new JAppletFrame("SawFaders", app);
        appFrame.setSize(440, 200);
        appFrame.setVisible(true);
        appFrame.test();
    }
}
| apache-2.0 |
wso2/carbon-storage-management | components/rss-manager/org.wso2.carbon.rssmanager.core/src/main/java/org/wso2/carbon/rssmanager/core/environment/dao/impl/DatabasePrivilegeTemplateEntryDAOImpl.java | 10687 | /*
* Copyright (c) 2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.carbon.rssmanager.core.environment.dao.impl;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.rssmanager.common.RSSManagerConstants;
import org.wso2.carbon.rssmanager.core.dao.exception.RSSDAOException;
import org.wso2.carbon.rssmanager.core.dao.exception.RSSDatabaseConnectionException;
import org.wso2.carbon.rssmanager.core.dao.util.RSSDAOUtil;
import org.wso2.carbon.rssmanager.core.dto.common.DatabasePrivilegeTemplateEntry;
import org.wso2.carbon.rssmanager.core.environment.DatabasePrivilegeTemplateEntryDAO;
import org.wso2.carbon.rssmanager.core.util.RSSManagerUtil;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
* Database privilege template entry DAO implementation
*/
public class DatabasePrivilegeTemplateEntryDAOImpl implements DatabasePrivilegeTemplateEntryDAO {
public Log log = LogFactory.getLog(DatabasePrivilegeTemplateEntryDAOImpl.class);
private DataSource dataSource;
public DatabasePrivilegeTemplateEntryDAOImpl() {
dataSource = RSSManagerUtil.getDataSource();
}
/**
* @see DatabasePrivilegeTemplateEntryDAO#addPrivilegeTemplateEntry(int, int, DatabasePrivilegeTemplateEntry)
*/
public void addPrivilegeTemplateEntry(int environmentId, int templateId, DatabasePrivilegeTemplateEntry entry)
throws RSSDAOException, RSSDatabaseConnectionException {
Connection conn = null;
PreparedStatement templateEntryStatement = null;
try {
conn = getDataSourceConnection();//acquire data source connection
conn.setAutoCommit(false);
String insertTemplateEntryQuery = "INSERT INTO RM_DB_PRIVILEGE_TEMPLATE_ENTRY(TEMPLATE_ID, SELECT_PRIV, " +
"INSERT_PRIV, UPDATE_PRIV, DELETE_PRIV, CREATE_PRIV, DROP_PRIV, GRANT_PRIV, REFERENCES_PRIV, " +
"INDEX_PRIV, ALTER_PRIV, CREATE_TMP_TABLE_PRIV, LOCK_TABLES_PRIV, CREATE_VIEW_PRIV, SHOW_VIEW_PRIV, " +
"CREATE_ROUTINE_PRIV, ALTER_ROUTINE_PRIV, EXECUTE_PRIV, EVENT_PRIV, TRIGGER_PRIV) VALUES " +
"(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
templateEntryStatement = conn.prepareStatement(insertTemplateEntryQuery);
templateEntryStatement.setInt(1, templateId);
templateEntryStatement.setString(2, entry.getSelectPriv());
templateEntryStatement.setString(3, entry.getInsertPriv());
templateEntryStatement.setString(4, entry.getUpdatePriv());
templateEntryStatement.setString(5, entry.getDeletePriv());
templateEntryStatement.setString(6, entry.getCreatePriv());
templateEntryStatement.setString(7, entry.getDropPriv());
templateEntryStatement.setString(8, entry.getGrantPriv());
templateEntryStatement.setString(9, entry.getReferencesPriv());
templateEntryStatement.setString(10, entry.getIndexPriv());
templateEntryStatement.setString(11, entry.getAlterPriv());
templateEntryStatement.setString(12, entry.getCreateTmpTablePriv());
templateEntryStatement.setString(13, entry.getLockTablesPriv());
templateEntryStatement.setString(14, entry.getCreateViewPriv());
templateEntryStatement.setString(15, entry.getShowViewPriv());
templateEntryStatement.setString(16, entry.getCreateRoutinePriv());
templateEntryStatement.setString(17, entry.getAlterRoutinePriv());
templateEntryStatement.setString(18, entry.getExecutePriv());
templateEntryStatement.setString(19, entry.getEventPriv());
templateEntryStatement.setString(20, entry.getTriggerPriv());
templateEntryStatement.executeUpdate();
conn.commit();
} catch (SQLException e) {
String msg = "Failed to add database template entry to the metadata repository";
handleException(msg, e);
} finally {
RSSDAOUtil.cleanupResources(null, templateEntryStatement, conn, RSSManagerConstants
.ADD_PRIVILEGE_TEMPLATE_PRIVILEGE_SET_ENTRY);
}
}
/**
* @see DatabasePrivilegeTemplateEntryDAO#getPrivilegeTemplateEntry(int)
*/
public DatabasePrivilegeTemplateEntry getPrivilegeTemplateEntry(int templateId)
throws RSSDAOException, RSSDatabaseConnectionException {
Connection conn = null;
PreparedStatement statement = null;
ResultSet resultSet = null;
DatabasePrivilegeTemplateEntry entry = null;
try {
conn = getDataSourceConnection();//acquire data source connection
conn.setAutoCommit(false);
String getPrivilegeEntryQuery = "SELECT * FROM RM_DB_PRIVILEGE_TEMPLATE_ENTRY WHERE TEMPLATE_ID = ?";
statement = conn.prepareStatement(getPrivilegeEntryQuery);
statement.setInt(1, templateId);
resultSet = statement.executeQuery();
while (resultSet.next()) {
entry = new DatabasePrivilegeTemplateEntry();
entry.setId(resultSet.getInt("ID"));
entry.setSelectPriv(resultSet.getString("SELECT_PRIV"));
entry.setInsertPriv(resultSet.getString("INSERT_PRIV"));
entry.setUpdatePriv(resultSet.getString("UPDATE_PRIV"));
entry.setDeletePriv(resultSet.getString("DELETE_PRIV"));
entry.setCreatePriv(resultSet.getString("CREATE_PRIV"));
entry.setDropPriv(resultSet.getString("DROP_PRIV"));
entry.setGrantPriv(resultSet.getString("GRANT_PRIV"));
entry.setReferencesPriv(resultSet.getString("REFERENCES_PRIV"));
entry.setIndexPriv(resultSet.getString("INDEX_PRIV"));
entry.setAlterPriv(resultSet.getString("ALTER_PRIV"));
entry.setCreateTmpTablePriv(resultSet.getString("CREATE_TMP_TABLE_PRIV"));
entry.setLockTablesPriv(resultSet.getString("LOCK_TABLES_PRIV"));
entry.setCreateViewPriv(resultSet.getString("CREATE_VIEW_PRIV"));
entry.setShowViewPriv(resultSet.getString("SHOW_VIEW_PRIV"));
entry.setCreateRoutinePriv(resultSet.getString("CREATE_ROUTINE_PRIV"));
entry.setAlterRoutinePriv(resultSet.getString("ALTER_ROUTINE_PRIV"));
entry.setExecutePriv(resultSet.getString("EXECUTE_PRIV"));
entry.setEventPriv(resultSet.getString("EVENT_PRIV"));
entry.setTriggerPriv(resultSet.getString("TRIGGER_PRIV"));
}
} catch (SQLException e) {
String msg = "Failed to retrieve database privilege entry information from meta repository";
handleException(msg, e);
} finally {
RSSDAOUtil.cleanupResources(resultSet, statement, conn, RSSManagerConstants
.SELECT_PRIVILEGE_TEMPLATE_PRIVILEGE_SET_ENTRY);
}
return entry;
}
/**
* @see DatabasePrivilegeTemplateEntryDAO#updatePrivilegeTemplateEntry(int, int, DatabasePrivilegeTemplateEntry)
*/
public void updatePrivilegeTemplateEntry(int environmentId, int templateId, DatabasePrivilegeTemplateEntry updatedEntry)
throws RSSDAOException, RSSDatabaseConnectionException {
Connection conn = null;
PreparedStatement entryUpdateStatement = null;
try {
conn = getDataSourceConnection();//acquire data source connection
conn.setAutoCommit(false);
String updateTemplateEntryQuery = "UPDATE RM_DB_PRIVILEGE_TEMPLATE_ENTRY SET SELECT_PRIV=?, INSERT_PRIV=?," +
"UPDATE_PRIV=? ,DELETE_PRIV=?, CREATE_PRIV=?, DROP_PRIV=?, GRANT_PRIV=?, REFERENCES_PRIV=?, INDEX_PRIV=?, ALTER_PRIV=?," +
"CREATE_TMP_TABLE_PRIV=?, LOCK_TABLES_PRIV=?, CREATE_VIEW_PRIV=?, SHOW_VIEW_PRIV=?, CREATE_ROUTINE_PRIV=?," +
"ALTER_ROUTINE_PRIV=?, EXECUTE_PRIV=?, EVENT_PRIV=?, TRIGGER_PRIV=? WHERE TEMPLATE_ID = ?";
entryUpdateStatement = conn.prepareStatement(updateTemplateEntryQuery);
entryUpdateStatement.setString(1, updatedEntry.getSelectPriv());
entryUpdateStatement.setString(2, updatedEntry.getInsertPriv());
entryUpdateStatement.setString(3, updatedEntry.getUpdatePriv());
entryUpdateStatement.setString(4, updatedEntry.getDeletePriv());
entryUpdateStatement.setString(5, updatedEntry.getCreatePriv());
entryUpdateStatement.setString(6, updatedEntry.getDropPriv());
entryUpdateStatement.setString(7, updatedEntry.getGrantPriv());
entryUpdateStatement.setString(8, updatedEntry.getReferencesPriv());
entryUpdateStatement.setString(9, updatedEntry.getIndexPriv());
entryUpdateStatement.setString(10, updatedEntry.getAlterPriv());
entryUpdateStatement.setString(11, updatedEntry.getCreateTmpTablePriv());
entryUpdateStatement.setString(12, updatedEntry.getLockTablesPriv());
entryUpdateStatement.setString(13, updatedEntry.getCreateViewPriv());
entryUpdateStatement.setString(14, updatedEntry.getShowViewPriv());
entryUpdateStatement.setString(15, updatedEntry.getCreateRoutinePriv());
entryUpdateStatement.setString(16, updatedEntry.getAlterRoutinePriv());
entryUpdateStatement.setString(17, updatedEntry.getExecutePriv());
entryUpdateStatement.setString(18, updatedEntry.getEventPriv());
entryUpdateStatement.setString(19, updatedEntry.getTriggerPriv());
entryUpdateStatement.setInt(20, templateId);
entryUpdateStatement.executeUpdate();
conn.commit();
} catch (SQLException e) {
String msg = "Failed to update database template entry in the metadata repository";
handleException(msg, e);
} finally {
RSSDAOUtil.cleanupResources(null, entryUpdateStatement, conn, RSSManagerConstants
.UPDATE_PRIVILEGE_TEMPLATE_PRIVILEGE_SET_ENTRY);
}
}
/**
* Get data source connection
*
* @return the data source connection
*/
private Connection getDataSourceConnection() throws RSSDatabaseConnectionException {
try{
return dataSource.getConnection();//acquire data source connection
} catch (SQLException e) {
String msg = "Error while acquiring the database connection. Meta Repository Database server may down";
throw new RSSDatabaseConnectionException(msg, e);
}
}
/**
* Log and throw a rss manager data access exception
* @param msg high level exception message
* @param e error
* @throws RSSDAOException throw RSS DAO exception
*/
public void handleException(String msg, Exception e) throws RSSDAOException {
log.error(msg, e);
throw new RSSDAOException(msg, e);
}
}
| apache-2.0 |
papicella/snappy-store | tests/sql/src/main/java/sql/tpce/entity/DailyMarketPK.java | 2000 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package sql.tpce.entity;
import java.io.Serializable;
//import javax.persistence.*;
/**
* The primary key class for the DAILY_MARKET database table.
*
*/
//@Embeddable
public class DailyMarketPK implements Serializable {
//default serial version id, required for serializable classes.
private static final long serialVersionUID = 1L;
//@Temporal(TemporalType.DATE)
//@Column(name="DM_DATE", unique=true, nullable=false)
private java.util.Date dmDate;
//@Column(name="DM_S_SYMB", unique=true, nullable=false, length=15)
private String dmSSymb;
public DailyMarketPK() {
}
public java.util.Date getDmDate() {
return this.dmDate;
}
public void setDmDate(java.util.Date dmDate) {
this.dmDate = dmDate;
}
public String getDmSSymb() {
return this.dmSSymb;
}
public void setDmSSymb(String dmSSymb) {
this.dmSSymb = dmSSymb;
}
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (!(other instanceof DailyMarketPK)) {
return false;
}
DailyMarketPK castOther = (DailyMarketPK)other;
return
this.dmDate.equals(castOther.dmDate)
&& this.dmSSymb.equals(castOther.dmSSymb);
}
public int hashCode() {
final int prime = 31;
int hash = 17;
hash = hash * prime + this.dmDate.hashCode();
hash = hash * prime + this.dmSSymb.hashCode();
return hash;
}
} | apache-2.0 |
papicella/snappy-store | gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/impl/sql/execute/StatementPlanCollector.java | 66502 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.impl.sql.execute;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import com.gemstone.gemfire.internal.NanoTimer;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.pivotal.gemfirexd.internal.catalog.UUID;
import com.pivotal.gemfirexd.internal.engine.GfxdConstants;
import com.pivotal.gemfirexd.internal.engine.Misc;
import com.pivotal.gemfirexd.internal.engine.access.GemFireTransaction;
import com.pivotal.gemfirexd.internal.engine.access.MemConglomerate;
import com.pivotal.gemfirexd.internal.engine.access.index.MemIndex;
import com.pivotal.gemfirexd.internal.engine.distributed.ResultHolder;
import com.pivotal.gemfirexd.internal.engine.distributed.message.StatementExecutorMessage;
import com.pivotal.gemfirexd.internal.engine.distributed.utils.GemFireXDUtils;
import com.pivotal.gemfirexd.internal.engine.procedure.coordinate.ProcedureProcessorResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.catalog.XPLAINDistPropsDescriptor;
import com.pivotal.gemfirexd.internal.engine.sql.execute.DistributionPlanCollector;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GemFireDeleteResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GemFireDistributedResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GemFireRegionSizeResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GemFireResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GemFireUpdateResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.GfxdSubqueryResultSet;
import com.pivotal.gemfirexd.internal.engine.sql.execute.NcjPullResultSet;
import com.pivotal.gemfirexd.internal.engine.store.GemFireContainer;
import com.pivotal.gemfirexd.internal.engine.store.GemFireStore;
import com.pivotal.gemfirexd.internal.engine.store.GemFireStore.StoreStatistics;
import com.pivotal.gemfirexd.internal.engine.store.RowFormatter;
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.jdbc.ConnectionContext;
import com.pivotal.gemfirexd.internal.iapi.services.i18n.MessageService;
import com.pivotal.gemfirexd.internal.iapi.services.info.JVMInfo;
import com.pivotal.gemfirexd.internal.iapi.services.io.FormatableProperties;
import com.pivotal.gemfirexd.internal.iapi.sql.Activation;
import com.pivotal.gemfirexd.internal.iapi.sql.ResultSet;
import com.pivotal.gemfirexd.internal.iapi.sql.conn.LanguageConnectionContext;
import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.ColumnDescriptor;
import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.DataDictionary;
import com.pivotal.gemfirexd.internal.iapi.sql.execute.ExecPreparedStatement;
import com.pivotal.gemfirexd.internal.iapi.store.access.Qualifier;
import com.pivotal.gemfirexd.internal.iapi.store.access.TransactionController;
import com.pivotal.gemfirexd.internal.impl.jdbc.EmbedConnection;
import com.pivotal.gemfirexd.internal.impl.jdbc.EmbedStatement;
import com.pivotal.gemfirexd.internal.impl.services.uuid.BasicUUID;
import com.pivotal.gemfirexd.internal.impl.sql.catalog.XPLAINResultSetDescriptor;
import com.pivotal.gemfirexd.internal.impl.sql.catalog.XPLAINResultSetTimingsDescriptor;
import com.pivotal.gemfirexd.internal.impl.sql.catalog.XPLAINScanPropsDescriptor;
import com.pivotal.gemfirexd.internal.impl.sql.catalog.XPLAINSortPropsDescriptor;
import com.pivotal.gemfirexd.internal.impl.sql.catalog.XPLAINStatementDescriptor;
import com.pivotal.gemfirexd.internal.impl.sql.catalog.XPLAINStatementTimingsDescriptor;
import com.pivotal.gemfirexd.internal.impl.sql.catalog.XPLAINTableDescriptor;
import com.pivotal.gemfirexd.internal.impl.sql.execute.xplain.XPLAINUtil;
import com.pivotal.gemfirexd.internal.shared.common.reference.SQLState;
import com.pivotal.gemfirexd.internal.shared.common.sanity.SanityManager;
import com.pivotal.gemfirexd.tools.planexporter.CreateXML;
import com.pivotal.gemfirexd.tools.planexporter.StatisticsCollectionObserver;
/**
* Class that will capture per statement per execution statistics for statement
* plan generation.
*
* Implementation similar to Derby 10.7 XPLAINSystemTableVisitor
*
* @author soubhikc
*
*/
public final class StatementPlanCollector extends AbstractStatisticsCollector {
  // When true, CALL statements are skipped entirely during plan capture
  // (checked via XPLAINUtil.getStatementType in generateStatementDescriptor).
  private final boolean no_call_stmts = true;

  // ---------------------------------------------------------
  // member variables
  // ---------------------------------------------------------

  // the needed system objects for writing to the dictionary
  private LanguageConnectionContext lcc;
  // nested connection used for writing plan rows; plan capture is skipped
  // when this is null (see processResultSet / doXPLAIN)
  private Connection nestedConnection = null;
  private DataDictionary dd;
  // the stmt activation object
  private BaseActivation activation;
  // a flag which is used to reflect if the statistics timings is on
  private boolean considerTimingInformation = false;
  // the prepared statement whose plan is being captured
  private ExecPreparedStatement preStmt;
  // the different tuple descriptors describing the query characteristics
  // regarding the stmt
  private XPLAINStatementDescriptor stmt;
  private XPLAINStatementTimingsDescriptor stmtTimings = null;
  private UUID stmtUUID; // the UUID to save for the resultsets
  // now the lists of descriptors regarding the resultsets
  private final ArrayList<XPLAINResultSetDescriptor> rsets; // resultset descriptors
  private final List<XPLAINResultSetTimingsDescriptor> rsetsTimings; // resultset timings descriptors
  private final List<XPLAINSortPropsDescriptor> sortrsets; // sort props descriptors
  private final List<XPLAINScanPropsDescriptor> scanrsets; // scan props descriptors
  private final List<XPLAINDistPropsDescriptor> dsets; // distribution props descriptors
  // this stack keeps track of the result set UUIDs, which get popped by the
  // children of the current explained node
  private final ArrayDeque<UUID> UUIDStack;
  // collector for child-node timings; presumably used to derive a node's own
  // time from its children's -- TODO confirm against XPLAINUtil
  private final XPLAINUtil.ChildNodeTimeCollector childTiming;
  // builds the distribution (message/routing) part of the plan into dsets
  private final DistributionPlanCollector distributionPlan;
  // store-wide statistics; records time spent on plan generation
  private final StoreStatistics stats;
  // optional observer notified with the collected descriptors; callers
  // null-check it before use (see doXPLAIN)
  public final StatisticsCollectionObserver observer = StatisticsCollectionObserver.getInstance();
  /**
   * Creates a collector with empty descriptor lists and an empty UUID stack.
   * Collectors are chained: after this one finishes, work is forwarded to
   * {@code nextCollector} (may be null -- callers null-check it).
   *
   * @param nextCollector the next statistics collector in the chain
   */
  public StatementPlanCollector(final ResultSetStatisticsVisitor nextCollector) {
    super(nextCollector);
    // initialize the per-execution descriptor lists
    rsets = new ArrayList<XPLAINResultSetDescriptor>();
    rsetsTimings = new ArrayList<XPLAINResultSetTimingsDescriptor>();
    sortrsets = new ArrayList<XPLAINSortPropsDescriptor>();
    scanrsets = new ArrayList<XPLAINScanPropsDescriptor>();
    dsets = new ArrayList<XPLAINDistPropsDescriptor>();
    // init UUIDStack
    UUIDStack = new ArrayDeque<UUID>();
    childTiming = new XPLAINUtil.ChildNodeTimeCollector(
        null);
    distributionPlan = new DistributionPlanCollector(
        this,
        dsets);
    stats = Misc.getMemStore().getStoreStatistics();
  }
  /**
   * Returns a new <code>StatementPlanCollector</code> chained to a clone of
   * the downstream collectors, so each use gets fresh descriptor state.
   */
  @Override
  public ResultSetStatisticsVisitor getClone() {
    return new StatementPlanCollector(
        super.getClone());
  }
/**
* helper method, which pushes the UUID, "number of Children" times onto the
* UUIDStack.
*
* @param uuid
* the UUID to push
*/
private void pushUUIDnoChildren(
final UUID uuid) {
for (int i = 0; i < noChildren; i++) {
UUIDStack.push(uuid);
}
}
  /** Pops and returns the most recently pushed UUID from the UUID stack. */
  public UUID popUUIDFromStack() {
    return UUIDStack.pop();
  }

  /**
   * Pushes the given UUID onto the UUID stack.
   *
   * @param id the UUID to push
   */
  public void pushUUIDToStack(
      final UUID id) {
    UUIDStack.push(id);
  }
  // ---------------------------------------------------------
  // XPLAINVisitor Implementation
  // ---------------------------------------------------------

  /**
   * Captures the plan on the data-store side for a remote DML other than
   * SELECT: registers the XPLAIN statements on the connection, records the
   * sending member, walks the statement's wrapped result set, records the
   * plan-generation time in the store statistics, then either forwards to
   * the next collector in the chain or resets the result set's statistics.
   */
  @Override
  public <T> void process(EmbedConnection conn,
      final StatementExecutorMessage<T> msg, final EmbedStatement est,
      boolean isLocallyExecuted) throws StandardException {
    final long beginTime = NanoTimer.getTime();
    // register the XPLAIN statements for this connection
    XPLAINTableDescriptor.registerStatements(conn);
    if (SanityManager.DEBUG) {
      if (GemFireXDUtils.TracePlanGeneration) {
        SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
            "Generating plan for " + est.getSQLText());
      }
    }
    // remember the originating member; used as ORIGIN in the statement
    // descriptor for remote connections
    if( msg.getSender() != null) {
      sender = msg.getSender().toString();
    }
    final ResultSet resultsToWrap = est.getResultsToWrap();
    processResultSet(conn, msg, null, resultsToWrap, est.getGPrepStmt(), isLocallyExecuted);
    stats.collectStatementPlanStats( (NanoTimer.getTime() - beginTime), true /*remote*/);
    if (nextCollector != null) {
      nextCollector.process(conn, msg, est, isLocallyExecuted);
    }
    else {
      // end of the collector chain: clear the accumulated runtime statistics
      est.getResultsToWrap().resetStatistics();
    }
  }
  /**
   * Captures the plan on the data-store side for a remote SELECT. Same flow
   * as the DML overload, but the source result set is obtained from the
   * {@link ResultHolder} and passed along to {@link #processResultSet}.
   */
  @Override
  public <T> void process(EmbedConnection conn,
      final StatementExecutorMessage<T> msg, final ResultHolder rh,
      boolean isLocallyExecuted) throws StandardException {
    final long beginTime = NanoTimer.getTime();
    // register the XPLAIN statements for this connection
    XPLAINTableDescriptor.registerStatements(conn);
    if (SanityManager.DEBUG) {
      if (GemFireXDUtils.TracePlanGeneration) {
        SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
            "Generating plan for message : " + msg);
      }
    }
    // remember the originating member for the statement descriptor
    if( msg.getSender() != null) {
      sender = msg.getSender().toString();
    }
    final ResultSet rs = rh.getERS().getSourceResultSet();
    processResultSet(conn, msg, rh, rs, rh.getGPrepStmt(), isLocallyExecuted);
    stats.collectStatementPlanStats( (NanoTimer.getTime() - beginTime), true /*remote*/);
    if (nextCollector != null) {
      nextCollector.process(conn, msg, rh, isLocallyExecuted);
    }
    else {
      // end of the collector chain: clear the accumulated runtime statistics
      rs.resetStatistics();
    }
  }
  /**
   * Common remote-side capture path for both overloads of
   * <code>process</code>: initializes collector state from the activation,
   * bails out when runtime statistics are off or no nested connection is
   * available, stamps the execution id, builds the statement descriptor,
   * and -- if capture should continue -- the distribution plan plus the
   * result-set tree descriptors (via {@link #doXPLAIN}).
   *
   * @param rh the result holder for SELECTs, or null for other DMLs
   */
  private <T> void processResultSet(EmbedConnection conn,
      final StatementExecutorMessage<T> msg, final ResultHolder rh,
      final com.pivotal.gemfirexd.internal.iapi.sql.ResultSet rs,
      final com.pivotal.gemfirexd.internal.iapi.sql.PreparedStatement gps,
      final boolean isLocallyExecuted) throws StandardException {
    // nothing to record against a closed/absent connection
    if (conn == null || conn.isClosed()) {
      return;
    }
    init((BaseActivation)rs.getActivation(), gps);
    // for nested queries runtimeStats mode will be temporarily switched off
    // on the data store.
    if(!lcc.getRunTimeStatisticsMode() || nestedConnection == null) {
      return;
    }
    this.activation.setExecutionID(msg.getExecutionId());
    boolean continuePlanCapture = true;
    // false when the statement should not be explained (e.g. CALL, or no
    // valid statement id)
    continuePlanCapture = generateStatementDescriptor(
        msg.getConstructTime(),
        msg.getEndProcessTime(),
        isLocallyExecuted);
    if (continuePlanCapture) {
      distributionPlan.setup(activation);
      distributionPlan.processMessage(msg, rh, isLocallyExecuted);
      // doesn't matter here as statisticTiming is already set.
      // moreover, per message timing flag gets set on the lcc and hence
      // no race here.
      doXPLAIN(rs, activation, false, considerTimingInformation, isLocallyExecuted);
    }
  }
/*
* called in iapi.ResultSet.close
*
* timeStatsEnabled flag added for single VM case handling (#44201)
*/
@Override
public void doXPLAIN(
final ResultSet rs,
final Activation activation,
final boolean genStatementDesc,
final boolean timeStatsEnabled,
final boolean isLocallyExecuted) throws StandardException {
final long beginTime = NanoTimer.getTime();
try {
if (SanityManager.DEBUG) {
if (GemFireXDUtils.TracePlanGeneration) {
SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
"Capturing ResultSet processing plan for connectionID=" + activation.getConnectionID()
+ " statementID=" + activation.getStatementID()
+ " executionID=" + activation.getExecutionID());
}
}
boolean continuePlanCapture = true;
if (genStatementDesc) {
Activation act = rs.getActivation();
if (SanityManager.ASSERT) {
SanityManager.ASSERT(!act.isClosed(),
"activation shouldn't be closed at this point");
}
init((BaseActivation)act, act.getPreparedStatement());
if (nestedConnection == null) {
return;
}
// get the timings settings
considerTimingInformation = timeStatsEnabled;
continuePlanCapture = generateStatementDescriptor(
rs.getBeginExecutionTimestamp(), rs.getEndExecutionTimestamp(),
isLocallyExecuted);
}
if (continuePlanCapture) {
GemFireTransaction parentTran = null;
try {
if (SanityManager.DEBUG) {
if (GemFireXDUtils.TracePlanGeneration) {
SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
"StatementPlanCollector: starting from root resultset " + rs);
}
}
// get TopRSS and start the traversal of the RSS-tree
rs.accept(this);
if (observer != null) {
observer.processSelectMessage(stmt, rsets, rsetsTimings, scanrsets,
sortrsets, dsets);
}
parentTran = lcc.getParentOfNestedTransactionExecute();
if (parentTran != null) {
parentTran.suspendTransaction();
}
// add the filled lists to the dictionary
addArraysToSystemCatalogs();
} catch (final SQLException e) {
throw Misc.wrapSQLException(e, e);
} finally {
if (parentTran != null) {
parentTran.resumeTransactionIfSuspended();
}
}
}
} finally {
clean();
}
stats.collectStatementPlanStats( (NanoTimer.getTime() - beginTime), false /*queryNode*/);
if (genStatementDesc && nextCollector != null) {
nextCollector.doXPLAIN(rs, activation, genStatementDesc, timeStatsEnabled, isLocallyExecuted);
}
if (observer != null) {
observer.end();
}
}
  /**
   * Returns the UUID assigned to the current statement descriptor; null
   * until {@code generateStatementDescriptor} has created it.
   */
  @Override
  public UUID getStatementUUID() {
    return stmtUUID;
  }
/*this method should get called exactly once per doXPLAIN */
private boolean generateStatementDescriptor(
Timestamp beginExeTime,
Timestamp endExeTime,
boolean isLocallyExecuted) throws StandardException {
if (SanityManager.ASSERT) {
if (preStmt == null) {
SanityManager.THROWASSERT("statement null for activation "
+ activation);
}
}
// extract stmt type
final String type = XPLAINUtil.getStatementType(preStmt.getUserQueryString(activation.getLanguageConnectionContext()));
// don`t explain CALL Statements, quick implementation
if (type == null || type.equalsIgnoreCase("C") && no_call_stmts) {
return false;
}
// placeholder for the stmt timings UUID
UUID stmtTimingsUUID = null;
// 1. create new Statement Descriptor
// create new UUID
long stmt_id = this.activation.getStatementID();
if(stmt_id == -1) {
if (GemFireXDUtils.TracePlanGeneration) {
SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
"Returning without plan capture due to stmt_id == -1");
}
return false;
}
stmtUUID = dd.getUUIDFactory().createUUID(stmt_id,
this.activation.getExecutionID());
if(isLocallyExecuted) {
((BasicUUID)stmtUUID).setLocallyExecuted(1);
}
// get transaction ID
final String xaID = lcc.getTransactionExecute().getTransactionIdString();
// get session ID
final String sessionID = Integer.toString(lcc.getInstanceNumber());
// get the JVM ID
final String jvmID = Integer.toString(JVMInfo.JDK_ID);
// get the OS ID
final String osID = System.getProperty("os.name");
// the current system time
final long current = System.currentTimeMillis();
// the xplain type
final String XPLAINtype = lcc.explainConnection() ? XPLAINUtil.XPLAIN_ONLY
: XPLAINUtil.XPLAIN_FULL;
// the xplain time
final Timestamp time = new Timestamp(
current);
// the thread id
final String threadID = Thread.currentThread().toString();
final String origin_member;
if (lcc.isConnectionForRemote() && sender != null) {
origin_member = sender;
}
else {
if(SanityManager.ASSERT) {
// remote connection won't execute a query locally ever.
SanityManager.ASSERT(isLocallyExecuted || !lcc.isConnectionForRemote());
}
final GemFireCacheImpl c = Misc.getGemFireCacheNoThrow();
if (c != null) {
origin_member = c
.getDistributedSystem()
.getDistributedMember()
.toString();
}
else {
origin_member = null;
}
}
long exeTime = -1;
if (SanityManager.DEBUG) {
if (GemFireXDUtils.TracePlanGeneration) {
if (endExeTime == null || beginExeTime == null) {
SanityManager.THROWASSERT("beginExeTs=" + beginExeTime + " endExeTs="
+ endExeTime);
}
}
}
exeTime = endExeTime.getTime() - beginExeTime.getTime();
if (SanityManager.DEBUG) {
if (GemFireXDUtils.TracePlanGeneration) {
SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
"Introducing Statement Descriptor for "
+ preStmt.getUserQueryString(activation.getLanguageConnectionContext())
+ " statementId="
+ activation.getStatementID()
+ " executionId="
+ activation.getExecutionID()
+ " uuid="
+ stmtUUID
+ (this.considerTimingInformation ? " with time stats "
: " without time stats ") + " isLocallyExecuted="
+ isLocallyExecuted, new Throwable());
}
}
stmt = new XPLAINStatementDescriptor(
stmtUUID, // unique statement UUID
activation.getCursorName(), // the statement name
type, // the statement type
preStmt.getUserQueryString(activation.getLanguageConnectionContext()), // the statement text
jvmID, // the JVM ID
osID, // the OS ID
String.valueOf(GemFireStore.getMyId()),
origin_member,
Boolean.valueOf(isLocallyExecuted).toString(),
XPLAINtype, // the EXPLAIN tpye
time, // the EXPLAIN Timestamp
threadID, // the Thread ID
xaID, // the transaction ID
sessionID, // the Session ID
lcc.getDbname(), // the Database name
lcc.getDrdaID(), // the DRDA ID
Long.valueOf(preStmt.getParseTimeInMillis()), // the Parse Time
Long.valueOf(preStmt.getBindTimeInMillis()), // the Bind Time
Long.valueOf(preStmt.getOptimizeTimeInMillis()), // the Optimize Time
Long.valueOf(preStmt.getRoutingInfoTimeInMillis()), // the QueryInfo Time
Long.valueOf(preStmt.getGenerateTimeInMillis()), // the Generate Time
Long.valueOf(preStmt.getCompileTimeInMillis()), // the Compile Time
Long.valueOf(exeTime), // the Execute Time
preStmt.getBeginCompileTimestamp(), // the Begin Compilation TS
preStmt.getEndCompileTimestamp(), // the End Compilation TS
beginExeTime, // the Begin Execution TS
endExeTime // the End Execution TS
);
// add it to system catalog
GemFireTransaction parentTran = null;
try {
parentTran = lcc.getParentOfNestedTransactionExecute();
if(parentTran != null) {
parentTran.suspendTransaction();
}
addStmtDescriptorsToSystemCatalog();
} catch (SQLException e) {
if (GemFireXDUtils.TracePlanGeneration) {
SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
"Got Exception while adding statement descriptors to system catalog "
+ e, e);
}
throw StandardException.plainWrapException(e);
} finally {
if (parentTran != null) {
parentTran.resumeTransactionIfSuspended();
}
}
return true;
}
  /**
   * Binds this collector to a statement execution. Must be called before any
   * visit methods: establishes the activation, the language connection
   * context, the data dictionary and (lazily) the nested connection used for
   * catalog inserts.
   */
  public void init(BaseActivation act,
      com.pivotal.gemfirexd.internal.iapi.sql.PreparedStatement preparedStatement) throws StandardException {
    activation = act;
    // lcc must be resolved before dd below
    lcc = activation.getLanguageConnectionContext();
    dd = lcc.getDataDictionary();
    // collect timings when statistics timing is on OR the connection is in
    // explain mode
    considerTimingInformation = lcc.getStatisticsTiming()
        || lcc.explainConnection();
    preStmt = (ExecPreparedStatement)preparedStatement;
    if (nestedConnection == null) {
      nestedConnection = getDefaultConn();
    }
  }
  /**
   * Drops the per-statement references. Note the nested connection and the
   * collected descriptor lists are released by clean(), not here.
   */
  @Override
  public void clear() {
    preStmt = null;
    activation = null;
    lcc = null;
    dd = null;
  }
// ---------------------------------------------------------
// helper methods
// ---------------------------------------------------------
  /** Returns the UUID of the statement currently being explained (null before capture). */
  public UUID getStmtUUID() {
    return stmtUUID;
  }
  /**
   * This method cleans up things after explanation. It frees kept resources
   * and still-held references (nested connection, descriptors, stacks).
   */
  private void clean() {
    // forget about all the system objects
    activation = null;
    lcc = null;
    if (nestedConnection != null) {
      try {
        nestedConnection.close();
      } catch (SQLException ignore) { } // best-effort close on cleanup
    }
    nestedConnection = null;
    dd = null;
    // forget about the stmt descriptors
    // NOTE(review): stmtUUID itself is not nulled here -- confirm intentional
    stmt = null;
    stmtTimings = null;
    // reset the descriptor lists to keep memory low
    rsets.clear();
    rsetsTimings.clear();
    sortrsets.clear();
    scanrsets.clear();
    dsets.clear();
    // clear stack, although it must be already empty...
    UUIDStack.clear();
  }
/**
* Open a nested Connection with which to execute INSERT statements.
*/
private final Connection getDefaultConn() throws StandardException {
final ConnectionContext cc = (ConnectionContext)lcc
.getContextManager()
.getContext(
ConnectionContext.CONTEXT_ID);
if (cc == null) {
return null;
}
Connection conn = null;
try {
conn = cc.getNestedConnection(true);
} catch (SQLException sqle) {
// ignore no current connection. instead return null.
if (!SQLState.NO_CURRENT_CONNECTION.equals(sqle.getSQLState())) {
throw Misc.wrapSQLException(sqle, sqle);
}
}
if (SanityManager.ASSERT) {
assert conn instanceof EmbedConnection;
if (conn != null && ((EmbedConnection)conn).getLanguageConnectionContext() != lcc) {
SanityManager
.THROWASSERT("Nested Connection returning with different LCC ");
}
}
return conn;
}
/**
* This method writes only the stmt and its timing descriptor to the
* dataDictionary
*
*/
private void addStmtDescriptorsToSystemCatalog() throws StandardException,
SQLException {
final boolean statsSave = lcc.getRunTimeStatisticsMode();
try {
lcc.setRunTimeStatisticsMode(false, true);
assert nestedConnection != null : "NestedConnection shouldn't be null at this point";
if(GemFireXDUtils.TracePlanGeneration) {
//sb fix lcc#cleanupOnError 3418 ac.reset() verifyStmtId(stmtUUID, conn);
}
PreparedStatement ps = nestedConnection.prepareStatement(lcc
.getExplainStatement(XPLAINStatementDescriptor.TABLENAME_STRING));
stmt.setStatementParameters(ps);
int updateCount = ps.executeUpdate();
if (SanityManager.ASSERT) {
if (GemFireXDUtils.TracePlanGeneration) {
if (updateCount != 1) {
SanityManager.DEBUG_PRINT("warning", "insert for " + stmt
+ " statement should have succeeded. updateCount=" + updateCount);
}
}
}
ps.close();
} finally {
lcc.setRunTimeStatisticsMode(statsSave, true);
}
}
/**
* This method writes the created descriptor arrays to the cooresponding
* system catalogs.
* @throws IOException
*/
private void addArraysToSystemCatalogs() throws StandardException,
SQLException {
if (rsets.size() == 0) {
if (GemFireXDUtils.TracePlanGeneration) {
SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
"NO XML to create as no query layers are present for " + stmtUUID);
}
return;
}
StringBuilder xmlFragment = new StringBuilder();
try {
rankResultSetsByTimings();
createXMLFragment(0, 0, xmlFragment, new StringBuilder("root/"));
if (GemFireXDUtils.TracePlanGeneration) {
SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION, "XML Fragment aquired " + xmlFragment);
if (GemFireXDUtils.TracePlanAssertion) {
SanityManager.ASSERT(CreateXML.testXML(xmlFragment));
}
}
}
catch(Throwable t) {
System.out.println("exception occurred.");
t.printStackTrace(System.err);
return;
}
final boolean statsSave = lcc.getRunTimeStatisticsMode();
try {
Iterator<? extends XPLAINTableDescriptor> iter;
lcc.setRunTimeStatisticsMode(false, true);
assert nestedConnection != null : "NestedConnection shouldn't be null at this point";
PreparedStatement ps = nestedConnection.prepareStatement(lcc
.getExplainStatement("SYSXPLAIN_RESULTSETS"));
XPLAINResultSetDescriptor.setStatementParameters(nestedConnection, ps, stmtUUID, xmlFragment);
final int updateCount = ps.executeUpdate();
assert updateCount == 1: "insert for " + xmlFragment
+ " should have succeeded ";
ps.close();
/*
iter = rsets.iterator();
while (iter.hasNext()) {
final XPLAINResultSetDescriptor rset = (XPLAINResultSetDescriptor)iter
.next();
rset.setStatementParameters(ps);
if (SanityManager.DEBUG) {
if (GemFireXDUtils.TracePlanGeneration) {
SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
"XPLAINResultSetDescriptor " + rset);
}
}
final int updateCount = ps.executeUpdate();
assert updateCount == 1: "insert for " + rset
+ " should have succeeded ";
if (observer != null) {
observer.processedResultSetDescriptor(rset);
}
}
ps.close();
// add the resultset timings descriptors, if timing is on
if (considerTimingInformation) {
ps = conn.prepareStatement(lcc
.getExplainStatement("SYSXPLAIN_RESULTSET_TIMINGS"));
iter = rsetsTimings.iterator();
while (iter.hasNext()) {
final XPLAINResultSetTimingsDescriptor rsetT = (XPLAINResultSetTimingsDescriptor)iter
.next();
rsetT.setStatementParameters(ps);
if (SanityManager.DEBUG) {
if (GemFireXDUtils.TracePlanGeneration) {
SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
"XPLAINResultSetTimingsDescriptor " + rsetT);
}
}
final int updateCount = ps.executeUpdate();
assert updateCount == 1: "insert for " + rsetT
+ " timing info should have succeeded ";
if (observer != null) {
observer.processedResultSetTimingDescriptor(rsetT);
}
}
ps.close();
}
ps = conn.prepareStatement(lcc
.getExplainStatement("SYSXPLAIN_SCAN_PROPS"));
iter = scanrsets.iterator();
while (iter.hasNext()) {
final XPLAINScanPropsDescriptor scanProps = (XPLAINScanPropsDescriptor)iter
.next();
scanProps.setStatementParameters(ps);
if (SanityManager.DEBUG) {
if (GemFireXDUtils.TracePlanGeneration) {
SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
"XPLAINScanPropsDescriptor " + scanProps);
}
}
final int updateCount = ps.executeUpdate();
assert updateCount == 1: "insert for " + scanProps
+ " scan info should have succeeded ";
if (observer != null) {
observer.processedScanPropsDescriptor(scanProps);
}
}
ps.close();
ps = conn.prepareStatement(lcc
.getExplainStatement("SYSXPLAIN_SORT_PROPS"));
iter = sortrsets.iterator();
while (iter.hasNext()) {
final XPLAINSortPropsDescriptor sortProps = (XPLAINSortPropsDescriptor)iter
.next();
sortProps.setStatementParameters(ps);
if (SanityManager.DEBUG) {
if (GemFireXDUtils.TracePlanGeneration) {
SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
"XPLAINSortPropsDescriptor " + sortProps);
}
}
final int updateCount = ps.executeUpdate();
assert updateCount == 1: "insert for " + sortProps
+ " sort info should have succeeded ";
if (observer != null) {
observer.processedSortPropsDescriptor(sortProps);
}
}
ps.close();
ps = conn.prepareStatement(lcc
.getExplainStatement("SYSXPLAIN_DIST_PROPS"));
iter = dsets.iterator();
while (iter.hasNext()) {
final XPLAINDistPropsDescriptor distProps = (XPLAINDistPropsDescriptor)iter
.next();
distProps.setStatementParameters(ps);
if (SanityManager.DEBUG) {
if (GemFireXDUtils.TracePlanGeneration) {
SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
"XPLAINDistributionPropsDescriptor " + distProps);
}
}
final int updateCount = ps.executeUpdate();
assert updateCount == 1: "insert for " + distProps
+ " distribution info should have succeeded ";
if (observer != null) {
observer.processedDistPropsDescriptor(distProps);
}
}
ps.close();
*/
} finally {
lcc.setRunTimeStatisticsMode(statsSave, true);
}
}
private void rankResultSetsByTimings() {
// have to make a copy to determine the sort, note rsets captures the order of insertion.
XPLAINResultSetDescriptor[] sortedDescs = rsets.toArray(new XPLAINResultSetDescriptor[rsets.size()]);
Arrays.sort(sortedDescs);
double totalExecuteTimeNanos = 0;
for (XPLAINResultSetDescriptor r : rsets) {
totalExecuteTimeNanos += r.getExecuteTime();
}
if (GemFireXDUtils.TracePlanGeneration) {
SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
"Ranking resultSets of size " + rsets.size()
+ " with TotalExecutionTimeNanos=" + totalExecuteTimeNanos
+ " ResultSets[]" + Arrays.toString(sortedDescs));
}
int rank = 1;
for (XPLAINResultSetDescriptor r : sortedDescs) {
r.setRank(rank++);
r.setTotalExecuteTimeNanos(totalExecuteTimeNanos);
}
}
  /**
   * Recursively renders the collected result-set descriptors (stored in
   * pre-order in {@code rsets}) as nested XML {@code <node>} elements.
   *
   * @param xmlDepth indentation depth for the emitted XML
   * @param currentLevel index into {@code rsets} of the node to render
   * @param sb target buffer receiving the XML text
   * @param lineage slash-separated path of ancestor node names
   * @return the index of the last descriptor consumed by this subtree
   */
  private int createXMLFragment(final int xmlDepth, int currentLevel, final StringBuilder sb, final StringBuilder lineage) {
    if (currentLevel >= rsets.size()) {
      return currentLevel;
    }
    final XPLAINResultSetDescriptor rdesc = rsets.get(currentLevel);
    final StringBuilder currentlineage = new StringBuilder(lineage).append(rdesc.rs_name).append("/");
    PlanUtils.addSpaces(sb, xmlDepth).append("<node");
    PlanUtils.xmlAttribute(sb, "lineage", currentlineage);
    rdesc.getXMLAttributes(sb, observer);
    if ( rdesc.num_children > 0) {
      sb.append(">\n");
    }
    // children immediately follow their parent in rsets (pre-order layout)
    for (int i = 1; i <= rdesc.num_children; i++) {
      currentLevel = createXMLFragment(xmlDepth + 1, currentLevel + 1, sb, currentlineage);
    }
    if ( rdesc.num_children > 0) {
      PlanUtils.addSpaces(sb, xmlDepth).append("</node>\n");
    }
    else {
      sb.append("/>\n"); // leaf: self-closing element
    }
    if (observer != null) {
      observer.processedResultSetDescriptor(rdesc);
    }
    return currentLevel;
  }
/**
* Return the time for all operations performed by this node, but not the time
* for the children of this node.
*
*/
private long getNodeTime(
final BasicNoPutResultSetImpl currentrs) {
//TODO:[sb]:QP: implement appropriately for GemFireDistributedResultSet (excluding iteration timing).
long time = currentrs.getTimeSpent(ResultSet.CURRENT_RESULTSET_ONLY, ResultSet.ALL);
if (SanityManager.ASSERT) {
if (GemFireXDUtils.TracePlanGeneration) {
/*SanityManager.ASSERT(time > 0, currentrs
+ " had execute time zero for statementId=" + stmtUUID);*/
}
}
// in case the clock didn't ticked.
return time == 0 ? 1 : time;
}
  /**
   * Builds and registers a timing descriptor from the standard timing
   * counters of a {@link BasicNoPutResultSetImpl} node; projection and
   * restriction timings do not apply (-1).
   */
  private XPLAINResultSetTimingsDescriptor createResultSetTimingDescriptor(
      final BasicNoPutResultSetImpl bnprs,
      final UUID timingID) {
    return createResultSetTimingDescriptor(
        timingID,
        bnprs.constructorTime,
        bnprs.openTime,
        bnprs.nextTime,
        bnprs.closeTime,
        getNodeTime(bnprs),
        bnprs.rowsSeen,
        -1,
        -1);
  }
  /**
   * Builds and registers a timing descriptor for a no-rows (DDL/DML) node;
   * only the overall execute time is available, all other counters are
   * marked not-applicable (-1).
   */
  private XPLAINResultSetTimingsDescriptor createResultSetTimingDescriptor(
      final NoRowsResultSetImpl nrrs,
      final UUID timingID) {
    return createResultSetTimingDescriptor(
        timingID,
        -1,
        -1,
        -1,
        -1,
        nrrs.getExecuteTime(),
        -1,
        -1,
        -1);
  }
  /**
   * Builds and registers a timing descriptor for a project/restrict node,
   * which additionally carries per-node projection and restriction timings.
   */
  private XPLAINResultSetTimingsDescriptor createResultSetTimingDescriptor(
      final ProjectRestrictResultSet rs,
      final UUID timingID) {
    return createResultSetTimingDescriptor(
        timingID,
        rs.constructorTime,
        rs.openTime,
        rs.nextTime,
        rs.closeTime,
        getNodeTime(rs),
        rs.rowsSeen,
        rs.projectionTime,
        rs.restrictionTime);
  }
  /**
   * Creates a timing descriptor from raw counters, derives the average
   * per-row next() time, records it in {@code rsetsTimings} and returns it.
   * A value of -1 for any counter means "not applicable".
   */
  public XPLAINResultSetTimingsDescriptor createResultSetTimingDescriptor(
      final UUID timingID,
      final long constructorTime,
      final long openTime,
      final long nextTime,
      final long closeTime,
      final long nodeTime,
      final int rowsSeen,
      final long projectionTime,
      final long restrictionTime) {
    final XPLAINResultSetTimingsDescriptor timing_desc = new XPLAINResultSetTimingsDescriptor(
        timingID, constructorTime, openTime, nextTime, closeTime, nodeTime,
        nextTime >= 0 ? XPLAINUtil.getAVGNextTime(nextTime, rowsSeen) : -1,
        projectionTime, restrictionTime, -1, // the
        // temp_cong_create_time
        -1 // the temp_cong_fetch_time
    );
    if (SanityManager.DEBUG) {
      if (GemFireXDUtils.TracePlanGeneration) {
        SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
            "StatementPlanCollector: recording timing of resultset " + timing_desc);
      }
    }
    rsetsTimings.add(timing_desc);
    return timing_desc;
  }
  /**
   * Creates a result-set descriptor from a {@link BasicNoPutResultSetImpl}
   * node's standard counters. When {@code rowsReturned} is -1 the returned
   * row count is derived as rowsSeen - rowsFiltered.
   */
  private XPLAINResultSetDescriptor createResultSetDescriptor(
      final BasicNoPutResultSetImpl rs,
      final XPLAINResultSetTimingsDescriptor timingID,
      final String lockMode,
      final String lockGran,
      final String rsxplaintype,
      final String rsxplaindetail,
      final XPLAINScanPropsDescriptor scan,
      final XPLAINSortPropsDescriptor sort,
      final int rowsReturned) {
    return createResultSetDescriptor(
        rs.getClass().getSimpleName(),
        timingID,
        lockMode,
        lockGran,
        rsxplaintype,
        rsxplaindetail,
        rs.numOpens,
        rs.optimizerEstimatedRowCount,
        rs.optimizerEstimatedCost,
        rs.rowsSeen,
        rs.rowsFiltered,
        rowsReturned == -1 ? rs.rowsSeen - rs.rowsFiltered : rowsReturned,
        scan,
        sort, null);
  }
  /**
   * Creates a result-set descriptor for a no-rows (DDL/DML) node; row and
   * cost counters do not apply (-1), and no lock/scan/sort info is recorded.
   */
  private XPLAINResultSetDescriptor createResultSetDescriptor(
      final NoRowsResultSetImpl rs,
      final XPLAINResultSetTimingsDescriptor timingID,
      final String rsxplaintype,
      final String rsxplaindetail) {
    return createResultSetDescriptor(
        rs.getClass().getSimpleName(),
        timingID,
        null,
        null,
        rsxplaintype,
        rsxplaindetail,
        -1,
        -1,
        -1,
        -1,
        -1,
        -1,
        null,
        null, null);
  }
  /**
   * Creates a result-set descriptor, links it to its parent via the UUID
   * stack, registers it in {@code rsets} and returns it. Negative numeric
   * arguments are stored as null ("unknown").
   */
  public XPLAINResultSetDescriptor createResultSetDescriptor(
      final String rs_name,
      final XPLAINResultSetTimingsDescriptor timingID,
      final String lockMode,
      final String lockGran,
      final String rsxplaintype,
      final String rsxplaindetail,
      final int numOpens,
      final double optimizerEstimatedRowCount,
      final double optimizerEstimatedCost,
      final int rowsSeen,
      final int rowsFiltered,
      final int returned_rows,
      final XPLAINScanPropsDescriptor scan,
      final XPLAINSortPropsDescriptor sort, final XPLAINDistPropsDescriptor distdesc) {
    final UUID rsID = dd.getUUIDFactory().createUUID();
    final XPLAINResultSetDescriptor rsdesc = new XPLAINResultSetDescriptor(
        rs_name,
        noChildren,
        rsets.size(),
        rsID,
        rsxplaintype,
        rsxplaindetail,
        numOpens < 0 ? null : Integer.valueOf(numOpens),
        null, // the number of index updates
        lockMode, // lock mode
        lockGran, // lock granularity
        // parent RS UUID, presumably pushed earlier by a related
        // descriptor -- NOTE(review): confirm push/pop ordering
        (UUIDStack.isEmpty() ? null : UUIDStack.pop()),
        optimizerEstimatedRowCount < 0 ? null : Double
            .valueOf(optimizerEstimatedRowCount),
        optimizerEstimatedCost < 0 ? null : Double
            .valueOf(optimizerEstimatedCost),
        null, // the affected rows
        null, // the deferred rows
        null, // the input rows
        Integer.valueOf(rowsSeen), // the seen rows
        null, // the seen rows right
        Integer.valueOf(rowsFiltered), // the filtered rows
        returned_rows < 0 ? null : returned_rows,// the returned rows
        null, // the empty right rows
        null, // index key optimization
        scan,
        sort,
        stmtUUID,
        timingID, // the stmt UUID
        distdesc);
    // make this node's UUID available as the parent link for later nodes
    pushUUIDnoChildren(rsID);
    if (SanityManager.DEBUG) {
      if (GemFireXDUtils.TracePlanGeneration) {
        SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
            "StatementPlanCollector: recording resultset " + rsdesc);
      }
    }
    rsets.add(rsdesc);
    return rsdesc;
  }
  /**
   * Creates a scan-properties descriptor, merges the container's scan
   * {@link Properties} into it, registers it in {@code scanrsets} and
   * returns it. Counters that are not known at this point are passed as
   * null and may be filled in from the scan properties.
   */
  public XPLAINScanPropsDescriptor createScanPropDescriptor(
      final String scanObjectType,
      final String scanObjectName,
      final String startPosition,
      final String stopPosition,
      final UUID scanID,
      final int isoLevel,
      final int rowsPerRead,
      final Qualifier[][] qualifiers,
      final Properties scanProperties,
      final int[] hashkey_columns) {
    final XPLAINScanPropsDescriptor scanRSDescriptor = new XPLAINScanPropsDescriptor(
        scanID,
        scanObjectName,
        scanObjectType,
        null, // the scan type:
        // heap, btree, sort
        XPLAINUtil.getIsolationLevelCode(isoLevel), // the isolation level
        null, // the number of visited pages
        null, // the number of visited rows
        null, // the number of qualified rows
        null, // the number of visited deleted rows
        null, // the number of fetched columns
        null, // the bitset of fetched columns
        null, // the btree height
        rowsPerRead < 0 ? null : Integer.valueOf(rowsPerRead), // fetchSize
        startPosition,
        stopPosition,
        NoPutResultSetImpl.printQualifiers(qualifiers, true),
        null, // the next qualifiers
        XPLAINUtil.getHashKeyColumnNumberString(hashkey_columns), // the hash
        // key column
        // numbers
        null // the hash table size
    );
    // copy the scan properties into a FormatableProperties for extraction
    final FormatableProperties props = new FormatableProperties();
    if (scanProperties != null) {
      for (final Enumeration<?> e = scanProperties.keys(); e.hasMoreElements();) {
        final String key = (String)e.nextElement();
        props.put(
            key,
            scanProperties.get(key));
      }
    }
    scanrsets.add(XPLAINUtil.extractScanProps(
        scanRSDescriptor,
        props));
    return scanRSDescriptor;
  }
  /**
   * Creates a sort-properties descriptor, merges the supplied sort
   * {@link Properties} into it, registers it in {@code sortrsets} and
   * returns it. Negative row counts are stored as null ("unknown").
   */
  public XPLAINSortPropsDescriptor createSortPropDescriptor(
      final UUID sortID,
      final Properties props,
      final String sorttype,
      final int inputrows,
      final int outputrows,
      final int mergerows,
      final boolean eliminateDuplicates,
      final boolean inSortedOrder) {
    final XPLAINSortPropsDescriptor sortRSDescriptor = new XPLAINSortPropsDescriptor(
        sortID, // the sort props UUID
        sorttype, // the sort type, either (C)onstraint, (I)ndex or (T)able
        inputrows < 0 ? null : inputrows, // the number of input rows
        outputrows < 0 ? null : outputrows, // the number of output rows
        mergerows < 0 ? null : mergerows, // the number of merge runs
        null, // merge run details
        XPLAINUtil.getYesNoCharFromBoolean(eliminateDuplicates),// eliminate
        // duplicates
        XPLAINUtil.getYesNoCharFromBoolean(inSortedOrder), // in sorted order
        null // distinct_aggregate
    );
    sortrsets.add(XPLAINUtil.extractSortProps(
        sortRSDescriptor,
        props));
    return sortRSDescriptor;
  }
  /**
   * Registers a distribution-properties descriptor in {@code dsets},
   * assigning it a fresh UUID when it does not already carry one.
   */
  public void createDistPropDescriptor(
      final XPLAINDistPropsDescriptor desc) {
    // NOTE(review): checks getRSID() but assigns via setDistRSID() --
    // presumably both refer to the same id; confirm in the descriptor class.
    if(desc.getRSID() == null) {
      desc.setDistRSID(dd.getUUIDFactory().createUUID());
    }
    dsets.add(desc);
  }
// ---------------------------------------------------------------------
// visitor methods overridden
// ---------------------------------------------------------------------
  @Override
  public void visit(
      final GemFireDeleteResultSet rs) {
    // no plan data is collected for GemFire delete result sets yet
  }
  @Override
  public void visit(
      final DistinctGroupedAggregateResultSet rs) {
    // no plan data is collected for distinct grouped aggregates yet
  }
@Override
public void visit(
final GroupedAggregateResultSet rs) {
XPLAINResultSetTimingsDescriptor time_desc = null;
if (considerTimingInformation) {
UUID timingID = dd.getUUIDFactory().createUUID();
time_desc = createResultSetTimingDescriptor(
rs,
timingID);
}
final String rsxplaintype;
//TODO - print out the grouped column names underlying the operator
final String rsxplaindetail;
rsxplaintype = XPLAINUtil.OP_GROUP;
createResultSetDescriptor(
rs,
time_desc,
null,
null,
rsxplaintype,
null,
null,
null,
rs.rowsReturned);
}
  @Override
  public void visit(
      final DistinctScanResultSet rs) {
    // no plan data is collected for distinct scans yet
  }
  /**
   * Records a projection/filter node. The operator code depends on which of
   * projection and restriction the node actually performs; the projected
   * column list (when present) is sent as the detail string.
   */
  @Override
  public void visit(
      final ProjectRestrictResultSet rs) {
    XPLAINResultSetTimingsDescriptor time_desc = null;
    if (considerTimingInformation) {
      UUID timingID = dd.getUUIDFactory().createUUID();
      time_desc = createResultSetTimingDescriptor(
          rs,
          timingID);
    }
    final String rsxplaintype;
    final String rsxplaindetail;
    if (rs.restriction != null && rs.doesProjection) {
      rsxplaintype = XPLAINUtil.OP_PROJ_RESTRICT;
    }
    else if (rs.doesProjection) {
      rsxplaintype = XPLAINUtil.OP_PROJECT;
    }
    else if (rs.restriction != null) {
      rsxplaintype = XPLAINUtil.OP_FILTER;
    }
    else {
      // neither projection nor restriction: fall back to the combined code
      rsxplaintype = XPLAINUtil.OP_PROJ_RESTRICT;
    }
    // Send projected column list as detail
    // TODO : also generate string equivalent of restriction nodes
    // and append to this detail
    // And handle scenarios with no projected columns as well
    if (rs.projectedColumns != null)
    {
      rsxplaindetail = rs.projectedColumns;
    }
    else
    {
      rsxplaindetail = null;
    }
    createResultSetDescriptor(
        rs,
        time_desc,
        null,
        null,
        rsxplaintype,
        rsxplaindetail,
        null,
        null,
        -1);
  }
  /**
   * Records a ROWIDSCAN node (index-row to base-row lookup). The detail
   * string lists the index name followed by the base-table columns fetched
   * from the heap, e.g. "LINEITEM : L_ORDERKEY, L_SHIPDATE, L_PARTKEY".
   */
  @Override
  public void visit(
      final IndexRowToBaseRowResultSet rs) {
    XPLAINResultSetTimingsDescriptor time_desc = null;
    if (considerTimingInformation) {
      UUID timingID = dd.getUUIDFactory().createUUID();
      time_desc = createResultSetTimingDescriptor(
          rs,
          timingID);
    }
    // GemStone changes BEGIN
    // Get string consisting of column names and put it in OP_DETAILS
    // For printing out during EXPLAIN
    GemFireContainer gfc = rs.gfc;
    RowFormatter rf = gfc.getCurrentRowFormatter();
    StringBuilder accessedCols = new StringBuilder(rs.indexName).append(" : ");
    boolean first = true;
    if (rs.accessedHeapCols != null && rf != null)
    {
      for (int inPosition = 0; inPosition < rs.accessedHeapCols.getLength(); inPosition++)
      {
        if (rs.accessedHeapCols.isSet(inPosition))
        {
          ColumnDescriptor cd = rf.getColumnDescriptor(inPosition);
          if (cd == null) {
            continue; // bit set but no descriptor at this position
          }
          if (!first) {
            accessedCols.append(", "); // separator between column names
          }
          else {
            first = false;
          }
          accessedCols.append(cd.getColumnName());
        }
      }
    }
    createResultSetDescriptor(
        rs,
        time_desc,
        null,
        null,
        XPLAINUtil.OP_ROWIDSCAN,
        accessedCols.toString(),
        null,
        null,
        -1);
    //Gemstone changes END
  }
@Override
public void visit(
final ScrollInsensitiveResultSet rs) {
XPLAINResultSetTimingsDescriptor time_desc = null;
if (considerTimingInformation) {
UUID timingID = dd.getUUIDFactory().createUUID();
time_desc = createResultSetTimingDescriptor(
rs,
timingID);
}
createResultSetDescriptor(
rs,
time_desc,
null,
null,
XPLAINUtil.OP_SCROLL,
"(" + rs.resultSetNumber + "), " + "[" + rs.numFromHashTable + ", "
+ rs.numToHashTable + "]",
null,
null,
-1);
}
  /**
   * Records a GET / GETALL / local-index GETALL node for a region-based
   * GemFire result set. The detail string is the qualified table name
   * followed by the projected column names from the projection formatter.
   */
  @Override
  public void visit(final GemFireResultSet rs) {
    XPLAINResultSetTimingsDescriptor time_desc = null;
    if (considerTimingInformation) {
      UUID timingID = dd.getUUIDFactory().createUUID();
      // constructor time is not tracked for this node type (-1)
      time_desc = createResultSetTimingDescriptor(
          timingID,
          -1,
          rs.openTime,
          rs.nextTime,
          rs.closeTime,
          rs.getTimeSpent(ResultSet.CURRENT_RESULTSET_ONLY, ResultSet.ALL),
          0,
          -1,
          -1);
    }
    // Make detail string for EXPLAIN showing
    // Region name, column name that is source of GET
    GemFireContainer gfc = rs.getGFContainer();
    RowFormatter rf = rs.getProjectionFormat();
    StringBuilder rsexplaindetail = new StringBuilder(gfc.getSchemaName()).append(".").append(gfc.getTableName());
    if (rf != null)
    {
      boolean first = true;
      for (int inPosition = 0; inPosition < rf.getNumColumns(); inPosition++)
      {
        ColumnDescriptor cd = rf.getColumnDescriptor(inPosition);
        if (cd == null) {
          continue; // position without a descriptor
        }
        if (!first) {
          rsexplaindetail.append(", ");
        }
        else {
          first = false;
          rsexplaindetail.append(" "); // separates table name from columns
        }
        rsexplaindetail.append(cd.getColumnName());
      }
    }
    createResultSetDescriptor(
        rs.getClass().getSimpleName(),
        time_desc,
        null,
        null,
        rs.isGetAllLocalIndexPlan() ? XPLAINUtil.OP_LI_GETTALL : (rs
            .isGetAllPlan() ? XPLAINUtil.OP_GETTALL : XPLAINUtil.OP_GET),
        rsexplaindetail.toString(),
        1,
        rs.getEstimatedRowCount(),
        -1,
        -1,
        -1,
        rs.rowsReturned,
        null,
        null, null);
  }
  @Override
  public void visit(
      final NormalizeResultSet rs) {
    // normalization nodes carry no plan information worth capturing
  }
  @Override
  public void visit(
      final AnyResultSet anyResultSet) {
    // ANY (quantified subquery) nodes are not captured in the plan yet
  }
  /**
   * Delegates a distributed result set to the distribution plan collector,
   * which captures the distribution-level properties (aggregation,
   * distinct, group by, outer-join special cases, n-way merge).
   */
  @Override
  public void visit(
      final GemFireDistributedResultSet rs) {
    if (SanityManager.DEBUG) {
      if (GemFireXDUtils.TracePlanGeneration) {
        SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_PLAN_GENERATION,
            "StatementPlanCollector: Processing " + rs);
      }
    }
    distributionPlan.processGFDistResultSet(rs);
  }
  @Override
  public void visit(
      final LastIndexKeyResultSet rs) {
    // last-index-key scans are not captured in the plan yet
  }
  @Override
  public void visit(
      final MiscResultSet rs) {
    // misc (DDL) result sets are not captured in the plan yet
  }
  @Override
  public void visit(
      final OnceResultSet rs) {
    // ONCE (scalar subquery) nodes are not captured in the plan yet
  }
  @Override
  public void visit(
      final ProcedureProcessorResultSet rs) {
    // procedure processor result sets are not captured in the plan yet
  }
  @Override
  public void visit(
      final GfxdSubqueryResultSet rs) {
    // subquery distribution details are captured by the distribution plan
    distributionPlan.processDistribution(rs);
  }
  @Override
  public void visit(
      final NcjPullResultSet rs) {
    // NCJ pull distribution details are captured by the distribution plan
    distributionPlan.processDistribution(rs);
  }
  @Override
  public void visit(
      final TemporaryRowHolderResultSet rs) {
    // temporary row holders are internal and not captured in the plan
  }
  @Override
  public void visit(
      final WindowResultSet rs) {
    // window function nodes are not captured in the plan yet
  }
  /**
   * Records a SORT node together with its sort properties (distinct /
   * already-sorted flags) and captures the sort's input-row count on the
   * result-set descriptor.
   */
  @Override
  public void visit(
      final SortResultSet rs) {
    XPLAINResultSetTimingsDescriptor time_desc = null;
    if (considerTimingInformation) {
      UUID timingID = dd.getUUIDFactory().createUUID();
      time_desc = createResultSetTimingDescriptor(
          rs,
          timingID);
    }
    UUID sortID = dd.getUUIDFactory().createUUID();
    // input/output/merge row counts are unknown at this point (-1)
    final XPLAINSortPropsDescriptor sort_desc = createSortPropDescriptor(
        sortID,
        rs.sortProperties,
        null,
        -1,
        -1,
        -1,
        rs.distinct,
        rs.isInSortedOrder);
    XPLAINResultSetDescriptor rsdesc = createResultSetDescriptor(
        rs,
        time_desc,
        null,
        null,
        XPLAINUtil.OP_SORT,
        String.valueOf(rs.resultSetNumber),
        null,
        sort_desc,
        rs.rowsReturned);
    // we have inputRows extra info than others... so update it.
    rsdesc.setInputRows(rs.rowsInput);
  }
  /**
   * Records a HASHTABLE node for an in-memory hash table build, including a
   * scan-properties descriptor describing the temporary hash table (next
   * qualifiers, key columns, hash table size).
   */
  @Override
  public void visit(
      final HashTableResultSet rs) {
    XPLAINResultSetTimingsDescriptor time_desc = null;
    if (considerTimingInformation) {
      UUID timingID = dd.getUUIDFactory().createUUID();
      time_desc = createResultSetTimingDescriptor(
          rs,
          timingID);
    }
    final UUID scanID = dd.getUUIDFactory().createUUID();
    final XPLAINScanPropsDescriptor scan_desc = createScanPropDescriptor(
        null,
        "Temporary HashTable",
        null,
        null,
        scanID,
        TransactionController.ISOLATION_READ_COMMITTED,
        -1,
        rs.nextQualifiers,
        rs.scanProperties,
        rs.keyColumns);
    scan_desc.setHashtableSize(rs.hashtableSize);
    createResultSetDescriptor(
        rs,
        time_desc,
        null,
        null,
        XPLAINUtil.OP_HASHTABLE,
        "(" + rs.resultSetNumber + ")",
        scan_desc,
        null,
        -1);
  }
  /**
   * Records an UPDATE node and annotates the descriptor with the affected
   * row count, the deferred flag and the number of indexes maintained by
   * the update's constant action.
   */
  @Override
  public void visit(
      final UpdateResultSet rs) {
    XPLAINResultSetTimingsDescriptor time_desc = null;
    if (considerTimingInformation) {
      UUID timingID = dd.getUUIDFactory().createUUID();
      time_desc = createResultSetTimingDescriptor(
          rs,
          timingID);
    }
    final XPLAINResultSetDescriptor rsdesc = createResultSetDescriptor(
        rs,
        time_desc,
        XPLAINUtil.OP_UPDATE,
        null);
    rsdesc.setAffectedRows(rs.rowCount);
    rsdesc.setDeferredRows(rs.deferred);
    // one index row generator per maintained index
    rsdesc.setIndexesUpdated(rs.constantAction.irgs.length);
  }
  @Override
  public void visit(
      final DeleteResultSet rs,
      final int overridable) {
    // delete result sets are not captured in the plan yet
  }
  @Override
  public void visit(
      final GemFireUpdateResultSet rs,
      final int overridable) {
    // GemFire update result sets are not captured in the plan yet
  }
@Override
public String toString() {
return "STATEMENT PLAN COLLECTOR"
+ (nextCollector != null ? " + " + nextCollector.toString() : "");
}
  @Override
  public void visit(
      GemFireRegionSizeResultSet regionSizeResultSet) {
    // region-size queries are not captured in the plan
  }
@Override
public void visit(RowCountResultSet rs) {
XPLAINResultSetTimingsDescriptor time_desc = null;
if (considerTimingInformation) {
UUID timingID = dd.getUUIDFactory().createUUID();
time_desc = createResultSetTimingDescriptor(
rs,
timingID);
}
createResultSetDescriptor(
rs,
time_desc,
null,
null,
XPLAINUtil.OP_ROW_COUNT,
"(" + rs.resultSetNumber + ")",
null,
null,
-1);
}
@Override
public void visit(RowResultSet rs) {
XPLAINResultSetTimingsDescriptor time_desc = null;
if (considerTimingInformation) {
UUID timingID = dd.getUUIDFactory().createUUID();
time_desc = createResultSetTimingDescriptor(
rs,
timingID);
}
createResultSetDescriptor(
rs,
time_desc,
null,
null,
XPLAINUtil.OP_ROW,
"(" + rs.resultSetNumber + ")",
null,
null,
rs.rowsReturned);
}
@Override
public void visit(UnionResultSet rs) {
XPLAINResultSetTimingsDescriptor time_desc = null;
if (considerTimingInformation) {
UUID timingID = dd.getUUIDFactory().createUUID();
time_desc = createResultSetTimingDescriptor(
rs,
timingID);
}
createResultSetDescriptor(
rs,
time_desc,
null,
null,
XPLAINUtil.OP_UNION,
"(" + rs.resultSetNumber + ")",
null,
null,
rs.rowsReturned);
}
// ---------------------------------------------------------------------------------------
// visitor methods that are more driven by AbstractStatisticsCollector.
// ---------------------------------------------------------------------------------------
  /**
   * Records XPLAIN information for a table scan node, which may actually be
   * scanning a base table, an index, or a constraint's backing index.
   * Derives the lock mode/granularity from the scan's for-update and
   * isolation settings, writes a scan-properties descriptor, and finally
   * the result set descriptor referencing it.
   */
  @Override
  public void visit(
      final TableScanResultSet rs,
      final int overridable) {
    // Timing descriptor is only materialized when timing collection is
    // enabled for this collector.
    XPLAINResultSetTimingsDescriptor time_desc = null;
    if (considerTimingInformation) {
      UUID timingID = dd.getUUIDFactory().createUUID();
      time_desc = createResultSetTimingDescriptor(
          rs,
          timingID);
    }
    // Build a localized description of the lock taken by this scan;
    // it is translated into XPLAIN lock codes below.
    String lockString = null;
    if (rs.forUpdate()) {
      lockString = MessageService.getTextMessage(SQLState.LANG_EXCLUSIVE);
    }
    else {
      if (rs.isolationLevel == TransactionController.ISOLATION_READ_COMMITTED_NOHOLDLOCK) {
        lockString = MessageService
            .getTextMessage(SQLState.LANG_INSTANTANEOUS_SHARE);
      }
      else {
        lockString = MessageService.getTextMessage(SQLState.LANG_SHARE);
      }
    }
    final String lockMode = XPLAINUtil.getLockModeCode(lockString);
    final String lockGran = XPLAINUtil.getLockGranularityCode(lockString);
    // Which kind of object is being scanned, and the human-readable detail
    // that ends up in the EXPLAIN output.
    String rsxplaintype = null, scanObjectType = null;
    String rsxplaindetail = null, scanObjectName = null;
    String startPosition = null;
    String stopPosition = null;
    if (rs.indexName != null) {
      if (rs.isConstraint()) {
        rsxplaintype = XPLAINUtil.OP_CONSTRAINTSCAN;
        scanObjectType = "C"; // constraint
        rsxplaindetail = "C: " + rs.indexName;
        //GemStone changes BEGIN
        // Try setting scanObject name correctly
        //scanObjectName = rs.indexName;
        GemFireContainer gfc = ((MemConglomerate)rs.scoci).getGemFireContainer();
        // If container is NULL, this is a constraint over a hash table, not a
        // table-defined constraint
        if (gfc != null)
        {
          scanObjectName = gfc.getQualifiedTableName();
        }
        else
        {
          scanObjectName = "HASH SCAN:" + rs.tableName;
        }
        // Send back non-qualifier predicates used in this scan as
        // detail information
        // Qualifier preds are already sent back in scan_qualifiers
        // (note: this replaces the "C: <index>" detail assigned above,
        // clearing it entirely when there are no non-qualifier predicates)
        if (rs.nonQualPreds != null)
        {
          rsxplaindetail = "WHERE : "+rs.nonQualPreds;
        }
        else
        {
          rsxplaindetail = null;
        }
        //Gemstone changes end
      }
      else {
        rsxplaintype = XPLAINUtil.OP_INDEXSCAN;
        scanObjectType = "I"; // index
        rsxplaindetail = "";
        // If this is a case-insensitive comparison, explain it
        // (Case sensitive is the norm - if needed, can print out
        // either state)
        if (!((MemIndex)rs.scoci).caseSensitive())
        {
          rsxplaindetail += "(Case Insensitive) ";
        }
        scanObjectName = rs.indexName;
        // Send back non-qualifier predicates used in this scan as
        // detail information
        // Qualifier preds are already sent back in scan_qualifiers
        if (rs.nonQualPreds != null)
        {
          rsxplaindetail += "WHERE : "+rs.nonQualPreds;
        }
      }
      /* Start and stop position strings will be non-null
       * if the TSRS has been closed. Otherwise, we go off
       * and build the strings now.
       */
      startPosition = rs.startPositionString;
      if (startPosition == null) {
        startPosition = rs.printStartPosition();
      }
      stopPosition = rs.stopPositionString;
      if (stopPosition == null) {
        stopPosition = rs.printStopPosition();
      }
    }
    else {
      rsxplaintype = XPLAINUtil.OP_TABLESCAN;
      scanObjectType = "T"; // table
      rsxplaindetail = "T: " + rs.tableName;
      //Gemstone changes BEGIN
      // Add in schema name as well for EXPLAIN
      //scanObjectName = rs.tableName;
      scanObjectName = rs.regionName;
      //Gemstone changes END
    }
    // Persist the scan properties under a fresh id, then write the result
    // set descriptor that references them.
    final UUID scanID = dd.getUUIDFactory().createUUID();
    final XPLAINScanPropsDescriptor scan_desc = createScanPropDescriptor(
        scanObjectType,
        scanObjectName,
        startPosition,
        stopPosition,
        scanID,
        rs.isolationLevel,
        rs.rowsPerRead,
        rs.qualifiers,
        rs.getScanProperties(),
        null);
    createResultSetDescriptor(
        rs,
        time_desc,
        lockMode,
        lockGran,
        rsxplaintype,
        rsxplaindetail,
        scan_desc,
        null,
        -1); // -1: no returned-row count recorded for scans, unlike UNION/JOIN
  }
@Override
public void visit(
ScalarAggregateResultSet rs,
int overridable) {
XPLAINResultSetTimingsDescriptor time_desc = null;
if (considerTimingInformation) {
UUID timingID = dd.getUUIDFactory().createUUID();
time_desc = createResultSetTimingDescriptor(
rs,
timingID);
}
int count = rs.aggInfoList.size();
//Gemstone changes BEGIN
// Print out aggregate descriptions in OP_DETAILS
// For later inclusion in EXPLAIN plans
// This operator may be doing multiple aggregations
String aggDescription = "";
for (int i = 0; i < count; i++) {
AggregatorInfo aggInfo = (AggregatorInfo)rs.aggInfoList.elementAt(i);
if (i > 0)
{
aggDescription += ",";
}
if (aggInfo.isDistinct())
{
aggDescription += "DISTINCT ";
}
aggDescription += aggInfo.getAggregateName();
}
//Gemstone changes END
XPLAINResultSetDescriptor rsdesc = createResultSetDescriptor(
rs,
time_desc,
null,
null,
XPLAINUtil.OP_AGGREGATE,
aggDescription,
null,
null,
1); // Scalar Aggregate always returns 1 row
rsdesc.setInputRows(rs.rowsInput);
rsdesc.setIndexKeyOptimization(rs.singleInputRow ? "Y" : "N");
}
@Override
public void visit(
final JoinResultSet rs,
int overridable) {
XPLAINResultSetTimingsDescriptor time_desc = null;
if (considerTimingInformation) {
UUID timingID = dd.getUUIDFactory().createUUID();
time_desc = createResultSetTimingDescriptor(
rs,
timingID);
time_desc.setAvgNextTime(XPLAINUtil.getAVGNextTime(
rs.nextTime,
(rs.rowsSeenLeft + rs.rowsSeenRight)));
}
final String xplainType;
final int emptyRightRowsReturned;
switch (overridable) {
case 1:
xplainType = XPLAINUtil.OP_JOIN_NL;
emptyRightRowsReturned = -1;
break;
case 2:
xplainType = XPLAINUtil.OP_JOIN_NL_LO;
emptyRightRowsReturned = ((NestedLoopLeftOuterJoinResultSet)rs).emptyRightRowsReturned;
break;
case 3:
xplainType = XPLAINUtil.OP_JOIN_HASH;
emptyRightRowsReturned = -1;
break;
case 4:
xplainType = XPLAINUtil.OP_JOIN_HASH_LO;
emptyRightRowsReturned = ((HashLeftOuterJoinResultSet)rs).emptyRightRowsReturned;
break;
case 5:
xplainType = XPLAINUtil.OP_JOIN_MERGE;
emptyRightRowsReturned = -1;
break;
default:
xplainType = null;
emptyRightRowsReturned = -1;
}
StringBuilder op_details = new StringBuilder();
op_details.append(
"(").append(
rs.resultSetNumber).append(
")");
if (rs.oneRowRightSide)
op_details.append(", EXISTS JOIN");
final XPLAINResultSetDescriptor rsdesc = createResultSetDescriptor(
rs,
time_desc,
null,
null,
xplainType,
op_details.toString(),
null,
null,
rs.rowsReturned);
rsdesc.setRowsSeenRight(rs.rowsSeenRight);
rsdesc.setEmptyRightRowsReturned(emptyRightRowsReturned);
}
  /**
   * Records XPLAIN information for a hash scan node (reported as a distinct
   * scan when {@code overridable == 2}). Builds lock text, scan positioning
   * strings, a scan-properties descriptor (including the hash table size),
   * and finally the result set descriptor.
   */
  @Override
  public void visit(
      HashScanResultSet rs,
      int overridable) {
    // NOTE(review): hard-coded to false, which makes the
    // LANG_INSTANTANEOUS_SHARE branch below unreachable. Upstream Derby
    // derives this flag from the scan -- confirm whether dropping that
    // derivation here was intentional.
    boolean instantaneousLocks = false;
    HashScanResultSet hsrs = rs; // local alias for the same object as rs
    String startPosition = null;
    String stopPosition = null;
    String lockString = null;
    if (hsrs.forUpdate) {
      lockString = MessageService.getTextMessage(SQLState.LANG_EXCLUSIVE);
    }
    else {
      if (instantaneousLocks) {
        lockString = MessageService
            .getTextMessage(SQLState.LANG_INSTANTANEOUS_SHARE);
      }
      else {
        lockString = MessageService.getTextMessage(SQLState.LANG_SHARE);
      }
    }
    // Append the lock granularity (table vs row) to the localized lock text;
    // both pieces are decoded into XPLAIN codes further below.
    switch (hsrs.lockMode) {
      case TransactionController.MODE_TABLE:
        // RESOLVE: Not sure this will really work, as we
        // are tacking together English words to make a phrase.
        // Will this work in other languages?
        lockString = lockString + " "
            + MessageService.getTextMessage(SQLState.LANG_TABLE);
        break;
      case TransactionController.MODE_RECORD:
        // RESOLVE: Not sure this will really work, as we
        // are tacking together English words to make a phrase.
        // Will this work in other languages?
        lockString = lockString + " "
            + MessageService.getTextMessage(SQLState.LANG_ROW);
        break;
    }
    if (hsrs.indexName != null) {
      /* Start and stop position strings will be non-null
       * if the HSRS has been closed. Otherwise, we go off
       * and build the strings now.
       */
      startPosition = hsrs.startPositionString;
      if (startPosition == null) {
        startPosition = hsrs.printStartPosition();
      }
      stopPosition = hsrs.stopPositionString;
      if (stopPosition == null) {
        stopPosition = hsrs.printStopPosition();
      }
    }
    XPLAINResultSetTimingsDescriptor time_desc = null;
    if (considerTimingInformation) {
      UUID timingID = dd.getUUIDFactory().createUUID();
      time_desc = createResultSetTimingDescriptor(
          rs,
          timingID);
    }
    final UUID scanID = dd.getUUIDFactory().createUUID();
    // Classify what is being scanned: a constraint's backing index, a
    // plain index, or a base table.
    final String scanObjectType, scanObjectName;
    final String xplaindetail;
    if (rs.indexName != null) {
      if (rs.isConstraint) {
        scanObjectType = "C"; // constraint
        scanObjectName = rs.indexName;
        xplaindetail = "C: " + rs.indexName;
      }
      else {
        scanObjectType = "I"; // index
        scanObjectName = rs.indexName;
        xplaindetail = "I: " + rs.indexName;
      }
    }
    else {
      scanObjectType = "T"; // table
      scanObjectName = rs.tableName;
      xplaindetail = "T: " + rs.tableName;
    }
    final XPLAINScanPropsDescriptor scan_desc = createScanPropDescriptor(
        scanObjectType,
        scanObjectName,
        startPosition,
        stopPosition,
        scanID,
        rs.isolationLevel,
        -1, // rows-per-read not tracked for hash scans
        rs.scanQualifiers,
        rs.scanProperties,
        rs.keyColumns);
    scan_desc.setHashtableSize(rs.hashtableSize);
    final String lockMode = XPLAINUtil.getLockModeCode(lockString);
    final String lockGran = XPLAINUtil.getLockGranularityCode(lockString);
    createResultSetDescriptor(
        rs,
        time_desc,
        lockMode,
        lockGran,
        (overridable == 2 ? XPLAINUtil.OP_DISTINCTSCAN : XPLAINUtil.OP_HASHSCAN),
        xplaindetail,
        scan_desc,
        null,
        -1);
  }
}
| apache-2.0 |
smgoller/geode | geode-core/src/main/java/org/apache/geode/security/ResourcePermission.java | 6820 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.security;
import static org.apache.geode.cache.Region.SEPARATOR;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.apache.shiro.authz.permission.WildcardPermission;
import org.apache.shiro.util.CollectionUtils;

import org.apache.geode.annotations.Immutable;
/**
 * ResourcePermission defines the resource, the operation, the region (target) and the key involved
 * in the action to be authorized.
 *
 * It is passed to the SecurityManager for the implementation to decide whether to grant a user this
 * permission or not.
 *
 * <p>Internally the permission is stored as the four Shiro wildcard parts
 * {@code resource:operation:target:key}. Fields are assigned exactly once during construction and
 * never mutated afterwards.
 */
@Immutable
public class ResourcePermission extends WildcardPermission {

  public static final String ALL = "*";
  /** Marker used when the resource or operation part was not specified at all. */
  public static final String NULL = "NULL";

  /**
   * @deprecated use ALL
   */
  public static final String ALL_REGIONS = "*";

  /**
   * @deprecated use All
   */
  public static final String ALL_KEYS = "*";

  public enum Resource {
    ALL, NULL, CLUSTER, DATA;

    /** Returns "*" for ALL, otherwise the enum constant's name. */
    public String getName() {
      if (this == ALL) {
        return ResourcePermission.ALL;
      }
      return name();
    }
  }

  public enum Operation {
    ALL, NULL, MANAGE, WRITE, READ;

    /** Returns "*" for ALL, otherwise the enum constant's name. */
    public String getName() {
      if (this == ALL) {
        return ResourcePermission.ALL;
      }
      return name();
    }
  }

  public enum Target {
    ALL, DISK, GATEWAY, QUERY, DEPLOY;

    /** Returns "*" for ALL, otherwise the enum constant's name. */
    public String getName() {
      if (this == ALL) {
        return ResourcePermission.ALL;
      }
      return name();
    }
  }

  // these default values are used when creating an allow-all lock around an operation
  private String resource = NULL;
  private String operation = NULL;
  private String target = ALL;
  private String key = ALL;

  /** Creates the default NULL:NULL:*:* permission (allow-all lock around an operation). */
  public ResourcePermission() {
    setParts(Arrays.asList(
        Collections.singleton(this.resource),
        Collections.singleton(this.operation),
        Collections.singleton(this.target),
        Collections.singleton(this.key)));
  }

  public ResourcePermission(Resource resource, Operation operation) {
    this(resource, operation, ALL, ALL);
  }

  public ResourcePermission(Resource resource, Operation operation, String target) {
    this(resource, operation, target, ALL);
  }

  public ResourcePermission(Resource resource, Operation operation, Target target) {
    this(resource, operation, target, ALL);
  }

  public ResourcePermission(Resource resource, Operation operation, Target target, String key) {
    init(resource == null ? NULL : resource.getName(),
        operation == null ? NULL : operation.getName(), target == null ? null : target.getName(),
        key);
  }

  public ResourcePermission(Resource resource, Operation operation, String target, String key) {
    init(resource == null ? NULL : resource.getName(),
        operation == null ? NULL : operation.getName(), parseTarget(target), key);
  }

  /** Strips any leading region separator ("/") so "/regionA" and "regionA" are equivalent. */
  private String parseTarget(String target) {
    return target == null ? null : StringUtils.stripStart(target, SEPARATOR);
  }

  public ResourcePermission(String resource, String operation) {
    this(resource, operation, ALL, ALL);
  }

  public ResourcePermission(String resource, String operation, String target) {
    this(resource, operation, target, ALL);
  }

  public ResourcePermission(String resource, String operation, String target, String key) {
    // what's eventually stored are either "*", "NULL" or a valid enum except ALL.
    // Fields are never null.
    init(parsePart(resource, r -> Resource.valueOf(r).getName()),
        parsePart(operation, o -> Operation.valueOf(o).getName()), parseTarget(target), key);
  }

  /**
   * Assigns the four wildcard parts and registers them with the Shiro base class. A null target or
   * key keeps the "*" default; resource and operation have already been normalized by the callers.
   */
  private void init(String resource, String operation, String target, String key) {
    this.resource = resource;
    this.operation = operation;
    if (target != null) {
      this.target = target;
    }
    if (key != null) {
      this.key = key;
    }
    setParts(Arrays.asList(
        Collections.singleton(this.resource),
        Collections.singleton(this.operation),
        CollectionUtils.asSet(this.target.split(SUBPART_DIVIDER_TOKEN)),
        CollectionUtils.asSet(this.key.split(SUBPART_DIVIDER_TOKEN))));
  }

  /**
   * Normalizes a raw string part: null becomes "NULL", "*" stays "*", and anything else is resolved
   * through the given enum lookup (throwing IllegalArgumentException for unknown values).
   */
  private String parsePart(String part, UnaryOperator<String> operator) {
    if (part == null) {
      return NULL;
    }
    if (part.equals(ALL)) {
      return ALL;
    }
    // Upper-case with a fixed locale so enum resolution does not depend on the JVM's default
    // locale (e.g. "write" upper-cases to "WRİTE" under a Turkish locale and would fail
    // Operation.valueOf).
    return operator.apply(part.toUpperCase(Locale.ROOT));
  }

  /**
   * Returns the resource, could be either ALL, NULL, DATA or CLUSTER
   */
  public Resource getResource() {
    if (ALL.equals(resource)) {
      return Resource.ALL;
    }
    return Resource.valueOf(resource);
  }

  /**
   * Returns the operation, could be either ALL, NULL, MANAGE, WRITE or READ
   */
  public Operation getOperation() {
    if (ALL.equals(operation)) {
      return Operation.ALL;
    }
    return Operation.valueOf(operation);
  }

  /**
   * could be either "*", "NULL", "DATA", "CLUSTER"
   */
  public String getResourceString() {
    return resource;
  }

  /**
   * Returns the operation, could be either "*", "NULL", "MANAGE", "WRITE" or "READ"
   */
  public String getOperationString() {
    return operation;
  }

  /**
   * returns the regionName, or cluster target, could be "*", meaning all regions or all targets
   */
  public String getTarget() {
    return target;
  }

  /**
   * @deprecated use getTarget()
   */
  public String getRegionName() {
    return getTarget();
  }

  /**
   * returns the key, could be "*" meaning all keys.
   */
  public String getKey() {
    return key;
  }

  @Override
  public String toString() {
    List<String> parts = new ArrayList<>(Arrays.asList(resource, operation, target, key));
    // Trailing "*" parts carry no information; drop them from the string form so e.g.
    // DATA:READ:*:* renders as "DATA:READ".
    if (ALL.equals(key)) {
      parts.remove(3);
      if (ALL.equals(target)) {
        parts.remove(2);
        if (ALL.equals(operation)) {
          parts.remove(1);
        }
      }
    }
    return String.join(":", parts);
  }
}
| apache-2.0 |
wgpshashank/aerospike-client-java | test/src/com/aerospike/test/sync/large/TestLargeSet.java | 1695 | /*
* Copyright 2012-2015 Aerospike, Inc.
*
* Portions may be licensed to Aerospike, Inc. under one or more contributor
* license agreements WHICH ARE COMPATIBLE WITH THE APACHE LICENSE, VERSION 2.0.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.aerospike.test.sync.large;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import com.aerospike.client.Key;
import com.aerospike.client.Value;
import com.aerospike.test.sync.TestSync;
public class TestLargeSet extends TestSync {
	@Test
	public void largeSet() {
		final Key key = new Key(args.namespace, args.set, "setkey");
		final String binName = args.getBinName("setbin");

		// Remove any leftover record from a previous run.
		client.delete(null, key);

		// Create the large-set operator bound to this record and bin.
		final com.aerospike.client.large.LargeSet largeSet =
			client.getLargeSet(null, key, binName, null);

		// Insert three values, then drop the last one again.
		largeSet.add(Value.get("setvalue1"));
		largeSet.add(Value.get("setvalue2"));
		largeSet.add(Value.get("setvalue3"));
		largeSet.remove(Value.get("setvalue3"));

		// Two values should remain, and the second one must still be readable.
		assertEquals(2, largeSet.size());
		final String fetched = (String)largeSet.get(Value.get("setvalue2"));
		assertEquals("setvalue2", fetched);
	}
}
| apache-2.0 |
vmorsiani/jabbot | extensions/jabbot-jira-extensions/src/test/java/org/wanna/jabbot/extensions/jira/IssueParserTest.java | 1820 | package org.wanna.jabbot.extensions.jira;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.wanna.jabbot.extensions.jira.binding.Issue;
import java.io.IOException;
import java.io.InputStream;
import static org.hamcrest.CoreMatchers.*;
/**
 * Checks that JIRA issue JSON deserializes into the {@link Issue} binding,
 * both for a fully populated issue and for a "not found" error response.
 *
 * @author vmorsiani <vmorsiani>
 * @since 2014-07-02
 */
public class IssueParserTest {
	private ObjectMapper mapper;
	private InputStream inputStream;

	@Before
	public void before() {
		// Unknown JSON properties must not break parsing of the binding.
		mapper = new ObjectMapper()
				.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
	}

	@After
	public void after() throws IOException {
		if (inputStream == null) {
			return;
		}
		inputStream.close();
	}

	@Test
	public void parseFromStream() throws IOException {
		inputStream = ClassLoader.getSystemClassLoader().getResourceAsStream("mky-1.json");
		final Issue parsed = mapper.readValue(inputStream, Issue.class);

		Assert.assertThat(parsed.getKey(), is("MKY-1"));
		Assert.assertThat(parsed.getFields().getSummary(), is("First Test Issue"));
		Assert.assertThat(parsed.getFields().getAssignee().getDisplayName(), is("Administrator"));
		Assert.assertThat(parsed.getFields().getReporter().getDisplayName(), is("Administrator"));
		Assert.assertThat(parsed.getFields().getStatus().getName(), is("Open"));
		Assert.assertThat(parsed.getFields().getResolution(), nullValue());
	}

	@Test
	public void parseNotFound() throws IOException {
		// An error payload still deserializes, but without an issue key.
		inputStream = ClassLoader.getSystemClassLoader().getResourceAsStream("issue-not-found.json");
		final Issue notFound = mapper.readValue(inputStream, Issue.class);

		Assert.assertThat(notFound, notNullValue());
		Assert.assertThat(notFound.getKey(), nullValue());
	}
}
| artistic-2.0 |
Sethtroll/runelite | runelite-client/src/main/java/net/runelite/client/plugins/skillcalculator/SkillCalculatorPanel.java | 3641 | /*
* Copyright (c) 2018, Kruithne <kruithne@gmail.com>
* Copyright (c) 2018, Psikoi <https://github.com/psikoi>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.plugins.skillcalculator;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.GridLayout;
import javax.swing.ImageIcon;
import javax.swing.JScrollPane;
import javax.swing.border.EmptyBorder;
import net.runelite.api.Client;
import net.runelite.client.game.ItemManager;
import net.runelite.client.game.SkillIconManager;
import net.runelite.client.game.SpriteManager;
import net.runelite.client.ui.ColorScheme;
import net.runelite.client.ui.PluginPanel;
import net.runelite.client.ui.components.materialtabs.MaterialTab;
import net.runelite.client.ui.components.materialtabs.MaterialTabGroup;
/**
 * Plugin side panel hosting the skill calculator: a grid of per-skill tabs,
 * an input area, and the calculator body itself, stacked vertically.
 */
class SkillCalculatorPanel extends PluginPanel
{
	// The calculator body; swapped to a new skill when a tab is selected.
	private final SkillCalculator uiCalculator;
	// Provides the skill icons used on the tab buttons.
	private final SkillIconManager iconManager;
	// Tab strip with one tab per CalculatorType.
	private final MaterialTabGroup tabGroup;

	SkillCalculatorPanel(SkillIconManager iconManager, Client client, SpriteManager spriteManager, ItemManager itemManager)
	{
		super();
		getScrollPane().setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS);
		this.iconManager = iconManager;

		setBorder(new EmptyBorder(10, 10, 10, 10));
		setLayout(new GridBagLayout());

		// Single column; each add() below takes the next row via c.gridy++.
		GridBagConstraints c = new GridBagConstraints();
		c.fill = GridBagConstraints.HORIZONTAL;
		c.weightx = 1;
		c.gridx = 0;
		c.gridy = 0;

		// Six tabs per row, 7px gaps; rows grow as needed.
		tabGroup = new MaterialTabGroup();
		tabGroup.setLayout(new GridLayout(0, 6, 7, 7));

		addCalculatorButtons();

		final UICalculatorInputArea uiInput = new UICalculatorInputArea();
		uiInput.setBorder(new EmptyBorder(15, 0, 15, 0));
		uiInput.setBackground(ColorScheme.DARK_GRAY_COLOR);

		uiCalculator = new SkillCalculator(client, uiInput, spriteManager, itemManager);

		add(tabGroup, c);
		c.gridy++;
		add(uiInput, c);
		c.gridy++;
		add(uiCalculator, c);
		c.gridy++; // no-op after the last add(); kept for symmetry
	}

	// Creates one icon tab per calculator type; selecting a tab opens that
	// skill's calculator.
	private void addCalculatorButtons()
	{
		for (CalculatorType calculatorType : CalculatorType.values())
		{
			ImageIcon icon = new ImageIcon(iconManager.getSkillImage(calculatorType.getSkill(), true));
			MaterialTab tab = new MaterialTab(icon, tabGroup, null);
			tab.setOnSelectEvent(() ->
			{
				uiCalculator.openCalculator(calculatorType);
				return true; // accept the selection
			});

			tabGroup.addTab(tab);
		}
	}
} | bsd-2-clause |
clearthesky/apk-parser | src/main/java/net/dongliu/apk/parser/struct/zip/EOCD.java | 1985 | package net.dongliu.apk.parser.struct.zip;
/**
 * End of central directory record (EOCD) of a zip archive.
 *
 * <p>The zip format stores these fields as unsigned little-endian values;
 * they are kept here in Java's signed primitive types and widened with a
 * mask by the getters that expose them.
 */
public class EOCD {

    /** Magic number marking the start of the EOCD record. */
    public static final int SIGNATURE = 0x06054b50;

    /** Number of this disk (unsigned 16 bit). */
    private short diskNum;
    /** Disk where the central directory starts (unsigned 16 bit). */
    private short cdStartDisk;
    /** Number of central directory records on this disk (unsigned 16 bit). */
    private short cdRecordNum;
    /** Total number of central directory records (unsigned 16 bit). */
    private short totalCDRecordNum;
    /** Size of the central directory in bytes (unsigned 32 bit). */
    private int cdSize;
    /** Offset of the central directory, relative to the start of the archive (unsigned 32 bit). */
    private int cdStart;
    /** Length of the archive comment (unsigned 16 bit). */
    private short commentLen;

    /**
     * Returns the raw disk number field.
     *
     * <p>NOTE(review): unlike the other 16-bit accessors this returns the
     * signed {@code short} without widening/masking -- confirm whether
     * callers expect the unsigned value here as well.
     */
    public short getDiskNum() {
        return diskNum;
    }

    public void setDiskNum(int diskNum) {
        this.diskNum = (short) diskNum;
    }

    public int getCdStartDisk() {
        return 0xFFFF & cdStartDisk;
    }

    public void setCdStartDisk(int cdStartDisk) {
        this.cdStartDisk = (short) cdStartDisk;
    }

    public int getCdRecordNum() {
        return 0xFFFF & cdRecordNum;
    }

    public void setCdRecordNum(int cdRecordNum) {
        this.cdRecordNum = (short) cdRecordNum;
    }

    public int getTotalCDRecordNum() {
        return 0xFFFF & totalCDRecordNum;
    }

    public void setTotalCDRecordNum(int totalCDRecordNum) {
        this.totalCDRecordNum = (short) totalCDRecordNum;
    }

    public long getCdSize() {
        return 0xFFFFFFFFL & cdSize;
    }

    public void setCdSize(long cdSize) {
        this.cdSize = (int) cdSize;
    }

    public long getCdStart() {
        return 0xFFFFFFFFL & cdStart;
    }

    public void setCdStart(long cdStart) {
        this.cdStart = (int) cdStart;
    }

    public int getCommentLen() {
        return 0xFFFF & commentLen;
    }

    public void setCommentLen(int commentLen) {
        this.commentLen = (short) commentLen;
    }
}
| bsd-2-clause |
steffeli/inf5750-tracker-capture | dhis-web/dhis-web-maintenance/dhis-web-maintenance-datadictionary/src/main/java/org/hisp/dhis/dd/action/dataelement/GetDataElementListAction.java | 5884 | package org.hisp.dhis.dd.action.dataelement;
/*
* Copyright (c) 2004-2015, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hisp.dhis.user.UserSettingService.KEY_CURRENT_DOMAIN_TYPE;
import java.util.Collections;
import java.util.List;
import org.hisp.dhis.common.comparator.IdentifiableObjectNameComparator;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementDomain;
import org.hisp.dhis.dataelement.DataElementService;
import org.hisp.dhis.paging.ActionPagingSupport;
import org.hisp.dhis.user.UserSettingService;
/**
 * Struts action that pages through the data element list, optionally filtered
 * by a name search key or by data element domain type. The chosen domain type
 * is persisted as a user setting so it survives between requests.
 *
 * @author Torgeir Lorange Ostby
 */
public class GetDataElementListAction
    extends ActionPagingSupport<DataElement>
{
    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private DataElementService dataElementService;

    public void setDataElementService( DataElementService dataElementService )
    {
        this.dataElementService = dataElementService;
    }

    private UserSettingService userSettingService;

    public void setUserSettingService( UserSettingService userSettingService )
    {
        this.userSettingService = userSettingService;
    }

    // -------------------------------------------------------------------------
    // Output
    // -------------------------------------------------------------------------

    // Current page of data elements, sorted by name before rendering.
    private List<DataElement> dataElements;

    public List<DataElement> getDataElements()
    {
        return dataElements;
    }

    // -------------------------------------------------------------------------
    // Input & Output
    // -------------------------------------------------------------------------

    // Requested domain type filter: null (use stored setting), "all" (reset),
    // or a DataElementDomain value.
    private String domainType;

    public String getDomainType()
    {
        return domainType;
    }

    public void setDomainType( String domainType )
    {
        this.domainType = domainType;
    }

    // Optional name search key; takes precedence over the domain type filter.
    private String key;

    public String getKey()
    {
        return key;
    }

    public void setKey( String key )
    {
        this.key = key;
    }

    // -------------------------------------------------------------------------
    // Action implementation
    // -------------------------------------------------------------------------

    /**
     * Resolves the effective domain type (reading/updating the user setting as
     * a side effect), then loads one page of data elements filtered by name
     * key first, domain type second, or unfiltered.
     */
    @Override
    public String execute()
    {
        if ( domainType == null ) // None, get current domain type
        {
            domainType = (String) userSettingService.getUserSetting( KEY_CURRENT_DOMAIN_TYPE );
        }
        else if ( "all".equals( domainType ) ) // All, reset current domain type
        {
            userSettingService.saveUserSetting( KEY_CURRENT_DOMAIN_TYPE, null );
            domainType = null;
        }
        else // Specified, set current domain type
        {
            userSettingService.saveUserSetting( KEY_CURRENT_DOMAIN_TYPE, domainType );
        }

        // ---------------------------------------------------------------------
        // Criteria
        // ---------------------------------------------------------------------

        if ( isNotBlank( key ) ) // Filter on key only if set
        {
            this.paging = createPaging( dataElementService.getDataElementCountByName( key ) );

            dataElements = dataElementService.getDataElementsBetweenByName( key, paging.getStartPos(), paging.getPageSize() );
        }
        else if ( domainType != null )
        {
            DataElementDomain deDomainType = DataElementDomain.fromValue( domainType );

            this.paging = createPaging( dataElementService.getDataElementCountByDomainType( deDomainType ) );

            dataElements = dataElementService.getDataElementsByDomainType( deDomainType, paging.getStartPos(), paging.getPageSize() );
        }
        else
        {
            this.paging = createPaging( dataElementService.getDataElementCount() );

            dataElements = dataElementService.getDataElementsBetween( paging.getStartPos(), paging.getPageSize() );
        }

        Collections.sort( dataElements, new IdentifiableObjectNameComparator() );

        return SUCCESS;
    }
} | bsd-3-clause |
codemercenary/JMatIO | src/main/java/ca/mjdsystems/jmatio/io/MLObjectPlaceholder.java | 863 | /*
* Copyright 2014 Matthew Dawson <matthew@mjdsystems.ca>
*/
package ca.mjdsystems.jmatio.io;
import ca.mjdsystems.jmatio.types.MLArray;
import java.util.Arrays;
/**
 * Placeholder for a MAT-file object whose referenced data has not been
 * resolved yet; carries the class name, the raw information block and the
 * ids needed to look the object up later.
 *
 * @author Matthew Dawson <matthew@mjdsystems.ca>
 */
class MLObjectPlaceholder extends MLArray {
    final String className;
    final int[][] information;
    final int[] objectIds;
    final int classId;

    MLObjectPlaceholder(String name, String className, int[][] information)
    {
        // Rows 2 and 3 of the information block hold the dimensions; -1 is
        // used as the type code since the placeholder has no real MAT type.
        super( name, new int[]{information[2][0], information[3][0]}, -1, 0 );

        this.className = className;
        this.information = information;

        // Rows 4 through (length - 2) carry one referenced-object id each;
        // the very last row carries the class id.
        final int idCount = information.length - 5;
        this.objectIds = new int[idCount];
        for (int idx = 0; idx < idCount; ++idx) {
            this.objectIds[idx] = information[idx + 4][0];
        }
        this.classId = information[information.length - 1][0];
    }
}
| bsd-3-clause |
ngs-doo/dsl-json | library/src/test/java/com/dslplatform/json/generated/types/Money/OneListOfOneMoniesDefaultValueTurtle.java | 3268 | package com.dslplatform.json.generated.types.Money;
import com.dslplatform.json.generated.types.StaticJson;
import com.dslplatform.json.generated.ocd.javaasserts.MoneyAsserts;
import java.io.IOException;
public class OneListOfOneMoniesDefaultValueTurtle {
	private static StaticJson.JsonSerialization jsonSerialization;

	@org.junit.BeforeClass
	public static void initializeJsonSerialization() throws IOException {
		jsonSerialization = StaticJson.getSerialization();
	}

	/** Serializes the given list to JSON and deserializes it straight back. */
	private static java.util.List<java.math.BigDecimal> roundTrip(
			final java.util.List<java.math.BigDecimal> value) throws IOException {
		final StaticJson.Bytes serialized = jsonSerialization.serialize(value);
		return jsonSerialization.deserializeList(java.math.BigDecimal.class, serialized.content, serialized.length);
	}

	@org.junit.Test
	public void testDefaultValueEquality() throws IOException {
		// Empty list round-trips unchanged.
		final java.util.List<java.math.BigDecimal> defaultValue = new java.util.ArrayList<java.math.BigDecimal>(0);
		MoneyAsserts.assertOneListOfOneEquals(defaultValue, roundTrip(defaultValue));
	}

	@org.junit.Test
	public void testBorderValue1Equality() throws IOException {
		// Single zero money value (scale 2).
		final java.util.List<java.math.BigDecimal> borderValue1 = new java.util.ArrayList<java.math.BigDecimal>(java.util.Arrays.asList(java.math.BigDecimal.ZERO.setScale(2)));
		MoneyAsserts.assertOneListOfOneEquals(borderValue1, roundTrip(borderValue1));
	}

	@org.junit.Test
	public void testBorderValue2Equality() throws IOException {
		// Single very large value in exponent notation.
		final java.util.List<java.math.BigDecimal> borderValue2 = new java.util.ArrayList<java.math.BigDecimal>(java.util.Arrays.asList(new java.math.BigDecimal("1E19")));
		MoneyAsserts.assertOneListOfOneEquals(borderValue2, roundTrip(borderValue2));
	}

	@org.junit.Test
	public void testBorderValue3Equality() throws IOException {
		// Mixed list: zero, one, rounded pi, a tiny negative and a huge value.
		final java.util.List<java.math.BigDecimal> borderValue3 = new java.util.ArrayList<java.math.BigDecimal>(java.util.Arrays.asList(java.math.BigDecimal.ZERO.setScale(2), java.math.BigDecimal.ONE, new java.math.BigDecimal("3.1415926535897932384626433832795028841971693993751058209749445923078164062862089986280348253421170679").setScale(2, java.math.BigDecimal.ROUND_HALF_UP), new java.math.BigDecimal("-1E-2"), new java.math.BigDecimal("1E19")));
		MoneyAsserts.assertOneListOfOneEquals(borderValue3, roundTrip(borderValue3));
	}
}
| bsd-3-clause |
ezegarra/microbrowser | src/prefuse/data/io/CSVTableReader.java | 4951 | package prefuse.data.io;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import prefuse.data.parser.DataParseException;
import prefuse.data.parser.ParserFactory;
/**
* TableReader for Comma Separated Value (CSV) files. CSV files list
* each row of a table on a line, separating each data column by a line.
* Typically the first line of the file is a header row indicating the
* names of each data column.
*
* For a more in-depth description of the CSV format, please see this
* <a href="http://www.creativyst.com/Doc/Articles/CSV/CSV01.htm">
* CSV reference web page</a>.
*
* @author <a href="http://jheer.org">jeffrey heer</a>
*/
public class CSVTableReader extends AbstractTextTableReader {
    /**
     * Create a new CSVTableReader.
     */
    public CSVTableReader() {
        super();
    }
    /**
     * Create a new CSVTableReader.
     * @param parserFactory the ParserFactory to use for parsing text strings
     * into table values.
     */
    public CSVTableReader(ParserFactory parserFactory) {
        super(parserFactory);
    }
    /**
     * Parses CSV input, reporting each cell value to the given listener.
     * <p>
     * The parser is a small state machine: {@code inRecord} is true while a
     * value is being accumulated, and {@code inQuote} tracks quoting
     * (0 = unquoted, 1 = inside a quoted value, 2 = quoted value just closed).
     * Doubled quotes ({@code ""}) inside a quoted value are unescaped to a
     * single quote, and quoted values may span multiple physical lines, in
     * which case the embedded line break is preserved.
     *
     * @param is the stream to read CSV data from
     * @param trl the listener notified of each (line, column, value) triple
     * @throws IOException if reading from the stream fails
     * @throws DataParseException declared for subclasses/listeners; not thrown directly here
     * @see prefuse.data.io.AbstractTextTableReader#read(java.io.InputStream, prefuse.data.io.TableReadListener)
     */
    public void read(InputStream is, TableReadListener trl)
        throws IOException, DataParseException
    {
        String line;
        // accumulates the characters of the value currently being parsed
        StringBuilder sbuf = new StringBuilder();
        boolean inRecord = false;
        int inQuote = 0;
        int lineno = 0;
        int col = 0;
        BufferedReader br = new BufferedReader(new InputStreamReader(is));
        while ( (line=br.readLine()) != null ) {
            // increment the line number
            ++lineno;
            // extract the character array for quicker processing
            char[] c = line.toCharArray();
            int last = c.length-1;
            // iterate through current line
            for ( int i=0; i<=last; ++i ) {
                if ( !inRecord ) {
                    // not currently processing a record
                    if ( Character.isWhitespace(c[i]) )
                    {
                        // skip leading whitespace before a value
                        continue;
                    }
                    else if ( c[i] == '\"' )
                    {
                        // opening quote starts a quoted value
                        inRecord = true;
                        inQuote = 1;
                    }
                    else if ( c[i] == ',' )
                    {
                        // empty value terminated by a separator
                        String s = sbuf.toString().trim();
                        trl.readValue(lineno, ++col, s);
                        sbuf.delete(0, sbuf.length());
                    }
                    else
                    {
                        // first character of an unquoted value
                        inRecord = true;
                        sbuf.append(c[i]);
                    }
                } else {
                    // in the midst of a record
                    if ( inQuote == 1 ) {
                        if ( c[i]=='\"' && (i==last || c[i+1] != '\"') )
                        {
                            // end of quotation
                            inQuote = 2;
                        }
                        else if ( c[i]=='\"' )
                        {
                            // doubled quote is an escaped quote; skip one ahead
                            sbuf.append(c[i++]);
                        }
                        else
                        {
                            sbuf.append(c[i]);
                        }
                    } else {
                        if ( Character.isWhitespace(c[i]) )
                        {
                            sbuf.append(c[i]);
                        }
                        else if ( c[i] != ',' && inQuote == 2 )
                        {
                            // non-separator content after a closing quote is malformed
                            throw new IllegalStateException(
                                    "Invalid data format. " +
                                    "Error at line " + lineno + ", col " + i);
                        }
                        else if ( c[i] != ',' )
                        {
                            sbuf.append(c[i]);
                        }
                        else
                        {
                            // separator ends the current value
                            String s = sbuf.toString().trim();
                            trl.readValue(lineno, ++col, s);
                            sbuf.delete(0, sbuf.length());
                            inQuote = 0;
                            inRecord = false;
                        }
                    }
                }
            }
            if ( inQuote == 1 ) {
                // BUGFIX: a quoted value continues onto the next physical line.
                // BufferedReader.readLine() strips the line terminator, so
                // re-insert it to preserve the embedded line break (RFC 4180).
                sbuf.append('\n');
            } else if ( inRecord || col > 0 || c.length > 0 ) {
                // flush the last value on the line
                // BUGFIX: completely blank lines are skipped; previously they
                // emitted a spurious one-cell row with an empty value.
                String s = sbuf.toString().trim();
                trl.readValue(lineno, ++col, s);
                sbuf.delete(0, sbuf.length());
                inQuote = 0;
                inRecord = false;
            }
            if ( !inRecord && col > 0 ) {
                // reset the column counter for the next row
                col = 0;
            }
        }
    }
} // end of class CSVTableReader
| bsd-3-clause |
jnehlmeier/threetenbp | src/main/java/org/threeten/bp/temporal/ChronoField.java | 29480 | /*
* Copyright (c) 2007-present, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.threeten.bp.temporal;
import static org.threeten.bp.temporal.ChronoUnit.DAYS;
import static org.threeten.bp.temporal.ChronoUnit.ERAS;
import static org.threeten.bp.temporal.ChronoUnit.FOREVER;
import static org.threeten.bp.temporal.ChronoUnit.HALF_DAYS;
import static org.threeten.bp.temporal.ChronoUnit.HOURS;
import static org.threeten.bp.temporal.ChronoUnit.MICROS;
import static org.threeten.bp.temporal.ChronoUnit.MILLIS;
import static org.threeten.bp.temporal.ChronoUnit.MINUTES;
import static org.threeten.bp.temporal.ChronoUnit.MONTHS;
import static org.threeten.bp.temporal.ChronoUnit.NANOS;
import static org.threeten.bp.temporal.ChronoUnit.SECONDS;
import static org.threeten.bp.temporal.ChronoUnit.WEEKS;
import static org.threeten.bp.temporal.ChronoUnit.YEARS;
import java.util.Locale;
import java.util.Map;
import org.threeten.bp.DayOfWeek;
import org.threeten.bp.Instant;
import org.threeten.bp.Year;
import org.threeten.bp.ZoneOffset;
import org.threeten.bp.chrono.ChronoLocalDate;
import org.threeten.bp.chrono.Chronology;
import org.threeten.bp.format.ResolverStyle;
import org.threeten.bp.jdk8.Jdk8Methods;
/**
* A standard set of fields.
* <p>
* This set of fields provide field-based access to manipulate a date, time or date-time.
* The standard set of fields can be extended by implementing {@link TemporalField}.
* <p>
* These fields are intended to be applicable in multiple calendar systems.
* For example, most non-ISO calendar systems define dates as a year, month and day,
* just with slightly different rules.
* The documentation of each field explains how it operates.
*
* <h3>Specification for implementors</h3>
* This is a final, immutable and thread-safe enum.
*/
public enum ChronoField implements TemporalField {
    /**
     * The nano-of-second.
     * <p>
     * This counts the nanosecond within the second, from 0 to 999,999,999.
     * This field has the same meaning for all calendar systems.
     * <p>
     * This field is used to represent the nano-of-second handling any fraction of the second.
     * Implementations of {@code TemporalAccessor} should provide a value for this field if
     * they can return a value for {@link #SECOND_OF_MINUTE}, {@link #SECOND_OF_DAY} or
     * {@link #INSTANT_SECONDS} filling unknown precision with zero.
     * <p>
     * When this field is used for setting a value, it should set as much precision as the
     * object stores, using integer division to remove excess precision.
     * For example, if the {@code TemporalAccessor} stores time to millisecond precision,
     * then the nano-of-second must be divided by 1,000,000 before replacing the milli-of-second.
     */
    NANO_OF_SECOND("NanoOfSecond", NANOS, SECONDS, ValueRange.of(0, 999999999)),
    /**
     * The nano-of-day.
     * <p>
     * This counts the nanosecond within the day, from 0 to (24 * 60 * 60 * 1,000,000,000) - 1.
     * This field has the same meaning for all calendar systems.
     * <p>
     * This field is used to represent the nano-of-day handling any fraction of the second.
     * Implementations of {@code TemporalAccessor} should provide a value for this field if
     * they can return a value for {@link #SECOND_OF_DAY} filling unknown precision with zero.
     */
    NANO_OF_DAY("NanoOfDay", NANOS, DAYS, ValueRange.of(0, 86400L * 1000000000L - 1)),
    /**
     * The micro-of-second.
     * <p>
     * This counts the microsecond within the second, from 0 to 999,999.
     * This field has the same meaning for all calendar systems.
     * <p>
     * This field is used to represent the micro-of-second handling any fraction of the second.
     * Implementations of {@code TemporalAccessor} should provide a value for this field if
     * they can return a value for {@link #SECOND_OF_MINUTE}, {@link #SECOND_OF_DAY} or
     * {@link #INSTANT_SECONDS} filling unknown precision with zero.
     * <p>
     * When this field is used for setting a value, it should behave in the same way as
     * setting {@link #NANO_OF_SECOND} with the value multiplied by 1,000.
     */
    MICRO_OF_SECOND("MicroOfSecond", MICROS, SECONDS, ValueRange.of(0, 999999)),
    /**
     * The micro-of-day.
     * <p>
     * This counts the microsecond within the day, from 0 to (24 * 60 * 60 * 1,000,000) - 1.
     * This field has the same meaning for all calendar systems.
     * <p>
     * This field is used to represent the micro-of-day handling any fraction of the second.
     * Implementations of {@code TemporalAccessor} should provide a value for this field if
     * they can return a value for {@link #SECOND_OF_DAY} filling unknown precision with zero.
     * <p>
     * When this field is used for setting a value, it should behave in the same way as
     * setting {@link #NANO_OF_DAY} with the value multiplied by 1,000.
     */
    MICRO_OF_DAY("MicroOfDay", MICROS, DAYS, ValueRange.of(0, 86400L * 1000000L - 1)),
    /**
     * The milli-of-second.
     * <p>
     * This counts the millisecond within the second, from 0 to 999.
     * This field has the same meaning for all calendar systems.
     * <p>
     * This field is used to represent the milli-of-second handling any fraction of the second.
     * Implementations of {@code TemporalAccessor} should provide a value for this field if
     * they can return a value for {@link #SECOND_OF_MINUTE}, {@link #SECOND_OF_DAY} or
     * {@link #INSTANT_SECONDS} filling unknown precision with zero.
     * <p>
     * When this field is used for setting a value, it should behave in the same way as
     * setting {@link #NANO_OF_SECOND} with the value multiplied by 1,000,000.
     */
    MILLI_OF_SECOND("MilliOfSecond", MILLIS, SECONDS, ValueRange.of(0, 999)),
    /**
     * The milli-of-day.
     * <p>
     * This counts the millisecond within the day, from 0 to (24 * 60 * 60 * 1,000) - 1.
     * This field has the same meaning for all calendar systems.
     * <p>
     * This field is used to represent the milli-of-day handling any fraction of the second.
     * Implementations of {@code TemporalAccessor} should provide a value for this field if
     * they can return a value for {@link #SECOND_OF_DAY} filling unknown precision with zero.
     * <p>
     * When this field is used for setting a value, it should behave in the same way as
     * setting {@link #NANO_OF_DAY} with the value multiplied by 1,000,000.
     */
    MILLI_OF_DAY("MilliOfDay", MILLIS, DAYS, ValueRange.of(0, 86400L * 1000L - 1)),
    /**
     * The second-of-minute.
     * <p>
     * This counts the second within the minute, from 0 to 59.
     * This field has the same meaning for all calendar systems.
     */
    SECOND_OF_MINUTE("SecondOfMinute", SECONDS, MINUTES, ValueRange.of(0, 59)),
    /**
     * The second-of-day.
     * <p>
     * This counts the second within the day, from 0 to (24 * 60 * 60) - 1.
     * This field has the same meaning for all calendar systems.
     */
    SECOND_OF_DAY("SecondOfDay", SECONDS, DAYS, ValueRange.of(0, 86400L - 1)),
    /**
     * The minute-of-hour.
     * <p>
     * This counts the minute within the hour, from 0 to 59.
     * This field has the same meaning for all calendar systems.
     */
    MINUTE_OF_HOUR("MinuteOfHour", MINUTES, HOURS, ValueRange.of(0, 59)),
    /**
     * The minute-of-day.
     * <p>
     * This counts the minute within the day, from 0 to (24 * 60) - 1.
     * This field has the same meaning for all calendar systems.
     */
    MINUTE_OF_DAY("MinuteOfDay", MINUTES, DAYS, ValueRange.of(0, (24 * 60) - 1)),
    /**
     * The hour-of-am-pm.
     * <p>
     * This counts the hour within the AM/PM, from 0 to 11.
     * This is the hour that would be observed on a standard 12-hour digital clock.
     * This field has the same meaning for all calendar systems.
     */
    HOUR_OF_AMPM("HourOfAmPm", HOURS, HALF_DAYS, ValueRange.of(0, 11)),
    /**
     * The clock-hour-of-am-pm.
     * <p>
     * This counts the hour within the AM/PM, from 1 to 12.
     * This is the hour that would be observed on a standard 12-hour analog wall clock.
     * This field has the same meaning for all calendar systems.
     */
    CLOCK_HOUR_OF_AMPM("ClockHourOfAmPm", HOURS, HALF_DAYS, ValueRange.of(1, 12)),
    /**
     * The hour-of-day.
     * <p>
     * This counts the hour within the day, from 0 to 23.
     * This is the hour that would be observed on a standard 24-hour digital clock.
     * This field has the same meaning for all calendar systems.
     */
    HOUR_OF_DAY("HourOfDay", HOURS, DAYS, ValueRange.of(0, 23)),
    /**
     * The clock-hour-of-day.
     * <p>
     * This counts the hour within the AM/PM, from 1 to 24.
     * This is the hour that would be observed on a 24-hour analog wall clock.
     * This field has the same meaning for all calendar systems.
     */
    CLOCK_HOUR_OF_DAY("ClockHourOfDay", HOURS, DAYS, ValueRange.of(1, 24)),
    /**
     * The am-pm-of-day.
     * <p>
     * This counts the AM/PM within the day, from 0 (AM) to 1 (PM).
     * This field has the same meaning for all calendar systems.
     */
    AMPM_OF_DAY("AmPmOfDay", HALF_DAYS, DAYS, ValueRange.of(0, 1)),
    /**
     * The day-of-week, such as Tuesday.
     * <p>
     * This represents the standard concept of the day of the week.
     * In the default ISO calendar system, this has values from Monday (1) to Sunday (7).
     * The {@link DayOfWeek} class can be used to interpret the result.
     * <p>
     * Most non-ISO calendar systems also define a seven day week that aligns with ISO.
     * Those calendar systems must also use the same numbering system, from Monday (1) to
     * Sunday (7), which allows {@code DayOfWeek} to be used.
     * <p>
     * Calendar systems that do not have a standard seven day week should implement this field
     * if they have a similar concept of named or numbered days within a period similar
     * to a week. It is recommended that the numbering starts from 1.
     */
    DAY_OF_WEEK("DayOfWeek", DAYS, WEEKS, ValueRange.of(1, 7)),
    /**
     * The aligned day-of-week within a month.
     * <p>
     * This represents concept of the count of days within the period of a week
     * where the weeks are aligned to the start of the month.
     * This field is typically used with {@link #ALIGNED_WEEK_OF_MONTH}.
     * <p>
     * For example, in a calendar systems with a seven day week, the first aligned-week-of-month
     * starts on day-of-month 1, the second aligned-week starts on day-of-month 8, and so on.
     * Within each of these aligned-weeks, the days are numbered from 1 to 7 and returned
     * as the value of this field.
     * As such, day-of-month 1 to 7 will have aligned-day-of-week values from 1 to 7.
     * And day-of-month 8 to 14 will repeat this with aligned-day-of-week values from 1 to 7.
     * <p>
     * Calendar systems that do not have a seven day week should typically implement this
     * field in the same way, but using the alternate week length.
     */
    ALIGNED_DAY_OF_WEEK_IN_MONTH("AlignedDayOfWeekInMonth", DAYS, WEEKS, ValueRange.of(1, 7)),
    /**
     * The aligned day-of-week within a year.
     * <p>
     * This represents concept of the count of days within the period of a week
     * where the weeks are aligned to the start of the year.
     * This field is typically used with {@link #ALIGNED_WEEK_OF_YEAR}.
     * <p>
     * For example, in a calendar systems with a seven day week, the first aligned-week-of-year
     * starts on day-of-year 1, the second aligned-week starts on day-of-year 8, and so on.
     * Within each of these aligned-weeks, the days are numbered from 1 to 7 and returned
     * as the value of this field.
     * As such, day-of-year 1 to 7 will have aligned-day-of-week values from 1 to 7.
     * And day-of-year 8 to 14 will repeat this with aligned-day-of-week values from 1 to 7.
     * <p>
     * Calendar systems that do not have a seven day week should typically implement this
     * field in the same way, but using the alternate week length.
     */
    ALIGNED_DAY_OF_WEEK_IN_YEAR("AlignedDayOfWeekInYear", DAYS, WEEKS, ValueRange.of(1, 7)),
    /**
     * The day-of-month.
     * <p>
     * This represents the concept of the day within the month.
     * In the default ISO calendar system, this has values from 1 to 31 in most months.
     * April, June, September, November have days from 1 to 30, while February has days
     * from 1 to 28, or 29 in a leap year.
     * <p>
     * Non-ISO calendar systems should implement this field using the most recognized
     * day-of-month values for users of the calendar system.
     * Normally, this is a count of days from 1 to the length of the month.
     */
    DAY_OF_MONTH("DayOfMonth", DAYS, MONTHS, ValueRange.of(1, 28, 31)),
    /**
     * The day-of-year.
     * <p>
     * This represents the concept of the day within the year.
     * In the default ISO calendar system, this has values from 1 to 365 in standard
     * years and 1 to 366 in leap years.
     * <p>
     * Non-ISO calendar systems should implement this field using the most recognized
     * day-of-year values for users of the calendar system.
     * Normally, this is a count of days from 1 to the length of the year.
     */
    DAY_OF_YEAR("DayOfYear", DAYS, YEARS, ValueRange.of(1, 365, 366)),
    /**
     * The epoch-day, based on the Java epoch of 1970-01-01 (ISO).
     * <p>
     * This field is the sequential count of days where 1970-01-01 (ISO) is zero.
     * Note that this uses the <i>local</i> time-line, ignoring offset and time-zone.
     * <p>
     * This field is strictly defined to have the same meaning in all calendar systems.
     * This is necessary to ensure interoperation between calendars.
     */
    EPOCH_DAY("EpochDay", DAYS, FOREVER, ValueRange.of((long) (Year.MIN_VALUE * 365.25), (long) (Year.MAX_VALUE * 365.25))),
    /**
     * The aligned week within a month.
     * <p>
     * This represents concept of the count of weeks within the period of a month
     * where the weeks are aligned to the start of the month.
     * This field is typically used with {@link #ALIGNED_DAY_OF_WEEK_IN_MONTH}.
     * <p>
     * For example, in a calendar systems with a seven day week, the first aligned-week-of-month
     * starts on day-of-month 1, the second aligned-week starts on day-of-month 8, and so on.
     * Thus, day-of-month values 1 to 7 are in aligned-week 1, while day-of-month values
     * 8 to 14 are in aligned-week 2, and so on.
     * <p>
     * Calendar systems that do not have a seven day week should typically implement this
     * field in the same way, but using the alternate week length.
     */
    ALIGNED_WEEK_OF_MONTH("AlignedWeekOfMonth", WEEKS, MONTHS, ValueRange.of(1, 4, 5)),
    /**
     * The aligned week within a year.
     * <p>
     * This represents concept of the count of weeks within the period of a year
     * where the weeks are aligned to the start of the year.
     * This field is typically used with {@link #ALIGNED_DAY_OF_WEEK_IN_YEAR}.
     * <p>
     * For example, in a calendar systems with a seven day week, the first aligned-week-of-year
     * starts on day-of-year 1, the second aligned-week starts on day-of-year 8, and so on.
     * Thus, day-of-year values 1 to 7 are in aligned-week 1, while day-of-year values
     * 8 to 14 are in aligned-week 2, and so on.
     * <p>
     * Calendar systems that do not have a seven day week should typically implement this
     * field in the same way, but using the alternate week length.
     */
    ALIGNED_WEEK_OF_YEAR("AlignedWeekOfYear", WEEKS, YEARS, ValueRange.of(1, 53)),
    /**
     * The month-of-year, such as March.
     * <p>
     * This represents the concept of the month within the year.
     * In the default ISO calendar system, this has values from January (1) to December (12).
     * <p>
     * Non-ISO calendar systems should implement this field using the most recognized
     * month-of-year values for users of the calendar system.
     * Normally, this is a count of months starting from 1.
     */
    MONTH_OF_YEAR("MonthOfYear", MONTHS, YEARS, ValueRange.of(1, 12)),
    /**
     * The proleptic-month, which counts months sequentially from year 0.
     * <p>
     * The first month in year zero has the value zero.
     * The value increase for later months and decrease for earlier ones.
     * Note that this uses the <i>local</i> time-line, ignoring offset and time-zone.
     * <p>
     * This field is defined to have the same meaning in all calendar systems.
     * It is simply a count of months from whatever the calendar defines as year 0.
     */
    PROLEPTIC_MONTH("ProlepticMonth", MONTHS, FOREVER, ValueRange.of(Year.MIN_VALUE * 12L, Year.MAX_VALUE * 12L + 11)),
    /**
     * The year within the era.
     * <p>
     * This represents the concept of the year within the era.
     * This field is typically used with {@link #ERA}.
     * <p>
     * The standard mental model for a date is based on three concepts - year, month and day.
     * These map onto the {@code YEAR}, {@code MONTH_OF_YEAR} and {@code DAY_OF_MONTH} fields.
     * Note that there is no reference to eras.
     * The full model for a date requires four concepts - era, year, month and day. These map onto
     * the {@code ERA}, {@code YEAR_OF_ERA}, {@code MONTH_OF_YEAR} and {@code DAY_OF_MONTH} fields.
     * Whether this field or {@code YEAR} is used depends on which mental model is being used.
     * See {@link ChronoLocalDate} for more discussion on this topic.
     * <p>
     * In the default ISO calendar system, there are two eras defined, 'BCE' and 'CE'.
     * The era 'CE' is the one currently in use and year-of-era runs from 1 to the maximum value.
     * The era 'BCE' is the previous era, and the year-of-era runs backwards.
     * <p>
     * For example, subtracting a year each time yield the following:<br>
     * - year-proleptic 2 = 'CE' year-of-era 2<br>
     * - year-proleptic 1 = 'CE' year-of-era 1<br>
     * - year-proleptic 0 = 'BCE' year-of-era 1<br>
     * - year-proleptic -1 = 'BCE' year-of-era 2<br>
     * <p>
     * Note that the ISO-8601 standard does not actually define eras.
     * Note also that the ISO eras do not align with the well-known AD/BC eras due to the
     * change between the Julian and Gregorian calendar systems.
     * <p>
     * Non-ISO calendar systems should implement this field using the most recognized
     * year-of-era value for users of the calendar system.
     * Since most calendar systems have only two eras, the year-of-era numbering approach
     * will typically be the same as that used by the ISO calendar system.
     * The year-of-era value should typically always be positive, however this is not required.
     */
    YEAR_OF_ERA("YearOfEra", YEARS, FOREVER, ValueRange.of(1, Year.MAX_VALUE, Year.MAX_VALUE + 1)),
    /**
     * The proleptic year, such as 2012.
     * <p>
     * This represents the concept of the year, counting sequentially and using negative numbers.
     * The proleptic year is not interpreted in terms of the era.
     * See {@link #YEAR_OF_ERA} for an example showing the mapping from proleptic year to year-of-era.
     * <p>
     * The standard mental model for a date is based on three concepts - year, month and day.
     * These map onto the {@code YEAR}, {@code MONTH_OF_YEAR} and {@code DAY_OF_MONTH} fields.
     * Note that there is no reference to eras.
     * The full model for a date requires four concepts - era, year, month and day. These map onto
     * the {@code ERA}, {@code YEAR_OF_ERA}, {@code MONTH_OF_YEAR} and {@code DAY_OF_MONTH} fields.
     * Whether this field or {@code YEAR_OF_ERA} is used depends on which mental model is being used.
     * See {@link ChronoLocalDate} for more discussion on this topic.
     * <p>
     * Non-ISO calendar systems should implement this field as follows.
     * If the calendar system has only two eras, before and after a fixed date, then the
     * proleptic-year value must be the same as the year-of-era value for the later era,
     * and increasingly negative for the earlier era.
     * If the calendar system has more than two eras, then the proleptic-year value may be
     * defined with any appropriate value, although defining it to be the same as ISO may be
     * the best option.
     */
    YEAR("Year", YEARS, FOREVER, ValueRange.of(Year.MIN_VALUE, Year.MAX_VALUE)),
    /**
     * The era.
     * <p>
     * This represents the concept of the era, which is the largest division of the time-line.
     * This field is typically used with {@link #YEAR_OF_ERA}.
     * <p>
     * In the default ISO calendar system, there are two eras defined, 'BCE' and 'CE'.
     * The era 'CE' is the one currently in use and year-of-era runs from 1 to the maximum value.
     * The era 'BCE' is the previous era, and the year-of-era runs backwards.
     * See {@link #YEAR_OF_ERA} for a full example.
     * <p>
     * Non-ISO calendar systems should implement this field to define eras.
     * The value of the era that was active on 1970-01-01 (ISO) must be assigned the value 1.
     * Earlier eras must have sequentially smaller values.
     * Later eras must have sequentially larger values,
     */
    ERA("Era", ERAS, FOREVER, ValueRange.of(0, 1)),
    /**
     * The instant epoch-seconds.
     * <p>
     * This represents the concept of the sequential count of seconds where
     * 1970-01-01T00:00Z (ISO) is zero.
     * This field may be used with {@link #NANO_OF_DAY} to represent the fraction of the day.
     * <p>
     * An {@link Instant} represents an instantaneous point on the time-line.
     * On their own they have no elements which allow a local date-time to be obtained.
     * Only when paired with an offset or time-zone can the local date or time be found.
     * This field allows the seconds part of the instant to be queried.
     * <p>
     * This field is strictly defined to have the same meaning in all calendar systems.
     * This is necessary to ensure interoperation between calendars.
     */
    INSTANT_SECONDS("InstantSeconds", SECONDS, FOREVER, ValueRange.of(Long.MIN_VALUE, Long.MAX_VALUE)),
    /**
     * The offset from UTC/Greenwich.
     * <p>
     * This represents the concept of the offset in seconds of local time from UTC/Greenwich.
     * <p>
     * A {@link ZoneOffset} represents the period of time that local time differs from UTC/Greenwich.
     * This is usually a fixed number of hours and minutes.
     * It is equivalent to the {@link ZoneOffset#getTotalSeconds() total amount} of the offset in seconds.
     * For example, during the winter Paris has an offset of {@code +01:00}, which is 3600 seconds.
     * <p>
     * This field is strictly defined to have the same meaning in all calendar systems.
     * This is necessary to ensure interoperation between calendars.
     */
    OFFSET_SECONDS("OffsetSeconds", SECONDS, FOREVER, ValueRange.of(-18 * 3600, 18 * 3600));
    // The descriptive name of the field, returned by toString().
    private final String name;
    // The unit the field is measured in, e.g. SECONDS for SECOND_OF_MINUTE.
    private final TemporalUnit baseUnit;
    // The unit the field is bound by, e.g. MINUTES for SECOND_OF_MINUTE.
    private final TemporalUnit rangeUnit;
    // The ISO-8601 outer range of valid values for the field.
    private final ValueRange range;
    // Constructs a field definition; called only by the enum constants above.
    private ChronoField(String name, TemporalUnit baseUnit, TemporalUnit rangeUnit, ValueRange range) {
        this.name = name;
        this.baseUnit = baseUnit;
        this.rangeUnit = rangeUnit;
        this.range = range;
    }
    //-----------------------------------------------------------------------
    // Returns the unit that the field is measured in.
    @Override
    public TemporalUnit getBaseUnit() {
        return baseUnit;
    }
    // Returns the unit that the field is bound by.
    @Override
    public TemporalUnit getRangeUnit() {
        return rangeUnit;
    }
    /**
     * Gets the range of valid values for the field.
     * <p>
     * All fields can be expressed as a {@code long} integer.
     * This method returns an object that describes the valid range for that value.
     * <p>
     * This method returns the range of the field in the ISO-8601 calendar system.
     * This range may be incorrect for other calendar systems.
     * Use {@link Chronology#range(ChronoField)} to access the correct range
     * for a different calendar system.
     * <p>
     * Note that the result only describes the minimum and maximum valid values
     * and it is important not to read too much into them. For example, there
     * could be values within the range that are invalid for the field.
     *
     * @return the range of valid values for the field, not null
     */
    @Override
    public ValueRange range() {
        return range;
    }
    //-----------------------------------------------------------------------
    /**
     * Checks if this field represents a component of a date.
     *
     * @return true if it is a component of a date
     */
    public boolean isDateBased() {
        // NOTE: relies on the declaration order of the constants: all
        // date-based fields are declared from DAY_OF_WEEK through ERA.
        return ordinal() >= DAY_OF_WEEK.ordinal() && ordinal() <= ERA.ordinal();
    }
    /**
     * Checks if this field represents a component of a time.
     *
     * @return true if it is a component of a time
     */
    public boolean isTimeBased() {
        // NOTE: relies on the declaration order of the constants: all
        // time-based fields are declared before DAY_OF_WEEK.
        return ordinal() < DAY_OF_WEEK.ordinal();
    }
    //-----------------------------------------------------------------------
    /**
     * Checks that the specified value is valid for this field.
     * <p>
     * This validates that the value is within the outer range of valid values
     * returned by {@link #range()}.
     * <p>
     * This method checks against the range of the field in the ISO-8601 calendar system.
     * This range may be incorrect for other calendar systems.
     * Use {@link Chronology#range(ChronoField)} to access the correct range
     * for a different calendar system.
     *
     * @param value  the value to check
     * @return the value that was passed in
     */
    public long checkValidValue(long value) {
        return range().checkValidValue(value, this);
    }
    /**
     * Checks that the specified value is valid and fits in an {@code int}.
     * <p>
     * This validates that the value is within the outer range of valid values
     * returned by {@link #range()}.
     * It also checks that all valid values are within the bounds of an {@code int}.
     * <p>
     * This method checks against the range of the field in the ISO-8601 calendar system.
     * This range may be incorrect for other calendar systems.
     * Use {@link Chronology#range(ChronoField)} to access the correct range
     * for a different calendar system.
     *
     * @param value  the value to check
     * @return the value that was passed in
     */
    public int checkValidIntValue(long value) {
        return range().checkValidIntValue(value, this);
    }
    //-----------------------------------------------------------------------
    // Delegates to the temporal object, which knows which fields it supports.
    @Override
    public boolean isSupportedBy(TemporalAccessor temporal) {
        return temporal.isSupported(this);
    }
    // Delegates to the temporal object, which may refine the ISO range.
    @Override
    public ValueRange rangeRefinedBy(TemporalAccessor temporal) {
        return temporal.range(this);
    }
    // Delegates to the temporal object to obtain the value of this field.
    @Override
    public long getFrom(TemporalAccessor temporal) {
        return temporal.getLong(this);
    }
    // Delegates to the temporal object; the cast is safe because
    // Temporal.with(TemporalField, long) is specified to return its own type.
    @SuppressWarnings("unchecked")
    @Override
    public <R extends Temporal> R adjustInto(R temporal, long newValue) {
        return (R) temporal.with(this, newValue);
    }
    // No localized names are available here; the locale is validated and the
    // field name is returned for all locales.
    @Override
    public String getDisplayName(Locale locale) {
        Jdk8Methods.requireNonNull(locale, "locale");
        return toString();
    }
    //-----------------------------------------------------------------------
    @Override
    public TemporalAccessor resolve(Map<TemporalField, Long> fieldValues,
                    TemporalAccessor partialTemporal, ResolverStyle resolverStyle) {
        return null; // resolve implemented in builder
    }
    //-----------------------------------------------------------------------
    // Returns the descriptive name of the field, e.g. "NanoOfSecond".
    @Override
    public String toString() {
        return name;
    }
}
| bsd-3-clause |
hispindia/dhis2-Core | dhis-2/dhis-support/dhis-support-system/src/main/java/org/hisp/dhis/system/help/HelpManager.java | 5134 | /*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.system.help;
import static org.hisp.dhis.commons.util.StreamUtils.ENCODING_UTF8;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Locale;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.io.ClassPathResource;
/**
 * Serves help content by applying XSLT stylesheets to a (possibly localized)
 * help content XML document found on the classpath. All methods are static;
 * the class holds no state.
 *
 * @author Lars Helge Overland
 */
@Slf4j
public class HelpManager
{
    /** Name of the non-localized fallback help content file. */
    private static final String DEFAULT_HELP_FILE = "help_content.xml";

    // -------------------------------------------------------------------------
    // HelpManager implementation
    // -------------------------------------------------------------------------

    /**
     * Writes the transformed help content of the given section to the stream.
     *
     * @param out the stream to write the transformed content to.
     * @param id the identifier of the help section to render.
     * @param locale the locale for which to resolve a help file, may be null.
     * @throws RuntimeException if the content could not be read or transformed.
     */
    public static void getHelpContent( OutputStream out, String id, Locale locale )
    {
        try
        {
            ClassPathResource classPathResource = resolveHelpFileResource( locale );

            Source source = new StreamSource( classPathResource.getInputStream(), ENCODING_UTF8 );

            Result result = new StreamResult( out );

            Transformer transformer = getTransformer( "help_stylesheet.xsl" );

            // Restrict the stylesheet output to the requested help section
            transformer.setParameter( "sectionId", id );

            transformer.transform( source, result );
        }
        catch ( Exception ex )
        {
            throw new RuntimeException( "Failed to get help content", ex );
        }
    }

    /**
     * Writes the transformed list of help items to the stream.
     *
     * @param out the stream to write the transformed content to.
     * @param locale the locale for which to resolve a help file, may be null.
     * @throws RuntimeException if the content could not be read or transformed.
     */
    public static void getHelpItems( OutputStream out, Locale locale )
    {
        try
        {
            ClassPathResource classPathResource = resolveHelpFileResource( locale );

            Source source = new StreamSource( classPathResource.getInputStream(), ENCODING_UTF8 );

            Result result = new StreamResult( out );

            getTransformer( "helpitems_stylesheet.xsl" ).transform( source, result );
        }
        catch ( Exception ex )
        {
            throw new RuntimeException( "Failed to get help content", ex );
        }
    }

    // -------------------------------------------------------------------------
    // Supportive methods
    // -------------------------------------------------------------------------

    /**
     * Creates a transformer for the given stylesheet resource on the classpath.
     */
    private static Transformer getTransformer( String stylesheetName )
        throws IOException,
        TransformerConfigurationException
    {
        Source stylesheet = new StreamSource( new ClassPathResource( stylesheetName ).getInputStream(), ENCODING_UTF8 );

        return TransformerFactory.newInstance().newTransformer( stylesheet );
    }

    /**
     * Resolves the help content file for the given locale, falling back to the
     * default non-localized file if no localized file exists on the classpath.
     */
    private static ClassPathResource resolveHelpFileResource( Locale locale )
    {
        // Locale.getDisplayLanguage() never returns null, so checking the
        // locale itself is sufficient (the original extra check was redundant)
        String helpFile = locale != null
            ? "help_content_" + locale.getLanguage() + "_" + locale.getCountry() + ".xml"
            : DEFAULT_HELP_FILE;

        log.debug( "Help file: " + helpFile );

        ClassPathResource classPathResource = new ClassPathResource( helpFile );

        if ( !classPathResource.exists() )
        {
            // Fixed typo in the original message ("defaul" -> "default")
            log.warn( "Help file: " + helpFile + " not available on classpath, falling back to default" );

            classPathResource = new ClassPathResource( DEFAULT_HELP_FILE );
        }

        return classPathResource;
    }
}
| bsd-3-clause |
wjkohnen/antlr4 | tool/src/org/antlr/v4/codegen/model/ElementFrequenciesVisitor.java | 8306 | /*
* Copyright (c) 2012-2016 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.codegen.model;
import org.antlr.runtime.tree.TreeNodeStream;
import org.antlr.v4.misc.FrequencySet;
import org.antlr.v4.misc.MutableInt;
import org.antlr.v4.parse.GrammarTreeVisitor;
import org.antlr.v4.tool.ErrorManager;
import org.antlr.v4.tool.ast.ActionAST;
import org.antlr.v4.tool.ast.AltAST;
import org.antlr.v4.tool.ast.GrammarAST;
import org.antlr.v4.tool.ast.TerminalAST;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Map;
/**
 * Walks a grammar AST and tracks, for each referenced token or rule, how many
 * times it can occur within one alternative. Two parallel stacks are kept:
 * {@link #frequencies} accumulates the maximum occurrence count (clipped to 2,
 * i.e. "more than once") and {@link #minFrequencies} the minimum count
 * (0 meaning the element is optional).
 */
public class ElementFrequenciesVisitor extends GrammarTreeVisitor {
	/**
	 * This special value means "no set", and is used by {@link #minFrequencies}
	 * to ensure that {@link #combineMin} doesn't merge an empty set (all zeros)
	 * with the results of the first alternative.
	 */
	private static final FrequencySet<String> SENTINEL = new FrequencySet<String>();

	// Per-scope maximum occurrence counts; top of stack is the current scope.
	final Deque<FrequencySet<String>> frequencies;
	// Per-scope minimum occurrence counts, mirrored with {@link #frequencies}.
	private final Deque<FrequencySet<String>> minFrequencies;

	public ElementFrequenciesVisitor(TreeNodeStream input) {
		super(input);
		frequencies = new ArrayDeque<FrequencySet<String>>();
		frequencies.push(new FrequencySet<String>());
		minFrequencies = new ArrayDeque<FrequencySet<String>>();
		// Seed with SENTINEL so the first alternative replaces it rather than
		// being min-merged against an all-zero set.
		minFrequencies.push(SENTINEL);
	}

	/**
	 * Returns the final minimum-frequency set. Only valid after a complete
	 * visit: exactly one set must remain and it must not be the sentinel.
	 */
	FrequencySet<String> getMinFrequencies() {
		assert minFrequencies.size() == 1;
		assert minFrequencies.peek() != SENTINEL;
		assert SENTINEL.isEmpty();

		return minFrequencies.peek();
	}

	/** During code gen, we can assume tree is in good shape */
	@Override
	public ErrorManager getErrorManager() { return super.getErrorManager(); }

	/*
	 * Common
	 */

	/**
	 * Generate a frequency set as the union of two input sets. If an
	 * element is contained in both sets, the value for the output will be
	 * the maximum of the two input values.
	 *
	 * @param a The first set.
	 * @param b The second set.
	 * @return The union of the two sets, with the maximum value chosen
	 * whenever both sets contain the same key.
	 */
	protected static FrequencySet<String> combineMax(FrequencySet<String> a, FrequencySet<String> b) {
		// combineAndClip with clip=1 yields the union of keys; the counts are
		// then overwritten below with the per-key maximum.
		FrequencySet<String> result = combineAndClip(a, b, 1);
		for (Map.Entry<String, MutableInt> entry : a.entrySet()) {
			result.get(entry.getKey()).v = entry.getValue().v;
		}

		for (Map.Entry<String, MutableInt> entry : b.entrySet()) {
			MutableInt slot = result.get(entry.getKey());
			slot.v = Math.max(slot.v, entry.getValue().v);
		}

		return result;
	}

	/**
	 * Generate a frequency set as the union of two input sets. If an
	 * element is contained in both sets, the value for the output will be
	 * the minimum of the two input values.
	 *
	 * @param a The first set.
	 * @param b The second set. If this set is {@link #SENTINEL}, it is treated
	 * as though no second set were provided.
	 * @return The union of the two sets, with the minimum value chosen
	 * whenever both sets contain the same key.
	 */
	protected static FrequencySet<String> combineMin(FrequencySet<String> a, FrequencySet<String> b) {
		if (b == SENTINEL) {
			// First alternative: nothing to merge against yet.
			return a;
		}

		assert a != SENTINEL;
		FrequencySet<String> result = combineAndClip(a, b, Integer.MAX_VALUE);
		for (Map.Entry<String, MutableInt> entry : result.entrySet()) {
			// count() returns 0 for keys absent from one set, so elements not
			// present in both inputs end up with a minimum of 0 (optional).
			entry.getValue().v = Math.min(a.count(entry.getKey()), b.count(entry.getKey()));
		}

		return result;
	}

	/**
	 * Generate a frequency set as the union of two input sets, with the
	 * values clipped to a specified maximum value. If an element is
	 * contained in both sets, the value for the output, prior to clipping,
	 * will be the sum of the two input values.
	 *
	 * @param a The first set.
	 * @param b The second set.
	 * @param clip The maximum value to allow for any output.
	 * @return The sum of the two sets, with the individual elements clipped
	 * to the maximum value given by {@code clip}.
	 */
	protected static FrequencySet<String> combineAndClip(FrequencySet<String> a, FrequencySet<String> b, int clip) {
		FrequencySet<String> result = new FrequencySet<String>();
		for (Map.Entry<String, MutableInt> entry : a.entrySet()) {
			for (int i = 0; i < entry.getValue().v; i++) {
				result.add(entry.getKey());
			}
		}

		for (Map.Entry<String, MutableInt> entry : b.entrySet()) {
			for (int i = 0; i < entry.getValue().v; i++) {
				result.add(entry.getKey());
			}
		}

		for (Map.Entry<String, MutableInt> entry : result.entrySet()) {
			entry.getValue().v = Math.min(entry.getValue().v, clip);
		}

		return result;
	}

	// A token reference counts once toward both max and min in the current scope.
	@Override
	public void tokenRef(TerminalAST ref) {
		frequencies.peek().add(ref.getText());
		minFrequencies.peek().add(ref.getText());
	}

	// A rule reference counts once toward both max and min in the current scope.
	@Override
	public void ruleRef(GrammarAST ref, ActionAST arg) {
		frequencies.peek().add(ref.getText());
		minFrequencies.peek().add(ref.getText());
	}

	/*
	 * Parser rules
	 */

	// Each alternative gets its own fresh scope on both stacks.
	@Override
	protected void enterAlternative(AltAST tree) {
		frequencies.push(new FrequencySet<String>());
		minFrequencies.push(new FrequencySet<String>());
	}

	// Merge the alternative's counts into the parent scope: max across
	// alternatives for the upper bound, min for the lower bound. Note the two
	// pop() calls pass (current, parent) -- both combiners tolerate this order.
	@Override
	protected void exitAlternative(AltAST tree) {
		frequencies.push(combineMax(frequencies.pop(), frequencies.pop()));
		minFrequencies.push(combineMin(minFrequencies.pop(), minFrequencies.pop()));
	}

	@Override
	protected void enterElement(GrammarAST tree) {
		frequencies.push(new FrequencySet<String>());
		minFrequencies.push(new FrequencySet<String>());
	}

	// Sequential elements add up, clipped at 2 ("more than once").
	@Override
	protected void exitElement(GrammarAST tree) {
		frequencies.push(combineAndClip(frequencies.pop(), frequencies.pop(), 2));
		minFrequencies.push(combineAndClip(minFrequencies.pop(), minFrequencies.pop(), 2));
	}

	@Override
	protected void enterBlockSet(GrammarAST tree) {
		frequencies.push(new FrequencySet<String>());
		minFrequencies.push(new FrequencySet<String>());
	}

	@Override
	protected void exitBlockSet(GrammarAST tree) {
		for (Map.Entry<String, MutableInt> entry : frequencies.peek().entrySet()) {
			// This visitor counts a block set as a sequence of elements, not a
			// sequence of alternatives of elements. Reset the count back to 1
			// for all items when leaving the set to ensure duplicate entries in
			// the set are treated as a maximum of one item.
			entry.getValue().v = 1;
		}

		if (minFrequencies.peek().size() > 1) {
			// Everything is optional
			minFrequencies.peek().clear();
		}

		frequencies.push(combineAndClip(frequencies.pop(), frequencies.pop(), 2));
		minFrequencies.push(combineAndClip(minFrequencies.pop(), minFrequencies.pop(), 2));
	}

	@Override
	protected void exitSubrule(GrammarAST tree) {
		if (tree.getType() == CLOSURE || tree.getType() == POSITIVE_CLOSURE) {
			// Loops can repeat their contents: bump every count to the clip
			// value 2, meaning "may occur more than once".
			for (Map.Entry<String, MutableInt> entry : frequencies.peek().entrySet()) {
				entry.getValue().v = 2;
			}
		}

		if (tree.getType() == CLOSURE || tree.getType() == OPTIONAL) {
			// Everything inside a closure is optional, so the minimum
			// number of occurrences for all elements is 0.
			minFrequencies.peek().clear();
		}
	}

	/*
	 * Lexer rules
	 */

	@Override
	protected void enterLexerAlternative(GrammarAST tree) {
		frequencies.push(new FrequencySet<String>());
		minFrequencies.push(new FrequencySet<String>());
	}

	// Mirrors exitAlternative for lexer rules.
	@Override
	protected void exitLexerAlternative(GrammarAST tree) {
		frequencies.push(combineMax(frequencies.pop(), frequencies.pop()));
		minFrequencies.push(combineMin(minFrequencies.pop(), minFrequencies.pop()));
	}

	@Override
	protected void enterLexerElement(GrammarAST tree) {
		frequencies.push(new FrequencySet<String>());
		minFrequencies.push(new FrequencySet<String>());
	}

	// Mirrors exitElement for lexer rules.
	@Override
	protected void exitLexerElement(GrammarAST tree) {
		frequencies.push(combineAndClip(frequencies.pop(), frequencies.pop(), 2));
		minFrequencies.push(combineAndClip(minFrequencies.pop(), minFrequencies.pop(), 2));
	}

	@Override
	protected void exitLexerSubrule(GrammarAST tree) {
		if (tree.getType() == CLOSURE || tree.getType() == POSITIVE_CLOSURE) {
			// Loops may repeat their contents; cap at 2 ("more than once").
			for (Map.Entry<String, MutableInt> entry : frequencies.peek().entrySet()) {
				entry.getValue().v = 2;
			}
		}

		if (tree.getType() == CLOSURE) {
			// Everything inside a closure is optional, so the minimum
			// number of occurrences for all elements is 0.
			minFrequencies.peek().clear();
		}
	}
}
| bsd-3-clause |
dhis2/dhis2-core | dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/events/event/csv/DefaultCsvEventService.java | 7551 | /*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.dxf2.events.event.csv;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.hisp.dhis.dxf2.events.event.DataValue;
import org.hisp.dhis.dxf2.events.event.Event;
import org.hisp.dhis.event.EventStatus;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTReader;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvParser;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;
import com.google.common.collect.Lists;
/**
 * CSV serialization and deserialization of events. The CSV layout is one row
 * per event data value, with the event-level fields repeated on every row
 * belonging to the same event.
 *
 * @author Morten Olav Hansen <mortenoh@gmail.com>
 */
@Service( "org.hisp.dhis.dxf2.events.event.csv.CsvEventService" )
public class DefaultCsvEventService
    implements CsvEventService<Event>
{
    private static final CsvMapper CSV_MAPPER = new CsvMapper().enable( CsvParser.Feature.WRAP_AS_ARRAY );

    private static final CsvSchema CSV_SCHEMA = CSV_MAPPER.schemaFor( CsvEventDataValue.class )
        .withLineSeparator( "\n" );

    /**
     * Writes the given events as CSV rows, one row per data value.
     *
     * @param outputStream the stream to write the CSV content to.
     * @param events the events to serialize.
     * @param withHeader whether to emit a header row.
     * @throws IOException if writing fails.
     */
    @Override
    public void writeEvents( OutputStream outputStream, List<Event> events, boolean withHeader )
        throws IOException
    {
        ObjectWriter writer = CSV_MAPPER.writer( CSV_SCHEMA.withUseHeader( withHeader ) );

        List<CsvEventDataValue> dataValues = new ArrayList<>();

        for ( Event event : events )
        {
            // Template row carrying the event-level fields; each data value of
            // this event is written as a copy of this template.
            CsvEventDataValue templateDataValue = new CsvEventDataValue();
            templateDataValue.setEvent( event.getEvent() );
            templateDataValue.setStatus( event.getStatus() != null ? event.getStatus().name() : null );
            templateDataValue.setProgram( event.getProgram() );
            templateDataValue.setProgramStage( event.getProgramStage() );
            templateDataValue.setEnrollment( event.getEnrollment() );
            templateDataValue.setOrgUnit( event.getOrgUnit() );
            templateDataValue.setEventDate( event.getEventDate() );
            templateDataValue.setDueDate( event.getDueDate() );
            templateDataValue.setStoredBy( event.getStoredBy() );
            templateDataValue.setCompletedDate( event.getCompletedDate() );
            templateDataValue.setCompletedBy( event.getCompletedBy() );

            if ( event.getGeometry() != null )
            {
                // Geometry is serialized as WKT; points additionally get
                // explicit longitude/latitude columns.
                templateDataValue.setGeometry( event.getGeometry().toText() );

                if ( event.getGeometry().getGeometryType().equals( "Point" ) )
                {
                    templateDataValue.setLongitude( event.getGeometry().getCoordinate().x );
                    templateDataValue.setLatitude( event.getGeometry().getCoordinate().y );
                }
            }

            for ( DataValue value : event.getDataValues() )
            {
                CsvEventDataValue dataValue = new CsvEventDataValue( templateDataValue );
                dataValue.setDataElement( value.getDataElement() );
                dataValue.setValue( value.getValue() );
                dataValue.setProvidedElsewhere( value.getProvidedElsewhere() );

                // A data-value-level storedBy overrides the event-level one
                if ( value.getStoredBy() != null )
                {
                    dataValue.setStoredBy( value.getStoredBy() );
                }

                dataValues.add( dataValue );
            }
        }

        writer.writeValue( outputStream, dataValues );
    }

    /**
     * Reads events from CSV, grouping consecutive rows with the same event
     * identifier into a single event. Rows belonging to the same event are
     * therefore expected to be contiguous in the input.
     *
     * @param inputStream the stream to read CSV content from.
     * @param skipFirst whether to skip the first data row (header).
     * @return the parsed events.
     * @throws IOException if reading fails.
     * @throws ParseException if a WKT geometry value cannot be parsed.
     */
    @Override
    public List<Event> readEvents( InputStream inputStream, boolean skipFirst )
        throws IOException,
        ParseException
    {
        List<Event> events = Lists.newArrayList();

        ObjectReader reader = CSV_MAPPER.readerFor( CsvEventDataValue.class )
            .with( CSV_SCHEMA.withSkipFirstDataRow( skipFirst ) );

        MappingIterator<CsvEventDataValue> iterator = reader.readValues( inputStream );

        // Reuse one reader for all rows instead of allocating one per geometry
        // (WKTReader is not thread-safe, but this instance is method-local).
        WKTReader wktReader = new WKTReader();

        // Sentinel identifier that never matches a real event UID, forcing a
        // new event to be created for the first row.
        Event event = new Event();
        event.setEvent( "not_valid" );

        while ( iterator.hasNext() )
        {
            CsvEventDataValue dataValue = iterator.next();

            if ( !event.getEvent().equals( dataValue.getEvent() ) )
            {
                event = new Event();
                event.setEvent( dataValue.getEvent() );
                event.setStatus( StringUtils.isEmpty( dataValue.getStatus() )
                    ? EventStatus.ACTIVE
                    : Enum.valueOf( EventStatus.class, dataValue.getStatus() ) );
                event.setProgram( dataValue.getProgram() );
                event.setProgramStage( dataValue.getProgramStage() );
                event.setEnrollment( dataValue.getEnrollment() );
                event.setOrgUnit( dataValue.getOrgUnit() );
                event.setEventDate( dataValue.getEventDate() );
                event.setDueDate( dataValue.getDueDate() );
                event.setCompletedDate( dataValue.getCompletedDate() );
                event.setCompletedBy( dataValue.getCompletedBy() );

                if ( dataValue.getGeometry() != null )
                {
                    // Explicit WKT geometry takes precedence over lon/lat
                    event.setGeometry( wktReader.read( dataValue.getGeometry() ) );
                }
                else if ( dataValue.getLongitude() != null && dataValue.getLatitude() != null )
                {
                    event.setGeometry( wktReader
                        .read( "Point(" + dataValue.getLongitude() + " " + dataValue.getLatitude() + ")" ) );
                }

                events.add( event );
            }

            DataValue value = new DataValue( dataValue.getDataElement(), dataValue.getValue() );
            value.setStoredBy( dataValue.getStoredBy() );
            value.setProvidedElsewhere( dataValue.getProvidedElsewhere() );

            event.getDataValues().add( value );
        }

        return events;
    }
}
| bsd-3-clause |
navalev/azure-sdk-for-java | sdk/cognitiveservices/ms-azure-cs-newssearch/src/main/java/com/microsoft/azure/cognitiveservices/search/newssearch/models/MediaObject.java | 1987 | /**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.cognitiveservices.search.newssearch.models;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.annotation.JsonSubTypes;
/**
 * Defines a media object.
 * <p>
 * Generated by AutoRest. All properties are declared with
 * {@code Access.WRITE_ONLY}, i.e. they are populated from the service
 * response during deserialization only, and no setters are exposed.
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "_type", defaultImpl = MediaObject.class)
@JsonTypeName("MediaObject")
@JsonSubTypes({
    @JsonSubTypes.Type(name = "ImageObject", value = ImageObject.class),
    @JsonSubTypes.Type(name = "VideoObject", value = VideoObject.class)
})
public class MediaObject extends CreativeWork {
    /**
     * Original URL to retrieve the source (file) for the media object (e.g the
     * source URL for the image).
     */
    @JsonProperty(value = "contentUrl", access = JsonProperty.Access.WRITE_ONLY)
    private String contentUrl;

    /**
     * The width of the source media object, in pixels.
     */
    @JsonProperty(value = "width", access = JsonProperty.Access.WRITE_ONLY)
    private Integer width;

    /**
     * The height of the source media object, in pixels.
     */
    @JsonProperty(value = "height", access = JsonProperty.Access.WRITE_ONLY)
    private Integer height;

    /**
     * Get the contentUrl value.
     *
     * @return the contentUrl value, or null if not set in the response
     */
    public String contentUrl() {
        return this.contentUrl;
    }

    /**
     * Get the width value.
     *
     * @return the width value, or null if not set in the response
     */
    public Integer width() {
        return this.width;
    }

    /**
     * Get the height value.
     *
     * @return the height value, or null if not set in the response
     */
    public Integer height() {
        return this.height;
    }
}
| mit |
Peter-Maximilian/settlers-remake | jsettlers.mapcreator/src/main/java/jsettlers/mapcreator/tools/shapes/EShapeType.java | 2524 | /*******************************************************************************
* Copyright (c) 2015 - 2016
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*******************************************************************************/
package jsettlers.mapcreator.tools.shapes;
/**
 * Supported shape types, each pairing a drawing {@link ShapeType}
 * implementation with its toolbar {@link ShapeIcon}.
 *
 * @author Andreas Butti
 *
 */
public enum EShapeType {

	/**
	 * Draw a single point
	 */
	POINT(new PointShape(), ShapeIcon.POINT),

	/**
	 * Line without breaks, even if you move the mouse faster
	 */
	LINE(new LineShape(), ShapeIcon.LINE),

	/**
	 * TODO A good description
	 */
	LINE_CIRCLE(new LineCircleShape(), ShapeIcon.LINE_CIRCLE),

	/**
	 * TODO A good description
	 */
	GRID_CIRCLE(new GridCircleShape(), ShapeIcon.GRID_CIRCLE),

	/**
	 * TODO A good description
	 */
	FUZZY_LINE_CIRCLE(new FuzzyLineCircleShape(), ShapeIcon.FUZZY_LINE_CIRCLE),

	/**
	 * Noisy line without breaks, even if you move the mouse faster
	 */
	NOISY_LINE_CIRCLE(new NoisyLineCircleShape(), ShapeIcon.NOISY_LINE_CIRCLE);

	/**
	 * The shape corresponding to this enum value
	 */
	private final ShapeType shape;

	/**
	 * Shape icon
	 */
	private final ShapeIcon icon;

	/**
	 * Constructor
	 *
	 * @param shape the drawing shape implementation for this type
	 * @param icon the icon representing this shape type
	 */
	EShapeType(ShapeType shape, ShapeIcon icon) {
		this.shape = shape;
		this.icon = icon;
	}

	/**
	 * @return The shape corresponding to this enum value
	 */
	public ShapeType getShape() {
		return shape;
	}

	/**
	 * @return Shape icon
	 */
	public ShapeIcon getIcon() {
		return icon;
	}
}
| mit |
rango1900/MindTrails | templeton/src/main/java/edu/virginia/psyc/templeton/controller/GraphController.java | 4387 | package edu.virginia.psyc.templeton.controller;
import edu.virginia.psyc.templeton.domain.TempletonStudy;
import edu.virginia.psyc.templeton.persistence.ExpectancyBias;
import edu.virginia.psyc.templeton.persistence.ExpectancyBiasRepository;
import org.apache.commons.math3.stat.regression.SimpleRegression;
import org.mindtrails.controller.BaseController;
import org.mindtrails.domain.Participant;
import org.mindtrails.domain.Session;
import org.mindtrails.service.EmailService;
import org.mindtrails.service.ParticipantService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.RequestMapping;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Provides a graph of the Expectancy Bias results from a participant over
 * time, together with a simple linear-regression trend line and an overall
 * improvement status.
 */
@Controller
@RequestMapping("/graph")
public class GraphController extends BaseController {

    private static final Logger LOG = LoggerFactory.getLogger(GraphController.class);

    @Autowired private ExpectancyBiasRepository biasRepository;
    @Autowired private ParticipantService participantService;
    @Autowired private EmailService emailService;

    /**
     * Renders the participant's expectancy-bias scores per session along with
     * a regression line between the first and last measurements.
     *
     * @param model the view model to populate (points, regressionPoints,
     *        improvement, status, participant).
     * @param principal the authenticated user; the email identifies the participant.
     * @return the "graph" view name.
     */
    @RequestMapping
    public String graph(ModelMap model, Principal principal) {
        Participant p = participantService.findByEmail(principal.getName());
        List<ExpectancyBias> list = biasRepository.findByParticipant(p);
        List<List<Object>> points = new ArrayList<>();
        List<List<Object>> regressionPoints = new ArrayList<>();
        TempletonStudy study = (TempletonStudy) p.getStudy();

        // Guard: without any recorded measurements there is nothing to plot
        // (the original code threw IndexOutOfBoundsException here).
        if (list.isEmpty()) {
            model.addAttribute("points", points);
            model.addAttribute("regressionPoints", regressionPoints);
            model.addAttribute("improvement", 0);
            model.addAttribute("status", "same");
            model.addAttribute("participant", p);
            return "graph";
        }

        Collections.sort(list);

        ExpectancyBias original = list.get(0);
        ExpectancyBias last = list.get(list.size() - 1);

        // Fit a linear trend over the training sessions; the post assessment
        // is excluded so it does not skew the training trajectory.
        SimpleRegression regression = new SimpleRegression();
        double counter = 0;
        for (ExpectancyBias eb : list) {
            if (!eb.getSession().startsWith("POST")) {
                regression.addData(counter, eb.score());
                counter++;
            }
        }

        // Create plot points
        for (ExpectancyBias eb : list) {
            List<Object> point = new ArrayList<>();
            Session session = study.getSession(eb.getSession());
            point.add(session == null ? "" : session.getDisplayName());
            point.add(eb.score());
            points.add(point);

            // The regression line is drawn between the first and last sessions.
            if (eb.equals(original)) {
                List<Object> rPoint = new ArrayList<>(point);
                rPoint.set(1, regression.getIntercept());
                regressionPoints.add(rPoint);
            }
            if (eb.equals(last)) {
                List<Object> rPoint = new ArrayList<>(point);
                // NOTE(review): predicts at x = list.size() although POST
                // sessions were excluded from the fit -- confirm the intended
                // x-offset with the study team.
                rPoint.set(1, regression.predict(list.size()));
                regressionPoints.add(rPoint);
            }
        }

        // Percentage drop from the initial (intercept) score. Guard against a
        // zero intercept, which would otherwise produce NaN/Infinity.
        double intercept = regression.getIntercept();
        int improvement = intercept == 0
            ? 0
            : (int) ((intercept - regression.predict(list.size())) / intercept * 100);

        String status = "";
        if (Math.abs(improvement) < 15) status = "same";
        else if (improvement > 30) status = "lot";
        else if (improvement > 15) status = "little";
        else if (improvement < -15) status = "worse";

        model.addAttribute("points", points);
        model.addAttribute("regressionPoints", regressionPoints);
        model.addAttribute("improvement", improvement);
        model.addAttribute("status", status);
        model.addAttribute("participant", p);
        return "graph";
    }

    /**
     * Notifies administrators that the participant completed the study and
     * wants to restart in a non-control group, then re-renders the graph.
     */
    @RequestMapping("/requestTraining")
    public String requestTraining(ModelMap model, Principal principal) {
        Participant p = participantService.findByEmail(principal.getName());
        String message = "Participant #" + p.getId() + " completed the study, and would like to restart in a non-control group.";
        this.emailService.sendAdminEmail("Training Request", message);
        model.addAttribute("RequestSent", true);
        return graph(model, principal);
    }
}
| mit |
anudeepsharma/azure-sdk-for-java | azure-batch/src/main/java/com/microsoft/azure/batch/protocol/models/PoolGetOptions.java | 7771 | /**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.batch.protocol.models;
import com.microsoft.rest.DateTimeRfc1123;
import org.joda.time.DateTime;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Additional parameters for the Pool_Get operation.
 * <p>
 * Generated by AutoRest. Fluent {@code withX} setters return {@code this} for
 * chaining; getters return null when a value has not been set. The RFC 1123
 * date headers are stored internally as {@link DateTimeRfc1123} wrappers and
 * converted to/from {@link DateTime} at the accessor boundary.
 */
public class PoolGetOptions {
    /**
     * An OData $select clause.
     */
    @JsonProperty(value = "")
    private String select;

    /**
     * An OData $expand clause.
     */
    @JsonProperty(value = "")
    private String expand;

    /**
     * The maximum time that the server can spend processing the request, in
     * seconds. The default is 30 seconds.
     */
    @JsonProperty(value = "")
    private Integer timeout;

    /**
     * The caller-generated request identity, in the form of a GUID with no
     * decoration such as curly braces, e.g.
     * 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
     */
    @JsonProperty(value = "")
    private String clientRequestId;

    /**
     * Whether the server should return the client-request-id identifier in
     * the response.
     */
    @JsonProperty(value = "")
    private Boolean returnClientRequestId;

    /**
     * The time the request was issued. If not specified, this header will be
     * automatically populated with the current system clock time.
     */
    @JsonProperty(value = "")
    private DateTimeRfc1123 ocpDate;

    /**
     * An ETag is specified. Specify this header to perform the operation only
     * if the resource's ETag is an exact match as specified.
     */
    @JsonProperty(value = "")
    private String ifMatch;

    /**
     * An ETag is specified. Specify this header to perform the operation only
     * if the resource's ETag does not match the specified ETag.
     */
    @JsonProperty(value = "")
    private String ifNoneMatch;

    /**
     * Specify this header to perform the operation only if the resource has
     * been modified since the specified date/time.
     */
    @JsonProperty(value = "")
    private DateTimeRfc1123 ifModifiedSince;

    /**
     * Specify this header to perform the operation only if the resource has
     * not been modified since the specified date/time.
     */
    @JsonProperty(value = "")
    private DateTimeRfc1123 ifUnmodifiedSince;

    /**
     * Get the select value.
     *
     * @return the select value
     */
    public String select() {
        return this.select;
    }

    /**
     * Set the select value.
     *
     * @param select the select value to set
     * @return the PoolGetOptions object itself.
     */
    public PoolGetOptions withSelect(String select) {
        this.select = select;
        return this;
    }

    /**
     * Get the expand value.
     *
     * @return the expand value
     */
    public String expand() {
        return this.expand;
    }

    /**
     * Set the expand value.
     *
     * @param expand the expand value to set
     * @return the PoolGetOptions object itself.
     */
    public PoolGetOptions withExpand(String expand) {
        this.expand = expand;
        return this;
    }

    /**
     * Get the timeout value.
     *
     * @return the timeout value
     */
    public Integer timeout() {
        return this.timeout;
    }

    /**
     * Set the timeout value.
     *
     * @param timeout the timeout value to set
     * @return the PoolGetOptions object itself.
     */
    public PoolGetOptions withTimeout(Integer timeout) {
        this.timeout = timeout;
        return this;
    }

    /**
     * Get the clientRequestId value.
     *
     * @return the clientRequestId value
     */
    public String clientRequestId() {
        return this.clientRequestId;
    }

    /**
     * Set the clientRequestId value.
     *
     * @param clientRequestId the clientRequestId value to set
     * @return the PoolGetOptions object itself.
     */
    public PoolGetOptions withClientRequestId(String clientRequestId) {
        this.clientRequestId = clientRequestId;
        return this;
    }

    /**
     * Get the returnClientRequestId value.
     *
     * @return the returnClientRequestId value
     */
    public Boolean returnClientRequestId() {
        return this.returnClientRequestId;
    }

    /**
     * Set the returnClientRequestId value.
     *
     * @param returnClientRequestId the returnClientRequestId value to set
     * @return the PoolGetOptions object itself.
     */
    public PoolGetOptions withReturnClientRequestId(Boolean returnClientRequestId) {
        this.returnClientRequestId = returnClientRequestId;
        return this;
    }

    /**
     * Get the ocpDate value.
     *
     * @return the ocpDate value
     */
    public DateTime ocpDate() {
        // Unwrap the RFC 1123 holder; null means the header was never set
        if (this.ocpDate == null) {
            return null;
        }
        return this.ocpDate.getDateTime();
    }

    /**
     * Set the ocpDate value.
     *
     * @param ocpDate the ocpDate value to set
     * @return the PoolGetOptions object itself.
     */
    public PoolGetOptions withOcpDate(DateTime ocpDate) {
        // Wrap in DateTimeRfc1123 so the header serializes in RFC 1123 format
        if (ocpDate == null) {
            this.ocpDate = null;
        } else {
            this.ocpDate = new DateTimeRfc1123(ocpDate);
        }
        return this;
    }

    /**
     * Get the ifMatch value.
     *
     * @return the ifMatch value
     */
    public String ifMatch() {
        return this.ifMatch;
    }

    /**
     * Set the ifMatch value.
     *
     * @param ifMatch the ifMatch value to set
     * @return the PoolGetOptions object itself.
     */
    public PoolGetOptions withIfMatch(String ifMatch) {
        this.ifMatch = ifMatch;
        return this;
    }

    /**
     * Get the ifNoneMatch value.
     *
     * @return the ifNoneMatch value
     */
    public String ifNoneMatch() {
        return this.ifNoneMatch;
    }

    /**
     * Set the ifNoneMatch value.
     *
     * @param ifNoneMatch the ifNoneMatch value to set
     * @return the PoolGetOptions object itself.
     */
    public PoolGetOptions withIfNoneMatch(String ifNoneMatch) {
        this.ifNoneMatch = ifNoneMatch;
        return this;
    }

    /**
     * Get the ifModifiedSince value.
     *
     * @return the ifModifiedSince value
     */
    public DateTime ifModifiedSince() {
        // Unwrap the RFC 1123 holder; null means the header was never set
        if (this.ifModifiedSince == null) {
            return null;
        }
        return this.ifModifiedSince.getDateTime();
    }

    /**
     * Set the ifModifiedSince value.
     *
     * @param ifModifiedSince the ifModifiedSince value to set
     * @return the PoolGetOptions object itself.
     */
    public PoolGetOptions withIfModifiedSince(DateTime ifModifiedSince) {
        // Wrap in DateTimeRfc1123 so the header serializes in RFC 1123 format
        if (ifModifiedSince == null) {
            this.ifModifiedSince = null;
        } else {
            this.ifModifiedSince = new DateTimeRfc1123(ifModifiedSince);
        }
        return this;
    }

    /**
     * Get the ifUnmodifiedSince value.
     *
     * @return the ifUnmodifiedSince value
     */
    public DateTime ifUnmodifiedSince() {
        // Unwrap the RFC 1123 holder; null means the header was never set
        if (this.ifUnmodifiedSince == null) {
            return null;
        }
        return this.ifUnmodifiedSince.getDateTime();
    }

    /**
     * Set the ifUnmodifiedSince value.
     *
     * @param ifUnmodifiedSince the ifUnmodifiedSince value to set
     * @return the PoolGetOptions object itself.
     */
    public PoolGetOptions withIfUnmodifiedSince(DateTime ifUnmodifiedSince) {
        // Wrap in DateTimeRfc1123 so the header serializes in RFC 1123 format
        if (ifUnmodifiedSince == null) {
            this.ifUnmodifiedSince = null;
        } else {
            this.ifUnmodifiedSince = new DateTimeRfc1123(ifUnmodifiedSince);
        }
        return this;
    }
}
| mit |
rafaneri/upm | examples/java/MMA7660Sample.java | 2264 | /*
* Author: Stefan Andritoiu <stefan.andritoiu@intel.com>
* Copyright (c) 2015 Intel Corporation.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
//NOT TESTED!!!
public class MMA7660Sample {
static {
try {
System.loadLibrary("javaupm_mma7660");
} catch (UnsatisfiedLinkError e) {
System.err.println("error in loading native library");
System.exit(-1);
}
}
public static void main(String[] args) throws InterruptedException {
// ! [Interesting]
// Instantiate an MMA7660 on I2C bus 0
upm_mma7660.MMA7660 accel = new upm_mma7660.MMA7660(0);
// place device in standby mode so we can write registers
accel.setModeStandby();
// enable 64 samples per second
accel.setSampleRate(upm_mma7660.MMA7660.MMA7660_AUTOSLEEP_T.AUTOSLEEP_64);
// place device into active mode
accel.setModeActive();
while (true) {
int[] rawValues = accel.getRawValues();
System.out.println("Raw Values: x = " + rawValues[0] + " y = " + rawValues[1] + " x = "
+ rawValues[2]);
float[] acceleration = accel.getAcceleration();
System.out.println("Raw Values: x = " + acceleration[0] + " y = " + acceleration[1]
+ " x = " + acceleration[2]);
Thread.sleep(1000);
}
// ! [Interesting]
}
} | mit |
rherlt/HWR-Berlin-OOP2-2016 | src/eclipse_workspace/09_HelloBeerApi/HttpclientLib/httpcomponents-client-4.5.2/examples/org/apache/http/examples/client/ClientPreemptiveBasicAuthentication.java | 4095 | /*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.http.examples.client;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.AuthCache;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
/**
 * Demonstrates how HttpClient can be customized to authenticate
 * preemptively using the BASIC scheme.
 * <p>
 * Generally, preemptive authentication can be considered less
 * secure than a response to an authentication challenge
 * and is therefore discouraged.
 */
public class ClientPreemptiveBasicAuthentication {

    public static void main(String[] args) throws Exception {
        HttpHost target = new HttpHost("httpbin.org", 80, "http");

        // Register BASIC credentials for the target host/port.
        CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
        credentialsProvider.setCredentials(
                new AuthScope(target.getHostName(), target.getPort()),
                new UsernamePasswordCredentials("user", "passwd"));

        CloseableHttpClient client = HttpClients.custom()
                .setDefaultCredentialsProvider(credentialsProvider)
                .build();
        try {
            // Pre-populate the auth cache with a BASIC scheme for the target
            // so credentials are sent before any 401 challenge is received.
            BasicScheme scheme = new BasicScheme();
            AuthCache cache = new BasicAuthCache();
            cache.put(target, scheme);

            // Attach the cache to a local execution context.
            HttpClientContext context = HttpClientContext.create();
            context.setAuthCache(cache);

            HttpGet httpget = new HttpGet("http://httpbin.org/hidden-basic-auth/user/passwd");

            System.out.println("Executing request " + httpget.getRequestLine() + " to target " + target);
            for (int attempt = 0; attempt < 3; attempt++) {
                CloseableHttpResponse response = client.execute(target, httpget, context);
                try {
                    System.out.println("----------------------------------------");
                    System.out.println(response.getStatusLine());
                    System.out.println(EntityUtils.toString(response.getEntity()));
                } finally {
                    response.close();
                }
            }
        } finally {
            client.close();
        }
    }

}
| mit |
riuvshin/che-plugins | plugin-angularjs/core/client/src/main/java/org/eclipse/che/plugin/angularjs/core/client/javascript/contentassist/JavaScriptContentAssistProvider.java | 1209 | /*******************************************************************************
* Copyright (c) 2014 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.plugin.angularjs.core.client.javascript.contentassist;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.core.client.JsArray;
/**
 * GWT overlay type wrapping a JavaScript content-assist provider object.
 * The JSNI method bodies delegate directly to the underlying JavaScript
 * functions, so instances carry no Java state of their own.
 *
 * @author Florent Benoit
 */
public final class JavaScriptContentAssistProvider extends JavaScriptObject implements IContentAssistProvider {

    /** GWT overlay types must declare a protected, empty constructor. */
    protected JavaScriptContentAssistProvider() {
    }

    /**
     * Delegates to the wrapped object's {@code computeProposals} function to
     * compute completion proposals for the given buffer position.
     *
     * @param buffer the text being edited
     * @param offset the caret offset within {@code buffer}
     * @param context the content-assist context passed through to JavaScript
     * @return the proposals produced by the JavaScript provider
     */
    public native JsArray<JsProposal> computeProposals(String buffer, int offset, IContext context)/*-{
        return this['computeProposals'](buffer, offset, context);
    }-*/;

    /**
     * Parses JavaScript source using the global esprima parser in tolerant
     * mode (errors are collected rather than thrown).
     *
     * @param content the JavaScript source to parse
     * @return the resulting program AST
     */
    public native JsProgram parse(String content)/*-{
        return $wnd.esprima.parse(content, {tolerant: true});
    }-*/;
}
| epl-1.0 |
simleo/openmicroscopy | components/blitz/src/omero/gateway/util/PojoMapper.java | 29487 | /*
*------------------------------------------------------------------------------
* Copyright (C) 2006-2017 University of Dundee. All rights reserved.
*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
*------------------------------------------------------------------------------
*/
package omero.gateway.util;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.Map.Entry;
import omero.RString;
import omero.model.Annotation;
import omero.model.BooleanAnnotation;
import omero.model.BooleanAnnotationI;
import omero.model.CommentAnnotation;
import omero.model.CommentAnnotationI;
import omero.model.Dataset;
import omero.model.DatasetI;
import omero.model.DoubleAnnotation;
import omero.model.Ellipse;
import omero.model.Experimenter;
import omero.model.ExperimenterGroup;
import omero.model.FileAnnotation;
import omero.model.FileAnnotationI;
import omero.model.Fileset;
import omero.model.Folder;
import omero.model.IObject;
import omero.model.Image;
import omero.model.ImageI;
import omero.model.Label;
import omero.model.Line;
import omero.model.LongAnnotation;
import omero.model.MapAnnotation;
import omero.model.MapAnnotationI;
import omero.model.Mask;
import omero.model.OriginalFile;
import omero.model.Pixels;
import omero.model.Plate;
import omero.model.PlateAcquisition;
import omero.model.PlateAcquisitionI;
import omero.model.PlateI;
import omero.model.Point;
import omero.model.Polygon;
import omero.model.Polyline;
import omero.model.Project;
import omero.model.ProjectI;
import omero.model.Rectangle;
import omero.model.Roi;
import omero.model.Screen;
import omero.model.ScreenI;
import omero.model.TagAnnotation;
import omero.model.TagAnnotationI;
import omero.model.TermAnnotation;
import omero.model.TermAnnotationI;
import omero.model.TimestampAnnotation;
import omero.model.TimestampAnnotationI;
import omero.model.Well;
import omero.model.WellI;
import omero.model.WellSample;
import omero.model.XmlAnnotation;
import omero.gateway.model.AnnotationData;
import omero.gateway.model.BooleanAnnotationData;
import omero.gateway.model.DataObject;
import omero.gateway.model.DatasetData;
import omero.gateway.model.DoubleAnnotationData;
import omero.gateway.model.EllipseData;
import omero.gateway.model.ExperimenterData;
import omero.gateway.model.FileAnnotationData;
import omero.gateway.model.FileData;
import omero.gateway.model.FilesetData;
import omero.gateway.model.FolderData;
import omero.gateway.model.GroupData;
import omero.gateway.model.ImageData;
import omero.gateway.model.LineData;
import omero.gateway.model.LongAnnotationData;
import omero.gateway.model.MapAnnotationData;
import omero.gateway.model.MaskData;
import omero.gateway.model.PixelsData;
import omero.gateway.model.PlateAcquisitionData;
import omero.gateway.model.PlateData;
import omero.gateway.model.PointData;
import omero.gateway.model.PolygonData;
import omero.gateway.model.PolylineData;
import omero.gateway.model.ProjectData;
import omero.gateway.model.ROIData;
import omero.gateway.model.RatingAnnotationData;
import omero.gateway.model.RectangleData;
import omero.gateway.model.ScreenData;
import omero.gateway.model.TagAnnotationData;
import omero.gateway.model.TermAnnotationData;
import omero.gateway.model.TextData;
import omero.gateway.model.TextualAnnotationData;
import omero.gateway.model.TimeAnnotationData;
import omero.gateway.model.WellData;
import omero.gateway.model.WellSampleData;
import omero.gateway.model.XMLAnnotationData;
/**
 * Helper methods to convert {@link IObject}s into their corresponding
 * {@link DataObject}s.
 *
 * @author Jean-Marie Burel
 * <a href="mailto:j.burel@dundee.ac.uk">j.burel@dundee.ac.uk</a>
 * @version 2.2
 * @since OME2.2
 */
public class PojoMapper
{

    /**
     * Helper method to convert the specified object into its corresponding
     * {@link DataObject} or collection of {@link DataObject}s.
     *
     * @param value The object to convert.
     * @return See above, or <code>null</code> if the value is not convertible.
     */
    private static Object convert(Object value)
    {
        if (value instanceof IObject) return asDataObject((IObject) value);
        else if (value instanceof Collection)
            return convertToDataObjects((Collection) value);
        else if (value instanceof Map) return asDataObjects((Map) value);
        else return null;
    }

    /**
     * Converts the specified {@link IObject} into its corresponding
     * {@link DataObject}.
     *
     * @param object The object to convert.
     * @return See above, or <code>null</code> if the type is not handled.
     * @throws IllegalArgumentException If the object is null.
     */
    public static DataObject asDataObject(IObject object)
    {
        if (object == null)
            throw new IllegalArgumentException("IObject cannot be null.");
        if (object instanceof Project)
            return new ProjectData((Project) object);
        else if (object instanceof Dataset)
            return new DatasetData((Dataset) object);
        else if (object instanceof Folder)
            return new FolderData((Folder) object);
        else if (object instanceof Image)
            return new ImageData((Image) object);
        else if (object instanceof TermAnnotation)
            return new TermAnnotationData((TermAnnotation) object);
        else if (object instanceof TagAnnotation)
            return new TagAnnotationData((TagAnnotation) object);
        else if (object instanceof CommentAnnotation)
            return new TextualAnnotationData((CommentAnnotation) object);
        else if (object instanceof LongAnnotation) {
            LongAnnotation ann = (LongAnnotation) object;
            RString ns = ann.getNs();
            // A long annotation in the rating namespace is a rating;
            // anything else is a plain long annotation.
            if (ns != null) {
                if (RatingAnnotationData.INSIGHT_RATING_NS.equals(
                        ns.getValue()))
                    return new RatingAnnotationData(ann);
                return new LongAnnotationData(ann);
            }
            return new LongAnnotationData(ann);
        } else if (object instanceof DoubleAnnotation)
            return new DoubleAnnotationData((DoubleAnnotation) object);
        else if (object instanceof FileAnnotation)
            return new FileAnnotationData((FileAnnotation) object);
        else if (object instanceof BooleanAnnotation)
            return new BooleanAnnotationData((BooleanAnnotation) object);
        else if (object instanceof TimestampAnnotation)
            return new TimeAnnotationData((TimestampAnnotation) object);
        else if (object instanceof XmlAnnotation)
            return new XMLAnnotationData((XmlAnnotation) object);
        else if (object instanceof Pixels)
            return new PixelsData((Pixels) object);
        else if (object instanceof Experimenter)
            return new ExperimenterData((Experimenter) object);
        else if (object instanceof ExperimenterGroup)
            return new GroupData((ExperimenterGroup) object);
        else if (object instanceof Screen)
            return new ScreenData((Screen) object);
        else if (object instanceof Plate)
            return new PlateData((Plate) object);
        else if (object instanceof PlateAcquisition)
            return new PlateAcquisitionData((PlateAcquisition) object);
        else if (object instanceof Well)
            return new WellData((Well) object);
        else if (object instanceof WellSample)
            return new WellSampleData((WellSample) object);
        else if (object instanceof Roi)
            return new ROIData((Roi) object);
        else if (object instanceof Fileset)
            return new FilesetData((Fileset) object);
        else if (object instanceof MapAnnotation)
            return new MapAnnotationData((MapAnnotation) object);
        return null;
    }

    /**
     * Converts each {@link IObject element} of the collection into its
     * corresponding {@link DataObject}. Elements that cannot be converted
     * are silently skipped.
     *
     * @param objects The set of objects to convert.
     * @return A collection of {@link DataObject}s.
     * @throws IllegalArgumentException If an element is <code>null</code>.
     */
    public static <T extends DataObject> Collection<T> convertToDataObjects(Collection objects)
    {
        // Typed empty list instead of the raw Collections.EMPTY_LIST constant.
        if (objects == null) return Collections.emptyList();
        Collection<T> result = new ArrayList<T>(objects.size());
        Iterator i = objects.iterator();
        DataObject data;
        while (i.hasNext()) {
            data = asDataObject((IObject) i.next());
            if (data != null) result.add(((T) data));
        }
        return result;
    }

    /**
     * Converts each {@link IObject element} of the collection into its
     * corresponding {@link DataObject}. Elements that cannot be converted
     * are silently skipped.
     *
     * @param objects The list of objects to convert.
     * @return A collection of {@link DataObject}s.
     * @throws IllegalArgumentException If an element is <code>null</code>.
     */
    public static <T extends DataObject> Collection<T> asCastedDataObjects(List objects)
    {
        if (objects == null) return new HashSet<T>();
        Collection<T> set = new ArrayList<T>(objects.size());
        Iterator i = objects.iterator();
        DataObject data;
        while (i.hasNext()) {
            data = asDataObject((IObject) i.next());
            if (data != null)
                set.add((T) data);
        }
        return set;
    }

    /**
     * Converts each element of the list to a pair (key, value) in the map.
     * The object in the list must be an IObject subclass and the key is
     * obtained by reflectively invoking <code>method</code> on the converted
     * {@link DataObject}.
     *
     * @param keyKlass The class that will be the key for the map.
     * @param valueKlass The class that will be the value for the map.
     * @param method The name of the no-argument accessor used to obtain the
     *               key from each converted object.
     * @param objects The list of objects to convert.
     * @return A map of converted objects, sorted by key.
     * @throws NoSuchMethodException If <code>method</code> does not exist.
     * @throws SecurityException If reflective access is denied.
     * @throws InvocationTargetException If the accessor throws.
     * @throws IllegalAccessException If the accessor is inaccessible.
     * @throws IllegalArgumentException If the list is <code>null</code>
     *         or if the type {@link IObject} is unknown.
     */
    public static <K, V extends DataObject> Map<K, V>
        asDataObjectMap(Class<K> keyKlass, Class<V> valueKlass,
                String method, List objects) throws
                SecurityException,
                NoSuchMethodException,
                IllegalArgumentException,
                IllegalAccessException,
                InvocationTargetException
    {
        Map<K, V> map = new TreeMap<K, V>();
        V value;
        Method meth;
        K keyValue;
        for (Object obj : objects)
        {
            value = (V) asDataObject((IObject) obj);
            meth = (value.getClass()).getMethod(method);
            keyValue = (K) meth.invoke(value, (Object[]) null);
            map.put(keyValue, value);
        }
        return map;
    }

    /**
     * Converts each pair (key, value) of the map. If the key (resp. value) is
     * an {@link IObject}, the element is converted into its corresponding
     * {@link DataObject}; otherwise the original key (resp. value) is kept.
     *
     * @param objects The map of objects to convert.
     * @return A map of converted objects.
     * @throws IllegalArgumentException If the map is <code>null</code>.
     */
    public static Map asDataObjects(Map objects)
    {
        if (objects == null)
            throw new IllegalArgumentException("The map cannot be null.");
        Map<Object, Object>
            map = new HashMap<Object, Object>(objects.size());
        Set set = objects.entrySet();
        Entry entry;
        Iterator i = set.iterator();
        Object key, value;
        Object convertedKey = null;
        Object convertedValue = null;
        while (i.hasNext()) {
            entry = (Entry) i.next();
            key = entry.getKey();
            value = entry.getValue();
            convertedKey = convert(key);
            convertedValue = convert(value);
            // Fall back to the original key/value when conversion yields null.
            map.put(convertedKey == null ? key : convertedKey,
                    convertedValue == null ? value : convertedValue);
        }
        return map;
    }

    /**
     * Get the pojo type for an {@link IObject} class
     * (Reverse of {@link #getModelType(Class)}).
     *
     * @param modelType
     *            The {@link IObject} class.
     * @return See above.
     * @throws IllegalArgumentException If the type is not supported.
     */
    public static Class<? extends DataObject> getPojoType(Class<? extends IObject> modelType) {
        if (OriginalFile.class.equals(modelType))
            return FileData.class;
        else if (Project.class.equals(modelType))
            return ProjectData.class;
        else if (Dataset.class.equals(modelType))
            return DatasetData.class;
        // Added for consistency with asDataObject() and getModelType(Class),
        // which both already handle the Folder/FolderData pair.
        else if (Folder.class.equals(modelType))
            return FolderData.class;
        else if (Image.class.equals(modelType))
            return ImageData.class;
        else if (BooleanAnnotation.class.equals(modelType))
            return BooleanAnnotationData.class;
        else if (LongAnnotation.class.equals(modelType))
            return LongAnnotationData.class;
        else if (TagAnnotation.class.equals(modelType))
            return TagAnnotationData.class;
        else if (CommentAnnotation.class.equals(modelType))
            return TextualAnnotationData.class;
        else if (FileAnnotation.class.equals(modelType))
            return FileAnnotationData.class;
        else if (TermAnnotation.class.equals(modelType))
            return TermAnnotationData.class;
        else if (Screen.class.equals(modelType))
            return ScreenData.class;
        else if (Plate.class.equals(modelType))
            return PlateData.class;
        else if (Well.class.equals(modelType))
            return WellData.class;
        else if (WellSample.class.equals(modelType))
            return WellSampleData.class;
        else if (PlateAcquisition.class.equals(modelType))
            return PlateAcquisitionData.class;
        else if (ExperimenterGroup.class.equals(modelType))
            return GroupData.class;
        else if (Experimenter.class.equals(modelType))
            return ExperimenterData.class;
        else if (DoubleAnnotation.class.equals(modelType))
            return DoubleAnnotationData.class;
        else if (XmlAnnotation.class.equals(modelType))
            return XMLAnnotationData.class;
        else if (Fileset.class.equals(modelType))
            return FilesetData.class;
        else if (MapAnnotation.class.equals(modelType))
            return MapAnnotationData.class;
        // Bug fix: the old code called modelType.getClass().getSimpleName(),
        // which always yields "Class" and never named the offending type.
        throw new IllegalArgumentException(modelType.getSimpleName() + " not supported");
    }

    /**
     * Converts the specified POJO into the corresponding model class,
     * see {@link #getModelType(Class)}.
     *
     * @param pojoType
     *            The POJO class (Either the simple or the full
     *            class name, e. g. omero.gateway.model.DatasetData or
     *            DatasetData)
     * @return The corresponding {@link IObject} class.
     * @throws IllegalArgumentException If the type is unknown.
     */
    public static Class<? extends IObject> getModelType(String pojoType) {
        Class pojoClass;
        try {
            // make sure it works with full and simple class names
            if (FileData.class.getSimpleName().equals(pojoType))
                pojoType = FileData.class.getName();
            else if (ProjectData.class.getSimpleName().equals(pojoType))
                pojoType = ProjectData.class.getName();
            else if (DatasetData.class.getSimpleName().equals(pojoType))
                pojoType = DatasetData.class.getName();
            else if (FolderData.class.getSimpleName().equals(pojoType))
                pojoType = FolderData.class.getName();
            else if (ImageData.class.getSimpleName().equals(pojoType))
                pojoType = ImageData.class.getName();
            else if (BooleanAnnotationData.class.getSimpleName().equals(
                    pojoType))
                pojoType = BooleanAnnotationData.class.getName();
            else if (RatingAnnotationData.class.getSimpleName()
                    .equals(pojoType)
                    || LongAnnotationData.class.getSimpleName()
                            .equals(pojoType))
                pojoType = LongAnnotationData.class.getName();
            else if (TagAnnotationData.class.getSimpleName().equals(pojoType))
                pojoType = TagAnnotationData.class.getName();
            else if (TextualAnnotationData.class.getSimpleName().equals(
                    pojoType))
                pojoType = TextualAnnotationData.class.getName();
            else if (FileAnnotationData.class.getSimpleName().equals(pojoType))
                pojoType = FileAnnotationData.class.getName();
            else if (TermAnnotationData.class.getSimpleName().equals(pojoType))
                pojoType = TermAnnotationData.class.getName();
            else if (ScreenData.class.getSimpleName().equals(pojoType))
                pojoType = ScreenData.class.getName();
            else if (PlateData.class.getSimpleName().equals(pojoType))
                pojoType = PlateData.class.getName();
            else if (WellData.class.getSimpleName().equals(pojoType))
                pojoType = WellData.class.getName();
            else if (WellSampleData.class.getSimpleName().equals(pojoType))
                pojoType = WellSampleData.class.getName();
            else if (PlateAcquisitionData.class.getSimpleName()
                    .equals(pojoType))
                pojoType = PlateAcquisitionData.class.getName();
            // Removed an unreachable duplicate FileData check here: the very
            // first branch of this chain already handles FileData.
            else if (GroupData.class.getSimpleName().equals(pojoType))
                pojoType = GroupData.class.getName();
            else if (ExperimenterData.class.getSimpleName().equals(pojoType))
                pojoType = ExperimenterData.class.getName();
            else if (DoubleAnnotationData.class.getSimpleName()
                    .equals(pojoType))
                pojoType = DoubleAnnotationData.class.getName();
            else if (XMLAnnotationData.class.getSimpleName().equals(pojoType))
                pojoType = XMLAnnotationData.class.getName();
            else if (FilesetData.class.getSimpleName().equals(pojoType))
                pojoType = FilesetData.class.getName();
            else if (MapAnnotationData.class.getSimpleName().equals(pojoType))
                pojoType = MapAnnotationData.class.getName();
            else if (ROIData.class.getSimpleName().equals(pojoType))
                pojoType = ROIData.class.getName();
            else if (EllipseData.class.getSimpleName().equals(pojoType))
                pojoType = EllipseData.class.getName();
            else if (LineData.class.getSimpleName().equals(pojoType))
                pojoType = LineData.class.getName();
            else if (MaskData.class.getSimpleName().equals(pojoType))
                pojoType = MaskData.class.getName();
            else if (PointData.class.getSimpleName().equals(pojoType))
                pojoType = PointData.class.getName();
            else if (PolygonData.class.getSimpleName().equals(pojoType))
                pojoType = PolygonData.class.getName();
            else if (PolylineData.class.getSimpleName().equals(pojoType))
                pojoType = PolylineData.class.getName();
            else if (RectangleData.class.getSimpleName().equals(pojoType))
                pojoType = RectangleData.class.getName();
            else if (TextData.class.getSimpleName().equals(pojoType))
                pojoType = TextData.class.getName();
            pojoClass = Class.forName(pojoType);
            return getModelType(pojoClass);
        } catch (ClassNotFoundException e) {
            throw new IllegalArgumentException(pojoType + " not found");
        }
    }

    /**
     * Converts the specified POJO into the corresponding model class.
     * (Reverse of {@link #getPojoType(Class)})
     *
     * @param pojoType
     *            The POJO class.
     * @return The corresponding {@link IObject} class.
     * @throws IllegalArgumentException If the type is not a DataObject or
     *         is not supported.
     */
    public static Class<? extends IObject> getModelType(Class<? extends DataObject> pojoType) {
        if (!DataObject.class.isAssignableFrom(pojoType))
            throw new IllegalArgumentException(pojoType.getSimpleName() + " is not a DataObject");
        if (FileData.class.equals(pojoType))
            return OriginalFile.class;
        else if (ProjectData.class.equals(pojoType))
            return Project.class;
        else if (DatasetData.class.equals(pojoType))
            return Dataset.class;
        else if (FolderData.class.equals(pojoType))
            return Folder.class;
        else if (ImageData.class.equals(pojoType))
            return Image.class;
        else if (BooleanAnnotationData.class.equals(pojoType))
            return BooleanAnnotation.class;
        else if (RatingAnnotationData.class.equals(pojoType)
                || LongAnnotationData.class.equals(pojoType))
            return LongAnnotation.class;
        else if (TagAnnotationData.class.equals(pojoType))
            return TagAnnotation.class;
        else if (TextualAnnotationData.class.equals(pojoType))
            return CommentAnnotation.class;
        else if (FileAnnotationData.class.equals(pojoType))
            return FileAnnotation.class;
        else if (TermAnnotationData.class.equals(pojoType))
            return TermAnnotation.class;
        else if (ScreenData.class.equals(pojoType))
            return Screen.class;
        else if (PlateData.class.equals(pojoType))
            return Plate.class;
        else if (WellData.class.equals(pojoType))
            return Well.class;
        else if (WellSampleData.class.equals(pojoType))
            return WellSample.class;
        else if (PlateAcquisitionData.class.equals(pojoType))
            return PlateAcquisition.class;
        // Removed an unreachable duplicate FileData check here: the first
        // branch of this chain already maps FileData to OriginalFile.
        else if (GroupData.class.equals(pojoType))
            return ExperimenterGroup.class;
        else if (ExperimenterData.class.equals(pojoType))
            return Experimenter.class;
        else if (DoubleAnnotationData.class.equals(pojoType))
            return DoubleAnnotation.class;
        else if (XMLAnnotationData.class.equals(pojoType))
            return XmlAnnotation.class;
        else if (FilesetData.class.equals(pojoType))
            return Fileset.class;
        else if (MapAnnotationData.class.equals(pojoType))
            return MapAnnotation.class;
        else if (ROIData.class.equals(pojoType))
            return Roi.class;
        else if (EllipseData.class.equals(pojoType))
            return Ellipse.class;
        else if (LineData.class.equals(pojoType))
            return Line.class;
        else if (MaskData.class.equals(pojoType))
            return Mask.class;
        else if (PointData.class.equals(pojoType))
            return Point.class;
        else if (PolygonData.class.equals(pojoType))
            return Polygon.class;
        else if (PolylineData.class.equals(pojoType))
            return Polyline.class;
        else if (RectangleData.class.equals(pojoType))
            return Rectangle.class;
        else if (TextData.class.equals(pojoType))
            return Label.class;
        // Bug fix: the old code called pojoType.getClass().getSimpleName(),
        // which always yields "Class" and never named the offending type.
        throw new IllegalArgumentException(pojoType.getSimpleName() + " not supported");
    }

    /**
     * Returns the name of the data type which has to be used for Graph
     * actions, see {@link Requests}.
     *
     * @param dataType The pojo type.
     * @return See above.
     * @throws IllegalArgumentException If the type is not supported.
     */
    public static String getGraphType(Class<? extends DataObject> dataType) {
        // containers
        if (dataType.equals(DatasetData.class))
            return Dataset.class.getSimpleName();
        if (dataType.equals(ProjectData.class))
            return Project.class.getSimpleName();
        if (dataType.equals(ScreenData.class))
            return Screen.class.getSimpleName();
        if (dataType.equals(WellData.class))
            return Well.class.getSimpleName();
        if (dataType.equals(PlateData.class))
            return Plate.class.getSimpleName();
        if (dataType.equals(PlateAcquisitionData.class))
            return PlateAcquisition.class.getSimpleName();
        if (dataType.equals(FolderData.class))
            return Folder.class.getSimpleName();

        // annotations
        if (dataType.equals(AnnotationData.class))
            return Annotation.class.getSimpleName();
        if (dataType.equals(TagAnnotationData.class))
            return TagAnnotation.class.getSimpleName();
        if (dataType.equals(BooleanAnnotationData.class))
            return BooleanAnnotation.class.getSimpleName();
        if (dataType.equals(TermAnnotationData.class))
            return TermAnnotation.class.getSimpleName();
        if (dataType.equals(FileAnnotationData.class))
            return FileAnnotation.class.getSimpleName();
        if (dataType.equals(TextualAnnotationData.class))
            return CommentAnnotation.class.getSimpleName();
        if (dataType.equals(MapAnnotationData.class))
            return MapAnnotation.class.getSimpleName();
        if (dataType.equals(TimeAnnotationData.class))
            return TimestampAnnotation.class.getSimpleName();
        if (dataType.equals(XMLAnnotationData.class))
            return XmlAnnotation.class.getSimpleName();

        // other
        if (dataType.equals(ImageData.class))
            return Image.class.getSimpleName();
        if (dataType.equals(ROIData.class))
            return Roi.class.getSimpleName();
        // Include the offending type in the message to ease debugging.
        throw new IllegalArgumentException(dataType.getSimpleName() + " not supported");
    }

    /**
     * Converts the specified type to its corresponding type for search.
     *
     * @param nodeType The type to convert.
     * @return See above.
     * @throws IllegalArgumentException If the type is not supported.
     */
    public static String convertTypeForSearch(Class nodeType)
    {
        if (nodeType.equals(Image.class) || nodeType.equals(ImageData.class))
            return ImageI.class.getName();
        else if (nodeType.equals(TagAnnotation.class) ||
                nodeType.equals(TagAnnotationData.class))
            return TagAnnotationI.class.getName();
        else if (nodeType.equals(BooleanAnnotation.class) ||
                nodeType.equals(BooleanAnnotationData.class))
            return BooleanAnnotationI.class.getName();
        else if (nodeType.equals(TermAnnotation.class) ||
                nodeType.equals(TermAnnotationData.class))
            return TermAnnotationI.class.getName();
        else if (nodeType.equals(FileAnnotation.class) ||
                nodeType.equals(FileAnnotationData.class))
            return FileAnnotationI.class.getName();
        else if (nodeType.equals(CommentAnnotation.class) ||
                nodeType.equals(TextualAnnotationData.class))
            return CommentAnnotationI.class.getName();
        else if (nodeType.equals(MapAnnotation.class) ||
                nodeType.equals(MapAnnotationData.class))
            return MapAnnotationI.class.getName();
        else if (nodeType.equals(TimestampAnnotation.class) ||
                nodeType.equals(TimeAnnotationData.class))
            return TimestampAnnotationI.class.getName();
        else if (nodeType.equals(Dataset.class) ||
                nodeType.equals(DatasetData.class))
            return DatasetI.class.getName();
        else if (nodeType.equals(Project.class) ||
                nodeType.equals(ProjectData.class))
            return ProjectI.class.getName();
        else if (nodeType.equals(Screen.class) ||
                nodeType.equals(ScreenData.class))
            return ScreenI.class.getName();
        else if (nodeType.equals(Well.class) ||
                nodeType.equals(WellData.class))
            return WellI.class.getName();
        else if (nodeType.equals(Plate.class) ||
                nodeType.equals(PlateData.class))
            return PlateI.class.getName();
        else if (nodeType.equals(PlateAcquisition.class) ||
                nodeType.equals(PlateAcquisitionData.class))
            return PlateAcquisitionI.class.getName();
        // Include the offending type in the message to ease debugging.
        throw new IllegalArgumentException(nodeType.getSimpleName() + " not supported");
    }

}
| gpl-2.0 |
TEC-Foundation/TEC-COMM | app/src/androidTest/java/com/nicholastmosher/easycom/ApplicationTest.java | 360 | package com.nicholastmosher.easycom;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
public class ApplicationTest extends ApplicationTestCase<Application> {
public ApplicationTest() {
super(Application.class);
}
} | gpl-2.0 |
AdmireTheDistance/android_libcore | luni/src/test/java/libcore/java/util/regex/OldMatcherTest.java | 22491 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package libcore.java.util.regex;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import junit.framework.TestCase;
public class OldMatcherTest extends TestCase {
String[] groupPatterns = { "(a|b)*aabb", "((a)|b)*aabb", "((a|b)*)a(abb)",
"(((a)|(b))*)aabb", "(((a)|(b))*)aa(b)b", "(((a)|(b))*)a(a(b)b)" };
public void testAppendReplacement() {
Pattern pat = Pattern.compile("XX");
Matcher m = pat.matcher("Today is XX-XX-XX ...");
StringBuffer sb = new StringBuffer();
for (int i = 0; m.find(); i++) {
m.appendReplacement(sb, new Integer(i * 10 + i).toString());
}
m.appendTail(sb);
assertEquals("Today is 0-11-22 ...", sb.toString());
pat = Pattern.compile("cat");
m = pat.matcher("one-cat-two-cats-in-the-yard");
sb = new StringBuffer();
Throwable t = null;
m.find();
try {
m.appendReplacement(null, "dog");
} catch (NullPointerException e) {
t = e;
}
assertNotNull(t);
t = null;
m.find();
try {
m.appendReplacement(sb, null);
} catch (NullPointerException e) {
t = e;
}
assertNotNull(t);
}
public void test_resetLjava_lang_String() {
String testPattern = "(abb)";
String testString1 = "babbabbcccabbabbabbabbabb";
String testString2 = "cddcddcddcddcddbbbb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString1);
while (mat.find());
assertEquals("Reset should return itself 1", mat, mat.reset(testString2));
assertFalse("After reset matcher should not find pattern in given input", mat.find());
assertEquals("Reset should return itself 2", mat, mat.reset(testString1));
assertTrue("After reset matcher should find pattern in given input", mat.find());
}
public void testAppendTail() {
Pattern p = Pattern.compile("cat");
Matcher m = p.matcher("one-cat-two-cats-in-the-yard");
StringBuffer sb = new StringBuffer();
while (m.find()) {
m.appendReplacement(sb, "dog");
}
m.appendTail(sb);
assertEquals("one-dog-two-dogs-in-the-yard", sb.toString());
p = Pattern.compile("cat|yard");
m = p.matcher("one-cat-two-cats-in-the-yard");
sb = new StringBuffer();
while (m.find()) {
m.appendReplacement(sb, "dog");
}
assertEquals("one-dog-two-dogs-in-the-dog", sb.toString());
m.appendTail(sb);
assertEquals("one-dog-two-dogs-in-the-dog", sb.toString());
p = Pattern.compile("cat");
m = p.matcher("one-cat-two-cats-in-the-yard");
sb = new StringBuffer();
while (m.find()) {
m.appendReplacement(sb, "dog");
}
Throwable t = null;
try {
m.appendTail(null);
} catch (NullPointerException e) {
t = e;
}
assertNotNull(t);
}
public void test_reset() {
String testPattern = "(abb)";
String testString = "babbabbcccabbabbabbabbabb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
while (mat.find());
assertEquals("Reset should return itself", mat, mat.reset());
assertTrue("After reset matcher should find pattern in given input", mat.find());
}
public void test_hasAnchoringBounds() {
String testPattern = "abb";
String testString = "abb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
assertTrue("Matcher uses anchoring bound by default",
mat.hasAnchoringBounds());
Matcher mu = mat.useAnchoringBounds(true);
assertTrue("Incorrect value of anchoring bounds",
mu.hasAnchoringBounds());
mu = mat.useAnchoringBounds(false);
assertFalse("Incorrect value of anchoring bounds",
mu.hasAnchoringBounds());
}
public void test_hasTransparentBounds() {
String testPattern = "abb";
String testString = "ab\nb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
assertFalse("Matcher uses opaque bounds by default",
mat.hasTransparentBounds());
Matcher mu = mat.useTransparentBounds(true);
assertTrue("Incorrect value of anchoring bounds",
mu.hasTransparentBounds());
mu = mat.useTransparentBounds(false);
assertFalse("Incorrect value of anchoring bounds",
mu.hasTransparentBounds());
}
public void test_startI() {
String testPattern = "(((abb)a)(bb))";
String testString = "cccabbabbabbabbabb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
int start = 3;
int end = 6;
int i, j;
for (j = 0; j < 3; j++) {
while (mat.find(start + j - 2)) {
for (i = 0; i < 4; i++) {
assertEquals("Start is wrong for group " + i + " :" + mat.group(i), start, mat.start(i));
}
assertEquals("Start is wrong for group " + i + " :" + mat.group(i), start + 4, mat.start(i));
start = end;
end += 3;
}
}
}
public void test_endI() {
String testPattern = "(((abb)a)(bb))";
String testString = "cccabbabbabbabbabb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
int start = 3;
int end = 6;
int i, j;
for (j = 0; j < 3; j++) {
while (mat.find(start + j - 2)) {
for (i = 0; i < 4; i++) {
assertEquals("End is wrong for group " + i + " :" + mat.group(i), start + mat.group(i).length(), mat.end(i));
}
assertEquals("End is wrong for group " + i + " :" + mat.group(i), start + 4 + mat.group(i).length(), mat.end(i));
start = end;
end += 3;
}
}
}
public void test_lookingAt() {
String testPattern = "(((abb)a)(bb))";
String testString1 = "babbabbcccabbabbabbabbabb";
String testString2 = "abbabb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat1 = pat.matcher(testString1);
Matcher mat2 = pat.matcher(testString2);
assertFalse("Should not find given pattern in 1 string", mat1.lookingAt());
mat1.region(1, 10);
assertTrue("Should find given pattern in region of string", mat1.lookingAt());
assertTrue("Should find given pattern in 2 string", mat2.lookingAt());
}
public void test_findI() {
String testPattern = "(abb)";
String testString = "cccabbabbabbabbabb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
int start = 3;
int end = 6;
int j;
for (j = 0; j < 3; j++) {
while (mat.find(start + j - 2)) {
assertEquals(start, mat.start(1));
assertEquals(end, mat.end(1));
start = end;
end += 3;
}
start = 6;
end = 9;
}
testPattern = "(\\d{1,3})";
testString = "aaaa123456789045";
Pattern pat2 = Pattern.compile(testPattern);
Matcher mat2 = pat2.matcher(testString);
start = 4;
int length = 3;
for (j = 0; j < length; j++) {
for (int i = 4 + j; i < testString.length() - length; i += length) {
mat2.find(i);
assertEquals(testString.substring(i, i + length), mat2.group(1));
}
}
String string3 = "Brave new world";
Pattern pat3 = Pattern.compile("new");
Matcher mat3 = pat3.matcher(string3);
// find(int) throws for out of range indexes.
try {
mat3.find(-1);
fail();
} catch (IndexOutOfBoundsException expected) {
}
assertFalse(mat3.find(string3.length()));
try {
mat3.find(string3.length() + 1);
fail();
} catch (IndexOutOfBoundsException expected) {
}
assertTrue(mat3.find(6));
assertFalse(mat3.find(7));
mat3.region(7, 10);
assertFalse(mat3.find()); // No "new" in the region.
assertTrue(mat3.find(3)); // find(int) ignores the region.
assertTrue(mat3.find(6)); // find(int) ignores the region.
assertFalse(mat3.find(7)); // No "new" >= 7.
mat3.region(1, 4);
assertFalse(mat3.find()); // No "new" in the region.
assertTrue(mat3.find(5)); // find(int) ignores the region.
}
public void testSEOLsymbols() {
Pattern pat = Pattern.compile("^a\\(bb\\[$");
Matcher mat = pat.matcher("a(bb[");
assertTrue(mat.matches());
}
public void test_start() {
String testPattern = "(abb)";
String testString = "cccabbabbabbabbabb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
int start = 3;
int end = 6;
int j;
for (j = 0; j < 3; j++) {
while (mat.find()) {
assertEquals("Start is wrong", start, mat.start());
start = end;
end += 3;
}
}
}
public void test_end() {
String testPattern = "(abb)";
String testString = "cccabbabbabbabbabb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
int start = 3;
int end = 6;
int j;
for (j = 0; j < 3; j++) {
while (mat.find()) {
assertEquals("Start is wrong", end, mat.end());
start = end;
end += 3;
}
}
}
public void testGroupCount() {
for (int i = 0; i < groupPatterns.length; i++) {
Pattern test = Pattern.compile(groupPatterns[i]);
Matcher mat = test.matcher("ababababbaaabb");
mat.matches();
assertEquals(i + 1, mat.groupCount());
}
}
public void testRegion() {
Pattern p = Pattern.compile("abba");
Matcher m = p.matcher("Gabba gabba hey");
m.region(0, 15);
assertTrue(m.find());
assertTrue(m.find());
assertFalse(m.find());
m.region(5, 15);
assertTrue(m.find());
assertFalse(m.find());
m.region(10, 15);
assertFalse(m.find());
Throwable t = null;
try {
m.region(-1, 15);
} catch (IndexOutOfBoundsException e) {
t = e;
}
assertNotNull(t);
t = null;
try {
m.region(0, 16);
} catch (IndexOutOfBoundsException e) {
t = e;
}
assertNotNull(t);
}
public void testMatchesURI() {
Pattern pat = Pattern.
compile("^(//([^/?#]*))?([^?#]*)(\\?([^#]*))?(#(.*))?");
Matcher mat = pat
.matcher("file:/c:/workspace/api/build.win32/classes/META-INF/"
+ "services/javax.xml.parsers.DocumentBuilderFactory");
assertTrue(mat.matches());
}
public void testQuoteReplacement() {
assertEquals("\\$dollar and slash\\\\", Matcher.quoteReplacement("$dollar and slash\\"));
}
public void testUnicode() {
assertTrue(Pattern.compile("\\x61a").matcher("aa").matches());
// assertTrue(Pattern.matches("\\u0061a", "aa"));
assertTrue(Pattern.compile("\\0141a").matcher("aa").matches());
assertTrue(Pattern.compile("\\0777").matcher("?7").matches());
}
public void testUnicodeCategory() {
assertTrue(Pattern.compile("\\p{Ll}").matcher("k").matches()); // Unicode lower case
assertTrue(Pattern.compile("\\P{Ll}").matcher("K").matches()); // Unicode non-lower
// case
assertTrue(Pattern.compile("\\p{Lu}").matcher("K").matches()); // Unicode upper case
assertTrue(Pattern.compile("\\P{Lu}").matcher("k").matches()); // Unicode non-upper
// case
// combinations
assertTrue(Pattern.compile("[\\p{L}&&[^\\p{Lu}]]").matcher("k").matches());
assertTrue(Pattern.compile("[\\p{L}&&[^\\p{Ll}]]").matcher("K").matches());
assertFalse(Pattern.compile("[\\p{L}&&[^\\p{Lu}]]").matcher("K").matches());
assertFalse(Pattern.compile("[\\p{L}&&[^\\p{Ll}]]").matcher("k").matches());
// category/character combinations
assertFalse(Pattern.compile("[\\p{L}&&[^a-z]]").matcher("k").matches());
assertTrue(Pattern.compile("[\\p{L}&&[^a-z]]").matcher("K").matches());
assertTrue(Pattern.compile("[\\p{Lu}a-z]").matcher("k").matches());
assertTrue(Pattern.compile("[a-z\\p{Lu}]").matcher("k").matches());
assertFalse(Pattern.compile("[\\p{Lu}a-d]").matcher("k").matches());
assertTrue(Pattern.compile("[a-d\\p{Lu}]").matcher("K").matches());
// assertTrue(Pattern.matches("[\\p{L}&&[^\\p{Lu}&&[^K]]]", "K"));
assertFalse(Pattern.compile("[\\p{L}&&[^\\p{Lu}&&[^G]]]").matcher("K").matches());
}
public void test_regionStart() {
String testPattern = "(abb)";
String testString = "cccabbabbabbabbabb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
assertEquals("Region sould start from 0 position", 0, mat.regionStart());
mat.region(1, 10);
assertEquals("Region sould start from 1 position after setting new region", 1, mat.regionStart());
mat.reset();
assertEquals("Region sould start from 0 position after reset", 0, mat.regionStart());
}
public void test_regionEnd() {
String testPattern = "(abb)";
String testString = "cccabbabbabbabbabb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
assertEquals("Region end value should be equal to string length", testString.length(), mat.regionEnd());
mat.region(1, 10);
assertEquals("Region end value should be equal to 10 after setting new region", 10, mat.regionEnd());
mat.reset();
assertEquals("Region end value should be equal to string length after reset", testString.length(), mat.regionEnd());
}
public void test_toMatchResult() {
String testPattern = "(((abb)a)(bb))";
String testString = "babbabbcccabbabbabbabbabb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
mat.region(1, 7);
assertTrue("matcher should find pattern in given region", mat.matches());
assertEquals("matched section should start from 1 position", 1, mat.toMatchResult().start());
assertEquals("matched section for 2 group should start from 1 position", 1, mat.toMatchResult().start(2));
assertEquals("matched section for whole pattern should end on 7 position", 7, mat.toMatchResult().end());
assertEquals("matched section for 3 group should end at 4 position", 4, mat.toMatchResult().end(3));
assertEquals("group not matched", "abbabb", mat.toMatchResult().group());
assertEquals("3 group not matched", "abb", mat.toMatchResult().group(3));
assertEquals("Total number of groups does not matched with given pattern", 4, mat.toMatchResult().groupCount());
}
public void test_usePatternLjava_util_regex_Pattern() {
String testPattern1 = "(((abb)a)(bb))";
String testPattern2 = "(abbabb)";
String testPattern3 = "(babb)";
String testString = "babbabbcccabbabbabbabbabb";
Pattern pat = Pattern.compile(testPattern1);
Matcher mat = pat.matcher(testString);
mat.region(1, 7);
assertTrue("matcher should find pattern in given region in case of groupe in pattern", mat.matches());
assertEquals("", mat, mat.usePattern(Pattern.compile(testPattern2)));
assertTrue("matcher should find pattern in given region", mat.matches());
assertEquals("", mat, mat.usePattern(Pattern.compile(testPattern3)));
assertFalse("matcher should not find pattern in given region", mat.matches());
}
public void test_anchoringBounds() {
String testPattern = "^ro$";
String testString = "android";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
mat.region(2, 5);
mat.useAnchoringBounds(false);
assertFalse("Shouldn't find pattern with non-anchoring bounds", mat.find(0));
mat.region(2, 5);
mat.useAnchoringBounds(true);
assertFalse("Should find pattern with anchoring bounds", mat.find(0));
}
public void test_transparentBounds() {
String testPattern = "and(?=roid)";
String testString = "android";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
mat.region(0, 3);
mat.useTransparentBounds(false);
assertFalse("Shouldn't find pattern with opaque bounds", mat.matches());
mat.useTransparentBounds(true);
assertTrue("Should find pattern transparent bounds", mat.matches()); // ***
testPattern = "and(?!roid)";
testString = "android";
pat = Pattern.compile(testPattern);
mat = pat.matcher(testString);
mat.region(0, 3);
mat.useTransparentBounds(false);
assertTrue("Should find pattern with opaque bounds", mat.matches());
mat.useTransparentBounds(true);
assertFalse("Shouldn't find pattern transparent bounds", mat.matches()); // ***
}
public void test_hitEnd() {
String testPattern = "abb";
String testString = "babbabbcccabbabbabbabbabb";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
while (mat.find()) {
assertFalse("hitEnd should return false during parsing input", mat.hitEnd());
}
assertTrue("hitEnd should return true after finding last match", mat.hitEnd()); // ***
}
public void test_requireEnd() {
String testPattern = "bba";
String testString = "abbbbba";
Pattern pat = Pattern.compile(testPattern);
Matcher mat = pat.matcher(testString);
assertTrue(mat.find());
assertFalse(mat.requireEnd());
testPattern = "bba$";
testString = "abbbbba";
pat = Pattern.compile(testPattern);
mat = pat.matcher(testString);
assertTrue(mat.find());
assertTrue(mat.requireEnd());
}
/*
* Regression test for HARMONY-674
*/
public void testPatternMatcher() throws Exception {
Pattern pattern = Pattern.compile("(?:\\d+)(?:pt)");
assertTrue(pattern.matcher("14pt").matches());
}
public void testUnicodeCharacterClasses() throws Exception {
// http://code.google.com/p/android/issues/detail?id=21176
// We use the Unicode TR-18 definitions: http://www.unicode.org/reports/tr18/#Compatibility_Properties
assertTrue("\u0666".matches("\\d")); // ARABIC-INDIC DIGIT SIX
assertFalse("\u0666".matches("\\D")); // ARABIC-INDIC DIGIT SIX
assertTrue("\u1680".matches("\\s")); // OGHAM SPACE MARK
assertFalse("\u1680".matches("\\S")); // OGHAM SPACE MARK
assertTrue("\u00ea".matches("\\w")); // LATIN SMALL LETTER E WITH CIRCUMFLEX
assertFalse("\u00ea".matches("\\W")); // LATIN SMALL LETTER E WITH CIRCUMFLEX
}
// http://code.google.com/p/android/issues/detail?id=41143
public void testConcurrentMatcherAccess() throws Exception {
final Pattern p = Pattern.compile("(^|\\W)([a-z])");
final Matcher m = p.matcher("");
ArrayList<Thread> threads = new ArrayList<Thread>();
for (int i = 0; i < 10; ++i) {
Thread t = new Thread(new Runnable() {
public void run() {
for (int i = 0; i < 4096; ++i) {
String s = "some example text";
m.reset(s);
try {
StringBuffer sb = new StringBuffer(s.length());
while (m.find()) {
m.appendReplacement(sb, m.group(1) + m.group(2));
}
m.appendTail(sb);
} catch (Exception expected) {
// This code is inherently unsafe and crazy;
// we're just trying to provoke native crashes!
}
}
}
});
threads.add(t);
}
for (Thread t : threads) {
t.start();
}
for (Thread t : threads) {
t.join();
}
}
// https://code.google.com/p/android/issues/detail?id=33040
public void test33040() throws Exception {
Pattern p = Pattern.compile("ma");
// replaceFirst resets the region; apparently, this was broken in Android 1.6.
String result = p.matcher("mama").region(2, 4).replaceFirst("mi");
assertEquals("mima", result);
}
}
| gpl-2.0 |
LiHaoTan/teammates | src/main/java/teammates/ui/controller/InstructorStudentListPageAction.java | 2797 | package teammates.ui.controller;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import teammates.common.datatransfer.attributes.CourseAttributes;
import teammates.common.datatransfer.attributes.InstructorAttributes;
import teammates.common.util.Const;
import teammates.common.util.StatusMessage;
import teammates.common.util.StatusMessageColor;
import teammates.ui.datatransfer.InstructorStudentListPageCourseData;
import teammates.ui.pagedata.InstructorStudentListPageData;
public class InstructorStudentListPageAction extends Action {
@Override
public ActionResult execute() {
gateKeeper.verifyInstructorPrivileges(account);
String searchKey = getRequestParamValue(Const.ParamsNames.SEARCH_KEY);
Boolean displayArchive = getRequestParamAsBoolean(Const.ParamsNames.DISPLAY_ARCHIVE);
Map<String, InstructorAttributes> instructors = new HashMap<>();
List<CourseAttributes> courses = logic.getCoursesForInstructor(account.googleId);
// Sort by creation date
courses.sort(Comparator.comparing(course -> course.createdAt));
// Get instructor attributes
List<InstructorAttributes> instructorList = logic.getInstructorsForGoogleId(account.googleId);
for (InstructorAttributes instructor : instructorList) {
instructors.put(instructor.courseId, instructor);
}
if (courses.isEmpty()) {
statusToUser.add(new StatusMessage(Const.StatusMessages.INSTRUCTOR_NO_COURSE_AND_STUDENTS,
StatusMessageColor.WARNING));
}
statusToAdmin = "instructorStudentList Page Load<br>" + "Total Courses: " + courses.size();
List<InstructorStudentListPageCourseData> coursesToDisplay = new ArrayList<>();
for (CourseAttributes course : courses) {
InstructorAttributes instructor = instructors.get(course.getId());
boolean isInstructorAllowedToModify = instructor.isAllowedForPrivilege(
Const.ParamsNames.INSTRUCTOR_PERMISSION_MODIFY_STUDENT);
boolean isCourseDisplayed = displayArchive || !instructor.isArchived;
if (isCourseDisplayed) {
coursesToDisplay.add(new InstructorStudentListPageCourseData(course, instructor.isArchived,
isInstructorAllowedToModify));
}
}
InstructorStudentListPageData data =
new InstructorStudentListPageData(account, sessionToken, searchKey, displayArchive, coursesToDisplay);
return createShowPageResult(Const.ViewURIs.INSTRUCTOR_STUDENT_LIST, data);
}
}
| gpl-2.0 |
smalyshev/blazegraph | bigdata/src/java/com/bigdata/striterator/PushbackIterator.java | 2239 | package com.bigdata.striterator;
import java.util.Iterator;
import java.util.NoSuchElementException;
import cutthecrap.utils.striterators.ICloseableIterator;
/**
* Allows pushback of the most recently visited element onto the iterator.
* <p>
* Note: There is no corresponding {@link IFilter} pattern for this class since
* you need the interface offered by the {@link PushbackIterator} in order to
* use pushback.
*
* @author <a href="mailto:thompsonbry@users.sourceforge.net">Bryan Thompson</a>
* @version $Id$
* @param <E>
*/
public class PushbackIterator<E> implements Iterator<E>, ICloseableIterator<E> {
private final Iterator<E> src;
/**
* The most recent element visited by the iterator.
*/
private E current;
/**
* When non-<code>null</code>, this element was pushed back and is
* the next element to be visited.
*/
private E buffer;
public PushbackIterator(final Iterator<E> src) {
if (src == null)
throw new IllegalArgumentException();
this.src = src;
}
@Override
public boolean hasNext() {
return buffer != null || src.hasNext();
}
@Override
public E next() {
if (!hasNext())
throw new NoSuchElementException();
final E tmp;
if (buffer != null) {
tmp = buffer;
buffer = null;
} else {
tmp = src.next();
}
current = tmp;
return tmp;
}
/**
* Push the value onto the internal buffer. It will be returned by the
* next call to {@link #next()}.
*
* @param value
* The value.
*
* @throws IllegalStateException
* if there is already a value pushed back.
*/
public void pushback() {
if (buffer != null)
throw new IllegalStateException();
// pushback the last visited element.
buffer = current;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
@Override
public void close() {
if (src instanceof ICloseableIterator) {
((ICloseableIterator<E>) src).close();
}
}
}
| gpl-2.0 |
jdahaldev/itsimple | src/gui/InnerFrame.java | 30854 | /***
* itSIMPLE: Integrated Tool Software Interface for Modeling PLanning Environments
*
* Copyright (C) 2007,2008 Universidade de Sao Paulo
*
* This file is part of itSIMPLE.
*
* itSIMPLE is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version. Other licenses might be available
* upon written agreement.
*
* itSIMPLE is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with itSIMPLE. If not, see <http://www.gnu.org/licenses/>.
*
* Authors: Tiago S. Vaquero,
* Victor Romero.
**/
package src.gui;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Container;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.GridLayout;
import java.awt.LayoutManager;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JLayeredPane;
import javax.swing.JMenuBar;
import javax.swing.JPanel;
import javax.swing.JRootPane;
import javax.swing.RootPaneContainer;
import javax.swing.border.EmptyBorder;
import javax.swing.event.MouseInputAdapter;
public class InnerFrame extends JPanel implements RootPaneContainer
{
/**
*
*/
private static final long serialVersionUID = -2055541883521748081L;
private static ImageIcon ICONIZE_BUTTON_ICON = new ImageIcon("resources/images/state.png");
private static ImageIcon RESTORE_BUTTON_ICON = new ImageIcon("resources/images/eRight.png");
private static ImageIcon CLOSE_BUTTON_ICON = new ImageIcon("resources/images/close.png");
private static ImageIcon MAXIMIZE_BUTTON_ICON = new ImageIcon("resources/images/zoomIN.png");
private static ImageIcon MINIMIZE_BUTTON_ICON = new ImageIcon("resources/images/zoomOUT.png");
private static ImageIcon PRESS_CLOSE_BUTTON_ICON = new ImageIcon("pressclose.gif");
private static ImageIcon PRESS_RESTORE_BUTTON_ICON = new ImageIcon("pressrestore.gif");
private static ImageIcon PRESS_ICONIZE_BUTTON_ICON = new ImageIcon("pressiconize.gif");
private static ImageIcon PRESS_MAXIMIZE_BUTTON_ICON = new ImageIcon("pressmaximize.gif");
private static ImageIcon PRESS_MINIMIZE_BUTTON_ICON = new ImageIcon("pressminimize.gif");
private static ImageIcon DEFAULT_FRAME_ICON = new ImageIcon("default.gif");
private static int BORDER_THICKNESS = 4;
private static int WIDTH = 200;
private static int HEIGHT = 200;
private static int TITLE_BAR_HEIGHT = 25;
private static int FRAME_ICON_PADDING = 2;
private static int ICONIZED_WIDTH = 150;
private static Color TITLE_BAR_BG_COLOR = new Color(108,190,116);
private static Color BORDER_COLOR = new Color(8,90,16);
private int m_titleBarHeight = TITLE_BAR_HEIGHT;
private int m_width = WIDTH;
private int m_height = HEIGHT;
//private int m_iconizedWidth = ICONIZED_WIDTH;
private int m_x;
private int m_y;
private String m_title;
private JLabel m_titleLabel;
private JLabel m_iconLabel;
private boolean m_iconified;
private boolean m_maximized;
private boolean m_iconizeable;
private boolean m_resizeable;
private boolean m_closeable;
private boolean m_maximizeable;
// only false when maximized
private boolean m_draggable = true;
private JRootPane m_rootPane;
// used to wrap m_titlePanel and m_rootPane
private JPanel m_frameContentPanel;
private JPanel m_titlePanel;
private JPanel m_contentPanel;
private JPanel m_buttonPanel;
private JPanel m_buttonWrapperPanel;
private InnerFrameButton m_iconize;
private InnerFrameButton m_close;
private InnerFrameButton m_maximize;
private ImageIcon m_frameIcon = DEFAULT_FRAME_ICON;
private NorthResizeEdge m_northResizer;
private SouthResizeEdge m_southResizer;
private EastResizeEdge m_eastResizer;
private WestResizeEdge m_westResizer;
public InnerFrame() {
this("");
}
public InnerFrame(String title) {
this(title, null);
}
public InnerFrame(String title, ImageIcon frameIcon) {
this(title, frameIcon, true, true, true, true);
}
public InnerFrame(String title, ImageIcon frameIcon,
boolean resizeable, boolean iconizeable,
boolean maximizeable, boolean closeable) {
super.setLayout(new BorderLayout());
attachNorthResizeEdge();
attachSouthResizeEdge();
attachEastResizeEdge();
attachWestResizeEdge();
populateInnerFrame();
setTitle(title);
setResizeable(resizeable);
setIconizeable(iconizeable);
setCloseable(closeable);
setMaximizeable(maximizeable);
if (frameIcon != null)
setFrameIcon(frameIcon);
}
protected void populateInnerFrame() {
m_rootPane = new JRootPane();
m_frameContentPanel = new JPanel();
m_frameContentPanel.setLayout(new BorderLayout());
createTitleBar();
m_contentPanel = new JPanel(new BorderLayout());
m_rootPane.setContentPane(m_contentPanel);
m_frameContentPanel.add(m_titlePanel, BorderLayout.NORTH);
m_frameContentPanel.add(m_rootPane, BorderLayout.CENTER);
setupCapturePanel();
super.add(m_frameContentPanel, BorderLayout.CENTER);
}
protected void setupCapturePanel() {
CapturePanel mouseTrap = new CapturePanel();
m_rootPane.getLayeredPane().add(mouseTrap,
new Integer(Integer.MIN_VALUE));
mouseTrap.setBounds(0,0,10000,10000);
}
// don't allow this in root pane containers
public Component add(Component c) {
return null;
}
// don't allow this in root pane containers
public void setLayout(LayoutManager mgr) {
}
public JMenuBar getJMenuBar() {
return m_rootPane.getJMenuBar();
}
public JRootPane getRootPane() {
return m_rootPane;
}
public Container getContentPane() {
return m_rootPane.getContentPane();
}
public Component getGlassPane() {
return m_rootPane.getGlassPane();
}
public JLayeredPane getLayeredPane() {
return m_rootPane.getLayeredPane();
}
public void setJMenuBar(JMenuBar menu) {
m_rootPane.setJMenuBar(menu);
}
public void setContentPane(Container content) {
m_rootPane.setContentPane(content);
}
public void setGlassPane(Component glass) {
m_rootPane.setGlassPane(glass);
}
public void setLayeredPane(JLayeredPane layered) {
m_rootPane.setLayeredPane(layered);
}
public void toFront() {
if (getParent() instanceof JLayeredPane)
((JLayeredPane) getParent()).moveToFront(this);
}
public void close() {
if (getParent() instanceof JLayeredPane) {
JLayeredPane jlp = (JLayeredPane) getParent();
jlp.remove(InnerFrame.this);
jlp.repaint();
}
}
public boolean isIconizeable() {
return m_iconizeable;
}
public void setIconizeable(boolean b) {
m_iconizeable = b;
m_iconize.setVisible(b);
m_titlePanel.revalidate();
}
public boolean isCloseable() {
return m_closeable;
}
public void setCloseable(boolean b) {
m_closeable = b;
m_close.setVisible(b);
m_titlePanel.revalidate();
}
public boolean isMaximizeable() {
return m_maximizeable;
}
public void setMaximizeable(boolean b) {
m_maximizeable = b;
m_maximize.setVisible(b);
m_titlePanel.revalidate();
}
public boolean isIconified() {
return m_iconified;
}
public void setIconified(boolean b) {
m_iconified = b;
if (b) {
if (isMaximized())
setMaximized(false);
toFront();
m_width = getWidth(); // remember width
m_height = getHeight(); // remember height
setBounds(getX(), getY(), ICONIZED_WIDTH,
m_titleBarHeight + 2*BORDER_THICKNESS);
m_iconize.setIcon(RESTORE_BUTTON_ICON);
m_iconize.setPressedIcon(PRESS_RESTORE_BUTTON_ICON);
setResizeable(false);
}
else {
toFront();
setBounds(getX(), getY(), m_width, m_height);
m_iconize.setIcon(ICONIZE_BUTTON_ICON);
m_iconize.setPressedIcon(PRESS_ICONIZE_BUTTON_ICON);
setResizeable(true);
}
revalidate();
}
public boolean isMaximized() {
return m_maximized;
}
public void setMaximized(boolean b) {
m_maximized = b;
if (b)
{
if (isIconified())
setIconified(false);
toFront();
m_width = getWidth(); // remember width
m_height = getHeight(); // remember height
m_x = getX(); // remember x
m_y = getY(); // remember y
setBounds(0, 0, getParent().getWidth(), getParent().getHeight());
m_maximize.setIcon(MINIMIZE_BUTTON_ICON);
m_maximize.setPressedIcon(PRESS_MINIMIZE_BUTTON_ICON);
setResizeable(false);
setDraggable(false);
}
else {
toFront();
setBounds(m_x, m_y, m_width, m_height);
m_maximize.setIcon(MAXIMIZE_BUTTON_ICON);
m_maximize.setPressedIcon(PRESS_MAXIMIZE_BUTTON_ICON);
setResizeable(true);
setDraggable(true);
}
revalidate();
}
////////////////////////////////////////////
//////////////// Title Bar /////////////////
////////////////////////////////////////////
public void setFrameIcon(ImageIcon fi) {
m_frameIcon = fi;
if (fi != null) {
if (m_frameIcon.getIconHeight() > TITLE_BAR_HEIGHT)
setTitleBarHeight(m_frameIcon.getIconHeight() + 2*FRAME_ICON_PADDING);
m_iconLabel.setIcon(m_frameIcon);
}
else setTitleBarHeight(TITLE_BAR_HEIGHT);
revalidate();
}
public ImageIcon getFrameIcon() {
return m_frameIcon;
}
public void setTitle(String s) {
m_title = s;
m_titleLabel.setText(s);
m_titlePanel.repaint();
}
public String getTitle() {
return m_title;
}
  /** Sets the title-bar height in pixels; takes effect on the next layout. */
  public void setTitleBarHeight(int h) {
    m_titleBarHeight = h;
  }
  /** Returns the title-bar height in pixels. */
  public int getTitleBarHeight() {
    return m_titleBarHeight;
  }
  /** Returns true if the frame may currently be dragged by its title bar. */
  public boolean isDraggable() {
    return m_draggable;
  }
  // Private on purpose: dragging is only toggled internally
  // (e.g. it is disabled while the frame is maximized).
  private void setDraggable(boolean b) {
    m_draggable = b;
  }
  // create the title bar: m_titlePanel
  /**
   * Builds the title bar: an icon label on the west, the title text in the
   * center and the iconize/maximize/close buttons on the east, plus a mouse
   * adapter that implements dragging by the title bar.
   */
  protected void createTitleBar() {
    // Anonymous subclass so the title bar always spans the frame's width.
    m_titlePanel = new JPanel() {
      // serialization id
      private static final long serialVersionUID = -948066492361091787L;
      public Dimension getPreferredSize() {
        return new Dimension(InnerFrame.this.getWidth(),
          m_titleBarHeight);
      }
    };
    m_titlePanel.setLayout(new BorderLayout());
    m_titlePanel.setOpaque(true);
    m_titlePanel.setBackground(TITLE_BAR_BG_COLOR);
    m_titleLabel = new JLabel();
    m_titleLabel.setForeground(Color.black);
    // Close button simply delegates to the frame's close().
    m_close = new InnerFrameButton(CLOSE_BUTTON_ICON);
    m_close.setPressedIcon(PRESS_CLOSE_BUTTON_ICON);
    m_close.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        InnerFrame.this.close();
      }
    });
    // Maximize and iconize buttons toggle their respective states.
    m_maximize = new InnerFrameButton(MAXIMIZE_BUTTON_ICON);
    m_maximize.setPressedIcon(PRESS_MAXIMIZE_BUTTON_ICON);
    m_maximize.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        InnerFrame.this.setMaximized(!InnerFrame.this.isMaximized());
      }
    });
    m_iconize = new InnerFrameButton(ICONIZE_BUTTON_ICON);
    m_iconize.setPressedIcon(PRESS_ICONIZE_BUTTON_ICON);
    m_iconize.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        InnerFrame.this.setIconified(!InnerFrame.this.isIconified());
      }
    });
    // Wrapper panel centers the 1x3 button grid vertically in the bar.
    m_buttonWrapperPanel = new JPanel();
    m_buttonWrapperPanel.setOpaque(false);
    m_buttonPanel = new JPanel(new GridLayout(1,3));
    m_buttonPanel.setOpaque(false);
    m_buttonPanel.add(m_iconize);
    m_buttonPanel.add(m_maximize);
    m_buttonPanel.add(m_close);
    m_buttonPanel.setAlignmentX(0.5f);
    m_buttonPanel.setAlignmentY(0.5f);
    m_buttonWrapperPanel.add(m_buttonPanel);
    m_iconLabel = new JLabel();
    m_iconLabel.setBorder(new EmptyBorder(
      FRAME_ICON_PADDING, FRAME_ICON_PADDING,
      FRAME_ICON_PADDING, FRAME_ICON_PADDING));
    if (m_frameIcon != null)
      m_iconLabel.setIcon(m_frameIcon);
    m_titlePanel.add(m_titleLabel, BorderLayout.CENTER);
    m_titlePanel.add(m_buttonWrapperPanel, BorderLayout.EAST);
    m_titlePanel.add(m_iconLabel, BorderLayout.WEST);
    // One adapter serves as both mouse and mouse-motion listener (drag).
    InnerFrameTitleBarMouseAdapter iftbma =
      new InnerFrameTitleBarMouseAdapter(this);
    m_titlePanel.addMouseListener(iftbma);
    m_titlePanel.addMouseMotionListener(iftbma);
  }
// title bar mouse adapter for frame dragging
class InnerFrameTitleBarMouseAdapter
extends MouseInputAdapter
{
InnerFrame m_if;
int m_XDifference, m_YDifference;
boolean m_dragging;
public InnerFrameTitleBarMouseAdapter(InnerFrame inf) {
m_if = inf;
}
// don't allow dragging outside of parent
public void mouseDragged(MouseEvent e) {
int ex = e.getX();
int ey = e.getY();
int x = m_if.getX();
int y = m_if.getY();
int w = m_if.getParent().getWidth();
int h = m_if.getParent().getHeight();
if (m_dragging & m_if.isDraggable()) {
if ((ey + y > 0 && ey + y < h) && (ex + x > 0 && ex + x < w))
m_if.setLocation(ex-m_XDifference + x, ey-m_YDifference + y);
else if (!(ey + y > 0 && ey + y < h) && (ex + x > 0 && ex + x < w)) {
if (!(ey + y > 0) && ey + y < h)
m_if.setLocation(ex-m_XDifference + x, 0-m_YDifference);
else if (ey + y > 0 && !(ey + y < h))
m_if.setLocation(ex-m_XDifference + x, h-m_YDifference);
}
else if ((ey + y > 0 && ey + y < h) && !(ex + x > 0 && ex + x < w)) {
if (!(ex + x > 0) && ex + x < w)
m_if.setLocation(0-m_XDifference, ey-m_YDifference + y);
else if (ex + x > 0 && !(ex + x < w))
m_if.setLocation(w-m_XDifference, ey-m_YDifference + y);
}
else if (!(ey + y > 0) && ey + y < h && !(ex + x > 0) && ex + x < w)
m_if.setLocation(0-m_XDifference, 0-m_YDifference);
else if (!(ey + y > 0) && ey + y < h && ex + x > 0 && !(ex + x < w))
m_if.setLocation(w-m_XDifference, 0-m_YDifference);
else if (ey + y > 0 && !(ey + y < h) && !(ex + x > 0) && ex + x < w)
m_if.setLocation(0-m_XDifference, h-m_YDifference);
else if (ey + y > 0 && !(ey + y < h) && ex + x > 0 && !(ex + x < w))
m_if.setLocation(w-m_XDifference, h-m_YDifference);
}
}
public void mousePressed(MouseEvent e) {
m_if.toFront();
m_XDifference = e.getX();
m_YDifference = e.getY();
m_dragging = true;
}
public void mouseReleased(MouseEvent e) {
m_dragging = false;
}
}
// custom button class for title bar
class InnerFrameButton extends JButton
{
/**
*
*/
private static final long serialVersionUID = 2630578175294290708L;
Dimension m_dim;
public InnerFrameButton(ImageIcon ii) {
super(ii);
m_dim = new Dimension(ii.getIconWidth(), ii.getIconHeight());
setOpaque(false);
setContentAreaFilled(false);
setBorder(null);
}
public Dimension getPreferredSize() {
return m_dim;
}
public Dimension getMinimumSize() {
return m_dim;
}
public Dimension getMaximumSize() {
return m_dim;
}
}
///////////////////////////////////////////////
/////////// Mouse Event Capturing /////////////
///////////////////////////////////////////////
  /**
   * Transparent panel that registers a do-nothing mouse adapter. Having
   * listeners attached makes the panel consume mouse events, preventing
   * clicks from "falling through" the frame to components beneath it.
   */
  class CapturePanel extends JPanel
  {
    // serialization id
    private static final long serialVersionUID = 3318207365877407128L;
    public CapturePanel() {
      // The adapter overrides nothing; its mere presence captures events.
      MouseInputAdapter mia = new MouseInputAdapter() {};
      addMouseListener(mia);
      addMouseMotionListener(mia);
    }
  }
///////////////////////////////////////////////
//////////////// Resizability /////////////////
///////////////////////////////////////////////
  /** Returns true if the frame's edges currently respond to resize drags. */
  public boolean isResizeable() {
    return m_resizeable;
  }
public void setResizeable(boolean b) {
if (!b && m_resizeable == true) {
m_northResizer.removeMouseListener(m_northResizer);
m_northResizer.removeMouseMotionListener(m_northResizer);
m_southResizer.removeMouseListener(m_southResizer);
m_southResizer.removeMouseMotionListener(m_southResizer);
m_eastResizer.removeMouseListener(m_eastResizer);
m_eastResizer.removeMouseMotionListener(m_eastResizer);
m_westResizer.removeMouseListener(m_westResizer);
m_westResizer.removeMouseMotionListener(m_westResizer);
}
else if (b && m_resizeable == false) {
m_northResizer.addMouseListener(m_northResizer);
m_northResizer.addMouseMotionListener(m_northResizer);
m_southResizer.addMouseListener(m_southResizer);
m_southResizer.addMouseMotionListener(m_southResizer);
m_eastResizer.addMouseListener(m_eastResizer);
m_eastResizer.addMouseMotionListener(m_eastResizer);
m_westResizer.addMouseListener(m_westResizer);
m_westResizer.addMouseMotionListener(m_westResizer);
}
m_resizeable = b;
}
  /** Creates the north resize edge and docks it at the top border. */
  protected void attachNorthResizeEdge() {
    m_northResizer = new NorthResizeEdge(this);
    // NOTE(review): super.add() suggests add() is overridden elsewhere
    // (e.g. to target a content pane); confirm against the full class.
    super.add(m_northResizer, BorderLayout.NORTH);
  }
  /** Creates the south resize edge and docks it at the bottom border. */
  protected void attachSouthResizeEdge() {
    m_southResizer = new SouthResizeEdge(this);
    // super.add() bypasses any overridden add() on this frame.
    super.add(m_southResizer, BorderLayout.SOUTH);
  }
  /** Creates the east resize edge and docks it at the right border. */
  protected void attachEastResizeEdge() {
    m_eastResizer = new EastResizeEdge(this);
    // super.add() bypasses any overridden add() on this frame.
    super.add(m_eastResizer, BorderLayout.EAST);
  }
  /** Creates the west resize edge and docks it at the left border. */
  protected void attachWestResizeEdge() {
    m_westResizer = new WestResizeEdge(this);
    // super.add() bypasses any overridden add() on this frame.
    super.add(m_westResizer, BorderLayout.WEST);
  }
class EastResizeEdge extends JPanel
implements MouseListener, MouseMotionListener {
/**
*
*/
private static final long serialVersionUID = -2372192134491782701L;
private int WIDTH = BORDER_THICKNESS;
private int MIN_WIDTH = ICONIZED_WIDTH;
private boolean m_dragging;
private JComponent m_resizeComponent;
protected EastResizeEdge(JComponent c) {
m_resizeComponent = c;
setOpaque(true);
setBackground(BORDER_COLOR);
}
public Dimension getPreferredSize() {
return new Dimension(WIDTH, m_resizeComponent.getHeight());
}
public void mouseClicked(MouseEvent e) {}
public void mouseMoved(MouseEvent e) {}
public void mouseReleased(MouseEvent e) {
m_dragging = false;
}
public void mouseDragged(MouseEvent e) {
if (m_resizeComponent.getWidth() + e.getX() >= MIN_WIDTH)
m_resizeComponent.setBounds(m_resizeComponent.getX(),
m_resizeComponent.getY(),
m_resizeComponent.getWidth() + e.getX(),
m_resizeComponent.getHeight());
else
m_resizeComponent.setBounds(m_resizeComponent.getX(),
m_resizeComponent.getY(),
MIN_WIDTH, m_resizeComponent.getHeight());
m_resizeComponent.validate();
}
public void mouseEntered(MouseEvent e) {
if (!m_dragging)
setCursor(Cursor.getPredefinedCursor(
Cursor.E_RESIZE_CURSOR));
}
public void mouseExited(MouseEvent e) {
if (!m_dragging)
setCursor(Cursor.getPredefinedCursor(
Cursor.DEFAULT_CURSOR));
}
public void mousePressed(MouseEvent e) {
toFront();
m_dragging = true;
}
}
  /**
   * Thin strip along the left border that resizes the frame horizontally.
   * Dragging moves the left edge while the right edge (m_rightX, captured
   * at press time) stays fixed; width never drops below MIN_WIDTH.
   */
  class WestResizeEdge extends JPanel
    implements MouseListener, MouseMotionListener {
    // serialization id
    private static final long serialVersionUID = -8451514910533583780L;
    private int WIDTH = BORDER_THICKNESS;
    private int MIN_WIDTH = ICONIZED_WIDTH;
    // m_dragX: press offset within the strip; m_rightX: fixed right edge.
    private int m_dragX, m_rightX;
    private boolean m_dragging;
    private JComponent m_resizeComponent;
    protected WestResizeEdge(JComponent c) {
      m_resizeComponent = c;
      setOpaque(true);
      setBackground(BORDER_COLOR);
    }
    public Dimension getPreferredSize() {
      return new Dimension(WIDTH, m_resizeComponent.getHeight());
    }
    public void mouseClicked(MouseEvent e) {}
    public void mouseMoved(MouseEvent e) {}
    public void mouseReleased(MouseEvent e) {
      m_dragging = false;
    }
    public void mouseDragged(MouseEvent e) {
      // Normal case: shrink/grow by the horizontal drag delta, moving x
      // by the same amount so the right edge stays put.
      if (m_resizeComponent.getWidth()-
        (e.getX()-m_dragX) >= MIN_WIDTH)
        m_resizeComponent.setBounds(
          m_resizeComponent.getX() + (e.getX()-m_dragX),
          m_resizeComponent.getY(),
          m_resizeComponent.getWidth()-(e.getX()-m_dragX),
          m_resizeComponent.getHeight());
      else
        // Width would fall below the minimum: pin the frame at
        // MIN_WIDTH against the remembered right edge when possible.
        if (m_resizeComponent.getX() + MIN_WIDTH < m_rightX)
          m_resizeComponent.setBounds(m_rightX-MIN_WIDTH,
            m_resizeComponent.getY(),
            MIN_WIDTH, m_resizeComponent.getHeight());
        else
          m_resizeComponent.setBounds(m_resizeComponent.getX(),
            m_resizeComponent.getY(),
            MIN_WIDTH, m_resizeComponent.getHeight());
      m_resizeComponent.validate();
    }
    public void mouseEntered(MouseEvent e) {
      if (!m_dragging)
        setCursor(Cursor.getPredefinedCursor(
          Cursor.W_RESIZE_CURSOR));
    }
    public void mouseExited(MouseEvent e) {
      if (!m_dragging)
        setCursor(Cursor.getPredefinedCursor(
          Cursor.DEFAULT_CURSOR));
    }
    public void mousePressed(MouseEvent e) {
      toFront();
      // Capture the right edge so resizing can keep it anchored.
      m_rightX = m_resizeComponent.getX() +
        m_resizeComponent.getWidth();
      m_dragging = true;
      m_dragX = e.getX();
    }
  }
  /**
   * Top border strip that resizes the frame from the north edge. The
   * outermost CORNER pixels on each end act as NW/NE corner handles that
   * also resize horizontally. Vertical resizing keeps the frame's bottom
   * edge (m_lowerY, captured at press time) fixed.
   */
  class NorthResizeEdge extends JPanel
    implements MouseListener, MouseMotionListener {
    // serialization id
    private static final long serialVersionUID = -2179877452401194523L;
    // Drag modes, selected in mousePressed from the press position.
    private static final int NORTH = 0;
    private static final int NORTHEAST = 1;
    private static final int NORTHWEST = 2;
    private int CORNER = 10;
    private int HEIGHT = BORDER_THICKNESS;
    private int MIN_WIDTH = ICONIZED_WIDTH;
    private int MIN_HEIGHT = TITLE_BAR_HEIGHT+(2*HEIGHT);
    // m_dragX/m_dragY: press offset; m_rightX/m_lowerY: anchored edges.
    private int /*m_width, */m_dragX, m_dragY, m_rightX, m_lowerY;
    private boolean m_dragging;
    private JComponent m_resizeComponent;
    private int m_mode;
    protected NorthResizeEdge(JComponent c) {
      m_resizeComponent = c;
      setOpaque(true);
      setBackground(BORDER_COLOR);
    }
    public Dimension getPreferredSize() {
      return new Dimension(m_resizeComponent.getWidth(), HEIGHT);
    }
    public void mouseClicked(MouseEvent e) {}
    public void mouseMoved(MouseEvent e) {
      // Show a corner cursor near the ends, a vertical cursor elsewhere.
      if (!m_dragging) {
        if (e.getX() < CORNER) {
          setCursor(Cursor.getPredefinedCursor(
            Cursor.NW_RESIZE_CURSOR));
        }
        else if(e.getX() > getWidth()-CORNER) {
          setCursor(Cursor.getPredefinedCursor(
            Cursor.NE_RESIZE_CURSOR));
        }
        else {
          setCursor(Cursor.getPredefinedCursor(
            Cursor.N_RESIZE_CURSOR));
        }
      }
    }
    public void mouseReleased(MouseEvent e) {
      m_dragging = false;
    }
    public void mouseDragged(MouseEvent e) {
      int h = m_resizeComponent.getHeight();
      int w = m_resizeComponent.getWidth();
      int x = m_resizeComponent.getX();
      int y = m_resizeComponent.getY();
      int ex = e.getX();
      int ey = e.getY();
      // Each case clamps the new size at MIN_WIDTH/MIN_HEIGHT; when the
      // height clamps, y is pinned so the bottom edge stays at m_lowerY.
      switch (m_mode) {
      case NORTH:
        if (h-(ey-m_dragY) >= MIN_HEIGHT)
          m_resizeComponent.setBounds(x, y + (ey-m_dragY),
            w, h-(ey-m_dragY));
        else
          m_resizeComponent.setBounds(x,
            m_lowerY-MIN_HEIGHT, w, MIN_HEIGHT);
        break;
      case NORTHEAST:
        // Width grows by the drag past the strip's right corner zone.
        if (h-(ey-m_dragY) >= MIN_HEIGHT
          && w + (ex-(getWidth()-CORNER)) >= MIN_WIDTH)
          m_resizeComponent.setBounds(x,
            y + (ey-m_dragY), w + (ex-(getWidth()-CORNER)),
            h-(ey-m_dragY));
        else if (h-(ey-m_dragY) >= MIN_HEIGHT
          && !(w + (ex-(getWidth()-CORNER)) >= MIN_WIDTH))
          m_resizeComponent.setBounds(x,
            y + (ey-m_dragY), MIN_WIDTH, h-(ey-m_dragY));
        else if (!(h-(ey-m_dragY) >= MIN_HEIGHT)
          && w + (ex-(getWidth()-CORNER)) >= MIN_WIDTH)
          m_resizeComponent.setBounds(x,
            m_lowerY-MIN_HEIGHT, w + (ex-(getWidth()-CORNER)),
            MIN_HEIGHT);
        else
          m_resizeComponent.setBounds(x,
            m_lowerY-MIN_HEIGHT, MIN_WIDTH, MIN_HEIGHT);
        break;
      case NORTHWEST:
        // Like WestResizeEdge: when width clamps, pin against m_rightX
        // if possible, otherwise leave x and width unchanged.
        if (h-(ey-m_dragY) >= MIN_HEIGHT
          && w-(ex-m_dragX) >= MIN_WIDTH)
          m_resizeComponent.setBounds(x + (ex-m_dragX),
            y + (ey-m_dragY), w-(ex-m_dragX),
            h-(ey-m_dragY));
        else if (h-(ey-m_dragY) >= MIN_HEIGHT
          && !(w-(ex-m_dragX) >= MIN_WIDTH)) {
          if (x + MIN_WIDTH < m_rightX)
            m_resizeComponent.setBounds(m_rightX-MIN_WIDTH,
              y + (ey-m_dragY), MIN_WIDTH, h-(ey-m_dragY));
          else
            m_resizeComponent.setBounds(x,
              y + (ey-m_dragY), w, h-(ey-m_dragY));
        }
        else if (!(h-(ey-m_dragY) >= MIN_HEIGHT)
          && w-(ex-m_dragX) >= MIN_WIDTH)
          m_resizeComponent.setBounds(x + (ex-m_dragX),
            m_lowerY-MIN_HEIGHT, w-(ex-m_dragX), MIN_HEIGHT);
        else
          m_resizeComponent.setBounds(m_rightX-MIN_WIDTH,
            m_lowerY-MIN_HEIGHT, MIN_WIDTH, MIN_HEIGHT);
        break;
      }
      // Refresh the anchored right edge for the next drag event.
      m_rightX = x + w;
      m_resizeComponent.validate();
    }
    public void mouseEntered(MouseEvent e) {
      mouseMoved(e);
    }
    public void mouseExited(MouseEvent e) {
      if (!m_dragging)
        setCursor(Cursor.getPredefinedCursor(
          Cursor.DEFAULT_CURSOR));
    }
    public void mousePressed(MouseEvent e) {
      toFront();
      m_dragging = true;
      m_dragX = e.getX();
      m_dragY = e.getY();
      // Anchor the bottom edge so north resizing leaves it in place.
      m_lowerY = m_resizeComponent.getY()
        + m_resizeComponent.getHeight();
      if (e.getX() < CORNER) {
        m_mode = NORTHWEST;
      }
      else if(e.getX() > getWidth()-CORNER) {
        m_mode = NORTHEAST;
      }
      else {
        m_mode = NORTH;
      }
    }
  }
  /**
   * Bottom border strip that resizes the frame from the south edge. The
   * outermost CORNER pixels on each end act as SW/SE corner handles that
   * also resize horizontally. The frame's top edge stays fixed.
   */
  class SouthResizeEdge extends JPanel
    implements MouseListener, MouseMotionListener {
    // serialization id
    private static final long serialVersionUID = -687949867012430204L;
    // Drag modes, selected in mousePressed from the press position.
    private static final int SOUTH = 0;
    private static final int SOUTHEAST = 1;
    private static final int SOUTHWEST = 2;
    private int CORNER = 10;
    private int HEIGHT = BORDER_THICKNESS;
    private int MIN_WIDTH = ICONIZED_WIDTH;
    private int MIN_HEIGHT = TITLE_BAR_HEIGHT+(2*HEIGHT);
    // m_dragX/m_dragY: press offset; m_rightX: anchored right edge.
    private int /*m_width, */m_dragX, m_dragY, m_rightX;
    private boolean m_dragging;
    private JComponent m_resizeComponent;
    private int m_mode;
    protected SouthResizeEdge(JComponent c) {
      m_resizeComponent = c;
      setOpaque(true);
      setBackground(BORDER_COLOR);
    }
    public Dimension getPreferredSize() {
      return new Dimension(m_resizeComponent.getWidth(), HEIGHT);
    }
    public void mouseClicked(MouseEvent e) {}
    public void mouseMoved(MouseEvent e) {
      // Show a corner cursor near the ends, a vertical cursor elsewhere.
      if (!m_dragging) {
        if (e.getX() < CORNER) {
          setCursor(Cursor.getPredefinedCursor(
            Cursor.SW_RESIZE_CURSOR));
        }
        else if(e.getX() > getWidth()-CORNER) {
          setCursor(Cursor.getPredefinedCursor(
            Cursor.SE_RESIZE_CURSOR));
        }
        else {
          setCursor(Cursor.getPredefinedCursor(
            Cursor.S_RESIZE_CURSOR));
        }
      }
    }
    public void mouseReleased(MouseEvent e) {
      m_dragging = false;
    }
    public void mouseDragged(MouseEvent e) {
      int h = m_resizeComponent.getHeight();
      int w = m_resizeComponent.getWidth();
      int x = m_resizeComponent.getX();
      int y = m_resizeComponent.getY();
      int ex = e.getX();
      int ey = e.getY();
      // Each case clamps the new size at MIN_WIDTH/MIN_HEIGHT; x and y
      // stay fixed except in SOUTHWEST where the left edge moves.
      switch (m_mode) {
      case SOUTH:
        if (h+(ey-m_dragY) >= MIN_HEIGHT)
          m_resizeComponent.setBounds(x, y, w, h+(ey-m_dragY));
        else
          m_resizeComponent.setBounds(x, y, w, MIN_HEIGHT);
        break;
      case SOUTHEAST:
        // Width grows by the drag past the strip's right corner zone.
        if (h+(ey-m_dragY) >= MIN_HEIGHT
          && w + (ex-(getWidth()-CORNER)) >= MIN_WIDTH)
          m_resizeComponent.setBounds(x, y,
            w + (ex-(getWidth()-CORNER)), h+(ey-m_dragY));
        else if (h+(ey-m_dragY) >= MIN_HEIGHT
          && !(w + (ex-(getWidth()-CORNER)) >= MIN_WIDTH))
          m_resizeComponent.setBounds(x, y,
            MIN_WIDTH, h+(ey-m_dragY));
        else if (!(h+(ey-m_dragY) >= MIN_HEIGHT)
          && w + (ex-(getWidth()-CORNER)) >= MIN_WIDTH)
          m_resizeComponent.setBounds(x, y,
            w + (ex-(getWidth()-CORNER)), MIN_HEIGHT);
        else
          m_resizeComponent.setBounds(x,
            y, MIN_WIDTH, MIN_HEIGHT);
        break;
      case SOUTHWEST:
        // Like WestResizeEdge: when width clamps, pin against m_rightX
        // if possible, otherwise leave x and width unchanged.
        if (h+(ey-m_dragY) >= MIN_HEIGHT
          && w-(ex-m_dragX) >= MIN_WIDTH)
          m_resizeComponent.setBounds(x + (ex-m_dragX), y,
            w-(ex-m_dragX), h+(ey-m_dragY));
        else if (h+(ey-m_dragY) >= MIN_HEIGHT
          && !(w-(ex-m_dragX) >= MIN_WIDTH)) {
          if (x + MIN_WIDTH < m_rightX)
            m_resizeComponent.setBounds(m_rightX-MIN_WIDTH, y,
              MIN_WIDTH, h+(ey-m_dragY));
          else
            m_resizeComponent.setBounds(x, y, w,
              h+(ey-m_dragY));
        }
        else if (!(h+(ey-m_dragY) >= MIN_HEIGHT)
          && w-(ex-m_dragX) >= MIN_WIDTH)
          m_resizeComponent.setBounds(x + (ex-m_dragX), y,
            w-(ex-m_dragX), MIN_HEIGHT);
        else
          m_resizeComponent.setBounds(m_rightX-MIN_WIDTH,
            y, MIN_WIDTH, MIN_HEIGHT);
        break;
      }
      // Refresh the anchored right edge for the next drag event.
      m_rightX = x + w;
      m_resizeComponent.validate();
    }
    public void mouseEntered(MouseEvent e) {
      mouseMoved(e);
    }
    public void mouseExited(MouseEvent e) {
      if (!m_dragging)
        setCursor(Cursor.getPredefinedCursor(
          Cursor.DEFAULT_CURSOR));
    }
    public void mousePressed(MouseEvent e) {
      toFront();
      m_dragging = true;
      m_dragX = e.getX();
      m_dragY = e.getY();
      if (e.getX() < CORNER) {
        m_mode = SOUTHWEST;
      }
      else if(e.getX() > getWidth()-CORNER) {
        m_mode = SOUTHEAST;
      }
      else {
        m_mode = SOUTH;
      }
    }
  }
} | gpl-3.0 |
SKCraft/Applied-Energistics-2 | src/main/java/appeng/core/features/FeaturedActiveChecker.java | 1304 | /*
* This file is part of Applied Energistics 2.
* Copyright (c) 2013 - 2014, AlgorithmX2, All rights reserved.
*
* Applied Energistics 2 is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Applied Energistics 2 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Applied Energistics 2. If not, see <http://www.gnu.org/licenses/lgpl>.
*/
package appeng.core.features;
import java.util.Set;
import appeng.core.AEConfig;
/**
 * Decides whether a group of {@link AEFeature}s is active as a whole:
 * the group is {@code Enabled} only if every single feature is switched
 * on in the mod configuration.
 */
public final class FeaturedActiveChecker
{
	private final Set<AEFeature> features;

	public FeaturedActiveChecker( Set<AEFeature> features )
	{
		this.features = features;
	}

	/**
	 * @return {@link ActivityState#Disabled} as soon as one feature of the
	 *         group is disabled in {@link AEConfig}, otherwise
	 *         {@link ActivityState#Enabled}.
	 */
	public ActivityState getActivityState()
	{
		for( AEFeature feature : this.features )
		{
			final boolean enabled = AEConfig.instance.isFeatureEnabled( feature );
			if( !enabled )
			{
				return ActivityState.Disabled;
			}
		}
		return ActivityState.Enabled;
	}
}
| gpl-3.0 |
virtoja13/Program2 | projects/PSP01/src/main/java/edu/uniandes/ecos/loc/DatosClase.java | 1116 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.uniandes.ecos.loc;
/**
*
* @author JAVIER
*/
/**
 * Simple mutable value holder for per-class LOC metrics: the class name,
 * its number of source lines and its number of methods.
 *
 * @author JAVIER
 */
public class DatosClase {

    // Field renamed from "NombreClase" to follow Java lowerCamelCase
    // conventions; the public getter/setter names are unchanged.
    private String nombreClase;
    private long numeroLineas;
    private int numeroMetodos;

    /**
     * @param nombreClase   name of the measured class
     * @param numeroLineas  number of source lines in the class
     * @param numeroMetodos number of methods in the class
     */
    public DatosClase(String nombreClase, long numeroLineas, int numeroMetodos) {
        this.nombreClase = nombreClase;
        this.numeroLineas = numeroLineas;
        this.numeroMetodos = numeroMetodos;
    }

    public String getNombreClase() {
        return nombreClase;
    }

    public long getNumeroLineas() {
        return numeroLineas;
    }

    public int getNumeroMetodos() {
        return numeroMetodos;
    }

    public void setNombreClase(String nombreClase) {
        this.nombreClase = nombreClase;
    }

    public void setNumeroLineas(long numeroLineas) {
        this.numeroLineas = numeroLineas;
    }

    public void setNumeroMetodos(int numeroMetodos) {
        this.numeroMetodos = numeroMetodos;
    }
}
vhiribarren/Anki-Android | AnkiDroid/src/main/java/com/ichi2/anki/widgets/DeckAdapter.java | 12220 | /****************************************************************************************
* Copyright (c) 2015 Houssam Salem <houssam.salem.au@gmail.com> *
* *
* This program is free software; you can redistribute it and/or modify it under *
* the terms of the GNU General Public License as published by the Free Software *
* Foundation; either version 3 of the License, or (at your option) any later *
* version. *
* *
* This program is distributed in the hope that it will be useful, but WITHOUT ANY *
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A *
* PARTICULAR PURPOSE. See the GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License along with *
* this program. If not, see <http://www.gnu.org/licenses/>. *
****************************************************************************************/
package com.ichi2.anki.widgets;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.support.v7.widget.RecyclerView;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.ichi2.anki.R;
import com.ichi2.libanki.Collection;
import com.ichi2.libanki.Sched;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
/**
 * RecyclerView adapter that renders the flattened deck tree of the deck
 * picker: one row per visible deck, with nesting shown by indentation,
 * expand/collapse buttons for parents, and per-deck new/learn/review counts.
 */
public class DeckAdapter extends RecyclerView.Adapter<DeckAdapter.ViewHolder> {

    // Number of dp units to use for each level of deck nesting
    private static final int INDENT_WIDTH_DIP = 14;
    // The calculated pixel width for the current screen
    private float mIndentWidth;

    private LayoutInflater mLayoutInflater;
    // Flattened, visible portion of the deck tree (collapsed subtrees excluded).
    private List<Sched.DeckDueTreeNode> mDeckList;
    // Colors and drawables resolved from the current theme in the constructor.
    private int mZeroCountColor;
    private int mNewCountColor;
    private int mLearnCountColor;
    private int mReviewCountColor;
    private int mRowDefaultColor;
    private int mRowCurrentColor;
    private int mDeckNameDefaultColor;
    private int mDeckNameDynColor;
    private Drawable mExpandImage;
    private Drawable mCollapseImage;
    private Drawable mNoExpander = new ColorDrawable(Color.TRANSPARENT);
    // Listeners
    private View.OnClickListener mDeckClickListener;
    private View.OnClickListener mDeckExpanderClickListener;
    private View.OnLongClickListener mDeckLongClickListener;
    private Collection mCol;

    // Totals accumulated as each deck is processed
    private int mNew;
    private int mLrn;
    private int mRev;

    // ViewHolder class to save inflated views for recycling
    public class ViewHolder extends RecyclerView.ViewHolder {
        public RelativeLayout deckLayout;
        public ImageButton deckExpander;
        public TextView deckName;
        public TextView deckNew, deckLearn, deckRev;

        public ViewHolder(View v) {
            super(v);
            deckLayout = (RelativeLayout) v.findViewById(R.id.DeckPickerHoriz);
            deckExpander = (ImageButton) v.findViewById(R.id.deckpicker_expander);
            deckName = (TextView) v.findViewById(R.id.deckpicker_name);
            deckNew = (TextView) v.findViewById(R.id.deckpicker_new);
            deckLearn = (TextView) v.findViewById(R.id.deckpicker_lrn);
            deckRev = (TextView) v.findViewById(R.id.deckpicker_rev);
        }
    }

    /**
     * Resolves all theme-dependent colors and drawables once, up front,
     * so that binding rows stays cheap.
     */
    public DeckAdapter(LayoutInflater layoutInflater, Context context) {
        mLayoutInflater = layoutInflater;
        mDeckList = new ArrayList<>();
        // Get the colors from the theme attributes
        int[] attrs = new int[] {
                R.attr.zeroCountColor,
                R.attr.newCountColor,
                R.attr.learnCountColor,
                R.attr.reviewCountColor,
                android.R.attr.colorBackground,
                R.attr.currentDeckBackgroundColor,
                android.R.attr.textColor,
                R.attr.dynDeckColor,
                R.attr.expandRef,
                R.attr.collapseRef };
        TypedArray ta = context.obtainStyledAttributes(attrs);
        Resources res = context.getResources();
        mZeroCountColor = ta.getColor(0, res.getColor(R.color.zero_count));
        mNewCountColor = ta.getColor(1, res.getColor(R.color.new_count));
        mLearnCountColor = ta.getColor(2, res.getColor(R.color.learn_count));
        mReviewCountColor = ta.getColor(3, res.getColor(R.color.review_count));
        mRowDefaultColor = ta.getColor(4, res.getColor(R.color.black));
        mRowCurrentColor = ta.getColor(5, res.getColor(R.color.deckadapter_row_current));
        mDeckNameDefaultColor = ta.getColor(6, res.getColor(R.color.black));
        mDeckNameDynColor = ta.getColor(7, res.getColor(R.color.deckadapter_deck_name_dyn));
        mExpandImage = ta.getDrawable(8);
        mCollapseImage = ta.getDrawable(9);
        ta.recycle();
        // Pre-compute the pixel width of one nesting level for this screen.
        mIndentWidth = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, INDENT_WIDTH_DIP,
                res.getDisplayMetrics());
    }

    public void setDeckClickListener(View.OnClickListener listener) {
        mDeckClickListener = listener;
    }

    public void setDeckExpanderClickListener(View.OnClickListener listener) {
        mDeckExpanderClickListener = listener;
    }

    public void setDeckLongClickListener(View.OnLongClickListener listener) {
        mDeckLongClickListener = listener;
    }

    /**
     * Consume a list of {@link Sched.DeckDueTreeNode}s to render a new deck list.
     */
    public void buildDeckList(List<Sched.DeckDueTreeNode> nodes, Collection col) {
        mCol = col;
        mDeckList.clear();
        // Reset the top-level totals before re-flattening the tree.
        mNew = mLrn = mRev = 0;
        processNodes(nodes);
        notifyDataSetChanged();
    }

    @Override
    public DeckAdapter.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View v = mLayoutInflater.inflate(R.layout.deck_item, parent, false);
        return new ViewHolder(v);
    }

    @Override
    public void onBindViewHolder(ViewHolder holder, int position) {
        // Update views for this node
        Sched.DeckDueTreeNode node = mDeckList.get(position);
        // Create the right expander for this deck
        setDeckExpander(holder.deckExpander, node);
        if (node.children.size() > 0) {
            holder.deckExpander.setTag(node.did);
            holder.deckExpander.setOnClickListener(mDeckExpanderClickListener);
        }
        // Set background colour. The current deck has its own color
        if (node.did == mCol.getDecks().current().optLong("id")) {
            holder.deckLayout.setBackgroundColor(mRowCurrentColor);
        } else {
            holder.deckLayout.setBackgroundColor(mRowDefaultColor);
        }
        // Set deck name and colour. Filtered decks have their own colour
        holder.deckName.setText(node.names[0]);
        if (mCol.getDecks().isDyn(node.did)) {
            holder.deckName.setTextColor(mDeckNameDynColor);
        } else {
            holder.deckName.setTextColor(mDeckNameDefaultColor);
        }
        // Set the card counts and their colors
        holder.deckNew.setText(String.valueOf(node.newCount));
        holder.deckNew.setTextColor((node.newCount == 0) ? mZeroCountColor : mNewCountColor);
        holder.deckLearn.setText(String.valueOf(node.lrnCount));
        holder.deckLearn.setTextColor((node.lrnCount == 0) ? mZeroCountColor : mLearnCountColor);
        holder.deckRev.setText(String.valueOf(node.revCount));
        holder.deckRev.setTextColor((node.revCount == 0) ? mZeroCountColor : mReviewCountColor);
        // Store deck ID in layout's tag for easy retrieval in our click listeners
        holder.deckLayout.setTag(node.did);
        // Set click listeners
        holder.deckLayout.setOnClickListener(mDeckClickListener);
        holder.deckLayout.setOnLongClickListener(mDeckLongClickListener);
    }

    @Override
    public int getItemCount() {
        return mDeckList.size();
    }

    /**
     * Chooses the expand/collapse/blank drawable for a row and indents it
     * according to the deck's nesting depth.
     */
    private void setDeckExpander(ImageButton expander, Sched.DeckDueTreeNode node) {
        boolean collapsed = mCol.getDecks().get(node.did).optBoolean("collapsed", false);
        // Apply the correct expand/collapse drawable
        if (collapsed) {
            expander.setImageDrawable(mExpandImage);
        } else if (node.children.size() > 0) {
            expander.setImageDrawable(mCollapseImage);
        } else {
            expander.setImageDrawable(mNoExpander);
        }
        // Now set the padding on the left side to indent nested decks.
        // The initial padding is the same width as an expander image. If the row requires
        // an expander, we remove this padding. This results in aligned deck names with and
        // without an expander.
        int expanderWidth = mExpandImage.getIntrinsicWidth();
        int indent = expanderWidth;
        // Add some indenting for each nested level
        indent += mIndentWidth * node.depth;
        if (collapsed || node.children.size() > 0) {
            // If an expand/collapse button exists, remove the initial padding.
            indent -= expanderWidth;
        }
        expander.setPadding(indent, 0, 0, 0);
    }

    private void processNodes(List<Sched.DeckDueTreeNode> nodes) {
        processNodes(nodes, 0);
    }

    // Recursively flattens the deck tree into mDeckList, skipping hidden
    // decks and accumulating the top-level due-count totals.
    private void processNodes(List<Sched.DeckDueTreeNode> nodes, int depth) {
        for (Sched.DeckDueTreeNode node : nodes) {
            // If the default deck is empty, hide it by not adding it to the deck list.
            // We don't hide it if it's the only deck or if it has sub-decks.
            if (node.did == 1 && nodes.size() > 1 && node.children.size() == 0) {
                if (mCol.getDb().queryScalar("select 1 from cards where did = 1") == 0) {
                    continue;
                }
            }
            // If any of this node's parents are collapsed, don't add it to the deck list
            // (return rather than continue: all nodes in this list are siblings,
            // so they share the same — collapsed — ancestor chain).
            for (JSONObject parent : mCol.getDecks().parents(node.did)) {
                if (parent.optBoolean("collapsed")) {
                    return;
                }
            }
            mDeckList.add(node);

            // Keep track of the depth. It's used to determine visual properties like indenting later
            node.depth = depth;

            // Add this node's counts to the totals if it's a parent deck
            if (depth == 0) {
                mNew += node.newCount;
                mLrn += node.lrnCount;
                mRev += node.revCount;
            }

            // Process sub-decks
            processNodes(node.children, depth + 1);
        }
    }

    /**
     * Return the position of the deck in the deck list. If the deck is a child of a collapsed deck
     * (i.e., not visible in the deck list), then the position of the parent deck is returned instead.
     *
     * An invalid deck ID will return position 0.
     */
    public int findDeckPosition(long did) {
        for (int i = 0; i < mDeckList.size(); i++) {
            if (mDeckList.get(i).did == did) {
                return i;
            }
        }
        // If the deck is not in our list, we search again using the immediate parent
        // (recurses until a visible ancestor — or the top — is found).
        ArrayList<JSONObject> parents = mCol.getDecks().parents(did);
        if (parents.size() == 0) {
            return 0;
        } else {
            return findDeckPosition(parents.get(parents.size() - 1).optLong("id", 0));
        }
    }

    /** Estimated review time (ETA) for today's totals, as computed by the scheduler. */
    public int getEta() {
        return mCol.getSched().eta(new int[]{mNew, mLrn, mRev});
    }

    /** Total number of cards due today across all top-level decks. */
    public int getDue() {
        return mNew + mLrn + mRev;
    }
} | gpl-3.0 |
BorderTech/wcomponents | wcomponents-core/src/main/java/com/github/bordertech/wcomponents/autocomplete/AutocompleteableURL.java | 781 | package com.github.bordertech.wcomponents.autocomplete;
import com.github.bordertech.wcomponents.autocomplete.type.Url;
/**
* Specific {@code autocomplete} attribute values for controls in the
* <a href="https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#control-group-url" target="_blank">url control group</a>.
*
* @author Mark Reeves
* @since 1.5.3
*/
/**
 * Specific {@code autocomplete} attribute values for controls in the
 * <a href="https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#control-group-url" target="_blank">url control group</a>.
 *
 * @author Mark Reeves
 * @since 1.5.3
 */
public interface AutocompleteableURL extends Autocompleteable {

	/**
	 * Set the {@code autocomplete} attribute to a specific URL auto-fill type: "url", "impp" or "photo".
	 * @param value the auto-fill hint value
	 */
	void setAutocomplete(final Url value);

	/**
	 * Set the "url" auto-fill hint for the current field.
	 * Convenience shorthand for {@code setAutocomplete(Url.URL)}.
	 */
	default void setUrlAutocomplete() {
		setAutocomplete(Url.URL);
	}
}
| gpl-3.0 |
sanjupolus/KC6.oLatest | coeus-impl/src/main/java/org/kuali/coeus/sys/framework/scheduling/ScheduleService.java | 7393 | /*
* Kuali Coeus, a comprehensive research administration system for higher education.
*
* Copyright 2005-2015 Kuali, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.coeus.sys.framework.scheduling;
import org.kuali.coeus.sys.framework.scheduling.seq.ScheduleSequence;
import org.kuali.coeus.sys.framework.scheduling.util.CronSpecialChars;
import org.kuali.coeus.sys.framework.scheduling.util.Time24HrFmt;
import java.text.ParseException;
import java.util.Date;
import java.util.List;
public interface ScheduleService {
/**
* This method must return schedule dates generated between provided parameters.
* @param startDate is begin date.
* @param endDate is end date.
* @param time is time.
     * @param scheduleSequence to be used for generating the sequence. If the value passed is null, DefaultScheduleSequence will be used.
* @return List<Date> of date sequence generated.
* @throws ParseException
*/
public List<Date> getScheduledDates(Date startDate, Date endDate, Time24HrFmt time, ScheduleSequence scheduleSequence)
throws ParseException;
/**
* This method must return schedule dates generated between provided parameters.
* @param startDate is begin date.
* @param endDate is end date.
* @param time is time.
* @param frequencyInDay
* @param scheduleSequence to used for generating sequence. If value passed is null, DefaultScheduleSequnce will be used.
* @return List<Date> of date sequence generated.
* @throws ParseException
*/
public List<Date> getScheduledDates(Date startDate, Date endDate, Time24HrFmt time, Integer frequencyInDay,
ScheduleSequence scheduleSequence) throws ParseException;
/**
* This method must return schedule dates generated between provided parameters.
* @param startDate is begin date.
* @param endDate is end date.
* @param time is time.
* @param intervalInDays is the number of days in each repeating interval
* @return List<Date> of date sequence generated.
* @throws ParseException
*/
public List<Date> getIntervalInDaysScheduledDates(Date startDate, Date endDate, Time24HrFmt time, Integer intervalInDays) throws ParseException;
/**
* This method must return schedule dates generated between provided parameters.
* @param startDate is begin date.
* @param endDate is end date.
* @param time is time.
* @param weekdays is array of CronSpecialChars containing week day values.
* @param scheduleSequence to used for generating sequence. If value passed is null, DefaultScheduleSequnce will be used.
* @return List<Date> of date sequence generated.
* @throws ParseException
*/
public List<Date> getScheduledDates(Date startDate, Date endDate, Time24HrFmt time, CronSpecialChars[] weekdays,
ScheduleSequence scheduleSequence) throws ParseException;
/**
* This method must return schedule dates generated between provided parameters.
* @param startDate is begin date.
* @param endDate is end date.
* @param time is time.
* @param day is day of month.
* @param frequencyInMonth
* @param scheduleSequence to used for generating sequence. If value passed is null, DefaultScheduleSequnce will be used.
* @return List<Date> of date sequence generated.
* @throws ParseException
*/
public List<Date> getScheduledDates(Date startDate, Date endDate, Time24HrFmt time, Integer day, Integer frequencyInMonth,
ScheduleSequence scheduleSequence) throws ParseException;
/**
* This method must return schedule dates generated between provided parameters.
* @param startDate is begin date.
* @param endDate is end date.
* @param time is time.
* @param scheduleSequence to used for generating sequence. If value passed is null, DefaultScheduleSequnce will be used.
* @return List<Date> of date sequence generated.
* @throws ParseException
*/
public List<Date> getScheduledDates(Date startDate, Date endDate, Time24HrFmt time, ScheduleSequence scheduleSequence, Integer dayOfMonth) throws ParseException;
/**
* This method must return schedule dates generated between provided parameters.
* @param startDate is begin date.
* @param endDate is end date.
* @param time is time.
* @param dayOfWeek is CronSpecialChars defining day of week.
* @param weekOfMonth is CronSpecialChars defining week of month.
* @param frequencyInMonth
* @param scheduleSequence to used for generating sequence. If value passed is null, DefaultScheduleSequnce will be used.
* @return List<Date> of date sequence generated.
* @throws ParseException
*/
public List<Date> getScheduledDates(Date startDate, Date endDate, Time24HrFmt time, CronSpecialChars dayOfWeek,
CronSpecialChars weekOfMonth, Integer frequencyInMonth, ScheduleSequence scheduleSequence) throws ParseException;
/**
* This method must return schedule dates generated between provided parameters.
* @param startDate is begin date.
* @param endDate is end date.
* @param time is time.
* @param month is CronSpecialChars defining month.
* @param day is day of month.
* @param frequencyInYear
* @param scheduleSequence to used for generating sequence. If value passed is null, DefaultScheduleSequnce will be used.
* @return List<Date> of date sequence generated.
* @throws ParseException
*/
public List<Date> getScheduledDates(Date startDate, Date endDate, Time24HrFmt time, CronSpecialChars month, Integer day,
Integer frequencyInYear, ScheduleSequence scheduleSequence) throws ParseException;
/**
* This method must return schedule dates generated between provided parameters.
* @param startDate is begin date.
* @param endDate is end date.
* @param time is time.
* @param weekOfMonth is CronSpecialChars defining week of month.
* @param dayOfWeek is CronSpecialChars defining day of week.
* @param month is CronSpecialChars defining month.
* @param frequencyInYear
* @param scheduleSequence to used for generating sequence. If value passed is null, DefaultScheduleSequnce will be used.
* @return List<Date> of date sequence generated.
* @throws ParseException
*/
public List<Date> getScheduledDates(Date startDate, Date endDate, Time24HrFmt time, CronSpecialChars weekOfMonth,
CronSpecialChars dayOfWeek, CronSpecialChars month, Integer frequencyInYear, ScheduleSequence scheduleSequence)
throws ParseException;
}
| agpl-3.0 |
fhuertas/torodb | torod/torod-core/src/main/java/com/torodb/torod/core/subdocument/values/TimeValue.java | 2187 | /*
* This file is part of ToroDB.
*
* ToroDB is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ToroDB is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with ToroDB. If not, see <http://www.gnu.org/licenses/>.
*
* Copyright (c) 2014, 8Kdata Technology
*
*/
package com.torodb.torod.core.subdocument.values;
import com.torodb.torod.core.subdocument.BasicType;
import org.threeten.bp.LocalTime;
/**
*
*/
/**
 * Immutable wrapper of a {@link LocalTime} used as a sub-document value.
 */
public class TimeValue implements Value<LocalTime> {

    private static final long serialVersionUID = 1L;

    /** The wrapped time-of-day value. */
    private final LocalTime value;

    /**
     * Wraps the given time of day.
     *
     * @param value the time this instance represents
     */
    public TimeValue(LocalTime value) {
        this.value = value;
    }

    @Override
    public LocalTime getValue() {
        return value;
    }

    @Override
    public BasicType getType() {
        // NOTE(review): this reports DATETIME for a time-only value, which
        // looks like a copy-paste from a date-time variant. Confirm whether
        // BasicType declares a TIME constant that should be used instead.
        return BasicType.DATETIME;
    }

    @Override
    public String toString() {
        return value.toString();
    }

    @Override
    public int hashCode() {
        // Produces exactly the same result as the former 3/67
        // seed-and-multiply scheme: 67 * 3 + valueHash == 201 + valueHash.
        return 201 + (value == null ? 0 : value.hashCode());
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        LocalTime otherValue = ((TimeValue) obj).value;
        if (value == null) {
            return otherValue == null;
        }
        return value.equals(otherValue);
    }

    @Override
    public <Result, Arg> Result accept(ValueVisitor<Result, Arg> visitor,
            Arg arg) {
        return visitor.visit(this, arg);
    }
}
| agpl-3.0 |
fhuertas/torodb | torod/torod-core/src/main/java/com/torodb/torod/core/language/querycriteria/ModIsQueryCriteria.java | 3111 | /*
* This file is part of ToroDB.
*
* ToroDB is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ToroDB is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with ToroDB. If not, see <http://www.gnu.org/licenses/>.
*
* Copyright (c) 2014, 8Kdata Technology
*
*/
package com.torodb.torod.core.language.querycriteria;
import com.torodb.torod.core.language.AttributeReference;
import com.torodb.torod.core.language.querycriteria.utils.QueryCriteriaVisitor;
import com.torodb.torod.core.subdocument.values.Value;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import javax.annotation.Nonnull;
/**
*
*/
/**
 * Query criterion matching documents whose attribute, divided by
 * {@code divisor}, leaves the remainder {@code reminder}.
 */
public class ModIsQueryCriteria extends AttributeQueryCriteria {

    private static final long serialVersionUID = 1L;

    /** The value the attribute is divided by. */
    @Nonnull
    private final Value<? extends Number> divisor;
    /** The expected remainder ("reminder" spelling kept for API compatibility). */
    @Nonnull
    private final Value<? extends Number> reminder;

    public ModIsQueryCriteria(AttributeReference attributeReference, Value<? extends Number> divisor, Value<? extends Number> reminder) {
        super(attributeReference);
        this.divisor = divisor;
        this.reminder = reminder;
    }

    @Nonnull
    public Value<? extends Number> getDivisor() {
        return divisor;
    }

    @Nonnull
    public Value<? extends Number> getReminder() {
        return reminder;
    }

    @Override
    public String toString() {
        return getAttributeReference() + " % " + getDivisor() + " = " + getReminder();
    }

    @Override
    public int hashCode() {
        // Same arithmetic as before (seed 5, multiplier 47), just written
        // with a conventionally named accumulator.
        int result = 5;
        result = 47 * result + this.getAttributeReference().hashCode();
        result = 47 * result + this.divisor.hashCode();
        result = 47 * result + this.reminder.hashCode();
        return result;
    }

    @SuppressFBWarnings("BC_UNCONFIRMED_CAST")
    @Override
    public boolean semanticEquals(QueryCriteria obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        ModIsQueryCriteria that = (ModIsQueryCriteria) obj;
        return getAttributeReference().equals(that.getAttributeReference())
                && divisor.equals(that.divisor)
                && reminder.equals(that.reminder);
    }

    @Override
    public <Result, Arg> Result accept(QueryCriteriaVisitor<Result, Arg> visitor, Arg arg) {
        return visitor.visit(this, arg);
    }
}
| agpl-3.0 |
DFieldFL/modeler | core/src/main/java/org/pentaho/agilebi/modeler/ModelerTreeHelper.java | 7286 | /*!
* This program is free software; you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License, version 2.1 as published by the Free Software
* Foundation.
*
* You should have received a copy of the GNU Lesser General Public License along with this
* program; if not, you can obtain a copy at http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html
* or from the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* Copyright (c) 2002-2017 Hitachi Vantara.. All rights reserved.
*/
package org.pentaho.agilebi.modeler;
import java.util.Collections;
import java.util.Map;
import org.pentaho.agilebi.modeler.nodes.AbstractMetaDataModelNode;
import org.pentaho.agilebi.modeler.propforms.ModelerNodePropertiesForm;
import org.pentaho.metadata.model.LogicalColumn;
import org.pentaho.metadata.model.LogicalTable;
import org.pentaho.ui.xul.XulComponent;
import org.pentaho.ui.xul.XulEventSourceAdapter;
import org.pentaho.ui.xul.components.XulConfirmBox;
import org.pentaho.ui.xul.containers.XulDeck;
import org.pentaho.ui.xul.dnd.DropEvent;
import org.pentaho.ui.xul.dom.Document;
import org.pentaho.ui.xul.stereotype.Bindable;
import org.pentaho.ui.xul.util.AbstractModelNode;
import org.pentaho.ui.xul.util.XulDialogCallback;
/**
* Created: 3/21/11
*
* @author rfellows
*/
/**
 * Base helper that mediates between a XUL tree of model nodes and the
 * per-node-type properties forms shown in {@code propsDeck}. Concrete
 * subclasses supply the model-clearing behavior and change tracking.
 */
public abstract class ModelerTreeHelper extends XulEventSourceAdapter {

  /** The tree node currently selected in the UI; transient, never persisted. */
  private transient Object selectedTreeItem;

  /** Maps a properties-form class to its singleton form instance. */
  private Map<Class<? extends ModelerNodePropertiesForm>, ModelerNodePropertiesForm> propertiesForms;

  /** The form currently bound to the selected node, if any. */
  private ModelerNodePropertiesForm selectedForm;

  /** Deck whose pages are the properties forms; page 0 is the empty page. */
  private XulDeck propsDeck;

  protected ModelerWorkspace workspace;

  /** XUL document used to create dialogs (e.g. the clear-model confirm box). */
  private Document document;

  public ModelerTreeHelper() {
  }

  public ModelerTreeHelper( Map<Class<? extends ModelerNodePropertiesForm>, ModelerNodePropertiesForm> propertiesForms,
                            XulDeck propsDeck, ModelerWorkspace workspace, Document document ) {
    this.propertiesForms = propertiesForms;
    this.propsDeck = propsDeck;
    this.workspace = workspace;
    this.document = document;
  }

  @Bindable
  public Object getSelectedTreeItem() {
    return selectedTreeItem;
  }

  @Bindable
  public void setSelectedTreeItem( Object selectedTreeItem ) {
    this.selectedTreeItem = selectedTreeItem;
  }

  /**
   * Reacts to a tree selection change: activates the properties form
   * registered for the selected node type, deactivating the previous form.
   * Falls back to the empty deck page when no form applies.
   *
   * @param selection the newly selected tree node, may be null
   */
  @Bindable
  public void setTreeSelectionChanged( Object selection ) {
    setSelectedTreeItem( selection );
    if ( selection != null && selection instanceof AbstractMetaDataModelNode ) {
      AbstractMetaDataModelNode node = (AbstractMetaDataModelNode) selection;
      ModelerNodePropertiesForm form = propertiesForms.get( node.getPropertiesForm() );
      if ( form != null ) {
        // Detach the previous form so it doesn't keep a stale node reference.
        if ( selectedForm != null && selectedForm != form ) {
          selectedForm.setObject( null );
        }
        form.activate( (AbstractMetaDataModelNode) selection );
        selectedForm = form;
        return;
      }
    }
    if ( propsDeck != null ) {
      propsDeck.setSelectedIndex( 0 );
    }
  }

  /** Moves the selected node one position up among its siblings; no-op when nothing is selected. */
  @Bindable
  public void moveFieldUp() {
    if ( selectedTreeItem == null ) {
      return;
    }
    ( (AbstractModelNode) selectedTreeItem ).getParent().moveChildUp( selectedTreeItem );
  }

  /** Moves the selected node one position down among its siblings; no-op when nothing is selected. */
  @Bindable
  public void moveFieldDown() {
    if ( selectedTreeItem == null ) {
      return;
    }
    ( (AbstractModelNode) selectedTreeItem ).getParent().moveChildDown( selectedTreeItem );
  }

  /** Removes the selected node from its parent and clears the selection. */
  @Bindable
  public void removeField() {
    ( (AbstractModelNode) selectedTreeItem ).getParent().remove( selectedTreeItem );
    setTreeSelectionChanged( null );
  }

  /**
   * Asks the user to confirm and, on acceptance, delegates to
   * {@link #clearTreeModel()} to wipe the model.
   */
  @Bindable
  public void clearFields() {
    try {
      XulConfirmBox confirm = (XulConfirmBox) document.createElement( "confirmbox" ); //$NON-NLS-1$
      confirm.setTitle( ModelerMessagesHolder.getMessages().getString( "clear_model_title" ) ); //$NON-NLS-1$
      confirm.setMessage( ModelerMessagesHolder.getMessages().getString( "clear_model_msg" ) ); //$NON-NLS-1$
      confirm.setAcceptLabel( ModelerMessagesHolder.getMessages().getString( "yes" ) ); //$NON-NLS-1$
      confirm.setCancelLabel( ModelerMessagesHolder.getMessages().getString( "no" ) ); //$NON-NLS-1$

      confirm.addDialogCallback( new XulDialogCallback() {
        public void onClose( XulComponent sender, Status returnCode, Object retVal ) {
          if ( returnCode == Status.ACCEPT ) {
            clearTreeModel();
          }
        }

        public void onError( XulComponent sender, Throwable t ) {
        }
      } );
      confirm.open();
    } catch ( Exception e ) {
      // NOTE(review): dialog-creation failures are only dumped to stderr; no
      // logger is available in this class. Consider surfacing this to the user.
      e.printStackTrace();
    }
  }

  /** Clears the underlying tree model. Invoked after the user confirms in {@link #clearFields()}. */
  @Bindable
  public abstract void clearTreeModel();

  /**
   * Drops each of the given fields onto the currently selected tree node.
   *
   * @param selectedFields the fields to add
   * @throws ModelerException if the drop target rejects a field
   */
  public void addField( Object[] selectedFields ) throws ModelerException {
    try {
      IDropTarget dropNode = (IDropTarget) getSelectedTreeItem();
      for ( Object selectedField : selectedFields ) {
        AbstractModelNode newNode = (AbstractModelNode) dropNode.onDrop( selectedField );
        ( (AbstractModelNode) dropNode ).add( newNode );
      }
    } catch ( IllegalStateException e ) {
      throw new ModelerException( e );
    }
  }

  public Map<Class<? extends ModelerNodePropertiesForm>, ModelerNodePropertiesForm> getPropertiesForms() {
    return propertiesForms;
  }

  public void setPropertiesForms(
      Map<Class<? extends ModelerNodePropertiesForm>, ModelerNodePropertiesForm> propertiesForms ) {
    this.propertiesForms = propertiesForms;
  }

  public ModelerNodePropertiesForm getSelectedForm() {
    return selectedForm;
  }

  public void setSelectedForm( ModelerNodePropertiesForm selectedForm ) {
    this.selectedForm = selectedForm;
  }

  public XulDeck getPropsDeck() {
    return propsDeck;
  }

  public void setPropsDeck( XulDeck propsDeck ) {
    this.propsDeck = propsDeck;
  }

  public ModelerWorkspace getWorkspace() {
    return workspace;
  }

  public void setWorkspace( ModelerWorkspace workspace ) {
    this.workspace = workspace;
  }

  public Document getDocument() {
    return document;
  }

  public void setDocument( Document document ) {
    this.document = document;
  }

  /**
   * Detaches the node's logical column from its owning logical table, if any.
   *
   * @param node the node whose backing column should be unlinked
   */
  protected void removeLogicalColumnFromParentTable( ColumnBackedNode node ) {
    LogicalColumn lCol = node.getLogicalColumn();
    if ( lCol != null && lCol.getLogicalTable() != null ) {
      LogicalTable lTab = lCol.getLogicalTable();
      lTab.getLogicalColumns().remove( lCol );
    }
  }

  /**
   * Handles a drag-and-drop onto the model tree: forwards each dragged datum
   * to the drop parent and, when accepted, replaces the transfer payload with
   * the resulting node.
   *
   * <p>The former {@code catch (ModelerException e) { throw e; }} wrapper was
   * a no-op and has been removed; the exception still propagates unchanged.</p>
   *
   * @param event the XUL drop event
   * @throws ModelerException if the drop target rejects the data
   */
  public void onModelDrop( DropEvent event ) throws ModelerException {
    IDropTarget dropNode = (IDropTarget) event.getDropParent();
    Object newData = null;
    for ( Object data : event.getDataTransfer().getData() ) {
      newData = dropNode.onDrop( data );
    }
    if ( newData == null ) {
      event.setAccepted( false );
    } else {
      event.getDataTransfer().setData( Collections.singletonList( newData ) );
    }
  }

  /** @return whether a bulk model change is currently in progress. */
  protected abstract boolean isModelChanging();

  /** Marks the start/end of a bulk model change. */
  protected abstract void setModelIsChanging( boolean changing );
}
| lgpl-2.1 |
liujed/polyglot-eclipse | src/polyglot/types/UnknownQualifier_c.java | 2085 | /*******************************************************************************
* This file is part of the Polyglot extensible compiler framework.
*
* Copyright (c) 2000-2012 Polyglot project group, Cornell University
* Copyright (c) 2006-2012 IBM Corporation
* All rights reserved.
*
* This program and the accompanying materials are made available under
* the terms of the Eclipse Public License v1.0 which accompanies this
* distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* This program and the accompanying materials are made available under
* the terms of the Lesser GNU Public License v2.0 which accompanies this
* distribution.
*
* The development of the Polyglot project has been supported by a
* number of funding sources, including DARPA Contract F30602-99-1-0533,
* monitored by USAF Rome Laboratory, ONR Grants N00014-01-1-0968 and
* N00014-09-1-0652, NSF Grants CNS-0208642, CNS-0430161, CCF-0133302,
* and CCF-1054172, AFRL Contract FA8650-10-C-7022, an Alfred P. Sloan
* Research Fellowship, and an Intel Research Ph.D. Fellowship.
*
* See README for contributors.
******************************************************************************/
package polyglot.types;
import polyglot.util.SerialVersionUID;
/**
* An unknown type qualifier. This is used as a place-holder until types
* are disambiguated.
*/
/**
 * An unknown type qualifier used as a place-holder until types are
 * disambiguated. It is deliberately neither a package nor a type, and it is
 * never canonical.
 */
public class UnknownQualifier_c extends TypeObject_c implements
        UnknownQualifier {
    private static final long serialVersionUID = SerialVersionUID.generate();

    public UnknownQualifier_c(TypeSystem ts) {
        super(ts);
    }

    /** Always false: a placeholder qualifier is by definition not canonical. */
    @Override
    public boolean isCanonical() {
        return false;
    }

    /** Always false: it is not yet known whether this resolves to a package. */
    @Override
    public boolean isPackage() {
        return false;
    }

    /** Always false: it is not yet known whether this resolves to a type. */
    @Override
    public boolean isType() {
        return false;
    }

    /** Always null; see {@link #isPackage()}. */
    @Override
    public Package toPackage() {
        return null;
    }

    /** Always null; see {@link #isType()}. */
    @Override
    public Type toType() {
        return null;
    }

    @Override
    public String toString() {
        return "<unknown>";
    }
}
| lgpl-2.1 |
pwrose/biojava | biojava-core/src/main/java/org/biojava/nbio/core/util/InputStreamProvider.java | 6936 | /*
* BioJava development code
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. If you do not have a copy,
* see:
*
* http://www.gnu.org/copyleft/lesser.html
*
* Copyright for this code is held jointly by the individual
* authors. These should be listed in @author doc comments.
*
* For more information on the BioJava project and its aims,
* or to join the biojava-l mailing list, visit the home page
* at:
*
* http://www.biojava.org/
*
* Created on Dec 28, 2005
*
*/
package org.biojava.nbio.core.util;
import java.io.*;
import java.net.URL;
import java.util.Enumeration;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.zip.GZIPInputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
/** A class that provides an InputStream from a File. The file can be compressed or uncompressed.
*
* Currently supported
* compressions:
* <ul>
* <li>Gzip (extension .gz)</li>
* <li>Zip (extension .zip) in this case a stream to the first entry in the zip file is returned </li>
* <li>Jar (extension .jar) same as .Zip; only stream to first entry is returned </li>
* <li>Z (extension .Z) compressed using the unix compress command </li>
* <li>for any other extension, no compression is assumed </li>
* </ul>
*
*
* @author Andreas Prlic
* @since 1.5
* @version %I% %G%
*
*/
/** A class that provides an InputStream from a File. The file can be compressed or uncompressed.
 *
 * Currently supported compressions:
 * <ul>
 * <li>Gzip (extension .gz)</li>
 * <li>Zip (extension .zip) in this case a stream to the first entry in the zip file is returned </li>
 * <li>Jar (extension .jar) same as .Zip; only stream to first entry is returned </li>
 * <li>Z (extension .Z) compressed using the unix compress command </li>
 * <li>for any other extension, no compression is assumed </li>
 * </ul>
 *
 * Callers are responsible for closing every returned stream.
 */
public class InputStreamProvider {

    /**
     * The magic number found at the start of a GZIP stream: the bytes
     * 0x1f 0x8b read as a big-endian 16-bit value.
     */
    public static final int GZIP_MAGIC = 0x1f8b;

    /** System property; when set to "true", raw files are served from an in-memory cache. */
    public static final String CACHE_PROPERTY = "biojava.cache.files";

    /** Whether raw (uncompressed) file contents should be cached via FlatFileCache. */
    private boolean cacheRawFiles;

    public InputStreamProvider() {
        super();
        cacheRawFiles = false;

        String prop = System.getProperty(CACHE_PROPERTY);
        if (prop != null && prop.equals("true")) {
            cacheRawFiles = true;
        }
    }

    /**
     * Get an InputStream for given file path.
     * The caller is responsible for closing the stream or otherwise
     * a resource leak can occur.
     * @param pathToFile the path of the file.
     * @return an InputStream for the file located at the path.
     * @throws IOException
     */
    public InputStream getInputStream(String pathToFile)
            throws IOException {
        File f = new File(pathToFile);
        return getInputStream(f);
    }

    /**
     * Reads the first two bytes of the stream as a big-endian 16-bit magic
     * number; used to detect the compression format. Does NOT close the
     * stream — the caller owns it.
     *
     * @param in an input stream to read from
     * @return the magic number
     * @throws IOException if fewer than two bytes are available
     */
    private int getMagicNumber(InputStream in)
            throws IOException {
        int t = in.read();
        if (t < 0) throw new EOFException("Failed to read magic number");
        int magic = (t & 0xff) << 8;
        t = in.read();
        if (t < 0) throw new EOFException("Failed to read magic number");
        magic += t & 0xff;
        return magic;
    }

    /**
     * Opens an InputStream for the URL, decompressing .Z and gzip content
     * (detected by magic number, falling back to the path extension).
     * The caller is responsible for closing the returned stream.
     *
     * @param u the URL to open
     * @return a (possibly decompressing) stream over the URL's content
     * @throws IOException
     */
    public InputStream getInputStream(URL u)
            throws IOException {
        int magic;

        // Probe the first two bytes to detect the compression format.
        // Fix: the probe stream is now closed even when reading fails.
        InputStream probe = u.openStream();
        try {
            magic = getMagicNumber(probe);
        } finally {
            probe.close();
        }

        if (magic == UncompressInputStream.LZW_MAGIC) {
            // a Z compressed file
            return openCompressedURL(u);
        } else if (magic == GZIP_MAGIC) {
            return openGZIPURL(u);
        } else if (u.getPath().endsWith(".gz")) {
            return openGZIPURL(u);
        } else if (u.getPath().endsWith(".Z")) {
            // unix compressed
            return openCompressedURL(u);
        } else {
            return u.openStream();
        }
    }

    /**
     * Get an InputStream for the file.
     * The caller is responsible for closing the stream or otherwise
     * a resource leak can occur.
     * @param f a File
     * @return an InputStream for the file
     * @throws IOException
     */
    public InputStream getInputStream(File f)
            throws IOException {
        // use the magic numbers to determine the compression type,
        // use file extension only as 2nd choice
        int magic;

        // Fix: the probe stream is now closed even when reading fails.
        InputStream probe = getInputStreamFromFile(f);
        try {
            magic = getMagicNumber(probe);
        } finally {
            probe.close();
        }

        String fileName = f.getName();

        if (magic == UncompressInputStream.LZW_MAGIC) {
            // a Z compressed file
            return openCompressedFile(f);
        }
        if (magic == GZIP_MAGIC || fileName.endsWith(".gz")) {
            return openGZIPFile(f);
        }
        if (fileName.endsWith(".zip")) {
            // stream to first entry is returned ...
            // NOTE(review): the ZipFile itself stays open while the entry
            // stream is in use; closing the entry stream does not close it.
            ZipFile zipfile = new ZipFile(f);
            Enumeration<? extends ZipEntry> e = zipfile.entries();
            if (!e.hasMoreElements()) {
                // Fix: close the file handle before reporting the error.
                zipfile.close();
                throw new IOException("Zip file has no entries");
            }
            return zipfile.getInputStream(e.nextElement());
        }
        if (fileName.endsWith(".jar")) {
            // stream to first entry is returned
            JarFile jarFile = new JarFile(f);
            Enumeration<JarEntry> e = jarFile.entries();
            if (!e.hasMoreElements()) {
                // Fix: close the file handle before reporting the error.
                jarFile.close();
                throw new IOException("Jar file has no entries");
            }
            return jarFile.getInputStream(e.nextElement());
        }
        if (fileName.endsWith(".Z")) {
            // unix compressed
            return openCompressedFile(f);
        }
        // no particular extension found, assume that it is an uncompressed file
        return getInputStreamFromFile(f);
    }

    /**
     * Wrapper for new FileInputStream. if System.property biojava.cache.files is set, will try to load files from memory cache.
     *
     * @param f
     * @return an InputStream over the raw file contents
     * @throws FileNotFoundException
     */
    private InputStream getInputStreamFromFile(File f) throws FileNotFoundException {
        InputStream stream = null;

        if (cacheRawFiles) {
            stream = FlatFileCache.getInputStream(f.getAbsolutePath());

            if (stream == null) {
                FlatFileCache.addToCache(f.getAbsolutePath(), f);
                stream = FlatFileCache.getInputStream(f.getAbsolutePath());
            }
        }

        if (stream == null)
            stream = new FileInputStream(f);

        return stream;
    }

    /** Opens a unix-compress (.Z) decompressing stream over the file. */
    private InputStream openCompressedFile(File f)
            throws IOException {
        InputStream is = getInputStreamFromFile(f);
        return new UncompressInputStream(is);
    }

    /** Opens a unix-compress (.Z) decompressing stream over the URL. */
    private InputStream openCompressedURL(URL u)
            throws IOException {
        InputStream is = u.openStream();
        return new UncompressInputStream(is);
    }

    /** Opens a gzip-decompressing stream over the file. */
    private InputStream openGZIPFile(File f)
            throws IOException {
        InputStream is = getInputStreamFromFile(f);
        return new GZIPInputStream(is);
    }

    /** Opens a gzip-decompressing stream over the URL. */
    private InputStream openGZIPURL(URL u)
            throws IOException {
        InputStream is = u.openStream();
        return new GZIPInputStream(is);
    }
}
| lgpl-2.1 |
marclaporte/jitsi | src/net/java/sip/communicator/service/protocol/media/ConferenceInfoDocument.java | 43708 | /*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.service.protocol.media;
import net.java.sip.communicator.util.*;
import org.jitsi.util.xml.*;
import org.w3c.dom.*;
import javax.xml.parsers.*;
import javax.xml.transform.*;
import javax.xml.transform.dom.*;
import javax.xml.transform.stream.*;
import java.io.*;
import java.util.*;
/**
* A class that represents a Conference Information XML document as defined in
* RFC4575. It wraps around a DOM <tt>Document</tt> providing convenience
* functions.
*
* {@link "http://tools.ietf.org/html/rfc4575"}
*
* @author Boris Grozev
* @author Sebastien Vincent
*/
public class ConferenceInfoDocument
{
/**
* The <tt>Logger</tt> used by the <tt>ConferenceInfoDocument</tt> class
* and its instances for logging output.
*/
private static final Logger logger
= Logger.getLogger(ConferenceInfoDocument.class);
/**
* The namespace of the conference-info element.
*/
public static final String NAMESPACE
= "urn:ietf:params:xml:ns:conference-info";
/**
* The name of the "conference-info" element.
*/
public static final String CONFERENCE_INFO_ELEMENT_NAME = "conference-info";
/**
* The name of the "conference-description" element.
*/
public static final String CONFERENCE_DESCRIPTION_ELEMENT_NAME
= "conference-description";
/**
* The name of the "conference-state" element.
*/
public static final String CONFERENCE_STATE_ELEMENT_NAME
= "conference-state";
/**
* The name of the "state" attribute.
*/
public static final String STATE_ATTR_NAME = "state";
/**
* The name of the "entity" attribute.
*/
public static final String ENTITY_ATTR_NAME = "entity";
/**
* The name of the "version" attribute.
*/
public static final String VERSION_ATTR_NAME = "version";
/**
* The name of the "user" element.
*/
public static final String USER_ELEMENT_NAME = "user";
/**
* The name of the "users" element.
*/
public static final String USERS_ELEMENT_NAME = "users";
/**
* The name of the "endpoint" element.
*/
public static final String ENDPOINT_ELEMENT_NAME = "endpoint";
/**
* The name of the "media" element.
*/
public static final String MEDIA_ELEMENT_NAME = "media";
/**
* The name of the "id" attribute.
*/
public static final String ID_ATTR_NAME = "id";
/**
* The name of the "status" element.
*/
public static final String STATUS_ELEMENT_NAME = "status";
/**
* The name of the "src-id" element.
*/
public static final String SRC_ID_ELEMENT_NAME = "src-id";
/**
* The name of the "type" element.
*/
public static final String TYPE_ELEMENT_NAME = "type";
/**
* The name of the "user-count" element.
*/
public static final String USER_COUNT_ELEMENT_NAME = "user-count";
/**
     * The name of the "display-text" element.
*/
public static final String DISPLAY_TEXT_ELEMENT_NAME = "display-text";
/**
* The <tt>Document</tt> object that we wrap around.
*/
private Document document;
/**
* The single <tt>conference-info</tt> element of <tt>document</tt>
*/
private Element conferenceInfo;
/**
* The <tt>conference-description</tt> child element of
* <tt>conference-info</tt>.
*/
private Element conferenceDescription;
/**
* The <tt>conference-state</tt> child element of <tt>conference-info</tt>.
*/
private Element conferenceState;
/**
* The <tt>conference-state</tt> child element of <tt>conference-state</tt>.
*/
private Element userCount;
/**
* The <tt>users</tt> child element of <tt>conference-info</tt>.
*/
private Element users;
/**
* A list of <tt>User</tt>s representing the children of <tt>users</tt>
*/
private final List<User> usersList = new LinkedList<User>();
    /**
     * Creates a new, empty <tt>ConferenceInfoDocument</tt> with version 1,
     * a zero user count, and empty <tt>conference-description</tt>,
     * <tt>conference-state</tt> and <tt>users</tt> children.
     *
     * @throws XMLException if a document failed to be created.
     */
    public ConferenceInfoDocument()
        throws XMLException
    {
        try
        {
            document = XMLUtils.createDocument();
        }
        catch (Exception e)
        {
            logger.error("Failed to create a new document.", e);
            throw(new XMLException(e.getMessage()));
        }

        // Root <conference-info> element, in the RFC4575 namespace.
        conferenceInfo = document
            .createElementNS(NAMESPACE, CONFERENCE_INFO_ELEMENT_NAME);
        document.appendChild(conferenceInfo);

        setVersion(1);

        // <conference-description> is a mandatory child per RFC4575.
        conferenceDescription
            = document.createElement(CONFERENCE_DESCRIPTION_ELEMENT_NAME);
        conferenceInfo.appendChild(conferenceDescription);

        conferenceState = document.createElement(CONFERENCE_STATE_ELEMENT_NAME);
        conferenceInfo.appendChild(conferenceState);
        // Also creates the <user-count> child of <conference-state>.
        setUserCount(0);

        users = document.createElement(USERS_ELEMENT_NAME);
        conferenceInfo.appendChild(users);
    }
    /**
     * Creates a new <tt>ConferenceInfoDocument</tt> instance and populates it
     * by parsing the XML in <tt>xml</tt>. The <tt>conference-info</tt>,
     * <tt>conference-description</tt> and <tt>users</tt> elements are
     * mandatory; <tt>conference-state</tt> and <tt>user-count</tt> are
     * optional.
     *
     * @param xml the XML string to parse
     *
     * @throws XMLException If parsing failed
     */
    public ConferenceInfoDocument(String xml)
        throws XMLException
    {
        byte[] bytes;

        try
        {
            bytes = xml.getBytes("UTF-8");
        }
        catch (UnsupportedEncodingException uee)
        {
            // Fall back to the platform charset; UTF-8 should always be
            // available, so this path is effectively unreachable.
            logger.warn(
                    "Failed to gets bytes from String for the UTF-8 charset",
                    uee);
            bytes = xml.getBytes();
        }

        try
        {
            document
                = XMLUtils.newDocumentBuilderFactory().newDocumentBuilder()
                    .parse(new ByteArrayInputStream(bytes));
        }
        catch (Exception e)
        {
            throw new XMLException(e.getMessage());
        }

        conferenceInfo = document.getDocumentElement();
        if (conferenceInfo == null)
        {
            throw new XMLException("Could not parse conference-info document,"
                    + " conference-info element not found");
        }

        conferenceDescription = XMLUtils
            .findChild(conferenceInfo, CONFERENCE_DESCRIPTION_ELEMENT_NAME);
        //conference-description is mandatory
        if (conferenceDescription == null)
        {
            throw new XMLException("Could not parse conference-info document,"
                    + " conference-description element not found");
        }

        // <conference-state> (and its <user-count>) are optional.
        conferenceState
            = XMLUtils.findChild(conferenceInfo, CONFERENCE_STATE_ELEMENT_NAME);
        if (conferenceState != null)
            userCount = XMLUtils
                .findChild(conferenceState, USER_COUNT_ELEMENT_NAME);

        users = XMLUtils.findChild(conferenceInfo, USERS_ELEMENT_NAME);
        if (users == null)
        {
            throw new XMLException("Could not parse conference-info document,"
                    + " 'users' element not found");
        }

        // Wrap each <user> child in a User instance for convenient access.
        NodeList usersNodeList = users.getElementsByTagName(USER_ELEMENT_NAME);
        for(int i=0; i<usersNodeList.getLength(); i++)
        {
            User user = new User((Element)usersNodeList.item(i));
            usersList.add(user);
        }
    }
    /**
     * Creates a new <tt>ConferenceInfoDocument</tt> instance that represents
     * a copy of <tt>confInfo</tt>: the sid (if set), entity, states, user
     * count, version and all users are copied.
     *
     * @param confInfo the document to copy
     * @throws XMLException if a document failed to be created.
     */
    public ConferenceInfoDocument(ConferenceInfoDocument confInfo)
            throws XMLException
    {
        this();

        // temporary: sid is a non-RFC4575 extension (defined elsewhere in
        // this class); only copy it when present and non-empty.
        String sid = confInfo.getSid();
        if(sid != null && !sid.equals(""))
            setSid(sid);

        setEntity(confInfo.getEntity());
        setState(confInfo.getState());
        setUserCount(confInfo.getUserCount());
        setUsersState(confInfo.getUsersState());
        setVersion(confInfo.getVersion());
        for (User user : confInfo.getUsers())
            addUser(user);
    }
/**
* Returns the value of the <tt>version</tt> attribute of the
* <tt>conference-info</tt> element, or -1 if there is no <tt>version</tt>
* attribute or if it's value couldn't be parsed as an integer.
* @return the value of the <tt>version</tt> attribute of the
* <tt>conference-info</tt> element, or -1 if there is no <tt>version</tt>
* attribute or if it's value couldn't be parsed as an integer.
*/
public int getVersion()
{
String versionString = conferenceInfo.getAttribute(VERSION_ATTR_NAME);
if (versionString == null)
return -1;
int version = -1;
try
{
version = Integer.parseInt(versionString);
}
catch (NumberFormatException e)
{
if (logger.isInfoEnabled())
logger.info("Failed to parse version string: " + versionString);
}
return version;
}
/**
* Sets the <tt>version</tt> attribute of the <tt>conference-info</tt>
* element.
* @param version the value to set the <tt>version</tt> attribute of the
* <tt>conference-info</tt> element to.
*/
public void setVersion(int version)
{
conferenceInfo.setAttribute(VERSION_ATTR_NAME, Integer.toString(version));
}
    /**
     * Gets the value of the <tt>state</tt> attribute of the
     * <tt>conference-info</tt> element. Defaults to {@link State#FULL} when
     * the attribute is absent (see {@link #getState(Element)}).
     * @return the value of the <tt>state</tt> attribute of the
     * <tt>conference-info</tt> element.
     */
    public State getState()
    {
        return getState(conferenceInfo);
    }
    /**
     * Returns the value of the <tt>state</tt> attribute of the <tt>users</tt>
     * child of the <tt>conference-info</tt> element. Defaults to
     * {@link State#FULL} when the attribute is absent.
     *
     * @return the value of the <tt>state</tt> attribute of the <tt>users</tt>
     * child of the <tt>conference-info</tt> element.
     */
    public State getUsersState()
    {
        return getState(users);
    }
    /**
     * Sets the <tt>state</tt> attribute of the <tt>users</tt> child of the
     * <tt>conference-info</tt> element. Passing {@link State#FULL} (the
     * RFC4575 default) removes the attribute.
     *
     * @param state the state to set
     */
    public void setUsersState(State state)
    {
        setState(users, state);
    }
    /**
     * Sets the value of the <tt>state</tt> attribute of the
     * <tt>conference-info</tt> element. Passing {@link State#FULL} (the
     * RFC4575 default) removes the attribute.
     * @param state the value to set the <tt>state</tt> attribute of the
     * <tt>conference-info</tt> element to.
     */
    public void setState(State state)
    {
        setState(conferenceInfo, state);
    }
/**
* Sets the value of the <tt>sid</tt> attribute of the
* <tt>conference-info</tt> element.
* This is not part of RFC4575 and is here because we are temporarily using
* it in our XMPP implementation.
* TODO: remote it when we define another way to handle the Jingle SID
*
* @param sid the value to set the <tt>sid</tt> attribute of the
* <tt>conference-info</tt> element to.
*/
public void setSid(String sid)
{
if (sid == null || sid.equals(""))
conferenceInfo.removeAttribute("sid");
else
conferenceInfo.setAttribute("sid", sid);
}
    /**
     * Gets the value of the <tt>sid</tt> attribute of the
     * <tt>conference-info</tt> element.
     * This is not part of RFC4575 and is here because we are temporarily using
     * it in our XMPP implementation.
     * TODO: remove it when we define another way to handle the Jingle SID
     *
     * @return the value of the <tt>sid</tt> attribute (per the DOM contract
     * this is the empty string, not <tt>null</tt>, when the attribute is
     * absent).
     */
    public String getSid()
    {
        return conferenceInfo.getAttribute("sid");
    }
/**
* Sets the value of the <tt>entity</tt> attribute of the
* <tt>conference-info</tt> element.
* @param entity the value to set the <tt>entity</tt> attribute of the
* <tt>conference-info</tt> document to.
*/
public void setEntity(String entity)
{
if (entity == null || entity.equals(""))
conferenceInfo.removeAttribute(ENTITY_ATTR_NAME);
else
conferenceInfo.setAttribute(ENTITY_ATTR_NAME, entity);
}
    /**
     * Gets the value of the <tt>entity</tt> attribute of the
     * <tt>conference-info</tt> element.
     * @return The value of the <tt>entity</tt> attribute of the
     * <tt>conference-info</tt> element (per the DOM contract this is the
     * empty string, not <tt>null</tt>, when the attribute is absent).
     */
    public String getEntity()
    {
        return conferenceInfo.getAttribute(ENTITY_ATTR_NAME);
    }
/**
* Sets the content of the <tt>user-count</tt> child element of the
* <tt>conference-state</tt> child element of <tt>conference-info</tt>
* @param count the value to set the content of <tt>user-count</tt> to
*/
public void setUserCount(int count)
{
// conference-state and its user-count child aren't mandatory
if (userCount != null)
{
userCount.setTextContent(Integer.toString(count));
}
else
{
if (conferenceState == null)
{
conferenceState
= document.createElement(CONFERENCE_STATE_ELEMENT_NAME);
conferenceInfo.appendChild(conferenceState);
}
userCount = document.createElement(USER_COUNT_ELEMENT_NAME);
userCount.setTextContent(Integer.toString(count));
conferenceState.appendChild(userCount);
}
}
/**
* Returns the content of the <tt>user-count</tt> child of the
* <tt>conference-state</tt> child of <tt>conference-info</tt>, parsed as
* an integer, if they exist. Returns -1 if either there isn't a
* <tt>conference-state</tt> element, it doesn't have a <tt>user-count</tt>
* child, or parsing as integer failed.
*
* @return the content of the <tt>user-count</tt> child of the
* <tt>conference-state</tt> child of <tt>conference-info</tt> element.
*/
public int getUserCount()
{
int ret = -1;
try
{
ret = Integer.parseInt(userCount.getTextContent());
}
catch (Exception e)
{
logger.warn("Could not parse user-count field");
}
return ret;
}
    /**
     * Returns the XML representation of the <tt>conference-info</tt> tree,
     * or <tt>null</tt> if an error occurs while trying to get it.
     *
     * @return the XML representation of the <tt>conference-info</tt> tree,
     * or <tt>null</tt> if an error occurs while trying to get it.
     */
    public String toXml()
    {
        try
        {
            Transformer transformer
                = TransformerFactory.newInstance().newTransformer();
            StringWriter buffer = new StringWriter();
            // Serialize as a document fragment: the XML declaration
            // (<?xml ...?>) is intentionally omitted.
            transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION,
                    "yes");
            transformer.transform(new DOMSource(conferenceInfo),
                    new StreamResult(buffer));
            return buffer.toString();
        }
        catch (Exception e)
        {
            // Deliberately best-effort: serialization failures are reported
            // to the caller as null rather than as an exception.
            return null;
        }
    }
/**
* Returns the XML representation of the document (from the
* <tt>conference-info</tt> element down), or an error string in case the
* XML cannot be generated for some reason.
* @return the XML representation of the document or an error string.
*/
@Override
public String toString()
{
String s = toXml();
return s == null
? "Could not get conference-info XML"
: s;
}
    /**
     * Returns the list of <tt>User</tt> that represents the <tt>user</tt>
     * children of the <tt>users</tt> child element of <tt>conference-info</tt>
     * <p>
     * Note: this is the live internal list, not a copy; callers should use
     * {@link #addUser(User)} and {@link #removeUser(String)} to modify it so
     * the DOM tree stays in sync.
     * @return the list of <tt>User</tt> that represents the <tt>user</tt>
     * children of the <tt>users</tt> child element of <tt>conference-info</tt>
     */
    public List<User> getUsers()
    {
        return usersList;
    }
/**
* Searches this document's <tt>User</tt>s and returns the one with
* <tt>entity</tt> attribute <tt>entity</tt>, or <tt>null</tt> if one
* wasn't found.
* @param entity The value of the <tt>entity</tt> attribute to search for.
* @return the <tt>User</tt> of this document with <tt>entity</tt>
* attribute <tt>entity</tt>, or <tt>null</tt> if one wasn't found.
* */
public User getUser(String entity)
{
if (entity == null)
return null;
for(User u : usersList)
{
if (entity.equals(u.getEntity()))
return u;
}
return null;
}
/**
* Creates a new <tt>User</tt> instance, adds it to the document and
* returns it.
* @param entity The value to use for the <tt>entity</tt> attribute of the
* new <tt>User</tt>.
* @return the newly created <tt>User</tt> instance.
*/
public User addNewUser(String entity)
{
Element userElement = document.createElement(USER_ELEMENT_NAME);
User user = new User(userElement);
user.setEntity(entity);
users.appendChild(userElement);
usersList.add(user);
return user;
}
/**
* Adds a copy of <tt>user</tt> to this <tt>ConferenceInfoDocument</tt>
* @param user the <tt>User</tt> to add a copy of
*/
public void addUser(User user)
{
User newUser = addNewUser(user.getEntity());
newUser.setDisplayText(user.getDisplayText());
newUser.setState(user.getState());
for (Endpoint endpoint : user.getEndpoints())
newUser.addEndpoint(endpoint);
}
/**
* Removes a specific <tt>User</tt> (the one with entity <tt>entity</tt>)
* from the document.
* @param entity the entity of the <tt>User</tt> to remove.
*/
public void removeUser(String entity)
{
User user = getUser(entity);
if (user != null)
{
usersList.remove(user);
users.removeChild(user.userElement);
}
}
    /**
     * Returns the <tt>Document</tt> that this instance wraps around.
     * Note: this is the live DOM document; modifying it directly bypasses
     * the bookkeeping done by this class.
     * @return the <tt>Document</tt> that this instance wraps around.
     */
    public Document getDocument()
    {
        return document;
    }
/**
* Returns the <tt>State</tt> corresponding to the <tt>state</tt> attribute
* of an <tt>Element</tt>. Default to <tt>State.FULL</tt> which is the
* RFC4575 default.
* @param element the <tt>Element</tt>
* @return the <tt>State</tt> corresponding to the <tt>state</tt> attribute
* of an <tt>Element</tt>.
*/
private State getState(Element element)
{
State state = State.parseString(element.getAttribute(STATE_ATTR_NAME));
return state == null
? State.FULL
: state;
}
/**
* Sets the "state" attribute of <tt>element</tt> to <tt>state</tt>.
* If <tt>state</tt> is <tt>State.FULL</tt> removes the "state" attribute,
* because this is the default value.
* @param element The <tt>Element</tt> for which to set the "state"
* attribute of.
* @param state the <tt>State</tt> which to set.
*/
private void setState(Element element, State state)
{
if (element != null)
{
if (state == State.FULL || state == null)
element.removeAttribute(STATE_ATTR_NAME);
else
element.setAttribute(STATE_ATTR_NAME, state.toString());
}
}
/**
* Sets the <tt>status</tt> child element of <tt>element</tt>. If
* <tt>statusString</tt> is <tt>null</tt>, the child element is removed
* if present.
* @param element the <tt>Element</tt> for which to set the <tt>status</tt>
* child element.
* @param statusString the <tt>String</tt> to use for the text content of
* the <tt>status</tt> element
*/
private void setStatus(Element element, String statusString)
{
Element statusElement
= XMLUtils.findChild(element, STATUS_ELEMENT_NAME);
if (statusString == null || statusString.equals(""))
{
if(statusElement != null)
element.removeChild(statusElement);
}
else
{
if (statusElement == null)
{
statusElement = document.createElement(STATUS_ELEMENT_NAME);
element.appendChild(statusElement);
}
statusElement.setTextContent(statusString);
}
}
/**
* Represents the possible values for the <tt>state</tt> attribute (see
* RFC4575)
*/
public enum State
{
/**
* State <tt>full</tt>
*/
FULL("full"),
/**
* State <tt>partial</tt>
*/
PARTIAL("partial"),
/**
* State <tt>deleted</tt>
*/
DELETED("deleted");
/**
* The name of this <tt>State</tt>
*/
private String name;
/**
* Creates a <tt>State</tt> instance with the specified name.
* @param name
*/
private State(String name)
{
this.name = name;
}
/**
* Returns the name of this <tt>State</tt>
* @return the name of this <tt>State</tt>
*/
@Override
public String toString()
{
return name;
}
/**
* Returns a <tt>State</tt> value corresponding to the specified
* <tt>name</tt>
* @return a <tt>State</tt> value corresponding to the specified
* <tt>name</tt>
*/
public static State parseString(String name)
{
if (FULL.toString().equals(name))
return FULL;
else if(PARTIAL.toString().equals(name))
return PARTIAL;
else if(DELETED.toString().equals(name))
return DELETED;
else
return null;
}
}
    /**
     * Wraps around an <tt>Element</tt> and represents a <tt>user</tt>
     * element (child of the <tt>users</tt> element). See RFC4575.
     */
    public class User
    {
        /**
         * The underlying <tt>Element</tt>.
         */
        private Element userElement;
        /**
         * The list of <tt>Endpoint</tt>s representing the <tt>endpoint</tt>
         * children of this <tt>User</tt>'s element. Kept in sync with the
         * DOM tree by addNewEndpoint/removeEndpoint.
         */
        private List<Endpoint> endpointsList = new LinkedList<Endpoint>();
        /**
         * Creates a new <tt>User</tt> instance with the specified
         * <tt>Element</tt> as its underlying element, wrapping its existing
         * <tt>endpoint</tt> elements in <tt>Endpoint</tt> instances.
         * @param user the <tt>Element</tt> to use
         */
        private User(Element user)
        {
            this.userElement = user;
            // NOTE(review): getElementsByTagName matches all descendant
            // elements with the given name, not only direct children --
            // confirm that nested endpoint elements cannot occur here.
            NodeList endpointsNodeList
                = user.getElementsByTagName(ENDPOINT_ELEMENT_NAME);
            for (int i=0; i<endpointsNodeList.getLength(); i++)
            {
                Endpoint endpoint
                    = new Endpoint((Element)endpointsNodeList.item(i));
                endpointsList.add(endpoint);
            }
        }
        /**
         * Sets the <tt>entity</tt> attribute of this <tt>User</tt>'s element
         * to <tt>entity</tt>; a <tt>null</tt> or empty value removes the
         * attribute instead.
         * @param entity the value to set for the <tt>entity</tt> attribute.
         */
        public void setEntity(String entity)
        {
            if (entity == null || entity.equals(""))
                userElement.removeAttribute(ENTITY_ATTR_NAME);
            else
                userElement.setAttribute(ENTITY_ATTR_NAME, entity);
        }
        /**
         * Returns the value of the <tt>entity</tt> attribute of this
         * <tt>User</tt>'s element (the empty string when absent, per DOM).
         * @return the value of the <tt>entity</tt> attribute of this
         * <tt>User</tt>'s element.
         */
        public String getEntity()
        {
            return userElement.getAttribute(ENTITY_ATTR_NAME);
        }
        /**
         * Sets the <tt>state</tt> attribute of this <tt>User</tt>'s element to
         * <tt>state</tt> (removed when FULL, the RFC4575 default).
         * @param state the value to use for the <tt>state</tt> attribute.
         */
        public void setState(State state)
        {
            ConferenceInfoDocument.this.setState(userElement, state);
        }
        /**
         * Returns the value of the <tt>state</tt> attribute of this
         * <tt>User</tt>'s element (FULL when absent).
         * @return the value of the <tt>state</tt> attribute of this
         * <tt>User</tt>'s element
         */
        public State getState()
        {
            return ConferenceInfoDocument.this.getState(userElement);
        }
        /**
         * Sets the <tt>display-text</tt> child element of this <tt>User</tt>'s
         * element; a <tt>null</tt> or empty value removes the child instead.
         * @param text the text content to use for the <tt>display-text</tt>
         * element.
         */
        public void setDisplayText(String text)
        {
            Element displayText
                = XMLUtils.findChild(userElement, DISPLAY_TEXT_ELEMENT_NAME);
            if (text == null || text.equals(""))
            {
                if (displayText != null)
                    userElement.removeChild(displayText);
            }
            else
            {
                // Lazily create the display-text child on first use.
                if (displayText == null)
                {
                    displayText
                        = document.createElement(DISPLAY_TEXT_ELEMENT_NAME);
                    userElement.appendChild(displayText);
                }
                displayText.setTextContent(text);
            }
        }
        /**
         * Returns the text content of the <tt>display-text</tt> child element
         * of this <tt>User</tt>'s element, if it has such a child. Returns
         * <tt>null</tt> otherwise.
         * @return the text content of the <tt>display-text</tt> child element
         * of this <tt>User</tt>'s element, if it has such a child. Returns
         * <tt>null</tt> otherwise.
         */
        public String getDisplayText()
        {
            Element displayText
                = XMLUtils.findChild(userElement, DISPLAY_TEXT_ELEMENT_NAME);
            if (displayText != null)
                return displayText.getTextContent();

            return null;
        }
        /**
         * Returns the list of <tt>Endpoint</tt>s which represent the
         * <tt>endpoint</tt> children of this <tt>User</tt>'s element.
         * Note: this is the live internal list, not a copy.
         * @return the list of <tt>Endpoint</tt>s which represent the
         * <tt>endpoint</tt> children of this <tt>User</tt>'s element.
         */
        public List<Endpoint> getEndpoints()
        {
            return endpointsList;
        }
        /**
         * Searches this <tt>User</tt>'s associated <tt>Endpoint</tt>s
         * and returns the one with <tt>entity</tt> attribute <tt>entity</tt>,
         * or <tt>null</tt> if one wasn't found.
         * @param entity The value of the <tt>entity</tt> attribute to search
         * for.
         * @return The <tt>Endpoint</tt> with <tt>entity</tt> attribute
         * <tt>entity</tt>, or <tt>null</tt> if one wasn't found.
         */
        public Endpoint getEndpoint(String entity)
        {
            if (entity == null)
                return null;
            for (Endpoint e : endpointsList)
            {
                if (entity.equals(e.getEntity()))
                    return e;
            }
            return null;
        }
        /**
         * Creates a new <tt>Endpoint</tt> instance, adds it to this
         * <tt>User</tt> (both to the DOM tree and to the internal list) and
         * returns it.
         * @param entity The value to use for the <tt>entity</tt> attribute of
         * the new <tt>Endpoint</tt>.
         * @return the newly created <tt>Endpoint</tt> instance.
         */
        public Endpoint addNewEndpoint(String entity)
        {
            Element endpointElement
                = document.createElement(ENDPOINT_ELEMENT_NAME);
            Endpoint endpoint = new Endpoint(endpointElement);
            endpoint.setEntity(entity);

            userElement.appendChild(endpointElement);
            endpointsList.add(endpoint);

            return endpoint;
        }
        /**
         * Adds a copy of <tt>endpoint</tt> (including its <tt>Media</tt>s) to
         * this <tt>User</tt>
         * @param endpoint the <tt>Endpoint</tt> to add a copy of
         */
        public void addEndpoint(Endpoint endpoint)
        {
            Endpoint newEndpoint = addNewEndpoint(endpoint.getEntity());
            newEndpoint.setStatus(endpoint.getStatus());
            newEndpoint.setState(endpoint.getState());
            for (Media media : endpoint.getMedias())
                newEndpoint.addMedia(media);
        }
        /**
         * Removes a specific <tt>Endpoint</tt> (the one with entity
         * <tt>entity</tt>) from this <tt>User</tt>; does nothing if no such
         * endpoint exists.
         * @param entity the <tt>entity</tt> of the <tt>Endpoint</tt> to remove
         */
        public void removeEndpoint(String entity)
        {
            Endpoint endpoint = getEndpoint(entity);
            if (endpoint != null)
            {
                endpointsList.remove(endpoint);
                userElement.removeChild(endpoint.endpointElement);
            }
        }
    }
    /**
     * Wraps around an <tt>Element</tt> and represents an <tt>endpoint</tt>
     * element. See RFC4575.
     */
    public class Endpoint
    {
        /**
         * The underlying <tt>Element</tt>.
         */
        private Element endpointElement;
        /**
         * The list of <tt>Media</tt>s representing the <tt>media</tt>
         * children elements of this <tt>Endpoint</tt>'s element. Kept in
         * sync with the DOM tree by addNewMedia/removeMedia.
         */
        private List<Media> mediasList = new LinkedList<Media>();
        /**
         * Creates a new <tt>Endpoint</tt> instance with the specified
         * <tt>Element</tt> as its underlying element, wrapping its existing
         * <tt>media</tt> elements in <tt>Media</tt> instances.
         * @param endpoint the <tt>Element</tt> to use
         */
        private Endpoint(Element endpoint)
        {
            this.endpointElement = endpoint;
            // NOTE(review): getElementsByTagName matches all descendant
            // elements with the given name, not only direct children --
            // confirm that nested media elements cannot occur here.
            NodeList mediaNodeList
                = endpoint.getElementsByTagName(MEDIA_ELEMENT_NAME);
            for (int i=0; i<mediaNodeList.getLength(); i++)
            {
                Media media = new Media((Element)mediaNodeList.item(i));
                mediasList.add(media);
            }
        }
        /**
         * Sets the <tt>entity</tt> attribute of this <tt>Endpoint</tt>'s
         * element to <tt>entity</tt>; a <tt>null</tt> or empty value removes
         * the attribute instead.
         * @param entity the value to set for the <tt>entity</tt> attribute.
         */
        public void setEntity(String entity)
        {
            if (entity == null || entity.equals(""))
                endpointElement.removeAttribute(ENTITY_ATTR_NAME);
            else
                endpointElement.setAttribute(ENTITY_ATTR_NAME, entity);
        }
        /**
         * Returns the <tt>entity</tt> attribute of this <tt>Endpoint</tt>'s
         * element (the empty string when absent, per DOM).
         * @return the <tt>entity</tt> attribute of this <tt>Endpoint</tt>'s
         * element.
         */
        public String getEntity()
        {
            return endpointElement.getAttribute(ENTITY_ATTR_NAME);
        }
        /**
         * Sets the <tt>state</tt> attribute of this <tt>Endpoint</tt>'s
         * element to <tt>state</tt> (removed when FULL, the RFC4575 default).
         * @param state the value to use for the <tt>state</tt> attribute.
         */
        public void setState(State state)
        {
            ConferenceInfoDocument.this.setState(endpointElement, state);
        }
        /**
         * Returns the value of the <tt>state</tt> attribute of this
         * <tt>Endpoint</tt>'s element (FULL when absent).
         * @return the value of the <tt>state</tt> attribute of this
         * <tt>Endpoint</tt>'s element
         */
        public State getState()
        {
            return ConferenceInfoDocument.this.getState(endpointElement);
        }
        /**
         * Sets the <tt>status</tt> child element of this <tt>Endpoint</tt>'s
         * element; a <tt>null</tt> status removes the child instead.
         * @param status the value to be used for the text content of the
         * <tt>status</tt> element.
         */
        public void setStatus(EndpointStatusType status)
        {
            ConferenceInfoDocument.this.setStatus(endpointElement,
                    status == null
                        ? null
                        : status.toString());
        }
        /**
         * Returns the <tt>EndpointStatusType</tt> corresponding to the
         * <tt>status</tt> child of this <tt>Endpoint</tt>'s element, or
         * <tt>null</tt> if there is no <tt>status</tt> child.
         * NOTE(review): EndpointStatusType.parseString throws
         * IllegalArgumentException (it does not return null) when the status
         * text is present but unrecognized -- confirm callers expect that.
         * @return the <tt>EndpointStatusType</tt> corresponding to the
         * <tt>status</tt> child of this <tt>Endpoint</tt>'s element, or
         * <tt>null</tt>.
         */
        public EndpointStatusType getStatus()
        {
            Element statusElement
                = XMLUtils.findChild(endpointElement, STATUS_ELEMENT_NAME);
            return statusElement == null
                    ? null
                    : EndpointStatusType.parseString(statusElement.getTextContent());
        }
        /**
         * Returns the list of <tt>Media</tt>s which represent the
         * <tt>media</tt> children of this <tt>Endpoint</tt>'s element.
         * Note: this is the live internal list, not a copy.
         * @return the list of <tt>Media</tt>s which represent the
         * <tt>media</tt> children of this <tt>Endpoint</tt>'s element.
         */
        public List<Media> getMedias()
        {
            return mediasList;
        }
        /**
         * Searches this <tt>Endpoint</tt>'s associated <tt>Media</tt>s
         * and returns the one with <tt>id</tt> attribute <tt>id</tt>, or
         * <tt>null</tt> if one wasn't found.
         * @param id The value of the <tt>id</tt> attribute to search
         * for.
         * @return The <tt>Media</tt>s with <tt>id</tt> attribute <tt>id</tt>,
         * or <tt>null</tt> if one wasn't found.
         */
        public Media getMedia(String id)
        {
            if (id == null)
                return null;
            for (Media m : mediasList)
            {
                if (id.equals(m.getId()))
                    return m;
            }
            return null;
        }
        /**
         * Creates a new <tt>Media</tt> instance, adds it to this
         * <tt>Endpoint</tt> (both to the DOM tree and to the internal list)
         * and returns it.
         * @param id The value to use for the <tt>id</tt> attribute of the
         * new <tt>Media</tt>'s element.
         * @return the newly created <tt>Media</tt> instance.
         */
        public Media addNewMedia(String id)
        {
            Element mediaElement = document.createElement(MEDIA_ELEMENT_NAME);
            Media media = new Media(mediaElement);
            media.setId(id);

            endpointElement.appendChild(mediaElement);
            mediasList.add(media);

            return media;
        }
        /**
         * Adds a copy of <tt>media</tt> to this <tt>Endpoint</tt>
         * @param media the <tt>Media</tt> to add a copy of
         */
        public void addMedia(Media media)
        {
            Media newMedia = addNewMedia(media.getId());
            newMedia.setSrcId(media.getSrcId());
            newMedia.setType(media.getType());
            newMedia.setStatus(media.getStatus());
        }
        /**
         * Removes a specific <tt>Media</tt> (the one with id <tt>id</tt>) from
         * this <tt>Endpoint</tt>; does nothing if no such media exists.
         * @param id the <tt>id</tt> of the <tt>Media</tt> to remove.
         */
        public void removeMedia(String id)
        {
            Media media = getMedia(id);
            if (media != null)
            {
                mediasList.remove(media);
                endpointElement.removeChild(media.mediaElement);
            }
        }
    }
/**
* Wraps around an <tt>Element</tt> and represents a <tt>media</tt>
* element. See RFC4575.
*/
public class Media
{
/**
* The underlying <tt>Element</tt>.
*/
private Element mediaElement;
/**
* Creates a new <tt>Media</tt> instance with the specified
* <tt>Element</tt> as its underlying element.
* @param media the <tt>Element</tt> to use
*/
private Media(Element media)
{
this.mediaElement = media;
}
/**
* Sets the <tt>id</tt> attribute of this <tt>Media</tt>'s element to
* <tt>id</tt>
* @param id the value to set for the <tt>id</tt> attribute.
*/
public void setId(String id)
{
if (id == null || id.equals(""))
mediaElement.removeAttribute(ID_ATTR_NAME);
else
mediaElement.setAttribute(ID_ATTR_NAME, id);
}
/**
* Returns the <tt>id</tt> attribute of this <tt>Media</tt>'s element.
* @return the <tt>id</tt> attribute of this <tt>Media</tt>'s element.
*/
public String getId()
{
return mediaElement.getAttribute(ID_ATTR_NAME);
}
/**
* Sets the <tt>src-id</tt> child element of this <tt>Media</tt>'s
* element.
* @param srcId the value to be used for the text content of the
* <tt>src-id</tt> element.
*/
public void setSrcId(String srcId)
{
Element srcIdElement
= XMLUtils.findChild(mediaElement, SRC_ID_ELEMENT_NAME);
if (srcId == null || srcId.equals(""))
{
if (srcIdElement != null)
mediaElement.removeChild(srcIdElement);
}
else
{
if (srcIdElement == null)
{
srcIdElement
= document.createElement(SRC_ID_ELEMENT_NAME);
mediaElement.appendChild(srcIdElement);
}
srcIdElement.setTextContent(srcId);
}
}
/**
* Returns the text content of the <tt>src-id</tt> child element
* of this <tt>Media</tt>'s element, if it has such a child. Returns
* <tt>null</tt> otherwise.
* @return the text content of the <tt>src-id</tt> child element
* of this <tt>Media</tt>'s element, if it has such a child. Returns
* <tt>null</tt> otherwise.
*/
public String getSrcId()
{
Element srcIdElement
= XMLUtils.findChild(mediaElement, SRC_ID_ELEMENT_NAME);
return srcIdElement == null
? null
: srcIdElement.getTextContent();
}
/**
* Sets the <tt>type</tt> child element of this <tt>Media</tt>'s
* element.
* @param type the value to be used for the text content of the
* <tt>type</tt> element.
*/
public void setType(String type)
{
Element typeElement
= XMLUtils.findChild(mediaElement, TYPE_ELEMENT_NAME);
if (type == null || type.equals(""))
{
if (typeElement != null)
mediaElement.removeChild(typeElement);
}
else
{
if (typeElement == null)
{
typeElement = document.createElement(TYPE_ELEMENT_NAME);
mediaElement.appendChild(typeElement);
}
typeElement.setTextContent(type);
}
}
/**
* Returns the text content of the <tt>type</tt> child element
* of this <tt>Media</tt>'s element, if it has such a child. Returns
* <tt>null</tt> otherwise.
* @return the text content of the <tt>type</tt> child element
* of this <tt>Media</tt>'s element, if it has such a child. Returns
* <tt>null</tt> otherwise.
*/
public String getType()
{
Element typeElement
= XMLUtils.findChild(mediaElement, TYPE_ELEMENT_NAME);
return typeElement == null
? null
: typeElement.getTextContent();
}
/**
* Sets the <tt>status</tt> child element of this <tt>Media</tt>'s
* element.
* @param status the value to be used for the text content of the
* <tt>status</tt> element.
*/
public void setStatus(String status)
{
ConferenceInfoDocument.this.setStatus(mediaElement, status);
}
/**
* Returns the text content of the <tt>status</tt> child element
* of this <tt>Media</tt>'s element, if it has such a child. Returns
* <tt>null</tt> otherwise.
* @return the text content of the <tt>status</tt> child element
* of this <tt>Media</tt>'s element, if it has such a child. Returns
* <tt>null</tt> otherwise.
*/
public String getStatus()
{
Element statusElement
= XMLUtils.findChild(mediaElement, STATUS_ELEMENT_NAME);
return statusElement == null
? null
: statusElement.getTextContent();
}
}
/**
* Endpoint status type.
*
* @author Sebastien Vincent
*/
public enum EndpointStatusType
{
/**
* Pending.
*/
pending("pending"),
/**
* Dialing-out.
*/
dialing_out ("dialing-out"),
/**
* Dialing-in.
*/
dialing_in("dialing-in"),
/**
* Alerting.
*/
alerting("alerting"),
/**
* On-hold.
*/
on_hold("on-hold"),
/**
* Connected.
*/
connected("connected"),
/**
* Muted via focus.
*/
muted_via_focus("mute-via-focus"),
/**
* Disconnecting.
*/
disconnecting("disconnecting"),
/**
* Disconnected.
*/
disconnected("disconnected");
/**
* The name of this type.
*/
private final String type;
/**
* Creates a <tt>EndPointType</tt> instance with the specified name.
*
* @param type type name.
*/
private EndpointStatusType(String type)
{
this.type = type;
}
/**
* Returns the type name.
*
* @return type name
*/
@Override
public String toString()
{
return type;
}
/**
* Returns a <tt>EndPointType</tt>.
*
* @param typeStr the <tt>String</tt> that we'd like to
* parse.
* @return an EndPointType.
*
* @throws IllegalArgumentException in case <tt>typeStr</tt> is
* not a valid <tt>EndPointType</tt>.
*/
public static EndpointStatusType parseString(String typeStr)
throws IllegalArgumentException
{
for (EndpointStatusType value : values())
if (value.toString().equals(typeStr))
return value;
throw new IllegalArgumentException(
typeStr + " is not a valid reason");
}
}
}
| lgpl-2.1 |
Alfresco/community-edition | projects/web-client/source/java/org/alfresco/web/bean/generator/SpaceIconPickerGenerator.java | 4523 | /*
* #%L
* Alfresco Repository WAR Community
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.web.bean.generator;
import javax.faces.component.UIComponent;
import javax.faces.context.FacesContext;
import javax.faces.el.ValueBinding;
import org.alfresco.service.cmr.dictionary.PropertyDefinition;
import org.alfresco.web.app.servlet.FacesHelper;
import org.alfresco.web.ui.common.component.UIListItems;
import org.alfresco.web.ui.repo.RepoConstants;
import org.alfresco.web.ui.repo.component.property.PropertySheetItem;
import org.alfresco.web.ui.repo.component.property.UIPropertySheet;
/**
 * Generates the image picker component with rounded corners for selecting
 * an icon for a space.
 *
 * @author gavinc
 */
public class SpaceIconPickerGenerator extends BaseComponentGenerator
{
   /**
    * Creates and configures the outer image picker component.
    *
    * @param context the current faces context
    * @param id the component id to assign
    * @return the configured image picker component
    */
   @SuppressWarnings("unchecked")
   public UIComponent generate(FacesContext context, String id)
   {
      // create the outer component
      UIComponent component = context.getApplication().
            createComponent(RepoConstants.ALFRESCO_FACES_IMAGE_PICKER);

      // setup the outer component
      component.setRendererType(RepoConstants.ALFRESCO_FACES_RADIO_PANEL_RENDERER);
      FacesHelper.setupComponentId(context, component, id);
      // use Integer.valueOf rather than the deprecated new Integer(int)
      // constructor so the runtime can reuse cached boxed values
      component.getAttributes().put("columns", Integer.valueOf(6));
      component.getAttributes().put("spacing", Integer.valueOf(4));
      component.getAttributes().put("panelBorder", "blue");
      component.getAttributes().put("panelBgcolor", "#D3E6FE");

      return component;
   }

   @Override
   @SuppressWarnings("unchecked")
   protected void setupProperty(FacesContext context,
         UIPropertySheet propertySheet, PropertySheetItem item,
         PropertyDefinition propertyDef, UIComponent component)
   {
      // do the standard setup
      super.setupProperty(context, propertySheet, item, propertyDef, component);

      // if the property sheet is in edit mode we also need to setup the
      // list of icons the user can select from
      if (propertySheet.inEditMode())
      {
         // create the list items child component
         UIListItems items = (UIListItems)context.getApplication().
               createComponent(RepoConstants.ALFRESCO_FACES_LIST_ITEMS);

         // setup the value binding for the list of icons, this needs
         // to be sensitive to the bean used for the property sheet
         // we therefore need to get the value binding expression and
         // extract the bean name and then add '.icons' to the end,
         // this means any page that uses this component must supply
         // a getIcons method that returns a List of UIListItem's
         ValueBinding binding = propertySheet.getValueBinding("value");
         String expression = binding.getExpressionString();
         // NOTE(review): assumes the expression has the form "#{bean...}";
         // indexOf(".") on an expression without a '.' would misbehave --
         // confirm all property sheet bindings match this shape.
         String beanName = expression.substring(2, expression.indexOf(".")+1);
         if (beanName.equals("DialogManager.") || beanName.equals("WizardManager."))
         {
            // deal with the special dialog and wizard manager beans by
            // adding .bean
            beanName = beanName + "bean.";
         }
         String newExpression = "#{" + beanName + "icons}";
         ValueBinding vb = context.getApplication().createValueBinding(newExpression);
         items.setValueBinding("value", vb);

         // add the list items component to the image picker component
         component.getChildren().add(items);
      }
   }
}
| lgpl-3.0 |
gauravpuri/MDP_Repp | src/burlap/behavior/singleagent/planning/deterministic/informed/astar/StaticWeightedAStar.java | 2820 | package burlap.behavior.singleagent.planning.deterministic.informed.astar;
import burlap.oomdp.auxiliary.stateconditiontest.StateConditionTest;
import burlap.behavior.singleagent.planning.deterministic.informed.Heuristic;
import burlap.behavior.singleagent.planning.deterministic.informed.PrioritizedSearchNode;
import burlap.oomdp.statehashing.HashableStateFactory;
import burlap.oomdp.statehashing.HashableState;
import burlap.oomdp.core.Domain;
import burlap.oomdp.singleagent.GroundedAction;
import burlap.oomdp.singleagent.RewardFunction;
/**
* Statically weighted A* [1] implementation. Epsilon is a parameter > 1. The larger the value the more greedy the search. The returned solution
* is guaranteed to be at most \epsilon times the optimal solution cost.
*
* <p/>
* If a terminal function is provided via the setter method defined for OO-MDPs, then the BestFirst search algorithm will not expand any nodes
* that are terminal states, as if there were no actions that could be executed from that state. Note that terminal states
* are not necessarily the same as goal states, since there could be a fail condition from which the agent cannot act, but
* that is not explicitly represented in the transition dynamics.
*
* 1. Pohl, Ira (1970). "First results on the effect of error in heuristic search". Machine Intelligence 5: 219-236.
*
* @author James MacGlashan
*
*/
public class StaticWeightedAStar extends AStar {

	/**
	 * The > 1 epsilon parameter. The larger the value the more greedy.
	 * Stored as "epsilon plus one": the constructor assigns {@code 1 + epsilon}.
	 */
	protected double epsilonP1;

	/**
	 * Initializes the valueFunction. Returned solution will be at most \epsilon times the optimal solution cost.
	 * @param domain the domain in which to plan
	 * @param rf the reward function that represents costs as negative reward
	 * @param gc should evaluate to true for goal states; false otherwise
	 * @param hashingFactory the state hashing factory to use
	 * @param heuristic the planning heuristic. Should return non-positive values.
	 * @param epsilon parameter > 1. The larger the value the more greedy.
	 */
	public StaticWeightedAStar(Domain domain, RewardFunction rf, StateConditionTest gc, HashableStateFactory hashingFactory, Heuristic heuristic, double epsilon) {
		super(domain, rf, gc, hashingFactory, heuristic);
		// NOTE(review): with the documented epsilon > 1 the effective heuristic
		// weight becomes > 2 (since 1 is added here). Confirm whether callers are
		// expected to pass the weight itself or the surplus above 1.
		this.epsilonP1 = 1. + epsilon;
	}

	/**
	 * Computes the weighted f-value for the successor node:
	 * f = g + (epsilonP1 * h), where g is the cumulative (negative) reward from
	 * the start and h is the (non-positive) heuristic estimate.
	 */
	@Override
	public double computeF(PrioritizedSearchNode parentNode, GroundedAction generatingAction, HashableState successorState) {
		double cumR = 0.;
		double r = 0.;
		if(parentNode != null){
			// Cumulative reward to the parent plus the one-step reward of the
			// generating action yields the cumulative reward to the successor.
			double pCumR = cumulatedRewardMap.get(parentNode.s);
			r = rf.reward(parentNode.s.s, generatingAction, successorState.s);
			cumR = pCumR + r;
		}
		double H = heuristic.h(successorState.s);
		// Cache g so the caller (AStar) can record it for the successor node.
		lastComputedCumR = cumR;
		double F = cumR + (this.epsilonP1*H);
		return F;
	}
}
| lgpl-3.0 |
codev777/spring-boot-all | spring-boot-samples/src/test/java/com/lance/AppTest.java | 637 | package com.lance;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* Unit test for simple App.
*/
/**
 * Unit test for simple App.
 *
 * <p>JUnit 3 style placeholder test suite generated by the Maven archetype;
 * the single test always passes and exists only to verify the test harness
 * is wired up.</p>
 */
public class AppTest
    extends TestCase
{
    /**
     * Create the test case
     *
     * @param testName name of the test case
     */
    public AppTest( String testName )
    {
        super( testName );
    }

    /**
     * @return the suite of tests being tested
     */
    public static Test suite()
    {
        return new TestSuite( AppTest.class );
    }

    /**
     * Rigourous Test :-)
     *
     * <p>Trivially passing placeholder assertion.</p>
     */
    public void testApp()
    {
        assertTrue( true );
    }
}
| apache-2.0 |
cgpllx/SettingView_Old | SettingViewLibrary/src/com/dtr/settingview/lib/entity/SettingViewItemData.java | 716 | package com.dtr.settingview.lib.entity;
import android.widget.FrameLayout;
/**
 * Mutable holder describing one entry of a settings list: a numeric id, the
 * Android view that renders the item, the item's {@link SettingData} payload,
 * and a flag controlling whether the item reacts to clicks (defaults to true).
 */
public class SettingViewItemData {

    /** Identifier of this settings entry. */
    private int id;

    /** Container view that renders this entry. */
    private FrameLayout itemView;

    /** Payload describing the entry's content. */
    private SettingData data;

    /** Whether the entry responds to click events; enabled by default. */
    private boolean isClickable = true;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public FrameLayout getItemView() {
        return itemView;
    }

    public void setItemView(FrameLayout itemView) {
        this.itemView = itemView;
    }

    public SettingData getData() {
        return data;
    }

    public void setData(SettingData data) {
        this.data = data;
    }

    public boolean isClickable() {
        return isClickable;
    }

    public void setClickable(boolean isClickable) {
        this.isClickable = isClickable;
    }
}
| apache-2.0 |
axbing/weex | android/sdk/src/main/java/com/taobao/weex/ui/view/WXBackgroundDrawable.java | 15183 | /**
* Copyright (c) 2015-present, Facebook, Inc. All rights reserved.
* This source code is licensed under the BSD-style license found in the LICENSE file in the root
* directory of this source tree. An additional grant of patent rights can be found in the PATENTS
* file in the same directory.
*/
package com.taobao.weex.ui.view;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.DashPathEffect;
import android.graphics.Outline;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PathEffect;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.taobao.weex.dom.flex.CSSConstants;
import com.taobao.weex.dom.flex.FloatUtil;
import com.taobao.weex.dom.flex.Spacing;
import com.taobao.weex.utils.WXLogUtils;
import com.taobao.weex.utils.WXViewUtils;
import java.util.Arrays;
import java.util.Locale;
/**
* A subclass of {@link Drawable} used for background of {@link com.taobao.weex.ui.component.WXComponent}. It supports
* drawing background color and borders (including rounded borders) by providing a react friendly
* API (setter for each of those properties).
*
* The implementation tries to allocate as few objects as possible depending on which properties are
* set. E.g. for views with rounded background/borders we allocate {@code mPathForBorderRadius} and
* {@code mTempRectForBorderRadius}. In case when view have a rectangular borders we allocate
* {@code mBorderWidthResult} and similar. When only background color is set we won't allocate any
* extra/unnecessary objects.
*/
public class WXBackgroundDrawable extends Drawable {

  // Indices into mBorderCornerRadii for the four corners.
  public final static int BORDER_TOP_LEFT_RADIUS = 0;
  public final static int BORDER_TOP_RIGHT_RADIUS = 1;
  public final static int BORDER_BOTTOM_RIGHT_RADIUS = 2;
  public final static int BORDER_BOTTOM_LEFT_RADIUS = 3;
  private static final int DEFAULT_BORDER_COLOR = Color.BLACK;
  /* Used by all types of background and for drawing borders */
  private final Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
  /* Value at Spacing.ALL index used for rounded borders, whole array used by rectangular borders */
  private
  @Nullable
  Spacing mBorderWidth;
  // Per-side border colors, stored as floats inside a Spacing (see getBorderColor).
  private
  @Nullable
  Spacing mBorderColor;
  private
  @Nullable
  BorderStyle mBorderStyle;
  /* Used for rounded border and rounded background */
  private
  @Nullable
  PathEffect mPathEffectForBorderStyle;
  // Lazily (re)built by updatePath() when mNeedUpdatePathForBorderRadius is set.
  private
  @Nullable
  Path mPathForBorderRadius;
  private
  @Nullable
  Path mPathForBorderRadiusOutline;
  // Scratch path reused for drawing the four rectangular border quads.
  private
  @Nullable
  Path mPathForBorder;
  private
  @Nullable
  RectF mTempRectForBorderRadius;
  private
  @Nullable
  RectF mTempRectForBorderRadiusOutline;
  // Dirty flag: set when bounds or radius-affecting properties change.
  private boolean mNeedUpdatePathForBorderRadius = false;
  // Uniform radius applied when no per-corner radii are set.
  private float mBorderRadius = CSSConstants.UNDEFINED;
  private int mColor = Color.TRANSPARENT;
  private int mAlpha = 255;
  // Per-corner radii indexed by the BORDER_*_RADIUS constants; entries may be
  // CSSConstants.UNDEFINED to fall back to mBorderRadius.
  private
  @Nullable
  float[] mBorderCornerRadii;

  /**
   * Dispatches to the rounded or the rectangular drawing path depending on
   * whether any (uniform or per-corner) border radius is configured.
   */
  @Override
  public void draw(Canvas canvas) {
    if ((!CSSConstants.isUndefined(mBorderRadius) && mBorderRadius > 0) || mBorderCornerRadii != null) {
      drawRoundedBackgroundWithBorders(canvas);
    } else {
      drawRectangularBackgroundWithBorders(canvas);
    }
  }

  // Fills the rounded-rect path with the background color, then strokes the
  // same path for the border when a border width is configured.
  private void drawRoundedBackgroundWithBorders(Canvas canvas) {
    updatePath();
    int useColor = WXViewUtils.multiplyColorAlpha(mColor, mAlpha);
    // Top byte of an ARGB int is alpha; zero means fully transparent.
    if ((useColor >>> 24) != 0) { // color is not transparent
      mPaint.setColor(useColor);
      mPaint.setStyle(Paint.Style.FILL);
      canvas.drawPath(mPathForBorderRadius, mPaint);
    }
    // maybe draw borders?
    float fullBorderWidth = getFullBorderWidth();
    if (fullBorderWidth > 0) {
      int borderColor = getFullBorderColor();
      mPaint.setColor(WXViewUtils.multiplyColorAlpha(borderColor, mAlpha));
      mPaint.setStyle(Paint.Style.STROKE);
      mPaint.setStrokeWidth(fullBorderWidth);
      mPaint.setPathEffect(mPathEffectForBorderStyle);
      canvas.drawPath(mPathForBorderRadius, mPaint);
    }
  }

  // NOTE(review): appears unused within this class — presumably kept for
  // ad-hoc debugging; confirm before removing.
  static Paint debugPaint = new Paint();

  static {
    debugPaint.setColor(Color.GREEN);
    debugPaint.setStyle(Paint.Style.STROKE);
  }

  // Fills the bounds with the background color, then draws each of the four
  // borders as a trapezoid so adjacent borders meet in mitered corners.
  private void drawRectangularBackgroundWithBorders(Canvas canvas) {
    int useColor = WXViewUtils.multiplyColorAlpha(mColor, mAlpha);
    Rect bounds = getBounds();
    if ((useColor >>> 24) != 0) { // color is not transparent
      mPaint.setColor(useColor);
      mPaint.setStyle(Paint.Style.FILL);
      canvas.drawRect(bounds, mPaint);
    }
    // maybe draw borders?
    if (getBorderWidth(Spacing.LEFT) > 0 || getBorderWidth(Spacing.TOP) > 0 ||
        getBorderWidth(Spacing.RIGHT) > 0 || getBorderWidth(Spacing.BOTTOM) > 0) {
      int borderLeft = getBorderWidth(Spacing.LEFT);
      int borderTop = getBorderWidth(Spacing.TOP);
      int borderRight = getBorderWidth(Spacing.RIGHT);
      int borderBottom = getBorderWidth(Spacing.BOTTOM);
      int colorLeft = getBorderColor(Spacing.LEFT);
      int colorTop = getBorderColor(Spacing.TOP);
      int colorRight = getBorderColor(Spacing.RIGHT);
      int colorBottom = getBorderColor(Spacing.BOTTOM);
      int width = bounds.width();
      int height = bounds.height();
      // If the path drawn previously is of the same color,
      // there would be a slight white space between borders
      // with anti-alias set to true.
      // Therefore we need to disable anti-alias, and
      // after drawing is done, we will re-enable it.
      mPaint.setPathEffect(mPathEffectForBorderStyle);
      mPaint.setAntiAlias(false);
      if (mPathForBorder == null) {
        mPathForBorder = new Path();
      }
      // Each border is a quad whose inner edge is inset by the two adjacent
      // border widths; vertices are listed as (x, y) pairs.
      if (borderLeft > 0 && colorLeft != Color.TRANSPARENT) {
        drawBorder(canvas,colorLeft,new float[]{
            0, 0,
            borderLeft, borderTop,
            borderLeft, height - borderBottom,
            0, height
        },mPathForBorder,mPaint);
      }
      if (borderTop > 0 && colorTop != Color.TRANSPARENT) {
        drawBorder(canvas,colorTop,new float[]{
            0, 0,
            borderLeft, borderTop,
            width - borderRight, borderTop,
            width, 0
        },mPathForBorder,mPaint);
      }
      if (borderRight > 0 && colorRight != Color.TRANSPARENT) {
        drawBorder(canvas,colorRight,new float[]{
            width, 0,
            width, height,
            width - borderRight, height - borderBottom,
            width - borderRight, borderTop
        },mPathForBorder,mPaint);
      }
      if (borderBottom > 0 && colorBottom != Color.TRANSPARENT) {
        drawBorder(canvas,colorBottom,new float[]{
            0, height,
            width, height,
            width - borderRight, height - borderBottom,
            borderLeft, height - borderBottom
        },mPathForBorder,mPaint);
      }
      // re-enable anti alias
      mPaint.setAntiAlias(true);
    }
  }

  /**
   * draw one border
   *
   * <p>Builds a closed polygon from the given (x, y) vertex pairs and draws it
   * with the supplied paint, reusing {@code path} to avoid allocation.</p>
   *
   * @param canvas target canvas
   * @param color  border color applied to {@code paint}
   * @param pts    flattened vertex coordinates: x0, y0, x1, y1, ...
   * @param path   scratch path, reset before use
   * @param paint  paint to draw with (style configured by the caller)
   */
  private void drawBorder(Canvas canvas,int color,float[] pts,Path path,Paint paint){
    paint.setColor(color);
    path.reset();
    path.moveTo(pts[0],pts[1]);
    //exclude start point
    for(int i=1,len=pts.length/2;i<len;i++){
      path.lineTo(pts[i*2],pts[i*2+1]);
    }
    //back to start point
    path.lineTo(pts[0],pts[1]);
    canvas.drawPath(path,paint);
  }

  @Override
  protected void onBoundsChange(Rect bounds) {
    super.onBoundsChange(bounds);
    // Bounds changed, so the cached rounded-rect paths must be rebuilt.
    mNeedUpdatePathForBorderRadius = true;
  }

  // Rebuilds the rounded-rect paths (background/border path and the slightly
  // larger outline path) if a radius- or bounds-affecting property changed.
  private void updatePath() {
    if (!mNeedUpdatePathForBorderRadius) {
      return;
    }
    mNeedUpdatePathForBorderRadius = false;
    if (mPathForBorderRadius == null) {
      mPathForBorderRadius = new Path();
    }
    if (mTempRectForBorderRadius == null) {
      mTempRectForBorderRadius = new RectF();
    }
    if (mPathForBorderRadiusOutline == null) {
      mPathForBorderRadiusOutline = new Path();
    }
    if (mTempRectForBorderRadiusOutline == null) {
      mTempRectForBorderRadiusOutline = new RectF();
    }
    mPathForBorderRadius.reset();
    mPathForBorderRadiusOutline.reset();
    mTempRectForBorderRadius.set(getBounds());
    mTempRectForBorderRadiusOutline.set(getBounds());
    float fullBorderWidth = getFullBorderWidth();
    if (fullBorderWidth > 0) {
      // Inset by half the stroke width so the stroked border stays inside the bounds.
      mTempRectForBorderRadius.inset(fullBorderWidth * 0.5f, fullBorderWidth * 0.5f);
    }
    // Per-corner radii fall back to the uniform radius when undefined.
    float defaultBorderRadius = !CSSConstants.isUndefined(mBorderRadius) ? mBorderRadius : 0;
    float topLeftRadius = mBorderCornerRadii != null && !CSSConstants.isUndefined(mBorderCornerRadii[0]) ? mBorderCornerRadii[0] : defaultBorderRadius;
    float topRightRadius = mBorderCornerRadii != null && !CSSConstants.isUndefined(mBorderCornerRadii[1]) ? mBorderCornerRadii[1] : defaultBorderRadius;
    float bottomRightRadius = mBorderCornerRadii != null && !CSSConstants.isUndefined(mBorderCornerRadii[2]) ? mBorderCornerRadii[2] : defaultBorderRadius;
    float bottomLeftRadius = mBorderCornerRadii != null && !CSSConstants.isUndefined(mBorderCornerRadii[3]) ? mBorderCornerRadii[3] : defaultBorderRadius;
    mPathForBorderRadius.addRoundRect(
        mTempRectForBorderRadius,
        new float[]{
            topLeftRadius,
            topLeftRadius,
            topRightRadius,
            topRightRadius,
            bottomRightRadius,
            bottomRightRadius,
            bottomLeftRadius,
            bottomLeftRadius
        },
        Path.Direction.CW);
    // The outline path is expanded by half the border width so elevation
    // shadows are cast from the outer edge of the border (see getOutline).
    float extraRadiusForOutline = 0;
    if (mBorderWidth != null) {
      extraRadiusForOutline = mBorderWidth.get(Spacing.ALL) / 2f;
    }
    mPathForBorderRadiusOutline.addRoundRect(
        mTempRectForBorderRadiusOutline,
        new float[]{
            topLeftRadius + extraRadiusForOutline,
            topLeftRadius + extraRadiusForOutline,
            topRightRadius + extraRadiusForOutline,
            topRightRadius + extraRadiusForOutline,
            bottomRightRadius + extraRadiusForOutline,
            bottomRightRadius + extraRadiusForOutline,
            bottomLeftRadius + extraRadiusForOutline,
            bottomLeftRadius + extraRadiusForOutline
        },
        Path.Direction.CW);
    mPathEffectForBorderStyle = mBorderStyle != null
                                ? mBorderStyle.getPathEffect(getFullBorderWidth())
                                : null;
  }

  @Override
  public void setAlpha(int alpha) {
    if (alpha != mAlpha) {
      mAlpha = alpha;
      invalidateSelf();
    }
  }

  /**
   * For rounded borders we use default "borderWidth" property.
   */
  private float getFullBorderWidth() {
    return (mBorderWidth != null && !CSSConstants.isUndefined(mBorderWidth.getRaw(Spacing.ALL))) ?
           mBorderWidth.getRaw(Spacing.ALL) : 0f;
  }

  @Override
  public int getAlpha() {
    return mAlpha;
  }

  /**
   * We use this method for getting color for rounded borders only similarly as for
   * {@link #getFullBorderWidth}.
   */
  private int getFullBorderColor() {
    return (mBorderColor != null && !CSSConstants.isUndefined(mBorderColor.getRaw(Spacing.ALL))) ?
           (int) (long) mBorderColor.getRaw(Spacing.ALL) : DEFAULT_BORDER_COLOR;
  }

  @Override
  public void setColorFilter(ColorFilter cf) {
    // do nothing
  }

  // Border width for one side, rounded to whole pixels; 0 when unset.
  private int getBorderWidth(int position) {
    return mBorderWidth != null ? Math.round(mBorderWidth.get(position)) : 0;
  }

  @Override
  public int getOpacity() {
    return WXViewUtils.getOpacityFromColor(WXViewUtils.multiplyColorAlpha(
        mColor, mAlpha));
  }

  private int getBorderColor(int position) {
    // Check ReactStylesDiffMap#getColorInt() to see why this is needed
    return mBorderColor != null ? (int) (long) mBorderColor.get(position) : DEFAULT_BORDER_COLOR;
  }

  /* Android's elevation implementation requires this to be implemented to know where to draw the shadow. */
  @Override
  public void getOutline(@NonNull Outline outline) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
      super.getOutline(outline);
    } else {
      if ((!CSSConstants.isUndefined(mBorderRadius) && mBorderRadius > 0) || mBorderCornerRadii != null) {
        updatePath();
        outline.setConvexPath(mPathForBorderRadiusOutline);
      } else {
        outline.setRect(getBounds());
      }
    }
  }

  public void setBorderWidth(int position, float width) {
    if (mBorderWidth == null) {
      mBorderWidth = new Spacing();
    }
    if (!FloatUtil.floatsEqual(mBorderWidth.getRaw(position), width)) {
      mBorderWidth.set(position, width);
      // Only the uniform (ALL) width affects the cached rounded-rect paths.
      if (position == Spacing.ALL) {
        mNeedUpdatePathForBorderRadius = true;
      }
      invalidateSelf();
    }
  }

  public void setBorderColor(int position, float color) {
    if (mBorderColor == null) {
      mBorderColor = new Spacing();
      mBorderColor.setDefault(Spacing.LEFT, DEFAULT_BORDER_COLOR);
      mBorderColor.setDefault(Spacing.TOP, DEFAULT_BORDER_COLOR);
      mBorderColor.setDefault(Spacing.RIGHT, DEFAULT_BORDER_COLOR);
      mBorderColor.setDefault(Spacing.BOTTOM, DEFAULT_BORDER_COLOR);
    }
    if (!FloatUtil.floatsEqual(mBorderColor.getRaw(position), color)) {
      mBorderColor.set(position, color);
      invalidateSelf();
    }
  }

  // Accepts "solid" / "dashed" / "dotted" (case-insensitive); null clears the style.
  public void setBorderStyle(@Nullable String style) {
    BorderStyle borderStyle = style == null
                              ? null
                              : BorderStyle.valueOf(style.toUpperCase(Locale.US));
    if (mBorderStyle != borderStyle) {
      mBorderStyle = borderStyle;
      mNeedUpdatePathForBorderRadius = true;
      invalidateSelf();
    }
  }

  // Sets the uniform corner radius used when no per-corner radius overrides it.
  public void setRadius(float radius) {
    if (!FloatUtil.floatsEqual(mBorderRadius, radius)) {
      mBorderRadius = radius;
      mNeedUpdatePathForBorderRadius = true;
      invalidateSelf();
    }
  }

  // Sets one corner's radius; position is one of the BORDER_*_RADIUS constants.
  public void setRadius(float radius, int position) {
    if (mBorderCornerRadii == null) {
      mBorderCornerRadii = new float[4];
      Arrays.fill(mBorderCornerRadii, CSSConstants.UNDEFINED);
    }
    if (!FloatUtil.floatsEqual(mBorderCornerRadii[position], radius)) {
      mBorderCornerRadii[position] = radius;
      mNeedUpdatePathForBorderRadius = true;
      invalidateSelf();
    }
  }

  public int getColor() {
    return mColor;
  }

  public void setColor(int color) {
    mColor = color;
    invalidateSelf();
  }

  // Supported CSS border styles; each maps to an optional dash PathEffect
  // whose segment lengths scale with the border width.
  private enum BorderStyle {
    SOLID,
    DASHED,
    DOTTED;

    public
    @Nullable
    PathEffect getPathEffect(float borderWidth) {
      switch (this) {
        case SOLID:
          return null;
        case DASHED:
          return new DashPathEffect(
              new float[]{borderWidth * 3, borderWidth * 3, borderWidth * 3, borderWidth * 3}, 0);
        case DOTTED:
          return new DashPathEffect(
              new float[]{borderWidth, borderWidth, borderWidth, borderWidth}, 0);
        default:
          return null;
      }
    }
  }
}
| apache-2.0 |
stewartpark/presto | presto-kafka/src/main/java/com/facebook/presto/kafka/KafkaRecordSetProvider.java | 3639 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.kafka;
import com.facebook.presto.decoder.DispatchingRowDecoderFactory;
import com.facebook.presto.decoder.RowDecoder;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.RecordSet;
import com.facebook.presto.spi.connector.ConnectorRecordSetProvider;
import com.facebook.presto.spi.connector.ConnectorTransactionHandle;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import javax.inject.Inject;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static com.facebook.presto.kafka.KafkaHandleResolver.convertSplit;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static java.util.Objects.requireNonNull;
/**
* Factory for Kafka specific {@link RecordSet} instances.
*/
public class KafkaRecordSetProvider
        implements ConnectorRecordSetProvider
{
    // Both collaborators are injected once and never reassigned; declared final
    // for immutability (decoderFactory was previously non-final, inconsistent
    // with consumerManager).
    private final DispatchingRowDecoderFactory decoderFactory;
    private final KafkaSimpleConsumerManager consumerManager;

    @Inject
    public KafkaRecordSetProvider(DispatchingRowDecoderFactory decoderFactory, KafkaSimpleConsumerManager consumerManager)
    {
        this.decoderFactory = requireNonNull(decoderFactory, "decoderFactory is null");
        this.consumerManager = requireNonNull(consumerManager, "consumerManager is null");
    }

    /**
     * Builds a {@link RecordSet} for one Kafka split: creates a key decoder and
     * a message decoder from the split's data formats/schemas and the non-internal
     * projected columns, then wraps them in a {@link KafkaRecordSet}.
     */
    @Override
    public RecordSet getRecordSet(ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorSplit split, List<? extends ColumnHandle> columns)
    {
        KafkaSplit kafkaSplit = convertSplit(split);

        List<KafkaColumnHandle> kafkaColumns = columns.stream()
                .map(KafkaHandleResolver::convertColumnHandle)
                .collect(ImmutableList.toImmutableList());

        // Key decoder handles only columns flagged as coming from the message key.
        RowDecoder keyDecoder = decoderFactory.create(
                kafkaSplit.getKeyDataFormat(),
                getDecoderParameters(kafkaSplit.getKeyDataSchemaContents()),
                kafkaColumns.stream()
                        .filter(col -> !col.isInternal())
                        .filter(KafkaColumnHandle::isKeyDecoder)
                        .collect(toImmutableSet()));

        // Message decoder handles the remaining non-internal columns.
        RowDecoder messageDecoder = decoderFactory.create(
                kafkaSplit.getMessageDataFormat(),
                getDecoderParameters(kafkaSplit.getMessageDataSchemaContents()),
                kafkaColumns.stream()
                        .filter(col -> !col.isInternal())
                        .filter(col -> !col.isKeyDecoder())
                        .collect(toImmutableSet()));

        return new KafkaRecordSet(kafkaSplit, consumerManager, kafkaColumns, keyDecoder, messageDecoder);
    }

    // Wraps an optional schema document into the "dataSchema" decoder parameter;
    // returns an empty map when no schema is configured.
    private Map<String, String> getDecoderParameters(Optional<String> dataSchema)
    {
        ImmutableMap.Builder<String, String> parameters = ImmutableMap.builder();
        dataSchema.ifPresent(schema -> parameters.put("dataSchema", schema));
        return parameters.build();
    }
}
| apache-2.0 |
luckyyeah/YiDu-Novel | src/main/java/org/yidu/novel/bean/ReplaceKeywordsBean.java | 491 | package org.yidu.novel.bean;
/**
 * Simple mutable value holder for a keyword-replacement rule: the text to look
 * for ({@code key}), the text to substitute ({@code value}), and whether the
 * substitution has been performed ({@code replaced}).
 */
public class ReplaceKeywordsBean {

    /** Keyword to search for. */
    private String key;

    /** Replacement text for the keyword. */
    private String value;

    /** Whether this rule has already been applied; null until explicitly set. */
    private Boolean replaced;

    public String getKey() {
        return this.key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    public String getValue() {
        return this.value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public Boolean getReplaced() {
        return this.replaced;
    }

    public void setReplaced(Boolean replaced) {
        this.replaced = replaced;
    }
}
| apache-2.0 |
apereo/cas | support/cas-server-support-trusted-mfa-core/src/main/java/org/apereo/cas/trusted/authentication/MultifactorAuthenticationTrustedDeviceNamingStrategy.java | 1821 | package org.apereo.cas.trusted.authentication;
import org.apereo.cas.authentication.Authentication;
import org.apereo.cas.authentication.principal.Service;
import org.apereo.cas.services.RegisteredService;
import lombok.val;
import org.apereo.inspektr.common.web.ClientInfoHolder;
import javax.servlet.http.HttpServletRequest;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
/**
* This is {@link MultifactorAuthenticationTrustedDeviceNamingStrategy}.
*
* @author Misagh Moayyed
* @since 6.4.0
*/
@FunctionalInterface
public interface MultifactorAuthenticationTrustedDeviceNamingStrategy {

    /**
     * Default strategy that derives a device name from the current client IP
     * address (when one is available via {@link ClientInfoHolder}) followed by
     * {@code '@'} and the current UTC date/time.
     *
     * @return the multifactor authentication trusted device naming strategy
     */
    static MultifactorAuthenticationTrustedDeviceNamingStrategy random() {
        return (registeredService, service, request, authentication) -> {
            var clientInfo = ClientInfoHolder.getClientInfo();
            // No client info -> name is just the timestamp, matching prior behavior.
            var prefix = clientInfo == null ? "" : clientInfo.getClientIpAddress() + '@';
            return prefix + LocalDateTime.now(ZoneOffset.UTC);
        };
    }

    /**
     * Determine the name to assign to a trusted device record.
     *
     * @param registeredService the registered service
     * @param service           the service
     * @param request           the request
     * @param authentication    the authentication
     * @return the device name
     */
    String determineDeviceName(RegisteredService registeredService,
                               Service service,
                               HttpServletRequest request,
                               Authentication authentication);
}
| apache-2.0 |
gcoders/gerrit | gerrit-gwtexpui/src/main/java/com/google/gwtexpui/linker/server/UserAgentRule.java | 2547 | // Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gwtexpui.linker.server;
import static java.util.regex.Pattern.compile;

import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.servlet.http.HttpServletRequest;
/**
* Selects the value for the {@code user.agent} property.
* <p>
* Examines the {@code User-Agent} HTTP request header, and tries to match it to
* known {@code user.agent} values.
* <p>
* Ported from JavaScript in {@code com.google.gwt.user.UserAgent.gwt.xml}.
*/
public class UserAgentRule {
  private static final Pattern msie = compile(".*msie ([0-9]+)\\.([0-9]+).*");
  private static final Pattern gecko = compile(".*rv:([0-9]+)\\.([0-9]+).*");

  /** @return the GWT property name this rule selects a value for. */
  public String getName() {
    return "user.agent";
  }

  /**
   * Maps the request's {@code User-Agent} header to a GWT {@code user.agent}
   * permutation name ({@code opera}, {@code safari}, {@code ie6}/{@code ie8}/
   * {@code ie9}/{@code ie10}, {@code gecko}/{@code gecko1_8}).
   *
   * @param req current HTTP request
   * @return the permutation name, or {@code null} when the header is missing
   *     or the browser cannot be classified
   */
  public String select(HttpServletRequest req) {
    String ua = req.getHeader("User-Agent");
    if (ua == null) {
      return null;
    }
    // Use a fixed locale so matching is independent of the server's default
    // locale (e.g. under a Turkish locale "MSIE" would lowercase to "msıe"
    // with a dotless i and never match).
    ua = ua.toLowerCase(Locale.ROOT);

    if (ua.contains("opera")) {
      return "opera";

    } else if (ua.contains("webkit")) {
      return "safari";

    } else if (ua.contains("msie")) {
      // GWT 2.0 uses document.documentMode here, which we can't do
      // on the server side.
      Matcher m = msie.matcher(ua);
      if (m.matches() && m.groupCount() == 2) {
        int v = makeVersion(m);
        if (v >= 10000) {
          return "ie10";
        }
        if (v >= 9000) {
          return "ie9";
        }
        if (v >= 8000) {
          return "ie8";
        }
        if (v >= 6000) {
          return "ie6";
        }
      }
      return null;

    } else if (ua.contains("gecko")) {
      Matcher m = gecko.matcher(ua);
      if (m.matches() && m.groupCount() == 2) {
        if (makeVersion(m) >= 1008) {
          return "gecko1_8";
        }
      }
      return "gecko";
    }
    return null;
  }

  // Encodes major.minor as major*1000 + minor (e.g. 9.0 -> 9000, 1.8 -> 1008).
  private int makeVersion(Matcher result) {
    return (Integer.parseInt(result.group(1)) * 1000)
        + Integer.parseInt(result.group(2));
  }
}
| apache-2.0 |
WilliamNouet/nifi | nifi-commons/nifi-data-provenance-utils/src/main/java/org/apache/nifi/provenance/FileBasedKeyProvider.java | 2913 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.provenance;
import java.io.IOException;
import java.security.KeyManagementException;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import javax.naming.OperationNotSupportedException;
import org.apache.nifi.properties.NiFiPropertiesLoader;
import org.bouncycastle.util.encoders.Hex;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Key provider that loads its key material from a file whose entries are
 * encrypted with the master key read from {@code bootstrap.conf}. The provider
 * is read-only: keys cannot be added at runtime (see {@link #addKey}).
 */
public class FileBasedKeyProvider extends StaticKeyProvider {
    private static final Logger logger = LoggerFactory.getLogger(FileBasedKeyProvider.class);

    // Path of the backing key file; assigned exactly once at construction,
    // so declared final (it was previously mutable for no reason).
    private final String filepath;

    FileBasedKeyProvider(String location) throws KeyManagementException {
        this(location, getMasterKey());
    }

    FileBasedKeyProvider(String location, SecretKey masterKey) throws KeyManagementException {
        // Decrypt and register all keys from the file via the parent provider.
        super(CryptoUtils.readKeys(location, masterKey));
        this.filepath = location;
    }

    // Reads the hex-encoded master encryption key from bootstrap.conf and wraps
    // it as an AES SecretKey; I/O failures surface as KeyManagementException.
    private static SecretKey getMasterKey() throws KeyManagementException {
        try {
            // Get the master encryption key from bootstrap.conf
            String masterKeyHex = NiFiPropertiesLoader.extractKeyFromBootstrapFile();
            return new SecretKeySpec(Hex.decode(masterKeyHex), "AES");
        } catch (IOException e) {
            logger.error("Encountered an error: ", e);
            throw new KeyManagementException(e);
        }
    }

    /**
     * Adds the key to the provider and associates it with the given ID. Some implementations may not allow this operation.
     *
     * @param keyId the key identifier
     * @param key the key
     * @return true if the key was successfully added
     * @throws OperationNotSupportedException if this implementation doesn't support adding keys
     * @throws KeyManagementException if the key is invalid, the ID conflicts, etc.
     */
    @Override
    public boolean addKey(String keyId, SecretKey key) throws OperationNotSupportedException, KeyManagementException {
        throw new OperationNotSupportedException("This implementation does not allow adding keys. Modify the file backing this provider at " + filepath);
    }
}
| apache-2.0 |
baldimir/optaplanner | optaplanner-examples/src/main/java/org/optaplanner/examples/meetingscheduling/domain/Person.java | 1227 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.examples.meetingscheduling.domain;
import org.optaplanner.examples.common.domain.AbstractPersistable;
import org.optaplanner.examples.common.swingui.components.Labeled;
/**
 * A meeting attendee, identified by full name. The UI label and the string
 * representation are both simply the full name.
 */
public class Person extends AbstractPersistable implements Labeled {

    private String fullName;

    public String getFullName() {
        return fullName;
    }

    public void setFullName(String fullName) {
        this.fullName = fullName;
    }

    /** @return the full name, used as the display label */
    @Override
    public String getLabel() {
        return getFullName();
    }

    @Override
    public String toString() {
        return getLabel();
    }
}
| apache-2.0 |
forGGe/kaa | client/client-multi/client-java-core/src/test/java/org/kaaproject/kaa/client/notification/DefaultNotificationManagerTest.java | 24448 | /*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.client.notification;
import static org.mockito.Mockito.mock;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.kaaproject.kaa.client.KaaClientProperties;
import org.kaaproject.kaa.client.channel.NotificationTransport;
import org.kaaproject.kaa.client.context.ExecutorContext;
import org.kaaproject.kaa.client.persistance.KaaClientPropertiesStateTest;
import org.kaaproject.kaa.client.persistence.FilePersistentStorage;
import org.kaaproject.kaa.client.persistence.KaaClientPropertiesState;
import org.kaaproject.kaa.client.util.CommonsBase64;
import org.kaaproject.kaa.common.avro.AvroByteArrayConverter;
import org.kaaproject.kaa.common.endpoint.gen.Notification;
import org.kaaproject.kaa.common.endpoint.gen.NotificationType;
import org.kaaproject.kaa.common.endpoint.gen.SubscriptionType;
import org.kaaproject.kaa.common.endpoint.gen.Topic;
import org.mockito.Mockito;
public class DefaultNotificationManagerTest {
    // Shared mocked ExecutorContext whose API and callback executors are both
    // backed by the single-threaded executor below.
    private static ExecutorContext executorContext;
    private static ExecutorService executor;

    // Directory where state files created by these tests live.
    private static final String workDir = "work_dir" + System.getProperty("file.separator");
    // Topic id never registered through topicsListUpdated(); presumably used by
    // tests further down this class to exercise the unknown-topic path —
    // not referenced in the portion visible here.
    private static final Long UNKNOWN_TOPIC_ID = 100500l;

    // Builds the shared executor/mock fixtures once for the whole suite.
    @BeforeClass
    public static void beforeSuite(){
        executorContext = Mockito.mock(ExecutorContext.class);
        executor = Executors.newSingleThreadExecutor();
        Mockito.when(executorContext.getApiExecutor()).thenReturn(executor);
        Mockito.when(executorContext.getCallbackExecutor()).thenReturn(executor);
    }

    // Releases the shared executor after all tests have run.
    @AfterClass
    public static void afterSuite(){
        executor.shutdown();
    }
@Test
public void testEmptyTopicList() throws IOException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
for (Topic t : notificationManager.getTopics()) {
System.out.println(t);
}
Assert.assertTrue(notificationManager.getTopics().isEmpty());
}
    /**
     * After a topic list update, the manager must expose exactly the topics
     * that were delivered.
     */
    @Test
    public void testTopicsAfterUpdate() throws IOException {
        KaaClientPropertiesState state = new KaaClientPropertiesState(
                new FilePersistentStorage(), CommonsBase64.getInstance(),KaaClientPropertiesStateTest.getProperties());
        NotificationTransport transport = mock(NotificationTransport.class);

        DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);

        List<Topic> topics = Arrays.asList(new Topic(1l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION)
                , new Topic(2l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION));

        notificationManager.topicsListUpdated(topics);
        // Only the count is asserted, not the topic contents.
        Assert.assertTrue(notificationManager.getTopics().size() == topics.size());
    }
    /**
     * Topics delivered to one manager must survive a state persist/reload
     * cycle: a second manager built on freshly loaded state sees the same
     * number of topics. Cleans up the state file it created on disk.
     */
    @Test
    public void testTopicPersistence() throws IOException {
        KaaClientProperties props = KaaClientPropertiesStateTest.getProperties();
        KaaClientPropertiesState state = new KaaClientPropertiesState(
                new FilePersistentStorage(), CommonsBase64.getInstance(),props);
        NotificationTransport transport = mock(NotificationTransport.class);

        DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);

        List<Topic> topics = Arrays.asList(new Topic(1l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION)
                , new Topic(2l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION));

        notificationManager.topicsListUpdated(topics);
        // Flush the updated topic list to the state file on disk.
        state.persist();

        // Reload state from disk into a brand-new manager instance.
        KaaClientPropertiesState newState = new KaaClientPropertiesState(
                new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
        DefaultNotificationManager newNotificationManager = new DefaultNotificationManager(newState, executorContext, transport);
        Assert.assertTrue(newNotificationManager.getTopics().size() == topics.size());

        // Remove the persisted state file; the delete doubles as a check that
        // the file was actually created.
        boolean deleted = new File(workDir + props.getProperty("state.file_name")).delete();
        Assert.assertTrue(deleted);
    }
@Test
public void testTwiceTopicUpdate() throws IOException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
Topic topic1 = new Topic(1l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION);
Topic topic2 = new Topic(2l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION);
Topic topic3 = new Topic(3l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION);
List<Topic> topicUpdates = new LinkedList<>();
topicUpdates.add(topic1);
topicUpdates.add(topic2);
notificationManager.topicsListUpdated(topicUpdates);
topicUpdates.remove(topic2);
topicUpdates.add(topic3);
notificationManager.topicsListUpdated(topicUpdates);
List<Topic> newTopics = notificationManager.getTopics();
Assert.assertTrue(newTopics.size() == topicUpdates.size());
Assert.assertTrue(newTopics.contains(topic1));
Assert.assertTrue(newTopics.contains(topic3));
}
@Test
public void testAddTopicUpdateListener() throws Exception {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
final List<Topic> topicUpdates = new LinkedList<>();
topicUpdates.add(new Topic(1l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION));
topicUpdates.add(new Topic(2l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION));
topicUpdates.add(new Topic(3l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION));
notificationManager.addTopicListListener(new NotificationTopicListListener() {
@Override
public void onListUpdated(List<Topic> list) {
Assert.assertArrayEquals(topicUpdates.toArray(), list.toArray());
topicUpdates.clear();
}
});
notificationManager.topicsListUpdated(topicUpdates);
Thread.sleep(500);
Assert.assertTrue(topicUpdates.isEmpty());
}
@Test
public void testRemoveTopicUpdateListener() throws IOException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
NotificationTopicListListener listener1 = Mockito.mock(NotificationTopicListListener.class);
NotificationTopicListListener listener2 = Mockito.mock(NotificationTopicListListener.class);
notificationManager.addTopicListListener(listener1);
notificationManager.addTopicListListener(listener2);
List<Topic> topicUpdate = Arrays.asList(new Topic());
notificationManager.topicsListUpdated(topicUpdate);
notificationManager.removeTopicListListener(listener2);
notificationManager.topicsListUpdated(topicUpdate);
Mockito.verify(listener1, Mockito.timeout(1000).times(2)).onListUpdated(topicUpdate);
Mockito.verify(listener2, Mockito.timeout(1000).times(1)).onListUpdated(topicUpdate);
}
@Test
public void testGlobalNotificationListeners() throws Exception {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
List<Topic> topicsUpdate = Arrays.asList(
new Topic(1l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION)
, new Topic(2l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION));
ByteBuffer notificationBody = ByteBuffer.wrap(new AvroByteArrayConverter<>(org.kaaproject.kaa.schema.base.Notification.class).toByteArray(
new org.kaaproject.kaa.schema.base.Notification()));
notificationManager.topicsListUpdated(topicsUpdate);
List<Notification> notificationUpdate = Arrays.asList(
new Notification(1l, NotificationType.CUSTOM, null, 1, notificationBody),
new Notification(2l, NotificationType.CUSTOM, null, 1, notificationBody));
NotificationListener mandatoryListener = Mockito.mock(NotificationListener.class);
NotificationListener globalListener = Mockito.mock(NotificationListener.class);
notificationManager.addNotificationListener(mandatoryListener);
notificationManager.notificationReceived(notificationUpdate);
Thread.sleep(500);
notificationManager.removeNotificationListener(mandatoryListener);
notificationManager.addNotificationListener(globalListener);
notificationManager.notificationReceived(notificationUpdate);
notificationManager.notificationReceived(notificationUpdate);
Mockito.verify(mandatoryListener, Mockito.timeout(1000).times(notificationUpdate.size()))
.onNotification(Mockito.anyLong(), Mockito.any(org.kaaproject.kaa.schema.base.Notification.class));
Mockito.verify(globalListener, Mockito.timeout(1000).times(notificationUpdate.size() * 2))
.onNotification(Mockito.anyLong(), Mockito.any(org.kaaproject.kaa.schema.base.Notification.class));
}
@Test
public void testNotificationListenerOnTopic() throws Exception{
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
List<Topic> topicsUpdate = Arrays.asList(
new Topic(1l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION)
, new Topic(2l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION));
ByteBuffer notificationBody = ByteBuffer.wrap(new AvroByteArrayConverter<>(Topic.class).toByteArray(
new Topic(3l, "name", SubscriptionType.MANDATORY_SUBSCRIPTION)));
notificationManager.topicsListUpdated(topicsUpdate);
List<Notification> notificationUpdate = Arrays.asList(
new Notification(1l, NotificationType.CUSTOM, null, 1, notificationBody),
new Notification(2l, NotificationType.CUSTOM, null, 1, notificationBody));
NotificationListener globalListener = Mockito.mock(NotificationListener.class);
NotificationListener topicListener = Mockito.mock(NotificationListener.class);
notificationManager.addNotificationListener(globalListener);
notificationManager.addNotificationListener(2l, topicListener);
notificationManager.notificationReceived(notificationUpdate);
notificationManager.removeNotificationListener(2l, topicListener);
notificationManager.notificationReceived(notificationUpdate);
Mockito.verify(globalListener, Mockito.timeout(1000).times(notificationUpdate.size() * 2 - 1))
.onNotification(Mockito.anyLong(), Mockito.any(org.kaaproject.kaa.schema.base.Notification.class));
Mockito.verify(topicListener, Mockito.timeout(1000).times(1))
.onNotification(Mockito.anyLong(), Mockito.any(org.kaaproject.kaa.schema.base.Notification.class));
}
@Test(expected=UnavailableTopicException.class)
public void testAddListenerForUnknownTopic() throws IOException, UnavailableTopicException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(),KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
List<Topic> topicsUpdate = Arrays.asList(
new Topic(1l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION)
, new Topic(2l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION));
notificationManager.topicsListUpdated(topicsUpdate);
NotificationListener listener = Mockito.mock(NotificationListener.class);
notificationManager.addNotificationListener(UNKNOWN_TOPIC_ID, listener);
}
@Test(expected=UnavailableTopicException.class)
public void testRemoveListenerForUnknownTopic() throws IOException, UnavailableTopicException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
List<Topic> topicsUpdate = Arrays.asList(
new Topic(1l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION)
, new Topic(2l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION));
notificationManager.topicsListUpdated(topicsUpdate);
NotificationListener listener = Mockito.mock(NotificationListener.class);
notificationManager.removeNotificationListener(UNKNOWN_TOPIC_ID, listener);
}
@Test(expected=UnavailableTopicException.class)
public void testSubscribeOnUnknownTopic1() throws IOException, UnavailableTopicException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
List<Topic> topicsUpdate = Arrays.asList(
new Topic(1l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION)
, new Topic(2l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION));
notificationManager.topicsListUpdated(topicsUpdate);
notificationManager.subscribeToTopic(UNKNOWN_TOPIC_ID, true);
}
@Test(expected=UnavailableTopicException.class)
public void testSubscribeOnUnknownTopic2() throws IOException, UnavailableTopicException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
List<Topic> topicsUpdate = Arrays.asList(
new Topic(1l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION)
, new Topic(2l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION));
notificationManager.topicsListUpdated(topicsUpdate);
notificationManager.subscribeToTopics(Arrays.asList(1l, 2l, UNKNOWN_TOPIC_ID), true);
}
@Test(expected=UnavailableTopicException.class)
public void testUnsubscribeFromUnknownTopic1() throws IOException, UnavailableTopicException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
List<Topic> topicsUpdate = Arrays.asList(
new Topic(1l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION)
, new Topic(2l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION));
notificationManager.topicsListUpdated(topicsUpdate);
notificationManager.unsubscribeFromTopic(UNKNOWN_TOPIC_ID, true);
}
@Test(expected=UnavailableTopicException.class)
public void testUnsubscribeFromUnknownTopic2() throws IOException, UnavailableTopicException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
List<Topic> topicsUpdate = Arrays.asList(
new Topic(1l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION)
, new Topic(2l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION));
notificationManager.topicsListUpdated(topicsUpdate);
notificationManager.unsubscribeFromTopics(Arrays.asList(1l, 2l, UNKNOWN_TOPIC_ID), true);
}
@Test(expected=UnavailableTopicException.class)
public void testSubscribeOnMandatoryTopic1() throws IOException, UnavailableTopicException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
List<Topic> topicsUpdate = Arrays.asList(
new Topic(1l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION)
, new Topic(2l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION));
notificationManager.topicsListUpdated(topicsUpdate);
notificationManager.subscribeToTopic(2l, true);
}
@Test(expected=UnavailableTopicException.class)
public void testSubscribeOnMandatoryTopic2() throws IOException, UnavailableTopicException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
List<Topic> topicsUpdate = Arrays.asList(
new Topic(1l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION)
, new Topic(2l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION));
notificationManager.topicsListUpdated(topicsUpdate);
notificationManager.subscribeToTopics(Arrays.asList(1l, 2l), true);
}
@Test(expected=UnavailableTopicException.class)
public void testUnsubscribeFromMandatoryTopic1() throws IOException, UnavailableTopicException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
List<Topic> topicsUpdate = Arrays.asList(
new Topic(1l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION)
, new Topic(2l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION));
notificationManager.topicsListUpdated(topicsUpdate);
notificationManager.unsubscribeFromTopic(2l, true);
}
@Test(expected=UnavailableTopicException.class)
public void testUnsubscribeFromMandatoryTopic2() throws IOException, UnavailableTopicException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
List<Topic> topicsUpdate = Arrays.asList(
new Topic(1l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION)
, new Topic(2l, "topic_name1", SubscriptionType.MANDATORY_SUBSCRIPTION));
notificationManager.topicsListUpdated(topicsUpdate);
notificationManager.unsubscribeFromTopics(Arrays.asList(1l, 2l), true);
}
@Test
public void testSuccessSubscriptionToTopic() throws IOException, UnavailableTopicException {
KaaClientPropertiesState state = new KaaClientPropertiesState(
new FilePersistentStorage(), CommonsBase64.getInstance(), KaaClientPropertiesStateTest.getProperties());
NotificationTransport transport = mock(NotificationTransport.class);
DefaultNotificationManager notificationManager = new DefaultNotificationManager(state, executorContext, transport);
List<Topic> topicsUpdate = Arrays.asList(
new Topic(1l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION)
, new Topic(2l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION)
, new Topic(3l, "topic_name1", SubscriptionType.OPTIONAL_SUBSCRIPTION));
notificationManager.topicsListUpdated(topicsUpdate);
notificationManager.subscribeToTopic(1l, true);
Mockito.verify(transport, Mockito.times(1)).sync();
notificationManager.subscribeToTopics(Arrays.asList(1l, 2l), false);
notificationManager.unsubscribeFromTopic(1l, false);
Mockito.verify(transport, Mockito.times(1)).sync();
notificationManager.sync();
Mockito.verify(transport, Mockito.times(2)).sync();
notificationManager.unsubscribeFromTopics(Arrays.asList(1l, 2l), true);
Mockito.verify(transport, Mockito.times(3)).sync();
}
}
| apache-2.0 |
jinglining/flink | flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/WebFrontendITCase.java | 15038 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.webmonitor;
import org.apache.flink.api.common.JobID;
import org.apache.flink.client.ClientUtils;
import org.apache.flink.client.program.ClusterClient;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.configuration.WebOptions;
import org.apache.flink.runtime.client.JobStatusMessage;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
import org.apache.flink.runtime.webmonitor.testutils.HttpTestClient;
import org.apache.flink.test.util.MiniClusterWithClientResource;
import org.apache.flink.test.util.TestBaseUtils;
import org.apache.flink.util.TestLogger;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseStatus;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Test;
import java.io.File;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.time.Duration;
import java.time.LocalTime;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* Tests for the WebFrontend.
*/
/**
 * Tests for the WebFrontend: REST endpoints for task managers, log files,
 * configuration, and job cancellation against a local mini cluster.
 */
public class WebFrontendITCase extends TestLogger {

    private static final int NUM_TASK_MANAGERS = 2;
    private static final int NUM_SLOTS = 4;

    private static final Configuration CLUSTER_CONFIGURATION = getClusterConfiguration();

    @ClassRule
    public static final MiniClusterWithClientResource CLUSTER = new MiniClusterWithClientResource(
        new MiniClusterResourceConfiguration.Builder()
            .setConfiguration(CLUSTER_CONFIGURATION)
            .setNumberTaskManagers(NUM_TASK_MANAGERS)
            .setNumberSlotsPerTaskManager(NUM_SLOTS)
            .build());

    /**
     * Builds the cluster configuration with a temporary log directory so the
     * log-serving REST handlers have files to serve.
     */
    private static Configuration getClusterConfiguration() {
        Configuration config = new Configuration();
        try {
            File logDir = File.createTempFile("TestBaseUtils-logdir", null);
            assertTrue("Unable to delete temp file", logDir.delete());
            assertTrue("Unable to create temp directory", logDir.mkdir());
            File logFile = new File(logDir, "jobmanager.log");
            File outFile = new File(logDir, "jobmanager.out");
            Files.createFile(logFile.toPath());
            Files.createFile(outFile.toPath());
            config.setString(WebOptions.LOG_PATH, logFile.getAbsolutePath());
            config.setString(ConfigConstants.TASK_MANAGER_LOG_PATH_KEY, logFile.getAbsolutePath());
        } catch (Exception e) {
            throw new AssertionError("Could not setup test.", e);
        }
        // !!DO NOT REMOVE!! next line is required for tests
        config.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, MemorySize.parse("12m"));
        return config;
    }

    @After
    public void tearDown() {
        BlockingInvokable.reset();
    }

    @Test
    public void getFrontPage() throws Exception {
        String fromHTTP = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/index.html");
        assertThat(fromHTTP, containsString("Apache Flink Web Dashboard"));
    }

    private int getRestPort() {
        return CLUSTER.getRestAddres().getPort();
    }

    /** The REST server must emit JSON content-type headers for success and error responses alike. */
    @Test
    public void testResponseHeaders() throws Exception {
        // check headers for successful json response
        URL taskManagersUrl = new URL("http://localhost:" + getRestPort() + "/taskmanagers");
        HttpURLConnection taskManagerConnection = (HttpURLConnection) taskManagersUrl.openConnection();
        taskManagerConnection.setConnectTimeout(100000);
        taskManagerConnection.connect();
        if (taskManagerConnection.getResponseCode() >= 400) {
            // error!
            InputStream is = taskManagerConnection.getErrorStream();
            String errorMessage = IOUtils.toString(is, ConfigConstants.DEFAULT_CHARSET);
            fail(errorMessage);
        }
        // we don't set the content-encoding header
        Assert.assertNull(taskManagerConnection.getContentEncoding());
        Assert.assertEquals("application/json; charset=UTF-8", taskManagerConnection.getContentType());

        // check headers in case of an error
        URL notFoundJobUrl = new URL("http://localhost:" + getRestPort() + "/jobs/dontexist");
        HttpURLConnection notFoundJobConnection = (HttpURLConnection) notFoundJobUrl.openConnection();
        notFoundJobConnection.setConnectTimeout(100000);
        notFoundJobConnection.connect();
        if (notFoundJobConnection.getResponseCode() >= 400) {
            // we don't set the content-encoding header
            Assert.assertNull(notFoundJobConnection.getContentEncoding());
            Assert.assertEquals("application/json; charset=UTF-8", notFoundJobConnection.getContentType());
        } else {
            fail("Request for non-existing job did not return an error.");
        }
    }

    @Test
    public void getNumberOfTaskManagers() throws Exception {
        String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");
        ObjectMapper mapper = new ObjectMapper();
        JsonNode response = mapper.readTree(json);
        ArrayNode taskManagers = (ArrayNode) response.get("taskmanagers");
        assertNotNull(taskManagers);
        assertEquals(NUM_TASK_MANAGERS, taskManagers.size());
    }

    @Test
    public void getTaskManagers() throws Exception {
        String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");
        ObjectMapper mapper = new ObjectMapper();
        JsonNode parsed = mapper.readTree(json);
        ArrayNode taskManagers = (ArrayNode) parsed.get("taskmanagers");
        assertNotNull(taskManagers);
        assertEquals(NUM_TASK_MANAGERS, taskManagers.size());
        JsonNode taskManager = taskManagers.get(0);
        assertNotNull(taskManager);
        assertEquals(NUM_SLOTS, taskManager.get("slotsNumber").asInt());
        assertTrue(taskManager.get("freeSlots").asInt() <= NUM_SLOTS);
    }

    @Test
    public void getLogAndStdoutFiles() throws Exception {
        WebMonitorUtils.LogFileLocation logFiles = WebMonitorUtils.LogFileLocation.find(CLUSTER_CONFIGURATION);

        // use an explicit charset; the two-argument writeStringToFile overload is
        // deprecated and depends on the platform default encoding
        FileUtils.writeStringToFile(logFiles.logFile, "job manager log", StandardCharsets.UTF_8);
        String logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/jobmanager/log");
        assertThat(logs, containsString("job manager log"));

        FileUtils.writeStringToFile(logFiles.stdOutFile, "job manager out", StandardCharsets.UTF_8);
        logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/jobmanager/stdout");
        assertThat(logs, containsString("job manager out"));
    }

    @Test
    public void getCustomLogFiles() throws Exception {
        WebMonitorUtils.LogFileLocation logFiles = WebMonitorUtils.LogFileLocation.find(CLUSTER_CONFIGURATION);
        String customFileName = "test.log";
        final String logDir = logFiles.logFile.getParent();
        final String expectedLogContent = "job manager custom log";
        FileUtils.writeStringToFile(new File(logDir, customFileName), expectedLogContent, StandardCharsets.UTF_8);
        String logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/jobmanager/logs/" + customFileName);
        assertThat(logs, containsString(expectedLogContent));
    }

    @Test
    public void getTaskManagerLogAndStdoutFiles() throws Exception {
        String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");
        ObjectMapper mapper = new ObjectMapper();
        JsonNode parsed = mapper.readTree(json);
        ArrayNode taskManagers = (ArrayNode) parsed.get("taskmanagers");
        JsonNode taskManager = taskManagers.get(0);
        String id = taskManager.get("id").asText();

        WebMonitorUtils.LogFileLocation logFiles = WebMonitorUtils.LogFileLocation.find(CLUSTER_CONFIGURATION);

        // we check for job manager log files, since no separate taskmanager logs exist
        FileUtils.writeStringToFile(logFiles.logFile, "job manager log", StandardCharsets.UTF_8);
        String logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/" + id + "/log");
        assertThat(logs, containsString("job manager log"));

        FileUtils.writeStringToFile(logFiles.stdOutFile, "job manager out", StandardCharsets.UTF_8);
        logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/" + id + "/stdout");
        assertThat(logs, containsString("job manager out"));
    }

    @Test
    public void getConfiguration() throws Exception {
        String config = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/jobmanager/config");
        Map<String, String> conf = WebMonitorUtils.fromKeyValueJsonArray(config);
        MemorySize expected = CLUSTER_CONFIGURATION.get(TaskManagerOptions.MANAGED_MEMORY_SIZE);
        MemorySize actual = MemorySize.parse(conf.get(TaskManagerOptions.MANAGED_MEMORY_SIZE.key()));
        assertEquals(expected, actual);
    }

    /**
     * Submits a job with two {@link BlockingInvokable} subtasks and waits until
     * the job shows up and both subtasks are actually running.
     *
     * @return the submitted job graph
     */
    private static JobGraph submitBlockingJobAndWaitUntilRunning() throws Exception {
        // this only works if there is no active job at this point
        assertTrue(getRunningJobs(CLUSTER.getClusterClient()).isEmpty());

        final JobVertex sender = new JobVertex("Sender");
        sender.setParallelism(2);
        sender.setInvokableClass(BlockingInvokable.class);
        final JobGraph jobGraph = new JobGraph("Stoppable streaming test job", sender);

        ClientUtils.submitJob(CLUSTER.getClusterClient(), jobGraph);

        // wait for job to show up
        while (getRunningJobs(CLUSTER.getClusterClient()).isEmpty()) {
            Thread.sleep(10);
        }
        // wait for tasks to be properly running
        BlockingInvokable.latch.await();
        return jobGraph;
    }

    /** Polls the cluster until no running job remains. */
    private static void waitUntilNoRunningJobs() throws Exception {
        while (!getRunningJobs(CLUSTER.getClusterClient()).isEmpty()) {
            Thread.sleep(20);
        }
    }

    @Test
    public void testCancel() throws Exception {
        final JobID jid = submitBlockingJobAndWaitUntilRunning().getJobID();

        final Duration testTimeout = Duration.ofMinutes(2);
        final LocalTime deadline = LocalTime.now().plus(testTimeout);

        try (HttpTestClient client = new HttpTestClient("localhost", getRestPort())) {
            // cancel the job
            client.sendPatchRequest("/jobs/" + jid + "/", getTimeLeft(deadline));
            HttpTestClient.SimpleHttpResponse response = client.getNextResponse(getTimeLeft(deadline));
            assertEquals(HttpResponseStatus.ACCEPTED, response.getStatus());
            assertEquals("application/json; charset=UTF-8", response.getType());
            assertEquals("{}", response.getContent());
        }

        // wait for cancellation to finish
        waitUntilNoRunningJobs();

        // ensure we can access job details when its finished (FLINK-4011)
        try (HttpTestClient client = new HttpTestClient("localhost", getRestPort())) {
            Duration timeout = Duration.ofSeconds(30);
            client.sendGetRequest("/jobs/" + jid + "/config", timeout);
            HttpTestClient.SimpleHttpResponse response = client.getNextResponse(timeout);
            assertEquals(HttpResponseStatus.OK, response.getStatus());
            assertEquals("application/json; charset=UTF-8", response.getType());
            assertEquals("{\"jid\":\"" + jid + "\",\"name\":\"Stoppable streaming test job\"," +
                "\"execution-config\":{\"execution-mode\":\"PIPELINED\",\"restart-strategy\":\"Cluster level default restart strategy\"," +
                "\"job-parallelism\":1,\"object-reuse-mode\":false,\"user-config\":{}}}", response.getContent());
        }
        BlockingInvokable.reset();
    }

    @Test
    public void testCancelYarn() throws Exception {
        final JobID jid = submitBlockingJobAndWaitUntilRunning().getJobID();

        final Duration testTimeout = Duration.ofMinutes(2);
        final LocalTime deadline = LocalTime.now().plus(testTimeout);

        try (HttpTestClient client = new HttpTestClient("localhost", getRestPort())) {
            // Request the file from the web server
            client.sendGetRequest("/jobs/" + jid + "/yarn-cancel", getTimeLeft(deadline));
            HttpTestClient.SimpleHttpResponse response = client.getNextResponse(getTimeLeft(deadline));
            assertEquals(HttpResponseStatus.ACCEPTED, response.getStatus());
            assertEquals("application/json; charset=UTF-8", response.getType());
            assertEquals("{}", response.getContent());
        }

        // wait for cancellation to finish
        waitUntilNoRunningJobs();

        BlockingInvokable.reset();
    }

    /** Returns the ids of all jobs that are not yet in a globally terminal state. */
    private static List<JobID> getRunningJobs(ClusterClient<?> client) throws Exception {
        Collection<JobStatusMessage> statusMessages = client.listJobs().get();
        return statusMessages.stream()
            .filter(status -> !status.getJobState().isGloballyTerminalState())
            .map(JobStatusMessage::getJobId)
            .collect(Collectors.toList());
    }

    private static Duration getTimeLeft(LocalTime deadline) {
        return Duration.between(LocalTime.now(), deadline);
    }

    /**
     * Test invokable that allows waiting for all subtasks to be running.
     */
    public static class BlockingInvokable extends AbstractInvokable {

        // Counted down once per subtask (parallelism is 2); await() blocks until
        // both subtasks have entered invoke().
        private static CountDownLatch latch = new CountDownLatch(2);

        private volatile boolean isRunning = true;

        public BlockingInvokable(Environment environment) {
            super(environment);
        }

        @Override
        public void invoke() throws Exception {
            latch.countDown();
            while (isRunning) {
                Thread.sleep(100);
            }
        }

        @Override
        public void cancel() {
            this.isRunning = false;
        }

        public static void reset() {
            latch = new CountDownLatch(2);
        }
    }
}
| apache-2.0 |
sesuncedu/elk-reasoner | elk-reasoner/src/main/java/org/semanticweb/elk/reasoner/indexing/conversion/ElkUnexpectedIndexingException.java | 1505 | package org.semanticweb.elk.reasoner.indexing.conversion;
import org.semanticweb.elk.reasoner.indexing.hierarchy.IndexedObject;
/*
* #%L
* ELK Reasoner
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2011 - 2013 Department of Computer Science, University of Oxford
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
 * An exception to signal incorrect indexing behavior.
 *
 * @author "Yevgeny Kazakov"
 *
 */
public class ElkUnexpectedIndexingException extends ElkIndexingException {
    private static final long serialVersionUID = -6297215279078361253L;
    protected ElkUnexpectedIndexingException() {
    }
    /** Creates an exception reporting a problem while indexing {@code object}. */
    public ElkUnexpectedIndexingException(IndexedObject object) {
        super("Error indexing " + object);
    }
    /** Creates an exception with the given detail message. */
    public ElkUnexpectedIndexingException(String message) {
        super(message);
    }
    /** Creates an exception wrapping the given cause. */
    public ElkUnexpectedIndexingException(Throwable cause) {
        super(cause);
    }
    /** Creates an exception with the given detail message and cause. */
    public ElkUnexpectedIndexingException(String message, Throwable cause) {
        super(message, cause);
    }
}
| apache-2.0 |
RanjithKumar5550/RanMifos | fineract-provider/src/main/java/org/apache/fineract/accounting/glaccount/handler/UpdateGLAccountCommandHandler.java | 1997 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.accounting.glaccount.handler;
import org.apache.fineract.accounting.glaccount.service.GLAccountWritePlatformService;
import org.apache.fineract.commands.annotation.CommandType;
import org.apache.fineract.commands.handler.NewCommandSourceHandler;
import org.apache.fineract.infrastructure.core.api.JsonCommand;
import org.apache.fineract.infrastructure.core.data.CommandProcessingResult;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@CommandType(entity = "GLACCOUNT", action = "UPDATE")
public class UpdateGLAccountCommandHandler implements NewCommandSourceHandler {
private final GLAccountWritePlatformService writePlatformService;
    // Spring injects the single collaborator that performs the actual GL account update.
    @Autowired
    public UpdateGLAccountCommandHandler(final GLAccountWritePlatformService writePlatformService) {
        this.writePlatformService = writePlatformService;
    }
    /**
     * Handles the UPDATE GLACCOUNT command by delegating to the write platform
     * service; {@code command.entityId()} identifies the GL account to update.
     */
    @Transactional
    @Override
    public CommandProcessingResult processCommand(final JsonCommand command) {
        return this.writePlatformService.updateGLAccount(command.entityId(), command);
    }
} | apache-2.0 |
ksfzhaohui/spring-loaded | testdata/src/main/java/proxy/three/TestInvocationHandlerA1.java | 820 | package proxy.three;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
/**
 * Invocation handler used as reload-test data: it logs each intercepted call
 * and always returns {@code null}.
 */
public class TestInvocationHandlerA1 implements InvocationHandler {
    public TestInvocationHandlerA1() {
    }
    /** Logs the name of the invoked method; never delegates, always returns null. */
    public Object invoke(Object proxy, Method m, Object[] args) throws Throwable {
        System.out.println("TestInvocationHandler1.invoke() for " + m.getName());
        return null;
    }
    /** Creates a dynamic proxy for the given interfaces backed by a fresh handler. */
    public static Object newInstance(Class<?>... interfaces) {
        return Proxy.newProxyInstance(Thread.currentThread().getContextClassLoader(), interfaces, new TestInvocationHandlerA1());
    }
}
| apache-2.0 |
rameshvoltella/AndroidViewUtils | library/src/main/java/com/sage42/android/view/fonts/MyToggleButton.java | 1996 | package com.sage42.android.view.fonts;
import android.content.Context;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.widget.ToggleButton;
/**
* Copyright (C) 2013- Sage 42 App Sdn Bhd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @author Corey Scott (corey.scott@sage42.com)
*
*/
public class MyToggleButton extends ToggleButton
{
    public MyToggleButton(final Context context)
    {
        super(context);
        this.initCustomFonts(context, null);
    }
    public MyToggleButton(final Context context, final AttributeSet attrs)
    {
        super(context, attrs);
        this.initCustomFonts(context, attrs);
    }
    public MyToggleButton(final Context context, final AttributeSet attrs, final int defStyle)
    {
        super(context, attrs, defStyle);
        this.initCustomFonts(context, attrs);
    }
    /**
     * Resolves the typeface requested via the XML attributes and applies it to
     * this button.
     *
     * @param context current context
     * @param attrs view attributes, may be null
     */
    private void initCustomFonts(final Context context, final AttributeSet attrs)
    {
        // The layout-editor preview cannot resolve font assets, so stop early.
        if (this.isInEditMode())
        {
            return;
        }
        // Fonts work as a combination of a particular family and a style.
        final Typeface typeface = FontManager.extractTypeface(context, attrs);
        if (typeface == null)
        {
            // No custom family/style requested; keep the default typeface.
            return;
        }
        this.setTypeface(typeface);
    }
}
| apache-2.0 |
maheshika/product-ss | modules/samples/css/CLI/css-ks-keycounter/src/main/java/org/wso2/carbon/cassandra/sample/keycounter/KSUtils.java | 1873 | /*
* Copyright (c) 2005-2012, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.cassandra.sample.keycounter;
import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.cassandra.service.KeyIterator;
import me.prettyprint.hector.api.Cluster;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.query.RangeSlicesQuery;
import java.util.Iterator;
public class KSUtils {
    private static Cluster cluster;
    private static StringSerializer stringSerializer = StringSerializer.get();
    /**
     * Prints every row key of the given column family followed by the total count.
     *
     * @param keyspace Hector keyspace to read from
     * @param columnFamilyName column family whose keys are printed and counted
     */
    public static void getKeyCount(Keyspace keyspace, String columnFamilyName){
        KeyIterator<String> keyIterator = new KeyIterator<String>(keyspace, columnFamilyName, stringSerializer);
        long keyCount = 0;
        // Bug fix: the original loop condition asked the KeyIterator for a fresh
        // iterator on every test (keyIterator.iterator().hasNext()) instead of
        // checking the iterator actually being advanced. Iterate one instance.
        Iterator<String> iterator = keyIterator.iterator();
        while (iterator.hasNext()) {
            System.out.println(iterator.next());
            keyCount++;
        }
        // NOTE(review): an unused RangeSlicesQuery was previously built here and
        // never executed; it has been removed as dead code.
        System.out.println("Key count: " + keyCount);
    }
}
| apache-2.0 |
sijie/bookkeeper | stream/distributedlog/core/src/test/java/org/apache/distributedlog/zk/TestZKTransaction.java | 3522 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.distributedlog.zk;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import java.util.concurrent.CountDownLatch;
import javax.annotation.Nullable;
import org.apache.distributedlog.ZooKeeperClient;
import org.apache.distributedlog.exceptions.DLIllegalStateException;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.Op;
import org.apache.zookeeper.OpResult;
import org.junit.Test;
/**
 * Test Case for zookeeper transaction.
 */
public class TestZKTransaction {
    /**
     * {@link ZKOp} stub that records commit and abort callbacks on the supplied latches.
     */
    static class CountDownZKOp extends ZKOp {
        final CountDownLatch commitLatch;
        final CountDownLatch abortLatch;
        CountDownZKOp(CountDownLatch commitLatch,
                      CountDownLatch abortLatch) {
            super(mock(Op.class));
            this.commitLatch = commitLatch;
            this.abortLatch = abortLatch;
        }
        @Override
        protected void commitOpResult(OpResult opResult) {
            commitLatch.countDown();
        }
        @Override
        protected void abortOpResult(Throwable t, @Nullable OpResult opResult) {
            abortLatch.countDown();
        }
    }
    /** Builds a transaction over a mocked client containing {@code numOps} latch-counting ops. */
    private static ZKTransaction createTransaction(int numOps,
                                                   CountDownLatch commitLatch,
                                                   CountDownLatch abortLatch) {
        ZooKeeperClient zkc = mock(ZooKeeperClient.class);
        ZKTransaction transaction = new ZKTransaction(zkc);
        for (int i = 0; i < numOps; i++) {
            transaction.addOp(new CountDownZKOp(commitLatch, abortLatch));
        }
        return transaction;
    }
    @Test(timeout = 60000)
    public void testProcessNullResults() throws Exception {
        final int numOps = 3;
        final CountDownLatch commitLatch = new CountDownLatch(numOps);
        final CountDownLatch abortLatch = new CountDownLatch(numOps);
        ZKTransaction transaction = createTransaction(numOps, commitLatch, abortLatch);
        // A failure callback with no per-op results must abort every op and commit none.
        transaction.processResult(
                KeeperException.Code.CONNECTIONLOSS.intValue(),
                "test-path",
                null,
                null);
        abortLatch.await();
        assertEquals(0, abortLatch.getCount());
        assertEquals(numOps, commitLatch.getCount());
    }
    @Test(timeout = 60000)
    public void testAbortTransaction() throws Exception {
        final int numOps = 3;
        final CountDownLatch commitLatch = new CountDownLatch(numOps);
        final CountDownLatch abortLatch = new CountDownLatch(numOps);
        ZKTransaction transaction = createTransaction(numOps, commitLatch, abortLatch);
        // An explicit abort must notify every op's abort callback and commit none.
        transaction.abort(new DLIllegalStateException("Illegal State"));
        abortLatch.await();
        assertEquals(0, abortLatch.getCount());
        assertEquals(numOps, commitLatch.getCount());
    }
}
| apache-2.0 |
rorogarcete/jbpm-console-ng | jbpm-console-ng-executor-service/jbpm-console-ng-executor-service-client/src/main/java/org/jbpm/console/ng/es/client/resources/ShowcaseResources.java | 1293 | /*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.console.ng.es.client.resources;
import com.google.gwt.core.client.GWT;
import com.google.gwt.resources.client.ClientBundle;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.resources.client.ImageResource.ImageOptions;
import com.google.gwt.resources.client.ImageResource.RepeatStyle;
public interface ShowcaseResources extends ClientBundle {
    // Singleton bundle instance materialized via GWT deferred binding.
    ShowcaseResources INSTANCE = GWT.create(ShowcaseResources.class);
    // Banner image; marked horizontally repeatable for tiled backgrounds.
    @ImageOptions(repeatStyle = RepeatStyle.Horizontal)
    @Source("images/jbossrulesBlue.gif")
    ImageResource jbossrulesBlue();
    // Main stylesheet for the showcase UI.
    @Source("css/Showcase.css")
    ShowcaseCss showcaseCss();
    // Nested bundle of showcase images.
    ShowcaseImages showcaseImages();
}
| apache-2.0 |
meggermo/jackrabbit-oak | oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/osgi/SolrQueryIndexProviderService.java | 4484 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.solr.osgi;
import java.util.List;
import com.google.common.collect.Lists;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.ReferencePolicy;
import org.apache.felix.scr.annotations.ReferencePolicyOption;
import org.apache.jackrabbit.oak.commons.PropertiesUtil;
import org.apache.jackrabbit.oak.plugins.index.aggregate.AggregateIndexProvider;
import org.apache.jackrabbit.oak.plugins.index.aggregate.NodeAggregator;
import org.apache.jackrabbit.oak.plugins.index.solr.configuration.OakSolrConfigurationProvider;
import org.apache.jackrabbit.oak.plugins.index.solr.query.SolrQueryIndexProvider;
import org.apache.jackrabbit.oak.plugins.index.solr.server.SolrServerProvider;
import org.apache.jackrabbit.oak.spi.query.QueryIndexProvider;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.component.ComponentContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Osgi Service that provides Solr based {@link org.apache.jackrabbit.oak.spi.query.QueryIndex}es
*
* @see org.apache.jackrabbit.oak.plugins.index.solr.query.SolrQueryIndexProvider
* @see QueryIndexProvider
*/
@Component(metatype = true, immediate = true, label = "Apache Jackrabbit Oak Solr Query index provider configuration")
public class SolrQueryIndexProviderService {
    private static final boolean QUERY_TIME_AGGREGATION_DEFAULT = false;
    private final Logger log = LoggerFactory.getLogger(getClass());
    // Service registrations created by activate(); unregistered and cleared on deactivate().
    private final List<ServiceRegistration> regs = Lists.newArrayList();
    @Reference
    private SolrServerProvider solrServerProvider;
    @Reference
    private OakSolrConfigurationProvider oakSolrConfigurationProvider;
    // Optional, dynamically bound aggregator; may be null if no NodeAggregator service exists.
    @Reference(cardinality = ReferenceCardinality.OPTIONAL_UNARY,
            policyOption = ReferencePolicyOption.GREEDY,
            policy = ReferencePolicy.DYNAMIC
    )
    private volatile NodeAggregator nodeAggregator;
    @Property(boolValue = QUERY_TIME_AGGREGATION_DEFAULT, label = "query time aggregation",
            description = "enable query time aggregation for Solr index")
    private static final String QUERY_TIME_AGGREGATION = "query.aggregation";
    /**
     * Registers a Solr-backed {@link QueryIndexProvider}, optionally wrapped for
     * query-time aggregation, as an OSGi service.
     */
    @SuppressWarnings("UnusedDeclaration")
    @Activate
    protected void activate(ComponentContext componentContext) {
        Object value = componentContext.getProperties().get(QUERY_TIME_AGGREGATION);
        boolean queryTimeAggregation = PropertiesUtil.toBoolean(value, QUERY_TIME_AGGREGATION_DEFAULT);
        if (solrServerProvider != null && oakSolrConfigurationProvider != null) {
            QueryIndexProvider solrQueryIndexProvider = new SolrQueryIndexProvider(solrServerProvider,
                    oakSolrConfigurationProvider, nodeAggregator);
            log.debug("creating Solr query index provider {} query time aggregation", queryTimeAggregation ? "with" : "without");
            if (queryTimeAggregation) {
                solrQueryIndexProvider = AggregateIndexProvider.wrap(solrQueryIndexProvider);
            }
            regs.add(componentContext.getBundleContext().registerService(QueryIndexProvider.class.getName(), solrQueryIndexProvider, null));
        }
    }
    /** Unregisters every service registered by {@link #activate(ComponentContext)}. */
    @SuppressWarnings("UnusedDeclaration")
    @Deactivate
    protected void deactivate() {
        for (ServiceRegistration registration : regs) {
            registration.unregister();
        }
        // Bug fix: drop the stale registrations so that if this component instance
        // is reactivated, a later deactivate() does not unregister them twice.
        regs.clear();
    }
}
| apache-2.0 |
sanlingdd/broadleaf | core/broadleaf-framework/src/main/java/org/broadleafcommerce/core/search/domain/CategoryExcludedSearchFacetImpl.java | 3587 | /*
* #%L
* BroadleafCommerce Framework
* %%
* Copyright (C) 2009 - 2013 Broadleaf Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.broadleafcommerce.core.search.domain;
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import org.broadleafcommerce.common.extensibility.jpa.copy.DirectCopyTransform;
import org.broadleafcommerce.common.extensibility.jpa.copy.DirectCopyTransformMember;
import org.broadleafcommerce.common.extensibility.jpa.copy.DirectCopyTransformTypes;
import org.broadleafcommerce.core.catalog.domain.Category;
import org.broadleafcommerce.core.catalog.domain.CategoryImpl;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_CAT_SEARCH_FACET_EXCL_XREF")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region = "blStandardElements")
@DirectCopyTransform({
    @DirectCopyTransformMember(templateTokens = DirectCopyTransformTypes.SANDBOX, skipOverlaps = true),
    @DirectCopyTransformMember(templateTokens = DirectCopyTransformTypes.MULTITENANT_CATALOG)
})
public class CategoryExcludedSearchFacetImpl implements CategoryExcludedSearchFacet, Serializable {
    // Serialization version; bump only on incompatible field changes.
    private static final long serialVersionUID = 1L;
    // Surrogate key generated via Broadleaf's table-based id generator.
    @Id
    @GeneratedValue(generator = "CategoryExcludedSearchFacetId")
    @GenericGenerator(
        name="CategoryExcludedSearchFacetId",
        strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
        parameters = {
            @Parameter(name="segment_value", value="CategoryExcludedSearchFacetImpl"),
            @Parameter(name="entity_name", value="org.broadleafcommerce.core.search.domain.CategoryExcludedSearchFacetImpl")
        }
    )
    @Column(name = "CAT_EXCL_SEARCH_FACET_ID")
    protected Long id;
    // The category for which the facet is excluded.
    @ManyToOne(targetEntity = CategoryImpl.class)
    @JoinColumn(name = "CATEGORY_ID")
    protected Category category;
    // The search facet excluded from the category above.
    @ManyToOne(targetEntity = SearchFacetImpl.class)
    @JoinColumn(name = "SEARCH_FACET_ID")
    protected SearchFacet searchFacet;
    @Override
    public Long getId() {
        return id;
    }
    @Override
    public void setId(Long id) {
        this.id = id;
    }
    @Override
    public Category getCategory() {
        return category;
    }
    @Override
    public void setCategory(Category category) {
        this.category = category;
    }
    @Override
    public SearchFacet getSearchFacet() {
        return searchFacet;
    }
    @Override
    public void setSearchFacet(SearchFacet searchFacet) {
        this.searchFacet = searchFacet;
    }
}
| apache-2.0 |
PlayFab/JavaSDK | PlayFabServerSDK/src/main/java/com/playfab/PlayFabGroupsAPI.java | 74401 | package com.playfab;
import com.playfab.internal.*;
import com.playfab.PlayFabGroupsModels.*;
import com.playfab.PlayFabErrors.*;
import com.playfab.PlayFabSettings;
import java.util.concurrent.*;
import java.util.*;
import com.google.gson.*;
import com.google.gson.reflect.*;
/**
* The Groups API is designed for any permanent or semi-permanent collections of Entities (players, or non-players). If you
* want to make Guilds/Clans/Corporations/etc., then you should use groups. Groups can also be used to make chatrooms,
* parties, or any other persistent collection of entities.
*/
public class PlayFabGroupsAPI {
private static Gson gson = new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'").create();
/**
* Accepts an outstanding invitation to to join a group
* @param request AcceptGroupApplicationRequest
* @return Async Task will return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<EmptyResponse>> AcceptGroupApplicationAsync(final AcceptGroupApplicationRequest request) {
return new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateAcceptGroupApplicationAsync(request);
}
});
}
/**
* Accepts an outstanding invitation to to join a group
* @param request AcceptGroupApplicationRequest
* @return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<EmptyResponse> AcceptGroupApplication(final AcceptGroupApplicationRequest request) {
FutureTask<PlayFabResult<EmptyResponse>> task = new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateAcceptGroupApplicationAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<EmptyResponse> exceptionResult = new PlayFabResult<EmptyResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Accepts an outstanding invitation to to join a group */
@SuppressWarnings("unchecked")
private static PlayFabResult<EmptyResponse> privateAcceptGroupApplicationAsync(final AcceptGroupApplicationRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/AcceptGroupApplication"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<EmptyResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<EmptyResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<EmptyResponse>>(){}.getType());
EmptyResponse result = resultData.data;
PlayFabResult<EmptyResponse> pfResult = new PlayFabResult<EmptyResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Accepts an invitation to join a group
* @param request AcceptGroupInvitationRequest
* @return Async Task will return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<EmptyResponse>> AcceptGroupInvitationAsync(final AcceptGroupInvitationRequest request) {
return new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateAcceptGroupInvitationAsync(request);
}
});
}
/**
* Accepts an invitation to join a group
* @param request AcceptGroupInvitationRequest
* @return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<EmptyResponse> AcceptGroupInvitation(final AcceptGroupInvitationRequest request) {
FutureTask<PlayFabResult<EmptyResponse>> task = new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateAcceptGroupInvitationAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<EmptyResponse> exceptionResult = new PlayFabResult<EmptyResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Accepts an invitation to join a group */
@SuppressWarnings("unchecked")
private static PlayFabResult<EmptyResponse> privateAcceptGroupInvitationAsync(final AcceptGroupInvitationRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/AcceptGroupInvitation"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<EmptyResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<EmptyResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<EmptyResponse>>(){}.getType());
EmptyResponse result = resultData.data;
PlayFabResult<EmptyResponse> pfResult = new PlayFabResult<EmptyResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Adds members to a group or role.
* @param request AddMembersRequest
* @return Async Task will return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<EmptyResponse>> AddMembersAsync(final AddMembersRequest request) {
return new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateAddMembersAsync(request);
}
});
}
/**
* Adds members to a group or role.
* @param request AddMembersRequest
* @return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<EmptyResponse> AddMembers(final AddMembersRequest request) {
FutureTask<PlayFabResult<EmptyResponse>> task = new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateAddMembersAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<EmptyResponse> exceptionResult = new PlayFabResult<EmptyResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Adds members to a group or role. */
@SuppressWarnings("unchecked")
private static PlayFabResult<EmptyResponse> privateAddMembersAsync(final AddMembersRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/AddMembers"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<EmptyResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<EmptyResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<EmptyResponse>>(){}.getType());
EmptyResponse result = resultData.data;
PlayFabResult<EmptyResponse> pfResult = new PlayFabResult<EmptyResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Applies to join a group
* @param request ApplyToGroupRequest
* @return Async Task will return ApplyToGroupResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<ApplyToGroupResponse>> ApplyToGroupAsync(final ApplyToGroupRequest request) {
return new FutureTask(new Callable<PlayFabResult<ApplyToGroupResponse>>() {
public PlayFabResult<ApplyToGroupResponse> call() throws Exception {
return privateApplyToGroupAsync(request);
}
});
}
/**
* Applies to join a group
* @param request ApplyToGroupRequest
* @return ApplyToGroupResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<ApplyToGroupResponse> ApplyToGroup(final ApplyToGroupRequest request) {
FutureTask<PlayFabResult<ApplyToGroupResponse>> task = new FutureTask(new Callable<PlayFabResult<ApplyToGroupResponse>>() {
public PlayFabResult<ApplyToGroupResponse> call() throws Exception {
return privateApplyToGroupAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<ApplyToGroupResponse> exceptionResult = new PlayFabResult<ApplyToGroupResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Applies to join a group */
@SuppressWarnings("unchecked")
private static PlayFabResult<ApplyToGroupResponse> privateApplyToGroupAsync(final ApplyToGroupRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/ApplyToGroup"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<ApplyToGroupResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<ApplyToGroupResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<ApplyToGroupResponse>>(){}.getType());
ApplyToGroupResponse result = resultData.data;
PlayFabResult<ApplyToGroupResponse> pfResult = new PlayFabResult<ApplyToGroupResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Blocks a list of entities from joining a group.
* @param request BlockEntityRequest
* @return Async Task will return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<EmptyResponse>> BlockEntityAsync(final BlockEntityRequest request) {
return new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateBlockEntityAsync(request);
}
});
}
/**
* Blocks a list of entities from joining a group.
* @param request BlockEntityRequest
* @return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<EmptyResponse> BlockEntity(final BlockEntityRequest request) {
FutureTask<PlayFabResult<EmptyResponse>> task = new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateBlockEntityAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<EmptyResponse> exceptionResult = new PlayFabResult<EmptyResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Blocks a list of entities from joining a group. */
@SuppressWarnings("unchecked")
private static PlayFabResult<EmptyResponse> privateBlockEntityAsync(final BlockEntityRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/BlockEntity"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<EmptyResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<EmptyResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<EmptyResponse>>(){}.getType());
EmptyResponse result = resultData.data;
PlayFabResult<EmptyResponse> pfResult = new PlayFabResult<EmptyResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Changes the role membership of a list of entities from one role to another.
* @param request ChangeMemberRoleRequest
* @return Async Task will return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<EmptyResponse>> ChangeMemberRoleAsync(final ChangeMemberRoleRequest request) {
return new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateChangeMemberRoleAsync(request);
}
});
}
/**
* Changes the role membership of a list of entities from one role to another.
* @param request ChangeMemberRoleRequest
* @return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<EmptyResponse> ChangeMemberRole(final ChangeMemberRoleRequest request) {
FutureTask<PlayFabResult<EmptyResponse>> task = new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateChangeMemberRoleAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<EmptyResponse> exceptionResult = new PlayFabResult<EmptyResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Changes the role membership of a list of entities from one role to another. */
@SuppressWarnings("unchecked")
private static PlayFabResult<EmptyResponse> privateChangeMemberRoleAsync(final ChangeMemberRoleRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/ChangeMemberRole"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<EmptyResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<EmptyResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<EmptyResponse>>(){}.getType());
EmptyResponse result = resultData.data;
PlayFabResult<EmptyResponse> pfResult = new PlayFabResult<EmptyResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Creates a new group.
* @param request CreateGroupRequest
* @return Async Task will return CreateGroupResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<CreateGroupResponse>> CreateGroupAsync(final CreateGroupRequest request) {
return new FutureTask(new Callable<PlayFabResult<CreateGroupResponse>>() {
public PlayFabResult<CreateGroupResponse> call() throws Exception {
return privateCreateGroupAsync(request);
}
});
}
/**
* Creates a new group.
* @param request CreateGroupRequest
* @return CreateGroupResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<CreateGroupResponse> CreateGroup(final CreateGroupRequest request) {
FutureTask<PlayFabResult<CreateGroupResponse>> task = new FutureTask(new Callable<PlayFabResult<CreateGroupResponse>>() {
public PlayFabResult<CreateGroupResponse> call() throws Exception {
return privateCreateGroupAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<CreateGroupResponse> exceptionResult = new PlayFabResult<CreateGroupResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Creates a new group. */
@SuppressWarnings("unchecked")
private static PlayFabResult<CreateGroupResponse> privateCreateGroupAsync(final CreateGroupRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/CreateGroup"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<CreateGroupResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<CreateGroupResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<CreateGroupResponse>>(){}.getType());
CreateGroupResponse result = resultData.data;
PlayFabResult<CreateGroupResponse> pfResult = new PlayFabResult<CreateGroupResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Creates a new group role.
* @param request CreateGroupRoleRequest
* @return Async Task will return CreateGroupRoleResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<CreateGroupRoleResponse>> CreateRoleAsync(final CreateGroupRoleRequest request) {
return new FutureTask(new Callable<PlayFabResult<CreateGroupRoleResponse>>() {
public PlayFabResult<CreateGroupRoleResponse> call() throws Exception {
return privateCreateRoleAsync(request);
}
});
}
/**
* Creates a new group role.
* @param request CreateGroupRoleRequest
* @return CreateGroupRoleResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<CreateGroupRoleResponse> CreateRole(final CreateGroupRoleRequest request) {
FutureTask<PlayFabResult<CreateGroupRoleResponse>> task = new FutureTask(new Callable<PlayFabResult<CreateGroupRoleResponse>>() {
public PlayFabResult<CreateGroupRoleResponse> call() throws Exception {
return privateCreateRoleAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<CreateGroupRoleResponse> exceptionResult = new PlayFabResult<CreateGroupRoleResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Creates a new group role. */
@SuppressWarnings("unchecked")
private static PlayFabResult<CreateGroupRoleResponse> privateCreateRoleAsync(final CreateGroupRoleRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/CreateRole"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<CreateGroupRoleResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<CreateGroupRoleResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<CreateGroupRoleResponse>>(){}.getType());
CreateGroupRoleResponse result = resultData.data;
PlayFabResult<CreateGroupRoleResponse> pfResult = new PlayFabResult<CreateGroupRoleResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Deletes a group and all roles, invitations, join requests, and blocks associated with it.
* @param request DeleteGroupRequest
* @return Async Task will return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<EmptyResponse>> DeleteGroupAsync(final DeleteGroupRequest request) {
return new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateDeleteGroupAsync(request);
}
});
}
/**
* Deletes a group and all roles, invitations, join requests, and blocks associated with it.
* @param request DeleteGroupRequest
* @return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<EmptyResponse> DeleteGroup(final DeleteGroupRequest request) {
FutureTask<PlayFabResult<EmptyResponse>> task = new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateDeleteGroupAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<EmptyResponse> exceptionResult = new PlayFabResult<EmptyResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Deletes a group and all roles, invitations, join requests, and blocks associated with it. */
@SuppressWarnings("unchecked")
private static PlayFabResult<EmptyResponse> privateDeleteGroupAsync(final DeleteGroupRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/DeleteGroup"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<EmptyResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<EmptyResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<EmptyResponse>>(){}.getType());
EmptyResponse result = resultData.data;
PlayFabResult<EmptyResponse> pfResult = new PlayFabResult<EmptyResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Deletes an existing role in a group.
* @param request DeleteRoleRequest
* @return Async Task will return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<EmptyResponse>> DeleteRoleAsync(final DeleteRoleRequest request) {
return new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateDeleteRoleAsync(request);
}
});
}
/**
* Deletes an existing role in a group.
* @param request DeleteRoleRequest
* @return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<EmptyResponse> DeleteRole(final DeleteRoleRequest request) {
FutureTask<PlayFabResult<EmptyResponse>> task = new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateDeleteRoleAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<EmptyResponse> exceptionResult = new PlayFabResult<EmptyResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Deletes an existing role in a group. */
@SuppressWarnings("unchecked")
private static PlayFabResult<EmptyResponse> privateDeleteRoleAsync(final DeleteRoleRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/DeleteRole"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<EmptyResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<EmptyResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<EmptyResponse>>(){}.getType());
EmptyResponse result = resultData.data;
PlayFabResult<EmptyResponse> pfResult = new PlayFabResult<EmptyResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Gets information about a group and its roles
* @param request GetGroupRequest
* @return Async Task will return GetGroupResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<GetGroupResponse>> GetGroupAsync(final GetGroupRequest request) {
return new FutureTask(new Callable<PlayFabResult<GetGroupResponse>>() {
public PlayFabResult<GetGroupResponse> call() throws Exception {
return privateGetGroupAsync(request);
}
});
}
/**
* Gets information about a group and its roles
* @param request GetGroupRequest
* @return GetGroupResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<GetGroupResponse> GetGroup(final GetGroupRequest request) {
FutureTask<PlayFabResult<GetGroupResponse>> task = new FutureTask(new Callable<PlayFabResult<GetGroupResponse>>() {
public PlayFabResult<GetGroupResponse> call() throws Exception {
return privateGetGroupAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<GetGroupResponse> exceptionResult = new PlayFabResult<GetGroupResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Gets information about a group and its roles */
@SuppressWarnings("unchecked")
private static PlayFabResult<GetGroupResponse> privateGetGroupAsync(final GetGroupRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/GetGroup"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<GetGroupResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<GetGroupResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<GetGroupResponse>>(){}.getType());
GetGroupResponse result = resultData.data;
PlayFabResult<GetGroupResponse> pfResult = new PlayFabResult<GetGroupResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Invites a player to join a group
* @param request InviteToGroupRequest
* @return Async Task will return InviteToGroupResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<InviteToGroupResponse>> InviteToGroupAsync(final InviteToGroupRequest request) {
return new FutureTask(new Callable<PlayFabResult<InviteToGroupResponse>>() {
public PlayFabResult<InviteToGroupResponse> call() throws Exception {
return privateInviteToGroupAsync(request);
}
});
}
/**
* Invites a player to join a group
* @param request InviteToGroupRequest
* @return InviteToGroupResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<InviteToGroupResponse> InviteToGroup(final InviteToGroupRequest request) {
FutureTask<PlayFabResult<InviteToGroupResponse>> task = new FutureTask(new Callable<PlayFabResult<InviteToGroupResponse>>() {
public PlayFabResult<InviteToGroupResponse> call() throws Exception {
return privateInviteToGroupAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<InviteToGroupResponse> exceptionResult = new PlayFabResult<InviteToGroupResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Invites a player to join a group */
@SuppressWarnings("unchecked")
private static PlayFabResult<InviteToGroupResponse> privateInviteToGroupAsync(final InviteToGroupRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/InviteToGroup"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<InviteToGroupResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<InviteToGroupResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<InviteToGroupResponse>>(){}.getType());
InviteToGroupResponse result = resultData.data;
PlayFabResult<InviteToGroupResponse> pfResult = new PlayFabResult<InviteToGroupResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Checks to see if an entity is a member of a group or role within the group
* @param request IsMemberRequest
* @return Async Task will return IsMemberResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<IsMemberResponse>> IsMemberAsync(final IsMemberRequest request) {
return new FutureTask(new Callable<PlayFabResult<IsMemberResponse>>() {
public PlayFabResult<IsMemberResponse> call() throws Exception {
return privateIsMemberAsync(request);
}
});
}
/**
* Checks to see if an entity is a member of a group or role within the group
* @param request IsMemberRequest
* @return IsMemberResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<IsMemberResponse> IsMember(final IsMemberRequest request) {
FutureTask<PlayFabResult<IsMemberResponse>> task = new FutureTask(new Callable<PlayFabResult<IsMemberResponse>>() {
public PlayFabResult<IsMemberResponse> call() throws Exception {
return privateIsMemberAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<IsMemberResponse> exceptionResult = new PlayFabResult<IsMemberResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Checks to see if an entity is a member of a group or role within the group */
@SuppressWarnings("unchecked")
private static PlayFabResult<IsMemberResponse> privateIsMemberAsync(final IsMemberRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/IsMember"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<IsMemberResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<IsMemberResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<IsMemberResponse>>(){}.getType());
IsMemberResponse result = resultData.data;
PlayFabResult<IsMemberResponse> pfResult = new PlayFabResult<IsMemberResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Lists all outstanding requests to join a group
* @param request ListGroupApplicationsRequest
* @return Async Task will return ListGroupApplicationsResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<ListGroupApplicationsResponse>> ListGroupApplicationsAsync(final ListGroupApplicationsRequest request) {
return new FutureTask(new Callable<PlayFabResult<ListGroupApplicationsResponse>>() {
public PlayFabResult<ListGroupApplicationsResponse> call() throws Exception {
return privateListGroupApplicationsAsync(request);
}
});
}
/**
* Lists all outstanding requests to join a group
* @param request ListGroupApplicationsRequest
* @return ListGroupApplicationsResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<ListGroupApplicationsResponse> ListGroupApplications(final ListGroupApplicationsRequest request) {
FutureTask<PlayFabResult<ListGroupApplicationsResponse>> task = new FutureTask(new Callable<PlayFabResult<ListGroupApplicationsResponse>>() {
public PlayFabResult<ListGroupApplicationsResponse> call() throws Exception {
return privateListGroupApplicationsAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<ListGroupApplicationsResponse> exceptionResult = new PlayFabResult<ListGroupApplicationsResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Lists all outstanding requests to join a group */
@SuppressWarnings("unchecked")
private static PlayFabResult<ListGroupApplicationsResponse> privateListGroupApplicationsAsync(final ListGroupApplicationsRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/ListGroupApplications"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<ListGroupApplicationsResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<ListGroupApplicationsResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<ListGroupApplicationsResponse>>(){}.getType());
ListGroupApplicationsResponse result = resultData.data;
PlayFabResult<ListGroupApplicationsResponse> pfResult = new PlayFabResult<ListGroupApplicationsResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Lists all entities blocked from joining a group
* @param request ListGroupBlocksRequest
* @return Async Task will return ListGroupBlocksResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<ListGroupBlocksResponse>> ListGroupBlocksAsync(final ListGroupBlocksRequest request) {
return new FutureTask(new Callable<PlayFabResult<ListGroupBlocksResponse>>() {
public PlayFabResult<ListGroupBlocksResponse> call() throws Exception {
return privateListGroupBlocksAsync(request);
}
});
}
/**
* Lists all entities blocked from joining a group
* @param request ListGroupBlocksRequest
* @return ListGroupBlocksResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<ListGroupBlocksResponse> ListGroupBlocks(final ListGroupBlocksRequest request) {
FutureTask<PlayFabResult<ListGroupBlocksResponse>> task = new FutureTask(new Callable<PlayFabResult<ListGroupBlocksResponse>>() {
public PlayFabResult<ListGroupBlocksResponse> call() throws Exception {
return privateListGroupBlocksAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<ListGroupBlocksResponse> exceptionResult = new PlayFabResult<ListGroupBlocksResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Lists all entities blocked from joining a group */
@SuppressWarnings("unchecked")
private static PlayFabResult<ListGroupBlocksResponse> privateListGroupBlocksAsync(final ListGroupBlocksRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/ListGroupBlocks"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<ListGroupBlocksResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<ListGroupBlocksResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<ListGroupBlocksResponse>>(){}.getType());
ListGroupBlocksResponse result = resultData.data;
PlayFabResult<ListGroupBlocksResponse> pfResult = new PlayFabResult<ListGroupBlocksResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Lists all outstanding invitations for a group
* @param request ListGroupInvitationsRequest
* @return Async Task will return ListGroupInvitationsResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<ListGroupInvitationsResponse>> ListGroupInvitationsAsync(final ListGroupInvitationsRequest request) {
return new FutureTask(new Callable<PlayFabResult<ListGroupInvitationsResponse>>() {
public PlayFabResult<ListGroupInvitationsResponse> call() throws Exception {
return privateListGroupInvitationsAsync(request);
}
});
}
/**
* Lists all outstanding invitations for a group
* @param request ListGroupInvitationsRequest
* @return ListGroupInvitationsResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<ListGroupInvitationsResponse> ListGroupInvitations(final ListGroupInvitationsRequest request) {
FutureTask<PlayFabResult<ListGroupInvitationsResponse>> task = new FutureTask(new Callable<PlayFabResult<ListGroupInvitationsResponse>>() {
public PlayFabResult<ListGroupInvitationsResponse> call() throws Exception {
return privateListGroupInvitationsAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<ListGroupInvitationsResponse> exceptionResult = new PlayFabResult<ListGroupInvitationsResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Lists all outstanding invitations for a group */
@SuppressWarnings("unchecked")
private static PlayFabResult<ListGroupInvitationsResponse> privateListGroupInvitationsAsync(final ListGroupInvitationsRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/ListGroupInvitations"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<ListGroupInvitationsResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<ListGroupInvitationsResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<ListGroupInvitationsResponse>>(){}.getType());
ListGroupInvitationsResponse result = resultData.data;
PlayFabResult<ListGroupInvitationsResponse> pfResult = new PlayFabResult<ListGroupInvitationsResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Lists all members for a group
* @param request ListGroupMembersRequest
* @return Async Task will return ListGroupMembersResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<ListGroupMembersResponse>> ListGroupMembersAsync(final ListGroupMembersRequest request) {
return new FutureTask(new Callable<PlayFabResult<ListGroupMembersResponse>>() {
public PlayFabResult<ListGroupMembersResponse> call() throws Exception {
return privateListGroupMembersAsync(request);
}
});
}
/**
* Lists all members for a group
* @param request ListGroupMembersRequest
* @return ListGroupMembersResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<ListGroupMembersResponse> ListGroupMembers(final ListGroupMembersRequest request) {
FutureTask<PlayFabResult<ListGroupMembersResponse>> task = new FutureTask(new Callable<PlayFabResult<ListGroupMembersResponse>>() {
public PlayFabResult<ListGroupMembersResponse> call() throws Exception {
return privateListGroupMembersAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<ListGroupMembersResponse> exceptionResult = new PlayFabResult<ListGroupMembersResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Lists all members for a group */
@SuppressWarnings("unchecked")
private static PlayFabResult<ListGroupMembersResponse> privateListGroupMembersAsync(final ListGroupMembersRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/ListGroupMembers"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<ListGroupMembersResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<ListGroupMembersResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<ListGroupMembersResponse>>(){}.getType());
ListGroupMembersResponse result = resultData.data;
PlayFabResult<ListGroupMembersResponse> pfResult = new PlayFabResult<ListGroupMembersResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Lists all groups and roles for an entity
* @param request ListMembershipRequest
* @return Async Task will return ListMembershipResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<ListMembershipResponse>> ListMembershipAsync(final ListMembershipRequest request) {
return new FutureTask(new Callable<PlayFabResult<ListMembershipResponse>>() {
public PlayFabResult<ListMembershipResponse> call() throws Exception {
return privateListMembershipAsync(request);
}
});
}
/**
* Lists all groups and roles for an entity
* @param request ListMembershipRequest
* @return ListMembershipResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<ListMembershipResponse> ListMembership(final ListMembershipRequest request) {
FutureTask<PlayFabResult<ListMembershipResponse>> task = new FutureTask(new Callable<PlayFabResult<ListMembershipResponse>>() {
public PlayFabResult<ListMembershipResponse> call() throws Exception {
return privateListMembershipAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<ListMembershipResponse> exceptionResult = new PlayFabResult<ListMembershipResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Lists all groups and roles for an entity */
@SuppressWarnings("unchecked")
private static PlayFabResult<ListMembershipResponse> privateListMembershipAsync(final ListMembershipRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/ListMembership"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<ListMembershipResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<ListMembershipResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<ListMembershipResponse>>(){}.getType());
ListMembershipResponse result = resultData.data;
PlayFabResult<ListMembershipResponse> pfResult = new PlayFabResult<ListMembershipResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Lists all outstanding invitations and group applications for an entity
* @param request ListMembershipOpportunitiesRequest
* @return Async Task will return ListMembershipOpportunitiesResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<ListMembershipOpportunitiesResponse>> ListMembershipOpportunitiesAsync(final ListMembershipOpportunitiesRequest request) {
return new FutureTask(new Callable<PlayFabResult<ListMembershipOpportunitiesResponse>>() {
public PlayFabResult<ListMembershipOpportunitiesResponse> call() throws Exception {
return privateListMembershipOpportunitiesAsync(request);
}
});
}
/**
* Lists all outstanding invitations and group applications for an entity
* @param request ListMembershipOpportunitiesRequest
* @return ListMembershipOpportunitiesResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<ListMembershipOpportunitiesResponse> ListMembershipOpportunities(final ListMembershipOpportunitiesRequest request) {
FutureTask<PlayFabResult<ListMembershipOpportunitiesResponse>> task = new FutureTask(new Callable<PlayFabResult<ListMembershipOpportunitiesResponse>>() {
public PlayFabResult<ListMembershipOpportunitiesResponse> call() throws Exception {
return privateListMembershipOpportunitiesAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<ListMembershipOpportunitiesResponse> exceptionResult = new PlayFabResult<ListMembershipOpportunitiesResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Lists all outstanding invitations and group applications for an entity */
@SuppressWarnings("unchecked")
private static PlayFabResult<ListMembershipOpportunitiesResponse> privateListMembershipOpportunitiesAsync(final ListMembershipOpportunitiesRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/ListMembershipOpportunities"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<ListMembershipOpportunitiesResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<ListMembershipOpportunitiesResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<ListMembershipOpportunitiesResponse>>(){}.getType());
ListMembershipOpportunitiesResponse result = resultData.data;
PlayFabResult<ListMembershipOpportunitiesResponse> pfResult = new PlayFabResult<ListMembershipOpportunitiesResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Removes an application to join a group
* @param request RemoveGroupApplicationRequest
* @return Async Task will return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<EmptyResponse>> RemoveGroupApplicationAsync(final RemoveGroupApplicationRequest request) {
return new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateRemoveGroupApplicationAsync(request);
}
});
}
/**
* Removes an application to join a group
* @param request RemoveGroupApplicationRequest
* @return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<EmptyResponse> RemoveGroupApplication(final RemoveGroupApplicationRequest request) {
FutureTask<PlayFabResult<EmptyResponse>> task = new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateRemoveGroupApplicationAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<EmptyResponse> exceptionResult = new PlayFabResult<EmptyResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Removes an application to join a group */
@SuppressWarnings("unchecked")
private static PlayFabResult<EmptyResponse> privateRemoveGroupApplicationAsync(final RemoveGroupApplicationRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/RemoveGroupApplication"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<EmptyResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<EmptyResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<EmptyResponse>>(){}.getType());
EmptyResponse result = resultData.data;
PlayFabResult<EmptyResponse> pfResult = new PlayFabResult<EmptyResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Removes an invitation join a group
* @param request RemoveGroupInvitationRequest
* @return Async Task will return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<EmptyResponse>> RemoveGroupInvitationAsync(final RemoveGroupInvitationRequest request) {
return new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateRemoveGroupInvitationAsync(request);
}
});
}
/**
* Removes an invitation join a group
* @param request RemoveGroupInvitationRequest
* @return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<EmptyResponse> RemoveGroupInvitation(final RemoveGroupInvitationRequest request) {
FutureTask<PlayFabResult<EmptyResponse>> task = new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateRemoveGroupInvitationAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<EmptyResponse> exceptionResult = new PlayFabResult<EmptyResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Removes an invitation join a group */
@SuppressWarnings("unchecked")
private static PlayFabResult<EmptyResponse> privateRemoveGroupInvitationAsync(final RemoveGroupInvitationRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/RemoveGroupInvitation"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<EmptyResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<EmptyResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<EmptyResponse>>(){}.getType());
EmptyResponse result = resultData.data;
PlayFabResult<EmptyResponse> pfResult = new PlayFabResult<EmptyResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Removes members from a group.
* @param request RemoveMembersRequest
* @return Async Task will return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<EmptyResponse>> RemoveMembersAsync(final RemoveMembersRequest request) {
return new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateRemoveMembersAsync(request);
}
});
}
/**
* Removes members from a group.
* @param request RemoveMembersRequest
* @return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<EmptyResponse> RemoveMembers(final RemoveMembersRequest request) {
FutureTask<PlayFabResult<EmptyResponse>> task = new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateRemoveMembersAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<EmptyResponse> exceptionResult = new PlayFabResult<EmptyResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Removes members from a group. */
@SuppressWarnings("unchecked")
private static PlayFabResult<EmptyResponse> privateRemoveMembersAsync(final RemoveMembersRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/RemoveMembers"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<EmptyResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<EmptyResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<EmptyResponse>>(){}.getType());
EmptyResponse result = resultData.data;
PlayFabResult<EmptyResponse> pfResult = new PlayFabResult<EmptyResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Unblocks a list of entities from joining a group
* @param request UnblockEntityRequest
* @return Async Task will return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<EmptyResponse>> UnblockEntityAsync(final UnblockEntityRequest request) {
return new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateUnblockEntityAsync(request);
}
});
}
/**
* Unblocks a list of entities from joining a group
* @param request UnblockEntityRequest
* @return EmptyResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<EmptyResponse> UnblockEntity(final UnblockEntityRequest request) {
FutureTask<PlayFabResult<EmptyResponse>> task = new FutureTask(new Callable<PlayFabResult<EmptyResponse>>() {
public PlayFabResult<EmptyResponse> call() throws Exception {
return privateUnblockEntityAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<EmptyResponse> exceptionResult = new PlayFabResult<EmptyResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Unblocks a list of entities from joining a group */
@SuppressWarnings("unchecked")
private static PlayFabResult<EmptyResponse> privateUnblockEntityAsync(final UnblockEntityRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/UnblockEntity"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<EmptyResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<EmptyResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<EmptyResponse>>(){}.getType());
EmptyResponse result = resultData.data;
PlayFabResult<EmptyResponse> pfResult = new PlayFabResult<EmptyResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Updates non-membership data about a group.
* @param request UpdateGroupRequest
* @return Async Task will return UpdateGroupResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<UpdateGroupResponse>> UpdateGroupAsync(final UpdateGroupRequest request) {
return new FutureTask(new Callable<PlayFabResult<UpdateGroupResponse>>() {
public PlayFabResult<UpdateGroupResponse> call() throws Exception {
return privateUpdateGroupAsync(request);
}
});
}
/**
* Updates non-membership data about a group.
* @param request UpdateGroupRequest
* @return UpdateGroupResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<UpdateGroupResponse> UpdateGroup(final UpdateGroupRequest request) {
FutureTask<PlayFabResult<UpdateGroupResponse>> task = new FutureTask(new Callable<PlayFabResult<UpdateGroupResponse>>() {
public PlayFabResult<UpdateGroupResponse> call() throws Exception {
return privateUpdateGroupAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<UpdateGroupResponse> exceptionResult = new PlayFabResult<UpdateGroupResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Updates non-membership data about a group. */
@SuppressWarnings("unchecked")
private static PlayFabResult<UpdateGroupResponse> privateUpdateGroupAsync(final UpdateGroupRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/UpdateGroup"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<UpdateGroupResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<UpdateGroupResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<UpdateGroupResponse>>(){}.getType());
UpdateGroupResponse result = resultData.data;
PlayFabResult<UpdateGroupResponse> pfResult = new PlayFabResult<UpdateGroupResponse>();
pfResult.Result = result;
return pfResult;
}
/**
* Updates metadata about a role.
* @param request UpdateGroupRoleRequest
* @return Async Task will return UpdateGroupRoleResponse
*/
@SuppressWarnings("unchecked")
public static FutureTask<PlayFabResult<UpdateGroupRoleResponse>> UpdateRoleAsync(final UpdateGroupRoleRequest request) {
return new FutureTask(new Callable<PlayFabResult<UpdateGroupRoleResponse>>() {
public PlayFabResult<UpdateGroupRoleResponse> call() throws Exception {
return privateUpdateRoleAsync(request);
}
});
}
/**
* Updates metadata about a role.
* @param request UpdateGroupRoleRequest
* @return UpdateGroupRoleResponse
*/
@SuppressWarnings("unchecked")
public static PlayFabResult<UpdateGroupRoleResponse> UpdateRole(final UpdateGroupRoleRequest request) {
FutureTask<PlayFabResult<UpdateGroupRoleResponse>> task = new FutureTask(new Callable<PlayFabResult<UpdateGroupRoleResponse>>() {
public PlayFabResult<UpdateGroupRoleResponse> call() throws Exception {
return privateUpdateRoleAsync(request);
}
});
try {
task.run();
return task.get();
} catch(Exception e) {
PlayFabResult<UpdateGroupRoleResponse> exceptionResult = new PlayFabResult<UpdateGroupRoleResponse>();
exceptionResult.Error = PlayFabHTTP.GeneratePfError(-1, PlayFabErrorCode.Unknown, e.getMessage(), null, null);
return exceptionResult;
}
}
/** Updates metadata about a role. */
@SuppressWarnings("unchecked")
private static PlayFabResult<UpdateGroupRoleResponse> privateUpdateRoleAsync(final UpdateGroupRoleRequest request) throws Exception {
if (PlayFabSettings.EntityToken == null) throw new Exception ("Must call GetEntityToken before you can use the Entity API");
FutureTask<Object> task = PlayFabHTTP.doPost(PlayFabSettings.GetURL("/Group/UpdateRole"), request, "X-EntityToken", PlayFabSettings.EntityToken);
task.run();
Object httpResult = task.get();
if (httpResult instanceof PlayFabError) {
PlayFabError error = (PlayFabError)httpResult;
if (PlayFabSettings.GlobalErrorHandler != null)
PlayFabSettings.GlobalErrorHandler.callback(error);
PlayFabResult result = new PlayFabResult<UpdateGroupRoleResponse>();
result.Error = error;
return result;
}
String resultRawJson = (String) httpResult;
PlayFabJsonSuccess<UpdateGroupRoleResponse> resultData = gson.fromJson(resultRawJson, new TypeToken<PlayFabJsonSuccess<UpdateGroupRoleResponse>>(){}.getType());
UpdateGroupRoleResponse result = resultData.data;
PlayFabResult<UpdateGroupRoleResponse> pfResult = new PlayFabResult<UpdateGroupRoleResponse>();
pfResult.Result = result;
return pfResult;
}
}
| apache-2.0 |
dhalperi/batfish | projects/batfish-common-protocol/src/main/java/org/batfish/datamodel/FibNullRoute.java | 574 | package org.batfish.datamodel;
import org.batfish.datamodel.visitors.FibActionVisitor;
/** A {@link FibAction} that discards a packet. */
public final class FibNullRoute implements FibAction {
public static final FibNullRoute INSTANCE = new FibNullRoute();
private FibNullRoute() {}
@Override
public boolean equals(Object obj) {
return this == obj || obj instanceof FibNullRoute;
}
@Override
public int hashCode() {
return 0;
}
@Override
public <T> T accept(FibActionVisitor<T> visitor) {
return visitor.visitFibNullRoute(this);
}
}
| apache-2.0 |
chtyim/cdap | cdap-watchdog/src/main/java/co/cask/cdap/logging/save/CheckpointingLogFileWriter.java | 3351 | /*
* Copyright © 2014 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package co.cask.cdap.logging.save;
import co.cask.cdap.logging.kafka.KafkaLogEvent;
import co.cask.cdap.logging.write.AvroFileWriter;
import co.cask.cdap.logging.write.LogFileWriter;
import com.google.common.collect.Maps;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * LogFileWriter that checkpoints kafka offsets for each partition.
 *
 * <p>Events are appended to the underlying {@link AvroFileWriter}; the highest
 * offset seen per Kafka partition is tracked in memory and persisted through
 * the {@link CheckpointManager} whenever a flush happens (forced, or at most
 * once per {@code flushIntervalMs}).
 */
public class CheckpointingLogFileWriter implements LogFileWriter<KafkaLogEvent> {
  private static final Logger LOG = LoggerFactory.getLogger(CheckpointingLogFileWriter.class);
  // Destination writer for the actual log events.
  private final AvroFileWriter avroFileWriter;
  // Persists per-partition offsets so processing can resume after restart.
  private final CheckpointManager checkpointManager;
  // Minimum wall-clock interval between non-forced checkpoint flushes.
  private final long flushIntervalMs;
  // Wall-clock time of the last checkpoint flush (forced or periodic).
  private long lastCheckpointTime = System.currentTimeMillis();
  // Highest checkpoint (next offset + event timestamp) seen per partition id.
  private final Map<Integer, Checkpoint> partitionCheckpointMap = Maps.newHashMap();
  // Guards against double-close; close() is idempotent.
  private final AtomicBoolean closed = new AtomicBoolean(false);
  public CheckpointingLogFileWriter(AvroFileWriter avroFileWriter, CheckpointManager checkpointManager,
                                    long flushIntervalMs) {
    this.avroFileWriter = avroFileWriter;
    this.checkpointManager = checkpointManager;
    this.flushIntervalMs = flushIntervalMs;
  }
  @Override
  public void append(List<KafkaLogEvent> events) throws Exception {
    if (events.isEmpty()) {
      return;
    }
    // NOTE(review): the partition is taken from the first event only, so this
    // assumes all events in the batch come from the same Kafka partition —
    // TODO confirm with the caller.
    KafkaLogEvent event = events.get(0);
    int partition = event.getPartition();
    Checkpoint maxCheckpoint = partitionCheckpointMap.get(partition);
    maxCheckpoint = maxCheckpoint == null ? new Checkpoint(-1, -1) : maxCheckpoint;
    // Advance the partition's checkpoint to the largest next-offset in the batch.
    for (KafkaLogEvent e : events) {
      if (e.getNextOffset() > maxCheckpoint.getNextOffset()) {
        maxCheckpoint = new Checkpoint(e.getNextOffset(), e.getLogEvent().getTimeStamp());
      }
    }
    partitionCheckpointMap.put(partition, maxCheckpoint);
    // Write events first, then (maybe) checkpoint — so a crash between the two
    // can only replay events, never skip them.
    avroFileWriter.append(events);
    flush(false);
  }
  @Override
  public void close() throws IOException {
    // Only the first close performs the flush/close; later calls are no-ops.
    if (!closed.compareAndSet(false, true)) {
      return;
    }
    flush();
    avroFileWriter.close();
  }
  @Override
  public void flush() throws IOException {
    try {
      // Public flush is always forced, regardless of the flush interval.
      flush(true);
    } catch (Exception e) {
      LOG.error("Got exception: ", e);
      throw new IOException(e);
    }
  }
  // Flushes the Avro writer and persists checkpoints. When force is false the
  // flush is skipped unless flushIntervalMs has elapsed since the last one.
  private void flush(boolean force) throws Exception {
    long currentTs = System.currentTimeMillis();
    if (!force && currentTs - lastCheckpointTime < flushIntervalMs) {
      return;
    }
    avroFileWriter.flush();
    // Save the max checkpoint seen for each partition
    checkpointManager.saveCheckpoint(partitionCheckpointMap);
    lastCheckpointTime = currentTs;
  }
}
| apache-2.0 |
goldmansachs/reladomo | reladomo/src/main/java/com/gs/fw/common/mithra/behavior/persisted/PersistedSameTxBehavior.java | 11896 | /*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package com.gs.fw.common.mithra.behavior.persisted;
import com.gs.fw.common.mithra.*;
import com.gs.fw.common.mithra.attribute.*;
import com.gs.fw.common.mithra.attribute.update.*;
import com.gs.fw.common.mithra.behavior.TransactionalBehavior;
import com.gs.fw.common.mithra.behavior.state.PersistedState;
import com.gs.fw.common.mithra.util.Time;
import java.util.List;
import java.util.Date;
import java.sql.Timestamp;
import java.math.BigDecimal;
public class PersistedSameTxBehavior extends PersistedBehavior
{
public PersistedSameTxBehavior()
{
super(TRANSACTIONAL, null);
}
public MithraDataObject getCurrentDataForWrite(MithraTransactionalObject mithraObject)
{
return mithraObject.zGetTxDataForWrite();
}
public TransactionalBehavior enrollInTransactionForDelete(MithraTransactionalObject mithraObject, MithraTransaction tx, TransactionalState prevState)
{
// already enrolled
return this;
}
public TransactionalBehavior enrollInTransactionForRead(MithraTransactionalObject mto, MithraTransaction threadTx, TransactionalState prevState)
{
return this;
}
public TransactionalBehavior enrollInTransactionForWrite(MithraTransactionalObject mto, MithraTransaction threadTx, TransactionalState prevState)
{
return this;
}
public void setData(MithraTransactionalObject mithraObject, MithraDataObject newData)
{
mithraObject.zSetTxData(newData);
}
public void delete(MithraTransactionalObject obj)
{
MithraTransaction tx = MithraManagerProvider.getMithraManager().getCurrentTransaction();
tx.delete(obj);
obj.zGetCache().remove(obj);
obj.zSetTxPersistenceState(PersistedState.DELETED);
obj.zGetPortal().incrementClassUpdateCount();
}
public void deleteForRemote(MithraTransactionalObject obj, int hierarchyDepth)
{
MithraDataObject data = obj.zGetTxDataForRead();
MithraObjectPortal portal = data.zGetMithraObjectPortal(hierarchyDepth);
portal.getMithraObjectPersister().delete(data);
obj.zSetDeleted();
obj.zGetCache().remove(obj);
if (hierarchyDepth == 0)
{
obj.zSetTxPersistenceState(PersistedState.DELETED);
}
portal.incrementClassUpdateCount();
}
public void update(MithraTransactionalObject obj, AttributeUpdateWrapper updateWrapper, boolean isReadonly, boolean triggerHook)
{
if (isReadonly)
{
throw new MithraBusinessException("cannot change a readonly attribute which is already persisted in database");
}
applyUpdate(obj, updateWrapper, triggerHook);
}
public MithraDataObject update(MithraTransactionalObject obj, IntegerAttribute attr, int newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
IntegerUpdateWrapper updateWrapper = new IntegerUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
public MithraDataObject update(MithraTransactionalObject obj, ByteArrayAttribute attr, byte[] newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
ByteArrayUpdateWrapper updateWrapper = new ByteArrayUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
public MithraDataObject update(MithraTransactionalObject obj, BooleanAttribute attr, boolean newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
BooleanUpdateWrapper updateWrapper = new BooleanUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
public MithraDataObject update(MithraTransactionalObject obj, BigDecimalAttribute attr, BigDecimal newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
BigDecimalUpdateWrapper updateWrapper = new BigDecimalUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
public MithraDataObject update(MithraTransactionalObject obj, ByteAttribute attr, byte newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
ByteUpdateWrapper updateWrapper = new ByteUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
public MithraDataObject update(MithraTransactionalObject obj, CharAttribute attr, char newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
CharUpdateWrapper updateWrapper = new CharUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
public MithraDataObject update(MithraTransactionalObject obj, DateAttribute attr, Date newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
DateUpdateWrapper updateWrapper = new DateUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
public MithraDataObject update(MithraTransactionalObject obj, TimeAttribute attr, Time newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
TimeUpdateWrapper updateWrapper = new TimeUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
public MithraDataObject update(MithraTransactionalObject obj, DoubleAttribute attr, double newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
DoubleUpdateWrapper updateWrapper = new DoubleUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
public MithraDataObject update(MithraTransactionalObject obj, FloatAttribute attr, float newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
FloatUpdateWrapper updateWrapper = new FloatUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
public MithraDataObject update(MithraTransactionalObject obj, LongAttribute attr, long newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
LongUpdateWrapper updateWrapper = new LongUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
public MithraDataObject update(MithraTransactionalObject obj, ShortAttribute attr, short newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
ShortUpdateWrapper updateWrapper = new ShortUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
public MithraDataObject update(MithraTransactionalObject obj, StringAttribute attr, String newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
StringUpdateWrapper updateWrapper = new StringUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
public MithraDataObject update(MithraTransactionalObject obj, TimestampAttribute attr, Timestamp newValue, boolean readOnly, boolean triggerHook)
{
if (readOnly)
{
return throwReadOnlyException();
}
MithraDataObject data = this.getCurrentDataForWrite(obj);
TimestampUpdateWrapper updateWrapper = new TimestampUpdateWrapper(attr, data, newValue);
applyUpdate(obj, updateWrapper, triggerHook);
return data;
}
    // Always throws. Invoked when a caller attempts to modify a read-only or
    // primary-key attribute of an already-persisted object. Declared to return
    // MithraDataObject so callers can write "return throwReadOnlyException();".
    private MithraDataObject throwReadOnlyException()
    {
        throw new MithraBusinessException("cannot change a readonly or primary key attribute which is already persisted in database");
    }
private void applyUpdate(MithraTransactionalObject obj, AttributeUpdateWrapper updateWrapper, boolean triggerHook)
{
MithraManagerProvider.getMithraManager().getCurrentTransaction().update(obj, updateWrapper);
obj.zGetCache().reindexForTransaction(obj, updateWrapper);
updateWrapper.incrementUpdateCount();
if (triggerHook) obj.triggerUpdateHook(updateWrapper);
}
public void remoteUpdate(MithraTransactionalObject obj, List updateWrappers)
{
this.remoteUpdateForBatch(obj, updateWrappers);
((AttributeUpdateWrapper)updateWrappers.get(0)).getAttribute().getOwnerPortal().getMithraObjectPersister().update(obj, updateWrappers);
obj.zSetUpdated(updateWrappers);
}
public void remoteUpdateForBatch(MithraTransactionalObject obj, List updateWrappers)
{
MithraDataObject currentDataForWrite = this.getCurrentDataForWrite(obj);
for(int i=0;i<updateWrappers.size();i++)
{
AttributeUpdateWrapper updateWrapper = (AttributeUpdateWrapper) updateWrappers.get(i);
updateWrapper.setDataToUpdate(currentDataForWrite);
}
for(int i=0;i<updateWrappers.size();i++)
{
AttributeUpdateWrapper updateWrapper = (AttributeUpdateWrapper) updateWrappers.get(i);
obj.zGetCache().reindexForTransaction(obj, updateWrapper);
updateWrapper.incrementUpdateCount();
}
}
}
| apache-2.0 |
ameybarve15/incubator-geode | gemfire-core/src/main/java/com/gemstone/gemfire/internal/cache/wan/AbstractGatewaySenderEventProcessor.java | 47967 | /*=========================================================================
* Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* one or more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package com.gemstone.gemfire.internal.cache.wan;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.logging.log4j.Logger;
import com.gemstone.gemfire.CancelException;
import com.gemstone.gemfire.GemFireException;
import com.gemstone.gemfire.SystemFailure;
import com.gemstone.gemfire.cache.CacheException;
import com.gemstone.gemfire.cache.EntryEvent;
import com.gemstone.gemfire.cache.Operation;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionDestroyedException;
import com.gemstone.gemfire.cache.client.internal.Connection;
import com.gemstone.gemfire.cache.client.internal.pooling.ConnectionDestroyedException;
import com.gemstone.gemfire.cache.wan.GatewayEventFilter;
import com.gemstone.gemfire.cache.wan.GatewayQueueEvent;
import com.gemstone.gemfire.cache.wan.GatewaySender;
import com.gemstone.gemfire.internal.Version;
import com.gemstone.gemfire.internal.cache.BucketRegion;
import com.gemstone.gemfire.internal.cache.Conflatable;
import com.gemstone.gemfire.internal.cache.DistributedRegion;
import com.gemstone.gemfire.internal.cache.EntryEventImpl;
import com.gemstone.gemfire.internal.cache.EnumListenerEvent;
import com.gemstone.gemfire.internal.cache.EventID;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.PartitionedRegion;
import com.gemstone.gemfire.internal.cache.RegionQueue;
import com.gemstone.gemfire.internal.cache.wan.parallel.ConcurrentParallelGatewaySenderQueue;
import com.gemstone.gemfire.internal.cache.wan.parallel.ParallelGatewaySenderQueue;
import com.gemstone.gemfire.internal.cache.wan.serial.SerialGatewaySenderQueue;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.logging.LogService;
import com.gemstone.gemfire.internal.logging.LoggingThreadGroup;
import com.gemstone.gemfire.internal.logging.log4j.LocalizedMessage;
import com.gemstone.gemfire.pdx.internal.PeerTypeRegistration;
/**
* EventProcessor responsible for peeking from queue and handling over the events
* to the dispatcher.
* The queue could be SerialGatewaySenderQueue or ParallelGatewaySenderQueue or
* {@link ConcurrentParallelGatewaySenderQueue}.
* The dispatcher could be either GatewaySenderEventRemoteDispatcher or GatewaySenderEventCallbackDispatcher.
*
* @author Suranjan Kumar
* @since 7.0
*
*/
public abstract class AbstractGatewaySenderEventProcessor extends Thread {
  private static final Logger logger = LogService.getLogger();
  // Test hook: when enabled, the processor accumulates numEventsDispatched.
  public static boolean TEST_HOOK = false;
  // The region queue this processor drains; concrete type set by subclasses.
  protected RegionQueue queue;
  // Hands batches to the remote site (or to local callbacks for AEQs).
  protected GatewaySenderEventDispatcher dispatcher;
  // The owning sender; never changes after construction.
  protected final AbstractGatewaySender sender;
  /**
   * An int id used to identify each batch.
   */
  protected int batchId = 0;
  /**
   * A boolean verifying whether this <code>AbstractGatewaySenderEventProcessor</code>
   * is running.
   */
  private volatile boolean isStopped = true;
  /**
   * A boolean verifying whether this <code>AbstractGatewaySenderEventProcessor</code>
   * is paused.
   */
  protected volatile boolean isPaused = false;
  /**
   * A boolean indicating that the dispatcher thread for
   * this <code>AbstractGatewaySenderEventProcessor</code>
   * is now waiting for resuming
   */
  protected boolean isDispatcherWaiting = false;
  /**
   * A lock object used to control pausing this dispatcher
   */
  protected final Object pausedLock = new Object();
  public final Object runningStateLock = new Object();
  /**
   * A boolean verifying whether a warning has already been issued if the event
   * queue has reached a certain threshold.
   */
  protected boolean eventQueueSizeWarning = false;
  private Exception exception;
  /*
   * The batchIdToEventsMap contains a mapping between batch id and an array of
   * events. The first element of the array is the list of events peeked from
   * the queue. The second element of the array is the list of filtered events.
   * These are the events actually sent.
   */
  private Map<Integer, List<GatewaySenderEventImpl>[]> batchIdToEventsMap = Collections
      .synchronizedMap(new HashMap<Integer, List<GatewaySenderEventImpl>[]>());
  // Maps batch id to the PDX type-registry events prepended to that batch.
  private Map<Integer, List<GatewaySenderEventImpl>> batchIdToPDXEventsMap = Collections
      .synchronizedMap(new HashMap<Integer, List<GatewaySenderEventImpl>>());
  // PDX type events awaiting acknowledgement from the remote site.
  private List<GatewaySenderEventImpl> pdxSenderEventsList = new ArrayList<GatewaySenderEventImpl>();
  // PDX type key -> wrapped sender event; mirrors the local PDX types region.
  private Map<Object, GatewaySenderEventImpl> pdxEventsMap = new HashMap<Object,GatewaySenderEventImpl>();
  // When true, the PDX tracking state above is rebuilt from scratch (resend all).
  private volatile boolean rebuildPdxList = false;
  // When true, previously peeked events must be re-peeked (e.g. after a
  // connection reset); see resetLastPeekedEvents().
  private volatile boolean resetLastPeekedEvents;
  // Only maintained when TEST_HOOK is set.
  private long numEventsDispatched;
  /**
   * Creates the processor thread in the given thread group.
   *
   * @param createThreadGroup the logging thread group this thread belongs to
   * @param string the thread name
   * @param sender the gateway sender this processor serves; must be an
   *          {@link AbstractGatewaySender}
   */
  public AbstractGatewaySenderEventProcessor(LoggingThreadGroup createThreadGroup,
      String string, GatewaySender sender) {
    super(createThreadGroup, string);
    this.sender = (AbstractGatewaySender)sender;
  }
  /** Creates and initializes the underlying region queue for this processor. */
  abstract protected void initializeMessageQueue(String id);
  /**
   * Wraps the given cache event as a gateway sender event and enqueues it,
   * using {@code substituteValue} in place of the event's value when non-null.
   */
  public abstract void enqueueEvent(EnumListenerEvent operation,
      EntryEvent event, Object substituteValue) throws IOException,
      CacheException;
  // Subclass hook; presumably redistributes work across queues — implementation-specific.
  protected abstract void rebalance();
  /** Returns true if this processor has been stopped. */
  public boolean isStopped() {
    return this.isStopped;
  }
protected void setIsStopped(boolean isStopped) {
if (isStopped) {
this.isStopped = true;
this.failureLogInterval.clear();
} else {
this.isStopped = isStopped;
}
}
  /** Returns true if dispatching is currently paused. */
  public boolean isPaused() {
    return this.isPaused;
  }
  /**
   * Returns the region queue this processor drains.
   *
   * @return the queue
   */
  public RegionQueue getQueue() {
    return this.queue;
  }
/**
* Increment the batch id. This method is not synchronized because this
* dispatcher is the caller
*/
public void incrementBatchId() {
// If _batchId + 1 == maximum, then roll over
if (this.batchId + 1 == Integer.MAX_VALUE) {
this.batchId = -1;
}
this.batchId++;
}
/**
* Reset the batch id. This method is not synchronized because this dispatcher
* is the caller
*/
protected void resetBatchId() {
this.batchId = 0;
// dont reset first time when first batch is put for dispatch
//if (this.batchIdToEventsMap.size() == 1) {
// if (this.batchIdToEventsMap.containsKey(0)) {
// return;
// }
//}
//this.batchIdToEventsMap.clear();
this.resetLastPeekedEvents = true;
}
  /**
   * Returns the current batch id to be used to identify the next batch.
   *
   * @return the current batch id to be used to identify the next batch
   */
  protected int getBatchId() {
    return this.batchId;
  }
  // NOTE(review): the name is misleading — this returns the
  // resetLastPeekedEvents flag, i.e. whether previously peeked events must be
  // re-peeked (typically set after a connection reset).
  protected boolean isConnectionReset() {
    return this.resetLastPeekedEvents;
  }
  /** Removes a single event from the head of the queue. */
  protected void eventQueueRemove() throws CacheException,
      InterruptedException {
    this.queue.remove();
  }
  /** Removes the given number of events from the head of the queue. */
  protected void eventQueueRemove(int size) throws CacheException {
    this.queue.remove(size);
  }
  /** Takes the next event from the queue, blocking until one is available. */
  protected Object eventQueueTake() throws CacheException, InterruptedException {
    return this.queue.take();
  }
protected int eventQueueSize() {
// This should be local size instead of PR size. Fix for #48627
if (this.queue instanceof ParallelGatewaySenderQueue) {
return ((ParallelGatewaySenderQueue) queue).localSize();
}
if (this.queue instanceof ConcurrentParallelGatewaySenderQueue) {
return ((ConcurrentParallelGatewaySenderQueue) queue).localSize();
}
return this.queue.size();
}
  /**
   * Returns the gateway sender this processor serves.
   *
   * @return the sender
   */
  public AbstractGatewaySender getSender() {
    return this.sender;
  }
public void pauseDispatching() {
if (this.isPaused) {
return;
}
this.isPaused = true;
}
//merge44957: WHile merging 44957, need this method hence picked up this method from revision 42024.
public void waitForDispatcherToPause() {
if (!this.isPaused) {
throw new IllegalStateException("Should be trying to pause!");
}
boolean interrupted=false;
synchronized(this.pausedLock) {
while(!isDispatcherWaiting && !isStopped()) {
try {
this.pausedLock.wait();
} catch(InterruptedException e) {
interrupted = true;
}
}
}
if(interrupted) {
Thread.currentThread().interrupt();
}
}
public void resumeDispatching() {
if (!this.isPaused) {
return;
}
this.isPaused = false;
// Notify thread to resume
if (logger.isDebugEnabled()) {
logger.debug("{}: Resumed dispatching", this);
}
synchronized (this.pausedLock) {
this.pausedLock.notifyAll();
}
}
protected boolean stopped() {
if (this.isStopped) {
return true;
}
if (sender.getStopper().cancelInProgress() != null) {
return true;
}
return false;
}
  /**
   * When a batch fails, this keeps the last time when a failure was logged.
   * We don't want to swamp the logs in retries due to same batch failures.
   * Key: batch id. Value: { last-log-time-millis, current-log-interval-millis }.
   */
  private final ConcurrentHashMap<Integer, long[]> failureLogInterval =
      new ConcurrentHashMap<Integer, long[]>();
  /**
   * The maximum size of {@link #failureLogInterval} beyond which it will start
   * logging all failure instances. Hopefully this should never happen in
   * practice. Overridable via the gemfire.GatewaySender.FAILURE_MAP_MAXSIZE
   * system property.
   */
  protected static final int FAILURE_MAP_MAXSIZE = Integer.getInteger(
      "gemfire.GatewaySender.FAILURE_MAP_MAXSIZE", 1000000);
  /**
   * The maximum interval for logging failures of the same event in millis.
   * Overridable via the gemfire.GatewaySender.FAILURE_LOG_MAX_INTERVAL
   * system property.
   */
  protected static final int FAILURE_LOG_MAX_INTERVAL = Integer.getInteger(
      "gemfire.GatewaySender.FAILURE_LOG_MAX_INTERVAL", 300000);
  /**
   * Decides whether a failure-log message for the given batch should be
   * suppressed. Each batch's log interval grows exponentially (x4 per logged
   * failure) up to FAILURE_LOG_MAX_INTERVAL, so repeated failures of the same
   * batch do not swamp the logs. Returns false (log it) the first time a batch
   * fails, since putIfAbsent returns null on first insertion.
   */
  public final boolean skipFailureLogging(Integer batchId) {
    boolean skipLogging = false;
    // if map has become large then give up on new events but we don't expect
    // it to become too large in practice
    if (this.failureLogInterval.size() < FAILURE_MAP_MAXSIZE) {
      // first long in logInterval gives the last time when the log was done,
      // and the second tracks the current log interval to be used which
      // increases exponentially
      // multiple currentTimeMillis calls below may hinder performance
      // but not much to worry about since failures are expected to
      // be an infrequent occurrence (and if frequent then we have to skip
      // logging for quite a while in any case)
      long[] logInterval = this.failureLogInterval.get(batchId);
      if (logInterval == null) {
        // putIfAbsent returns null when we won the race and inserted, in which
        // case this failure is logged (skipLogging stays false).
        logInterval = this.failureLogInterval.putIfAbsent(batchId,
            new long[] { System.currentTimeMillis(), 1000 });
      }
      if (logInterval != null) {
        long currentTime = System.currentTimeMillis();
        if ((currentTime - logInterval[0]) < logInterval[1]) {
          skipLogging = true;
        }
        else {
          logInterval[0] = currentTime;
          // don't increase logInterval to beyond a limit (5 mins by default)
          if (logInterval[1] <= (FAILURE_LOG_MAX_INTERVAL / 4)) {
            logInterval[1] *= 4;
          }
          // TODO: should the retries be throttled by some sleep here?
        }
      }
    }
    return skipLogging;
  }
  /**
   * After a successful batch execution remove from failure map if present (i.e.
   * if the event had failed on a previous try).
   *
   * @return true if the batch had previously failed and its throttle entry was
   *         removed
   */
  public final boolean removeEventFromFailureMap(Integer batchId) {
    return this.failureLogInterval.remove(batchId) != null;
  }
  /**
   * Core dispatch loop, run by this processor thread until stopped. Each
   * iteration: peeks a batch of up to batchSize events from the region queue,
   * drops UPDATE_VERSION_STAMP events when the receiver cannot accept them,
   * runs the remaining events through the configured GatewayEventFilters,
   * marks events from no-longer-primary buckets as possible duplicates (AEQ
   * case), prepends any unacknowledged PDX type-registry events, conflates
   * the batch if enabled, and hands it to the dispatcher. Successful batches
   * are removed from the queue (callback dispatcher) or retained in
   * batchIdToEventsMap until the remote ack arrives (remote dispatcher);
   * failed batches are retried with possibleDuplicate set.
   */
  protected void processQueue() {
    final boolean isDebugEnabled = logger.isDebugEnabled();
    final boolean isTraceEnabled = logger.isTraceEnabled();
    final int batchSize = sender.getBatchSize();
    final int batchTimeInterval = sender.getBatchTimeInterval();
    final GatewaySenderStats statistics = this.sender.getStatistics();
    if (isDebugEnabled) {
      logger.debug("STARTED processQueue {}", this.getId());
    }
    // list of the events peeked from queue
    List<GatewaySenderEventImpl> events = null;
    // list of the above peeked events which are filtered through the filters attached
    List<GatewaySenderEventImpl> filteredList = new ArrayList<GatewaySenderEventImpl>();
    // list of the PDX events which are peeked from PDX region and need to go across the site
    List<GatewaySenderEventImpl> pdxEventsToBeDispatched = new ArrayList<GatewaySenderEventImpl>();
    // list of filteredList + pdxEventsToBeDispatched events
    List<GatewaySenderEventImpl> eventsToBeDispatched = new ArrayList<GatewaySenderEventImpl>();
    for (;;) {
      if (stopped()) {
        break;
      }
      try {
        // Check if paused. If so, wait for resumption
        if (this.isPaused) {
          waitForResumption();
        }
        // Peek a batch
        if (isDebugEnabled) {
          logger.debug("Attempting to peek a batch of {} events", batchSize);
        }
        for (;;) {
          // check before sleeping
          if (stopped()) {
            if (isDebugEnabled) {
              logger.debug("GatewaySenderEventProcessor is stopped. Returning without peeking events.");
            }
            break;
          }
          // Check if paused. If so, wait for resumption
          if (this.isPaused) {
            waitForResumption();
          }
          // We need to initialize connection in dispatcher before sending first
          // batch here ONLY, because we need GatewayReceiver's version for
          // filtering VERSION_ACTION events from being sent.
          boolean sendUpdateVersionEvents = shouldSendVersionEvents(this.dispatcher);
          // sleep a little bit, look for events
          boolean interrupted = Thread.interrupted();
          try {
            if(resetLastPeekedEvents) {
              resetLastPeekedEvents();
              resetLastPeekedEvents = false;
            }
            {
              // Below code was added to consider the case of queue region is
              // destroyed due to userPRs localdestroy or destroy operation.
              // In this case we were waiting for queue region to get created
              // and then only peek from the region queue.
              // With latest change of multiple PR with single ParallelSender, we
              // can't wait for a particular region queue to get recreated as there
              // will be other region queues from which events can be picked
              /*// Check if paused. If so, wait for resumption
              if (this.isPaused) {
                waitForResumption();
              }
              synchronized (this.getQueue()) {
                // its quite possible that the queue region is
                // destroyed(userRegion
                // localdestroy destroys shadow region locally). In this case
                // better to
                // wait for shadows region to get recreated instead of keep loop
                // for peeking events
                if (this.getQueue().getRegion() == null
                    || this.getQueue().getRegion().isDestroyed()) {
                  try {
                    this.getQueue().wait();
                    continue; // this continue is important to recheck the
                    // conditions of stop/ pause after the wait of 1 sec
                  }
                  catch (InterruptedException e1) {
                    Thread.currentThread().interrupt();
                  }
                }
              }*/
            }
            events = this.queue.peek(batchSize, batchTimeInterval);
          } catch (InterruptedException e) {
            interrupted = true;
            this.sender.getCancelCriterion().checkCancelInProgress(e);
            continue; // keep trying
          } finally {
            if (interrupted) {
              Thread.currentThread().interrupt();
            }
          }
          if (events.isEmpty()) {
            continue; // nothing to do!
          }
          // this list is accessed by the ack reader thread so create new every time. #50220
          filteredList = new ArrayList<GatewaySenderEventImpl>();
          filteredList.addAll(events);
          for (GatewayEventFilter filter : sender.getGatewayEventFilters()) {
            Iterator<GatewaySenderEventImpl> itr = filteredList.iterator();
            while (itr.hasNext()) {
              GatewayQueueEvent event = itr.next();
              // This seems right place to prevent transmission of UPDATE_VERSION events if receiver's
              // version is < 7.0.1, especially to prevent another loop over events.
              if (!sendUpdateVersionEvents && event.getOperation() == Operation.UPDATE_VERSION_STAMP) {
                if (isTraceEnabled) {
                  logger.trace("Update Event Version event: {} removed from Gateway Sender queue: {}", event, sender);
                }
                itr.remove();
                statistics.incEventsNotQueued();
              }
              boolean transmit = filter.beforeTransmit(event);
              if (!transmit) {
                if (isDebugEnabled) {
                  logger.debug("{}: Did not transmit event due to filtering: {}", sender.getId(), event);
                }
                itr.remove();
                statistics.incEventsFiltered();
              }
            }
          }
          /*if (filteredList.isEmpty()) {
            eventQueueRemove(events.size());
            continue;
          }*/
          // if the bucket becomes secondary after the event is picked from it,
          // check again before dispatching the event. Do this only for
          // AsyncEventQueue since possibleDuplicate flag is not used in WAN.
          if (this.getSender().isParallel()
              && (this.getDispatcher() instanceof GatewaySenderEventCallbackDispatcher)) {
            Iterator<GatewaySenderEventImpl> itr = filteredList.iterator();
            while (itr.hasNext()) {
              GatewaySenderEventImpl event = (GatewaySenderEventImpl)itr.next();
              PartitionedRegion qpr = null;
              if (this.getQueue() instanceof ConcurrentParallelGatewaySenderQueue) {
                qpr = ((ConcurrentParallelGatewaySenderQueue)this.getQueue())
                    .getRegion(event.getRegionPath());
              }
              else {
                qpr = ((ParallelGatewaySenderQueue)this.getQueue())
                    .getRegion(event.getRegionPath());
              }
              int bucketId = event.getBucketId();
              // if the bucket from which the event has been picked is no longer
              // primary, then set possibleDuplicate to true on the event
              if (qpr != null) {
                BucketRegion bucket = qpr.getDataStore().getLocalBucketById(
                    bucketId);
                if (bucket == null || !bucket.getBucketAdvisor().isPrimary()) {
                  event.setPossibleDuplicate(true);
                }
              }
              if (isDebugEnabled) {
                logger.debug("Bucket id: {} is no longer primary on this node. The event {} will be dispatched from this node with possibleDuplicate set to true.",
                    bucketId, event);
              }
            }
          }
          eventsToBeDispatched.clear();
          if (!(this.dispatcher instanceof GatewaySenderEventCallbackDispatcher)) {
            // store the batch before dispatching so it can be retrieved by the ack thread.
            List<GatewaySenderEventImpl>[] eventsArr = (List<GatewaySenderEventImpl>[]) new List[2];
            eventsArr[0] = events;
            eventsArr[1] = filteredList;
            this.batchIdToEventsMap.put(getBatchId(), eventsArr);
            // find out PDX events and append them in front of the list
            pdxEventsToBeDispatched = addPDXEvent();
            eventsToBeDispatched.addAll(pdxEventsToBeDispatched);
            if (!pdxEventsToBeDispatched.isEmpty()) {
              this.batchIdToPDXEventsMap.put(getBatchId(),
                  pdxEventsToBeDispatched);
            }
          }
          eventsToBeDispatched.addAll(filteredList);
          // Conflate the batch. Event conflation only occurs on the queue.
          // Once an event has been peeked into a batch, it won't be
          // conflated. So if events go through the queue quickly (as in the
          // no-ack case), then multiple events for the same key may end up in
          // the batch.
          List conflatedEventsToBeDispatched = conflate(eventsToBeDispatched);
          if (isDebugEnabled) {
            logBatchFine("During normal processing, dispatching the following ", conflatedEventsToBeDispatched);
          }
          boolean success = this.dispatcher.dispatchBatch(conflatedEventsToBeDispatched,
              sender.isRemoveFromQueueOnException(), false);
          if (success) {
            if (isDebugEnabled) {
              logger.debug("During normal processing, successfully dispatched {} events (batch #{})",
                  conflatedEventsToBeDispatched.size(), getBatchId());
            }
            removeEventFromFailureMap(getBatchId());
          }
          else {
            if (!skipFailureLogging(getBatchId())) {
              logger.warn(LocalizedMessage.create(
                  LocalizedStrings.GatewayImpl_EVENT_QUEUE_DISPATCH_FAILED, new Object[] { filteredList.size(), getBatchId() }));
            }
          }
          // check again, don't do post-processing if we're stopped.
          if (stopped()) {
            break;
          }
          // If the batch is successfully processed, remove it from the queue.
          if (success) {
            if (this.dispatcher instanceof GatewaySenderEventCallbackDispatcher) {
              handleSuccessfulBatchDispatch(conflatedEventsToBeDispatched, events);
            } else {
              // remote dispatch: removal happens on ack (handleSuccessBatchAck)
              incrementBatchId();
            }
            // pdx related gateway sender events need to be updated for
            // isDispatched
            for (GatewaySenderEventImpl pdxGatewaySenderEvent : pdxEventsToBeDispatched) {
              pdxGatewaySenderEvent.isDispatched = true;
            }
            if (TEST_HOOK) {
              this.numEventsDispatched += conflatedEventsToBeDispatched.size();
            }
          } // successful batch
          else { // The batch was unsuccessful.
            if (this.dispatcher instanceof GatewaySenderEventCallbackDispatcher) {
              handleUnSuccessfulBatchDispatch(events);
              this.resetLastPeekedEvents = true;
            } else {
              handleUnSuccessfulBatchDispatch(events);
              if (!resetLastPeekedEvents) {
                // retry (isRetry=true) every 100ms until the batch goes through,
                // the processor stops, or the connection is reset
                while (!this.dispatcher.dispatchBatch(conflatedEventsToBeDispatched,
                    sender.isRemoveFromQueueOnException(), true)) {
                  if (isDebugEnabled) {
                    logger.debug("During normal processing, unsuccessfully dispatched {} events (batch #{})",
                        conflatedEventsToBeDispatched.size(), getBatchId());
                  }
                  if (stopped() || resetLastPeekedEvents) {
                    break;
                  }
                  try {
                    Thread.sleep(100);
                  } catch (InterruptedException ie) {
                    Thread.currentThread().interrupt();
                  }
                }
                incrementBatchId();
              }
            }
          } // unsuccessful batch
          if (logger.isDebugEnabled()) {
            logger.debug("Finished processing events (batch #{})", (getBatchId()-1));
          }
        } // for
      } catch (RegionDestroyedException e) {
        // setting this flag will ensure that already peeked events will make
        // it to the next batch before new events are peeked (fix for #48784)
        this.resetLastPeekedEvents = true;
        // most possible case is ParallelWan when user PR is locally destroyed
        // shadow PR is also locally destroyed
        if (logger.isDebugEnabled()) {
          logger.debug("Observed RegionDestroyedException on Queue's region.");
        }
      } catch (CancelException e) {
        logger.debug("Caught cancel exception", e);
        setIsStopped(true);
      } catch (VirtualMachineError err) {
        SystemFailure.initiateFailure(err);
        // If this ever returns, rethrow the error. We're poisoned
        // now, so don't let this thread continue.
        throw err;
      } catch (Throwable e) {
        // Whenever you catch Error or Throwable, you must also
        // catch VirtualMachineError (see above). However, there is
        // _still_ a possibility that you are dealing with a cascading
        // error condition, so you also need to check to see if the JVM
        // is still usable:
        SystemFailure.checkFailure();
        // Well, OK. Some strange nonfatal thing.
        if (stopped()) {
          return; // don't complain, just exit.
        }
        if (events != null) {
          handleUnSuccessfulBatchDispatch(events);
        }
        this.resetLastPeekedEvents = true;
        if (e instanceof GatewaySenderException) {
          Throwable cause = e.getCause();
          if (cause instanceof IOException
              || e instanceof GatewaySenderConfigurationException) {
            continue;
          }
        }
        // We'll log it but continue on with the next batch.
        logger.warn(LocalizedMessage.create(LocalizedStrings.GatewayImpl_AN_EXCEPTION_OCCURRED_THE_DISPATCHER_WILL_CONTINUE), e);
      }
    } // for
  }
  /**
   * Whether UPDATE_VERSION_STAMP events should be transmitted. Currently
   * hard-wired to false: version events are never sent by this processor,
   * regardless of the dispatcher type.
   */
  private boolean shouldSendVersionEvents(
      GatewaySenderEventDispatcher dispatcher) {
    // only in case of remote dispatcher we send versioned events
    return false;
  }
  /**
   * Conflates a batch so that, per (region, key, operation) conflation key,
   * only the latest conflatable event survives. Applied only when batch
   * conflation is enabled and the batch holds more than one event; otherwise
   * the input list is returned unchanged.
   */
  private List conflate(List<GatewaySenderEventImpl> events) {
    List<GatewaySenderEventImpl> conflatedEvents = null;
    // Conflate the batch if necessary
    if (this.sender.isBatchConflationEnabled() && events.size() > 1) {
      Map<ConflationKey, GatewaySenderEventImpl> conflatedEventsMap = new LinkedHashMap<ConflationKey, GatewaySenderEventImpl>();
      conflatedEvents = new ArrayList<GatewaySenderEventImpl>();
      for (GatewaySenderEventImpl gsEvent : events) {
        // Determine whether the event should be conflated.
        if (gsEvent.shouldBeConflated()) {
          // The event should be conflated. Create the conflation key
          // (comprised of the event's region, key and the operation).
          ConflationKey key = new ConflationKey(gsEvent.getRegion().getFullPath(), gsEvent.getKeyToConflate(),
              gsEvent.getOperation());
          // Attempt to remove the key. If the entry is removed, that means a
          // duplicate key was found. If not, this is a no-op.
          // NOTE: remove-then-put (rather than a plain put) deliberately moves
          // the entry to the END of the LinkedHashMap, so the surviving event
          // keeps the position of its latest occurrence in the batch.
          conflatedEventsMap.remove(key);
          // Add the key to the end of the map.
          conflatedEventsMap.put(key, gsEvent);
        }
        else {
          // The event should not be conflated (create or destroy). Add it to
          // the map. A plain put leaves an existing entry's position unchanged
          // and only replaces its value.
          ConflationKey key = new ConflationKey(gsEvent.getRegion().getFullPath(), gsEvent.getKeyToConflate(),
              gsEvent.getOperation());
          conflatedEventsMap.put(key, gsEvent);
        }
      }
      // Iterate the map and add the events to the conflated events list
      for (GatewaySenderEventImpl gei : conflatedEventsMap.values()) {
        conflatedEvents.add(gei);
      }
      // Increment the events conflated from batches statistic
      this.sender.getStatistics().incEventsConflatedFromBatches(
          events.size() - conflatedEvents.size());
    }
    else {
      conflatedEvents = events;
    }
    return conflatedEvents;
  }
  /**
   * Builds the list of PDX type-registry events that still need to be sent to
   * the remote site; these are prepended to the next batch by the caller.
   * Newly discovered PDX region entries are wrapped as AFTER_UPDATE gateway
   * events and tracked until acknowledged.
   */
  private List<GatewaySenderEventImpl> addPDXEvent() throws IOException {
    List<GatewaySenderEventImpl> pdxEventsToBeDispatched = new ArrayList<GatewaySenderEventImpl>();
    // get the PDX types region
    GemFireCacheImpl cache = (GemFireCacheImpl) this.sender.getCache();
    Region<Object, Object> pdxRegion = cache
        .getRegion(PeerTypeRegistration.REGION_NAME);
    if(rebuildPdxList) {
      // resend everything: forget what we believe the remote site has seen
      pdxEventsMap.clear();
      pdxSenderEventsList.clear();
      rebuildPdxList = false;
    }
    // find out the list of the PDX events which need to be sent across to the
    // remote site
    // these events will be added to list pdxSenderEventsList. I am expecting
    // that PDX events will only be added to the PDX region; no deletion happens
    // on the PDX region
    if (pdxRegion != null && pdxRegion.size() != pdxEventsMap.size()) {
      for (Map.Entry<Object, Object> typeEntry : pdxRegion.entrySet()) {
        if(!pdxEventsMap.containsKey(typeEntry.getKey())){
          EntryEventImpl event = new EntryEventImpl(
              (LocalRegion) pdxRegion, Operation.UPDATE,
              typeEntry.getKey(), typeEntry.getValue(), null, false,
              cache.getMyId());
          event.setEventId(new EventID(cache.getSystem()));
          List<Integer> allRemoteDSIds = new ArrayList<Integer>();
          for (GatewaySender sender : cache.getGatewaySenders()) {
            allRemoteDSIds.add(sender.getRemoteDSId());
          }
          GatewaySenderEventCallbackArgument geCallbackArg = new GatewaySenderEventCallbackArgument(
              event.getRawCallbackArgument(), this.sender.getMyDSId(),
              allRemoteDSIds, true);
          event.setCallbackArgument(geCallbackArg);
          GatewaySenderEventImpl pdxSenderEvent = new GatewaySenderEventImpl(
              EnumListenerEvent.AFTER_UPDATE, event, null);
          pdxEventsMap.put(typeEntry.getKey(), pdxSenderEvent);
          pdxSenderEventsList.add(pdxSenderEvent);
        }
      }
    }
    Iterator<GatewaySenderEventImpl> iterator = pdxSenderEventsList.iterator();
    while(iterator.hasNext()){
      GatewaySenderEventImpl pdxEvent = iterator.next();
      if (pdxEvent.isAcked) {
        // Since this is acked, it means it has reached the remote site. Don't
        // add to pdxEventsToBeDispatched
        iterator.remove();
        continue;
      }
      if (pdxEvent.isDispatched) {
        // Dispatched does not mean that the event has reached the remote site.
        // We may need to send it again if there is a problem while receiving
        // the ack containing this event. Don't add to pdxEventsToBeDispatched
        continue;
      }
      pdxEventsToBeDispatched.add(pdxEvent);
    }
    if(!pdxEventsToBeDispatched.isEmpty() && logger.isDebugEnabled()){
      logger.debug("List of PDX Event to be dispatched : {}", pdxEventsToBeDispatched);
    }
    // add all these pdx events before filtered events
    return pdxEventsToBeDispatched;
  }
/**
* Mark all PDX types as requiring dispatch so that they will be
* sent over the connection again.
* @param remotePdxSize
*/
public void checkIfPdxNeedsResend(int remotePdxSize) {
GemFireCacheImpl cache = (GemFireCacheImpl) this.sender.getCache();
Region<Object, Object> pdxRegion = cache
.getRegion(PeerTypeRegistration.REGION_NAME);
//The peer has not seen all of our PDX types. This may be because
//they have been lost on the remote side. Resend the PDX types.
if(pdxRegion != null && pdxRegion.size() > remotePdxSize) {
rebuildPdxList = true;
}
}
private void resetLastPeekedEvents() {
this.batchIdToEventsMap.clear();
// make sure that when there is problem while receiving ack, pdx gateway
// sender events isDispatched is set to false so that same events will be
// dispatched in next batch
for(Map.Entry<Integer, List<GatewaySenderEventImpl>> entry : this.batchIdToPDXEventsMap.entrySet()){
for(GatewaySenderEventImpl event : entry.getValue()){
event.isDispatched = false;
}
}
this.batchIdToPDXEventsMap.clear();
if(this.queue instanceof SerialGatewaySenderQueue)
((SerialGatewaySenderQueue)this.queue).resetLastPeeked();
else if (this.queue instanceof ParallelGatewaySenderQueue){
((ParallelGatewaySenderQueue)this.queue).resetLastPeeked();
}else{
//we will never come here
throw new RuntimeException("resetLastPeekedEvents : no matching queue found " + this);
}
}
private void handleSuccessfulBatchDispatch(List filteredList, List events) {
if (filteredList != null) {
for (GatewayEventFilter filter : sender.getGatewayEventFilters()) {
for (Iterator i = filteredList.iterator(); i.hasNext();) {
Object o = i.next();
if (o != null && o instanceof GatewaySenderEventImpl) {
try {
filter.afterAcknowledgement((GatewaySenderEventImpl)o);
} catch (Exception e) {
logger
.fatal(
LocalizedMessage
.create(
LocalizedStrings.GatewayEventFilter_EXCEPTION_OCCURED_WHILE_HANDLING_CALL_TO_0_AFTER_ACKNOWLEDGEMENT_FOR_EVENT_1,
new Object[] { filter.toString(), o }), e);
}
}
}
}
}
filteredList.clear();
eventQueueRemove(events.size());
final GatewaySenderStats statistics = this.sender.getStatistics();
int queueSize = eventQueueSize();
// Log an alert for each event if necessary
if (this.sender.getAlertThreshold() > 0) {
Iterator it = events.iterator();
long currentTime = System.currentTimeMillis();
while (it.hasNext()) {
Object o = it.next();
if (o != null && o instanceof GatewaySenderEventImpl) {
GatewaySenderEventImpl ge = (GatewaySenderEventImpl)o;
if (ge.getCreationTime() + this.sender.getAlertThreshold() < currentTime) {
logger.warn(LocalizedMessage.create(LocalizedStrings.GatewayImpl_EVENT_QUEUE_ALERT_OPERATION_0_REGION_1_KEY_2_VALUE_3_TIME_4,
new Object[] { ge.getOperation(), ge.getRegionPath(), ge.getKey(),
ge.getDeserializedValue(), currentTime - ge.getCreationTime() }));
statistics.incEventsExceedingAlertThreshold();
}
}
}
}
if (this.eventQueueSizeWarning
&& queueSize <= AbstractGatewaySender.QUEUE_SIZE_THRESHOLD) {
logger.info(LocalizedMessage.create(
LocalizedStrings.GatewayImpl_THE_EVENT_QUEUE_SIZE_HAS_DROPPED_BELOW_THE_THRESHOLD_0,
AbstractGatewaySender.QUEUE_SIZE_THRESHOLD));
this.eventQueueSizeWarning = false;
}
incrementBatchId();
}
private void handleUnSuccessfulBatchDispatch(List events) {
final GatewaySenderStats statistics = this.sender.getStatistics();
statistics.incBatchesRedistributed();
// Set posDup flag on each event in the batch
Iterator it = events.iterator();
while (it.hasNext() && !this.isStopped) {
Object o = it.next();
if (o != null && o instanceof GatewaySenderEventImpl) {
GatewaySenderEventImpl ge = (GatewaySenderEventImpl)o;
ge.setPossibleDuplicate(true);
}
}
}
  /**
   * In case of BatchException we expect that the dispatcher has removed all
   * the events till the event that threw BatchException. Records the
   * redistribution and forces the remaining peeked events to be re-peeked.
   */
  public void handleException() {
    final GatewaySenderStats statistics = this.sender.getStatistics();
    statistics.incBatchesRedistributed();
    this.resetLastPeekedEvents = true;
  }
/**
 * Handles a positive acknowledgement for the batch with the given id:
 * marks the PDX events of that batch as acknowledged, invokes
 * {@code afterAcknowledgement} of every configured gateway event filter on
 * the filter-passed events, and finally removes the dispatched events from
 * the queue.
 *
 * @param batchId the id of the successfully acknowledged batch
 */
public void handleSuccessBatchAck(int batchId) {
// this is to acknowledge PDX related events
List<GatewaySenderEventImpl> pdxEvents = this.batchIdToPDXEventsMap
.remove(batchId);
if (pdxEvents != null) {
for (GatewaySenderEventImpl senderEvent : pdxEvents) {
senderEvent.isAcked = true;
}
}
List<GatewaySenderEventImpl>[] eventsArr = this.batchIdToEventsMap
.remove(batchId);
if (eventsArr != null) {
// Index 1 holds the events that passed the gateway event filters.
List<GatewaySenderEventImpl> filteredEvents = eventsArr[1];
for (GatewayEventFilter filter : sender.getGatewayEventFilters()) {
for (GatewaySenderEventImpl event : filteredEvents) {
try {
filter.afterAcknowledgement(event);
} catch (Exception e) {
// A misbehaving filter must not abort the rest of the ack handling.
logger
.fatal(
LocalizedMessage
.create(
LocalizedStrings.GatewayEventFilter_EXCEPTION_OCCURED_WHILE_HANDLING_CALL_TO_0_AFTER_ACKNOWLEDGEMENT_FOR_EVENT_1,
new Object[] { filter.toString(), event }), e);
}
}
}
// Index 0 holds the full set of events that made up the batch.
List<GatewaySenderEventImpl> events = eventsArr[0];
if (logger.isDebugEnabled()) {
logger.debug("Removing events from the queue {}", events.size());
}
eventQueueRemove(events.size());
}
}
/**
 * Handles a negative acknowledgement for the batch with the given id:
 * records the redistribution in the sender statistics and marks every
 * event of that batch as a possible duplicate, since the batch may be
 * dispatched again.
 *
 * @param bId the id of the batch that was not acknowledged successfully
 */
public void handleUnSuccessBatchAck(int bId) {
  this.sender.getStatistics().incBatchesRedistributed();
  // Set the possible-duplicate flag on each event in the batch, if the
  // batch is still being tracked; bail out early when stopping.
  List<GatewaySenderEventImpl>[] eventsArr = this.batchIdToEventsMap.get(bId);
  if (eventsArr != null) {
    List<GatewaySenderEventImpl> events = eventsArr[0];
    Iterator it = events.iterator();
    while (it.hasNext() && !this.isStopped) {
      Object o = it.next();
      // instanceof already rejects null, so no separate null check is needed
      if (o instanceof GatewaySenderEventImpl) {
        GatewaySenderEventImpl ge = (GatewaySenderEventImpl) o;
        ge.setPossibleDuplicate(true);
      }
    }
  }
}
//merge44957: While merging 44957, changed this method as per revision 42024.
/**
 * Blocks the dispatcher thread while this processor is paused and returns
 * once it has been resumed. No-op when the processor is not paused.
 *
 * @throws InterruptedException if interrupted while waiting on the pause lock
 */
protected void waitForResumption() throws InterruptedException {
synchronized (this.pausedLock) {
if (!this.isPaused) {
return;
}
if (logger.isDebugEnabled()) {
logger.debug("GatewaySenderEventProcessor is paused. Waiting for Resumption");
}
this.isDispatcherWaiting = true;
// notifyAll() here presumably signals the pausing thread that the
// dispatcher has observed the pause (see isDispatcherWaiting); the pause
// side is not visible in this file -- confirm against the pause logic.
this.pausedLock.notifyAll();
while (this.isPaused) {
this.pausedLock.wait();
}
this.isDispatcherWaiting = false;
}
}
/**
 * Creates the {@link GatewaySenderEventDispatcher} used by this processor;
 * implemented by concrete subclasses.
 */
public abstract void initializeEventDispatcher();
/** @return the dispatcher created by {@link #initializeEventDispatcher()} */
public GatewaySenderEventDispatcher getDispatcher(){
return this.dispatcher;
}
/**
 * @return the live map from batch id to event lists; index 0 holds all
 *         batch events, index 1 the filter-passed events (see
 *         {@code handleSuccessBatchAck})
 */
public Map<Integer, List<GatewaySenderEventImpl>[]> getBatchIdToEventsMap() {
return this.batchIdToEventsMap;
}
/** @return the live map from batch id to the PDX events of that batch */
public Map<Integer, List<GatewaySenderEventImpl>> getBatchIdToPDXEventsMap() {
return this.batchIdToPDXEventsMap;
}
/**
 * Thread entry point: publishes the running status and then processes the
 * event queue until stopped or cancelled.
 */
@Override
public void run() {
try {
setRunningStatus();
processQueue();
} catch (CancelException e) {
// The cache/system is shutting down; only log if we were not already stopped.
if (!this.isStopped()) {
logger.info(LocalizedMessage.create(LocalizedStrings.GatewayImpl_A_CANCELLATION_OCCURRED_STOPPING_THE_DISPATCHER));
setIsStopped(true);
}
} catch (VirtualMachineError err) {
SystemFailure.initiateFailure(err);
// If this ever returns, rethrow the error. We're poisoned
// now, so don't let this thread continue.
throw err;
} catch (Throwable e) {
// Whenever you catch Error or Throwable, you must also
// catch VirtualMachineError (see above). However, there is
// _still_ a possibility that you are dealing with a cascading
// error condition, so you also need to check to see if the JVM
// is still usable:
SystemFailure.checkFailure();
logger.fatal(LocalizedMessage.create(LocalizedStrings.GatewayImpl_MESSAGE_DISPATCH_FAILED_DUE_TO_UNEXPECTED_EXCEPTION), e);
}
}
/**
 * Initializes the event dispatcher and publishes the resulting
 * running/stopped state (plus any initialization failure) to threads
 * waiting on {@code runningStateLock}; rethrows the failure afterwards.
 *
 * @throws Exception the exception raised while initializing the dispatcher
 */
public void setRunningStatus() throws Exception {
GemFireException ex = null;
try {
this.initializeEventDispatcher();
}
catch (GemFireException e) {
ex = e;
}
synchronized (this.runningStateLock) {
if (ex != null) {
this.setException(ex);
setIsStopped(true);
}
else {
setIsStopped(false);
}
// Wake any thread waiting for this processor to report its start result.
this.runningStateLock.notifyAll();
}
if (ex != null) {
throw ex;
}
}
/** Records an initialization failure, exposed via {@link #getException()}. */
public void setException(GemFireException ex) {
this.exception = ex;
}
/** @return the last recorded failure, or {@code null} if none was recorded */
public Exception getException(){
return this.exception;
}
/**
 * Stops the dispatcher from dispatching events . The dispatcher will stay
 * alive for a predefined time OR until its queue is empty.
 *
 * @see AbstractGatewaySender#MAXIMUM_SHUTDOWN_WAIT_TIME
 */
public void stopProcessing() {
if (!this.isAlive()) {
return;
}
// Unpause first so the dispatcher thread can observe the stop request.
resumeDispatching();
if (logger.isDebugEnabled()) {
logger.debug("{}: Notifying the dispatcher to terminate", this);
}
// If this is the primary, stay alive for a predefined time
// OR until the queue becomes empty
if (this.sender.isPrimary()) {
// A limit of -1 means: wait until the queue has fully drained.
if (AbstractGatewaySender.MAXIMUM_SHUTDOWN_WAIT_TIME == -1) {
try {
while (!(this.queue.size() == 0)) {
Thread.sleep(5000);
if (logger.isDebugEnabled()) {
logger.debug("{}: Waiting for the queue to get empty.", this);
}
}
}
catch (InterruptedException e) {
// interrupted
// NOTE(review): interrupt status is not restored here -- confirm this
// is intentional on the shutdown path.
}
catch (CancelException e) {
// cancelled
}
} else {
try {
Thread.sleep(AbstractGatewaySender.MAXIMUM_SHUTDOWN_WAIT_TIME * 1000);
} catch (InterruptedException e) {/* ignore */
// interrupted
}
}
}
dispatcher.stop();
//set isStopped to true
setIsStopped(true);
if (this.isAlive()) {
this.interrupt();
if (logger.isDebugEnabled()) {
logger.debug("{}: Joining with the dispatcher thread upto limit of 5 seconds", this);
}
try {
this.join(5000); // wait for our thread to stop
if (this.isAlive()) {
logger.warn(LocalizedMessage.create(LocalizedStrings.GatewayImpl_0_DISPATCHER_STILL_ALIVE_EVEN_AFTER_JOIN_OF_5_SECONDS, this));
// if the server machine crashed or there was a nic failure, we need
// to terminate the socket connection now to avoid a hang when closing
// the connections later
// try to stop it again
dispatcher.stop();
this.batchIdToEventsMap.clear();
}
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
logger.warn(LocalizedMessage.create(LocalizedStrings.GatewayImpl_0_INTERRUPTEDEXCEPTION_IN_JOINING_WITH_DISPATCHER_THREAD, this));
}
}
if (logger.isDebugEnabled()) {
logger.debug("Stopped dispatching: {}", this);
}
}
/**
 * Destroys the backing queue region, logging a warning first if the queue
 * still contains data at the time of destruction.
 */
protected void destroyProcessor() {
if (logger.isDebugEnabled()) {
logger.debug("Destroying dispatcher");
}
try {
try {
if (this.queue.peek() != null) {
logger.warn(LocalizedMessage.create(LocalizedStrings.GatewayImpl_DESTROYING_GATEWAYEVENTDISPATCHER_WITH_ACTIVELY_QUEUED_DATA));
}
} catch (InterruptedException e) {
/*
* ignore,
*/
// TODO: peek() is not expected to throw here; if that can be proven, assert it.
// NOTE(review): the interrupt status is not restored -- confirm this is
// acceptable during destruction.
}
} catch (CacheException ignore) {
// just checking in case we should log a warning
} finally {
this.queue.getRegion().localDestroyRegion();
if (logger.isDebugEnabled()) {
logger.debug("Destroyed dispatcher");
}
}
}
/**
 * Intentionally empty in this base implementation; subclasses that register
 * a cache listener presumably override this to deregister it -- confirm.
 */
public void removeCacheListener(){
}
/**
 * Logs a batch of events at debug level, one line per event, including
 * each event's id, key, deserialized value and shadow key.
 *
 * @param message prefix describing why the batch is being logged
 * @param events  the batch of events to log; ignored when {@code null}
 */
public void logBatchFine(String message, List<GatewaySenderEventImpl> events) {
  if (events != null) {
    // StringBuilder suffices: the buffer is method-local, so the
    // synchronization provided by StringBuffer would be pure overhead.
    StringBuilder buffer = new StringBuilder();
    buffer.append(message);
    buffer.append(events.size()).append(" events");
    buffer.append(" (batch #").append(getBatchId());
    buffer.append("):\n");
    for (GatewaySenderEventImpl ge : events) {
      buffer.append("\tEvent ").append(ge.getEventId()).append(":");
      buffer.append(ge.getKey()).append("->");
      // TODO:wan70 remove old code
      buffer.append(ge.deserialize(ge.getValue()));
      buffer.append(ge.getShadowKey());
      buffer.append("\n");
    }
    logger.debug(buffer.toString());
  }
}
/** @return the number of events dispatched by this processor so far */
public long getNumEventsDispatched() {
return numEventsDispatched;
}
// The methods below delegate to the parallel queue implementation; they are
// only meaningful when this processor is backed by a ParallelGatewaySenderQueue.
/** Delegates to the parallel queue: clears the given bucket's queued events. */
public void clear(PartitionedRegion pr, int bucketId) {
((ParallelGatewaySenderQueue)this.queue).clear(pr, bucketId);
}
/*public int size(PartitionedRegion pr, int bucketId)
throws ForceReattemptException {
return ((ParallelGatewaySenderQueue)this.queue).size(pr, bucketId);
}*/
/** Delegates to the parallel queue; note the bucketId parameter is unused. */
public void notifyEventProcessorIfRequired(int bucketId) {
((ParallelGatewaySenderQueue) this.queue).notifyEventProcessorIfRequired();
}
/** @return the temporary event queue of the given bucket, or {@code null} if absent */
public BlockingQueue<GatewaySenderEventImpl> getBucketTmpQueue(int bucketId) {
return ((ParallelGatewaySenderQueue) this.queue).getBucketToTempQueueMap()
.get(bucketId);
}
/** Delegates to the parallel queue: looks up the region by name. */
public PartitionedRegion getRegion(String prRegionName) {
return ((ParallelGatewaySenderQueue) this.queue).getRegion(prRegionName);
}
/** Delegates to the parallel queue: removes the named shadow PR. */
public void removeShadowPR(String prRegionName) {
((ParallelGatewaySenderQueue) this.queue).removeShadowPR(prRegionName);
}
/** Delegates to the parallel queue: conflates the object at the given tail key. */
public void conflateEvent(Conflatable conflatableObject, int bucketId,
Long tailKey) {
((ParallelGatewaySenderQueue) this.queue).conflateEvent(conflatableObject,
bucketId, tailKey);
}
/** Delegates to the parallel queue: adds the shadow PR for a user partitioned region. */
public void addShadowPartitionedRegionForUserPR(PartitionedRegion pr) {
((ParallelGatewaySenderQueue) this.queue)
.addShadowPartitionedRegionForUserPR(pr);
}
/** Delegates to the parallel queue: adds the shadow PR for a user replicated region. */
public void addShadowPartitionedRegionForUserRR(DistributedRegion userRegion) {
((ParallelGatewaySenderQueue) this.queue)
.addShadowPartitionedRegionForUserRR(userRegion);
}
/**
 * {@link Callable} that stops the given event processor when invoked,
 * allowing processors to be stopped via an executor.
 */
protected class SenderStopperCallable implements Callable<Boolean> {
  private final AbstractGatewaySenderEventProcessor p;

  /**
   * @param processor the processor that this callable will stop
   */
  public SenderStopperCallable(AbstractGatewaySenderEventProcessor processor) {
    this.p = processor;
  }

  @Override
  public Boolean call() {
    this.p.stopProcessing();
    return true;
  }
}
/**
 * Immutable composite key used for conflating queued events: two events
 * conflate when they target the same region, key and operation. Fields are
 * {@code final} since instances are used as hash-map keys and must never
 * change after construction.
 */
private static class ConflationKey {
  private final Object key;

  private final Operation operation;

  private final String regionName;

  private ConflationKey(String region, Object key, Operation operation) {
    this.key = key;
    this.operation = operation;
    this.regionName = region;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + key.hashCode();
    result = prime * result + operation.hashCode();
    result = prime * result + regionName.hashCode();
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    ConflationKey that = (ConflationKey) obj;
    if (!this.regionName.equals(that.regionName)) {
      return false;
    }
    if (!this.key.equals(that.key)) {
      return false;
    }
    if (!this.operation.equals(that.operation)) {
      return false;
    }
    return true;
  }
}
}
| apache-2.0 |
camunda/camunda-bpm-platform | engine/src/main/java/org/camunda/bpm/engine/impl/persistence/entity/DelayedVariableEvent.java | 1465 | /*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.persistence.entity;
import org.camunda.bpm.engine.impl.core.variable.event.VariableEvent;
import org.camunda.bpm.engine.impl.pvm.runtime.PvmExecutionImpl;
/**
 * Pairs a {@link VariableEvent} with the execution scope it targets, so the
 * event can be held and dispatched later ("delayed") rather than at the
 * moment the variable change occurred.
 *
 * @author Thorben Lindhauer
 * @author Christopher Zell
 */
public class DelayedVariableEvent {
// The execution scope to which the event will be delivered.
protected PvmExecutionImpl targetScope;
// The variable event whose delivery is deferred.
protected VariableEvent event;
/**
 * @param targetScope the execution scope the event is associated with
 * @param event       the variable event to deliver later
 */
public DelayedVariableEvent(PvmExecutionImpl targetScope, VariableEvent event) {
this.targetScope = targetScope;
this.event = event;
}
public PvmExecutionImpl getTargetScope() {
return targetScope;
}
public VariableEvent getEvent() {
return event;
}
}
| apache-2.0 |
goodwinnk/intellij-community | platform/lang-impl/src/com/intellij/ide/navigationToolbar/NavBarModel.java | 10626 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.navigationToolbar;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.util.CommonProcessors;
import com.intellij.util.ObjectUtils;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
 * Model behind the IDE navigation bar: holds the ordered path of elements
 * (project, module, directories, files, PSI elements) from the root down to
 * the current element, plus the index of the selected entry. Model and
 * selection changes are published via {@link NavBarModelListener} on the
 * project message bus.
 *
 * @author Konstantin Bulenkov
 * @author Anna Kozlova
 */
public class NavBarModel {
// Current path of elements, root first; replaced as a whole on every update.
private List<Object> myModel = Collections.emptyList();
private int mySelectedIndex;
private final Project myProject;
// Publisher for model/selection change notifications.
private final NavBarModelListener myNotificator;
private final NavBarModelBuilder myBuilder;
// True when the model may be stale and must be rebuilt on the next update.
private boolean myChanged = true;
private boolean updated = false;
private boolean isFixedComponent = false;
public NavBarModel(final Project project) {
myProject = project;
myNotificator = project.getMessageBus().syncPublisher(NavBarModelListener.NAV_BAR);
myBuilder = NavBarModelBuilder.getInstance();
}
public int getSelectedIndex() {
return mySelectedIndex;
}
/** @return the selected element, or {@code null} when nothing is selected */
@Nullable
public Object getSelectedValue() {
return getElement(mySelectedIndex);
}
/** @return the element at {@code index}, or {@code null} when out of range */
@Nullable
public Object getElement(int index) {
if (index != -1 && index < myModel.size()) {
return myModel.get(index);
}
return null;
}
public int size() {
return myModel.size();
}
public boolean isEmpty() {
return myModel.isEmpty();
}
/** Maps an arbitrary index into the model's range, wrapping negative and overflowing values. */
public int getIndexByModel(int index) {
if (index < 0) return myModel.size() + index;
if (index >= myModel.size() && myModel.size() > 0) return index % myModel.size();
return index;
}
/**
 * Rebuilds the model from the PSI element found in the given data context.
 * Skipped in a modal context, when already updated (unless this model backs
 * a fixed component), or when the context component is the nav bar itself.
 */
protected void updateModel(DataContext dataContext) {
if (LaterInvocator.isInModalContext() || (updated && !isFixedComponent)) return;
if (PlatformDataKeys.CONTEXT_COMPONENT.getData(dataContext) instanceof NavBarPanel) return;
// Prefer the PSI file from the context, falling back to the PSI element.
PsiElement psiElement = CommonDataKeys.PSI_FILE.getData(dataContext);
if (psiElement == null) {
psiElement = CommonDataKeys.PSI_ELEMENT.getData(dataContext);
}
psiElement = normalize(psiElement);
// Nothing to do if the deepest element is already the current one and nothing changed.
if (!myModel.isEmpty() && myModel.get(myModel.size() - 1).equals(psiElement) && !myChanged) return;
if (psiElement != null && psiElement.isValid()) {
updateModel(psiElement);
}
else {
if (UISettings.getInstance().getShowNavigationBar() && !myModel.isEmpty()) return;
Object root = calculateRoot(dataContext);
if (root != null) {
setModel(Collections.singletonList(root));
}
}
setChanged(false);
updated = true;
}
private Object calculateRoot(DataContext dataContext) {
// Narrow down the root element to the first interesting one
Object root = LangDataKeys.MODULE.getData(dataContext);
if (root != null) return root;
Project project = CommonDataKeys.PROJECT.getData(dataContext);
if (project == null) return null;
Object projectChild;
Object projectGrandChild = null;
// Descend while the project has exactly one child, then one grandchild.
CommonProcessors.FindFirstAndOnlyProcessor<Object> processor = new CommonProcessors.FindFirstAndOnlyProcessor<>();
processChildren(project, processor);
projectChild = processor.reset();
if (projectChild != null) {
processChildren(projectChild, processor);
projectGrandChild = processor.reset();
}
return ObjectUtils.chooseNotNull(projectGrandChild, ObjectUtils.chooseNotNull(projectChild, project));
}
/**
 * Rebuilds the model path for the given PSI element, using the top-level
 * project content roots (plus roots contributed by extensions) as the
 * root set for the builder.
 */
protected void updateModel(final PsiElement psiElement) {
final Set<VirtualFile> roots = new HashSet<>();
final ProjectRootManager projectRootManager = ProjectRootManager.getInstance(myProject);
final ProjectFileIndex projectFileIndex = projectRootManager.getFileIndex();
for (VirtualFile root : projectRootManager.getContentRoots()) {
VirtualFile parent = root.getParent();
// Only top-level content roots: those whose parent is outside the project content.
if (parent == null || !projectFileIndex.isInContent(parent)) {
roots.add(root);
}
}
for (final NavBarModelExtension modelExtension : Extensions.getExtensions(NavBarModelExtension.EP_NAME)) {
for (VirtualFile root : modelExtension.additionalRoots(psiElement.getProject())) {
VirtualFile parent = root.getParent();
if (parent == null || !projectFileIndex.isInContent(parent)) {
roots.add(root);
}
}
}
// Model building reads PSI, so it must run inside a read action.
List<Object> updatedModel = ReadAction.compute(() -> isValid(psiElement) ? myBuilder.createModel(psiElement, roots) : Collections.emptyList());
setModel(ContainerUtil.reverse(updatedModel));
}
/** Truncates the model at the first invalid element, if any. */
void revalidate() {
final List<Object> objects = new ArrayList<>();
boolean update = false;
for (Object o : myModel) {
if (isValid(o)) {
objects.add(o);
} else {
update = true;
break;
}
}
if (update) {
setModel(objects);
}
}
protected void setModel(List<Object> model) {
if (!model.equals(myModel)) {
myModel = model;
myNotificator.modelChanged();
// Select the last (deepest) element by default.
mySelectedIndex = myModel.size() - 1;
myNotificator.selectionChanged();
}
}
public void updateModel(final Object object) {
if (object instanceof PsiElement) {
updateModel((PsiElement)object);
}
else if (object instanceof Module) {
List<Object> l = new ArrayList<>();
l.add(myProject);
l.add(object);
setModel(l);
}
}
protected boolean hasChildren(Object object) {
// processChildren returns false as soon as the first child is found.
return !processChildren(object, new CommonProcessors.FindFirstProcessor<>());
}
//to avoid the following situation: element was taken from NavBarPanel via data context and all left children
// were truncated by traverseToRoot
public void setChanged(boolean changed) {
myChanged = changed;
}
@SuppressWarnings({"SimplifiableIfStatement"})
static boolean isValid(final Object object) {
if (object instanceof Project) {
return !((Project)object).isDisposed();
}
if (object instanceof Module) {
return !((Module)object).isDisposed();
}
if (object instanceof PsiElement) {
// PSI validity must be checked inside a read action.
return ReadAction.compute(() -> ((PsiElement)object).isValid()).booleanValue();
}
return object != null;
}
/** Runs the element through all model extensions, in reverse registration order. */
@Nullable
public static PsiElement normalize(@Nullable PsiElement child) {
if (child == null) return null;
NavBarModelExtension[] extensions = Extensions.getExtensions(NavBarModelExtension.EP_NAME);
for (int i = extensions.length - 1; i >= 0; i--) {
NavBarModelExtension modelExtension = extensions[i];
child = modelExtension.adjustElement(child);
if (child == null) return null;
}
return child;
}
/** @return the normalized children of the element, sorted by {@link SiblingsComparator} */
protected List<Object> getChildren(final Object object) {
final List<Object> result = ContainerUtil.newArrayList();
Processor<Object> processor = o -> {
ContainerUtil.addIfNotNull(result, o instanceof PsiElement ? normalize((PsiElement)o) : o);
return true;
};
processChildren(object, processor);
Collections.sort(result, new SiblingsComparator());
return result;
}
private boolean processChildren(Object object, @NotNull Processor<Object> processor) {
if (!isValid(object)) return true;
final Object rootElement = size() > 1 ? getElement(1) : null;
if (rootElement != null && !isValid(rootElement)) return true;
for (NavBarModelExtension modelExtension : Extensions.getExtensions(NavBarModelExtension.EP_NAME)) {
if (modelExtension instanceof AbstractNavBarModelExtension) {
// Stop as soon as an extension's traversal is aborted by the processor.
if (!((AbstractNavBarModelExtension)modelExtension).processChildren(object, rootElement, processor)) return false;
}
}
return true;
}
public Object get(final int index) {
return myModel.get(index);
}
public int indexOf(Object value) {
return myModel.indexOf(value);
}
public void setSelectedIndex(final int selectedIndex) {
if (mySelectedIndex != selectedIndex) {
mySelectedIndex = selectedIndex;
myNotificator.selectionChanged();
}
}
public void setFixedComponent(boolean fixedComponent) {
isFixedComponent = fixedComponent;
}
/**
 * Orders siblings by a weight (modules, then directories/containers, then
 * named elements, then files), ties broken by case-insensitive name;
 * unweighted objects sort first.
 */
private static final class SiblingsComparator implements Comparator<Object> {
@Override
public int compare(final Object o1, final Object o2) {
final Pair<Integer, String> w1 = getWeightedName(o1);
final Pair<Integer, String> w2 = getWeightedName(o2);
if (w1 == null) return w2 == null ? 0 : -1;
if (w2 == null) return 1;
if (!w1.first.equals(w2.first)) {
// Higher weight sorts earlier.
return -w1.first.intValue() + w2.first.intValue();
}
return Comparing.compare(w1.second, w2.second, String.CASE_INSENSITIVE_ORDER);
}
@Nullable
private static Pair<Integer, String> getWeightedName(Object object) {
if (object instanceof Module) {
return Pair.create(5, ((Module)object).getName());
}
if (object instanceof PsiDirectoryContainer) {
return Pair.create(4, ((PsiDirectoryContainer)object).getName());
}
else if (object instanceof PsiDirectory) {
return Pair.create(4, ((PsiDirectory)object).getName());
}
if (object instanceof PsiFile) {
return Pair.create(2, ((PsiFile)object).getName());
}
if (object instanceof PsiNamedElement) {
return Pair.create(3, ((PsiNamedElement)object).getName());
}
return null;
}
}
}
| apache-2.0 |
XidongHuang/aws-sdk-for-java | src/main/java/com/amazonaws/services/cloudfront/model/transform/GetDistributionResultStaxUnmarshaller.java | 2532 | /*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.cloudfront.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.stream.events.XMLEvent;
import com.amazonaws.services.cloudfront.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.MapEntry;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
 * Get Distribution Result StAX Unmarshaller.
 * Parses the GetDistribution response into a {@link GetDistributionResult}:
 * the ETag comes from the HTTP response header, the Distribution element
 * from the XML body.
 */
public class GetDistributionResultStaxUnmarshaller implements Unmarshaller<GetDistributionResult, StaxUnmarshallerContext> {
public GetDistributionResult unmarshall(StaxUnmarshallerContext context) throws Exception {
GetDistributionResult getDistributionResult = new GetDistributionResult();
int originalDepth = context.getCurrentDepth();
int targetDepth = originalDepth + 1;
// The ETag is carried in an HTTP header, not in the XML document.
if (context.isStartOfDocument()) {
getDistributionResult.setETag(context.getHeader("ETag"));
}
while (true) {
XMLEvent xmlEvent = context.nextEvent();
if (xmlEvent.isEndDocument()) return getDistributionResult;
if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
if (context.testExpression("Distribution", targetDepth)) {
getDistributionResult.setDistribution(DistributionStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
} else if (xmlEvent.isEndElement()) {
// Left the element we started in: this result is fully parsed.
if (context.getCurrentDepth() < originalDepth) {
return getDistributionResult;
}
}
}
}
private static GetDistributionResultStaxUnmarshaller instance;
// NOTE(review): lazy initialization is not thread-safe; harmless only if
// instances are stateless and a racing double-create is acceptable -- confirm.
public static GetDistributionResultStaxUnmarshaller getInstance() {
if (instance == null) instance = new GetDistributionResultStaxUnmarshaller();
return instance;
}
}
| apache-2.0 |
zwets/flowable-engine | modules/flowable-engine/src/test/java/org/flowable/engine/test/api/runtime/RuntimeVariablesTest.java | 6602 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.test.api.runtime;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.flowable.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.engine.runtime.Execution;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
import org.flowable.variable.api.persistence.entity.VariableInstance;
/**
 * Tests for {@code RuntimeService.getVariableInstancesByExecutionIds}:
 * retrieval by one or several process instance ids, large serializable
 * values, and scoping (execution-local vs. process-instance variables).
 *
 * @author Daisuke Yoshimoto
 */
public class RuntimeVariablesTest extends PluggableFlowableTestCase {
/**
 * Only execution-scoped variables should be returned; task-local
 * variables set alongside them must be excluded.
 */
@Deployment
public void testGetVariablesByExecutionIds() {
ProcessInstance processInstance1 = runtimeService.startProcessInstanceByKey("oneTaskProcess");
ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess");
org.flowable.task.api.Task task1 = taskService.createTaskQuery().processInstanceId(processInstance1.getId()).singleResult();
org.flowable.task.api.Task task2 = taskService.createTaskQuery().processInstanceId(processInstance2.getId()).singleResult();
// org.flowable.task.service.Task local variables
taskService.setVariableLocal(task1.getId(), "taskVar1", "sayHello1");
// Execution variables
taskService.setVariable(task1.getId(), "executionVar1", "helloWorld1");
// org.flowable.task.service.Task local variables
taskService.setVariableLocal(task2.getId(), "taskVar2", "sayHello2");
// Execution variables
taskService.setVariable(task2.getId(), "executionVar2", "helloWorld2");
// only 1 process
Set<String> executionIds = new HashSet<>();
executionIds.add(processInstance1.getId());
List<VariableInstance> variables = runtimeService.getVariableInstancesByExecutionIds(executionIds);
assertEquals(1, variables.size());
checkVariable(processInstance1.getId(), "executionVar1", "helloWorld1", variables);
// 2 process
executionIds = new HashSet<>();
executionIds.add(processInstance1.getId());
executionIds.add(processInstance2.getId());
variables = runtimeService.getVariableInstancesByExecutionIds(executionIds);
assertEquals(2, variables.size());
checkVariable(processInstance1.getId(), "executionVar1", "helloWorld1", variables);
checkVariable(processInstance2.getId(), "executionVar2", "helloWorld2", variables);
}
/**
 * A value longer than 4000 characters is round-tripped intact;
 * presumably 4000 chars is the threshold beyond which the engine stores
 * the value as a serializable/long value -- confirm against the variable
 * type configuration.
 */
@Deployment(resources = {
"org/flowable/engine/test/api/runtime/RuntimeVariablesTest.testGetVariablesByExecutionIds.bpmn20.xml"
})
public void testGetVariablesByExecutionIdsForSerializableType() {
ProcessInstance processInstance1 = runtimeService.startProcessInstanceByKey("oneTaskProcess");
org.flowable.task.api.Task task1 = taskService.createTaskQuery().processInstanceId(processInstance1.getId()).singleResult();
// Build a 4002-character string.
StringBuilder sb = new StringBuilder("a");
for (int i = 0; i < 4001; i++) {
sb.append("a");
}
String serializableTypeVar = sb.toString();
// Execution variables
taskService.setVariable(task1.getId(), "executionVar1", serializableTypeVar);
// only 1 process
Set<String> executionIds = new HashSet<>();
executionIds.add(processInstance1.getId());
List<VariableInstance> variables = runtimeService.getVariableInstancesByExecutionIds(executionIds);
assertEquals(serializableTypeVar, variables.get(0).getValue());
}
// Asserts that the variable owned by the given execution has the expected
// name/value; fails if no variable belongs to that execution.
private void checkVariable(String executionId, String name, String value, List<VariableInstance> variables) {
for (VariableInstance variable : variables) {
if (executionId.equals(variable.getExecutionId())) {
assertEquals(name, variable.getName());
assertEquals(value, variable.getValue());
return;
}
}
fail();
}
/**
 * Variables local to child executions are returned when querying by those
 * execution ids, while the process-instance id yields only the
 * process-level variable.
 */
@Deployment(resources = {
"org/flowable/engine/test/api/runtime/variableScope.bpmn20.xml"
})
public void testGetVariablesByExecutionIdsForScope() {
Map<String, Object> processVars = new HashMap<>();
processVars.put("processVar", "processVar");
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("variableScopeProcess", processVars);
Set<String> executionIds = new HashSet<>();
List<Execution> executions = runtimeService.createExecutionQuery().processInstanceId(processInstance.getId()).list();
for (Execution execution : executions) {
// Skip the process-instance execution itself; only child executions
// get the execution-local variable.
if (!processInstance.getId().equals(execution.getId())) {
executionIds.add(execution.getId());
runtimeService.setVariableLocal(execution.getId(), "executionVar", "executionVar");
}
}
List<org.flowable.task.api.Task> tasks = taskService.createTaskQuery().processInstanceId(processInstance.getId()).list();
for (org.flowable.task.api.Task task : tasks) {
taskService.setVariableLocal(task.getId(), "taskVar", "taskVar");
}
List<VariableInstance> executionVariableInstances = runtimeService.getVariableInstancesByExecutionIds(executionIds);
assertEquals(2, executionVariableInstances.size());
assertEquals("executionVar", executionVariableInstances.get(0).getName());
assertEquals("executionVar", executionVariableInstances.get(0).getValue());
assertEquals("executionVar", executionVariableInstances.get(1).getName());
assertEquals("executionVar", executionVariableInstances.get(1).getValue());
executionIds = new HashSet<>();
executionIds.add(processInstance.getId());
executionVariableInstances = runtimeService.getVariableInstancesByExecutionIds(executionIds);
assertEquals(1, executionVariableInstances.size());
assertEquals("processVar", executionVariableInstances.get(0).getName());
assertEquals("processVar", executionVariableInstances.get(0).getValue());
}
}
| apache-2.0 |
onders86/camel | camel-core/src/test/java/org/apache/camel/main/MainVetoTest.java | 2265 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.main;
import org.apache.camel.CamelContext;
import org.apache.camel.VetoCamelContextStartException;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.support.LifecycleStrategySupport;
import org.junit.Assert;
import org.junit.Test;
/**
 * Verifies that a {@link VetoCamelContextStartException} thrown from a
 * lifecycle strategy stops {@link Main} cleanly (no hang waiting for the
 * configured duration) and results in a normal, zero exit code rather than
 * the duration-hit exit code.
 */
public class MainVetoTest extends Assert {

    @Test
    public void testMain() throws Exception {
        // lets make a simple route
        Main main = new Main();
        main.setDuration(30);
        main.setDurationHitExitCode(99);
        main.addRouteBuilder(new MyRoute());
        main.addMainListener(new MainListenerSupport() {
            @Override
            public void configure(CamelContext context) {
                context.addLifecycleStrategy(new MyVetoLifecycle());
            }
        });

        // should not hang as we veto fail
        main.run();

        // should complete normally due veto
        assertEquals(0, main.getExitCode());
    }

    // Static nested classes: neither class uses state from the enclosing test,
    // so they should not capture a hidden reference to the test instance.
    private static class MyRoute extends RouteBuilder {
        @Override
        public void configure() throws Exception {
            from("timer:foo").to("mock:foo");
        }
    }

    private static class MyVetoLifecycle extends LifecycleStrategySupport {
        @Override
        public void onContextStart(CamelContext context) throws VetoCamelContextStartException {
            throw new VetoCamelContextStartException("We do not like this route", context, false);
        }
    }
}
| apache-2.0 |
spring-projects/spring-boot | spring-boot-project/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/cache/CachesEndpointTests.java | 7880 | /*
* Copyright 2012-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.cache;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.junit.jupiter.api.Test;
import org.springframework.boot.actuate.cache.CachesEndpoint.CacheEntry;
import org.springframework.boot.actuate.cache.CachesEndpoint.CacheManagerDescriptor;
import org.springframework.cache.Cache;
import org.springframework.cache.CacheManager;
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
import org.springframework.cache.support.SimpleCacheManager;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.mockito.BDDMockito.given;
import static org.mockito.BDDMockito.then;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
/**
* Tests for {@link CachesEndpoint}.
*
* @author Stephane Nicoll
*/
class CachesEndpointTests {
	@Test
	void allCachesWithSingleCacheManager() {
		CachesEndpoint endpoint = new CachesEndpoint(
				Collections.singletonMap("test", new ConcurrentMapCacheManager("a", "b")));
		Map<String, CacheManagerDescriptor> allDescriptors = endpoint.caches().getCacheManagers();
		assertThat(allDescriptors).containsOnlyKeys("test");
		CacheManagerDescriptor descriptors = allDescriptors.get("test");
		assertThat(descriptors.getCaches()).containsOnlyKeys("a", "b");
		assertThat(descriptors.getCaches().get("a").getTarget()).isEqualTo(ConcurrentHashMap.class.getName());
		assertThat(descriptors.getCaches().get("b").getTarget()).isEqualTo(ConcurrentHashMap.class.getName());
	}
	@Test
	void allCachesWithSeveralCacheManagers() {
		// LinkedHashMap keeps a deterministic cache manager order for the assertions
		Map<String, CacheManager> cacheManagers = new LinkedHashMap<>();
		cacheManagers.put("test", new ConcurrentMapCacheManager("a", "b"));
		cacheManagers.put("another", new ConcurrentMapCacheManager("a", "c"));
		CachesEndpoint endpoint = new CachesEndpoint(cacheManagers);
		Map<String, CacheManagerDescriptor> allDescriptors = endpoint.caches().getCacheManagers();
		assertThat(allDescriptors).containsOnlyKeys("test", "another");
		assertThat(allDescriptors.get("test").getCaches()).containsOnlyKeys("a", "b");
		assertThat(allDescriptors.get("another").getCaches()).containsOnlyKeys("a", "c");
	}
	@Test
	void namedCacheWithSingleCacheManager() {
		CachesEndpoint endpoint = new CachesEndpoint(
				Collections.singletonMap("test", new ConcurrentMapCacheManager("b", "a")));
		CacheEntry entry = endpoint.cache("a", null);
		assertThat(entry).isNotNull();
		assertThat(entry.getCacheManager()).isEqualTo("test");
		assertThat(entry.getName()).isEqualTo("a");
		assertThat(entry.getTarget()).isEqualTo(ConcurrentHashMap.class.getName());
	}
	@Test
	void namedCacheWithSeveralCacheManagers() {
		// the same cache name registered in two managers is ambiguous without a
		// cacheManager filter and must be rejected
		Map<String, CacheManager> cacheManagers = new LinkedHashMap<>();
		cacheManagers.put("test", new ConcurrentMapCacheManager("b", "dupe-cache"));
		cacheManagers.put("another", new ConcurrentMapCacheManager("c", "dupe-cache"));
		CachesEndpoint endpoint = new CachesEndpoint(cacheManagers);
		assertThatExceptionOfType(NonUniqueCacheException.class).isThrownBy(() -> endpoint.cache("dupe-cache", null))
				.withMessageContaining("dupe-cache").withMessageContaining("test").withMessageContaining("another");
	}
	@Test
	void namedCacheWithUnknownCache() {
		CachesEndpoint endpoint = new CachesEndpoint(
				Collections.singletonMap("test", new ConcurrentMapCacheManager("b", "a")));
		CacheEntry entry = endpoint.cache("unknown", null);
		assertThat(entry).isNull();
	}
	@Test
	void namedCacheWithWrongCacheManager() {
		// cache "c" exists only in "another", so looking it up in "test" finds nothing
		Map<String, CacheManager> cacheManagers = new LinkedHashMap<>();
		cacheManagers.put("test", new ConcurrentMapCacheManager("b", "a"));
		cacheManagers.put("another", new ConcurrentMapCacheManager("c", "a"));
		CachesEndpoint endpoint = new CachesEndpoint(cacheManagers);
		CacheEntry entry = endpoint.cache("c", "test");
		assertThat(entry).isNull();
	}
	@Test
	void namedCacheWithSeveralCacheManagersWithCacheManagerFilter() {
		Map<String, CacheManager> cacheManagers = new LinkedHashMap<>();
		cacheManagers.put("test", new ConcurrentMapCacheManager("b", "a"));
		cacheManagers.put("another", new ConcurrentMapCacheManager("c", "a"));
		CachesEndpoint endpoint = new CachesEndpoint(cacheManagers);
		CacheEntry entry = endpoint.cache("a", "test");
		assertThat(entry).isNotNull();
		assertThat(entry.getCacheManager()).isEqualTo("test");
		assertThat(entry.getName()).isEqualTo("a");
	}
	@Test
	void clearAllCaches() {
		Cache a = mockCache("a");
		Cache b = mockCache("b");
		CachesEndpoint endpoint = new CachesEndpoint(Collections.singletonMap("test", cacheManager(a, b)));
		endpoint.clearCaches();
		then(a).should().clear();
		then(b).should().clear();
	}
	@Test
	void clearCache() {
		Cache a = mockCache("a");
		Cache b = mockCache("b");
		CachesEndpoint endpoint = new CachesEndpoint(Collections.singletonMap("test", cacheManager(a, b)));
		assertThat(endpoint.clearCache("a", null)).isTrue();
		then(a).should().clear();
		then(b).should(never()).clear();
	}
	@Test
	void clearCacheWithSeveralCacheManagers() {
		// clearing an ambiguous cache name without a cacheManager filter must fail
		Map<String, CacheManager> cacheManagers = new LinkedHashMap<>();
		cacheManagers.put("test", cacheManager(mockCache("dupe-cache"), mockCache("b")));
		cacheManagers.put("another", cacheManager(mockCache("dupe-cache")));
		CachesEndpoint endpoint = new CachesEndpoint(cacheManagers);
		assertThatExceptionOfType(NonUniqueCacheException.class)
				.isThrownBy(() -> endpoint.clearCache("dupe-cache", null)).withMessageContaining("dupe-cache")
				.withMessageContaining("test").withMessageContaining("another");
	}
	@Test
	void clearCacheWithSeveralCacheManagersWithCacheManagerFilter() {
		// only the cache of the targeted manager is cleared; same-named caches in
		// other managers are left alone
		Map<String, CacheManager> cacheManagers = new LinkedHashMap<>();
		Cache a = mockCache("a");
		Cache b = mockCache("b");
		cacheManagers.put("test", cacheManager(a, b));
		Cache anotherA = mockCache("a");
		cacheManagers.put("another", cacheManager(anotherA));
		CachesEndpoint endpoint = new CachesEndpoint(cacheManagers);
		assertThat(endpoint.clearCache("a", "another")).isTrue();
		then(a).should(never()).clear();
		then(anotherA).should().clear();
		then(b).should(never()).clear();
	}
	@Test
	void clearCacheWithUnknownCache() {
		Cache a = mockCache("a");
		CachesEndpoint endpoint = new CachesEndpoint(Collections.singletonMap("test", cacheManager(a)));
		assertThat(endpoint.clearCache("unknown", null)).isFalse();
		then(a).should(never()).clear();
	}
	@Test
	void clearCacheWithUnknownCacheManager() {
		Cache a = mockCache("a");
		CachesEndpoint endpoint = new CachesEndpoint(Collections.singletonMap("test", cacheManager(a)));
		assertThat(endpoint.clearCache("a", "unknown")).isFalse();
		then(a).should(never()).clear();
	}
	// Builds a real SimpleCacheManager wrapping the given caches.
	private CacheManager cacheManager(Cache... caches) {
		SimpleCacheManager cacheManager = new SimpleCacheManager();
		cacheManager.setCaches(Arrays.asList(caches));
		cacheManager.afterPropertiesSet();
		return cacheManager;
	}
	// Creates a Mockito cache mock with the given name and a dummy native cache,
	// so clear() interactions can be verified.
	private Cache mockCache(String name) {
		Cache cache = mock(Cache.class);
		given(cache.getName()).willReturn(name);
		given(cache.getNativeCache()).willReturn(new Object());
		return cache;
	}
}
| apache-2.0 |
rpiotti/Web-Karma | karma-cleaning/src/main/java/edu/isi/karma/cleaning/QuestionableRecord/Feature3.java | 1737 | /*******************************************************************************
* Copyright 2012 University of Southern California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This code was developed by the Information Integration Group as part
* of the Karma project at the Information Sciences Institute of the
* University of Southern California. For more information, publications,
* and related projects, please see: http://www.isi.edu/integration
******************************************************************************/
package edu.isi.karma.cleaning.QuestionableRecord;
// reverse count feature
public class Feature3 implements RecFeature {
int rcnt = 0;
String resString;
double weight = 1.0;
// calculate the number of reverse order
public Feature3(String res, double weight) {
this.resString = res;
}
public String getName() {
return resString;
}
@Override
public double computerScore() {
int pre = -1;
double cnt = 0;
for (int c = 0; c < resString.length(); c++) {
if (Character.isDigit(resString.charAt(c))) {
if (pre != -1) {
pre = c;
continue;
}
if (c < pre) {
pre = c;
cnt++;
}
}
}
return cnt * this.weight;
}
}
| apache-2.0 |
dashbuilder/dashbuilder | dashbuilder-client/dashbuilder-navigation-client/src/main/java/org/dashbuilder/client/navigation/NavigationManager.java | 1227 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dashbuilder.client.navigation;
import java.util.List;
import org.dashbuilder.navigation.NavItem;
import org.dashbuilder.navigation.NavTree;
import org.uberfire.mvp.Command;
/**
 * Central access point for the application's navigation tree: initialization,
 * persistence, security trimming and navigation-item click notifications.
 */
public interface NavigationManager {

    /**
     * Initializes the manager.
     *
     * @param afterInit command executed once initialization has completed
     */
    void init(Command afterInit);

    // Registers the tree to fall back on when no custom tree is available
    // (presumably used until a tree is saved — confirm against implementations).
    void setDefaultNavTree(NavTree navTree);

    /** @return the default navigation tree registered via {@link #setDefaultNavTree(NavTree)} */
    NavTree getDefaultNavTree();

    /** @return the currently active navigation tree */
    NavTree getNavTree();

    /**
     * Saves the given navigation tree.
     *
     * @param navTree   the tree to persist
     * @param afterSave command executed once the save has completed
     */
    void saveNavTree(NavTree navTree, Command afterSave);

    /**
     * Applies security filtering to the given tree.
     *
     * @param navTree           the tree to filter
     * @param removeEmptyGroups whether groups left without children should be dropped
     * @return the filtered tree
     */
    NavTree secure(NavTree navTree, boolean removeEmptyGroups);

    /**
     * Applies security filtering to the given item list in place.
     *
     * @param itemList          the items to filter
     * @param removeEmptyGroups whether groups left without children should be dropped
     */
    void secure(List<NavItem> itemList, boolean removeEmptyGroups);

    /** Notifies the manager that the given navigation item has been clicked. */
    void navItemClicked(NavItem navItem);
}
| apache-2.0 |
nvoron23/presto | presto-tests/src/main/java/com/facebook/presto/tests/tpch/TpchIndexResolver.java | 8050 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.tests.tpch;
import com.facebook.presto.spi.ConnectorColumnHandle;
import com.facebook.presto.spi.ConnectorIndexHandle;
import com.facebook.presto.spi.ConnectorIndexResolver;
import com.facebook.presto.spi.ConnectorResolvedIndex;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.facebook.presto.spi.Index;
import com.facebook.presto.spi.RecordSet;
import com.facebook.presto.spi.TupleDomain;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.split.MappedRecordSet;
import com.facebook.presto.tpch.TpchColumnHandle;
import com.facebook.presto.tpch.TpchTableHandle;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import static com.facebook.presto.util.Types.checkType;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Predicates.in;
import static com.google.common.base.Predicates.not;
import static com.google.common.collect.Iterables.any;
import static com.google.common.collect.Iterables.transform;
/**
 * {@link ConnectorIndexResolver} backed by pre-built in-memory TPCH indexes
 * ({@link TpchIndexedData}). Resolves which column combinations have an index
 * and builds {@link Index} instances that remap keys and outputs between the
 * engine's schemas and the indexed table's column order.
 */
public class TpchIndexResolver
        implements ConnectorIndexResolver
{
    private final String connectorId;
    private final TpchIndexedData indexedData;

    public TpchIndexResolver(String connectorId, TpchIndexedData indexedData)
    {
        this.connectorId = checkNotNull(connectorId, "connectorId is null");
        this.indexedData = checkNotNull(indexedData, "indexedData is null");
    }

    /**
     * Resolves an index for the given table, or returns {@code null} when the
     * indexed data contains no index covering the requested columns.
     * <p>
     * Columns pinned to a single value by the tuple domain (and not already in
     * {@code indexableColumns}) are folded into the index handle as fixed
     * values, and their domains are removed from the remaining tuple domain.
     */
    @Override
    public ConnectorResolvedIndex resolveIndex(ConnectorTableHandle tableHandle, Set<ConnectorColumnHandle> indexableColumns, TupleDomain<ConnectorColumnHandle> tupleDomain)
    {
        TpchTableHandle tpchTableHandle = checkType(tableHandle, TpchTableHandle.class, "tableHandle");

        // Keep the fixed values that don't overlap with the indexableColumns
        // Note: technically we could more efficiently utilize the overlapped columns, but this way is simpler for now
        Map<ConnectorColumnHandle, Comparable<?>> fixedValues = Maps.filterKeys(tupleDomain.extractFixedValues(), not(in(indexableColumns)));

        // determine all columns available for index lookup
        ImmutableSet.Builder<String> builder = ImmutableSet.builder();
        builder.addAll(transform(indexableColumns, columnNameGetter()));
        builder.addAll(transform(fixedValues.keySet(), columnNameGetter()));
        Set<String> lookupColumnNames = builder.build();

        // do we have an index?
        if (!indexedData.getIndexedTable(tpchTableHandle.getTableName(), tpchTableHandle.getScaleFactor(), lookupColumnNames).isPresent()) {
            return null;
        }

        // the fixed-value columns are now handled by the index itself, so drop
        // their domains from the tuple domain handed back to the engine
        TupleDomain<ConnectorColumnHandle> filteredTupleDomain = tupleDomain;
        if (!tupleDomain.isNone()) {
            filteredTupleDomain = TupleDomain.withColumnDomains(Maps.filterKeys(tupleDomain.getDomains(), not(in(fixedValues.keySet()))));
        }
        return new ConnectorResolvedIndex(new TpchIndexHandle(connectorId, tpchTableHandle.getTableName(), tpchTableHandle.getScaleFactor(), lookupColumnNames, TupleDomain.withFixedValues(fixedValues)), filteredTupleDomain);
    }

    /**
     * Builds the {@link Index} for a previously resolved handle. The returned
     * index appends the handle's fixed values to each incoming key, remaps the
     * result to the indexed table's key column order, and remaps the index
     * output to the requested output schema.
     */
    @Override
    public Index getIndex(ConnectorIndexHandle indexHandle, List<ConnectorColumnHandle> lookupSchema, List<ConnectorColumnHandle> outputSchema)
    {
        TpchIndexHandle tpchIndexHandle = checkType(indexHandle, TpchIndexHandle.class, "indexHandle");

        Map<ConnectorColumnHandle, Comparable<?>> fixedValues = tpchIndexHandle.getFixedValues().extractFixedValues();
        checkArgument(!any(lookupSchema, in(fixedValues.keySet())), "Lookup columnHandles are not expected to overlap with the fixed value predicates");

        // Establish an order for the fixedValues
        List<ConnectorColumnHandle> fixedValueColumns = ImmutableList.copyOf(fixedValues.keySet());

        // Extract the fixedValues as their raw values and types
        ImmutableList.Builder<Object> valueBuilder = ImmutableList.builder();
        ImmutableList.Builder<Type> typeBuilder = ImmutableList.builder();
        for (ConnectorColumnHandle fixedValueColumn : fixedValueColumns) {
            valueBuilder.add(fixedValues.get(fixedValueColumn));
            typeBuilder.add(((TpchColumnHandle) fixedValueColumn).getType());
        }
        List<Object> rawFixedValues = valueBuilder.build();
        List<Type> rawFixedTypes = typeBuilder.build();

        // Establish the schema after we append the fixed values to the lookup keys.
        List<ConnectorColumnHandle> finalLookupSchema = ImmutableList.<ConnectorColumnHandle>builder()
                .addAll(lookupSchema)
                .addAll(fixedValueColumns)
                .build();

        Optional<TpchIndexedData.IndexedTable> indexedTable = indexedData.getIndexedTable(tpchIndexHandle.getTableName(), tpchIndexHandle.getScaleFactor(), tpchIndexHandle.getIndexColumnNames());
        checkState(indexedTable.isPresent());
        TpchIndexedData.IndexedTable table = indexedTable.get();

        // Compute how to map from the final lookup schema to the table index key order
        List<Integer> keyRemap = computeRemap(handleToNames(finalLookupSchema), table.getKeyColumns());
        Function<RecordSet, RecordSet> keyFormatter = key ->
                new MappedRecordSet(new AppendingRecordSet(key, rawFixedValues, rawFixedTypes), keyRemap);

        // Compute how to map from the output of the indexed data to the expected output schema
        List<Integer> outputRemap = computeRemap(table.getOutputColumns(), handleToNames(outputSchema));
        Function<RecordSet, RecordSet> outputFormatter = output -> new MappedRecordSet(output, outputRemap);

        return new TpchIndex(keyFormatter, outputFormatter, table);
    }

    /**
     * Maps every column name of {@code endSchema} to its position in
     * {@code startSchema}, failing if a name is missing.
     */
    private static List<Integer> computeRemap(List<String> startSchema, List<String> endSchema)
    {
        ImmutableList.Builder<Integer> builder = ImmutableList.builder();
        for (String columnName : endSchema) {
            int index = startSchema.indexOf(columnName);
            checkArgument(index != -1, "Column name in end that is not in the start: %s", columnName);
            builder.add(index);
        }
        return builder.build();
    }

    private static List<String> handleToNames(List<ConnectorColumnHandle> columnHandles)
    {
        return Lists.transform(columnHandles, columnNameGetter());
    }

    // Guava's Function is a single-abstract-method interface, so a lambda
    // replaces the previous anonymous inner class
    private static Function<ConnectorColumnHandle, String> columnNameGetter()
    {
        return columnHandle -> checkType(columnHandle, TpchColumnHandle.class, "columnHandle").getColumnName();
    }
}
| apache-2.0 |
gingerwizard/elasticsearch | modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java | 13632 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.ingest.common;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.test.ESTestCase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument;
import static org.hamcrest.Matchers.equalTo;
public class KeyValueProcessorTests extends ESTestCase {
    // Single factory instance; create(...) builds processors from a config map,
    // mirroring how pipeline configuration is parsed in production.
    private static final KeyValueProcessor.Factory FACTORY = new KeyValueProcessor.Factory();
    public void test() throws Exception {
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
        Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false);
        processor.execute(ingestDocument);
        // duplicate keys accumulate into a list
        assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
        assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe")));
    }
    public void testRootTarget() throws Exception {
        // null target_field writes the parsed keys at the document root
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
        ingestDocument.setFieldValue("myField", "first=hello&second=world&second=universe");
        Processor processor = createKvProcessor("myField", "&", "=", null, null,null, false);
        processor.execute(ingestDocument);
        assertThat(ingestDocument.getFieldValue("first", String.class), equalTo("hello"));
        assertThat(ingestDocument.getFieldValue("second", List.class), equalTo(Arrays.asList("world", "universe")));
    }
    public void testKeySameAsSourceField() throws Exception {
        // a parsed key equal to the source field name appends to the existing
        // value, turning the field into a list of [original, parsed]
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
        ingestDocument.setFieldValue("first", "first=hello");
        Processor processor = createKvProcessor("first", "&", "=", null, null,null, false);
        processor.execute(ingestDocument);
        assertThat(ingestDocument.getFieldValue("first", List.class), equalTo(Arrays.asList("first=hello", "hello")));
    }
    public void testIncludeKeys() throws Exception {
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
        Processor processor = createKvProcessor(fieldName, "&", "=",
            Sets.newHashSet("first"), null, "target", false);
        processor.execute(ingestDocument);
        assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
        assertFalse(ingestDocument.hasField("target.second"));
    }
    public void testExcludeKeys() throws Exception {
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
        Processor processor = createKvProcessor(fieldName, "&", "=",
            null, Sets.newHashSet("second"), "target", false);
        processor.execute(ingestDocument);
        assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
        assertFalse(ingestDocument.hasField("target.second"));
    }
    public void testIncludeAndExcludeKeys() throws Exception {
        // exclude_keys wins over include_keys when a key is listed in both
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument,
            "first=hello&second=world&second=universe&third=bar");
        Processor processor = createKvProcessor(fieldName, "&", "=",
            Sets.newHashSet("first", "second"), Sets.newHashSet("first", "second"), "target", false);
        processor.execute(ingestDocument);
        assertFalse(ingestDocument.hasField("target.first"));
        assertFalse(ingestDocument.hasField("target.second"));
        assertFalse(ingestDocument.hasField("target.third"));
    }
    public void testMissingField() throws Exception {
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
        Processor processor = createKvProcessor("unknown", "&",
            "=", null, null, "target", false);
        IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
        assertThat(exception.getMessage(), equalTo("field [unknown] not present as part of path [unknown]"));
    }
    public void testNullValueWithIgnoreMissing() throws Exception {
        // ignore_missing=true leaves a null-valued field (and the document) untouched
        String fieldName = RandomDocumentPicks.randomFieldName(random());
        IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(),
            Collections.singletonMap(fieldName, null));
        IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
        Processor processor = createKvProcessor(fieldName, "", "", null, null, "target", true);
        processor.execute(ingestDocument);
        assertIngestDocument(originalIngestDocument, ingestDocument);
    }
    public void testNonExistentWithIgnoreMissing() throws Exception {
        IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
        IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
        Processor processor = createKvProcessor("unknown", "", "", null, null, "target", true);
        processor.execute(ingestDocument);
        assertIngestDocument(originalIngestDocument, ingestDocument);
    }
    public void testFailFieldSplitMatch() throws Exception {
        // when field_split ("&") never matches, everything after the first
        // value_split becomes a single value for the first key
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello|second=world|second=universe");
        Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false);
        processor.execute(ingestDocument);
        assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello|second=world|second=universe"));
        assertFalse(ingestDocument.hasField("target.second"));
    }
    public void testFailValueSplitMatch() throws Exception {
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("foo", "bar"));
        Processor processor = createKvProcessor("foo", "&", "=", null, null, "target", false);
        Exception exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
        assertThat(exception.getMessage(), equalTo("field [foo] does not contain value_split [=]"));
    }
    public void testTrimKeyAndValue() throws Exception {
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first= hello &second=world& second =universe");
        Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, " ", " ", false, null);
        processor.execute(ingestDocument);
        assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
        assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe")));
    }
    public void testTrimMultiCharSequence() throws Exception {
        // trim_key/trim_value are character sets, not literal substrings
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument,
            "to=<foo@example.com>, orig_to=<bar@example.com>, %+relay=mail.example.com[private/dovecot-lmtp]," +
                " delay=2.2, delays=1.9/0.01/0.01/0.21, dsn=2.0.0, status=sent "
        );
        Processor processor = createKvProcessor(fieldName, " ", "=", null, null, "target", false, "%+", "<>,", false, null);
        processor.execute(ingestDocument);
        assertThat(ingestDocument.getFieldValue("target.to", String.class), equalTo("foo@example.com"));
        assertThat(ingestDocument.getFieldValue("target.orig_to", String.class), equalTo("bar@example.com"));
        assertThat(ingestDocument.getFieldValue("target.relay", String.class), equalTo("mail.example.com[private/dovecot-lmtp]"));
        assertThat(ingestDocument.getFieldValue("target.delay", String.class), equalTo("2.2"));
        assertThat(ingestDocument.getFieldValue("target.delays", String.class), equalTo("1.9/0.01/0.01/0.21"));
        assertThat(ingestDocument.getFieldValue("target.dsn", String.class), equalTo("2.0.0"));
        assertThat(ingestDocument.getFieldValue("target.status", String.class), equalTo("sent"));
    }
    public void testStripBrackets() throws Exception {
        // strip_brackets removes <>, "", (), [], and '' surrounding values
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
        String fieldName = RandomDocumentPicks.addRandomField(
            random(), ingestDocument, "first=<hello>&second=\"world\"&second=(universe)&third=<foo>&fourth=[bar]&fifth='last'"
        );
        Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, null, null, true, null);
        processor.execute(ingestDocument);
        assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
        assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe")));
        assertThat(ingestDocument.getFieldValue("target.third", String.class), equalTo("foo"));
        assertThat(ingestDocument.getFieldValue("target.fourth", String.class), equalTo("bar"));
        assertThat(ingestDocument.getFieldValue("target.fifth", String.class), equalTo("last"));
    }
    public void testAddPrefix() throws Exception {
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
        Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, null, null, false, "arg_");
        processor.execute(ingestDocument);
        assertThat(ingestDocument.getFieldValue("target.arg_first", String.class), equalTo("hello"));
        assertThat(ingestDocument.getFieldValue("target.arg_second", List.class), equalTo(Arrays.asList("world", "universe")));
    }
    // Convenience overload with default trimming/brackets/prefix settings.
    private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set<String> includeKeys,
                                                       Set<String> excludeKeys, String targetField,
                                                       boolean ignoreMissing) throws Exception {
        return createKvProcessor(
            field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing, null, null, false, null
        );
    }
    // Builds the processor through the factory from a config map; null optional
    // settings are simply omitted from the config.
    private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set<String> includeKeys,
                                                       Set<String> excludeKeys, String targetField, boolean ignoreMissing,
                                                       String trimKey, String trimValue, boolean stripBrackets,
                                                       String prefix) throws Exception {
        Map<String, Object> config = new HashMap<>();
        config.put("field", field);
        config.put("field_split", fieldSplit);
        config.put("value_split", valueSplit);
        config.put("target_field", targetField);
        if (includeKeys != null) {
            config.put("include_keys", new ArrayList<>(includeKeys));
        }
        if (excludeKeys != null) {
            config.put("exclude_keys", new ArrayList<>(excludeKeys));
        }
        config.put("ignore_missing", ignoreMissing);
        if (trimKey != null) {
            config.put("trim_key", trimKey);
        }
        if (trimValue != null) {
            config.put("trim_value", trimValue);
        }
        config.put("strip_brackets", stripBrackets);
        if (prefix != null) {
            config.put("prefix", prefix);
        }
        return FACTORY.create(null, randomAlphaOfLength(10), null, config);
    }
}
| apache-2.0 |
jonathanstiansen/storm-crawler | core/src/main/java/com/digitalpebble/storm/crawler/ConfigurableTopology.java | 3941 | /**
* Licensed to DigitalPebble Ltd under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* DigitalPebble licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.digitalpebble.storm.crawler;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.StormSubmitter;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.utils.Utils;
import com.digitalpebble.storm.crawler.util.ConfUtils;
/**
 * Base class for Storm topologies that share a common command line interface:
 * {@code -conf <file>} loads extra configuration, {@code -local} runs on a
 * LocalCluster, and {@code -ttl <seconds>} limits how long a local cluster runs.
 */
public abstract class ConfigurableTopology {

    protected Config conf = new Config();
    protected boolean isLocal = false;
    // time to live in seconds for a local cluster; -1 means keep running
    protected int ttl = -1;

    /** Parses the common options out of the arguments, then runs the topology. */
    public static void start(ConfigurableTopology topology, String[] args) {
        String[] remainingArgs = topology.parse(args);
        topology.run(remainingArgs);
    }

    protected Config getConf() {
        return conf;
    }

    /**
     * Builds and submits the concrete topology.
     *
     * @param args the command line arguments left after {@link #parse(String[])}
     * @return an exit/status code
     */
    protected abstract int run(String[] args);

    /**
     * Submits the topology under the given name, either to a local cluster
     * (when {@code -local} was passed) or to a remote Storm cluster.
     *
     * @return 0 on success, -1 if the remote submission failed
     */
    protected int submit(String name, Config conf, TopologyBuilder builder) {

        // register Metadata for serialization with FieldsSerializer
        Config.registerSerialization(conf, Metadata.class);

        if (isLocal) {
            LocalCluster cluster = new LocalCluster();
            cluster.submitTopology(name, conf, builder.createTopology());
            // only shut the local cluster down when a ttl was given; otherwise
            // it is left running
            if (ttl != -1) {
                // 1000L avoids int overflow for very large ttl values
                Utils.sleep(ttl * 1000L);
                cluster.shutdown();
            }
        } else {
            try {
                StormSubmitter.submitTopology(name, conf,
                        builder.createTopology());
            } catch (Exception e) {
                e.printStackTrace();
                return -1;
            }
        }
        return 0;
    }

    /**
     * Consumes the options common to all topologies ({@code -conf},
     * {@code -local}, {@code -ttl}) and returns the remaining arguments
     * in their original order.
     */
    private String[] parse(String[] args) {

        List<String> newArgs = new ArrayList<>();
        Collections.addAll(newArgs, args);

        Iterator<String> iter = newArgs.iterator();
        while (iter.hasNext()) {
            String param = iter.next();
            if (param.equals("-conf")) {
                if (!iter.hasNext()) {
                    throw new RuntimeException("Conf file not specified");
                }
                iter.remove();
                String resource = iter.next();
                try {
                    ConfUtils.loadConf(resource, conf);
                } catch (FileNotFoundException e) {
                    throw new RuntimeException("File not found : " + resource);
                }
                iter.remove();
            } else if (param.equals("-local")) {
                isLocal = true;
                iter.remove();
            } else if (param.equals("-ttl")) {
                if (!iter.hasNext()) {
                    throw new RuntimeException("ttl value not specified");
                }
                iter.remove();
                String ttlValue = iter.next();
                try {
                    ttl = Integer.parseInt(ttlValue);
                } catch (NumberFormatException nfe) {
                    throw new RuntimeException("ttl value incorrect");
                }
                iter.remove();
            }
        }
        return newArgs.toArray(new String[newArgs.size()]);
    }
}
| apache-2.0 |