repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
Sargul/dbeaver
plugins/org.jkiss.dbeaver.ui.search/src/org/jkiss/dbeaver/ui/search/data/SearchDataQuery.java
13722
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2021 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ui.search.data; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.OperationCanceledException; import org.eclipse.core.runtime.Status; import org.eclipse.search.ui.ISearchQuery; import org.eclipse.search.ui.ISearchResult; import org.jkiss.code.NotNull; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.model.*; import org.jkiss.dbeaver.model.data.DBDAttributeConstraint; import org.jkiss.dbeaver.model.data.DBDDataFilter; import org.jkiss.dbeaver.model.data.DBDDataReceiver; import org.jkiss.dbeaver.model.exec.*; import org.jkiss.dbeaver.model.impl.AbstractExecutionSource; import org.jkiss.dbeaver.model.navigator.DBNDatabaseNode; import org.jkiss.dbeaver.model.navigator.DBNModel; import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor; import org.jkiss.dbeaver.model.runtime.DefaultProgressMonitor; import org.jkiss.dbeaver.model.runtime.VoidProgressMonitor; import org.jkiss.dbeaver.model.struct.DBSDataContainer; import org.jkiss.dbeaver.model.struct.DBSEntity; import org.jkiss.dbeaver.model.struct.DBSEntityAttribute; import org.jkiss.dbeaver.runtime.DBWorkbench; import org.jkiss.dbeaver.ui.search.AbstractSearchResult; import org.jkiss.dbeaver.utils.GeneralUtils; import org.jkiss.utils.ArrayUtils; import 
org.jkiss.utils.CommonUtils; import java.math.BigDecimal; import java.util.*; public class SearchDataQuery implements ISearchQuery { private static final Log log = Log.getLog(SearchDataQuery.class); private final SearchDataParams params; private SearchDataResult searchResult; private SearchDataQuery(SearchDataParams params) { this.params = params; } @Override public String getLabel() { return params.getSearchString(); } @Override public boolean canRerun() { return true; } @Override public boolean canRunInBackground() { return true; } @Override public ISearchResult getSearchResult() { if (searchResult == null) { searchResult = new SearchDataResult(this); } return searchResult; } @Override public IStatus run(IProgressMonitor m) throws OperationCanceledException { try { String searchString = params.getSearchString(); //monitor.subTask("Collect tables"); Set<DBPDataSource> dataSources = new HashSet<>(); for (DBSDataContainer searcher : params.sources) { dataSources.add(searcher.getDataSource()); } // Search DBNModel dbnModel = DBWorkbench.getPlatform().getNavigatorModel(); DBRProgressMonitor monitor = new DefaultProgressMonitor(m); int totalObjects = 0; monitor.beginTask( "Search \"" + searchString + "\" in " + params.sources.size() + " table(s) / " + dataSources.size() + " database(s)", params.sources.size()); try { for (DBSDataContainer dataContainer : params.sources) { if (monitor.isCanceled()) { break; } if (searchDataInContainer(monitor, dbnModel, dataContainer)) { totalObjects++; } monitor.worked(1); } } finally { monitor.done(); } searchResult.fireChange(new AbstractSearchResult.DatabaseSearchFinishEvent(searchResult, totalObjects)); return Status.OK_STATUS; } catch (Exception e) { return GeneralUtils.makeExceptionStatus(e); } } private boolean searchDataInContainer(DBRProgressMonitor monitor, DBNModel dbnModel, DBSDataContainer dataContainer) { if (!params.searchForeignObjects && dataContainer instanceof DBPForeignObject && ((DBPForeignObject) 
dataContainer).isForeignObject()) { return false; } String objectName = DBUtils.getObjectFullName(dataContainer, DBPEvaluationContext.DML); DBNDatabaseNode node = dbnModel.findNode(dataContainer); if (node == null) { log.warn("Can't find tree node for object \"" + objectName + "\""); return false; } monitor.subTask("Search in '" + objectName + "'"); log.debug("Search in '" + objectName + "'"); SearchTableMonitor searchMonitor = new SearchTableMonitor(monitor); try (DBCSession session = DBUtils.openUtilSession(searchMonitor, dataContainer, "Search rows in " + objectName)) { TestDataReceiver dataReceiver = new TestDataReceiver(searchMonitor); try { findRows(session, dataContainer, dataReceiver); } catch (DBCException e) { // Search failed in some container - just write an error in log. // We don't want to break whole search because of one single table. log.debug("Fulltext search failed in '" + dataContainer.getName() + "'", e); } if (dataReceiver.rowCount > 0) { SearchDataObject object = new SearchDataObject(node, dataReceiver.rowCount, dataReceiver.filter); searchResult.addObjects(Collections.singletonList(object)); return true; } } catch (DBCException e) { log.error("Error searching data in container", e); } return false; } private DBCStatistics findRows( @NotNull DBCSession session, @NotNull DBSDataContainer dataContainer, @NotNull TestDataReceiver dataReceiver) throws DBCException { DBSEntity entity; if (dataContainer instanceof DBSEntity) { entity = (DBSEntity) dataContainer; } else { log.warn("Data container " + dataContainer + " isn't entity"); return null; } try { List<DBDAttributeConstraint> constraints = new ArrayList<>(); for (DBSEntityAttribute attribute : CommonUtils.safeCollection(entity.getAttributes(session.getProgressMonitor()))) { if (params.fastSearch) { if (DBUtils.findAttributeIndex(session.getProgressMonitor(), attribute) == null) { continue; } } if (DBUtils.isPseudoAttribute(attribute) || DBUtils.isHiddenObject(attribute)) { continue; } 
DBCLogicalOperator[] supportedOperators = DBUtils.getAttributeOperators(attribute); DBCLogicalOperator operator; Object value; switch (attribute.getDataKind()) { case BOOLEAN: continue; case NUMERIC: if (!params.searchNumbers) { continue; } if (!ArrayUtils.contains(supportedOperators, DBCLogicalOperator.EQUALS)) { continue; } operator = DBCLogicalOperator.EQUALS; try { value = Integer.valueOf(params.searchString); } catch (NumberFormatException e) { try { value = Long.valueOf(params.searchString); } catch (NumberFormatException e1) { try { value = Double.valueOf(params.searchString); } catch (NumberFormatException e2) { try { value = new BigDecimal(params.searchString); } catch (Exception e3) { // Not a number continue; } } } } break; case CONTENT: case BINARY: if (!params.searchLOBs) { continue; } case STRING: // Do not check value length. Some columns may be compressed/compacted/have special data type and thus have length < than value length. // if (attribute.getMaxLength() > 0 && attribute.getMaxLength() < params.searchString.length()) { // continue; // } if (!params.isCaseSensitive() && ArrayUtils.contains(supportedOperators, DBCLogicalOperator.ILIKE)) { operator = DBCLogicalOperator.ILIKE; value = "%" + params.searchString + "%"; } else if (ArrayUtils.contains(supportedOperators, DBCLogicalOperator.LIKE)) { operator = DBCLogicalOperator.LIKE; value = "%" + params.searchString + "%"; } else if (ArrayUtils.contains(supportedOperators, DBCLogicalOperator.EQUALS)) { operator = DBCLogicalOperator.EQUALS; value = params.searchString; } else { continue; } break; default: { // Try to convert string to attribute type // On success search by exact match if (!ArrayUtils.contains(supportedOperators, DBCLogicalOperator.EQUALS)) { continue; } String typeName = attribute.getTypeName(); if (typeName.equals(DBConstants.TYPE_NAME_UUID) || typeName.equals(DBConstants.TYPE_NAME_UUID2)) { try { UUID uuid = UUID.fromString(params.searchString); operator = DBCLogicalOperator.EQUALS; 
value = uuid.toString(); } catch (Exception e) { // No a UUID continue; } } else { continue; } } } DBDAttributeConstraint constraint = new DBDAttributeConstraint(attribute, constraints.size()); constraint.setOperator(operator); constraint.setValue(value); constraint.setVisible(true); constraints.add(constraint); } if (constraints.isEmpty()) { return null; } dataReceiver.filter = new DBDDataFilter(constraints); dataReceiver.filter.setAnyConstraint(true); DBCExecutionSource searchSource = new AbstractExecutionSource(dataContainer, session.getExecutionContext(), this); return dataContainer.readData(searchSource, session, dataReceiver, dataReceiver.filter, -1, -1, 0, 0); } catch (DBException e) { throw new DBCException("Error finding rows", e); } } public static SearchDataQuery createQuery(SearchDataParams params) throws DBException { return new SearchDataQuery(params); } private class SearchTableMonitor extends VoidProgressMonitor { private DBRProgressMonitor baseMonitor; private volatile boolean canceled; private SearchTableMonitor(DBRProgressMonitor monitor) { this.baseMonitor = monitor; } @Override public boolean isCanceled() { return canceled || baseMonitor.isCanceled(); } } private class TestDataReceiver implements DBDDataReceiver { private SearchTableMonitor searchMonitor; private int rowCount = 0; private DBDDataFilter filter; TestDataReceiver(SearchTableMonitor searchMonitor) { this.searchMonitor = searchMonitor; } @Override public void fetchStart(DBCSession session, DBCResultSet resultSet, long offset, long maxRows) throws DBCException { } @Override public void fetchRow(DBCSession session, DBCResultSet resultSet) throws DBCException { rowCount++; if (rowCount >= params.maxResults) { searchMonitor.canceled = true; } } @Override public void fetchEnd(DBCSession session, DBCResultSet resultSet) throws DBCException { } @Override public void close() { } } }
apache-2.0
atlasapi/atlas-deer
atlas-api/src/main/java/org/atlasapi/www/filter/RequestLoggingFilter.java
3038
package org.atlasapi.www.filter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.web.filter.OncePerRequestFilter; import javax.servlet.FilterChain; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponseWrapper; import java.io.IOException; import java.time.Duration; import java.time.Instant; public class RequestLoggingFilter extends OncePerRequestFilter { private static final Logger log = LoggerFactory.getLogger(RequestLoggingFilter.class); @Override protected void doFilterInternal( HttpServletRequest req, HttpServletResponse resp, FilterChain chain ) throws IOException, ServletException { // Hack to allow extraction of the status code. In newer versions of javax.servlet-api (3.0+) this will no // longer be necessary. StatusExposingServletResponse statusExposingServletResponse = new StatusExposingServletResponse(resp); Instant start = Instant.now(); try { chain.doFilter(req, statusExposingServletResponse); } finally { Instant finish = Instant.now(); long time = Duration.between(start, finish).toMillis(); String fullRequestURI = fullRequestURI(req); log.info("{} {} {} {}ms", req.getMethod(), fullRequestURI, statusExposingServletResponse.getStatus(), time); } } private String fullRequestURI(HttpServletRequest request) { StringBuilder requestURL = new StringBuilder(request.getRequestURI()); String queryString = request.getQueryString(); if (queryString == null) { return requestURL.toString(); } else { return requestURL.append('?').append(queryString).toString(); } } private static class StatusExposingServletResponse extends HttpServletResponseWrapper { private int httpStatus = SC_OK; public StatusExposingServletResponse(HttpServletResponse response) { super(response); } public int getStatus() { return httpStatus; } @Override public void setStatus(int sc) { httpStatus = sc; super.setStatus(sc); } @Override public 
void setStatus(int status, String string) { super.setStatus(status, string); this.httpStatus = status; } @Override public void sendError(int sc) throws IOException { httpStatus = sc; super.sendError(sc); } @Override public void sendError(int sc, String msg) throws IOException { httpStatus = sc; super.sendError(sc, msg); } @Override public void sendRedirect(String location) throws IOException { httpStatus = SC_MOVED_TEMPORARILY; super.sendRedirect(location); } } }
apache-2.0
brunocvcunha/mdmconnectors
mdm-client-send-data/src/main/java/com/totvslabs/mdm/client/ui/PanelAbstract.java
4759
package com.totvslabs.mdm.client.ui; import java.awt.Color; import java.awt.Component; import java.text.ParseException; import java.util.ArrayList; import java.util.List; import javax.swing.BorderFactory; import javax.swing.JComponent; import javax.swing.JPanel; import javax.swing.text.MaskFormatter; import com.jgoodies.forms.layout.CellConstraints; import com.jgoodies.forms.layout.FormLayout; import com.totvslabs.mdm.client.pojoTSA.MasterConfigurationData; public abstract class PanelAbstract extends JPanel { private static final long serialVersionUID = 1L; protected CellConstraints cc; private FormLayout layoutGeneral; private Integer actualRow = 2; protected final List<JComponent> fieldsToNotEnable = new ArrayList<JComponent>(); protected final List<JComponent> fieldsToNotDisable = new ArrayList<JComponent>(); public PanelAbstract(int colNumber, int rowNumber, String borderName){ this.setCc(new CellConstraints()); this.setLayoutGeneral(new FormLayout( "30px, " + this.generateStringLayout(colNumber, 300) + ", 30px", "10px, " + this.generateStringLayout(rowNumber, 21) + ", 10px" )); this.setLayout(this.getLayoutGeneral()); // this.setBackground(Color.GREEN); this.setBorder( BorderFactory.createTitledBorder(borderName) ); } public static MaskFormatter getMaskFormatter(String mask) { MaskFormatter maskFormatter = null; try { maskFormatter = new MaskFormatter(mask); maskFormatter.setPlaceholderCharacter('_'); } catch (ParseException e) { } return maskFormatter; } private String generateStringLayout(int number, int size) { StringBuilder sb = new StringBuilder(); for(int i=0; i<number; i++) { sb.append(size); if(i<number+1) { sb.append(", "); } } return sb.toString(); } protected void add(JComponent[] components, int rowSpan) { if(components != null) { int colNumber = 2; for (JComponent component : components) { this.add(component, colNumber, false, rowSpan); colNumber++; } this.actualRow = rowSpan + this.actualRow; } } protected void add(JComponent[] components) { 
this.add(components, 1); } public abstract void initializeLayout(); public abstract void fillComponents(MasterConfigurationData masterConfigurationData); public abstract void fillData(MasterConfigurationData masterConfigurationData); public void disableFields() { disableFields(this); } public void enableDisableField(JComponent component, boolean enable) { component.setEnabled(enable); if(enable) { this.fieldsToNotEnable.remove(component); } else { this.fieldsToNotEnable.add(component); } } public void disableFields(JPanel panel) { Component[] components = panel.getComponents(); if(components != null) { for (Component component : components) { if(component instanceof JPanel) { disableFields((JPanel) component); } else { processEnableDisableComponent(component, false); } } } } public void enableFields() { enableFields(this); } private void enableFields(JPanel panel) { Component[] components = panel.getComponents(); if(components != null) { for (Component component : components) { if(component instanceof JPanel) { enableFields((JPanel) component); } else { processEnableDisableComponent(component, true); } } } } private void processEnableDisableComponent(Component component, boolean enable) { if((!enable && !fieldsToNotDisable.contains(component)) || (enable && !fieldsToNotEnable.contains(component))) { component.setEnabled(enable); } } protected void add(JComponent component) { this.add(component, 2); } protected void add(JComponent component, int colNumber) { this.add(component, colNumber, true); } protected void add(JComponent component, int colNumber, boolean addRow) { this.add(component, colNumber, addRow, 1); } protected void add(final JComponent component, int colNumber, boolean addRow, int rowSpan, int colSpan) { this.add(component, this.cc.xywh(colNumber, this.actualRow, colSpan, rowSpan)); if(addRow) { this.actualRow = this.actualRow + rowSpan; } } protected void add(final JComponent component, int colNumber, boolean addRow, int rowSpan) { this.add(component, 
colNumber, addRow, rowSpan, 1); } public CellConstraints getCc() { return cc; } public void setCc(CellConstraints cc) { this.cc = cc; } public FormLayout getLayoutGeneral() { return layoutGeneral; } public void setLayoutGeneral(FormLayout layoutGeneral) { this.layoutGeneral = layoutGeneral; } }
apache-2.0
yaobanglin/wpan
app/src/main/java/com/xinyu/mwp/entity/WithDrawCashReturnEntity.java
2513
package com.xinyu.mwp.entity; /** * Created by Administrator on 2017/3/13. */ public class WithDrawCashReturnEntity extends BaseEntity { private long wid; private int id; private double charge; private double amount; private String withdrawTime; private String handleTime; private String bank; private String branchBank; private String province; private String city; private String cardNo; private String name; private String comment; private int status; public long getWid() { return wid; } public void setWid(long wid) { this.wid = wid; } public int getId() { return id; } public void setId(int id) { this.id = id; } public double getCharge() { return charge; } public void setCharge(double charge) { this.charge = charge; } public double getAmount() { return amount; } public void setAmount(double amount) { this.amount = amount; } public String getWithdrawTime() { return withdrawTime; } public void setWithdrawTime(String withdrawTime) { this.withdrawTime = withdrawTime; } public String getHandleTime() { return handleTime; } public void setHandleTime(String handleTime) { this.handleTime = handleTime; } public String getBank() { return bank; } public void setBank(String bank) { this.bank = bank; } public String getBranchBank() { return branchBank; } public void setBranchBank(String branchBank) { this.branchBank = branchBank; } public String getProvince() { return province; } public void setProvince(String province) { this.province = province; } public String getCity() { return city; } public void setCity(String city) { this.city = city; } public String getCardNo() { return cardNo; } public void setCardNo(String cardNo) { this.cardNo = cardNo; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getComment() { return comment; } public void setComment(String comment) { this.comment = comment; } public int getStatus() { return status; } public void setStatus(int status) { this.status = status; } }
apache-2.0
romainmoreau/e-paper
e-paper-client-api/src/main/java/fr/romainmoreau/epaper/client/api/EPaperResponseException.java
681
package fr.romainmoreau.epaper.client.api; import java.nio.charset.StandardCharsets; public class EPaperResponseException extends EPaperException { private static final long serialVersionUID = 1L; public EPaperResponseException(byte[] response) { super(getMessage(response)); } public EPaperResponseException(byte[] response, Exception cause) { super(getMessage(response), cause); } public EPaperResponseException(String message) { super(message); } public EPaperResponseException(String message, Exception cause) { super(message, cause); } private static String getMessage(byte[] response) { return new String(response, StandardCharsets.US_ASCII); } }
apache-2.0
garydgregory/jcommander-addons
src/test/java/com/garygregory/jcommander/converters/time/LocalTimeConverterTest.java
1412
/** * Copyright (C) 2016 Gary Gregory. All rights reserved. * * See the NOTICE.txt file distributed with this work for additional * information regarding copyright ownership. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.garygregory.jcommander.converters.time; import java.time.LocalTime; import org.junit.Test; import com.garygregory.jcommander.converters.AbstractStringConverterBasicTest; /** * Tests {@link LocalTimeConverter}. * * @since 1.0.0 * @author <a href="mailto:ggregory@garygregory.com">Gary Gregory</a> */ public class LocalTimeConverterTest extends AbstractStringConverterBasicTest<LocalTime> { public LocalTimeConverterTest() { super(new LocalTimeConverter(null)); } @Test public void testLocalTime() { testRoundtrip(LocalTime.parse("10:15:30")); } }
apache-2.0
elnin0815/EGKfeuer
src/test/java/de/gecko/egkfeuer/model/ekg/v52/ToEgkPatientConverterV52Test.java
1943
package de.gecko.egkfeuer.model.ekg.v52; import de.gecko.egkfeuer.model.EgkPatient; import de.gecko.egkfeuer.model.ekg.ToPatientConverter; import org.apache.commons.io.IOUtils; import org.junit.Before; import org.junit.Test; import java.nio.file.Files; import java.nio.file.Paths; import static org.junit.Assert.*; public class ToEgkPatientConverterV52Test { private ToPatientConverter converter; @Before public void before() { converter = new ToPatientConverterV52(); } @Test public void testToPatient() throws Exception { String pdContent = IOUtils.toString(Files.newInputStream(Paths.get("src/test/resources/egk/v52.pd.xml")), "ISO-8859-15"); String vdContent = IOUtils.toString(Files.newInputStream(Paths.get("src/test/resources/egk/v52.vd.xml")), "ISO-8859-15"); assertTrue(converter.isPdCompatible(pdContent)); assertTrue(converter.isVdCompatible(vdContent)); EgkPatient patient = converter.toPatient(pdContent, vdContent); assertNotNull(patient); } @Test(expected = IllegalArgumentException.class) public void testToPatientWrongVersionPd() throws Exception { String pdContent = IOUtils.toString(Files.newInputStream(Paths.get("src/test/resources/egk/v51.pd.xml")), "ISO-8859-15"); String vdContent = IOUtils.toString(Files.newInputStream(Paths.get("src/test/resources/egk/v52.vd.xml")), "ISO-8859-15"); assertFalse(converter.isPdCompatible(pdContent)); converter.toPatient(pdContent, vdContent); } @Test(expected = IllegalArgumentException.class) public void testToPatientWrongVersionVd() throws Exception { String pdContent = IOUtils.toString(Files.newInputStream(Paths.get("src/test/resources/egk/v52.pd.xml")), "ISO-8859-15"); String vdContent = IOUtils.toString(Files.newInputStream(Paths.get("src/test/resources/egk/v51.vd.xml")), "ISO-8859-15"); assertFalse(converter.isVdCompatible(vdContent)); converter.toPatient(pdContent, vdContent); } }
apache-2.0
caskdata/cdap
cdap-spark-core/src/main/java/co/cask/cdap/app/runtime/spark/SparkProgramRuntimeProvider.java
7842
/*
 * Copyright © 2016 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package co.cask.cdap.app.runtime.spark;

import co.cask.cdap.app.runtime.ProgramRunner;
import co.cask.cdap.app.runtime.ProgramRuntimeProvider;
import co.cask.cdap.app.runtime.spark.distributed.DistributedSparkProgramRunner;
import co.cask.cdap.common.conf.CConfiguration;
import co.cask.cdap.common.conf.Constants;
import co.cask.cdap.common.lang.ClassLoaders;
import co.cask.cdap.proto.ProgramType;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.io.Closeables;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.ProvisionException;

import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Type;
import java.net.URL;

/**
 * A {@link ProgramRuntimeProvider} that provides runtime system support for {@link ProgramType#SPARK} program.
 * This class shouldn't have dependency on Spark classes.
 */
@ProgramRuntimeProvider.SupportedProgramType(ProgramType.SPARK)
public class SparkProgramRuntimeProvider implements ProgramRuntimeProvider {

  // URLs captured from the first SparkRunnerClassLoader built; reused by
  // createClassLoader() so later class loaders skip the classpath scan.
  private URL[] classLoaderUrls;

  @Override
  public ProgramRunner createProgramRunner(ProgramType type, Mode mode, Injector injector) {
    Preconditions.checkArgument(type == ProgramType.SPARK, "Unsupported program type %s. Only %s is supported", type, ProgramType.SPARK);
    switch (mode) {
      case LOCAL:
        // Rewrite YarnClient based on config. The LOCAL runner is used in both SDK and distributed mode
        // The actual mode that Spark is running is determined by the cdap.spark.cluster.mode attribute
        // in the hConf
        boolean rewriteYarnClient = injector.getInstance(CConfiguration.class)
          .getBoolean(Constants.AppFabric.SPARK_YARN_CLIENT_REWRITE);
        return createSparkProgramRunner(injector, SparkProgramRunner.class.getName(), rewriteYarnClient, true);
      case DISTRIBUTED:
        // The distributed program runner is only used by the CDAP master to launch the twill container
        // hence it doesn't need to do any class rewrite.
        return createSparkProgramRunner(injector, DistributedSparkProgramRunner.class.getName(), false, false);
      default:
        throw new IllegalArgumentException("Unsupported Spark execution mode " + mode);
    }
  }

  /**
   * Creates a {@link ProgramRunner} that execute Spark program from the given {@link Injector}.
   *
   * @param injector the Guice injector supplying CDAP system instances
   * @param programRunnerClassName fully-qualified name of the runner class, loaded through
   *                               the freshly created {@link SparkRunnerClassLoader}
   * @param rewriteYarnClient whether the class loader should rewrite YarnClient bytecode
   * @param rewriteDStreamGraph whether the class loader should rewrite the DStream graph classes
   */
  private ProgramRunner createSparkProgramRunner(Injector injector, String programRunnerClassName,
                                                 boolean rewriteYarnClient, boolean rewriteDStreamGraph) {
    try {
      SparkRunnerClassLoader classLoader = createClassLoader(rewriteYarnClient, rewriteDStreamGraph);
      try {
        // Make the Spark class loader the thread context loader for the duration of the
        // instantiation; restored in the finally below.
        ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(classLoader);
        try {
          // Closing of the SparkRunnerClassLoader is done by the SparkProgramRunner when the program execution finished
          // The current CDAP call run right after it get a ProgramRunner and never reuse a ProgramRunner.
          // TODO: CDAP-5506 to refactor the program runtime architecture to remove the need of this assumption
          return createInstance(injector, classLoader.loadClass(programRunnerClassName), classLoader);
        } finally {
          ClassLoaders.setContextClassLoader(oldClassLoader);
        }
      } catch (Throwable t) {
        // If there is any exception, close the SparkRunnerClassLoader
        Closeables.closeQuietly(classLoader);
        throw t;
      }
    } catch (Throwable t) {
      throw Throwables.propagate(t);
    }
  }

  /**
   * Create a new instance of the given {@link Type} from the given {@link Injector}. This method
   * is doing Guice injection manually through the @Inject constructor to avoid ClassLoader leakage
   * due to the just-in-time binding map inside the Guice Injector that holds a strong reference to the type,
   * hence the ClassLoader of that type
   *
   * @param injector The Guice Injector for acquiring CDAP system instances
   * @param type the {@link Class} of the instance to create
   * @param sparkClassLoader the class loader whose classes must be instantiated recursively
   *                         by this method rather than through the injector
   * @return a new instance of the given {@link Type}
   */
  private <T> T createInstance(Injector injector, Type type, ClassLoader sparkClassLoader) throws Exception {
    Key<?> typeKey = Key.get(type);
    @SuppressWarnings("unchecked")
    Class<T> rawType = (Class<T>) typeKey.getTypeLiteral().getRawType();

    Constructor<T> constructor = findInjectableConstructor(rawType);
    constructor.setAccessible(true);

    // Acquire the instances for each parameter for the constructor
    Type[] paramTypes = constructor.getGenericParameterTypes();
    Object[] args = new Object[paramTypes.length];
    int i = 0;
    for (Type paramType : paramTypes) {
      Key<?> paramTypeKey = Key.get(paramType);

      // If the classloader of the parameter is the same as the Spark ClassLoader, we need to create the
      // instance manually instead of getting through the Guice Injector to avoid ClassLoader leakage
      if (paramTypeKey.getTypeLiteral().getRawType().getClassLoader() == sparkClassLoader) {
        args[i++] = createInstance(injector, paramType, sparkClassLoader);
      } else {
        args[i++] = injector.getInstance(paramTypeKey);
      }
    }
    return constructor.newInstance(args);
  }

  /**
   * Finds the constructor of the given type that is suitable for Guice injection. If the given type has
   * a constructor annotated with {@link Inject}, then it will be returned. Otherwise, the default constructor
   * will be returned.
   *
   * @throws ProvisionException if failed to locate a constructor for the injection
   */
  @SuppressWarnings("unchecked")
  private <T> Constructor<T> findInjectableConstructor(Class<T> type) throws ProvisionException {
    for (Constructor<?> constructor : type.getDeclaredConstructors()) {
      // Find the @Inject constructor
      if (constructor.isAnnotationPresent(Inject.class)) {
        return (Constructor<T>) constructor;
      }
    }

    // If no @Inject constructor, use the default constructor
    try {
      return type.getDeclaredConstructor();
    } catch (NoSuchMethodException e) {
      throw new ProvisionException("No constructor is annotated with @Inject and there is no default constructor", e);
    }
  }

  /**
   * Creates a new {@link SparkRunnerClassLoader}. The URLs scanned by the first loader are
   * cached in {@link #classLoaderUrls} and reused for subsequent loaders. Synchronized to
   * guard the lazy initialization of that cache.
   */
  private synchronized SparkRunnerClassLoader createClassLoader(boolean rewriteYarnClient,
                                                                boolean rewriteDStreamGraph) throws IOException {
    SparkRunnerClassLoader classLoader;
    if (classLoaderUrls == null) {
      classLoader = new SparkRunnerClassLoader(getClass().getClassLoader(), rewriteYarnClient, rewriteDStreamGraph);
      classLoaderUrls = classLoader.getURLs();
    } else {
      classLoader = new SparkRunnerClassLoader(classLoaderUrls, getClass().getClassLoader(),
                                               rewriteYarnClient, rewriteDStreamGraph);
    }
    return classLoader;
  }
}
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-marketplacemeteringservice/src/main/java/com/amazonaws/services/marketplacemetering/package-info.java
3011
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ /** * <fullname>AWS Marketplace Metering Service</fullname> * <p> * This reference provides descriptions of the low-level AWS Marketplace Metering Service API. * </p> * <p> * AWS Marketplace sellers can use this API to submit usage data for custom usage dimensions. * </p> * <p> * <b>Submitting Metering Records</b> * </p> * <ul> * <li> * <p> * <i>MeterUsage</i>- Submits the metering record for a Marketplace product. MeterUsage is called from an EC2 instance. * </p> * </li> * <li> * <p> * <i>BatchMeterUsage</i>- Submits the metering record for a set of customers. BatchMeterUsage is called from a * software-as-a-service (SaaS) application. * </p> * </li> * </ul> * <p> * <b>Accepting New Customers</b> * </p> * <ul> * <li> * <p> * <i>ResolveCustomer</i>- Called by a SaaS application during the registration process. When a buyer visits your * website during the registration process, the buyer submits a Registration Token through the browser. The Registration * Token is resolved through this API to obtain a CustomerIdentifier and Product Code. * </p> * </li> * </ul> * <p> * <b>Entitlement and Metering for Paid Container Products</b> * </p> * <ul> * <li> * <p> * Paid container software products sold through AWS Marketplace must integrate with the AWS Marketplace Metering * Service and call the RegisterUsage operation for software entitlement and metering. 
Calling RegisterUsage from * containers running outside of Amazon Elastic Container Service (Amazon ECS) isn't supported. Free and BYOL products * for ECS aren't required to call RegisterUsage, but you can do so if you want to receive usage data in your seller * reports. For more information on using the RegisterUsage operation, see <a * href="https://docs.aws.amazon.com/marketplace/latest/userguide/container-based-products.html">Container-Based * Products</a>. * </p> * </li> * </ul> * <p> * BatchMeterUsage API calls are captured by AWS CloudTrail. You can use CloudTrail to verify that the SaaS metering * records that you sent are accurate by searching for records with the eventName of BatchMeterUsage. You can also use * CloudTrail to audit records over time. For more information, see the <i> <a * href="http://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-concepts.html">AWS CloudTrail User * Guide</a> </i>. * </p> */ package com.amazonaws.services.marketplacemetering;
apache-2.0
anjzhen/jbbs
src/main/java/cc/ymee/jbbs/module/record/package-info.java
174
/**
 * Records user logins and user operation (audit) logs.
 * <p>Created 14/11/17 1:52 PM</p>
 *
 * @author anjero
 * @version 1.0
 */
package cc.ymee.jbbs.module.record;
apache-2.0
resty-gwt/resty-gwt
restygwt/src/test/java/org/fusesource/restygwt/mocking/CachingCallbackFilterTestCase.java
4458
/**
 * Copyright (C) 2009-2012 the original author or authors.
 * See the notice.md file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.fusesource.restygwt.mocking;

import com.google.gwt.http.client.RequestBuilder;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.Response;
import com.google.gwt.junit.GWTMockUtilities;

import java.util.HashMap;

import junit.framework.TestCase;

import org.easymock.EasyMock;
import org.fusesource.restygwt.client.Method;
import org.fusesource.restygwt.client.cache.CacheKey;
import org.fusesource.restygwt.client.cache.DefaultQueueableCacheStorage;
import org.fusesource.restygwt.client.cache.SimpleCacheKey;
import org.fusesource.restygwt.client.callback.CachingCallbackFilter;

/**
 * Unit tests for {@link CachingCallbackFilter}: verifies that successful
 * responses are written into the cache storage, error responses are not,
 * and queued callbacks are wrapped and flushed on completion.
 */
public class CachingCallbackFilterTestCase extends TestCase {

    // Filter under test; its cacheKey() is overridden to always return {@link #key}.
    private CachingCallbackFilter filter;

    // Real (non-mock) cache storage inspected by the assertions.
    private DefaultQueueableCacheStorage storage;

    // Fixed cache key used for every request in these tests.
    private SimpleCacheKey key;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Allow GWT classes (Response etc.) to be mocked outside a GWT module.
        GWTMockUtilities.disarm();
        storage = new DefaultQueueableCacheStorage();
        key = new SimpleCacheKey("key");
        final CacheKey k = key;
        filter = new CachingCallbackFilter(storage) {
            @Override
            protected CacheKey cacheKey(RequestBuilder builder) {
                // Ignore the builder; tests always operate on the same key.
                return k;
            }
        };
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
        // Re-arm GWT so later (non-mocking) tests behave normally.
        GWTMockUtilities.restore();
    }

    // A 201 response with no queued callbacks must be stored in the cache.
    public void testNoCallbacksSuccess() throws Exception {
        Response response = EasyMock.createMock(Response.class);
        Method method = EasyMock.createMock(Method.class);
        EasyMock.expect(response.getStatusCode()).andReturn(201);
        EasyMock.expect(method.getData()).andReturn(new HashMap<String, String>());
        EasyMock.replay(response, method);

        filter.filter(method, response, null);

        EasyMock.verify(response, method);
        // hashCode should be good enough
        assertEquals(response.hashCode(), storage.getResultOrReturnNull(key).hashCode());
    }

    // A 401 response must NOT be cached.
    public void testNoCallbacksError() throws Exception {
        Response response = EasyMock.createMock(Response.class);
        Method method = EasyMock.createMock(Method.class);
        EasyMock.expect(response.getStatusCode()).andReturn(401);
        EasyMock.replay(response, method);

        filter.filter(method, response, null);

        EasyMock.verify(response, method);
        assertNull(storage.getResultOrReturnNull(key));
    }

    // With several callbacks queued for the same key, the filter returns a
    // wrapper callback; invoking it must fan out to every queued callback
    // and the 200 response must end up in the cache.
    public void testManyCallbacksSuccess() throws Exception {
        Response response = EasyMock.createMock(Response.class);
        Method method = EasyMock.createMock(Method.class);
        EasyMock.expect(method.getData()).andReturn(new HashMap<String, String>());

        RequestCallback[] myCallbacks = new RequestCallback[4];
        for (int i = 0; i < myCallbacks.length; i++) {
            myCallbacks[i] = EasyMock.createMock(RequestCallback.class);
            // Each queued callback is expected to be invoked exactly once.
            myCallbacks[i].onResponseReceived(null, null);
            EasyMock.replay(myCallbacks[i]);
        }
        EasyMock.expect(response.getStatusCode()).andReturn(200);
        EasyMock.replay(response, method);

        for (RequestCallback myCallback : myCallbacks) {
            storage.addCallback(key, myCallback);
        }

        RequestCallback callback = filter.filter(method, response, myCallbacks[0]);
        // The filter must hand back a wrapper, not the original callback.
        assertNotSame(callback, myCallbacks[0]);
        callback.onResponseReceived(null, null);

        EasyMock.verify(response, method);
        for (RequestCallback rc : myCallbacks) {
            EasyMock.verify(rc);
        }
        // hashCode should be good enough
        assertEquals(response.hashCode(), storage.getResultOrReturnNull(key).hashCode());
    }
}
apache-2.0
SeMorgana/twitterCrawler
GetUser.java
8176
/*
 * Collecting Twitter user account info and persisting it to a database table.
 *
 * @author: morgan
 * @date created: 5/2/2013
 * @last changed:
 */

import twitter4j.auth.AccessToken;
import twitter4j.*;
import java.io.*;
import java.sql.*;
import java.util.*;

public class GetUser{

    // Shared database handle (project class); holds the Statement, the column
    // name array (sattr) and a reusable row map (mdata).
    public static DatabaseManager dm;
    // OAuth credential strings, one per line: "consumerKey consumerSecret token tokenSecret".
    public static List<String> al_tokens;
    // Remaining user ids to fetch; consumed from the front as rows are written.
    public static List<String> al_ids;
    // Destination table name (first entry of al_tables passed to start()).
    public static String table_name_user;

    /**
     * Fetches one user via twitter4j and INSERTs a row into table_name_user.
     *
     * Returns true when the id can be removed from the work queue: the row was
     * written, OR the insert hit a SQLException (deliberately treated as done,
     * e.g. duplicate primary key), OR Twitter answered 404/403 for the user.
     * Returns false on any other error so the id will be retried.
     *
     * NOTE(review): the INSERT is built by string concatenation. Values are
     * escaped via replace() below, but a PreparedStatement would be the robust
     * fix — requires access to the Connection inside DatabaseManager; verify.
     */
    public static boolean writeResponseTo(long user_id, Twitter twitter){
        // [0] = id, [1] = screen name; only for the error log below.
        String[] userErrorInfo = new String[2];
        try{
            User user = twitter.showUser(user_id);
            dm.mdata.put("id", String.valueOf(user.getId()) );
            userErrorInfo[0] = String.valueOf(user.getId());
            dm.mdata.put("name", user.getName());
            dm.mdata.put("screen_name",user.getScreenName());
            userErrorInfo[1] = user.getScreenName();
            dm.mdata.put("friends_count", String.valueOf(user.getFriendsCount()) );
            dm.mdata.put("followers_count", String.valueOf(user.getFollowersCount()) );
            dm.mdata.put("location", user.getLocation());
            dm.mdata.put("created_at", user.getCreatedAt().toString());
            dm.mdata.put("favourites_count", String.valueOf(user.getFavouritesCount()) );
            dm.mdata.put("time_zone", user.getTimeZone());
            dm.mdata.put("statuses_count", String.valueOf(user.getStatusesCount()) );
            dm.mdata.put("lang", user.getLang());
            dm.mdata.put("url", user.getURL());
            dm.mdata.put("description", user.getDescription());
            dm.mdata.put("utc_offset", String.valueOf(user.getUtcOffset()) );
            dm.mdata.put("verified", String.valueOf(user.isVerified()) );
            dm.mdata.put("geo_enabled", String.valueOf(user.isGeoEnabled()) );
            dm.mdata.put("protected", String.valueOf(user.isProtected()));
            dm.mdata.put("contributors_enabled", String.valueOf(user.isContributorsEnabled()));
            dm.mdata.put("listed_count", String.valueOf(user.getListedCount()));
            dm.mdata.put("is_translator", String.valueOf(user.isTranslator()));
            dm.mdata.put("profile_image_url", user.getBiggerProfileImageURL());
            dm.mdata.put("profile_background_image_url", user.getProfileBackgroundImageURL());
            // Epoch millis at collection time.
            dm.mdata.put("time_collected", String.valueOf((new java.util.Date()).getTime()) );

            // Account age in (365-day) years, formatted to two decimals.
            java.util.Date date1 = user.getCreatedAt();
            java.util.Date date2 = new java.util.Date();
            long difference = date2.getTime() - date1.getTime();
            double days = difference / (1000.0*3600*24);
            double years = days / 365.0;
            dm.mdata.put("account_age", String.format("%.2f", years)+"years" );

            if( !dm.mdata.get("id").equals("") ){
                // Build "INSERT DELAYED <table> (<cols>) values (<vals>);",
                // skipping columns whose collected value is null or empty.
                String query = "INSERT DELAYED "+table_name_user+" (";//\'" + dm.mdata.get("id") + "\');"
                String headers = dm.sattr[0];
                String values = "(\'" + dm.mdata.get(dm.sattr[0]) + "\'";
                for( int i=1;i<dm.sattr.length;i++ ){
                    if( dm.mdata.get(dm.sattr[i]) != null && !dm.mdata.get(dm.sattr[i]).equals("") ){//null becomes "" ?
                        headers += ("," + dm.sattr[i]);
                        // Escape backslashes and single quotes for the SQL literal.
                        values += (",\'" +dm.mdata.get(dm.sattr[i]).replace("\\","\\\\").replace("'","\\'") + "\'" );
                    }
                }
                headers += ") ";
                values += ");";
                query = query + headers + " values " + values;
                dm.statement.execute(query);
                //System.out.println(dm.mdata.get(dm.sattr[0]));
            }
        }
        catch (SQLException sqle){
            System.err.println("sqlError in GetUser:"+ sqle.getMessage());
            return true;//including primary key conflicts
        }
        catch (Exception e){
            // Twitter/other failure: decide by the HTTP status prefix of the message.
            String error_str = e.getMessage();
            System.err.println("error in GetUser:"+ error_str);
            System.err.println("user id: " + userErrorInfo[0] + "\tuser screen name: " + userErrorInfo[1]);
            if(error_str.startsWith("404")){return true;}//if no this user returns true
            else if(error_str.startsWith("403")){return true;}//forbidden (e.g. suspended) -- also drop the id
            else{return false;}
        }
        // Progress report: current row count of the destination table.
        try{
            ResultSet rs = dm.statement.executeQuery("select count(*) from "+table_name_user);
            rs.next();
            System.out.println("Now there are :" + rs.getObject(1) + " rows in "+table_name_user);
        }
        catch (Exception e){
            System.err.println("error in GetUser:"+ e.getMessage());
            return false;
        }
        return true;
    }

    /**
     * Main crawl loop: connects to the database, then round-robins over the
     * OAuth tokens, fetching one user id (head of al_ids) per iteration.
     * When every token is near its rate limit, sleeps 10 minutes and resumes.
     *
     * @param l_tokens OAuth credential lines (space-separated fields)
     * @param l_ids user ids to fetch
     * @param user_attrs column names for DatabaseManager
     * @param db_attrs database connection attributes (host/db/user/password/...; exact
     *                 order defined by DatabaseManager -- TODO confirm)
     * @param num_primary_key number of primary key columns (per DatabaseManager)
     * @param al_tables table names; only the first entry is used here
     */
    public static void start(List<String> l_tokens, List<String> l_ids, String[] user_attrs,
            String[] db_attrs, int num_primary_key, List<String> al_tables) throws Exception{
        al_tokens = l_tokens;
        al_ids = l_ids;
        table_name_user = al_tables.get(0); // the first one
        dm = new DatabaseManager(db_attrs[0],db_attrs[1],db_attrs[2],db_attrs[3],db_attrs[4],user_attrs,num_primary_key );
        dm.connect();
        dm.getStatement();
        dm.initMap();
        //Random rd = new Random();
        // Indices of tokens currently near their rate limit.
        Set<Integer> limitReachingTokenIndeces = new HashSet<Integer>();
        //int not_work=0;
        while(al_ids.size()>0){
            System.out.println("Now the size of the remaining ids is: "+al_ids.size());
            dm.initMap();
            //boolean work=false;
            for(int i=0;i<al_tokens.size();i++){
                try{
                    Twitter twitter = new TwitterFactory().getInstance();
                    String[] tokens = al_tokens.get(i).split(" ");
                    twitter.setOAuthConsumer(tokens[0], tokens[1]);//tokens[0]:consumer token; toknes[1]:consumer secret
                    AccessToken accessToken = new AccessToken(tokens[2], tokens[3]);
                    twitter.setOAuthAccessToken(accessToken);
                    //check the limit here
                    Map<String,RateLimitStatus> m = twitter.getRateLimitStatus();
                    Set<String> s = m.keySet(); // NOTE(review): unused
                    RateLimitStatus rls1 = m.get("/statuses/user_timeline");
                    RateLimitStatus rls2 = m.get("/users/show/:id");
                    int user_timeline_remaining = rls1.getRemaining();
                    int users_show_remaining = rls2.getRemaining();
                    if(user_timeline_remaining < 10 || users_show_remaining < 10){
                        System.out.println("The " + String.valueOf(i+1)+ "th token is reaching limit!!");
                        limitReachingTokenIndeces.add(i);
                        if(limitReachingTokenIndeces.size() == al_tokens.size()){
                            // Every token is throttled: back off for 10 minutes.
                            System.out.println("Limit reached! sleep 10 mins!!\nSuspended at " + (new java.util.Date()));
                            Thread.sleep(1000 * 600);
                            limitReachingTokenIndeces.clear();//clear it after suspending
                        }
                        else {//the set doesn't contain all the indices
                            // Wrap around to token 0 if this was the last token.
                            if( i == al_tokens.size()-1 ){
                                i = -1;
                            }
                            continue;
                        }
                    }
                    else {
                        // Token is healthy again; forget any earlier limit flag.
                        limitReachingTokenIndeces.remove(i);
                    }
                    /**********************checking the limit above****************************/
                    String sid = al_ids.get(0).trim();
                    long id = Long.parseLong(sid);
                    boolean can_remove = writeResponseTo(id, twitter);
                    //work = can_remove || work;
                    if(can_remove){System.out.println(al_ids.get(0)+" removing");al_ids.remove(0);}//added or added previously
                    if(al_ids.size() == 0) return;//return if it is done
                }
                catch (Exception e){
                    // Keep crawling with the next token on any per-token failure.
                    System.err.println("start function error in GetUser:"+ e.getMessage());
                    continue;
                }
            }
        }
    }
}
apache-2.0
edaubert/jongo
src/main/java/org/jongo/MongoCollection.java
7077
/*
 * Copyright (C) 2011 Benoit GUEROUT <bguerout at gmail dot com> and Yves AMSELLEM <amsellem dot yves at gmail dot com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jongo;

import com.mongodb.*;
import org.bson.types.ObjectId;
import org.jongo.query.Query;

/**
 * Fluent facade over a driver {@link DBCollection}. Each query/update method
 * builds a small command object (Find, Update, Insert, ...) wired with this
 * collection's {@link WriteConcern}, {@link ReadPreference} and {@link Mapper}.
 * Instances are immutable; {@link #withWriteConcern} and
 * {@link #withReadPreference} return reconfigured copies.
 */
public class MongoCollection {

    public static final String MONGO_DOCUMENT_ID_NAME = "_id";
    public static final String MONGO_QUERY_OID = "$oid";

    // Placeholder used by the no-parameter overloads.
    private static final Object[] EMPTY_PARAMETERS = {};
    // Query matching every document.
    private static final String MATCH_ALL = "{}";

    private final DBCollection collection;
    private final WriteConcern writeConcern;
    private final ReadPreference readPreference;
    private final Mapper mapper;

    /** Wraps {@code dbCollection} using its own write concern and read preference. */
    public MongoCollection(DBCollection dbCollection, Mapper mapper) {
        this(dbCollection, mapper, dbCollection.getWriteConcern(), dbCollection.getReadPreference());
    }

    private MongoCollection(DBCollection dbCollection, Mapper mapper, WriteConcern writeConcern, ReadPreference readPreference) {
        this.collection = dbCollection;
        this.mapper = mapper;
        this.writeConcern = writeConcern;
        this.readPreference = readPreference;
    }

    /** Returns a copy of this collection that writes with {@code concern}. */
    public MongoCollection withWriteConcern(WriteConcern concern) {
        return new MongoCollection(collection, mapper, concern, readPreference);
    }

    /** Returns a copy of this collection that reads with {@code readPreference}. */
    public MongoCollection withReadPreference(ReadPreference readPreference) {
        return new MongoCollection(collection, mapper, writeConcern, readPreference);
    }

    /**
     * Prepares a single-document lookup by {@code _id}.
     *
     * @throws IllegalArgumentException if {@code id} is null
     */
    public FindOne findOne(ObjectId id) {
        if (id == null) {
            throw new IllegalArgumentException("Object id must not be null");
        }
        return findOne("{_id:#}", id);
    }

    /** Prepares a lookup matching any document. */
    public FindOne findOne() {
        return findOne(MATCH_ALL);
    }

    /** Prepares a lookup for the given query with no parameters. */
    public FindOne findOne(String query) {
        return findOne(query, EMPTY_PARAMETERS);
    }

    /** Prepares a lookup for the given parameterized query. */
    public FindOne findOne(String query, Object... parameters) {
        return new FindOne(collection, readPreference, mapper.getUnmarshaller(), mapper.getQueryFactory(), query, parameters);
    }

    /** Prepares a multi-document find matching any document. */
    public Find find() {
        return find(MATCH_ALL);
    }

    /** Prepares a multi-document find for the given query. */
    public Find find(String query) {
        return find(query, EMPTY_PARAMETERS);
    }

    /** Prepares a multi-document find for the given parameterized query. */
    public Find find(String query, Object... parameters) {
        return new Find(collection, readPreference, mapper.getUnmarshaller(), mapper.getQueryFactory(), query, parameters);
    }

    /** Prepares a findAndModify matching any document. */
    public FindAndModify findAndModify() {
        return findAndModify(MATCH_ALL);
    }

    /** Prepares a findAndModify for the given query. */
    public FindAndModify findAndModify(String query) {
        return findAndModify(query, EMPTY_PARAMETERS);
    }

    /** Prepares a findAndModify for the given parameterized query. */
    public FindAndModify findAndModify(String query, Object... parameters) {
        return new FindAndModify(collection, mapper.getUnmarshaller(), mapper.getQueryFactory(), query, parameters);
    }

    /** Counts every document, honoring this collection's read preference. */
    public long count() {
        return collection.getCount(readPreference);
    }

    /** Counts documents matching {@code query}. */
    public long count(String query) {
        return count(query, EMPTY_PARAMETERS);
    }

    /** Counts documents matching the parameterized query. */
    public long count(String query, Object... parameters) {
        DBObject filter = createQuery(query, parameters).toDBObject();
        return collection.getCount(filter, null, readPreference);
    }

    /** Prepares an update for the given query. */
    public Update update(String query) {
        return update(query, EMPTY_PARAMETERS);
    }

    /**
     * Prepares an update of the document with the given {@code _id}.
     *
     * @throws IllegalArgumentException if {@code id} is null
     */
    public Update update(ObjectId id) {
        if (id == null) {
            throw new IllegalArgumentException("Object id must not be null");
        }
        return update("{_id:#}", id);
    }

    /** Prepares an update for the given parameterized query. */
    public Update update(String query, Object... parameters) {
        return new Update(collection, writeConcern, mapper.getQueryFactory(), query, parameters);
    }

    /** Saves (upserts) the given pojo. */
    public WriteResult save(Object pojo) {
        return newInsert().save(pojo);
    }

    /** Inserts a single pojo. */
    public WriteResult insert(Object pojo) {
        return insert(new Object[]{pojo});
    }

    /** Inserts the document described by {@code query}. */
    public WriteResult insert(String query) {
        return insert(query, EMPTY_PARAMETERS);
    }

    /** Inserts each of the given pojos. */
    public WriteResult insert(Object... pojos) {
        return newInsert().insert(pojos);
    }

    /** Inserts the document described by the parameterized query. */
    public WriteResult insert(String query, Object... parameters) {
        return newInsert().insert(query, parameters);
    }

    // All insert/save paths share an identically-configured Insert command.
    private Insert newInsert() {
        return new Insert(collection, writeConcern, mapper.getMarshaller(), mapper.getObjectIdUpdater(), mapper.getQueryFactory());
    }

    /** Removes the document with the given {@code _id}. */
    public WriteResult remove(ObjectId id) {
        return remove("{" + MONGO_DOCUMENT_ID_NAME + ":#}", id);
    }

    /** Removes every document. */
    public WriteResult remove() {
        return remove(MATCH_ALL);
    }

    /** Removes documents matching {@code query}. */
    public WriteResult remove(String query) {
        return remove(query, EMPTY_PARAMETERS);
    }

    /** Removes documents matching the parameterized query. */
    public WriteResult remove(String query, Object... parameters) {
        DBObject filter = createQuery(query, parameters).toDBObject();
        return collection.remove(filter, writeConcern);
    }

    /** Prepares a distinct-values query on {@code key}. */
    public Distinct distinct(String key) {
        return new Distinct(collection, mapper.getUnmarshaller(), mapper.getQueryFactory(), key);
    }

    /** Starts an aggregation pipeline with the given operator. */
    public Aggregate aggregate(String pipelineOperator) {
        return aggregate(pipelineOperator, EMPTY_PARAMETERS);
    }

    /** Starts an aggregation pipeline with the given parameterized operator. */
    public Aggregate aggregate(String pipelineOperator, Object... parameters) {
        return new Aggregate(collection, mapper.getUnmarshaller(), mapper.getQueryFactory()).and(pipelineOperator, parameters);
    }

    /** Drops the underlying collection. */
    public void drop() {
        collection.drop();
    }

    /** Drops the index described by {@code keys}. */
    public void dropIndex(String keys) {
        collection.dropIndex(createQuery(keys).toDBObject());
    }

    /** Drops every index on the collection. */
    public void dropIndexes() {
        collection.dropIndexes();
    }

    /** Creates the index described by {@code keys} if absent. */
    public void ensureIndex(String keys) {
        collection.createIndex(createQuery(keys).toDBObject());
    }

    /** Creates the index described by {@code keys} with the given options. */
    public void ensureIndex(String keys, String options) {
        collection.createIndex(createQuery(keys).toDBObject(), createQuery(options).toDBObject());
    }

    /** Returns the collection name. */
    public String getName() {
        return collection.getName();
    }

    /** Exposes the wrapped driver collection. */
    public DBCollection getDBCollection() {
        return collection;
    }

    private Query createQuery(String query, Object... parameters) {
        return mapper.getQueryFactory().createQuery(query, parameters);
    }

    @Override
    public String toString() {
        return collection != null
                ? "collection {" + "name: '" + collection.getName() + "', db: '" + collection.getDB().getName() + "'}"
                : super.toString();
    }
}
apache-2.0
MuirDH/Miwok
app/src/main/java/com/example/android/miwok/PhrasesFragment.java
6987
package com.example.android.miwok;

import android.content.Context;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ListView;

import java.util.ArrayList;

/**
 * {@link Fragment} that displays a list of vocabulary phrases.
 *
 * Tapping a list item requests transient audio focus and plays that phrase's
 * pronunciation; the {@link MediaPlayer} is released on completion, on loss of
 * audio focus, and in {@link #onStop()}.
 */
public class PhrasesFragment extends Fragment {

    // Single shared player for the currently playing phrase; null when idle.
    private MediaPlayer mediaPlayer;

    // Handles audio focus
    private AudioManager audioManager;

    /**
     * This listener gets triggered whenever the audio focus changes
     * (i.e., we gain or lose audio focus because of another app or device).
     */
    private AudioManager.OnAudioFocusChangeListener onAudioFocusChangeListener =
            new AudioManager.OnAudioFocusChangeListener() {
                @Override
                public void onAudioFocusChange(int focusChange) {
                    if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT ||
                            focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK) {
                        // The AUDIOFOCUS_LOSS_TRANSIENT case means that we've lost audio focus for a
                        // short amount of time. The AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK case means that
                        // our app is allowed to continue playing sound but at a lower volume. We'll treat
                        // both cases the same way because our app is playing short sound files.

                        // Pause playback and reset player to the start of the file. That way, we can
                        // play the word from the beginning when we resume playback.
                        mediaPlayer.pause();
                        mediaPlayer.seekTo(0);
                    } else if (focusChange == AudioManager.AUDIOFOCUS_GAIN) {
                        // The AUDIOFOCUS_GAIN case means we have regained focus and can resume playback.
                        mediaPlayer.start();
                    } else if (focusChange == AudioManager.AUDIOFOCUS_LOSS) {
                        // The AUDIOFOCUS_LOSS case means we've lost audio focus for good:
                        // Stop playback and clean up resources
                        releaseMediaPlayer();
                    }
                }
            };

    // Releases the player as soon as a sound file finishes playing.
    private MediaPlayer.OnCompletionListener completionListener = new MediaPlayer.OnCompletionListener() {
        @Override
        public void onCompletion(MediaPlayer mediaPlayer) {
            releaseMediaPlayer();
        }
    };

    public PhrasesFragment() {
        // Required empty public constructor
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.word_list, container, false);

        // Create and setup the {@link AudioManager} to request audio focus
        audioManager = (AudioManager) getActivity().getSystemService(Context.AUDIO_SERVICE);

        //Create an ArrayList containing the words for the Phrases
        final ArrayList<Word> words = new ArrayList<>();
        words.add(new Word("Where are you going?", "minto wuksus", R.raw.phrase_where_are_you_going));
        words.add(new Word("What is your name?", "tinnә oyaase'nә", R.raw.phrase_what_is_your_name));
        words.add(new Word("My name is...", "oyaaset...", R.raw.phrase_my_name_is));
        words.add(new Word("How are you feeling?", "michәksәs?", R.raw.phrase_how_are_you_feeling));
        words.add(new Word("I'm feeling good.", "kuchi achit", R.raw.phrase_im_feeling_good));
        words.add(new Word("Are you coming?", "әәnәs'aa?", R.raw.phrase_are_you_coming));
        words.add(new Word("Yes, I'm coming.", "hәә’ әәnәm", R.raw.phrase_yes_im_coming));
        words.add(new Word("I'm coming.", "әәnәm", R.raw.phrase_im_coming));
        words.add(new Word("Let's go.", "yoowutis", R.raw.phrase_lets_go));
        words.add(new Word("Come here.", "әnni'nem", R.raw.phrase_come_here));

        /*
        Array adapter whose data source is a list of Strings. The list item layout, which is a
        resource defined in the Android framework, contains a single TextView which the adapter
        will set to display a single word.
        */
        WordAdapter adapter = new WordAdapter(getActivity(), words, R.color.category_phrases);

        /* Find the ListView object in the view hierarchy of the Activity. */
        ListView listView = (ListView) rootView.findViewById(R.id.list);

        /*
        Make the ListView use the ArrayAdapter we created above, so that the ListView will display
        list items for each word in the list of words. This is done by calling the setAdapter method
        on the Listview object and passing in 1 argument, which is the ArrayAdapter with the
        variable name itemsAdapter.
        */
        listView.setAdapter(adapter);

        // Set the listener on the listview
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                // Release any player left over from a previous tap before starting a new one.
                releaseMediaPlayer();

                // Get the {@link Word} object at the given position the user clicked on
                Word word = words.get(position);

                Log.v("PhrasesActivity", "Current word: " + word);

                // Ask for short-lived focus (AUDIOFOCUS_GAIN_TRANSIENT) on the music stream.
                int result = audioManager.requestAudioFocus(onAudioFocusChangeListener,
                        AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);

                if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
                    // Create an instance of MediaPlayer and cue the mp3
                    mediaPlayer = MediaPlayer.create(getActivity(), word.getAudioResourceId());

                    // play the sound file
                    mediaPlayer.start();

                    // Release the player once the clip has finished.
                    mediaPlayer.setOnCompletionListener(completionListener);
                }
            }
        });

        return rootView;
    }

    @Override
    public void onStop() {
        super.onStop();
        // Fragment is no longer visible: stop any playback and free the player.
        releaseMediaPlayer();
    }

    /**
     * Releases the MediaPlayer (if any), nulls the reference, and gives up
     * audio focus. Safe to call repeatedly.
     */
    private void releaseMediaPlayer() {
        // If the media player is not null, then it may be currently playing a sound.
        if (mediaPlayer != null) {
            // Regardless of the current state of the media player, release its resources
            // because we no longer need it.
            mediaPlayer.release();

            // Set the media player back to null. For our code, we've decided that
            // setting the media player to null is an easy way to tell that the media player
            // is not configured to play an audio file at the moment.
            mediaPlayer = null;

            Log.v("PhrasesActivity", "MediaPlayer has been released");

            audioManager.abandonAudioFocus(onAudioFocusChangeListener);
        }
    }
}
apache-2.0
laonong16/leetcode
facebook/Container With Most Water/Accepted-2ms-224572718.java
210
// // Generated by fetch-leetcode-submission project on GitHub. // https://github.com/gitzhou/fetch-leetcode-submission // Contact Me: aaron67[AT]aaron67.cc // // Container With Most Water // null // null
apache-2.0
daisy/pipeline-issues
libs/epubcheck/src/test/java/com/adobe/epubcheck/ops/OPSCheckerTest.java
28215
/* * Copyright (c) 2011 Adobe Systems Incorporated * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
* */ package com.adobe.epubcheck.ops; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.net.URL; import java.util.Collections; import java.util.LinkedList; import java.util.List; import org.junit.Before; import org.junit.Test; import com.adobe.epubcheck.api.EPUBProfile; import com.adobe.epubcheck.messages.MessageId; import com.adobe.epubcheck.opf.ValidationContext.ValidationContextBuilder; import com.adobe.epubcheck.util.EPUBVersion; import com.adobe.epubcheck.util.ExtraReportTest; import com.adobe.epubcheck.util.FileResourceProvider; import com.adobe.epubcheck.util.GenericResourceProvider; import com.adobe.epubcheck.util.Messages; import com.adobe.epubcheck.util.URLResourceProvider; import com.adobe.epubcheck.util.ValidationReport; import com.adobe.epubcheck.util.ValidationReport.ItemReport; import com.adobe.epubcheck.util.outWriter; public class OPSCheckerTest { List<MessageId> expectedErrors = new LinkedList<MessageId>(); List<MessageId> expectedWarnings = new LinkedList<MessageId>(); List<MessageId> expectedFatals = new LinkedList<MessageId>(); public void testValidateDocument(String fileName, String mimeType, EPUBVersion version) { testValidateDocument(fileName, mimeType, version, false); } public void testValidateDocument(String fileName, String mimeType, EPUBVersion version, boolean verbose) { testValidateDocument(fileName, mimeType, version, verbose, null); } public void testValidateDocument(String fileName, String mimeType, EPUBVersion version, EPUBProfile profile) { testValidateDocument(fileName, mimeType, version, profile, false); } public void testValidateDocument(String fileName, String mimeType, EPUBVersion version, EPUBProfile profile, boolean verbose) { testValidateDocument(fileName, mimeType, version, profile, verbose, null); } public void testValidateDocument(String fileName, String mimeType, EPUBVersion version, ExtraReportTest extraTest) { testValidateDocument(fileName, mimeType, version, false, 
extraTest); } public void testValidateDocument(String fileName, String mimeType, EPUBVersion version, boolean verbose, ExtraReportTest extraTest) { testValidateDocument(fileName, mimeType, version, EPUBProfile.DEFAULT, verbose, extraTest); } public void testValidateDocument(String fileName, String mimeType, EPUBVersion version, EPUBProfile profile, boolean verbose, ExtraReportTest extraTest) { ValidationReport testReport = new ValidationReport(fileName, String.format(Messages.get("single_file"), mimeType, version, profile)); String basepath = null; if (version == EPUBVersion.VERSION_2) { basepath = "/20/single/"; } else if (version == EPUBVersion.VERSION_3) { basepath = "/30/single/"; } GenericResourceProvider resourceProvider = null; if (fileName.startsWith("http://") || fileName.startsWith("https://")) { resourceProvider = new URLResourceProvider(fileName); } else { URL fileURL = this.getClass().getResource(basepath + fileName); String filePath = fileURL != null ? fileURL.getPath() : basepath + fileName; resourceProvider = new FileResourceProvider(filePath); } OPSChecker opsChecker = new OPSChecker(new ValidationContextBuilder().path(basepath + fileName) .mimetype(mimeType).resourceProvider(resourceProvider).report(testReport).version(version) .profile(profile).build()); opsChecker.validate(); if (verbose) { outWriter.println(testReport); } assertEquals("The error results do not match", expectedErrors, testReport.getErrorIds()); assertEquals("The warning results do not match", expectedWarnings, testReport.getWarningIds()); assertEquals("The fatal error results do not match", expectedFatals, testReport.getFatalErrorIds()); if (extraTest != null) { extraTest.test(testReport); } } @Before public void setup() { expectedErrors.clear(); expectedWarnings.clear(); expectedFatals.clear(); } @Test public void testValidateSVGRectInvalid() { Collections.addAll(expectedErrors, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005); 
testValidateDocument("svg/invalid/rect.svg", "image/svg+xml", EPUBVersion.VERSION_3); } @Test public void testValidateSVGRectValid() { testValidateDocument("svg/valid/rect.svg", "image/svg+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLEdits001() { testValidateDocument("xhtml/valid/edits-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLEmbed001() { testValidateDocument("xhtml/valid/embed-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLForms001() { testValidateDocument("xhtml/valid/forms-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLGlobalAttrs001() { testValidateDocument("xhtml/valid/global-attrs-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, false); } @Test public void testValidateXHTMLOps001() { testValidateDocument("xhtml/valid/ops-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLOPSMATHML001() { testValidateDocument("xhtml/valid/ops-mathml-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLLINK() { testValidateDocument("xhtml/valid/link.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLLINKInvalid() { Collections.addAll(expectedErrors, MessageId.OPF_027, MessageId.CSS_005); testValidateDocument("xhtml/invalid/link.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLXml11() { Collections.addAll(expectedErrors, MessageId.HTM_001); testValidateDocument("xhtml/invalid/xml11.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLOPSMATHML002() { testValidateDocument("xhtml/valid/ops-mathml-002.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLOPSSVG001() { testValidateDocument("xhtml/valid/ops-svg-001.xhtml", 
"application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLOPSSVG002() { // assure that epub:type is allowed on svg elements testValidateDocument("xhtml/valid/ops-svg-002.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, false); } @Test public void testValidateXHTMLRuby001() { testValidateDocument("xhtml/valid/ruby-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLCanvas() { testValidateDocument("xhtml/valid/canvas.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLInvalidCanvasFallback() { Collections.addAll(expectedErrors, MessageId.MED_002); testValidateDocument("xhtml/invalid/canvas-fallback.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLSCH001() { testValidateDocument("xhtml/valid/sch-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLSections001() { testValidateDocument("xhtml/valid/sections-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLSSML() { testValidateDocument("xhtml/valid/ssml.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLStyle001() { testValidateDocument("xhtml/valid/style-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLStyle002() { Collections.addAll(expectedErrors, MessageId.CSS_008); testValidateDocument("xhtml/invalid/style-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLSwitch001() { testValidateDocument("xhtml/valid/switch-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLTables001() { testValidateDocument("xhtml/valid/tables-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLText001() { testValidateDocument("xhtml/valid/text-001.xhtml", 
"application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLTrigger() { testValidateDocument("xhtml/valid/trigger.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLData() { testValidateDocument("xhtml/valid/data.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLPrefixes001() { testValidateDocument("xhtml/valid/prefixes-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLInvalidPrefixes001() { Collections.addAll(expectedErrors, MessageId.OPF_028, MessageId.OPF_027); testValidateDocument("xhtml/invalid/prefixes-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLVideo() { testValidateDocument("xhtml/valid/video.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_OPSMATHML001() { Collections.addAll(expectedErrors, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005); testValidateDocument("xhtml/invalid/ops-mathml-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_OPSMATHML002() { Collections.addAll(expectedErrors, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005); testValidateDocument("xhtml/invalid/ops-mathml-002.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_SCH001() { // Collections.addAll(expectedErrors, MessageId.RSC_005, MessageId.MED_002, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, 
MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, // MessageId.RSC_005); // mgy not sure what happened here, removed the first entry to make it pass Collections.addAll(expectedErrors, MessageId.MED_002, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005); testValidateDocument("xhtml/invalid/sch-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, false, new ExtraReportTest() { @Override public void test(ValidationReport testReport) { for (ItemReport error : testReport.errorList) { assertTrue("Error '" + error.message + "' has no line number.", error.line != -1); assertTrue("Error '" + error.message + "' has no column number.", error.column != -1); } } }); } @Test public void 
testValidateXHTML_SVG001() { Collections.addAll(expectedErrors, MessageId.RSC_005, MessageId.RSC_005); testValidateDocument("xhtml/invalid/svg-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_Switch001() { Collections.addAll(expectedErrors, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005); testValidateDocument("xhtml/invalid/switch-001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_Trigger() { Collections.addAll(expectedErrors, MessageId.RSC_005, MessageId.RSC_005); testValidateDocument("xhtml/invalid/trigger.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_UnresolvedDTD() { Collections.addAll(expectedErrors, MessageId.HTM_004); testValidateDocument("ops/invalid/unresolved-entity.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_2); } @Test public void testValidateXHTML_DupeID() { Collections.addAll(expectedErrors, MessageId.RSC_005, MessageId.RSC_005); testValidateDocument("ops/invalid/dupe-id.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_2); } @Test public void testValidateXHTML_DupeID_EPUB3() { Collections.addAll(expectedErrors, MessageId.RSC_005, MessageId.RSC_005); testValidateDocument("xhtml/invalid/duplicate-id.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_httpequiv() { testValidateDocument("xhtml/valid/http-equiv-1.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_httpequiv_caseinsensitive() { testValidateDocument("xhtml/valid/http-equiv-2.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_httpequiv_invalid() { Collections.addAll(expectedErrors, MessageId.RSC_005); testValidateDocument("xhtml/invalid/http-equiv-1.xhtml", "application/xhtml+xml", 
EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_httpequivInvalidMetaSibling() { Collections.addAll(expectedErrors, MessageId.RSC_005); testValidateDocument("xhtml/invalid/http-equiv-2.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_SSMLemptyPh() { Collections.addAll(expectedWarnings, MessageId.HTM_007, MessageId.HTM_007); testValidateDocument("xhtml/invalid/ssml-empty-ph.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_issue153_valid() { testValidateDocument("xhtml/valid/issue153.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_issue153_invalid() { Collections.addAll(expectedErrors, MessageId.RSC_005); testValidateDocument("xhtml/invalid/issue153.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_issue166_valid() { testValidateDocument("ops/valid/svg-foreignObject.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_2); } @Test public void testValidateXHTML_doctype1_obsolete() { Collections.addAll(expectedErrors, MessageId.HTM_004); testValidateDocument("xhtml/invalid/doctype-1.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_doctype1() { // <!DOCTYPE html> testValidateDocument("xhtml/valid/doctype-1.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_doctype2() { // <!DOCTYPE html SYSTEM "about:legacy-compat"> testValidateDocument("xhtml/valid/doctype-2.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_SVGLinks() { testValidateDocument("xhtml/valid/svg-links.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML_SVGLinks_MisssingTitle() { expectedWarnings.add(MessageId.ACC_011); testValidateDocument("xhtml/invalid/svg-links.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void 
testValidateSVG_Links() { testValidateDocument("svg/valid/svg-links.svg", "image/svg+xml", EPUBVersion.VERSION_3); } @Test public void testValidateSVG_Links_MisssingTitle() { expectedWarnings.add(MessageId.ACC_011); testValidateDocument("svg/invalid/svg-links.svg", "image/svg+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLIssue204() { Collections.addAll(expectedErrors, MessageId.RSC_005); Collections.addAll(expectedWarnings, MessageId.HTM_025); testValidateDocument("xhtml/valid/issue204.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLStyleAttr001() { testValidateDocument("xhtml/valid/styleAttr001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLStyleAttr002() { Collections.addAll(expectedErrors, MessageId.CSS_008); testValidateDocument("xhtml/invalid/styleAttr001.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } // this test should validate, see issue 173, need to wait for schema update. 
// @Test // public void testValidateXHTMLSVGwithRDF() { // testValidateDocument("xhtml/valid/svg-rdf-001.xhtml", // "application/xhtml+xml", // EPUBVersion.VERSION_3, true); // } @Test public void testValidateSVGIssue196() { testValidateDocument("ops/valid/svg-font-face.svg", "image/svg+xml", EPUBVersion.VERSION_2); } @Test public void testValidateXHTMLIssue215() { testValidateDocument("ops/valid/issue215.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_2); } @Test public void testValidateSVGIssue219() { testValidateDocument("svg/valid/issue219.svg", "image/svg+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLIssue222_223_20() { // foreignObject allowed outside switch, and <body> allowed inside testValidateDocument("ops/valid/issue222.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_2); } @Test public void testValidateXHTMLIssue222_223_30() { Collections.addAll(expectedErrors, MessageId.RSC_005); // in 3.0 foreignObject content must be flow as per // http://idpf.org/epub/30/spec/epub30-contentdocs.html#confreq-svg-foreignObject // so the document gives 1 error testValidateDocument("svg/valid/issue222.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLIssue248() { testValidateDocument("xhtml/valid/issue248.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLIssue282_ObjectTypemustmatch() { testValidateDocument("xhtml/valid/issue282-object-typemustmatch.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLIssue287_NestedHyperlink() { Collections.addAll(expectedErrors, MessageId.RSC_005); testValidateDocument("ops/invalid/issue287-nested-hyperlink.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_2); } @Test public void testValidateXHTMLIssue288_InvalidURI() { Collections.addAll(expectedErrors, MessageId.RSC_020); testValidateDocument("xhtml/invalid/issue288-invalid-uri.xhtml", "application/xhtml+xml", 
EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLIssue293() { testValidateDocument("ops/valid/issue293-edits-elem-attributes.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_2); } @Test public void testValidateXHTMLIssue296() { testValidateDocument("xhtml/valid/issue296-irc-uri.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLIssue340() { testValidateDocument("xhtml/valid/issue340.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLIssue341() { testValidateDocument("xhtml/valid/issue341.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTMLIssue355() { testValidateDocument("xhtml/valid/issue355.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML301RDFaValid() { testValidateDocument("xhtml/valid/rdfa.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML301MDValid() { testValidateDocument("xhtml/valid/md.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML301MDInvalid() { Collections.addAll(expectedErrors, MessageId.RSC_005, MessageId.RSC_005); testValidateDocument("xhtml/invalid/md.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML301CustomAttributes() { testValidateDocument("xhtml/valid/custom-ns-attrs.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testValidateXHTML301AriaDescribedAt() { expectedWarnings.add(MessageId.RSC_017); testValidateDocument("xhtml/invalid/aria-describedAt.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testEdupubSectioning_ExplicitBody() { testValidateDocument("xhtml/valid/edupub-sectioning-explicit-body.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.EDUPUB); } @Test public void testEdupubSectioning_ExplicitSections() { 
testValidateDocument("xhtml/valid/edupub-sectioning-explicit-sections.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.EDUPUB); } @Test public void testEdupubSectioning_ImplicitBody() { testValidateDocument("xhtml/valid/edupub-sectioning-implicit-body.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.EDUPUB); } @Test public void testEdupubSectioning_Subtitle() { testValidateDocument("xhtml/valid/edupub-sectioning-subtitle.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.EDUPUB); } @Test public void testEdupubSectioning_Invalid() { Collections.addAll(expectedErrors, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005); testValidateDocument("xhtml/invalid/edupub-sectioning.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.EDUPUB); } @Test public void testEdupubSectioning_InvalidExplicitBody() { Collections.addAll(expectedErrors, MessageId.RSC_005, MessageId.RSC_005, MessageId.RSC_005); testValidateDocument("xhtml/invalid/edupub-sectioning-explicit-body.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.EDUPUB); } @Test public void testEdupubSectioning_InvalidImplicitBody() { Collections.addAll(expectedErrors, MessageId.RSC_005); testValidateDocument("xhtml/invalid/edupub-sectioning-implicit-body.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.EDUPUB); } @Test public void testEdupubSectioning_InvalidImplicitBodyAriaHeading() { Collections.addAll(expectedErrors, MessageId.RSC_005); testValidateDocument("xhtml/invalid/edupub-sectioning-implicit-body-aria-heading.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.EDUPUB); } @Test public void testEdupubSectioning_InvalidSubtitle() { Collections.addAll(expectedErrors, MessageId.RSC_005); testValidateDocument("xhtml/invalid/edupub-sectioning-subtitle.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.EDUPUB); } @Test public void testEdupubSectioning_InvalidAriaLabel() { // 
aria-label MUST NOT be equal to heading content // 2 errors: one on body and one on sub-section Collections.addAll(expectedErrors, MessageId.RSC_005, MessageId.RSC_005); testValidateDocument("xhtml/invalid/edupub-sectioning-arialabel-heading.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.EDUPUB); } @Test public void testEdupupHeaading_ImgWithAltText() { testValidateDocument("xhtml/valid/edupub-heading-img.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.EDUPUB); } @Test public void testEdupupHeaading_ImgWithEmptyAltText() { Collections.addAll(expectedErrors, MessageId.RSC_005); testValidateDocument("xhtml/invalid/edupub-heading-imgnoalt.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.EDUPUB); } @Test public void testMathML() { testValidateDocument("xhtml/valid/mathml.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3); } @Test public void testIndex() { testValidateDocument("xhtml/valid/index.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.IDX); } @Test public void testIndex_NoIndex() { Collections.addAll(expectedErrors, MessageId.RSC_005); testValidateDocument("xhtml/invalid/index-noindex.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.IDX); } @Test public void testIndex_IndexNotOnBody() { Collections.addAll(expectedErrors, MessageId.RSC_005); testValidateDocument("xhtml/invalid/index-notonbody.xhtml", "application/xhtml+xml", EPUBVersion.VERSION_3, EPUBProfile.IDX); } }
apache-2.0
camilesing/zstack
header/src/main/java/org/zstack/header/volume/VolumeCanonicalEvents.java
1680
package org.zstack.header.volume;

import org.zstack.header.message.NeedJsonSchema;

import java.util.Date;

/**
 * Canonical (broadcast) events related to volumes.
 *
 * <p>Created by xing5 on 2016/3/12.
 */
public class VolumeCanonicalEvents {
    // Canonical-event path on which volume status changes are published.
    public static final String VOLUME_STATUS_CHANGED_PATH = "/volume/status/change";

    /**
     * Payload describing a single volume status transition.
     *
     * <p>NOTE(review): {@code @NeedJsonSchema} means field names are part of the
     * serialized JSON contract — do not rename them.
     */
    @NeedJsonSchema
    public static class VolumeStatusChangedData {
        // UUID of the volume whose status changed.
        private String volumeUuid;
        // Status before the transition (string form of VolumeStatus).
        private String oldStatus;
        // Status after the transition.
        private String newStatus;
        // Full inventory snapshot of the volume at event time.
        private VolumeInventory inventory;
        // Timestamp of the event; defaults to construction time.
        private Date date = new Date();
        // UUID of the account owning the volume.
        private String accountUuid;

        public String getVolumeUuid() {
            return volumeUuid;
        }

        public void setVolumeUuid(String volumeUuid) {
            this.volumeUuid = volumeUuid;
        }

        public String getOldStatus() {
            return oldStatus;
        }

        public void setOldStatus(String oldStatus) {
            this.oldStatus = oldStatus;
        }

        public String getNewStatus() {
            return newStatus;
        }

        public void setNewStatus(String newStatus) {
            this.newStatus = newStatus;
        }

        public VolumeInventory getInventory() {
            return inventory;
        }

        public void setInventory(VolumeInventory inventory) {
            this.inventory = inventory;
        }

        public Date getDate() {
            return date;
        }

        public void setDate(Date date) {
            this.date = date;
        }

        public String getAccountUuid() {
            return accountUuid;
        }

        public void setAccountUuid(String accountUuid) {
            this.accountUuid = accountUuid;
        }
    }
}
apache-2.0
dpishchukhin/org.knowhowlab.osgi.testing
org.knowhowlab.osgi.testing.assertions/src/main/java/org/knowhowlab/osgi/testing/assertions/FrameworkAssert.java
11477
/*
 * Copyright (c) 2010-2013 Dmytro Pishchukhin (http://knowhowlab.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.knowhowlab.osgi.testing.assertions;

import org.osgi.framework.Bundle;
import org.osgi.framework.FrameworkEvent;
import org.osgi.framework.Version;

import java.util.concurrent.TimeUnit;

import static java.lang.String.format;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.knowhowlab.osgi.testing.utils.BundleUtils.findBundle;
import static org.knowhowlab.osgi.testing.utils.FrameworkUtils.waitForFrameworkEvent;

/**
 * A set of OSGi Framework specific assertion methods useful for writing tests.
 * <p/>
 * Before use it could be initialized with default BundleContext
 * {@link OSGiAssert#setDefaultBundleContext(org.osgi.framework.BundleContext)}
 * <p/>
 * All overloads funnel into one of three terminal implementations (by bundle id,
 * by symbolic name, by symbolic name + version); the millisecond variants delegate
 * through their message-taking counterpart with {@link TimeUnit#MILLISECONDS}.
 *
 * @author dmytro.pishchukhin
 * @see java.lang.AssertionError
 * @see org.knowhowlab.osgi.testing.assertions.OSGiAssert
 */
public class FrameworkAssert extends OSGiAssert {
    /**
     * Utility class. Only static methods are available.
     */
    private FrameworkAssert() {
    }

    /**
     * Asserts that a FrameworkEvent for the bundle with the given id will be fired
     * within the given timeout (millis). Throws {@link AssertionError} otherwise.
     *
     * @param eventTypeMask   FrameworkEvent type mask
     * @param bundleId        bundle id
     * @param timeoutInMillis time interval in millis to wait. If zero, the method will wait indefinitely.
     * @since 1.1
     */
    public static void assertFrameworkEvent(int eventTypeMask, int bundleId, long timeoutInMillis) {
        assertFrameworkEvent(format("FrameworkEvent is unavailable: %s for bundle: %s within timeout: %sms",
                eventTypeMask, bundleId, timeoutInMillis), eventTypeMask, bundleId, timeoutInMillis);
    }

    /**
     * Same as {@link #assertFrameworkEvent(int, int, long)} but fails with the given message.
     *
     * @param message         message
     * @param eventTypeMask   FrameworkEvent type mask
     * @param bundleId        bundle id
     * @param timeoutInMillis time interval in millis to wait. If zero, the method will wait indefinitely.
     * @since 1.1
     */
    public static void assertFrameworkEvent(String message, int eventTypeMask, int bundleId, long timeoutInMillis) {
        assertFrameworkEvent(message, eventTypeMask, bundleId, timeoutInMillis, MILLISECONDS);
    }

    /**
     * Asserts that a FrameworkEvent for the bundle with the given id will be fired
     * within the given timeout. Throws {@link AssertionError} otherwise.
     *
     * @param eventTypeMask FrameworkEvent type mask
     * @param bundleId      bundle id
     * @param timeout       time interval to wait. If zero, the method will wait indefinitely.
     * @param timeUnit      time unit for the time interval
     * @since 1.1
     */
    public static void assertFrameworkEvent(int eventTypeMask, int bundleId, long timeout, TimeUnit timeUnit) {
        assertFrameworkEvent(format("FrameworkEvent is unavailable: %s for bundle: %s within timeout: %s%s",
                eventTypeMask, bundleId, timeout, timeUnit), eventTypeMask, bundleId, timeout, timeUnit);
    }

    /**
     * Terminal implementation for the bundle-id overloads.
     *
     * @param message       message
     * @param eventTypeMask FrameworkEvent type mask
     * @param bundleId      bundle id
     * @param timeout       time interval to wait. If zero, the method will wait indefinitely.
     * @param timeUnit      time unit for the time interval
     * @since 1.1
     */
    public static void assertFrameworkEvent(String message, int eventTypeMask, int bundleId, long timeout, TimeUnit timeUnit) {
        // Validate arguments before any lookup, consistent with the other
        // terminal implementations below.
        assertThat("TimeUnit is null", timeUnit, notNullValue());
        Bundle bundle = findBundle(getBundleContext(), bundleId);
        assertThat(format("Unknown bundle with ID: %d", bundleId), bundle, notNullValue());
        FrameworkEvent event = waitForFrameworkEvent(getBundleContext(), bundleId, eventTypeMask, timeout, timeUnit);
        assertThat(message, event, notNullValue());
    }

    /**
     * Asserts that a FrameworkEvent for the bundle with the given symbolic name will
     * be fired within the given timeout (millis). Throws {@link AssertionError} otherwise.
     *
     * @param eventTypeMask   FrameworkEvent type mask
     * @param symbolicName    symbolicName
     * @param timeoutInMillis time interval in millis to wait. If zero, the method will wait indefinitely.
     * @since 1.1
     */
    public static void assertFrameworkEvent(int eventTypeMask, String symbolicName, long timeoutInMillis) {
        // Delegate through the message-taking millis overload, consistent with
        // the bundle-id variant above (previously this skipped straight to the
        // TimeUnit overload).
        assertFrameworkEvent(format("FrameworkEvent is unavailable: %s for bundle: %s within timeout: %sms",
                eventTypeMask, symbolicName, timeoutInMillis), eventTypeMask, symbolicName, timeoutInMillis);
    }

    /**
     * Same as {@link #assertFrameworkEvent(int, String, long)} but fails with the given message.
     *
     * @param message         message
     * @param eventTypeMask   FrameworkEvent type mask
     * @param symbolicName    symbolicName
     * @param timeoutInMillis time interval in millis to wait. If zero, the method will wait indefinitely.
     * @since 1.1
     */
    public static void assertFrameworkEvent(String message, int eventTypeMask, String symbolicName, long timeoutInMillis) {
        assertFrameworkEvent(message, eventTypeMask, symbolicName, timeoutInMillis, MILLISECONDS);
    }

    /**
     * Asserts that a FrameworkEvent for the bundle with the given symbolic name will
     * be fired within the given timeout. Throws {@link AssertionError} otherwise.
     *
     * @param eventTypeMask FrameworkEvent type mask
     * @param symbolicName  symbolicName
     * @param timeout       time interval to wait. If zero, the method will wait indefinitely.
     * @param timeUnit      time unit for the time interval
     * @since 1.1
     */
    public static void assertFrameworkEvent(int eventTypeMask, String symbolicName, long timeout, TimeUnit timeUnit) {
        assertFrameworkEvent(format("FrameworkEvent is unavailable: %s for bundle: %s within timeout: %s%s",
                eventTypeMask, symbolicName, timeout, timeUnit), eventTypeMask, symbolicName, timeout, timeUnit);
    }

    /**
     * Terminal implementation for the symbolic-name overloads.
     *
     * @param message       message
     * @param eventTypeMask FrameworkEvent type mask
     * @param symbolicName  symbolicName
     * @param timeout       time interval to wait. If zero, the method will wait indefinitely.
     * @param timeUnit      time unit for the time interval
     * @since 1.1
     */
    public static void assertFrameworkEvent(String message, int eventTypeMask, String symbolicName, long timeout, TimeUnit timeUnit) {
        assertThat("SymbolicName is null", symbolicName, notNullValue());
        assertThat("TimeUnit is null", timeUnit, notNullValue());
        FrameworkEvent event = waitForFrameworkEvent(getBundleContext(), symbolicName, eventTypeMask, timeout, timeUnit);
        assertThat(message, event, notNullValue());
    }

    /**
     * Asserts that a FrameworkEvent for the bundle with the given symbolic name and
     * version will be fired within the given timeout (millis). Throws {@link AssertionError} otherwise.
     *
     * @param eventTypeMask   FrameworkEvent type mask
     * @param symbolicName    symbolicName
     * @param version         version
     * @param timeoutInMillis time interval in millis to wait. If zero, the method will wait indefinitely.
     * @since 1.1
     */
    public static void assertFrameworkEvent(int eventTypeMask, String symbolicName, Version version, long timeoutInMillis) {
        assertFrameworkEvent(format("FrameworkEvent is unavailable: %s for bundle: %s[%s] within timeout: %sms",
                eventTypeMask, symbolicName, version, timeoutInMillis), eventTypeMask, symbolicName, version, timeoutInMillis);
    }

    /**
     * Same as {@link #assertFrameworkEvent(int, String, Version, long)} but fails with the given message.
     *
     * @param message         message
     * @param eventTypeMask   FrameworkEvent type mask
     * @param symbolicName    symbolicName
     * @param version         version
     * @param timeoutInMillis time interval in millis to wait. If zero, the method will wait indefinitely.
     * @since 1.1
     */
    public static void assertFrameworkEvent(String message, int eventTypeMask, String symbolicName, Version version, long timeoutInMillis) {
        assertFrameworkEvent(message, eventTypeMask, symbolicName, version, timeoutInMillis, MILLISECONDS);
    }

    /**
     * Asserts that a FrameworkEvent for the bundle with the given symbolic name and
     * version will be fired within the given timeout. Throws {@link AssertionError} otherwise.
     *
     * @param eventTypeMask FrameworkEvent type mask
     * @param symbolicName  symbolicName
     * @param version       version
     * @param timeout       time interval to wait. If zero, the method will wait indefinitely.
     * @param timeUnit      time unit for the time interval
     * @since 1.1
     */
    public static void assertFrameworkEvent(int eventTypeMask, String symbolicName, Version version, long timeout, TimeUnit timeUnit) {
        assertFrameworkEvent(format("FrameworkEvent is unavailable: %s for bundle: %s[%s] within timeout: %s%s",
                eventTypeMask, symbolicName, version, timeout, timeUnit), eventTypeMask, symbolicName, version, timeout, timeUnit);
    }

    /**
     * Terminal implementation for the symbolic-name + version overloads.
     *
     * @param message       message
     * @param eventTypeMask FrameworkEvent type mask
     * @param symbolicName  symbolicName
     * @param version       version
     * @param timeout       time interval to wait. If zero, the method will wait indefinitely.
     * @param timeUnit      time unit for the time interval
     * @since 1.1
     */
    public static void assertFrameworkEvent(String message, int eventTypeMask, String symbolicName, Version version, long timeout, TimeUnit timeUnit) {
        assertThat("SymbolicName is null", symbolicName, notNullValue());
        assertThat("TimeUnit is null", timeUnit, notNullValue());
        FrameworkEvent event = waitForFrameworkEvent(getBundleContext(), symbolicName, version, eventTypeMask, timeout, timeUnit);
        assertThat(message, event, notNullValue());
    }
}
apache-2.0
dremio/dremio-oss
common/src/main/java/com/dremio/common/util/DremioEdition.java
1680
/* * Copyright (C) 2017-2019 Dremio Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dremio.common.util; /** * Expose the current loaded edition of Dremio software */ public enum DremioEdition { OSS, COMMUNITY, ENTERPRISE, MARKETPLACE; private static final DremioEdition CURRENT; static { DremioEdition edition = DremioEdition.OSS; if (is("Marketplace")) { edition = DremioEdition.MARKETPLACE; } else if (is("Enterprise")) { edition = DremioEdition.ENTERPRISE; } else if (is("Community")) { edition = DremioEdition.COMMUNITY; } CURRENT = edition; } public static DremioEdition get() { return CURRENT; } public static String getAsString() { switch (CURRENT) { case OSS: return "OSS"; case COMMUNITY: return "CE"; case ENTERPRISE: return "EE"; case MARKETPLACE: return "ME"; default: return CURRENT.name(); } } private static boolean is(String name) { try { Class.forName("com.dremio.edition." + name); return true; } catch (Exception e) { return false; } } }
apache-2.0
marcusportmann/mmp-java
src/mmp-application-kafka/src/main/java/guru/mmp/application/kafka/serialization/AvroSerializer.java
2479
/*
 * Copyright 2017 Marcus Portmann
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package guru.mmp.application.kafka.serialization;

//~--- non-JDK imports --------------------------------------------------------

import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificRecordBase;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Serializer;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Map;

//~--- JDK imports ------------------------------------------------------------

/**
 * The <code>AvroSerializer</code> class implements the Apache Avro serializer for Apache Kafka.
 *
 * @param <T> the Avro-generated record type being serialized
 *
 * @author Marcus Portmann
 */
public class AvroSerializer<T extends SpecificRecordBase>
  implements Serializer<T>
{
  @Override
  public void close()
  {
    // No-op
  }

  @Override
  public void configure(Map<String, ?> arg0, boolean arg1)
  {
    // No-op
  }

  /**
   * Serialize the Avro record into its binary-encoded form.
   *
   * @param topic the topic the data is being serialized for (used in error reporting only)
   * @param data  the record to serialize, or <code>null</code>
   *
   * @return the Avro binary encoding of the record, or <code>null</code> if
   *         <code>data</code> is <code>null</code>
   */
  @Override
  public byte[] serialize(String topic, T data)
  {
    if (data == null)
    {
      // Kafka convention: a null value serializes to null (tombstone).
      return null;
    }

    // try-with-resources guarantees the stream is released on all paths
    // (the original only closed it on success).
    try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream())
    {
      BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);

      DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(data.getSchema());

      datumWriter.write(data, binaryEncoder);

      // flush the encoder's internal buffer before reading the stream contents
      binaryEncoder.flush();

      return byteArrayOutputStream.toByteArray();
    }
    catch (IOException e)
    {
      throw new SerializationException("Failed to serialize the data for the topic (" + topic
          + ")", e);
    }
  }
}
apache-2.0
liyuanhust/LoadMoreHelper
libary/src/main/java/com/lain/loadmorehelper/list/recyclerview/BaseWrapperAdapter.java
7539
/*
 * Copyright (C) 2015 Haruki Hasegawa
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.lain.loadmorehelper.list.recyclerview;

import android.support.v7.widget.RecyclerView;
import android.view.ViewGroup;

import java.lang.ref.WeakReference;

/**
 * A RecyclerView adapter that wraps another adapter and delegates every call
 * to it, while forwarding the wrapped adapter's data-change notifications to
 * this adapter's own observers via a {@link BridgeObserver}.
 *
 * <p>Subclasses override the {@code onHandleWrappedAdapter*} hooks to remap
 * positions (e.g. for header/footer insertion). Call {@link #release()} when
 * done to unregister the bridge observer and drop the wrapped adapter.
 */
public class BaseWrapperAdapter<VH extends RecyclerView.ViewHolder> extends RecyclerView.Adapter<VH> {
    private static final String TAG = "ARVBaseWrapperAdapter";

    // The adapter all calls are delegated to; null after release().
    private RecyclerView.Adapter<VH> mWrappedAdapter;
    // Forwards the wrapped adapter's change events back into this adapter.
    private BridgeObserver mBridgeObserver;

    public BaseWrapperAdapter(RecyclerView.Adapter<VH> adapter) {
        mWrappedAdapter = adapter;
        mBridgeObserver = new BridgeObserver<>(this);
        // Start mirroring the wrapped adapter's notifications immediately.
        mWrappedAdapter.registerAdapterDataObserver(mBridgeObserver);
        // Mirror the wrapped adapter's stable-ids setting; super is used to
        // avoid the overridden setHasStableIds() writing back to the wrapped
        // adapter during construction.
        super.setHasStableIds(mWrappedAdapter.hasStableIds());
    }

    /**
     * Unregisters the bridge observer and releases the wrapped adapter.
     * The adapter must not be used after this call.
     */
    public void release() {
        onRelease();
        if (mWrappedAdapter != null && mBridgeObserver != null) {
            mWrappedAdapter.unregisterAdapterDataObserver(mBridgeObserver);
        }
        mWrappedAdapter = null;
        mBridgeObserver = null;
    }

    protected void onRelease() {
        // override this method if needed
    }

    /** @return the adapter this wrapper delegates to (null after release()). */
    public RecyclerView.Adapter<VH> getWrappedAdapter() {
        return mWrappedAdapter;
    }

    // --- lifecycle / view-holder calls: pure delegation to the wrapped adapter ---

    @Override
    public void onAttachedToRecyclerView(RecyclerView recyclerView) {
        mWrappedAdapter.onAttachedToRecyclerView(recyclerView);
    }

    @Override
    public void onDetachedFromRecyclerView(RecyclerView recyclerView) {
        mWrappedAdapter.onDetachedFromRecyclerView(recyclerView);
    }

    @Override
    public void onViewAttachedToWindow(VH holder) {
        mWrappedAdapter.onViewAttachedToWindow(holder);
    }

    @Override
    public void onViewDetachedFromWindow(VH holder) {
        mWrappedAdapter.onViewDetachedFromWindow(holder);
    }

    @Override
    public void onViewRecycled(VH holder) {
        mWrappedAdapter.onViewRecycled(holder);
    }

    @Override
    public void setHasStableIds(boolean hasStableIds) {
        // Keep both this adapter and the wrapped adapter in sync.
        super.setHasStableIds(hasStableIds);
        mWrappedAdapter.setHasStableIds(hasStableIds);
    }

    @Override
    public VH onCreateViewHolder(ViewGroup parent, int viewType) {
        return mWrappedAdapter.onCreateViewHolder(parent, viewType);
    }

    @Override
    public void onBindViewHolder(VH holder, int position) {
        mWrappedAdapter.onBindViewHolder(holder, position);
    }

    @Override
    public int getItemCount() {
        return mWrappedAdapter.getItemCount();
    }

    @Override
    public long getItemId(int position) {
        return mWrappedAdapter.getItemId(position);
    }

    @Override
    public int getItemViewType(int position) {
        return mWrappedAdapter.getItemViewType(position);
    }

    // --- notification hooks: subclasses override these to remap positions ---

    protected void onHandleWrappedAdapterChanged() {
        notifyDataSetChanged();
    }

    protected void onHandleWrappedAdapterItemRangeChanged(int positionStart, int itemCount) {
        notifyItemRangeChanged(positionStart, itemCount);
    }

    protected void onHandleWrappedAdapterItemRangeChanged(int positionStart, int itemCount, Object payload) {
        notifyItemRangeChanged(positionStart, itemCount, payload);
    }

    protected void onHandleWrappedAdapterItemRangeInserted(int positionStart, int itemCount) {
        notifyItemRangeInserted(positionStart, itemCount);
    }

    protected void onHandleWrappedAdapterItemRangeRemoved(int positionStart, int itemCount) {
        notifyItemRangeRemoved(positionStart, itemCount);
    }

    protected void onHandleWrappedAdapterRangeMoved(int fromPosition, int toPosition, int itemCount) {
        // RecyclerView only reports single-item moves; anything else indicates
        // a framework contract violation.
        if (itemCount != 1) {
            throw new IllegalStateException("itemCount should be always 1  (actual: " + itemCount + ")");
        }
        notifyItemMoved(fromPosition, toPosition);
    }

    // --- package-private entry points called by BridgeObserver ---

    /*package*/ final void onWrappedAdapterChanged() {
        onHandleWrappedAdapterChanged();
    }

    /*package*/ final void onWrappedAdapterItemRangeChanged(int positionStart, int itemCount) {
        onHandleWrappedAdapterItemRangeChanged(positionStart, itemCount);
    }

    /*package*/ final void onWrappedAdapterItemRangeChanged(int positionStart, int itemCount, Object payload) {
        onHandleWrappedAdapterItemRangeChanged(positionStart, itemCount, payload);
    }

    /*package*/ final void onWrappedAdapterItemRangeInserted(int positionStart, int itemCount) {
        onHandleWrappedAdapterItemRangeInserted(positionStart, itemCount);
    }

    /*package*/ final void onWrappedAdapterItemRangeRemoved(int positionStart, int itemCount) {
        onHandleWrappedAdapterItemRangeRemoved(positionStart, itemCount);
    }

    /*package*/ final void onWrappedAdapterRangeMoved(int fromPosition, int toPosition, int itemCount) {
        onHandleWrappedAdapterRangeMoved(fromPosition, toPosition, itemCount);
    }

    /**
     * Observer registered on the wrapped adapter. Holds the wrapper only via a
     * WeakReference so the observer registration does not keep the wrapper
     * (and its view hierarchy) alive if it is otherwise unreachable.
     */
    private static final class BridgeObserver<VH extends RecyclerView.ViewHolder> extends RecyclerView.AdapterDataObserver {
        private WeakReference<BaseWrapperAdapter<VH>> mRefHolder;

        public BridgeObserver(BaseWrapperAdapter<VH> holder) {
            mRefHolder = new WeakReference<>(holder);
        }

        @Override
        public void onChanged() {
            final BaseWrapperAdapter<VH> holder = mRefHolder.get();
            if (holder != null) {
                holder.onWrappedAdapterChanged();
            }
        }

        @Override
        public void onItemRangeChanged(int positionStart, int itemCount) {
            final BaseWrapperAdapter<VH> holder = mRefHolder.get();
            if (holder != null) {
                holder.onWrappedAdapterItemRangeChanged(positionStart, itemCount);
            }
        }

        @Override
        public void onItemRangeChanged(int positionStart, int itemCount, Object payload) {
            final BaseWrapperAdapter<VH> holder = mRefHolder.get();
            if (holder != null) {
                holder.onWrappedAdapterItemRangeChanged(positionStart, itemCount, payload);
            }
        }

        @Override
        public void onItemRangeInserted(int positionStart, int itemCount) {
            final BaseWrapperAdapter<VH> holder = mRefHolder.get();
            if (holder != null) {
                holder.onWrappedAdapterItemRangeInserted(positionStart, itemCount);
            }
        }

        @Override
        public void onItemRangeRemoved(int positionStart, int itemCount) {
            final BaseWrapperAdapter<VH> holder = mRefHolder.get();
            if (holder != null) {
                holder.onWrappedAdapterItemRangeRemoved(positionStart, itemCount);
            }
        }

        @Override
        public void onItemRangeMoved(int fromPosition, int toPosition, int itemCount) {
            final BaseWrapperAdapter<VH> holder = mRefHolder.get();
            if (holder != null) {
                holder.onWrappedAdapterRangeMoved(fromPosition, toPosition, itemCount);
            }
        }
    }
}
apache-2.0
usr000/outlier-detection
spark-inbound/src/test/java/outlier/detection/spark/processor/MedAbsDevOutlierFinderTest.java
2156
package outlier.detection.spark.processor;

import java.util.Arrays;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import outlier.detection.dto.Outlier;

/**
 * Unit tests for {@code MedAbsDevOutlierFinder} (median-absolute-deviation
 * based outlier detection), exercised through the {@code OutlierFinder}
 * interface.
 */
public class MedAbsDevOutlierFinderTest {
    private OutlierFinder outlierFinder;

    @Before
    public void setUp() {
        outlierFinder = new MedAbsDevOutlierFinder();
    }

    /**
     * Inputs with no MAD outliers (empty, single-element, symmetric, and
     * constant series) must all produce an empty Outlier result.
     */
    @Test
    public void testNoOutliers() {
        Outlier empty = outlier();
        double[][] input = {
            new double[]{},
            new double[]{0.0},
            new double[]{1.0},
            new double[]{-1.0},
            new double[]{-1.0, 1.0},
            new double[]{1.0, 1.0, 0.0, 2},
            new double[]{1.0, 1.0, 1.0, 1.0}
        };
        for (double[] in: input) {
            Outlier outlier = outlierFinder.findIn(in);
            Assert.assertEquals(empty, outlier);
        }
    }

    /**
     * Parallel input/expected tables: expected[i] lists the readings the
     * finder must flag as outliers for input[i]. The trailing //N comments
     * index the rows so a failure can be matched to its data.
     */
    @Test
    public void testOutliers() {
        double[][] input = {
            new double[]{1.0, 1.0, 0.0},//0
            new double[]{1.0, 1.0, 0.0, 100.0},//1
            new double[]{1.0, 1.0, 0.0, 1.5, 2.5, 3.5, 5, 7, 8, 9},//2
            new double[]{1.0, 1.0, 0.0, 1.5, 2.5, 3.5, 5, 7, 8, 9, -5},//3
            new double[]{1, 13, 192, 7, 8, 99, 1014, 4},//4
            new double[]{5.0, 5.0, 5.0, 5.0, 105.0, 6.0, 7.0, 7.0, 105.0, 5.0, 2.0, 102.0, 6.0, 3.0},//5
            new double[]{6.0, 5.0, 4.0, 107.0, 5.0, 5.0, 7.0, 7.0, 2.0, 6.0, 104.0, 2.0, 105.0},//6
            new double[]{107.0, 7.0, 7.0, 5.0, 105.0, 4.0, 102.0, 7.0, 7.0, 102.0},//7
            new double[]{5.0, 3.0, 104.0, 4.0, 6.0, 4.0, 3.0, 4.0, 6.0, 2.0, 7.0}//8
        };
        Outlier[] expected = {
            outlier(0.0),//0
            outlier(100.0),//1
            outlier(8.0, 9.0),//2
            outlier(8.0, 9.0, -5.0),//3
            outlier(192.0, 99.0, 1014.0),//4
            outlier(105.0, 105.0, 2.0, 102.0, 3.0),//5
            outlier(107.0, 2.0, 104.0, 2.0, 105.0),//6
            outlier(107.0, 105.0, 102.0, 102.0),//7
            outlier(104.0, 7.0)//8
        };
        // Guard against the two tables drifting out of sync.
        Assert.assertEquals(input.length, expected.length);
        for (int i=0; i<input.length; i++) {
            Outlier outlier = outlierFinder.findIn(input[i]);
            Assert.assertEquals(expected[i], outlier);
        }
    }

    // Builds the expected Outlier value; "local_MedAbsDev" is the detector id
    // the finder under test is expected to report.
    private Outlier outlier(Double... readings) {
        Outlier outlier = new Outlier("local_MedAbsDev", Arrays.asList(readings));
        return outlier;
    }
}
apache-2.0
alucardxh/BaseFramework
src/main/java/com/baseframework/example/commons/script/Test.java
445
package com.baseframework.example.commons.script;

import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;

/**
 * Demonstrates evaluating a snippet through the javax.script (JSR-223) API.
 */
public class Test {
    public static void main(String[] args) throws ScriptException {
        ScriptEngineManager factory = new ScriptEngineManager();
        ScriptEngine se = factory.getEngineByName("Java");
        // getEngineByName returns null when no matching engine is registered.
        // A stock JDK ships no "Java" engine (one would come from an extra
        // provider such as BeanShell on the classpath), so guard against the
        // NullPointerException the original code threw in that case.
        if (se == null) {
            System.err.println("No script engine registered for name \"Java\"");
            return;
        }
        se.eval("System.out.println(\"aaa\");");
    }
}
apache-2.0
masaki-yamakawa/geode
geode-membership/src/main/java/org/apache/geode/internal/membership/utils/AvailablePort.java
16160
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.membership.utils; import java.io.IOException; import java.io.PrintStream; import java.io.Serializable; import java.net.DatagramPacket; import java.net.Inet6Address; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.MulticastSocket; import java.net.NetworkInterface; import java.net.ServerSocket; import java.net.SocketAddress; import java.net.SocketException; import java.net.SocketTimeoutException; import java.net.UnknownHostException; import java.util.Enumeration; import java.util.Random; import org.apache.geode.annotations.Immutable; import org.apache.geode.distributed.internal.membership.api.MembershipConfig; import org.apache.geode.internal.inet.LocalHostUtil; import org.apache.geode.util.internal.GeodeGlossary; /** * This class determines whether or not a given port is available and can also provide a randomly * selected available port. */ public class AvailablePort { public static final int AVAILABLE_PORTS_LOWER_BOUND = 20001;// 20000/udp is securid public static final int AVAILABLE_PORTS_UPPER_BOUND = 29999;// 30000/tcp is spoolfax /** Is the port available for a Socket (TCP) connection? 
*/ public static final int SOCKET = 0; /** Is the port available for a JGroups (UDP) multicast connection */ public static final int MULTICAST = 1; /** * see if there is a gemfire system property that establishes a default address for the given * protocol, and return it */ public static InetAddress getAddress(int protocol) { String name = null; try { if (protocol == SOCKET) { name = System.getProperty(GeodeGlossary.GEMFIRE_PREFIX + "bind-address"); } else if (protocol == MULTICAST) { name = System.getProperty(GeodeGlossary.GEMFIRE_PREFIX + "mcast-address"); } if (name != null) { return InetAddress.getByName(name); } } catch (UnknownHostException e) { throw new RuntimeException("Unable to resolve address " + name); } return null; } /** * Returns whether or not the given port on the local host is available (that is, unused). * * @param port The port to check * @param protocol The protocol to check (either {@link #SOCKET} or {@link #MULTICAST}). * * @throws IllegalArgumentException <code>protocol</code> is unknown */ public static boolean isPortAvailable(final int port, int protocol) { return isPortAvailable(port, protocol, getAddress(protocol)); } /** * Returns whether or not the given port on the local host is available (that is, unused). * * @param port The port to check * @param protocol The protocol to check (either {@link #SOCKET} or {@link #MULTICAST}). 
* @param addr the bind address (or mcast address) to use * * @throws IllegalArgumentException <code>protocol</code> is unknown */ public static boolean isPortAvailable(final int port, int protocol, InetAddress addr) { if (protocol == SOCKET) { // Try to create a ServerSocket if (addr == null) { return testAllInterfaces(port); } else { return testOneInterface(addr, port); } } else if (protocol == MULTICAST) { MulticastSocket socket = null; try { socket = new MulticastSocket(); InetAddress localHost = LocalHostUtil.getLocalHost(); socket.setInterface(localHost); socket.setSoTimeout(Integer.getInteger("AvailablePort.timeout", 2000).intValue()); socket.setReuseAddress(true); byte[] buffer = new byte[4]; buffer[0] = (byte) 'p'; buffer[1] = (byte) 'i'; buffer[2] = (byte) 'n'; buffer[3] = (byte) 'g'; InetAddress mcid = addr == null ? InetAddress.getByName(MembershipConfig.DEFAULT_MCAST_ADDRESS) : addr; SocketAddress mcaddr = new InetSocketAddress(mcid, port); socket.joinGroup(mcid); DatagramPacket packet = new DatagramPacket(buffer, 0, buffer.length, mcaddr); socket.send(packet); try { socket.receive(packet); packet.getData(); // make sure there's data, but no need to process it return false; } catch (SocketTimeoutException ste) { // System.out.println("socket read timed out"); return true; } catch (Exception e) { e.printStackTrace(); return false; } } catch (java.io.IOException ioe) { if (ioe.getMessage().equals("Network is unreachable")) { throw new RuntimeException( "Network is unreachable", ioe); } ioe.printStackTrace(); return false; } catch (Exception e) { e.printStackTrace(); return false; } finally { if (socket != null) { try { socket.close(); } catch (Exception e) { e.printStackTrace(); } } } } else { throw new IllegalArgumentException(String.format("Unknown protocol: %s", Integer.valueOf(protocol))); } } public static Keeper isPortKeepable(final int port, int protocol, InetAddress addr) { if (protocol == SOCKET) { // Try to create a ServerSocket if (addr == 
null) { return keepAllInterfaces(port); } else { return keepOneInterface(addr, port); } } else if (protocol == MULTICAST) { throw new IllegalArgumentException("You can not keep the JGROUPS protocol"); } else { throw new IllegalArgumentException(String.format("Unknown protocol: %s", Integer.valueOf(protocol))); } } private static boolean testOneInterface(InetAddress addr, int port) { Keeper k = keepOneInterface(addr, port); if (k != null) { k.release(); return true; } else { return false; } } private static Keeper keepOneInterface(InetAddress addr, int port) { ServerSocket server = null; try { // (new Exception("Opening server socket on " + port)).printStackTrace(); server = new ServerSocket(); server.setReuseAddress(true); if (addr != null) { server.bind(new InetSocketAddress(addr, port)); } else { server.bind(new InetSocketAddress(port)); } Keeper result = new Keeper(server, port); server = null; return result; } catch (java.io.IOException ioe) { if (ioe.getMessage().equals("Network is unreachable")) { throw new RuntimeException("Network is unreachable"); } // ioe.printStackTrace(); if (addr instanceof Inet6Address) { byte[] addrBytes = addr.getAddress(); if ((addrBytes[0] == (byte) 0xfe) && (addrBytes[1] == (byte) 0x80)) { // Hack, early Sun 1.5 versions (like Hitachi's JVM) cannot handle IPv6 // link local addresses. Cannot trust InetAddress.isLinkLocalAddress() // see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6558853 // By returning true we ignore these interfaces and potentially say a // port is not in use when it really is. Keeper result = new Keeper(server, port); server = null; return result; } } return null; } catch (Exception ex) { return null; } finally { if (server != null) { try { server.close(); } catch (Exception ex) { } } } } /** * Test to see if a given port is available port on all interfaces on this host. 
* * @return true of if the port is free on all interfaces */ private static boolean testAllInterfaces(int port) { Keeper k = keepAllInterfaces(port); if (k != null) { k.release(); return true; } else { return false; } } private static Keeper keepAllInterfaces(int port) { // First check to see if we can bind to the wildcard address. if (!testOneInterface(null, port)) { return null; } // Now check all of the addresses for all of the addresses // on this system. On some systems (solaris, aix) binding // to the wildcard address will successfully bind to only some // of the interfaces if other interfaces are in use. We want to // make sure this port is completely free. // // Note that we still need the check of the wildcard address, above, // because on some systems (aix) we can still bind to specific addresses // if someone else has bound to the wildcard address. Enumeration en; try { en = NetworkInterface.getNetworkInterfaces(); } catch (SocketException e) { throw new RuntimeException(e); } while (en.hasMoreElements()) { NetworkInterface next = (NetworkInterface) en.nextElement(); Enumeration en2 = next.getInetAddresses(); while (en2.hasMoreElements()) { InetAddress addr = (InetAddress) en2.nextElement(); boolean available = testOneInterface(addr, port); if (!available) { return null; } } } // Now do it one more time but reserve the wildcard address return keepOneInterface(null, port); } /** * Returns a randomly selected available port in the range 5001 to 32767. * * @param protocol The protocol to check (either {@link #SOCKET} or {@link #MULTICAST}). * * @throws IllegalArgumentException <code>protocol</code> is unknown */ public static int getRandomAvailablePort(int protocol) { return getRandomAvailablePort(protocol, getAddress(protocol)); } /** * Returns a randomly selected available port in the range 5001 to 32767. * * @param protocol The protocol to check (either {@link #SOCKET} or {@link #MULTICAST}). 
* @param addr the bind-address or mcast address to use * * @throws IllegalArgumentException <code>protocol</code> is unknown */ public static int getRandomAvailablePort(int protocol, InetAddress addr) { return getRandomAvailablePort(protocol, addr, false); } /** * Returns a randomly selected available port in the range 5001 to 32767. * * @param protocol The protocol to check (either {@link #SOCKET} or {@link #MULTICAST}). * @param addr the bind-address or mcast address to use * @param useMembershipPortRange use true if the port will be used for membership * * @throws IllegalArgumentException <code>protocol</code> is unknown */ public static int getRandomAvailablePort(int protocol, InetAddress addr, boolean useMembershipPortRange) { while (true) { int port = getRandomWildcardBindPortNumber(useMembershipPortRange); if (isPortAvailable(port, protocol, addr)) { // don't return the products default multicast port if (!(protocol == MULTICAST && port == MembershipConfig.DEFAULT_MCAST_PORT)) { return port; } } } } @Immutable public static final Random rand; static { boolean fast = Boolean.getBoolean("AvailablePort.fastRandom"); if (fast) { rand = new Random(); } else { rand = new java.security.SecureRandom(); } } private static int getRandomWildcardBindPortNumber(boolean useMembershipPortRange) { int rangeBase; int rangeTop; if (!useMembershipPortRange) { rangeBase = AVAILABLE_PORTS_LOWER_BOUND; // 20000/udp is securid rangeTop = AVAILABLE_PORTS_UPPER_BOUND; // 30000/tcp is spoolfax } else { rangeBase = MembershipConfig.DEFAULT_MEMBERSHIP_PORT_RANGE[0]; rangeTop = MembershipConfig.DEFAULT_MEMBERSHIP_PORT_RANGE[1]; } return rand.nextInt(rangeTop - rangeBase) + rangeBase; } public static int getRandomAvailablePortInRange(int rangeBase, int rangeTop, int protocol) { int numberOfPorts = rangeTop - rangeBase; // do "5 times the numberOfPorts" iterations to select a port number. 
This will ensure that // each of the ports from given port range get a chance at least once int numberOfRetrys = numberOfPorts * 5; for (int i = 0; i < numberOfRetrys; i++) { int port = rand.nextInt(numberOfPorts + 1) + rangeBase;// add 1 to numberOfPorts so that // rangeTop also gets included if (isPortAvailable(port, protocol, getAddress(protocol))) { return port; } } return -1; } /** * This class will keep an allocated port allocated until it is used. This makes the window * smaller that can cause bug 46690 * */ public static class Keeper implements Serializable { private final transient ServerSocket ss; private final int port; public Keeper(ServerSocket ss, int port) { this.ss = ss; this.port = port; } public Keeper(ServerSocket ss, Integer port) { this.ss = ss; this.port = port != null ? port : 0; } public int getPort() { return this.port; } /** * Once you call this the socket will be freed and can then be reallocated by someone else. */ public void release() { try { if (this.ss != null) { this.ss.close(); } } catch (IOException ignore) { } } } /////////////////////// Main Program /////////////////////// @Immutable private static final PrintStream out = System.out; @Immutable private static final PrintStream err = System.err; private static void usage(String s) { err.println("\n** " + s + "\n"); err.println("usage: java AvailablePort socket|jgroups [\"addr\" network-address] [port]"); err.println(""); err.println( "This program either prints whether or not a port is available for a given protocol, or it prints out an available port for a given protocol."); err.println(""); System.exit(1); } public static void main(String[] args) throws UnknownHostException { String protocolString = null; String addrString = null; String portString = null; for (int i = 0; i < args.length; i++) { if (protocolString == null) { protocolString = args[i]; } else if (args[i].equals("addr") && i < args.length - 1) { addrString = args[++i]; } else if (portString == null) { portString = 
args[i]; } else { usage("Spurious command line: " + args[i]); } } int protocol; if (protocolString == null) { usage("Missing protocol"); return; } else if (protocolString.equalsIgnoreCase("socket")) { protocol = SOCKET; } else if (protocolString.equalsIgnoreCase("javagroups") || protocolString.equalsIgnoreCase("jgroups")) { protocol = MULTICAST; } else { usage("Unknown protocol: " + protocolString); return; } InetAddress addr = null; if (addrString != null) { addr = InetAddress.getByName(addrString); } if (portString != null) { int port; try { port = Integer.parseInt(portString); } catch (NumberFormatException ex) { usage("Malformed port: " + portString); return; } out.println("\nPort " + port + " is " + (isPortAvailable(port, protocol, addr) ? "" : "not ") + "available for a " + protocolString + " connection\n"); } else { out.println("\nRandomly selected " + protocolString + " port: " + getRandomAvailablePort(protocol, addr) + "\n"); } } }
apache-2.0
JinBuHanLin/eshow-android
app/src/main/java/cn/org/eshow/demo/activity/LoginActivity.java
12929
package cn.org.eshow.demo.activity; import android.app.ProgressDialog; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.LinearLayout; import android.widget.RelativeLayout; import android.widget.TextView; import com.balysv.materialmenu.MaterialMenuDrawable; import com.balysv.materialmenu.MaterialMenuView; import com.umeng.socialize.Config; import com.umeng.socialize.UMAuthListener; import com.umeng.socialize.UMShareAPI; import com.umeng.socialize.bean.SHARE_MEDIA; import org.androidannotations.annotations.AfterViews; import org.androidannotations.annotations.Click; import org.androidannotations.annotations.EActivity; import org.androidannotations.annotations.ViewById; import org.json.JSONException; import org.json.JSONObject; import java.util.Collection; import java.util.Iterator; import java.util.Map; import cn.org.eshow.demo.R; import cn.org.eshow.demo.bean.Enum_CodeType; import cn.org.eshow.demo.bean.Enum_ThirdType; import cn.org.eshow.demo.bluetooth.StringUtil; import cn.org.eshow.demo.common.CommonActivity; import cn.org.eshow.demo.common.Global; import cn.org.eshow.demo.common.SharedPrefUtil; import cn.org.eshow.demo.network.ESResponseListener; import cn.org.eshow.demo.network.NetworkInterface; import cn.org.eshow.demo.view.LoginAutoCompleteEdit; import cn.org.eshow.framwork.fragment.AbProgressDialogFragment; import cn.org.eshow.framwork.util.AbDialogUtil; import cn.org.eshow.framwork.util.AbLogUtil; import cn.org.eshow.framwork.util.AbStrUtil; import cn.org.eshow.framwork.util.AbToastUtil; import cn.org.eshow.framwork.util.AbViewUtil; /** * 登录页面 * Created by daikting on 16/2/24. 
*/ @EActivity(R.layout.activity_login) public class LoginActivity extends CommonActivity { private Context mContext = LoginActivity.this; @ViewById(R.id.rlBack) RelativeLayout mRlMenu; @ViewById(R.id.material_back_button) MaterialMenuView mMaterialBackButton; @ViewById(R.id.tvTitle) TextView mTvTitle; @ViewById(R.id.tvSubTitle) TextView mTvSubTitle; @ViewById(R.id.etTel) LoginAutoCompleteEdit mEtTel; @ViewById(R.id.etPassword) LoginAutoCompleteEdit mEtPassword; @ViewById(R.id.btnLogin) Button mBtnLogin; @ViewById(R.id.tvForgetPW) TextView mTvForgetPW; @ViewById(R.id.llQQLogin) LinearLayout mLlQQLogin; @ViewById(R.id.llWechatLogin) LinearLayout mLlWechatLogin; //友盟分享 UMShareAPI umShareAPI; AbProgressDialogFragment progressDialog; String userName = ""; //第三方授权成功得到的token String thirdToken = ""; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); } @AfterViews void init() { AbViewUtil.scaleContentView((RelativeLayout) findViewById(R.id.rlParent)); mTvTitle.setText(getTitle()); mTvSubTitle.setVisibility(View.VISIBLE); mMaterialBackButton.setState(MaterialMenuDrawable.IconState.ARROW); mMaterialBackButton.setVisibility(View.VISIBLE); userName = getIntent().getStringExtra(InputPasswordActivity.INTENT_TEL); if(cn.org.eshow.framwork.util.AbStrUtil.isEmpty(userName)){ userName = SharedPrefUtil.getTempTel(mContext); } mEtTel.setText(userName); } @Click(R.id.rlBack) void onBack() { finish(); } @Click(R.id.btnLogin) void onLogin() { String userName = mEtTel.getText().toString(); final String password = mEtPassword.getText().toString(); if (AbStrUtil.isEmpty(userName) && !AbStrUtil.isMobileNo(userName)) { AbToastUtil.showToast(mContext, "请输入账号!!"); return; } if (AbStrUtil.isEmpty(password)) { AbToastUtil.showToast(mContext, "请输入密码!"); return; } if (AbStrUtil.strLength(password) > 20) { AbToastUtil.showToast(mContext, "密码长度过长!"); return; } ESResponseListener responseListener = new ESResponseListener(mContext) { @Override public void 
onBQSucess(String esMsg, JSONObject resultJson) { try { String userStr = resultJson.getJSONObject("user").toString(); AbLogUtil.d(mContext, "Login userStr:" + userStr); SharedPrefUtil.setUser(mContext, userStr); JSONObject tokenJson = resultJson.getJSONObject("accessToken"); String token = tokenJson.getString("accessToken"); SharedPrefUtil.setAccesstoken(mContext,token); } catch (JSONException e) { e.printStackTrace(); } MainActivity_.intent(mContext).start(); } @Override public void onBQNoData() { } @Override public void onBQNotify(String bqMsg) { AbToastUtil.showToast(mContext, bqMsg); } @Override public void onStart() { progressDialog = AbDialogUtil.showProgressDialog(mContext, Global.LOADING_PROGRESSBAR_ID, "请求数据中..."); } @Override public void onFinish() { progressDialog.dismiss(); } @Override public void onFailure(int statusCode, String content, Throwable error) { Log.i("LoginActivity", "1onFailure:statusCode = " + statusCode + ", content is " + content); if(error!=null){ error.printStackTrace(); } progressDialog.dismiss(); try { JSONObject ret=new JSONObject(content); int rr=ret.getInt("status"); Log.i("LoginActivity","status =="+rr); if(rr==-5){ Log.i("LoginActivity", ret.getString("msg")); AbToastUtil.showToast(mContext, ret.getString("msg")); } } catch (JSONException e) { e.printStackTrace(); } } }; NetworkInterface.login(mContext, userName, password, responseListener); } @Click(R.id.tvForgetPW) void onForgetPW() { String userName = mEtTel.getText().toString(); InputTelActivity_.intent(mContext).extra(InputTelActivity_.INTENT_ISREGISTER, Enum_CodeType.FINDPASSWORD).extra(InputTelActivity_.INTENT_TEL,userName).start(); } /** * 点击注册 */ @Click(R.id.tvSubTitle) void onRegister() { String userName = mEtTel.getText().toString(); InputTelActivity_.intent(mContext).extra(InputTelActivity_.INTENT_ISREGISTER, Enum_CodeType.REGISTER).extra(InputTelActivity_.INTENT_TEL,userName).start(); } /** * 微信授权登录 */ @Click(R.id.llWechatLogin) void onWechatLogin() { 
Log.i("LoginActivity", "微信授权登录"); //AbToastUtil.showToast(mContext,"功能正在完善开发中..."); Config.dialog = ProgressDialog.show(mContext, "提示", "正在请求跳转...."); SHARE_MEDIA platform = SHARE_MEDIA.WEIXIN; umShareAPI = UMShareAPI.get(mContext); umShareAPI.doOauthVerify(this, platform, umAuthListener); } /** * QQ授权登录 */ @Click(R.id.llQQLogin) void onQQLogin() { Log.i("LoginActivity","QQ授权登录"); //AbToastUtil.showToast(mContext,"功能正在完善开发中..."); Config.dialog = ProgressDialog.show(mContext, "提示", "正在请求跳转...."); SHARE_MEDIA platform = SHARE_MEDIA.QQ; umShareAPI = UMShareAPI.get(mContext); umShareAPI.doOauthVerify(this, platform, umAuthListener); } /** * 授权回调监听 */ private UMAuthListener umAuthListener = new UMAuthListener() { @Override public void onComplete(SHARE_MEDIA platform, int action, Map<String, String> data) { Log.i("LoginActivity", "platform is " + platform.name() + ",,action = " + action + ""); work(data);// thirdToken = data.get("access_token"); Log.i("LoginActivity","======before thirdToken is == "+thirdToken); //截取前16位 thirdToken = thirdToken.substring(0,16); Enum_ThirdType thirdType = Enum_ThirdType.QQ;// Log.i("LoginActivity", "======after thirdToken is == " + thirdToken); if(platform.name().equals(SHARE_MEDIA.WEIXIN.toString())){ thirdType = Enum_ThirdType.WeChat;// }else if(platform.name().equals(SHARE_MEDIA.QQ.toString())){ thirdType = Enum_ThirdType.QQ;// } NetworkInterface.thirdLogin(mContext,thirdToken,thirdType,thirdLoginResListener); } @Override public void onError(SHARE_MEDIA platform, int action, Throwable t) { Log.i("LoginActivity", "SHARE_MEDIA is " + platform.name() + ",,action = " + action + ",,Throwable is " + t.toString()); AbToastUtil.showToast(mContext, platform.name() + " Authorize fail"); } @Override public void onCancel(SHARE_MEDIA platform, int action) { Log.i("LoginActivity", "SHARE_MEDIA is " + platform.name() + ",,action = " + action); AbToastUtil.showToast(mContext, platform.name() + " Authorize cancel"); } }; public void work(Map<String, 
String> map) { if(map==null){ return; } Collection<String> c = map.values(); Iterator it = c.iterator(); for (; it.hasNext();) { Log.i("LoginActivity","--------"+it.next()); } } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (umShareAPI != null) { umShareAPI.onActivityResult(requestCode, resultCode, data); } else { //应用未审核 Log.i("LoginActivity","应用未审核,平台拒绝了应用的授权请求!"); AbToastUtil.showToast(mContext, "应用未审核,平台拒绝了应用的授权请求!"); } } /** * 第三方登录接口回调 */ ESResponseListener thirdLoginResListener = new ESResponseListener(mContext) { @Override public void onBQSucess(String esMsg, JSONObject resultJson) { if(!StringUtil.isBlank(esMsg)){ Log.i("LoginActivity","esMsg is "+esMsg); } if(resultJson!=null){ Log.i("LoginActivity","resultJson is "+resultJson.toString()); } try { boolean isBound = resultJson.getBoolean("type"); if(isBound){ String userStr = resultJson.getJSONObject("user").toString(); AbLogUtil.d(mContext, "Login userStr:" + userStr); SharedPrefUtil.setUser(mContext, userStr); JSONObject tokenJson = resultJson.getJSONObject("accessToken"); String token = tokenJson.getString("accessToken"); SharedPrefUtil.setAccesstoken(mContext,token); MainActivity_.intent(mContext).start(); }else{//进行手机号码绑定 String userName = mEtTel.getText().toString(); InputTelActivity_.intent(mContext).extra(InputTelActivity_.INTENT_ISREGISTER, Enum_CodeType.BOUND).extra(InputTelActivity_.INTENT_THIRDTOEKN, thirdToken).extra(InputTelActivity_.INTENT_TEL,userName).start(); } } catch (JSONException e) { e.printStackTrace(); } } @Override public void onBQNoData() { } @Override public void onBQNotify(String bqMsg) { AbToastUtil.showToast(mContext, bqMsg); } @Override public void onStart() { progressDialog = AbDialogUtil.showProgressDialog(mContext, Global.LOADING_PROGRESSBAR_ID, "请求登录中..."); } @Override public void onFinish() { progressDialog.dismiss(); } @Override public void onFailure(int statusCode, 
String content, Throwable error) { Log.i("LoginActivity", "2onFailure:statusCode = " + statusCode + ", content is " + content); if(error!=null){ error.printStackTrace(); } progressDialog.dismiss(); AbToastUtil.showToast(mContext, content); } }; }
apache-2.0
GlassTune/GlassTune
glass/src/main/java/com/glasstune/audio/IPitchDetectionHandler.java
165
package com.glasstune.audio; /** * Created by njackson on 29/12/14. */ public interface IPitchDetectionHandler { public void handlePitch(double frequency); }
apache-2.0
hunterrees/challengeChessServer
src/main/java/manager/ClientConnectionManager.java
1405
package manager; import exception.user.UserNotFoundException; import model.ConnectionInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.Socket; import java.util.HashMap; import java.util.Map; public class ClientConnectionManager { private static final Logger LOGGER = LoggerFactory.getLogger(ClientConnectionManager.class); private final Map<String, Socket> sockets; private static ClientConnectionManager instance; ClientConnectionManager() { sockets = new HashMap<>(); } public static ClientConnectionManager getInstance() { if (instance == null) { instance = new ClientConnectionManager(); } return instance; } /** * Sets up a connection with the given client. * * @param username non-null string of the user who wants to connect with the server. * @param connectionInfo non-null object containing host and port of the client. */ public void setUpConnection(String username, ConnectionInfo connectionInfo) { } /** * Sends data to the given user on the connection already established. * * @param username non-null string of the user to send data to. * @param data non-null object containing the data to send. * @throws UserNotFoundException if the user did not already establish a connection. */ public void sendData(String username, Object data) throws UserNotFoundException { } }
apache-2.0
McLeodMoores/starling
projects/util/src/main/java/com/opengamma/util/PoolExecutor.java
13485
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.util; import java.lang.ref.Reference; import java.lang.ref.WeakReference; import java.util.Iterator; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Callable; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.Lifecycle; /** * Implementation of {@link Executor} that allows jobs to run in a group with a single consumer receiving results for them. * <p> * The maximum number of additional threads is limited, but the thread which submitted jobs may temporarily join the pool to allow its tasks to complete. */ public class PoolExecutor implements Executor, Lifecycle { private static final Logger LOGGER = LoggerFactory.getLogger(PoolExecutor.class); /** * Callback interface for receiving results of a pooled execution. */ public interface CompletionListener<T> { void success(T result); void failure(Throwable error); } /** * Implementation of a {@link ExecutorService} that is associated with a group. 
*/ public class Service<T> implements Executor { private final AtomicInteger _pending = new AtomicInteger(); private final CompletionListener<T> _listener; private volatile boolean _shutdown; private boolean _joining; protected Service(final CompletionListener<T> listener) { LOGGER.info("Created thread pool service {}", this); _listener = listener; } protected void decrementAndNotify() { if (_pending.decrementAndGet() == 0) { synchronized (this) { if (_joining) { notifyAll(); } } } } protected void postResult(final T result) { if (_listener != null && !_shutdown) { LOGGER.debug("Result available from {} - {} remaining", this, _pending); _listener.success(result); } else { LOGGER.debug("Discarding result from {} - {} remaining", this, _pending); } } protected void postException(final Throwable error) { if (_listener != null && !_shutdown) { LOGGER.debug("Error available from {} - {} remaining", this, _pending); _listener.failure(error); } else { LOGGER.debug("Discarding result from {} - {} remaining", this, _pending); } } /** * Submits a job for execution, posting the result when it completes. * <p> * This must not be used after {@link #shutdown} or {@link #join} have been called. * * @param command the job to execute, not null * @param result the result to post */ public void execute(final Runnable command, final T result) { _pending.incrementAndGet(); PoolExecutor.this.execute(new ExecuteRunnable<>(this, command, result)); } /** * Submits a job for execution, posting its result when it completes. * <p> * This must not be used after {@link #shutdown} or {@link #join} have been called. * * @param command the job to execute, not null */ public void execute(final Callable<T> command) { _pending.incrementAndGet(); PoolExecutor.this.execute(new ExecuteCallable<>(this, command)); } /** * Discards any outstanding jobs. This will return immediately; to wait for jobs to be discarded or completed, call {@link #join} afterwards. 
*/ public synchronized void shutdown() { LOGGER.info("Shutting down {}", this); if (_shutdown) { return; } _shutdown = true; if (_joining) { notifyAll(); } final Iterator<Runnable> itrQueue = getQueue().iterator(); while (itrQueue.hasNext()) { final Runnable entry = itrQueue.next(); if (entry instanceof Execute) { final Execute<?> execute = (Execute<?>) entry; if (execute._service == this && execute.markExecuted()) { LOGGER.debug("Discarding {}", execute); _pending.decrementAndGet(); itrQueue.remove(); } } } } /** * Waits for all submitted jobs to complete. This thread may execute one or more of the submitted jobs. * * @throws InterruptedException * if there is an interruption before the jobs are complete */ public void join() throws InterruptedException { LOGGER.info("Joining"); Execute<?> inline = null; try { Iterator<Runnable> itrQueue = null; do { synchronized (this) { _joining = true; try { if (_pending.get() == 0) { LOGGER.info("No pending tasks"); _shutdown = true; return; } if (itrQueue == null || !itrQueue.hasNext()) { itrQueue = getQueue().iterator(); } while (itrQueue.hasNext()) { final Runnable entry = itrQueue.next(); if (entry instanceof Execute) { final Execute<?> execute = (Execute<?>) entry; if (execute._service == this && execute.markExecuted()) { LOGGER.debug("Inline execution of {}", execute); itrQueue.remove(); inline = execute; break; } } } if (inline == null) { LOGGER.info("No inline executions available, waiting for {} remaining tasks", _pending); wait(); } } finally { _joining = false; } } if (inline != null) { inline.runImpl(); inline = null; } } while (true); } finally { if (inline != null) { getQueue().add(inline); } } } // Executor /** * Submit a job for execution to the group. This is the same as calling {@link #execute(Runnable,Object)}. * <p> * This must not be used after {@link #shutdown} or {@link #join} have been called. 
* * @param command the job to execute, not null */ @Override public void execute(final Runnable command) { execute(command, null); } // Object @Override public String toString() { return Integer.toHexString(hashCode()); } } private abstract static class Execute<T> implements Runnable { private final Service<T> _service; private final AtomicBoolean _executed = new AtomicBoolean(); protected Execute(final Service<T> service) { _service = service; } public boolean markExecuted() { return !_executed.getAndSet(true); } protected abstract T callImpl() throws Throwable; protected void runImpl() { try { LOGGER.debug("Executing {}", this); _service.postResult(callImpl()); } catch (final Throwable t) { _service.postException(t); } finally { _service.decrementAndNotify(); } } @Override public void run() { if (_service._shutdown) { return; } if (markExecuted()) { runImpl(); } else { LOGGER.debug("Already executed or cancelled {}", this); } } @Override public String toString() { return _service.toString(); } } private static final class ExecuteRunnable<T> extends Execute<T> { private final Runnable _runnable; private final T _result; ExecuteRunnable(final Service<T> service, final Runnable runnable, final T result) { super(service); ArgumentChecker.notNull(runnable, "runnable"); _runnable = runnable; _result = result; } @Override protected T callImpl() { _runnable.run(); return _result; } @Override public String toString() { return super.toString() + "/" + _runnable; } } private static final class ExecuteCallable<T> extends Execute<T> { private final Callable<T> _callable; ExecuteCallable(final Service<T> service, final Callable<T> callable) { super(service); ArgumentChecker.notNull(callable, "callable"); _callable = callable; } @Override protected T callImpl() throws Throwable { return _callable.call(); } @Override public String toString() { return super.toString() + "/" + _callable; } } private static final ThreadLocal<Reference<PoolExecutor>> INSTANCE = new ThreadLocal<>(); 
private final Reference<PoolExecutor> _me = new WeakReference<>(this); private final BlockingQueue<Runnable> _queue = new LinkedBlockingQueue<>(); private final ThreadPoolExecutor _underlying; private static final class ExecutorThread extends Thread { private final Reference<PoolExecutor> _owner; private ExecutorThread(final Reference<PoolExecutor> owner, final ThreadGroup group, final Runnable runnable, final String threadName, final int stackSize) { super(group, runnable, threadName, stackSize); _owner = owner; } @Override public void run() { INSTANCE.set(_owner); super.run(); } } private static final class ExecutorThreadFactory extends NamedThreadPoolFactory { private final Reference<PoolExecutor> _owner; private ExecutorThreadFactory(final Reference<PoolExecutor> owner, final String name) { super(name, true); _owner = owner; } @Override protected Thread createThread(final ThreadGroup group, final Runnable runnable, final String threadName, final int stackSize) { return new ExecutorThread(_owner, group, runnable, threadName, stackSize); } } /** * Creates a new execution pool with the given (maximum) number of threads. * <p> * This can be created with no threads. Tasks submitted will never be executed unless they arrive from a pool and another thread * then joins that pool to complete its execution. 
* * @param maxThreads the maximum number of threads to put in the pool * @param name the diagnostic name to use for the pool */ public PoolExecutor(final int maxThreads, final String name) { if (maxThreads > 0) { final ThreadFactory factory = new ExecutorThreadFactory(_me, name); _underlying = new MdcAwareThreadPoolExecutor(maxThreads, maxThreads, 60, TimeUnit.SECONDS, _queue, factory); _underlying.allowCoreThreadTimeOut(true); } else { _underlying = null; } } @Override protected void finalize() { if (_underlying != null) { _underlying.shutdown(); } } protected BlockingQueue<Runnable> getQueue() { return _queue; } /** * Creates a service group with a listener to handle results from that group. * * @param <T> the result type for jobs submitted to the group * @param listener the listener to receive results from jobs in the group, or null if the results are not wanted * @return the service group to submit further jobs to */ public <T> Service<T> createService(final CompletionListener<T> listener) { return new Service<>(listener); } public ExecutorService asService() { return _underlying; } /** * Registers an instance with the current thread, returning the previously registered instance (if any). * * @param instance the instance to register, or null for none * @return the previously registered instance, or null for none */ public static PoolExecutor setInstance(final PoolExecutor instance) { final Reference<PoolExecutor> previous = INSTANCE.get(); if (instance != null) { INSTANCE.set(instance._me); } else { INSTANCE.set(null); } if (previous != null) { return previous.get(); } return null; } /** * Returns the instance registered with the current thread, if any. * * @return the registered instance, or null for none */ public static PoolExecutor instance() { final Reference<PoolExecutor> executor = INSTANCE.get(); if (executor != null) { return executor.get(); } return null; } // Executor /** * Submits a job to the underlying execution pool. 
* * @param command the job to execute, not null */ @Override public void execute(final Runnable command) { LOGGER.debug("Submitting {}", command); if (_underlying != null) { _underlying.execute(command); } else { getQueue().add(command); } } // Lifecycle /** * Dummy {@link Lifecycle#start} method; this object is implicitly started at construction and it is not possible to restart it after a {@link #stop} request. */ @Override public void start() { if (!isRunning()) { throw new IllegalStateException("Can't restart service after explicit stop"); } } @Override public void stop() { _me.clear(); if (_underlying != null) { _underlying.shutdown(); } } @Override public boolean isRunning() { return _me.get() != null; } }
apache-2.0
johnpr01/STAF-Aeron
src/main/java/com/kaazing/staf_aeron/tests/Test0080.java
2342
/* * Copyright 2015 Kaazing Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.kaazing.staf_aeron.tests; import com.kaazing.staf_aeron.AeronSTAFProcess; import com.kaazing.staf_aeron.STAFHost; import com.kaazing.staf_aeron.YAMLTestCase; import com.kaazing.staf_aeron.tests.Test; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; public class Test0080 extends Test { public Test0080(YAMLTestCase testCase) { super(testCase); } public void run() { int port = getPort(hosts[0]); String channel = "udp://" + hosts[0].getIpAddress() + ":" + port; String[] commands = { SUB, PUB, PUB }; String[] types = { "sub", "pub", "pub2" }; for (int i = 0; i < hosts.length; i++) { startProcess(hosts[i], hosts[i].getJavaPath() + hosts[i].getPathSeperator() + "java " + aeronDirs[i] + hosts[i].getPathSeperator() + " " + hosts[i].getProperties() + " -cp " + hosts[i].getClasspath() + " " + commands[i] + " " + embedded + " -c=" + channel + " " + hosts[i].getOptions(), testCase.getName() + "-" + types[i], 60); } try { Thread.sleep(10000); killProcess(testCase.getName() + "-" + types[1], true); Thread.sleep(3000); latch.await(); } catch (Exception e) { e.printStackTrace(); } validate(); cleanup(); } // the expected result: the subscriber will continue to get messages from publisher 2. Eventually the subscriber reports // the connection with publisher 1 is INACTIVE public void validate() { } }
apache-2.0
dragonflyor/AndroidDemo_Base
08_04_服务建立广播接收者/src/com/xiaozhe/coderegistereceiver/ScreenReceiver.java
598
package com.xiaozhe.coderegistereceiver; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; public class ScreenReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { // TODO Auto-generated method stub String action = intent.getAction(); //System.out.println("ÊÕµ½¹ã²¥£º"+action); if("android.intent.action.SCREEN_ON".equals(action)){ System.out.println("ÆÁÄ»´ò¿ª"); }else if("android.intent.action.SCREEN_OFF".equals(action)){ System.out.println("¹Ø±ÕÁËÆÁÄ»"); } } }
apache-2.0
evrignaud/wisdom
core/content-manager/src/main/java/org/wisdom/content/encoding/ValuedEncoding.java
4140
/* * #%L * Wisdom-Framework * %% * Copyright (C) 2013 - 2014 Wisdom Framework * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wisdom.content.encoding; /** * Structure storing encoding values from the {@code ACCEPT-ENCODING} HTTP header. */ public class ValuedEncoding implements Comparable<ValuedEncoding> { /** * The encoding. */ String encoding = null; /** * The 'q' value (quality). */ Double qValue = 1.0; /** * The position in the original header. */ Integer position; /** * Creates a {@link org.wisdom.content.encoding.ValuedEncoding}. * * @param encodingName the encoding * @param qValue the q value * @param position the position in the header */ public ValuedEncoding(String encodingName, Double qValue, int position) { this.encoding = encodingName; this.qValue = qValue; this.position = position; } /** * Parses the the given {@code ACCEPT-ENCODING} item (encodingItem), and creates an * {@link org.wisdom.content.encoding.ValuedEncoding}. 
* * @param encodingItem the item * @param position the position */ public ValuedEncoding(String encodingItem, int position) { this.position = position; //Split an encoding item between encoding and its qValue String[] encodingParts = encodingItem.split(";"); //Grab encoding name encoding = encodingParts[0].trim().replace("\n", ""); //Grab encoding's qValue if it exists (default 1.0 otherwise) if (encodingParts.length > 1) { qValue = Double.parseDouble(encodingParts[1].trim() .replace("\n", "") .replace("q=", "")); } } /** * Gets the encoding. * * @return the encoding */ public String getEncoding() { return encoding; } /** * Gets the 'q' value. * * @return the q value. */ public Double getqValue() { return qValue; } /** * Gets the position from the original list. * * @return the position */ public Integer getPosition() { return position; } /** * Compares two {@link org.wisdom.content.encoding.ValuedEncoding}. * * @param o the object to compare with. * @return 0, 1 or -1. */ @Override public int compareTo(ValuedEncoding o) { if (qValue.equals(o.qValue)) { // In case 2 encodings have the same qValue, the first one has priority return position.compareTo(o.position); } //Highest qValue first, invert default ascending comparison return qValue.compareTo(o.qValue) * -1; } /** * Checks whether the current object is equal to the given object. * * @param obj the object * @return {@code true} if the two objects are equal. */ @Override public boolean equals(Object obj) { if (!(obj instanceof ValuedEncoding)) { return false; } ValuedEncoding ov = (ValuedEncoding) obj; return this.getPosition().equals(ov.getPosition()) && this.getEncoding().equals(ov.getEncoding()) && this.getqValue().equals(ov.getqValue()); } /** * Hash code computation. * * @return the hash code */ @Override public int hashCode() { int enc = encoding != null ? encoding.hashCode() : 0; int qval = qValue != null ? qValue.hashCode() : 0; return 31 * (enc + qval + position); } }
apache-2.0
hirohanin/elephant-birdpig7hadoop21
src/java/com/twitter/elephantbird/proto/codegen/ProtobufWritableGenerator.java
1351
package com.twitter.elephantbird.proto.codegen;

import com.twitter.elephantbird.mapreduce.io.ProtobufWritable;
import com.twitter.elephantbird.proto.util.FormattingStringBuffer;
import com.twitter.elephantbird.util.TypeRef;

/**
 * Generates the source of a Hadoop {@link ProtobufWritable} subclass for the
 * protobuf message described by {@code descriptorProto_}.
 */
public class ProtobufWritableGenerator extends ProtoCodeGenerator {

  @Override
  public String getFilename() {
    // e.g. com/twitter/foo/mapreduce/io/ProtobufPersonWritable.java
    return String.format("%s/mapreduce/io/Protobuf%sWritable.java",
        packageName_.replaceAll("\\.", "/"), descriptorProto_.getName());
  }

  @Override
  public String generateCode() {
    // Hoisted: the message name is used in every generated line.
    String name = descriptorProto_.getName();
    FormattingStringBuffer sb = new FormattingStringBuffer();
    sb.append("package %s.mapreduce.io;", packageName_).endl().endl();

    sb.append("import %s.%s.%s;", packageName_, protoFilename_, name).endl();
    sb.append("import %s;", ProtobufWritable.class.getName()).endl();
    sb.append("import %s;", TypeRef.class.getName()).endl().endl();

    // Fixed: the format string has two %s placeholders, but the original
    // passed the name three times; the extra argument was silently ignored
    // (java.util.Formatter drops surplus arguments), so output is unchanged.
    sb.append("public class Protobuf%sWritable extends ProtobufWritable<%s> {", name, name).endl();
    sb.append(" public Protobuf%sWritable() {", name).endl();
    sb.append(" super(new TypeRef<%s>(){});", name).endl();
    sb.append(" }").endl();
    sb.append("}").endl();
    sb.endl();
    return sb.toString();
  }
}
apache-2.0
googleapis/java-dialogflow-cx
proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/CreateFlowRequest.java
38690
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3beta1/flow.proto package com.google.cloud.dialogflow.cx.v3beta1; /** * * * <pre> * The request message for [Flows.CreateFlow][google.cloud.dialogflow.cx.v3beta1.Flows.CreateFlow]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest} */ public final class CreateFlowRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest) CreateFlowRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateFlowRequest.newBuilder() to construct. 
private CreateFlowRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateFlowRequest() { parent_ = ""; languageCode_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateFlowRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CreateFlowRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); parent_ = s; break; } case 18: { com.google.cloud.dialogflow.cx.v3beta1.Flow.Builder subBuilder = null; if (flow_ != null) { subBuilder = flow_.toBuilder(); } flow_ = input.readMessage( com.google.cloud.dialogflow.cx.v3beta1.Flow.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(flow_); flow_ = subBuilder.buildPartial(); } break; } case 26: { java.lang.String s = input.readStringRequireUtf8(); languageCode_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
com.google.cloud.dialogflow.cx.v3beta1.FlowProto .internal_static_google_cloud_dialogflow_cx_v3beta1_CreateFlowRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3beta1.FlowProto .internal_static_google_cloud_dialogflow_cx_v3beta1_CreateFlowRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest.class, com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; private volatile java.lang.Object parent_; /** * * * <pre> * Required. The agent to create a flow for. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The agent to create a flow for. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FLOW_FIELD_NUMBER = 2; private com.google.cloud.dialogflow.cx.v3beta1.Flow flow_; /** * * * <pre> * Required. The flow to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.Flow flow = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the flow field is set. */ @java.lang.Override public boolean hasFlow() { return flow_ != null; } /** * * * <pre> * Required. The flow to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.Flow flow = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The flow. */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.Flow getFlow() { return flow_ == null ? com.google.cloud.dialogflow.cx.v3beta1.Flow.getDefaultInstance() : flow_; } /** * * * <pre> * Required. The flow to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.Flow flow = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.FlowOrBuilder getFlowOrBuilder() { return getFlow(); } public static final int LANGUAGE_CODE_FIELD_NUMBER = 3; private volatile java.lang.Object languageCode_; /** * * * <pre> * The language of the following fields in `flow`: * * `Flow.event_handlers.trigger_fulfillment.messages` * * `Flow.event_handlers.trigger_fulfillment.conditional_cases` * * `Flow.transition_routes.trigger_fulfillment.messages` * * `Flow.transition_routes.trigger_fulfillment.conditional_cases` * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. 
* Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @return The languageCode. */ @java.lang.Override public java.lang.String getLanguageCode() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); languageCode_ = s; return s; } } /** * * * <pre> * The language of the following fields in `flow`: * * `Flow.event_handlers.trigger_fulfillment.messages` * * `Flow.event_handlers.trigger_fulfillment.conditional_cases` * * `Flow.transition_routes.trigger_fulfillment.messages` * * `Flow.transition_routes.trigger_fulfillment.conditional_cases` * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @return The bytes for languageCode. 
*/ @java.lang.Override public com.google.protobuf.ByteString getLanguageCodeBytes() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); languageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (flow_ != null) { output.writeMessage(2, getFlow()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, languageCode_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (flow_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getFlow()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, languageCode_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest)) { return super.equals(obj); } 
com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest other = (com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasFlow() != other.hasFlow()) return false; if (hasFlow()) { if (!getFlow().equals(other.getFlow())) return false; } if (!getLanguageCode().equals(other.getLanguageCode())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasFlow()) { hash = (37 * hash) + FLOW_FIELD_NUMBER; hash = (53 * hash) + getFlow().hashCode(); } hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER; hash = (53 * hash) + getLanguageCode().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for [Flows.CreateFlow][google.cloud.dialogflow.cx.v3beta1.Flows.CreateFlow]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest) com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3beta1.FlowProto .internal_static_google_cloud_dialogflow_cx_v3beta1_CreateFlowRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3beta1.FlowProto .internal_static_google_cloud_dialogflow_cx_v3beta1_CreateFlowRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest.class, com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest.Builder.class); } // Construct using com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest.newBuilder() private Builder() { 
maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); parent_ = ""; if (flowBuilder_ == null) { flow_ = null; } else { flow_ = null; flowBuilder_ = null; } languageCode_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.cx.v3beta1.FlowProto .internal_static_google_cloud_dialogflow_cx_v3beta1_CreateFlowRequest_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest getDefaultInstanceForType() { return com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest build() { com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest buildPartial() { com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest result = new com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest(this); result.parent_ = parent_; if (flowBuilder_ == null) { result.flow_ = flow_; } else { result.flow_ = flowBuilder_.build(); } result.languageCode_ = languageCode_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } 
@java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest) { return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest other) { if (other == com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; onChanged(); } if (other.hasFlow()) { mergeFlow(other.getFlow()); } if (!other.getLanguageCode().isEmpty()) { languageCode_ = other.languageCode_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } 
private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The agent to create a flow for. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The agent to create a flow for. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The agent to create a flow for. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; onChanged(); return this; } /** * * * <pre> * Required. The agent to create a flow for. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); onChanged(); return this; } /** * * * <pre> * Required. The agent to create a flow for. * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; onChanged(); return this; } private com.google.cloud.dialogflow.cx.v3beta1.Flow flow_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.Flow, com.google.cloud.dialogflow.cx.v3beta1.Flow.Builder, com.google.cloud.dialogflow.cx.v3beta1.FlowOrBuilder> flowBuilder_; /** * * * <pre> * Required. The flow to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.Flow flow = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the flow field is set. */ public boolean hasFlow() { return flowBuilder_ != null || flow_ != null; } /** * * * <pre> * Required. The flow to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.Flow flow = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The flow. */ public com.google.cloud.dialogflow.cx.v3beta1.Flow getFlow() { if (flowBuilder_ == null) { return flow_ == null ? com.google.cloud.dialogflow.cx.v3beta1.Flow.getDefaultInstance() : flow_; } else { return flowBuilder_.getMessage(); } } /** * * * <pre> * Required. The flow to create. 
* </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.Flow flow = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setFlow(com.google.cloud.dialogflow.cx.v3beta1.Flow value) { if (flowBuilder_ == null) { if (value == null) { throw new NullPointerException(); } flow_ = value; onChanged(); } else { flowBuilder_.setMessage(value); } return this; } /** * * * <pre> * Required. The flow to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.Flow flow = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setFlow(com.google.cloud.dialogflow.cx.v3beta1.Flow.Builder builderForValue) { if (flowBuilder_ == null) { flow_ = builderForValue.build(); onChanged(); } else { flowBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. The flow to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.Flow flow = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeFlow(com.google.cloud.dialogflow.cx.v3beta1.Flow value) { if (flowBuilder_ == null) { if (flow_ != null) { flow_ = com.google.cloud.dialogflow.cx.v3beta1.Flow.newBuilder(flow_) .mergeFrom(value) .buildPartial(); } else { flow_ = value; } onChanged(); } else { flowBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Required. The flow to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.Flow flow = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearFlow() { if (flowBuilder_ == null) { flow_ = null; onChanged(); } else { flow_ = null; flowBuilder_ = null; } return this; } /** * * * <pre> * Required. The flow to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.Flow flow = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.cx.v3beta1.Flow.Builder getFlowBuilder() { onChanged(); return getFlowFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The flow to create. 
* </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.Flow flow = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.cx.v3beta1.FlowOrBuilder getFlowOrBuilder() { if (flowBuilder_ != null) { return flowBuilder_.getMessageOrBuilder(); } else { return flow_ == null ? com.google.cloud.dialogflow.cx.v3beta1.Flow.getDefaultInstance() : flow_; } } /** * * * <pre> * Required. The flow to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.Flow flow = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.Flow, com.google.cloud.dialogflow.cx.v3beta1.Flow.Builder, com.google.cloud.dialogflow.cx.v3beta1.FlowOrBuilder> getFlowFieldBuilder() { if (flowBuilder_ == null) { flowBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.Flow, com.google.cloud.dialogflow.cx.v3beta1.Flow.Builder, com.google.cloud.dialogflow.cx.v3beta1.FlowOrBuilder>( getFlow(), getParentForChildren(), isClean()); flow_ = null; } return flowBuilder_; } private java.lang.Object languageCode_ = ""; /** * * * <pre> * The language of the following fields in `flow`: * * `Flow.event_handlers.trigger_fulfillment.messages` * * `Flow.event_handlers.trigger_fulfillment.conditional_cases` * * `Flow.transition_routes.trigger_fulfillment.messages` * * `Flow.transition_routes.trigger_fulfillment.conditional_cases` * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @return The languageCode. 
*/ public java.lang.String getLanguageCode() { java.lang.Object ref = languageCode_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); languageCode_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The language of the following fields in `flow`: * * `Flow.event_handlers.trigger_fulfillment.messages` * * `Flow.event_handlers.trigger_fulfillment.conditional_cases` * * `Flow.transition_routes.trigger_fulfillment.messages` * * `Flow.transition_routes.trigger_fulfillment.conditional_cases` * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @return The bytes for languageCode. */ public com.google.protobuf.ByteString getLanguageCodeBytes() { java.lang.Object ref = languageCode_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); languageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The language of the following fields in `flow`: * * `Flow.event_handlers.trigger_fulfillment.messages` * * `Flow.event_handlers.trigger_fulfillment.conditional_cases` * * `Flow.transition_routes.trigger_fulfillment.messages` * * `Flow.transition_routes.trigger_fulfillment.conditional_cases` * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @param value The languageCode to set. * @return This builder for chaining. 
*/ public Builder setLanguageCode(java.lang.String value) { if (value == null) { throw new NullPointerException(); } languageCode_ = value; onChanged(); return this; } /** * * * <pre> * The language of the following fields in `flow`: * * `Flow.event_handlers.trigger_fulfillment.messages` * * `Flow.event_handlers.trigger_fulfillment.conditional_cases` * * `Flow.transition_routes.trigger_fulfillment.messages` * * `Flow.transition_routes.trigger_fulfillment.conditional_cases` * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @return This builder for chaining. */ public Builder clearLanguageCode() { languageCode_ = getDefaultInstance().getLanguageCode(); onChanged(); return this; } /** * * * <pre> * The language of the following fields in `flow`: * * `Flow.event_handlers.trigger_fulfillment.messages` * * `Flow.event_handlers.trigger_fulfillment.conditional_cases` * * `Flow.transition_routes.trigger_fulfillment.messages` * * `Flow.transition_routes.trigger_fulfillment.conditional_cases` * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @param value The bytes for languageCode to set. * @return This builder for chaining. 
*/ public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); languageCode_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest) private static final com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest(); } public static com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateFlowRequest> PARSER = new com.google.protobuf.AbstractParser<CreateFlowRequest>() { @java.lang.Override public CreateFlowRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CreateFlowRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<CreateFlowRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateFlowRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.CreateFlowRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-glue/src/main/java/com/amazonaws/services/glue/model/transform/ListRegistriesRequestProtocolMarshaller.java
2661
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.glue.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.glue.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * ListRegistriesRequest Marshaller
 *
 * <p>Code-generated protocol marshaller that converts a {@link ListRegistriesRequest} into an
 * AWS-JSON HTTP request for the {@code AWSGlue.ListRegistries} operation. Do not hand-edit:
 * changes are overwritten by the code generator.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ListRegistriesRequestProtocolMarshaller implements Marshaller<Request<ListRegistriesRequest>, ListRegistriesRequest> {

    // Static description of the wire protocol for this operation: AWS-JSON over HTTP POST to "/",
    // with the operation selected by the "AWSGlue.ListRegistries" operation identifier header.
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/")
            .httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true).operationIdentifier("AWSGlue.ListRegistries")
            .serviceName("AWSGlue").build();

    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;

    public ListRegistriesRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    /**
     * Marshalls the given request into an HTTP {@link Request}.
     *
     * @param listRegistriesRequest the request to marshall; must not be null
     * @return the marshalled HTTP request
     * @throws SdkClientException if the argument is null, or if marshalling fails for any other
     *         reason (the original exception is preserved as the cause)
     */
    public Request<ListRegistriesRequest> marshall(ListRegistriesRequest listRegistriesRequest) {

        if (listRegistriesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            final ProtocolRequestMarshaller<ListRegistriesRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
                    listRegistriesRequest);

            protocolMarshaller.startMarshalling();
            ListRegistriesRequestMarshaller.getInstance().marshall(listRegistriesRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            // Wrap any marshalling failure; the cause is chained so callers can diagnose it.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
apache-2.0
jjculber/defcoinj
examples/src/main/java/com/google/defcoin/examples/FetchTransactions.java
2401
/* * Copyright 2012 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.defcoin.examples; import com.google.defcoin.core.*; import com.google.defcoin.params.TestNet3Params; import com.google.defcoin.store.BlockStore; import com.google.defcoin.store.MemoryBlockStore; import com.google.defcoin.utils.BriefLogFormatter; import com.google.common.util.concurrent.ListenableFuture; import java.net.InetAddress; import java.util.List; /** * Downloads the given transaction and its dependencies from a peers memory pool then prints them out. 
*/
public class FetchTransactions {
    /**
     * Entry point: connects to a local testnet node, fetches the transaction whose hash is
     * given as the single command-line argument from the peer's memory pool, then downloads
     * and prints its dependency transactions.
     *
     * @param args args[0] must be the hex-encoded hash of the transaction to fetch
     */
    public static void main(String[] args) throws Exception {
        BriefLogFormatter.init();
        // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException
        // when no transaction hash was supplied.
        if (args.length != 1) {
            System.err.println("Usage: FetchTransactions <transaction-hash>");
            System.exit(1);
        }
        System.out.println("Connecting to node");
        final NetworkParameters params = TestNet3Params.get();

        BlockStore blockStore = new MemoryBlockStore(params);
        BlockChain chain = new BlockChain(params, blockStore);
        PeerGroup peerGroup = new PeerGroup(params, chain);
        peerGroup.startAndWait();
        try {
            peerGroup.addAddress(new PeerAddress(InetAddress.getLocalHost(), params.getPort()));
            peerGroup.waitForPeers(1).get();
            Peer peer = peerGroup.getConnectedPeers().get(0);

            Sha256Hash txHash = new Sha256Hash(args[0]);
            ListenableFuture<Transaction> future = peer.getPeerMempoolTransaction(txHash);
            System.out.println("Waiting for node to send us the requested transaction: " + txHash);
            Transaction tx = future.get();
            System.out.println(tx);

            System.out.println("Waiting for node to send us the dependencies ...");
            List<Transaction> deps = peer.downloadDependencies(tx).get();
            for (Transaction dep : deps) {
                System.out.println("Got dependency " + dep.getHashAsString());
            }
            System.out.println("Done.");
        } finally {
            // Always shut the peer group down, even if connecting or a download step failed;
            // previously a failure left its threads running and the JVM hanging.
            peerGroup.stopAndWait();
        }
    }
}
apache-2.0
lewismc/oodt
catalog/src/main/java/org/apache/oodt/cas/catalog/system/impl/CatalogServiceLocal.java
57387
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.oodt.cas.catalog.system.impl; import org.apache.oodt.cas.catalog.exception.CatalogException; import org.apache.oodt.cas.catalog.exception.CatalogServiceException; import org.apache.oodt.cas.catalog.mapping.IngestMapper; import org.apache.oodt.cas.catalog.metadata.TransactionalMetadata; import org.apache.oodt.cas.catalog.page.*; import org.apache.oodt.cas.catalog.query.QueryExpression; import org.apache.oodt.cas.catalog.query.QueryLogicalGroup; import org.apache.oodt.cas.catalog.query.WrapperQueryExpression; import org.apache.oodt.cas.catalog.repository.CatalogRepository; import org.apache.oodt.cas.catalog.struct.Dictionary; import org.apache.oodt.cas.catalog.struct.Index; import org.apache.oodt.cas.catalog.struct.TransactionId; import org.apache.oodt.cas.catalog.struct.TransactionIdFactory; import org.apache.oodt.cas.catalog.system.Catalog; import org.apache.oodt.cas.catalog.system.CatalogService; import org.apache.oodt.cas.catalog.util.PluginURL; import org.apache.oodt.cas.catalog.util.QueryUtils; import org.apache.oodt.cas.metadata.Metadata; //JDK imports import java.io.File; import java.net.URL; import java.util.*; import java.util.Map.Entry; import 
java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * @author bfoster
 * @version $Revision$
 *
 * <p>
 * A Catalog Service that manages Metadata via one or more underlying Catalogs
 * <p>
 */
public class CatalogServiceLocal implements CatalogService {

    private static Logger LOG = Logger.getLogger(CatalogServiceLocal.class.getName());

    // All catalogs currently managed by this service; guarded by catalogsLock.
    protected Set<Catalog> catalogs;
    protected ReadWriteLock catalogsLock;
    // Persists catalog definitions so they survive service restarts.
    protected CatalogRepository catalogRepository;
    // Maps service-level transaction ids to per-catalog transaction ids; guarded by ingestMapperLock.
    protected IngestMapper ingestMapper;
    protected ReadWriteLock ingestMapperLock;
    // Service-wide switches that deny all queries/ingests regardless of per-catalog settings.
    protected boolean restrictQueryPermissions;
    protected boolean restrictIngestPermissions;
    // Factory used to mint service-level transaction ids.
    protected TransactionIdFactory transactionIdFactory;
    // Directory where plugin jars are stored; created on construction.
    protected File pluginStorageDir;
    // When true, a failure in any one catalog fails the whole cross-catalog operation.
    protected boolean oneCatalogFailsAllFail;
    protected boolean simplifyQueries;
    protected boolean disableIntersectingCrossCatalogQueries;
    protected int crossCatalogResultSortingThreshold;

    /**
     * Builds the service, wiring in all collaborators and immediately loading the catalogs
     * persisted in the given repository.
     */
    public CatalogServiceLocal(CatalogRepository catalogRepository,
            IngestMapper ingestMapper, File pluginStorageDir,
            TransactionIdFactory transactionIdFactory,
            boolean restrictQueryPermissions, boolean restrictIngestPermissions,
            boolean oneCatalogFailsAllFail, boolean simplifyQueries,
            boolean disableIntersectingCrossCatalogQueries,
            int crossCatalogResultSortingThreshold)
            throws InstantiationException {
        try {
            this.catalogs = new HashSet<Catalog>();
            this.catalogsLock = new ReentrantReadWriteLock();
            this.ingestMapperLock = new ReentrantReadWriteLock();
            this.setPluginStorageDir(pluginStorageDir);
            this.setRestrictQueryPermissions(restrictQueryPermissions);
            this.setRestrictIngestPermissions(restrictIngestPermissions);
            this.setTransactionIdFactory(transactionIdFactory);
            this.setIngestMapper(ingestMapper);
            // Must come after setIngestMapper: loading catalogs may touch the mapper.
            this.setCatalogRepository(catalogRepository);
            this.oneCatalogFailsAllFail = oneCatalogFailsAllFail;
            this.simplifyQueries = simplifyQueries;
            this.disableIntersectingCrossCatalogQueries =
                disableIntersectingCrossCatalogQueries;
            this.crossCatalogResultSortingThreshold = crossCatalogResultSortingThreshold;
        }catch (Exception e) {
            // NOTE(review): the original cause is lost here -- InstantiationException has no
            // cause constructor and initCause is never called; only the message survives.
            LOG.log(Level.SEVERE,e.getMessage());
            throw new InstantiationException(e.getMessage());
        }
    }

    /**
     * Set the CatalogRepository for this CatalogService, with replace existing CatalogRepository
     * and immediately load all Catalogs from it.
     * @throws CatalogServiceException On Error loading given CatalogRepository
     */
    protected void setCatalogRepository(CatalogRepository catalogRepository) throws CatalogServiceException {
        if (catalogRepository != null) {
            this.catalogsLock.writeLock().lock();
            // Snapshots taken so the previous repository/catalogs can be restored on failure.
            CatalogRepository backupRepository = null;
            Set<Catalog> backupCatalogs = null;
            try {
                LOG.log(Level.INFO, "Using CatalogRepository '" + catalogRepository.getClass().getName() + "'");
                backupRepository = this.catalogRepository;
                backupCatalogs = new HashSet<Catalog>(this.catalogs);
                LOG.log(Level.INFO, "Loading Catalogs from CatalogRepository . . .");
                this.catalogs = catalogRepository.deserializeAllCatalogs();
                LOG.log(Level.INFO, "Loaded Catalogs: '" + this.catalogs + "'");
                this.catalogRepository = catalogRepository;
            }catch (Exception e) {
                // Roll back to the pre-call state before propagating.
                this.catalogs = backupCatalogs;
                this.catalogRepository = backupRepository;
                throw new CatalogServiceException("Failed to set CatalogRepository '" + catalogRepository
                    + "', reverting back to original settings : " + e.getMessage(), e);
            }finally {
                this.catalogsLock.writeLock().unlock();
            }
        }else {
            throw new CatalogServiceException("Cannot add NULL CatalogRepository to CatalogService, reverting back to original settings");
        }
    }

    /** Replaces the ingest mapper under the mapper write lock. */
    protected void setIngestMapper(IngestMapper ingestMapper) {
        this.ingestMapperLock.writeLock().lock();
        try {
            LOG.log(Level.INFO, "Using IngestMapper '" + ingestMapper.getClass().getName() + "'");
            this.ingestMapper = ingestMapper;
        }catch (Exception e) {
            // NOTE(review): this catch appears unreachable (the guarded statements throw no
            // checked exceptions) but is preserved as-is.
            LOG.log(Level.SEVERE, "Failed to set ingest mapper : " + e.getMessage(), e);
        }finally {
            this.ingestMapperLock.writeLock().unlock();
        }
    }

    public void
shutdown() throws CatalogServiceException { this.catalogsLock.writeLock().lock(); this.ingestMapperLock.writeLock().lock(); } /** * Returns true if this CatalogService is restricting any queries * from being made to the Catalogs it is managing * @return True is restricting queries, false if restriction is * on a per Catalog bases. */ public boolean isRestrictQueryPermissions() { return this.restrictIngestPermissions; } /** * Modify this CatalogServices query restriction, default is false. * @param restrictQueryPermissions True to block all querys to managing * Catalogs or false to leave it at a per Catalog bases. */ protected void setRestrictQueryPermissions(boolean restrictQueryPermissions) { this.restrictQueryPermissions = restrictQueryPermissions; } /** * Returns true if this CatalogService is restricting any ingestions * from being made to the Catalogs it is managing * @return True is restricting ingestions, false if restriction is * on a per Catalog bases. */ public boolean isRestrictIngestPermissions() { return this.restrictIngestPermissions; } /** * Modify this CatalogServices ingest restriction, default is false. * @param restrictIngestPermissions True to block all ingestions to managing * Catalogs or false to leave it at a per Catalog bases. 
*/
    protected void setRestrictIngestPermissions(boolean restrictIngestPermissions) {
        this.restrictIngestPermissions = restrictIngestPermissions;
    }

    /**
     * Sets the factory used to mint CatalogService-level transaction ids.
     */
    protected void setTransactionIdFactory(
            TransactionIdFactory transactionIdFactory) {
        this.transactionIdFactory = transactionIdFactory;
    }

    // Registers a new catalog (no dictionaries, unrestricted) unless one with the same id exists.
    public void addCatalog(String catalogId, Index index) throws CatalogServiceException {
        if (!this.containsCatalog(catalogId)) {
            try {
                this.replaceCatalog(new Catalog(catalogId, index, null, false, false));
            }catch (Exception ignored) {
                // NOTE(review): failures from replaceCatalog (e.g. serialization errors) are
                // silently swallowed even though this method declares CatalogServiceException,
                // so callers cannot tell the add failed. Consider rethrowing or logging.
            }
        } else {
            LOG.log(Level.WARNING, "Attempt to override an existing catalog '" + catalogId
                + "' already used in CatalogService, remedy and retry add -- no changes took place!");
        }
    }

    // Same as above, but with an explicit dictionary list.
    public void addCatalog(String catalogId, Index index, List<Dictionary> dictionaries)
            throws CatalogServiceException {
        if (!this.containsCatalog(catalogId)) {
            try {
                this.replaceCatalog(new Catalog(catalogId, index, dictionaries, false, false));
            }catch (Exception ignored) {
                // NOTE(review): same silent swallow as in addCatalog(String, Index) above.
            }
        } else {
            LOG.log(Level.WARNING, "Attempt to override an existing catalog '" + catalogId
                + "' already used in CatalogService, remedy and retry add -- no changes took place!");
        }
    }

    // Same as above, but with per-catalog query/ingest restriction flags.
    public void addCatalog(String catalogId, Index index, List<Dictionary> dictionaries,
            boolean restrictQueryPermission, boolean restrictIngestPermission)
            throws CatalogServiceException {
        if (!this.containsCatalog(catalogId)) {
            try {
                this.replaceCatalog(new Catalog(catalogId, index, dictionaries,
                    restrictQueryPermission, restrictIngestPermission));
            }catch (Exception ignored) {
                // NOTE(review): same silent swallow as in addCatalog(String, Index) above.
            }
        } else {
            LOG.log(Level.WARNING, "Attempt to override an existing catalog '" + catalogId
                + "' already used in CatalogService, remedy and retry add -- no changes took place!");
        }
    }

    // Adds a dictionary to an existing catalog and re-serializes it; on failure the in-memory
    // catalog set is rolled back to the snapshot taken below.
    public void addDictionary(String catalogId, Dictionary dictionary) throws CatalogServiceException {
        if (this.containsCatalog(catalogId)) {
            Set<Catalog> backupCatalogs = null;
            this.catalogsLock.writeLock().lock();
            try {
                backupCatalogs = new HashSet<Catalog>(this.catalogs);
                for
(Catalog catalog : this.catalogs) { if (catalog.getId().equals(catalogId)) { catalog.addDictionary(dictionary); this.catalogRepository.serializeCatalog(catalog); break; } } }catch (Exception e) { this.catalogs = backupCatalogs; throw new CatalogServiceException("Failed to serialize Catalog '" + catalogId + "' -- if CatalogService goes down, Catalog will have to be readded : " + e.getMessage(), e); }finally { this.catalogsLock.writeLock().unlock(); } } else { LOG.log(Level.WARNING, "Attempt to change an existing catalog '" + catalogId + "' already used in CatalogService, remedy and retry add -- no changes took place!"); } } public void replaceDictionaries(String catalogId, List<Dictionary> dictionaries) throws CatalogServiceException { this.modifyCatalog(catalogId, dictionaries, null, null, null); } public void replaceIndex(String catalogId, Index index) throws CatalogServiceException { this.modifyCatalog(catalogId, null, index, null, null); } public void modifyIngestPermission(String catalogId, boolean restrictIngestPermission) throws CatalogServiceException { this.modifyCatalog(catalogId, null, null, null, restrictIngestPermission); } public void modifyQueryPermission(String catalogId, boolean restrictQueryPermission) throws CatalogServiceException { this.modifyCatalog(catalogId, null, null, restrictQueryPermission, null); } protected void modifyCatalog(String catalogId, List<Dictionary> dictionaries, Index index, Boolean restrictQueryPermission, Boolean restrictIngestPermission) throws CatalogServiceException { if (this.containsCatalog(catalogId)) { Set<Catalog> backupCatalogs = null; this.catalogsLock.writeLock().lock(); try { backupCatalogs = new HashSet<Catalog>(this.catalogs); for (Catalog catalog : this.catalogs) { if (catalog.getId().equals(catalogId)) { if (dictionaries != null) { catalog.setDictionaries(dictionaries); } if (index != null) { catalog.setIndex(index); } if (restrictQueryPermission != null) { 
catalog.setRestrictQueryPermissions(restrictQueryPermissions); } if (restrictIngestPermission != null) { catalog.setRestrictIngestPermissions(restrictIngestPermissions); } this.catalogRepository.serializeCatalog(catalog); break; } } }catch (Exception e) { this.catalogs = backupCatalogs; throw new CatalogServiceException("Failed to serialize Catalog '" + catalogId + "' -- if CatalogService goes down, Catalog will have to be readded : " + e.getMessage(), e); }finally { this.catalogsLock.writeLock().unlock(); } } } protected boolean containsCatalog(String catalogId) throws CatalogServiceException { this.catalogsLock.readLock().lock(); try { return this.catalogs.contains(catalogId); }catch (Exception e) { throw new CatalogServiceException("Failed to check if catalog '" + catalogId + "' has already been added to this CatalogService : " + e.getMessage(), e); }finally { this.catalogsLock.readLock().unlock(); } } /** * Ability to dynamically add a Catalog to this CatalogService for managing * @param catalog Catalog for this CatalogService to manage * @return True if catalogs where added to list * @throws CatalogServiceException If one of the adding Catalog * URNs equals that of an existing Catalog. */ public void addCatalog(Catalog catalog) throws CatalogServiceException { if (!this.containsCatalog(catalog.getId())) { this.replaceCatalog(catalog); } else { LOG.log(Level.WARNING, "Attempt to override an existing catalog '" + catalog + "' already used in CatalogService, remedy and retry add -- no changes took place!"); } } /** * Ability to dynamically add a Catalog to this CatalogService for managing * @param catalog Catalog for this CatalogService to manage * @throws CatalogServiceException When allowOverride=false and one of the adding Catalog * URNs equals that of an existing Catalog. 
*/ public void replaceCatalog(Catalog catalog) throws CatalogServiceException { Set<Catalog> backupCatalogs = null; this.catalogsLock.writeLock().lock(); try { backupCatalogs = new HashSet<Catalog>(this.catalogs); this.catalogs.remove(catalog); this.catalogs.add(catalog); this.catalogRepository.serializeCatalog(catalog); }catch (Exception e) { this.catalogs = backupCatalogs; throw new CatalogServiceException("Failed to serialize Catalog '" + catalog + "' -- if CatalogService goes down, Catalog will have to be readded : " + e.getMessage(), e); }finally { this.catalogsLock.writeLock().unlock(); } } public void removeCatalog(String catalogUrn) throws CatalogServiceException { this.removeCatalog(catalogUrn, false); } /** * * @throws CatalogServiceException */ public void removeCatalog(String catalogId, boolean preserveMapping) throws CatalogServiceException { this.catalogsLock.readLock().lock(); Catalog rmCatalog = null; try { for (Catalog catalog : this.catalogs) { if (catalog.getId().equals(catalogId)) { rmCatalog = catalog; break; } } }catch (Exception e) { throw new CatalogServiceException("Failed to find catalog object for catalog URN '" + catalogId + "' : " + e.getMessage(), e); }finally { this.catalogsLock.readLock().unlock(); } if (rmCatalog != null) { this.catalogsLock.writeLock().lock(); try { LOG.log(Level.INFO, "Removing catalog '" + rmCatalog + "'"); this.catalogs.remove(rmCatalog); this.catalogRepository.deleteSerializedCatalog(catalogId); if (!preserveMapping) { this.ingestMapperLock.writeLock().lock(); try { LOG.log(Level.INFO, "Deleting all index mappings for catalog '" + rmCatalog + "'"); this.ingestMapper.deleteAllMappingsForCatalog(catalogId); } finally { this.ingestMapperLock.writeLock().unlock(); } } }catch (Exception e) { throw new CatalogServiceException("Failed to remove Catalog '" + catalogId + "' from this CatalogService"); }finally { this.catalogsLock.writeLock().unlock(); } }else { LOG.log(Level.WARNING, "Catalog '" + catalogId + "' is not 
currently managed by this CatalogService"); } } public void setPluginStorageDir(File pluginStorageDir) { this.pluginStorageDir = pluginStorageDir; this.pluginStorageDir.mkdirs(); } public URL getPluginStorageDir() throws CatalogServiceException { try { return new URL("file://" + this.pluginStorageDir.getAbsolutePath()); }catch (Exception e) { throw new CatalogServiceException("Failed to get plugin storage dir directory : " + e.getMessage(), e); } } public List<PluginURL> getPluginUrls() throws CatalogServiceException { try { return this.catalogRepository.deserializePluginURLs(); }catch (Exception e) { throw new CatalogServiceException(e.getMessage(), e); } } public void addPluginUrls(List<PluginURL> urls) throws CatalogServiceException { try { List<PluginURL> currentUrls = new Vector<PluginURL>(this.catalogRepository.deserializePluginURLs()); currentUrls.addAll(urls); this.catalogRepository.serializePluginURLs(currentUrls); }catch (Exception e) { throw new CatalogServiceException(e.getMessage(), e); } } /** * * @return * @throws CatalogServiceException */ public Set<Catalog> getCurrentCatalogList() throws CatalogServiceException { this.catalogsLock.readLock().lock(); try { return new HashSet<Catalog>(this.catalogs); }catch (Exception e) { throw new CatalogServiceException("Failed to get current catalog list : " + e.getMessage(), e); }finally { this.catalogsLock.readLock().unlock(); } } protected Catalog getCatalog(String catalogUrn) throws CatalogServiceException { this.catalogsLock.readLock().lock(); try { for (Catalog catalog : this.catalogs) { if (catalog.getId().equals(catalogUrn)) { return catalog; } } return null; }catch (Exception e) { throw new CatalogServiceException("Failed to get catalog catalog '" + catalogUrn + "' : " + e.getMessage(), e); }finally { this.catalogsLock.readLock().unlock(); } } /** * * @return * @throws CatalogServiceException */ public Set<String> getCurrentCatalogIds() throws CatalogServiceException { 
this.catalogsLock.readLock().lock(); try { Set<String> catalogIds = new HashSet<String>(); for (Catalog catalog : this.catalogs) { catalogIds.add(catalog.getId()); } return catalogIds; }catch (Exception e) { throw new CatalogServiceException("Failed to get current catalog ids list : " + e.getMessage(), e); }finally { this.catalogsLock.readLock().unlock(); } } public TransactionReceipt ingest(Metadata metadata) throws CatalogServiceException { if (this.restrictIngestPermissions) { throw new CatalogServiceException( "Ingest permissions are restricted for this CatalogService -- request denied"); } try { boolean performUpdate; TransactionId<?> catalogServiceTransactionId = this.getCatalogServiceTransactionId(metadata); if (performUpdate = this.ingestMapper.hasCatalogServiceTransactionId(catalogServiceTransactionId)) { LOG.log(Level.INFO, "TransactionId '" + catalogServiceTransactionId + "' is an existing TransactionId, switching to update mode"); } List<CatalogReceipt> catalogReceipts = new Vector<CatalogReceipt>(); for (Catalog catalog : this.getFilteredCatalogList(metadata)) { if (catalog.isIngestable()) { this.ingestMapperLock.writeLock().lock(); try { // perform update if (performUpdate) { if (!Boolean.parseBoolean(metadata.getMetadata(ENABLE_UPDATE_MET_KEY))) { throw new CatalogServiceException("TransactionId '" + catalogServiceTransactionId + "' already exists -- enable update by setting metadata key '" + ENABLE_UPDATE_MET_KEY + "'=true"); } TransactionId<?> catalogTransactionId = this.ingestMapper.getCatalogTransactionId(catalogServiceTransactionId, catalog.getId()); if (catalogTransactionId != null) { CatalogReceipt catalogReceipt = catalog.update(catalogTransactionId, metadata); if (catalogReceipt != null) { if (!catalogReceipt.getTransactionId().equals(catalogTransactionId)) { this.ingestMapper.deleteTransactionIdMapping(catalogTransactionId, catalog.getId()); this.ingestMapper.storeTransactionIdMapping(catalogServiceTransactionId, this.transactionIdFactory, 
catalogReceipt, catalog.getTransactionIdFactory()); } catalogReceipts.add(catalogReceipt); LOG.log(Level.INFO, "Successfully updated metadata to catalog '" + catalog + "' for TransactionId '" + catalogServiceTransactionId + "'"); }else { LOG.log(Level.SEVERE, "Update attempt to catalog '" + catalog + "' failed for TransactionId '" + catalogServiceTransactionId + "' -- update returned false"); } }else { LOG.log(Level.INFO, "Catalog '" + catalog + "' was not on ingest list for TransactionId '" + catalogServiceTransactionId + "' -- skipping"); } // perform ingest }else { LOG.log(Level.INFO, "Performing ingest for TransactionId '" + catalogServiceTransactionId + "' to catalog '" + catalog + "'"); CatalogReceipt catalogReceipt = catalog.ingest(metadata); if (catalogReceipt != null) { LOG.log(Level.INFO, "Successfully ingested metadata -- Indexing TransactionId information for ingest (CatalogService TransactionId = '" + catalogServiceTransactionId + "', Catalog TransactionId = '" + catalogReceipt.getTransactionId() + "', catalog = '" + catalogReceipt.getCatalogId() + "')"); this.ingestMapper.storeTransactionIdMapping(catalogServiceTransactionId, this.transactionIdFactory, catalogReceipt, catalog.getTransactionIdFactory()); catalogReceipts.add(catalogReceipt); }else { LOG.log(Level.WARNING, "Catalog '" + catalog + "' not interested in any Metadata for TransactionId '" + catalogServiceTransactionId + "'"); } } }catch (Exception e) { LOG.log(Level.WARNING, "Failed to add metadata to catalog '" + catalog.getId() + "' : " + e.getMessage(), e); if (this.oneCatalogFailsAllFail) { throw new CatalogServiceException( "Failed to add metadata to catalog '" + catalog.getId() + "' : " + e.getMessage(), e); } }finally { this.ingestMapperLock.writeLock().unlock(); } }else { LOG.log(Level.WARNING, "Ingest not permitted to catalog '" + catalog + "' -- skipping over catalog"); } } return (catalogReceipts.size() > 0) ? 
new TransactionReceipt(catalogServiceTransactionId, catalogReceipts) : null; }catch (Exception e) { throw new CatalogServiceException("Error occured during Metadata ingest attempt : " + e.getMessage(), e); } } /** * * @param metadata * @throws CatalogServiceException */ public void delete(Metadata metadata) throws CatalogServiceException { if (this.restrictIngestPermissions) { throw new CatalogServiceException( "Delete permissions are restricted for this CatalogService -- request denied"); } TransactionId<?> catalogServiceTransactionId = this.getCatalogServiceTransactionId(metadata, false); if (catalogServiceTransactionId != null) { for (Catalog catalog : this.getFilteredCatalogList(metadata)) { if (catalog.isIngestable()) { this.ingestMapperLock.writeLock().lock(); try { TransactionId<?> catalogTransactionId = this.ingestMapper.getCatalogTransactionId(catalogServiceTransactionId, catalog.getId()); if (catalogTransactionId != null) { if (this.doReduce(metadata)) { LOG.log(Level.INFO, "Deleting metadata from TransactionId '" + catalogServiceTransactionId + "' for catalog '" + catalog + "'"); if (catalog.reduce(catalogTransactionId, metadata)) { LOG.log(Level.INFO, "Successfully deleted metadata from catalog '" + catalog + "' for TransactionId [id = " + catalogServiceTransactionId + "]"); }else { LOG.log(Level.INFO, "Failed to deleted metadata from catalog '" + catalog + "' for TransactionId [id = " + catalogServiceTransactionId + "] -- delete returned false"); } }else { LOG.log(Level.INFO, "Deleting all records of TransactionId from catalog '" + catalog + "'"); if (catalog.delete(catalogTransactionId)) { this.ingestMapper.deleteTransactionIdMapping(catalogTransactionId, catalog.getId()); LOG.log(Level.INFO, "Successfully deleted metadata from catalog '" + catalog + "' for TransactionId [id = " + catalogServiceTransactionId + "]"); }else { LOG.log(Level.INFO, "Failed to deleted metadata from catalog '" + catalog + "' for TransactionId [id = " + 
catalogServiceTransactionId + "] -- delete returned false"); } } }else { LOG.log(Level.INFO, "Catalog '" + catalog + "' was not on delete list for TransactionId '" + catalogServiceTransactionId + "' -- skipping"); } }catch (Exception e) { LOG.log(Level.WARNING, "Error occured while deleting metadata for TransactionId [id = " + catalogServiceTransactionId + "] : " + e.getMessage(), e); if (this.oneCatalogFailsAllFail) { throw new CatalogServiceException( "Error occured while deleting metadata for TransactionId [id = " + catalogServiceTransactionId + "] : " + e.getMessage(), e); } }finally { this.ingestMapperLock.writeLock().unlock(); } }else { LOG.log(Level.WARNING, "Deletion is not permitted to catalog '" + catalog + "' -- skipping over catalog"); } } }else { throw new CatalogServiceException("Must specify a TransactionId to delete"); } } protected boolean doReduce(Metadata metadata) { for (String key : metadata.getAllKeys()) { if (!(key.equals(CATALOG_SERVICE_TRANSACTION_ID_MET_KEY) || key.equals(CATALOG_IDS_MET_KEY) || key .equals(CATALOG_TRANSACTION_ID_MET_KEY) || key.equals(CATALOG_ID_MET_KEY))) { return true; } } return false; } public List<String> getProperty(String key) throws CatalogServiceException { List<String> vals = new Vector<String>(); for (Catalog catalog : this.getCurrentCatalogList()) { try { String val = catalog.getProperty(key); if (val != null) { vals.add(val); } }catch (Exception e) { if (this.oneCatalogFailsAllFail) { throw new CatalogServiceException( "Failed to get catalog property '" + key + "' from catalog '" + catalog.getId() + "' : " + e .getMessage(), e); } else { LOG.log(Level.WARNING, "Failed to get catalog property '" + key + "' from catalog '" + catalog.getId() + "' : " + e .getMessage(), e); } } } return vals; } public Properties getCalalogProperties() throws CatalogServiceException { Properties properties = new Properties(); for (Catalog catalog : this.getCurrentCatalogList()) { try { Properties catalogProperties = 
catalog.getProperties(); for (Object key : catalogProperties.keySet()) { String value = properties.getProperty((String) key); if (value != null) { value += "," + catalogProperties.getProperty((String) key); } else { value = catalogProperties.getProperty((String) key); } properties.setProperty((String) key, value); } }catch (Exception e) { if (this.oneCatalogFailsAllFail) { throw new CatalogServiceException( "Failed to get catalog properties from catalog '" + catalog.getId() + "' : " + e.getMessage(), e); } else { LOG.log(Level.WARNING, "Failed to get catalog properties from catalog '" + catalog.getId() + "' : " + e.getMessage(), e); } } } return properties; } public Properties getCalalogProperties(String catalogUrn) throws CatalogServiceException { try { Catalog catalog = this.getCatalog(catalogUrn); if (catalog != null) { return catalog.getProperties(); } else { return null; } }catch (Exception e) { throw new CatalogServiceException("Failed to get catalog properties from catalog '" + catalogUrn + "' : " + e.getMessage(), e); } } public Page getNextPage(Page page) throws CatalogServiceException { QueryPager queryPager = new QueryPager(this._query(page.getQueryExpression(), page.getRestrictToCatalogIds())); queryPager.setPageInfo(new PageInfo(page.getPageSize(), page.getPageNum() + 1)); return this.getPage(page.getQueryExpression(), page.getRestrictToCatalogIds(), queryPager); } public Page getPage(PageInfo pageInfo, QueryExpression queryExpression) throws CatalogServiceException { return this.getPage(pageInfo, queryExpression, this.getCurrentCatalogIds()); } public Page getPage(PageInfo pageInfo, QueryExpression queryExpression, Set<String> catalogIds) throws CatalogServiceException { if (this.disableIntersectingCrossCatalogQueries) { try { int totalResults = 0; LinkedHashMap<String, Integer> catalogToSizeOfMap = new LinkedHashMap<String, Integer>(); for (String catalogId : catalogIds) { Catalog catalog = this.getCatalog(catalogId); QueryExpression qe = 
this.reduceToUnderstoodExpressions(catalog, queryExpression); if (qe != null) { int catalogResultSize = catalog.sizeOf(qe); totalResults += catalogResultSize; catalogToSizeOfMap.put(catalogId, catalogResultSize); } } LOG.log(Level.INFO, "Routing query to catalogs as non-cross catalog intersecting queries . . ."); if (totalResults <= this.crossCatalogResultSortingThreshold) { List<CatalogReceipt> catalogReceipts = new Vector<CatalogReceipt>(); for (String catalogId : catalogToSizeOfMap.keySet()) { Catalog catalog = this.getCatalog(catalogId); QueryExpression qe = this.reduceToUnderstoodExpressions(catalog, queryExpression); if (qe != null) { catalogReceipts.addAll(catalog.query(qe)); } } List<TransactionReceipt> transactionReceipts = this.getPossiblyUnindexedTransactionReceipts(catalogReceipts); LOG.log(Level.INFO, "Sorting Query Results . . . "); Collections.sort(transactionReceipts, new Comparator<TransactionReceipt>() { public int compare(TransactionReceipt o1, TransactionReceipt o2) { return o2.getTransactionDate().compareTo(o1.getTransactionDate()); } }); QueryPager queryPager = new QueryPager(transactionReceipts); queryPager.setPageInfo(pageInfo); return this.getPage(queryExpression, catalogIds, queryPager); }else { int currentIndex = 0; int desiredStartingIndex = pageInfo.getPageNum() * pageInfo.getPageSize(); List<CatalogReceipt> pageOfReceipts = new Vector<CatalogReceipt>(); for (Entry<String, Integer> entry : catalogToSizeOfMap.entrySet()) { if (desiredStartingIndex - currentIndex <= entry.getValue()) { Catalog catalog = this.getCatalog(entry.getKey()); QueryExpression qe = this.reduceToUnderstoodExpressions(catalog, queryExpression); if (qe != null) { List<CatalogReceipt> receipts = catalog.query(qe, desiredStartingIndex - currentIndex, Math.min((desiredStartingIndex - currentIndex) + pageInfo.getPageSize(), entry.getValue())); pageOfReceipts.addAll(receipts); if (pageOfReceipts.size() >= pageInfo.getPageSize()) { break; } } }else { currentIndex += 
entry.getValue(); } } return new Page(new ProcessedPageInfo(pageInfo.getPageSize(), pageInfo.getPageNum(), totalResults), queryExpression, catalogIds, this.indexReceipts(this.getPossiblyUnindexedTransactionReceipts(pageOfReceipts))); } }catch (Exception e) { throw new CatalogServiceException(e.getMessage(), e); } }else { QueryPager queryPager = new QueryPager(this._query(queryExpression, catalogIds)); queryPager.setPageInfo(pageInfo); return this.getPage(queryExpression, catalogIds, queryPager); } } public QueryPager query(QueryExpression queryExpression) throws CatalogServiceException { return this.query(queryExpression, this.getCurrentCatalogIds()); } public List<TransactionalMetadata> getMetadata(Page page) throws CatalogServiceException { return this.getMetadata(page.getReceipts()); } protected Page getPage(QueryExpression queryExpression, Set<String> restrictToCatalogIds, QueryPager queryPager) throws CatalogServiceException { return new Page(new ProcessedPageInfo(queryPager.getPageSize(), queryPager.getPageNum(), queryPager.getNumOfHits()), queryExpression, restrictToCatalogIds, this.indexReceipts(queryPager.getCurrentPage())); } public QueryPager query(QueryExpression queryExpression, Set<String> catalogIds) throws CatalogServiceException { return new QueryPager(this.indexReceipts(this._query(queryExpression, catalogIds))); } /** * * @param queryExpression * @return * @throws CatalogServiceException */ public List<TransactionReceipt> _query(QueryExpression queryExpression, Set<String> catalogIds) throws CatalogServiceException { if (this.restrictQueryPermissions) { throw new CatalogServiceException( "Query permissions are restricted for this CatalogService -- request denied"); } try { LOG.log(Level.INFO, "Recieved query '" + queryExpression + "'"); if (this.simplifyQueries) { queryExpression = QueryUtils.simplifyQuery(queryExpression); LOG.log(Level.INFO, "Simplified query to '" + queryExpression + "' -- routing query to catalogs"); } QueryResult queryResult 
= this.queryRecur(queryExpression, catalogIds); List<CatalogReceipt> catalogReceipts = new Vector<CatalogReceipt>(); if (queryResult.getCatalogReceipts() == null && queryResult.getInterestedCatalogs() != null) { for (Catalog catalog : this.getCurrentCatalogList()) { try { if (queryResult.getInterestedCatalogs().contains(catalog.getId())) { LOG.log(Level.INFO, "Restricting query to understood terms for Catalog '" + catalog + "'"); QueryExpression reducedExpression = this.reduceToUnderstoodExpressions(catalog, queryExpression); LOG.log(Level.INFO, "Querying Catalog '" + catalog + "' with query '" + reducedExpression + "'"); catalogReceipts.addAll(catalog.query(reducedExpression)); } }catch (Exception e) { if (this.oneCatalogFailsAllFail) { throw new CatalogServiceException( "Failed to query catalog '" + catalog.getId() + "' for query '" + queryExpression + "' : " + e.getMessage(), e); } else { LOG.log(Level.WARNING, "Failed to query catalog '" + catalog.getId() + "' for query '" + queryExpression + "' : " + e.getMessage(), e); } } } } List<TransactionReceipt> transactionReceipts = this.getPossiblyUnindexedTransactionReceipts(catalogReceipts); LOG.log(Level.INFO, "Sorting Query Results . . . 
"); Collections.sort(transactionReceipts, new Comparator<TransactionReceipt>() { public int compare(TransactionReceipt o1, TransactionReceipt o2) { return o2.getTransactionDate().compareTo(o1.getTransactionDate()); } }); LOG.log(Level.INFO, "Query returned " + transactionReceipts.size() + " results"); return transactionReceipts; }catch (Exception e) { LOG.log(Level.SEVERE, e.getMessage()); throw new CatalogServiceException("Failed to get TransactionId to Metadata map for query '" + queryExpression + "' : " + e.getMessage(), e); } } protected List<TransactionReceipt> getPossiblyUnindexedTransactionReceipts(List<CatalogReceipt> catalogReceipts) throws CatalogServiceException { try { List<TransactionReceipt> returnList = new Vector<TransactionReceipt>(); LinkedHashMap<TransactionId<?>, List<CatalogReceipt>> existing = new LinkedHashMap<TransactionId<?>, List<CatalogReceipt>>(); for (CatalogReceipt catalogReceipt : catalogReceipts) { TransactionId<?> catalogServiceTransactionId = this.getCatalogServiceTransactionId(catalogReceipt.getTransactionId(), catalogReceipt.getCatalogId()); if (catalogServiceTransactionId != null) { List<CatalogReceipt> found = existing.get(catalogServiceTransactionId); if (found == null) { found = new Vector<CatalogReceipt>(); } found.add(catalogReceipt); existing.put(catalogServiceTransactionId, found); }else { returnList.add(new TransactionReceipt(null, Collections.singletonList(catalogReceipt))); } } for (Map.Entry<TransactionId<?>, List<CatalogReceipt>> transactionId : existing.entrySet()) { returnList.add(new TransactionReceipt(transactionId.getKey(), existing.get(transactionId.getKey()))); } return returnList; }catch (Exception e) { throw new CatalogServiceException(e.getMessage(), e); } } protected List<TransactionReceipt> indexReceipts(List<TransactionReceipt> transactionReceipts) throws CatalogServiceException { List<TransactionReceipt> indexedReceipts = new Vector<TransactionReceipt>(); for (TransactionReceipt transactionReceipt : 
transactionReceipts) { try { if (transactionReceipt.getTransactionId() == null) { transactionReceipt = new TransactionReceipt( this.getCatalogServiceTransactionId(transactionReceipt.getCatalogReceipts().get(0), true), transactionReceipt.getCatalogReceipts()); } indexedReceipts.add(transactionReceipt); }catch(Exception e) { throw new CatalogServiceException(e.getMessage(), e); } } return indexedReceipts; } public List<TransactionalMetadata> getNextPage(QueryPager queryPager) throws CatalogServiceException { try { return this.getMetadata(queryPager.getCurrentPage()); }catch (Exception e) { throw new CatalogServiceException("Failed to get next page of Metadata : " + e.getMessage(), e); } } public List<TransactionalMetadata> getAllPages(QueryPager queryPager) throws CatalogServiceException { try { return this.getMetadata(queryPager.getTransactionReceipts()); }catch (Exception e) { throw new CatalogServiceException("Failed to get all page of Metadata : " + e.getMessage(), e); } } public List<TransactionalMetadata> getMetadataFromTransactionIdStrings(List<String> catalogServiceTransactionIdStrings) throws CatalogServiceException { List<TransactionId<?>> catalogServiceTransactionIds = new Vector<TransactionId<?>>(); for (String catalogServiceTransactionIdString : catalogServiceTransactionIdStrings) { catalogServiceTransactionIds.add(this.generateTransactionId(catalogServiceTransactionIdString)); } return this.getMetadataFromTransactionIds(catalogServiceTransactionIds); } public List<TransactionalMetadata> getMetadata(List<TransactionReceipt> transactionReceipts) throws CatalogServiceException { LinkedHashSet<TransactionalMetadata> metadataSet = new LinkedHashSet<TransactionalMetadata>(); for (TransactionReceipt transactionReceipt : transactionReceipts) { Metadata metadata = new Metadata(); Vector<CatalogReceipt> successfulCatalogReceipts = new Vector<CatalogReceipt>(); for (CatalogReceipt catalogReceipt : transactionReceipt.getCatalogReceipts()) { try { Catalog catalog = 
this.getCatalog(catalogReceipt.getCatalogId()); metadata.addMetadata(catalog.getMetadata(catalogReceipt.getTransactionId())); successfulCatalogReceipts.add(catalogReceipt); }catch (Exception e) { if (this.oneCatalogFailsAllFail) { throw new CatalogServiceException( "Failed to get metadata for transaction ids for catalog '" + catalogReceipt.getCatalogId() + "' : " + e.getMessage(), e); } else { LOG.log(Level.WARNING, "Failed to get metadata for transaction ids for catalog '" + catalogReceipt.getCatalogId() + "' : " + e.getMessage(), e); } } } if (metadata.getMap().keySet().size() > 0) { metadataSet.add(new TransactionalMetadata( new TransactionReceipt(transactionReceipt.getTransactionId(), successfulCatalogReceipts), metadata)); } } return new Vector<TransactionalMetadata>(metadataSet); } public List<TransactionalMetadata> getMetadataFromTransactionIds(List<TransactionId<?>> catalogServiceTransactionIds) throws CatalogServiceException { LinkedHashSet<TransactionalMetadata> metadataSet = new LinkedHashSet<TransactionalMetadata>(); for (TransactionId<?> catalogServiceTransactionId : catalogServiceTransactionIds) { Metadata metadata = new Metadata(); Vector<CatalogReceipt> catalogReceipts = new Vector<CatalogReceipt>(); for (Catalog catalog : this.getCurrentCatalogList()) { try { CatalogReceipt catalogReceipt = this.ingestMapper.getCatalogReceipt(catalogServiceTransactionId, catalog.getId()); if (catalogReceipt != null) { metadata.addMetadata(catalog.getMetadata(catalogReceipt.getTransactionId()).getMap()); catalogReceipts.add(catalogReceipt); } }catch (Exception e) { if (this.oneCatalogFailsAllFail) { throw new CatalogServiceException( "Failed to get metadata for transaction ids for catalog '" + catalog.getId() + "' : " + e .getMessage(), e); } else { LOG.log(Level.WARNING, "Failed to get metadata for transaction ids for catalog '" + catalog.getId() + "' : " + e .getMessage(), e); } } } if (metadata.getMap().keySet().size() > 0) { metadataSet.add( new 
TransactionalMetadata(new TransactionReceipt(catalogServiceTransactionId, catalogReceipts), metadata)); } } return new Vector<TransactionalMetadata>(metadataSet); } public List<TransactionId<?>> getCatalogServiceTransactionIds(List<TransactionId<?>> catalogTransactionIds, String catalogUrn) throws CatalogServiceException { LinkedHashSet<TransactionId<?>> catalogServiceTransactionIds = new LinkedHashSet<TransactionId<?>>(); for (TransactionId<?> catalogTransactionId : catalogTransactionIds) { TransactionId<?> catalogServiceTransactionId = this.getCatalogServiceTransactionId(catalogTransactionId, catalogUrn); catalogServiceTransactionIds.add(catalogServiceTransactionId); } return new Vector<TransactionId<?>>(catalogServiceTransactionIds); } public TransactionId<?> getCatalogServiceTransactionId(TransactionId<?> catalogTransactionId, String catalogUrn) throws CatalogServiceException { this.ingestMapperLock.readLock().lock(); try { return this.ingestMapper.getCatalogServiceTransactionId(catalogTransactionId, catalogUrn); }catch (Exception e) { throw new CatalogServiceException(e.getMessage(), e); }finally { this.ingestMapperLock.readLock().unlock(); } } public TransactionId<?> getCatalogServiceTransactionId(CatalogReceipt catalogReceipt, boolean generateNew) throws CatalogServiceException { try { TransactionId<?> catalogServiceTransactionId = this.getCatalogServiceTransactionId(catalogReceipt.getTransactionId(), catalogReceipt.getCatalogId()); if (catalogServiceTransactionId == null && generateNew) { catalogServiceTransactionId = this.generateNewUniqueTransactionId(); LOG.log(Level.INFO, "CatalogServer mapping transaction: " + catalogServiceTransactionId + "," + catalogReceipt.getTransactionId() + "," + catalogReceipt.getCatalogId()); this.ingestMapperLock.writeLock().lock(); try { this.ingestMapper.storeTransactionIdMapping(catalogServiceTransactionId, this.transactionIdFactory, catalogReceipt, this.getCatalog(catalogReceipt.getCatalogId()).getTransactionIdFactory()); 
}catch (Exception e) { throw new CatalogServiceException("Failed to write TransactionId '" + catalogServiceTransactionId + "' : " + e.getMessage(), e); }finally { this.ingestMapperLock.writeLock().unlock(); } } return catalogServiceTransactionId; }catch (Exception e) { throw new CatalogServiceException("Failed to get CatalogServiceTransactionId : " + e.getMessage(), e); } } protected TransactionId<?> generateNewUniqueTransactionId() { try { return this.transactionIdFactory.createNewTransactionId(); }catch (Exception e) { LOG.log(Level.SEVERE, "Failed to generate a new TransactionId from factory '" + this.transactionIdFactory.getClass().getCanonicalName() + "' : " + e.getMessage(), e); return null; } } protected TransactionId<?> generateTransactionId(String stringTransactionId) { try { return this.transactionIdFactory.createTransactionId(stringTransactionId); }catch (Exception e) { LOG.log(Level.SEVERE, "Failed to generate a new TransactionId from factory '" + this.transactionIdFactory.getClass().getCanonicalName() + "' for string value '" + stringTransactionId + ": " + e.getMessage(), e); return null; } } // check if transaction id was specified by user, otherwise generate a new one protected TransactionId<?> getCatalogServiceTransactionId(Metadata metadata) throws CatalogServiceException { return this.getCatalogServiceTransactionId(metadata, true); } protected TransactionId<?> getCatalogServiceTransactionId(Metadata metadata, boolean generateNew) throws CatalogServiceException { try { if (metadata.getMetadata(CatalogServiceLocal.CATALOG_SERVICE_TRANSACTION_ID_MET_KEY) != null) { return this.generateTransactionId(metadata.getMetadata(CatalogServiceLocal.CATALOG_SERVICE_TRANSACTION_ID_MET_KEY)); }else if (metadata.getMetadata(CatalogServiceLocal.CATALOG_TRANSACTION_ID_MET_KEY) != null && metadata.getMetadata(CatalogServiceLocal.CATALOG_ID_MET_KEY) != null) { String catalogId = metadata.getMetadata(CatalogServiceLocal.CATALOG_ID_MET_KEY); Catalog catalog = 
this.getCatalog(catalogId); if (catalog != null) { TransactionId<?> catalogTransactionId = catalog.getTransactionIdFromString(metadata.getMetadata(CatalogServiceLocal.CATALOG_TRANSACTION_ID_MET_KEY)); TransactionId<?> catalogServiceTransactionId = this.ingestMapper.getCatalogServiceTransactionId(catalogTransactionId, catalogId); if (catalogServiceTransactionId == null) { throw new CatalogServiceException( "CatalogService's Catalog '" + catalog.getId() + "' is not aware of TransactionId '" + catalogTransactionId + "'s"); } return catalogServiceTransactionId; }else { throw new CatalogServiceException("This CatalogService has no Catalog with ID = '" + catalogId + "'"); } }else if (generateNew) { return this.generateNewUniqueTransactionId(); }else { throw new CatalogServiceException("Metadata fields not present to determine a TransactionId"); } }catch (Exception e) { throw new CatalogServiceException("Failed determine TransactionId : " + e.getMessage(), e); } } protected Set<Catalog> getFilteredCatalogList(Metadata metadata) throws CatalogServiceException { try { if (metadata.containsKey(CATALOG_ID_MET_KEY)) { Catalog catalog = this.getCatalog(metadata.getMetadata(CATALOG_ID_MET_KEY)); if (catalog == null) { throw new CatalogServiceException("Catalog '" + metadata.getMetadata(CATALOG_ID_MET_KEY) + "' is not managed by this CatalogService"); } else { return Collections.singleton(catalog); } }else if (metadata.containsKey(CATALOG_IDS_MET_KEY)) { HashSet<Catalog> filteredCatalogList = new HashSet<Catalog>(); for (Object catalogUrn : metadata.getAllMetadata(CATALOG_IDS_MET_KEY)) { Catalog catalog = this.getCatalog((String) catalogUrn); if (catalog == null) { throw new CatalogServiceException("Catalog '" + metadata.getMetadata(CATALOG_ID_MET_KEY) + "' is not managed by this CatalogService"); } else { filteredCatalogList.add(catalog); } } return filteredCatalogList; }else { return new HashSet<Catalog>(this.catalogs); } }catch (Exception e) { throw new 
CatalogServiceException("Failed to get filtered catalog list : " + e.getMessage(), e); } } protected QueryResult queryRecur(QueryExpression queryExpression, Set<String> restrictToCatalogIds) throws CatalogServiceException, CatalogException { // get QueryResults for sub queries if (queryExpression instanceof QueryLogicalGroup) { // get children query results List<QueryResult> childrenQueryResults = new Vector<QueryResult>(); for (QueryExpression subQueryExpression : ((QueryLogicalGroup) queryExpression).getExpressions()) { childrenQueryResults.add(queryRecur(subQueryExpression, restrictToCatalogIds)); } if ((((QueryLogicalGroup) queryExpression).getOperator().equals(QueryLogicalGroup.Operator.AND) && containsUnbalancedCatalogInterest(childrenQueryResults)) || containsTranactionReceipts(childrenQueryResults)) { for (QueryResult childQueryResult : childrenQueryResults) { // if childQueryResult has not been used, use it if (childQueryResult.getCatalogReceipts() == null) { List<CatalogReceipt> catalogReceipts = new Vector<CatalogReceipt>(); for (Catalog catalog : this.getCurrentCatalogList()) { try { if (childQueryResult.getInterestedCatalogs().contains(catalog.getId())) { catalogReceipts.addAll(catalog .query(this.reduceToUnderstoodExpressions(catalog, childQueryResult.getQueryExpression()))); } } catch (Exception e) { if (this.oneCatalogFailsAllFail) { throw new CatalogServiceException( "Failed to query catalog '" + catalog.getId() + "' for query '" + queryExpression + "' : " + e.getMessage(), e); } else { LOG.log(Level.WARNING, "Failed to query catalog '" + catalog.getId() + "' for query '" + queryExpression + "' : " + e.getMessage(), e); } } } childQueryResult.setCatalogReceipts(catalogReceipts); } } // get intersection of results QueryResult queryResult = new QueryResult(queryExpression); queryResult.setCatalogReceipts(this.getIntersection(childrenQueryResults)); return queryResult; }else { // get merge of results QueryResult queryResult = new 
QueryResult(queryExpression);
            // union the children's interested-catalog sets; execution is deferred
            HashSet<String> interestedCatalogs = new HashSet<String>();
            for (QueryResult childQueryResult : childrenQueryResults) {
                interestedCatalogs.addAll(childQueryResult.getInterestedCatalogs());
            }
            queryResult.setInterestedCatalogs(interestedCatalogs);
            return queryResult;
        }
    } else if (queryExpression instanceof WrapperQueryExpression) {
        // check for catalogs interested in the wrapper query expression
        restrictToCatalogIds.retainAll(getInterestedCatalogs(queryExpression, restrictToCatalogIds));
        // check for catalogs interested in the wrapped query expression
        QueryResult wrapperExprQueryResult;
        QueryExpression wrapperQE = ((WrapperQueryExpression) queryExpression).getQueryExpression();
        if (wrapperQE instanceof QueryLogicalGroup) {
            wrapperExprQueryResult = this.queryRecur((QueryLogicalGroup) wrapperQE, restrictToCatalogIds);
        } else {
            wrapperExprQueryResult = new QueryResult(wrapperQE);
            wrapperExprQueryResult.interestedCatalogs = getInterestedCatalogs(wrapperQE, restrictToCatalogIds);
            wrapperExprQueryResult.interestedCatalogs.retainAll(restrictToCatalogIds);
        }
        return wrapperExprQueryResult;
    } else {
        // leaf expression: record which catalogs are interested in it
        QueryResult queryResult = new QueryResult(queryExpression);
        Set<String> interestedCatalogs = getInterestedCatalogs(queryExpression, restrictToCatalogIds);
        interestedCatalogs.retainAll(restrictToCatalogIds);
        queryResult.setInterestedCatalogs(interestedCatalogs);
        return queryResult;
    }
}

/**
 * Intersects the catalog receipts of the given query results, matching receipts
 * by transaction id.
 * <p>
 * FIX(review): the previous implementation iterated the OTHER result's receipts
 * and removed matched/unmatched ones from the accumulator. Accumulator entries
 * whose transaction id did not appear in the other result were never discarded,
 * so the "intersection" was never actually narrowed. This version retains only
 * receipts whose transaction id appears in every query result.
 *
 * @param queryResults materialized results to intersect; may be empty
 * @return receipts common (by transaction id) to all results; empty list when
 *         {@code queryResults} is empty
 */
protected List<CatalogReceipt> getIntersection(List<QueryResult> queryResults) {
    List<CatalogReceipt> catalogReceipts = new Vector<CatalogReceipt>();
    if (queryResults.size() > 0) {
        // seed with the first result's receipts, then narrow against each of the rest
        catalogReceipts.addAll(queryResults.get(0).getCatalogReceipts());
        for (int i = 1; i < queryResults.size(); i++) {
            QueryResult qr = queryResults.get(i);
            List<CatalogReceipt> retained = new Vector<CatalogReceipt>();
            CR: for (CatalogReceipt catalogReceipt : catalogReceipts) {
                for (CatalogReceipt compCatalogReceipt : qr.getCatalogReceipts()) {
                    if (catalogReceipt.getTransactionId().equals(compCatalogReceipt.getTransactionId())) {
                        retained.add(catalogReceipt);
                        continue CR;
                    }
                }
                // no matching transaction in qr -> drop this receipt
            }
            catalogReceipts = retained;
        }
    }
    return catalogReceipts;
}

/**
 * Reduces a query expression to the parts the given catalog understands.
 * <p>
 * Logical groups keep only understood sub-expressions (an AND group collapses to
 * {@code null} when a member is not understood and
 * {@code disableIntersectingCrossCatalogQueries} is set); wrapper expressions are
 * cloned around their reduced child; a leaf is kept verbatim when the catalog is
 * interested in it.
 *
 * @return the reduced expression, or {@code null} when nothing is understood
 * @throws CatalogException if the catalog fails to report interest
 */
protected QueryExpression reduceToUnderstoodExpressions(Catalog catalog, QueryExpression queryExpression)
        throws CatalogException {
    if (queryExpression instanceof QueryLogicalGroup) {
        QueryLogicalGroup queryLogicalGroup = (QueryLogicalGroup) queryExpression;
        List<QueryExpression> restrictedExpressions = new Vector<QueryExpression>();
        for (QueryExpression qe : queryLogicalGroup.getExpressions()) {
            QueryExpression restrictedQE = this.reduceToUnderstoodExpressions(catalog, qe);
            if (restrictedQE == null
                    && queryLogicalGroup.getOperator().equals(QueryLogicalGroup.Operator.AND)
                    && this.disableIntersectingCrossCatalogQueries) {
                // an AND with a non-understood member cannot be answered by this catalog alone
                restrictedExpressions.clear();
                break;
            }
            if (restrictedQE != null) {
                restrictedExpressions.add(restrictedQE);
            }
        }
        if (restrictedExpressions.size() > 0) {
            if (restrictedExpressions.size() == 1) {
                // single survivor: unwrap it from the group
                return restrictedExpressions.get(0);
            } else {
                QueryLogicalGroup restrictedQueryLogicalGroup = queryLogicalGroup.clone();
                restrictedQueryLogicalGroup.setExpressions(restrictedExpressions);
                return restrictedQueryLogicalGroup;
            }
        } else {
            return null;
        }
    } else if (queryExpression instanceof WrapperQueryExpression) {
        WrapperQueryExpression wrapperQueryExpresssion = (WrapperQueryExpression) queryExpression;
        if (catalog.isInterested(queryExpression)) {
            QueryExpression qe =
                    this.reduceToUnderstoodExpressions(catalog, wrapperQueryExpresssion.getQueryExpression());
            if (qe != null) {
                WrapperQueryExpression wqe = wrapperQueryExpresssion.clone();
                wqe.setQueryExpression(qe);
                return wqe;
            } else if (wrapperQueryExpresssion.isValidWithNoSubExpression()) {
                // wrapper is meaningful even without a child expression
                WrapperQueryExpression wqe = wrapperQueryExpresssion.clone();
                wqe.setQueryExpression(null);
                return wqe;
            } else {
                return null;
            }
        } else {
            return null;
        }
    } else if (catalog.isInterested(queryExpression)) {
        return queryExpression;
    } else {
        return null;
    }
}

protected
boolean containsTranactionReceipts(List<QueryResult> queryResults) {
    // true when at least one result has already been executed (receipts set);
    // NOTE(review): "Tranaction" is a typo in the method name, kept for callers
    for (QueryResult queryResult : queryResults) {
        if (queryResult.getCatalogReceipts() != null) {
            return true;
        }
    }
    return false;
}

/**
 * Returns {@code true} when the results do NOT all share the same
 * interested-catalog set (i.e. some result targets different catalogs than the
 * first one). An empty list is considered balanced.
 */
protected boolean containsUnbalancedCatalogInterest(List<QueryResult> queryResults) {
    if (queryResults.size() > 0) {
        QueryResult firstQueryResult = queryResults.get(0);
        for (int i = 1; i < queryResults.size(); i++) {
            QueryResult queryResult = queryResults.get(i);
            // set equality via mutual containsAll; unequal sets -> unbalanced
            if (!(queryResult.interestedCatalogs.containsAll(firstQueryResult.interestedCatalogs)
                    && firstQueryResult.interestedCatalogs.containsAll(queryResult.interestedCatalogs))) {
                return true;
            }
        }
        return false;
    } else {
        return false;
    }
}

/**
 * Collects the ids of catalogs (within {@code restrictToCatalogIds}) that report
 * interest in the given expression.
 *
 * @throws CatalogException if a catalog's interest check fails and
 *         {@code oneCatalogFailsAllFail} is set; otherwise the failure is logged
 *         at WARNING and that catalog is skipped
 */
protected HashSet<String> getInterestedCatalogs(QueryExpression queryExpression, Set<String> restrictToCatalogIds)
        throws CatalogException, CatalogServiceException {
    HashSet<String> interestedCatalogs = new HashSet<String>();
    for (Catalog catalog : this.getCurrentCatalogList()) {
        try {
            if (restrictToCatalogIds.contains(catalog.getId())) {
                if (catalog.isInterested(queryExpression)) {
                    interestedCatalogs.add(catalog.getId());
                }
            }
        } catch (Exception e) {
            if (this.oneCatalogFailsAllFail) {
                throw new CatalogException(
                        "Failed to determine if Catalog '" + catalog.getId()
                                + "' is interested in query expression '" + queryExpression + "' : "
                                + e.getMessage(), e);
            } else {
                LOG.log(Level.WARNING,
                        "Failed to determine if Catalog '" + catalog.getId()
                                + "' is interested in query expression '" + queryExpression + "' : "
                                + e.getMessage(), e);
            }
        }
    }
    return interestedCatalogs;
}

/**
 * Intermediate result of {@code queryRecur}: the (sub-)expression evaluated, the
 * catalogs interested in it, and — once executed — the receipts it produced.
 * A {@code null} receipt list means the result has not been executed yet.
 */
protected class QueryResult {

    private QueryExpression queryExpression;       // the evaluated (sub-)expression
    private List<CatalogReceipt> catalogReceipts;  // null until the query is executed
    private Set<String> interestedCatalogs;        // ids of catalogs interested in the expression

    public QueryResult(QueryExpression queryExpression) {
        this.queryExpression = queryExpression;
    }

    public QueryExpression getQueryExpression() {
        return queryExpression;
    }

    public void setQueryExpression(QueryExpression queryExpression) {
        this.queryExpression = queryExpression;
    }

    public List<CatalogReceipt> getCatalogReceipts() {
        return catalogReceipts;
    }

    public void setCatalogReceipts(List<CatalogReceipt> catalogReceipts) {
        this.catalogReceipts = catalogReceipts;
    }

    public Set<String> getInterestedCatalogs() {
        return interestedCatalogs;
    }

    public void setInterestedCatalogs(Set<String> interestedCatalogs) {
        this.interestedCatalogs = interestedCatalogs;
    }
}

/**
 * Groups transaction receipts under a shared id.
 * NOTE(review): not referenced from the visible code — presumably used elsewhere
 * in this file or by subclasses; verify before removing.
 */
protected class QueryResultGroup {

    HashSet<TransactionReceipt> transactionReceipts;
    String id;

    public QueryResultGroup(String id) {
        this.id = id;
        transactionReceipts = new HashSet<TransactionReceipt>();
    }

    public HashSet<TransactionReceipt> getResults() {
        return this.transactionReceipts;
    }

    public void addTransactionReceipt(TransactionReceipt transactionReceipt) {
        this.transactionReceipts.add(transactionReceipt);
    }
}
}
apache-2.0
Colex/cassandra-mv
interface/thrift/gen-java/org/apache/cassandra/thrift/TokenRange.java
33502
/** * Autogenerated by Thrift Compiler (1.0.0-dev) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.cassandra.thrift; /* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ import org.apache.commons.lang.builder.HashCodeBuilder; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import org.apache.thrift.protocol.TProtocolException; import org.apache.thrift.EncodingUtils; import org.apache.thrift.TException; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A TokenRange describes part of the Cassandra ring, it is a mapping from a range to * endpoints responsible for that range. 
* @param start_token The first token in the range * @param end_token The last token in the range * @param endpoints The endpoints responsible for the range (listed by their configured listen_address) * @param rpc_endpoints The endpoints responsible for the range (listed by their configured rpc_address) */ public class TokenRange implements org.apache.thrift.TBase<TokenRange, TokenRange._Fields>, java.io.Serializable, Cloneable, Comparable<TokenRange> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TokenRange"); private static final org.apache.thrift.protocol.TField START_TOKEN_FIELD_DESC = new org.apache.thrift.protocol.TField("start_token", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField END_TOKEN_FIELD_DESC = new org.apache.thrift.protocol.TField("end_token", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField ENDPOINTS_FIELD_DESC = new org.apache.thrift.protocol.TField("endpoints", org.apache.thrift.protocol.TType.LIST, (short)3); private static final org.apache.thrift.protocol.TField RPC_ENDPOINTS_FIELD_DESC = new org.apache.thrift.protocol.TField("rpc_endpoints", org.apache.thrift.protocol.TType.LIST, (short)4); private static final org.apache.thrift.protocol.TField ENDPOINT_DETAILS_FIELD_DESC = new org.apache.thrift.protocol.TField("endpoint_details", org.apache.thrift.protocol.TType.LIST, (short)5); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TokenRangeStandardSchemeFactory()); schemes.put(TupleScheme.class, new TokenRangeTupleSchemeFactory()); } public String start_token; // required public String end_token; // required public List<String> endpoints; // required public List<String> rpc_endpoints; // optional public List<EndpointDetails> endpoint_details; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { START_TOKEN((short)1, "start_token"), END_TOKEN((short)2, "end_token"), ENDPOINTS((short)3, "endpoints"), RPC_ENDPOINTS((short)4, "rpc_endpoints"), ENDPOINT_DETAILS((short)5, "endpoint_details"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // START_TOKEN return START_TOKEN; case 2: // END_TOKEN return END_TOKEN; case 3: // ENDPOINTS return ENDPOINTS; case 4: // RPC_ENDPOINTS return RPC_ENDPOINTS; case 5: // ENDPOINT_DETAILS return ENDPOINT_DETAILS; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments private _Fields optionals[] = {_Fields.RPC_ENDPOINTS,_Fields.ENDPOINT_DETAILS}; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.START_TOKEN, new org.apache.thrift.meta_data.FieldMetaData("start_token", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.END_TOKEN, new org.apache.thrift.meta_data.FieldMetaData("end_token", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.ENDPOINTS, new org.apache.thrift.meta_data.FieldMetaData("endpoints", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)))); tmpMap.put(_Fields.RPC_ENDPOINTS, new org.apache.thrift.meta_data.FieldMetaData("rpc_endpoints", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)))); tmpMap.put(_Fields.ENDPOINT_DETAILS, new org.apache.thrift.meta_data.FieldMetaData("endpoint_details", org.apache.thrift.TFieldRequirementType.OPTIONAL, new 
org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, EndpointDetails.class)))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TokenRange.class, metaDataMap); } public TokenRange() { } public TokenRange( String start_token, String end_token, List<String> endpoints) { this(); this.start_token = start_token; this.end_token = end_token; this.endpoints = endpoints; } /** * Performs a deep copy on <i>other</i>. */ public TokenRange(TokenRange other) { if (other.isSetStart_token()) { this.start_token = other.start_token; } if (other.isSetEnd_token()) { this.end_token = other.end_token; } if (other.isSetEndpoints()) { List<String> __this__endpoints = new ArrayList<String>(other.endpoints); this.endpoints = __this__endpoints; } if (other.isSetRpc_endpoints()) { List<String> __this__rpc_endpoints = new ArrayList<String>(other.rpc_endpoints); this.rpc_endpoints = __this__rpc_endpoints; } if (other.isSetEndpoint_details()) { List<EndpointDetails> __this__endpoint_details = new ArrayList<EndpointDetails>(other.endpoint_details.size()); for (EndpointDetails other_element : other.endpoint_details) { __this__endpoint_details.add(new EndpointDetails(other_element)); } this.endpoint_details = __this__endpoint_details; } } public TokenRange deepCopy() { return new TokenRange(this); } @Override public void clear() { this.start_token = null; this.end_token = null; this.endpoints = null; this.rpc_endpoints = null; this.endpoint_details = null; } public String getStart_token() { return this.start_token; } public TokenRange setStart_token(String start_token) { this.start_token = start_token; return this; } public void unsetStart_token() { this.start_token = null; } /** Returns true if field start_token is set (has been assigned a value) and false otherwise */ public boolean isSetStart_token() { return 
this.start_token != null; } public void setStart_tokenIsSet(boolean value) { if (!value) { this.start_token = null; } } public String getEnd_token() { return this.end_token; } public TokenRange setEnd_token(String end_token) { this.end_token = end_token; return this; } public void unsetEnd_token() { this.end_token = null; } /** Returns true if field end_token is set (has been assigned a value) and false otherwise */ public boolean isSetEnd_token() { return this.end_token != null; } public void setEnd_tokenIsSet(boolean value) { if (!value) { this.end_token = null; } } public int getEndpointsSize() { return (this.endpoints == null) ? 0 : this.endpoints.size(); } public java.util.Iterator<String> getEndpointsIterator() { return (this.endpoints == null) ? null : this.endpoints.iterator(); } public void addToEndpoints(String elem) { if (this.endpoints == null) { this.endpoints = new ArrayList<String>(); } this.endpoints.add(elem); } public List<String> getEndpoints() { return this.endpoints; } public TokenRange setEndpoints(List<String> endpoints) { this.endpoints = endpoints; return this; } public void unsetEndpoints() { this.endpoints = null; } /** Returns true if field endpoints is set (has been assigned a value) and false otherwise */ public boolean isSetEndpoints() { return this.endpoints != null; } public void setEndpointsIsSet(boolean value) { if (!value) { this.endpoints = null; } } public int getRpc_endpointsSize() { return (this.rpc_endpoints == null) ? 0 : this.rpc_endpoints.size(); } public java.util.Iterator<String> getRpc_endpointsIterator() { return (this.rpc_endpoints == null) ? 
null : this.rpc_endpoints.iterator(); } public void addToRpc_endpoints(String elem) { if (this.rpc_endpoints == null) { this.rpc_endpoints = new ArrayList<String>(); } this.rpc_endpoints.add(elem); } public List<String> getRpc_endpoints() { return this.rpc_endpoints; } public TokenRange setRpc_endpoints(List<String> rpc_endpoints) { this.rpc_endpoints = rpc_endpoints; return this; } public void unsetRpc_endpoints() { this.rpc_endpoints = null; } /** Returns true if field rpc_endpoints is set (has been assigned a value) and false otherwise */ public boolean isSetRpc_endpoints() { return this.rpc_endpoints != null; } public void setRpc_endpointsIsSet(boolean value) { if (!value) { this.rpc_endpoints = null; } } public int getEndpoint_detailsSize() { return (this.endpoint_details == null) ? 0 : this.endpoint_details.size(); } public java.util.Iterator<EndpointDetails> getEndpoint_detailsIterator() { return (this.endpoint_details == null) ? null : this.endpoint_details.iterator(); } public void addToEndpoint_details(EndpointDetails elem) { if (this.endpoint_details == null) { this.endpoint_details = new ArrayList<EndpointDetails>(); } this.endpoint_details.add(elem); } public List<EndpointDetails> getEndpoint_details() { return this.endpoint_details; } public TokenRange setEndpoint_details(List<EndpointDetails> endpoint_details) { this.endpoint_details = endpoint_details; return this; } public void unsetEndpoint_details() { this.endpoint_details = null; } /** Returns true if field endpoint_details is set (has been assigned a value) and false otherwise */ public boolean isSetEndpoint_details() { return this.endpoint_details != null; } public void setEndpoint_detailsIsSet(boolean value) { if (!value) { this.endpoint_details = null; } } public void setFieldValue(_Fields field, Object value) { switch (field) { case START_TOKEN: if (value == null) { unsetStart_token(); } else { setStart_token((String)value); } break; case END_TOKEN: if (value == null) { unsetEnd_token(); } 
else { setEnd_token((String)value); } break; case ENDPOINTS: if (value == null) { unsetEndpoints(); } else { setEndpoints((List<String>)value); } break; case RPC_ENDPOINTS: if (value == null) { unsetRpc_endpoints(); } else { setRpc_endpoints((List<String>)value); } break; case ENDPOINT_DETAILS: if (value == null) { unsetEndpoint_details(); } else { setEndpoint_details((List<EndpointDetails>)value); } break; } } public Object getFieldValue(_Fields field) { switch (field) { case START_TOKEN: return getStart_token(); case END_TOKEN: return getEnd_token(); case ENDPOINTS: return getEndpoints(); case RPC_ENDPOINTS: return getRpc_endpoints(); case ENDPOINT_DETAILS: return getEndpoint_details(); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case START_TOKEN: return isSetStart_token(); case END_TOKEN: return isSetEnd_token(); case ENDPOINTS: return isSetEndpoints(); case RPC_ENDPOINTS: return isSetRpc_endpoints(); case ENDPOINT_DETAILS: return isSetEndpoint_details(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof TokenRange) return this.equals((TokenRange)that); return false; } public boolean equals(TokenRange that) { if (that == null) return false; boolean this_present_start_token = true && this.isSetStart_token(); boolean that_present_start_token = true && that.isSetStart_token(); if (this_present_start_token || that_present_start_token) { if (!(this_present_start_token && that_present_start_token)) return false; if (!this.start_token.equals(that.start_token)) return false; } boolean this_present_end_token = true && this.isSetEnd_token(); boolean that_present_end_token = true && that.isSetEnd_token(); if (this_present_end_token || that_present_end_token) { if 
(!(this_present_end_token && that_present_end_token)) return false; if (!this.end_token.equals(that.end_token)) return false; } boolean this_present_endpoints = true && this.isSetEndpoints(); boolean that_present_endpoints = true && that.isSetEndpoints(); if (this_present_endpoints || that_present_endpoints) { if (!(this_present_endpoints && that_present_endpoints)) return false; if (!this.endpoints.equals(that.endpoints)) return false; } boolean this_present_rpc_endpoints = true && this.isSetRpc_endpoints(); boolean that_present_rpc_endpoints = true && that.isSetRpc_endpoints(); if (this_present_rpc_endpoints || that_present_rpc_endpoints) { if (!(this_present_rpc_endpoints && that_present_rpc_endpoints)) return false; if (!this.rpc_endpoints.equals(that.rpc_endpoints)) return false; } boolean this_present_endpoint_details = true && this.isSetEndpoint_details(); boolean that_present_endpoint_details = true && that.isSetEndpoint_details(); if (this_present_endpoint_details || that_present_endpoint_details) { if (!(this_present_endpoint_details && that_present_endpoint_details)) return false; if (!this.endpoint_details.equals(that.endpoint_details)) return false; } return true; } @Override public int hashCode() { HashCodeBuilder builder = new HashCodeBuilder(); boolean present_start_token = true && (isSetStart_token()); builder.append(present_start_token); if (present_start_token) builder.append(start_token); boolean present_end_token = true && (isSetEnd_token()); builder.append(present_end_token); if (present_end_token) builder.append(end_token); boolean present_endpoints = true && (isSetEndpoints()); builder.append(present_endpoints); if (present_endpoints) builder.append(endpoints); boolean present_rpc_endpoints = true && (isSetRpc_endpoints()); builder.append(present_rpc_endpoints); if (present_rpc_endpoints) builder.append(rpc_endpoints); boolean present_endpoint_details = true && (isSetEndpoint_details()); builder.append(present_endpoint_details); if 
(present_endpoint_details) builder.append(endpoint_details); return builder.toHashCode(); } @Override public int compareTo(TokenRange other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = Boolean.valueOf(isSetStart_token()).compareTo(other.isSetStart_token()); if (lastComparison != 0) { return lastComparison; } if (isSetStart_token()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.start_token, other.start_token); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetEnd_token()).compareTo(other.isSetEnd_token()); if (lastComparison != 0) { return lastComparison; } if (isSetEnd_token()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.end_token, other.end_token); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetEndpoints()).compareTo(other.isSetEndpoints()); if (lastComparison != 0) { return lastComparison; } if (isSetEndpoints()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.endpoints, other.endpoints); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetRpc_endpoints()).compareTo(other.isSetRpc_endpoints()); if (lastComparison != 0) { return lastComparison; } if (isSetRpc_endpoints()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.rpc_endpoints, other.rpc_endpoints); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetEndpoint_details()).compareTo(other.isSetEndpoint_details()); if (lastComparison != 0) { return lastComparison; } if (isSetEndpoint_details()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.endpoint_details, other.endpoint_details); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void 
read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("TokenRange("); boolean first = true; sb.append("start_token:"); if (this.start_token == null) { sb.append("null"); } else { sb.append(this.start_token); } first = false; if (!first) sb.append(", "); sb.append("end_token:"); if (this.end_token == null) { sb.append("null"); } else { sb.append(this.end_token); } first = false; if (!first) sb.append(", "); sb.append("endpoints:"); if (this.endpoints == null) { sb.append("null"); } else { sb.append(this.endpoints); } first = false; if (isSetRpc_endpoints()) { if (!first) sb.append(", "); sb.append("rpc_endpoints:"); if (this.rpc_endpoints == null) { sb.append("null"); } else { sb.append(this.rpc_endpoints); } first = false; } if (isSetEndpoint_details()) { if (!first) sb.append(", "); sb.append("endpoint_details:"); if (this.endpoint_details == null) { sb.append("null"); } else { sb.append(this.endpoint_details); } first = false; } sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields if (start_token == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'start_token' was not present! Struct: " + toString()); } if (end_token == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'end_token' was not present! Struct: " + toString()); } if (endpoints == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'endpoints' was not present! 
Struct: " + toString()); } // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class TokenRangeStandardSchemeFactory implements SchemeFactory { public TokenRangeStandardScheme getScheme() { return new TokenRangeStandardScheme(); } } private static class TokenRangeStandardScheme extends StandardScheme<TokenRange> { public void read(org.apache.thrift.protocol.TProtocol iprot, TokenRange struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // START_TOKEN if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.start_token = iprot.readString(); struct.setStart_tokenIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // END_TOKEN if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.end_token = iprot.readString(); struct.setEnd_tokenIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // ENDPOINTS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list48 = iprot.readListBegin(); struct.endpoints = new ArrayList<String>(_list48.size); for (int _i49 = 0; _i49 < 
_list48.size; ++_i49) { String _elem50; _elem50 = iprot.readString(); struct.endpoints.add(_elem50); } iprot.readListEnd(); } struct.setEndpointsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // RPC_ENDPOINTS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list51 = iprot.readListBegin(); struct.rpc_endpoints = new ArrayList<String>(_list51.size); for (int _i52 = 0; _i52 < _list51.size; ++_i52) { String _elem53; _elem53 = iprot.readString(); struct.rpc_endpoints.add(_elem53); } iprot.readListEnd(); } struct.setRpc_endpointsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // ENDPOINT_DETAILS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list54 = iprot.readListBegin(); struct.endpoint_details = new ArrayList<EndpointDetails>(_list54.size); for (int _i55 = 0; _i55 < _list54.size; ++_i55) { EndpointDetails _elem56; _elem56 = new EndpointDetails(); _elem56.read(iprot); struct.endpoint_details.add(_elem56); } iprot.readListEnd(); } struct.setEndpoint_detailsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, TokenRange struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.start_token != null) { oprot.writeFieldBegin(START_TOKEN_FIELD_DESC); oprot.writeString(struct.start_token); oprot.writeFieldEnd(); } if (struct.end_token != null) { oprot.writeFieldBegin(END_TOKEN_FIELD_DESC); oprot.writeString(struct.end_token); 
oprot.writeFieldEnd(); } if (struct.endpoints != null) { oprot.writeFieldBegin(ENDPOINTS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.endpoints.size())); for (String _iter57 : struct.endpoints) { oprot.writeString(_iter57); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } if (struct.rpc_endpoints != null) { if (struct.isSetRpc_endpoints()) { oprot.writeFieldBegin(RPC_ENDPOINTS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.rpc_endpoints.size())); for (String _iter58 : struct.rpc_endpoints) { oprot.writeString(_iter58); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } } if (struct.endpoint_details != null) { if (struct.isSetEndpoint_details()) { oprot.writeFieldBegin(ENDPOINT_DETAILS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.endpoint_details.size())); for (EndpointDetails _iter59 : struct.endpoint_details) { _iter59.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class TokenRangeTupleSchemeFactory implements SchemeFactory { public TokenRangeTupleScheme getScheme() { return new TokenRangeTupleScheme(); } } private static class TokenRangeTupleScheme extends TupleScheme<TokenRange> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, TokenRange struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; oprot.writeString(struct.start_token); oprot.writeString(struct.end_token); { oprot.writeI32(struct.endpoints.size()); for (String _iter60 : struct.endpoints) { oprot.writeString(_iter60); } } BitSet optionals = new BitSet(); if (struct.isSetRpc_endpoints()) { optionals.set(0); } if (struct.isSetEndpoint_details()) { optionals.set(1); } oprot.writeBitSet(optionals, 2); if (struct.isSetRpc_endpoints()) { { 
oprot.writeI32(struct.rpc_endpoints.size()); for (String _iter61 : struct.rpc_endpoints) { oprot.writeString(_iter61); } } } if (struct.isSetEndpoint_details()) { { oprot.writeI32(struct.endpoint_details.size()); for (EndpointDetails _iter62 : struct.endpoint_details) { _iter62.write(oprot); } } } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, TokenRange struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; struct.start_token = iprot.readString(); struct.setStart_tokenIsSet(true); struct.end_token = iprot.readString(); struct.setEnd_tokenIsSet(true); { org.apache.thrift.protocol.TList _list63 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32()); struct.endpoints = new ArrayList<String>(_list63.size); for (int _i64 = 0; _i64 < _list63.size; ++_i64) { String _elem65; _elem65 = iprot.readString(); struct.endpoints.add(_elem65); } } struct.setEndpointsIsSet(true); BitSet incoming = iprot.readBitSet(2); if (incoming.get(0)) { { org.apache.thrift.protocol.TList _list66 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32()); struct.rpc_endpoints = new ArrayList<String>(_list66.size); for (int _i67 = 0; _i67 < _list66.size; ++_i67) { String _elem68; _elem68 = iprot.readString(); struct.rpc_endpoints.add(_elem68); } } struct.setRpc_endpointsIsSet(true); } if (incoming.get(1)) { { org.apache.thrift.protocol.TList _list69 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); struct.endpoint_details = new ArrayList<EndpointDetails>(_list69.size); for (int _i70 = 0; _i70 < _list69.size; ++_i70) { EndpointDetails _elem71; _elem71 = new EndpointDetails(); _elem71.read(iprot); struct.endpoint_details.add(_elem71); } } struct.setEndpoint_detailsIsSet(true); } } } }
apache-2.0
AfricaRegex/SjcProduct
SjcProject/src/com/sjc/cc/service/service/impl/DBInstanceServiceImpl.java
2075
package com.sjc.cc.service.service.impl; import java.util.List; import org.hibernate.criterion.DetachedCriteria; import org.hibernate.criterion.Restrictions; import com.sjc.cc.base.service.AbstractBusinessService; import com.sjc.cc.entity.TccDbInstance; import com.sjc.cc.service.service.DBInstanceService; public class DBInstanceServiceImpl extends AbstractBusinessService implements DBInstanceService { private static final long serialVersionUID = 6898341081172805401L; @Override public Long saveDBInstance(TccDbInstance dbInstance) { return commonDao.save(dbInstance); } @Override public void removeDBInstance(TccDbInstance dbInstance) { commonDao.delete(dbInstance); } @Override public void updateDBInstance(TccDbInstance dbInstance) { commonDao.update(dbInstance); } @Override public TccDbInstance getDBInstance(Long instanceId) { return (TccDbInstance) commonDao.get(TccDbInstance.class, instanceId); } @SuppressWarnings("unchecked") @Override public List<TccDbInstance> getDBInstanceList(TccDbInstance dbInstance) { DetachedCriteria criteria = DetachedCriteria .forClass(TccDbInstance.class); if (null != dbInstance && null != dbInstance.getInstanceId()) { criteria.add(Restrictions.eq("instanceId", dbInstance.getInstanceId())); } if (null != dbInstance && null != dbInstance.getInstanceName()) { criteria.add(Restrictions.like("instanceName", "%" + dbInstance.getInstanceName() + "%")); } if (null != dbInstance && null != dbInstance.getInstanceEnconding()) { criteria.add(Restrictions.eq("instanceEnconding", dbInstance.getInstanceEnconding())); } return commonDao.findByCriteria(criteria); } @Override public List<TccDbInstance> getDBInstanceAll() { return getDBInstanceList(null); } }
apache-2.0
azusa/hatunatu
hatunatu-util/src/main/java/jp/fieldnotes/hatunatu/util/beans/BeanDesc.java
10357
/* * Copyright 2004-2012 the Seasar Foundation and the Others. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package jp.fieldnotes.hatunatu.util.beans; import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; import java.util.Map; import jp.fieldnotes.hatunatu.api.beans.FieldDesc; import jp.fieldnotes.hatunatu.api.beans.MethodDesc; import jp.fieldnotes.hatunatu.api.beans.PropertyDesc; import jp.fieldnotes.hatunatu.util.beans.factory.BeanDescFactory; /** * JavaBeansのメタデータを扱うためのインターフェースです。 * <p> * {@link jp.fieldnotes.hatunatu.api.beans.BeanDesc}のインスタンスは{@link BeanDescFactory}から取得します。 * <p> * * <pre> * BeanDesc beanDesc = BeanDescFactory.getBeanDesc(Foo.class); * </pre> * <p> * 取得した{@link jp.fieldnotes.hatunatu.api.beans.BeanDesc}から,対象となるJavaBeansのプロパティやフィールド、コンストラクタ、メソッドのメタデータを取得できます。 * </p> * * <pre> * for (PropertyDesc propertyDesc : beanDesc.getPropertyDescs()) { * propertyDesc.getValue(foo); // Foo のプロパティの値を取得 * } * * for (FieldDesc fieldDesc : beanDesc.getFieldDescs()) { * fieldDesc.getFileldValue(foo); // Foo のフィールドの値を取得 * } * * for (ConstructorDesc constructorDesc : beanDesc.getConstructorDescs()) { * constructorDesc.newInstance(...); // Foo のインスタンスを生成 * } * * for (String methodName : beanDesc.getMethodNames()) { * for (MethodDesc methodDesc : beanDesc.getMethodDescs(methodName)) { * methodDesc.invoke(foo, ...); // Foo のメソッドを起動 * } * } * </pre> * * @author higa * @see BeanDescFactory */ public interface BeanDesc { /** * Beanのクラスを返します。 * * @param <T> * 
Beanのクラス * @return Beanのクラス */ <T> Class<T> getBeanClass(); /** * 型変数から型引数へのマップを返します。 * * @return 型変数から型引数へのマップ */ Map<TypeVariable<?>, Type> getTypeVariables(); /** * {@link jp.fieldnotes.hatunatu.api.beans.PropertyDesc}を持っているかどうかを返します。 * * @param propertyName * プロパティ名。{@literal null}や空文字列であってはいけません * @return {@link jp.fieldnotes.hatunatu.api.beans.PropertyDesc}を持っているかどうか */ boolean hasPropertyDesc(String propertyName); /** * {@link jp.fieldnotes.hatunatu.api.beans.PropertyDesc}を返します。 * * @param propertyName * プロパティ名。{@literal null}や空文字列であってはいけません * @return {@link jp.fieldnotes.hatunatu.api.beans.PropertyDesc} */ jp.fieldnotes.hatunatu.api.beans.PropertyDesc getPropertyDesc(String propertyName); /** * {@link jp.fieldnotes.hatunatu.api.beans.PropertyDesc}を返します。 * * @param index * {@link jp.fieldnotes.hatunatu.api.beans.PropertyDesc}のインデックス * @return {@link jp.fieldnotes.hatunatu.api.beans.PropertyDesc} */ jp.fieldnotes.hatunatu.api.beans.PropertyDesc getPropertyDesc(int index); /** * {@link jp.fieldnotes.hatunatu.api.beans.PropertyDesc}の数を返します。 * * @return {@link jp.fieldnotes.hatunatu.api.beans.PropertyDesc}の数 */ int getPropertyDescSize(); /** * {@link jp.fieldnotes.hatunatu.api.beans.PropertyDesc}の{@link Iterable}を返します。 * * @return {@link jp.fieldnotes.hatunatu.api.beans.PropertyDesc}の{@link Iterable} */ Iterable<PropertyDesc> getPropertyDescs(); /** * {@link jp.fieldnotes.hatunatu.api.beans.FieldDesc}を持っているかどうかを返します。 * * @param fieldName * フィールド名。{@literal null}や空文字列であってはいけません * @return {@link jp.fieldnotes.hatunatu.api.beans.FieldDesc}を持っているかどうか */ boolean hasFieldDesc(String fieldName); /** * {@link jp.fieldnotes.hatunatu.api.beans.FieldDesc}を返します。 * * @param fieldName * フィールド名。{@literal null}や空文字列であってはいけません * @return {@link jp.fieldnotes.hatunatu.api.beans.FieldDesc} */ jp.fieldnotes.hatunatu.api.beans.FieldDesc getFieldDesc(String fieldName); /** * {@link jp.fieldnotes.hatunatu.api.beans.FieldDesc}を返します。 * * @param index * {@link 
jp.fieldnotes.hatunatu.api.beans.FieldDesc}のインデックス * @return {@link jp.fieldnotes.hatunatu.api.beans.FieldDesc} */ jp.fieldnotes.hatunatu.api.beans.FieldDesc getFieldDesc(int index); /** * {@link jp.fieldnotes.hatunatu.api.beans.FieldDesc}の数を返します。 * * @return {@link jp.fieldnotes.hatunatu.api.beans.FieldDesc}の数 */ int getFieldDescSize(); /** * {@link jp.fieldnotes.hatunatu.api.beans.FieldDesc}の{@link Iterable}を返します。 * * @return {@link jp.fieldnotes.hatunatu.api.beans.FieldDesc}の{@link Iterable} */ Iterable<FieldDesc> getFieldDescs(); /** * 新しいインスタンスを作成します。 * * @param <T> * Beanクラスの型 * @param args * コンストラクタに渡す引数の並び * @return 新しいインスタンス */ <T> T newInstance(Object... args); /** * 引数の型に応じた{@link jp.fieldnotes.hatunatu.api.beans.ConstructorDesc}を返します。 * * @param paramTypes * コンストラクタに渡す引数型の並び * @return 引数の型に応じた{@link ConstructorDesc} */ jp.fieldnotes.hatunatu.api.beans.ConstructorDesc getConstructorDesc(Class<?>... paramTypes); /** * 引数に適合する{@link jp.fieldnotes.hatunatu.api.beans.ConstructorDesc}を返します。 * * @param args * コンストラクタに渡す引数の並び * @return 引数に適合する{@link Constructor} */ jp.fieldnotes.hatunatu.api.beans.ConstructorDesc getSuitableConstructorDesc(Object... 
args); /** * {@link jp.fieldnotes.hatunatu.api.beans.ConstructorDesc}を返します。 * * @param index * {@link jp.fieldnotes.hatunatu.api.beans.ConstructorDesc}のインデックス * @return {@link jp.fieldnotes.hatunatu.api.beans.ConstructorDesc} */ jp.fieldnotes.hatunatu.api.beans.ConstructorDesc getConstructorDesc(int index); /** * {@link jp.fieldnotes.hatunatu.api.beans.ConstructorDesc}の数を返します。 * * @return {@link jp.fieldnotes.hatunatu.api.beans.ConstructorDesc}の数 */ int getConstructorDescSize(); /** * {@link jp.fieldnotes.hatunatu.api.beans.ConstructorDesc}の{@link Iterable}を返します。 * * @return {@link jp.fieldnotes.hatunatu.api.beans.ConstructorDesc}の{@link Iterable} */ Iterable<jp.fieldnotes.hatunatu.api.beans.ConstructorDesc> getConstructorDescs(); /** * 引数の型に応じた{@link jp.fieldnotes.hatunatu.api.beans.MethodDesc}を返します。 * * @param methodName * メソッド名。{@literal null}や空文字列であってはいけません * @param paramTypes * メソッドの引数型の並び * @return 引数の型に応じた{@link MethodDesc} メソッド */ jp.fieldnotes.hatunatu.api.beans.MethodDesc getMethodDesc(String methodName, Class<?>... paramTypes); /** * 引数の型に応じた{@link jp.fieldnotes.hatunatu.api.beans.MethodDesc}を返します。見つからない場合は、{@literal null}を返します。 * * @param methodName * メソッド名。{@literal null}や空文字列であってはいけません * @param paramTypes * メソッドの引数型の並び * @return 引数の型に応じた{@link MethodDesc} */ jp.fieldnotes.hatunatu.api.beans.MethodDesc getMethodDescNoException(String methodName, Class<?>... paramTypes); /** * 引数に適合する{@link jp.fieldnotes.hatunatu.api.beans.MethodDesc}を返します。 * * @param methodName * メソッド名。{@literal null}や空文字列であってはいけません * @param args * メソッドの引数の並び * @return 引数に適合する{@link MethodDesc} メソッド */ jp.fieldnotes.hatunatu.api.beans.MethodDesc getSuitableMethodDesc(String methodName, Object... 
args); /** * {@link jp.fieldnotes.hatunatu.api.beans.MethodDesc}があるかどうか返します。 * * @param methodName * メソッド名。{@literal null}や空文字列であってはいけません * @return {@link jp.fieldnotes.hatunatu.api.beans.MethodDesc}があるかどうか */ boolean hasMethodDesc(String methodName); /** * {@link jp.fieldnotes.hatunatu.api.beans.MethodDesc}の配列を返します。 * * @param methodName * メソッド名。{@literal null}や空文字列であってはいけません * @return {@link jp.fieldnotes.hatunatu.api.beans.MethodDesc}の配列 */ MethodDesc[] getMethodDescs(String methodName); /** * メソッド名の配列を返します。 * * @return メソッド名の配列 */ String[] getMethodNames(); }
apache-2.0
christian-posta/activemq-apollo-java-port
apollo-broker/src/main/java/org/apache/activemq/apollo/broker/web/JettyWebServer.java
16996
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.apollo.broker.web; import org.apache.activemq.apollo.broker.Broker; import org.apache.activemq.apollo.dto.BrokerDTO; import org.apache.activemq.apollo.dto.WebAdminDTO; import org.apache.activemq.apollo.util.BaseService; import org.apache.activemq.apollo.util.FileSupport; import org.apache.activemq.apollo.util.URISupport; import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.HandlerList; import org.eclipse.jetty.server.nio.SelectChannelConnector; import org.eclipse.jetty.server.ssl.SslSelectChannelConnector; import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.util.log.Slf4jLog; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.eclipse.jetty.util.thread.ExecutorThreadPool; import org.eclipse.jetty.webapp.WebAppContext; import org.fusesource.hawtdispatch.Dispatch; import org.fusesource.hawtdispatch.Task; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.net.ssl.SSLContext; import javax.servlet.*; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.net.URI; import 
java.net.URISyntaxException; import java.net.URL; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.util.*; import java.util.jar.JarEntry; import java.util.jar.JarInputStream; import static org.apache.activemq.apollo.util.FunctionUtils.stripPrefix; import static org.apache.activemq.apollo.util.FunctionUtils.stripSuffix; /** * @author <a href="http://www.christianposta.com/blog">Christian Posta</a> */ public class JettyWebServer extends BaseService implements WebServer { private Logger LOG = LoggerFactory.getLogger(getClass().getName()); private Broker broker; private Server server; private List<WebAdminDTO> webAdmins; private List<URI> uriAddresses; public JettyWebServer(Broker broker) { super(Dispatch.createQueue()); this.broker = broker; } @Override public void update(final Task onComplete) { List<WebAdminDTO> newList = broker.getConfig().web_admins; if (newList != webAdmins) { // restart to pickup the changes! stop(new Task() { @Override public void run() { start(onComplete); } }); }else { onComplete.run(); } } @Override public URI[] uris() { return (URI[]) this.uriAddresses.toArray(); } @Override protected void _start(final Task onCompleted) { Broker.BLOCKABLE_THREAD_POOL.execute(new Task() { @Override public void run() { synchronized (this) { // Explicitly set the Jetty Log impl to avoid // the NPE raised at https://issues.apache.org/jira/browse/APLO-264 try { org.eclipse.jetty.util.log.Log.setLog(new Slf4jLog()); } catch (Exception e) { // ignore this... 
} BrokerDTO config = broker.getConfig(); File webappPath = webapp(broker.getTmp()); if (webappPath == null) { LOG.warn("Administration interface cannot be started: webapp resources not found"); } else { // start up the admin interface LOG.debug("Starting administration interface"); if (broker.getTmp() != null) { try { System.setProperty("scalate.workdir", new File(broker.getTmp(), "scalate").getCanonicalPath()); } catch (IOException e) { LOG.warn("Could not set scalate workdir. Admin interface may not function properly"); } } HashMap<String, Handler> contexts = new HashMap<String, Handler>(); HashMap<String, Connector> connectors = new HashMap<String, Connector>(); webAdmins = config.web_admins; for (WebAdminDTO webAdmin : webAdmins) { String bind = webAdmin.bind == null ? "http://127.0.0.1:61680" : webAdmin.bind; URI bindUri; try { bindUri = new URI(bind); } catch (URISyntaxException e) { throw new RuntimeException(e); } String prefix = "/" + stripPrefix(bindUri.getPath(), "/"); String scheme = bindUri.getScheme(); String host = bindUri.getHost(); int port = bindUri.getPort(); Map<String, String> query = null; try { query = URISupport.parseQuery(bindUri.getQuery()); } catch (URISyntaxException e) { throw new RuntimeException(e); } String corsOrigin = query.get("cors_origin"); if (port == -1) { if (scheme.equals("http")) { port = 80; }else if (scheme.equals("https")) { port = 433; }else { throw new RuntimeException("Invalid 'web_admin' bind setting. Protocol scheme should be 'http' or 'https'"); } } // Only add the connector if has not been added yet String connectorId = scheme + "://" + host + ":" + port; if (!connectors.containsKey(connectorId)) { Connector connector = getConnector(scheme); connector.setHost(host); connector.setPort(port); connectors.put(connectorId, connector); } // only add the app context if not yet added... 
if (!contexts.containsKey(prefix)) { WebAppContext context = new WebAppContext(); context.setContextPath(prefix); try { context.setWar(webappPath.getCanonicalPath()); } catch (IOException e) { throw new RuntimeException(e); } context.setClassLoader(Broker.classLoader()); EnumSet<DispatcherType> ALL = EnumSet.allOf(DispatcherType.class); if (corsOrigin != null && !corsOrigin.trim().isEmpty()) { Set<String> origins = resolveOrigins(corsOrigin); context.addFilter(new FilterHolder(new AllowAnyOriginFilter(origins)), "/*", ALL); } context.addFilter(new FilterHolder(new Filter() { @Override public void init(FilterConfig filterConfig) throws ServletException { } @Override public void destroy() { } @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { request.setAttribute("APOLLO_BROKER", broker); chain.doFilter(request, response); } }), "/*", ALL); if (broker.getTmp() != null) { context.setTempDirectory(broker.getTmp()); } contexts.put(prefix, context); } } HandlerList contextList = new HandlerList(); for (Handler h : contexts.values()) { contextList.addHandler(h); } server = new Server(); server.setHandler(contextList); server.setConnectors((Connector[]) connectors.values().toArray()); server.setThreadPool(new ExecutorThreadPool(Broker.BLOCKABLE_THREAD_POOL)); try { server.start(); } catch (Exception e) { throw new RuntimeException(e); } Iterator<String> iter = contexts.keySet().iterator(); for (Connector c : connectors.values()) { int localPort = c.getLocalPort(); String prefix = iter.next(); String scheme = c instanceof SslSelectChannelConnector ? 
"https" : "http"; try { URI uri = new URI(scheme, null, c.getHost(), localPort, prefix, null, null); broker.getConsoleLog().info("Administration interface available at: {}", uri); uriAddresses.add(uri); } catch (URISyntaxException e) { throw new RuntimeException(e); } } } onCompleted.run(); } } }); } private Set<String> resolveOrigins(String corsOrigin) { String[] origins = corsOrigin.split(","); LinkedHashSet<String> rc = new LinkedHashSet<String>(); for (String s : origins) { rc.add(s.trim()); } return rc; } private Connector getConnector(String scheme) { if (scheme.equals("http")) { return new SelectChannelConnector(); }else { SSLContext sslContext; if (broker.getKeyStorage() != null) { String protocol = "TLS"; try { sslContext = SSLContext.getInstance(protocol); sslContext.init(broker.getKeyStorage().createKeyManagers(), broker.getKeyStorage().createTrustManagers(), null); } catch (NoSuchAlgorithmException e) { throw new RuntimeException(e); } catch (KeyManagementException e) { throw new RuntimeException(e); } } else { try { sslContext = SSLContext.getDefault(); } catch (NoSuchAlgorithmException e) { throw new RuntimeException(e); } } SslSelectChannelConnector rc = new SslSelectChannelConnector(); SslContextFactory factory = rc.getSslContextFactory(); factory.setSslContext(sslContext); factory.setWantClientAuth(true); return rc; } } private static File webapp(File tmp) { ClassLoader classLoader = JettyWebServer.class.getClassLoader(); File rc = null; if (System.getProperty("apollo.webapp") != null) { rc = new File(System.getProperty("apollo.webapp")); } else { try { // so the apollo-web module will put together this jar file by default and put in this location... 
// but it could be elsewhere on the classpath in other set ups Enumeration<URL> resources = classLoader.getResources("META-INF/services/org.apache.activemq.apollo/webapp-resources.jar"); while (resources.hasMoreElements()) { URL url = resources.nextElement(); rc = new File(tmp, "webapp-resources"); rc.mkdirs(); explodeWebappResources(rc, url); } } catch (IOException e) { // no worries, just continue on } // the war might be on the classpath... if (rc == null) { String bootClazz = "org/apache/activemq/apollo/web/Boot.class"; URL url = classLoader.getResource(bootClazz); if (rc == null) { rc = null; } else { if (url.getProtocol().equals("file")) { // we are probably being run from an IDE :) File classesDir = new File(stripSuffix(url.getFile(), "/" + bootClazz)); if (new File(classesDir, "../../src/main/webapp").isDirectory()) { rc = new File(classesDir, "../../src/main/webapp"); }else if (new File(classesDir, "../../apollo-web/src/main/webapp").isDirectory()) { rc = new File(classesDir, "../../apollo-web/src/main/webapp"); } else { rc = null; } } else { rc = null; } } } } return rc; } private static void explodeWebappResources(File rc, URL url) throws IOException { JarInputStream is = null; try { is = new JarInputStream(url.openStream()); JarEntry entry = is.getNextJarEntry(); while (entry != null) { // if we found a dir, create the dir.. if (entry.isDirectory()) { new File(rc, entry.getName()).mkdirs(); }else { // otherwise, it's a file, and we should explode it // a little nasty with all these nested try-finally... FileOutputStream fos = null; try { fos = new FileOutputStream(new File(rc, entry.getName())); FileSupport.copy(is, fos); }finally { if (fos != null) { fos.close(); } } } } }finally { if (is != null) { is.close(); } } } @Override protected void _stop(final Task onCompleted) { // jetty operations can block, so don't do them on hawtdispatch threads... 
// but we'll also need to resort to traditional mutex synchronization Broker.BLOCKABLE_THREAD_POOL.execute(new Task() { @Override public void run() { synchronized (this) { if (server != null) { try { server.stop(); } catch (Exception e) { LOG.info("Could not shutdown jetty server properly when stopping the web server", e); } server = null; uriAddresses = null; } onCompleted.run(); } } }); } @Override public String toString() { return "jetty webserver"; } }
apache-2.0
chrishumphreys/provocateur
provocateur-thirdparty/src/main/java/org/targettest/org/apache/lucene/search/QueryTermVector.java
4524
package org.targettest.org.apache.lucene.search; /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import org.targettest.org.apache.lucene.analysis.Analyzer; import org.targettest.org.apache.lucene.analysis.TokenStream; import org.targettest.org.apache.lucene.analysis.tokenattributes.TermAttribute; import org.targettest.org.apache.lucene.index.TermFreqVector; /** * * **/ public class QueryTermVector implements TermFreqVector { private String [] terms = new String[0]; private int [] termFreqs = new int[0]; public String getField() { return null; } /** * * @param queryTerms The original list of terms from the query, can contain duplicates */ public QueryTermVector(String [] queryTerms) { processTerms(queryTerms); } public QueryTermVector(String queryString, Analyzer analyzer) { if (analyzer != null) { TokenStream stream = analyzer.tokenStream("", new StringReader(queryString)); if (stream != null) { List<String> terms = new ArrayList<String>(); try { boolean hasMoreTokens = false; stream.reset(); TermAttribute termAtt = stream.addAttribute(TermAttribute.class); 
hasMoreTokens = stream.incrementToken(); while (hasMoreTokens) { terms.add(termAtt.term()); hasMoreTokens = stream.incrementToken(); } processTerms(terms.toArray(new String[terms.size()])); } catch (IOException e) { } } } } private void processTerms(String[] queryTerms) { if (queryTerms != null) { Arrays.sort(queryTerms); Map<String,Integer> tmpSet = new HashMap<String,Integer>(queryTerms.length); //filter out duplicates List<String> tmpList = new ArrayList<String>(queryTerms.length); List<Integer> tmpFreqs = new ArrayList<Integer>(queryTerms.length); int j = 0; for (int i = 0; i < queryTerms.length; i++) { String term = queryTerms[i]; Integer position = tmpSet.get(term); if (position == null) { tmpSet.put(term, Integer.valueOf(j++)); tmpList.add(term); tmpFreqs.add(Integer.valueOf(1)); } else { Integer integer = tmpFreqs.get(position.intValue()); tmpFreqs.set(position.intValue(), Integer.valueOf(integer.intValue() + 1)); } } terms = tmpList.toArray(terms); //termFreqs = (int[])tmpFreqs.toArray(termFreqs); termFreqs = new int[tmpFreqs.size()]; int i = 0; for (final Integer integer : tmpFreqs) { termFreqs[i++] = integer.intValue(); } } } @Override public final String toString() { StringBuilder sb = new StringBuilder(); sb.append('{'); for (int i=0; i<terms.length; i++) { if (i>0) sb.append(", "); sb.append(terms[i]).append('/').append(termFreqs[i]); } sb.append('}'); return sb.toString(); } public int size() { return terms.length; } public String[] getTerms() { return terms; } public int[] getTermFrequencies() { return termFreqs; } public int indexOf(String term) { int res = Arrays.binarySearch(terms, term); return res >= 0 ? res : -1; } public int[] indexesOf(String[] terms, int start, int len) { int res[] = new int[len]; for (int i=0; i < len; i++) { res[i] = indexOf(terms[i]); } return res; } }
apache-2.0
willemsrb/sonar-rci-plugin
src/test/java/nl/futureedge/sonar/plugin/rci/RciPropertiesTest.java
3489
package nl.futureedge.sonar.plugin.rci;

import org.junit.Assert;
import org.junit.Test;
import org.sonar.api.ce.measure.test.TestSettings;

/**
 * Tests for {@link RciProperties}: parsing of the weights and ratings
 * settings, including short lists, empty strings and missing values.
 */
public class RciPropertiesTest {

    /** Parses the given weights setting and asserts all five severity weights. */
    private static void assertWeights(final TestSettings settings, final String value, final int blocker,
            final int critical, final int major, final int minor, final int info) {
        settings.setValue(RciProperties.WEIGTHS_KEY, value);
        final RciWeights weights = RciProperties.getWeights(settings);
        Assert.assertEquals(blocker, weights.getBlocker());
        Assert.assertEquals(critical, weights.getCritical());
        Assert.assertEquals(major, weights.getMajor());
        Assert.assertEquals(minor, weights.getMinor());
        Assert.assertEquals(info, weights.getInfo());
    }

    /** Parses the given ratings setting and asserts the rating at five sample values. */
    private static void assertRatings(final TestSettings settings, final String value, final int at45,
            final int at35, final int at25, final int at15, final int at5) {
        settings.setValue(RciProperties.RATINGS_KEY, value);
        final RciRating rating = RciProperties.getRating(settings);
        Assert.assertEquals(at45, rating.getRating(45));
        Assert.assertEquals(at35, rating.getRating(35));
        Assert.assertEquals(at25, rating.getRating(25));
        Assert.assertEquals(at15, rating.getRating(15));
        Assert.assertEquals(at5, rating.getRating(5));
    }

    @Test
    public void testWeigths() {
        final TestSettings settings = new TestSettings();
        // Extra values beyond the five severities are ignored.
        assertWeights(settings, "40,30,20,10,5,3", 40, 30, 20, 10, 5);
        // Missing trailing values default to 0.
        assertWeights(settings, "40,30", 40, 30, 0, 0, 0);
        // Empty or absent settings yield all-zero weights.
        assertWeights(settings, "", 0, 0, 0, 0, 0);
        assertWeights(settings, null, 0, 0, 0, 0, 0);
    }

    @Test
    public void testRatings() {
        final TestSettings settings = new TestSettings();
        // Extra thresholds beyond the configured list still map into bands.
        assertRatings(settings, "40,30,20,10,5,3", 1, 2, 3, 4, 5);
        // Short lists cap the rating at the last configured band.
        assertRatings(settings, "40,30", 1, 2, 3, 3, 3);
        // Empty or absent settings always rate 1.
        assertRatings(settings, "", 1, 1, 1, 1, 1);
        assertRatings(settings, null, 1, 1, 1, 1, 1);
    }
}
apache-2.0
stevenhva/InfoLearn_OpenOLAT
src/main/java/org/olat/ims/qti/editor/ChoiceItemController.java
11733
/** * OLAT - Online Learning and Training<br> * http://www.olat.org * <p> * Licensed under the Apache License, Version 2.0 (the "License"); <br> * you may not use this file except in compliance with the License.<br> * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing,<br> * software distributed under the License is distributed on an "AS IS" BASIS, <br> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br> * See the License for the specific language governing permissions and <br> * limitations under the License. * <p> * Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br> * University of Zurich, Switzerland. * <hr> * <a href="http://www.openolat.org"> * OpenOLAT - Online Learning and Training</a><br> * This file has been modified by the OpenOLAT community. Changes are licensed * under the Apache 2.0 license as the original file. */ package org.olat.ims.qti.editor; import java.util.List; import org.olat.core.gui.UserRequest; import org.olat.core.gui.components.Component; import org.olat.core.gui.components.velocity.VelocityContainer; import org.olat.core.gui.control.Controller; import org.olat.core.gui.control.ControllerEventListener; import org.olat.core.gui.control.DefaultController; import org.olat.core.gui.control.Event; import org.olat.core.gui.control.WindowControl; import org.olat.core.gui.control.generic.closablewrapper.CloseableModalController; import org.olat.core.gui.control.generic.dialog.DialogController; import org.olat.core.gui.translator.Translator; import org.olat.core.util.Util; import org.olat.ims.qti.editor.beecom.objects.ChoiceQuestion; import org.olat.ims.qti.editor.beecom.objects.ChoiceResponse; import org.olat.ims.qti.editor.beecom.objects.Item; import org.olat.ims.qti.editor.beecom.objects.Material; import org.olat.ims.qti.editor.beecom.objects.Mattext; import 
org.olat.ims.qti.editor.beecom.objects.Question;
import org.olat.ims.qti.editor.beecom.objects.Response;

/**
 * Editor tab controller for choice-based QTI items (single choice, multiple
 * choice and Kprim). Renders the item in a Velocity view, applies edit
 * commands coming from that view (reorder / edit / add / delete responses,
 * submit scoring) to the underlying {@code Item}, and serializes the QTI
 * document after every change.
 *
 * Initial Date: Oct 21, 2004 <br>
 *
 * @author mike
 */
public class ChoiceItemController extends DefaultController implements ControllerEventListener {
    /*
     * Logging, Velocity
     */
    private static final String PACKAGE = Util.getPackageName(ChoiceItemController.class);
    private static final String VC_ROOT = Util.getPackageVelocityRoot(PACKAGE);

    private VelocityContainer main;
    private Translator trnsltr;
    private Item item;
    private QTIEditorPackage qtiPackage;
    private DialogController delYesNoCtrl;
    private boolean restrictedEdit;
    // Material currently open in the modal material form. Exactly one of
    // editQuestion/editResponse is non-null while the form is shown.
    private Material editQuestion;
    private Response editResponse;
    private CloseableModalController dialogCtr;
    private MaterialFormController matFormCtr;

    /**
     * Creates the controller and selects the Velocity page matching the
     * question type (SC page is the default; MC and Kprim override it).
     *
     * @param item the QTI item to edit
     * @param qtiPackage owning package; provides media base URL and document serialization
     * @param trnsltr translator for UI texts
     * @param wControl window control
     * @param restrictedEdit true when only limited editing is allowed
     */
    public ChoiceItemController(Item item, QTIEditorPackage qtiPackage, Translator trnsltr, WindowControl wControl, boolean restrictedEdit) {
        super(wControl);
        this.restrictedEdit = restrictedEdit;
        this.item = item;
        this.qtiPackage = qtiPackage;
        this.trnsltr = trnsltr;
        main = new VelocityContainer("scitem", VC_ROOT + "/tab_scItem.html", trnsltr, this);
        main.contextPut("question", item.getQuestion());
        main.contextPut("isSurveyMode", qtiPackage.getQTIDocument().isSurvey() ? "true" : "false");
        main.contextPut("isRestrictedEdit", restrictedEdit ? Boolean.TRUE : Boolean.FALSE);
        main.contextPut("mediaBaseURL", qtiPackage.getMediaBaseURL());
        if (item.getQuestion().getType() == Question.TYPE_MC) main.setPage(VC_ROOT + "/tab_mcItem.html");
        else if (item.getQuestion().getType() == Question.TYPE_KPRIM) main.setPage(VC_ROOT + "/tab_kprimItem.html");
        setInitialComponent(main);
    }

    /**
     * Dispatches commands fired by the Velocity view and serializes the QTI
     * document after the command has been applied.
     *
     * @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest,
     *      org.olat.core.gui.components.Component, org.olat.core.gui.control.Event)
     */
    public void event(UserRequest ureq, Component source, Event event) {
        if (source == main) {
            // olat::: as: improve easy fix since almost all operations change the main vc.
            main.setDirty(true);
            String cmd = event.getCommand();
            // "posid" is the index of the response row the command refers to
            String sPosid = ureq.getParameter("posid");
            int posid = 0;
            if (sPosid != null) posid = Integer.parseInt(sPosid);
            if (cmd.equals("up")) {
                // move the selected response one position up
                if (posid > 0) {
                    List<Response> elements = item.getQuestion().getResponses();
                    Response obj = elements.remove(posid);
                    elements.add(posid - 1, obj);
                }
            } else if (cmd.equals("down")) {
                // move the selected response one position down
                List<Response> elements = item.getQuestion().getResponses();
                if (posid < elements.size() - 1) {
                    Response obj = elements.remove(posid);
                    elements.add(posid + 1, obj);
                }
            } else if (cmd.equals("editq")) {
                // open the question material in the modal material form
                editQuestion = item.getQuestion().getQuestion();
                displayMaterialFormController(ureq, editQuestion, restrictedEdit);
            } else if (cmd.equals("editr")) {
                // open the selected response's material in the modal material form
                editResponse = ((Response) item.getQuestion().getResponses().get(posid));
                Material responseMat = editResponse.getContent();
                displayMaterialFormController(ureq, responseMat, restrictedEdit);
            } else if (cmd.equals("addchoice")) {
                ChoiceQuestion question = (ChoiceQuestion) item.getQuestion();
                List<Response> choices = question.getResponses();
                ChoiceResponse newChoice = new ChoiceResponse();
                newChoice.getContent().add(new Mattext(trnsltr.translate("newresponsetext")));
                newChoice.setCorrect(false);
                newChoice.setPoints(-1f); // default value is negative to make sure
                                          // people understand the meaning of this value
                choices.add(newChoice);
            } else if (cmd.equals("del")) {
                // confirmation dialog; the response index travels as the user object
                delYesNoCtrl = DialogController.createYesNoDialogController(getWindowControl(), ureq.getLocale(), trnsltr.translate("confirm.delete.element"), this, new Integer(posid));
                getWindowControl().pushAsModalDialog(delYesNoCtrl.getInitialComponent());
            } else if (cmd.equals("ssc")) { // submit sc
                ChoiceQuestion question = (ChoiceQuestion) item.getQuestion();
                List<Response> q_choices = question.getResponses();
                String correctChoice = ureq.getParameter("correctChoice");
                for (int i = 0; i < q_choices.size(); i++) {
                    ChoiceResponse choice = (ChoiceResponse) q_choices.get(i);
                    if (correctChoice != null && correctChoice.equals("value_q" + i)) {
                        choice.setCorrect(true);
                    } else {
                        choice.setCorrect(false);
                    }
                    choice.setPoints(ureq.getParameter("points_q" + i));
                }
                String score = ureq.getParameter("single_score");
                float sc;
                try {
                    sc = Float.parseFloat(score);
                    if (sc <= 0.0001f) {
                        getWindowControl().setWarning(trnsltr.translate("editor.info.mc.zero.points"));
                    }
                } catch (Exception e) {
                    // unparseable score: warn and fall back to 1 point
                    getWindowControl().setWarning(trnsltr.translate("editor.info.mc.zero.points"));
                    sc = 1.0f;
                }
                question.setSingleCorrectScore(sc);
            } else if (cmd.equals("smc")) { // submit mc
                ChoiceQuestion question = (ChoiceQuestion) item.getQuestion();
                List<Response> choices = question.getResponses();
                boolean hasZeroPointChoice = false;
                for (int i = 0; i < choices.size(); i++) {
                    ChoiceResponse choice = (ChoiceResponse) choices.get(i);
                    if (ureq.getParameter("value_q" + i) != null && ureq.getParameter("value_q" + i).equalsIgnoreCase("true")) {
                        choice.setCorrect(true);
                    } else {
                        choice.setCorrect(false);
                    }
                    choice.setPoints(ureq.getParameter("points_q" + i));
                    if (choice.getPoints() == 0) hasZeroPointChoice = true;
                }
                if (hasZeroPointChoice && !question.isSingleCorrect()) {
                    getWindowControl().setInfo(trnsltr.translate("editor.info.mc.zero.points"));
                }
                // set min/max before single_correct score
                // will be corrected by single_correct score afterwards
                question.setMinValue(ureq.getParameter("min_value"));
                question.setMaxValue(ureq.getParameter("max_value"));
                question.setSingleCorrect(ureq.getParameter("valuation_method").equals("single"));
                if (question.isSingleCorrect()) question.setSingleCorrectScore(ureq.getParameter("single_score"));
                else question.setSingleCorrectScore(0);
            } else if (cmd.equals("skprim")) { // submit kprim
                float maxValue = 0;
                try {
                    maxValue = Float.parseFloat(ureq.getParameter("max_value"));
                } catch (NumberFormatException e) {
                    // invalid input, set maxValue 0
                }
                ChoiceQuestion question = (ChoiceQuestion) item.getQuestion();
                List<Response> q_choices = question.getResponses();
                for (int i = 0; i < q_choices.size(); i++) {
                    String correctChoice = ureq.getParameter("correctChoice_q" + i);
                    ChoiceResponse choice = (ChoiceResponse) q_choices.get(i);
                    // each of the 4 Kprim statements is worth a quarter of the max score
                    choice.setPoints(maxValue / 4);
                    if ("correct".equals(correctChoice)) {
                        choice.setCorrect(true);
                    } else {
                        choice.setCorrect(false);
                    }
                }
                question.setMaxValue(maxValue);
            }
            qtiPackage.serializeQTIDocument();
        }
    }

    /**
     * Handles events from child controllers: the material form (matFormCtr),
     * its modal wrapper (dialogCtr) and the delete confirmation (delYesNoCtrl).
     *
     * @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest,
     *      org.olat.core.gui.control.Controller, org.olat.core.gui.control.Event)
     */
    public void event(UserRequest ureq, Controller controller, Event event) {
        if (controller == matFormCtr) {
            if (event instanceof QTIObjectBeforeChangeEvent) {
                // material is about to change: forward the old/new state upstream
                QTIObjectBeforeChangeEvent qobce = (QTIObjectBeforeChangeEvent) event;
                NodeBeforeChangeEvent nce = new NodeBeforeChangeEvent();
                if (editQuestion != null) {
                    nce.setNewQuestionMaterial(qobce.getContent());
                    nce.setItemIdent(item.getIdent());
                    nce.setQuestionIdent(editQuestion.getId());
                    nce.setMatIdent(qobce.getId());
                    fireEvent(ureq, nce);
                } else if (editResponse != null) {
                    nce.setNewResponseMaterial(qobce.getContent());
                    nce.setItemIdent(item.getIdent());
                    nce.setResponseIdent(editResponse.getIdent());
                    nce.setMatIdent(qobce.getId());
                    fireEvent(ureq, nce);
                }
            } else if (event == Event.DONE_EVENT || event == Event.CANCELLED_EVENT) {
                if (event == Event.DONE_EVENT) {
                    // serialize document
                    qtiPackage.serializeQTIDocument();
                    // force rerendering of view
                    main.setDirty(true);
                    editQuestion = null;
                    editResponse = null;
                }
                // dispose controllers
                dialogCtr.deactivate();
                dialogCtr.dispose();
                dialogCtr = null;
                matFormCtr.dispose();
                matFormCtr = null;
            }
        } else if (controller == dialogCtr) {
            if (event == Event.CANCELLED_EVENT) {
                dialogCtr.dispose();
                dialogCtr = null;
                matFormCtr.dispose();
                matFormCtr = null;
            }
        } else if (controller == delYesNoCtrl) {
            getWindowControl().pop();
            if (event == DialogController.EVENT_FIRSTBUTTON) {
                // deletion confirmed; the user object holds the response index
                item.getQuestion().getResponses().remove(((Integer) delYesNoCtrl.getUserObject()).intValue());
                main.setDirty(true);//repaint
            }
        }
    }

    /**
     * Displays the MaterialFormController in a closable box.
     *
     * @param ureq the user request
     * @param mat the material to edit
     * @param isRestrictedEditMode whether the form opens in restricted-edit mode
     */
    private void displayMaterialFormController(UserRequest ureq, Material mat, boolean isRestrictedEditMode) {
        matFormCtr = new MaterialFormController(ureq, getWindowControl(), mat, qtiPackage, isRestrictedEditMode);
        matFormCtr.addControllerListener(this);
        dialogCtr = new CloseableModalController(getWindowControl(), "close", matFormCtr.getInitialComponent());
        matFormCtr.addControllerListener(dialogCtr);
        dialogCtr.activate();
    }

    /**
     * Releases child controllers and view references.
     *
     * @see org.olat.core.gui.control.DefaultController#doDispose(boolean)
     */
    protected void doDispose() {
        main = null;
        item = null;
        trnsltr = null;
        if (dialogCtr != null) {
            dialogCtr.dispose();
            dialogCtr = null;
        }
        if (matFormCtr != null) {
            matFormCtr.dispose();
            matFormCtr = null;
        }
    }
}
apache-2.0
bshp/midPoint
infra/prism-api/src/main/java/com/evolveum/midpoint/prism/ItemFactory.java
2837
/*
 * Copyright (c) 2010-2018 Evolveum and contributors
 *
 * This work is dual-licensed under the Apache License 2.0
 * and European Union Public License. See LICENSE file for details.
 */
package com.evolveum.midpoint.prism;

import com.evolveum.midpoint.prism.path.ItemName;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.polystring.PolyString;
import com.evolveum.midpoint.prism.xnode.XNode;
import com.evolveum.midpoint.util.annotation.Experimental;
import com.evolveum.midpoint.util.exception.SchemaException;

import javax.xml.namespace.QName;

/**
 * Factory for items (property, reference, container, object) and item values.
 *
 * Eliminates the need of calls like "new PrismPropertyValue(...)" in midPoint 3.x.
 */
public interface ItemFactory {

    /** Creates a prism value wrapping the given real (Java) value. */
    PrismValue createValue(Object realValue);

    /** Creates an empty property with the given item name and no definition attached. */
    <T> PrismProperty<T> createProperty(QName itemName);

    /** Creates an empty property with the given item name and definition. */
    <T> PrismProperty<T> createProperty(QName itemName, PrismPropertyDefinition<T> definition);

    /** Creates an empty property value (no content). */
    <T> PrismPropertyValue<T> createPropertyValue();

    /** Creates a property value holding the given parsed content. */
    <T> PrismPropertyValue<T> createPropertyValue(T content);

    /** Creates a property value backed by raw, not-yet-parsed XNode content. */
    <T> PrismPropertyValue<T> createPropertyValue(XNode rawContent);

    /** Creates a property value carrying origin metadata (where the value came from). */
    <T> PrismPropertyValue<T> createPropertyValue(T value, OriginType originType, Objectable originObject);

    /** Creates an empty reference item with the given name. */
    PrismReference createReference(QName name);

    /** Creates an empty reference item with the given name and definition. */
    PrismReference createReference(QName name, PrismReferenceDefinition definition);

    /** Creates an empty reference value. */
    PrismReferenceValue createReferenceValue();

    /** Creates a reference value pointing at the given (in-memory) object. */
    PrismReferenceValue createReferenceValue(PrismObject<?> target);

    /** Creates a reference value pointing at the object with the given OID. */
    PrismReferenceValue createReferenceValue(String targetOid);

    /** Creates a reference value with origin metadata. */
    PrismReferenceValue createReferenceValue(String oid, OriginType originType, Objectable originObject);

    /** Creates a reference value with an explicit target type. */
    PrismReferenceValue createReferenceValue(String oid, QName targetType);

    // NOTE(review): raw return type; presumably kept for source compatibility with
    // pre-generics callers — confirm before parameterizing.
    PrismContainer createContainer(QName name);

    /** Creates an empty container with the given name and definition. */
    <C extends Containerable> PrismContainer<C> createContainer(QName name, PrismContainerDefinition<C> definition);

    /** Creates an empty prism object with the given name and definition. */
    <O extends Objectable> PrismObject<O> createObject(QName name, PrismObjectDefinition<O> definition);

    // TODO is this needed?
    <O extends Objectable> PrismObjectValue<O> createObjectValue(O objectable);

    // TODO is this needed?
    <C extends Containerable> PrismContainerValue<C> createContainerValue(C containerable);

    /** Creates an empty container value. */
    <C extends Containerable> PrismContainerValue<C> createContainerValue();

    /**
     * Creates a dummy container with a fixed path.
     * This container is good for storing values, e.g. in case of delta computations to get
     * preview of the new item.
     * But such container cannot be used to fit into any prism structure (cannot set parent).
     */
    @Experimental
    <V extends PrismValue,D extends ItemDefinition> Item<V,D> createDummyItem(Item<V,D> itemOld, D definition, ItemPath path) throws SchemaException;
}
apache-2.0
JeffreyWei/db_monitor
src/main/java/com/founder/action/Hello.java
991
package com.founder.action;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import java.io.IOException;
import java.io.PrintWriter;

/**
 * Demo servlet that renders a static HTML greeting page.
 *
 * GET and POST are handled identically.
 *
 * @author weij
 */
public class Hello extends javax.servlet.http.HttpServlet {

    // HttpServlet is Serializable; declare the version id explicitly.
    private static final long serialVersionUID = 1L;

    public Hello() {
        super();
    }

    /** GET simply delegates to {@link #doPost}. */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        doPost(request, response);
    }

    /**
     * Writes the static greeting page.
     *
     * @param request  the incoming request (unused)
     * @param response the response the HTML page is written to
     * @throws IOException if the response writer cannot be obtained or written
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        // Fix: declare the charset explicitly instead of relying on the
        // container's platform default for a bare "text/html" response.
        response.setContentType("text/html;charset=UTF-8");
        PrintWriter writer = response.getWriter();
        writer.println("<html>");
        writer.println("<head><title>Hello Ajava.org</title></head>");
        writer.println("<body>Hello mark! How are you doing?</body>");
        writer.println("</html>");
        writer.close();
    }
}
apache-2.0
cordmata/handle-service
src/main/java/edu/asu/lib/handle/HandleClient.java
1249
package edu.asu.lib.handle;

import java.net.URL;

import net.handle.hdllib.HandleException;

/**
 * Minimal CRUD-style client abstraction over a Handle (hdl) resolution service.
 * Handles are identified by strings of the form "prefix/suffix".
 */
public interface HandleClient {

    /**
     * Get the first HandleValue with type URL and return a URL Object.
     *
     * @param handle - The handle to lookup, in the form of "prefix/suffix".
     * @return The target URL
     * @throws HandleException
     */
    URL resolve(String handle) throws HandleException;

    /**
     * Create a new Handle with the supplied target URL.
     *
     * @param handle - The Handle to create.
     * @param target - The URL target for the new Handle.
     * @throws HandleException
     */
    void create(String handle, URL target) throws HandleException;

    /**
     * Update the Handle with the new supplied target URL.
     *
     * @param handle - The Handle to update.
     * @param target - URL to replace the existing target.
     * @throws HandleException
     */
    void update(String handle, URL target) throws HandleException;

    /**
     * Purge the supplied Handle.
     *
     * @param handle - The Handle to purge.
     * @throws HandleException
     */
    void delete(String handle) throws HandleException;
}
apache-2.0
FJplant/AntIDE
src/com/antsoft/ant/property/LibraryListModel.java
1888
/*
 * $Header: /usr/cvsroot/AntIDE/source/com/antsoft/ant/property/LibraryListModel.java,v 1.3 1999/07/22 03:42:03 multipia Exp $
 * Ant ( JDK wrapper Java IDE )
 * Version 1.0
 * Copyright (c) 1998-1999 Antsoft Co. All rights reserved.
 * This program and source file is protected by Korea and international
 * Copyright laws.
 *
 * $Revision: 1.3 $
 */
package com.antsoft.ant.property;

import javax.swing.AbstractListModel;

/**
 * Library ListModel class.
 *
 * Swing list model backed by a {@code LibInfoContainer}. Mutating methods fire
 * the corresponding ListDataEvents so attached views stay in sync with the
 * container.
 */
public class LibraryListModel extends AbstractListModel{

	private LibInfoContainer libInfos;

	/** Creates a model over a new, empty container. */
	public LibraryListModel(){
		this.libInfos = new LibInfoContainer();
	}

	/** Creates a model over an existing container. */
	public LibraryListModel(LibInfoContainer infos){
		this.libInfos = infos;
	}

	public int getSize(){
		return libInfos.getSize();
	}

	public LibInfoContainer getLibInfoContainer(){
		return this.libInfos;
	}

	public Object getElementAt(int index){
		return libInfos.getLibraryInfo(index);
	}

	/**
	 * Replaces the element at {@code index} and notifies listeners.
	 */
	public void setElementAt(Object libInfo, int index){
		libInfos.setLibraryInfo(libInfo, index);
		// Fix: previously no event was fired here, so views showed stale data
		// (addElement/removeElementAt already fired events).
		fireContentsChanged(this, index, index);
	}

	public void addElement(Object libInfo){
		int index = libInfos.getSize();
		libInfos.addLibraryInfo(libInfo);
		fireIntervalAdded(this, index, index);
	}

	public void setLibInfoContainer(LibInfoContainer libInfos){
		this.libInfos = libInfos;
		// NOTE(review): swapping the whole container does not notify listeners;
		// callers appear to rebuild the view themselves — confirm before changing.
	}

	/**
	 * Removes the first occurrence of {@code obj} and notifies listeners.
	 */
	public void removeElement(Object obj){
		// Fix: route through index-based removal so fireIntervalRemoved runs;
		// previously this removed silently and views kept the deleted row.
		int index = libInfos.indexOf(obj);
		if (index >= 0) {
			removeElementAt(index);
		}
	}

	public void removeElementAt(int index){
		libInfos.removeLibraryInfo(index);
		fireIntervalRemoved(this, index, index);
	}

	/**
	 * Notifies listeners that the element at {@code index} changed in place.
	 */
	public void fireUpdate(Object updatedLibInfo, int index){
		// Fix: the ListDataEvent source must be this model, not the changed
		// element — listeners commonly cast getSource() to the ListModel.
		fireContentsChanged(this, index, index);
	}

	public int indexOf(Object obj){
		return libInfos.indexOf(obj);
	}
}
apache-2.0
kevinjava/pattern
src/com/github/kevinjava/pattern/behavior/interpreter/SymbolExpression.java
269
package com.github.kevinjava.pattern.behavior.interpreter;

/**
 * Base class for binary (two-operand) expressions in the interpreter pattern.
 * Concrete symbol expressions (e.g. operators) supply the evaluation logic;
 * this class only stores the operand sub-expressions.
 */
public abstract class SymbolExpression extends Expression {

	// Left operand sub-expression.
	Expression left;
	// Right operand sub-expression.
	Expression right;

	/**
	 * @param left  the left operand
	 * @param right the right operand
	 */
	public SymbolExpression(Expression left, Expression right) {
		this.right = right;
		this.left = left;
	}
}
apache-2.0
srose/keycloak
quarkus/tests/integration/src/main/java/org/keycloak/it/utils/KeycloakDistribution.java
1173
package org.keycloak.it.utils;

import java.util.ArrayList;
import java.util.List;

import static org.keycloak.quarkus.runtime.Environment.LAUNCH_MODE;

/**
 * Abstraction over a running Keycloak distribution used by integration tests:
 * lifecycle control, captured process output, and CLI argument assembly.
 */
public interface KeycloakDistribution {

    void start(List<String> arguments);

    void stop();

    List<String> getOutputStream();

    List<String> getErrorStream();

    int getExitCode();

    boolean isDebug();

    boolean isManualStop();

    /**
     * Assembles the full kc.sh command line for the given user arguments,
     * prepending the debug flag and the test launch-mode system property
     * as configured by {@link #isDebug()} / {@link #isManualStop()}.
     */
    default String[] getCliArgs(List<String> arguments) {
        List<String> cli = new ArrayList<>();
        cli.add("./kc.sh");
        if (isDebug()) {
            cli.add("--debug");
        }
        if (!isManualStop()) {
            cli.add("-D" + LAUNCH_MODE + "=test");
        }
        cli.addAll(arguments);
        return cli.toArray(new String[0]);
    }

    /** Optional operation; implementations that manage a config file override this. */
    default void setQuarkusProperty(String key, String value) {
        throw new RuntimeException("Not implemented");
    }

    /** Optional operation; implementations that manage a config file override this. */
    default void setProperty(String key, String value) {
        throw new RuntimeException("Not implemented");
    }

    /** Optional operation; implementations that manage a config file override this. */
    default void deleteQuarkusProperties() {
        throw new RuntimeException("Not implemented");
    }
}
apache-2.0
spinnaker/clouddriver
clouddriver-aws/src/main/java/com/netflix/spinnaker/clouddriver/aws/deploy/asg/asgbuilders/AsgBuilder.java
17002
/*
 * Copyright 2021 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.netflix.spinnaker.clouddriver.aws.deploy.asg.asgbuilders;

import com.amazonaws.services.autoscaling.AmazonAutoScaling;
import com.amazonaws.services.autoscaling.model.AlreadyExistsException;
import com.amazonaws.services.autoscaling.model.AutoScalingGroup;
import com.amazonaws.services.autoscaling.model.CreateAutoScalingGroupRequest;
import com.amazonaws.services.autoscaling.model.DescribeAutoScalingGroupsRequest;
import com.amazonaws.services.autoscaling.model.DescribeAutoScalingGroupsResult;
import com.amazonaws.services.autoscaling.model.EnableMetricsCollectionRequest;
import com.amazonaws.services.autoscaling.model.SuspendProcessesRequest;
import com.amazonaws.services.autoscaling.model.Tag;
import com.amazonaws.services.autoscaling.model.UpdateAutoScalingGroupRequest;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.model.DescribeSubnetsResult;
import com.amazonaws.services.ec2.model.Subnet;
import com.google.common.collect.ImmutableMap;
import com.netflix.spinnaker.clouddriver.aws.deploy.asg.AsgLifecycleHookWorker;
import com.netflix.spinnaker.clouddriver.aws.deploy.asg.AutoScalingWorker.AsgConfiguration;
import com.netflix.spinnaker.clouddriver.aws.model.SubnetData;
import com.netflix.spinnaker.clouddriver.aws.model.SubnetTarget;
import com.netflix.spinnaker.clouddriver.data.task.Task;
import com.netflix.spinnaker.kork.core.RetrySupport;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;

/**
 * A builder used to build an AWS Autoscaling group.
 *
 * Subclasses supply the launch configuration / launch template portion of the
 * request via {@link #buildRequest}; this base class handles everything else:
 * request assembly, idempotent creation, lifecycle hooks, suspended processes,
 * metrics collection and the final size update. AWS calls are wrapped in
 * {@link RetrySupport} (10 attempts, 1s delay).
 */
@Slf4j
public abstract class AsgBuilder {
  private final RetrySupport retrySupport = new RetrySupport();

  private AmazonAutoScaling autoScaling;
  private AmazonEC2 ec2;
  private AsgLifecycleHookWorker asgLifecycleHookWorker;

  AsgBuilder(
      AmazonAutoScaling autoScaling, AmazonEC2 ec2, AsgLifecycleHookWorker asgLifecycleHookWorker) {
    this.autoScaling = autoScaling;
    this.ec2 = ec2;
    this.asgLifecycleHookWorker = asgLifecycleHookWorker;
  }

  /**
   * Abstract method to build a CreateAutoScalingGroupRequest given the input parameters in form of
   * AsgConfiguration.
   *
   * @return the CreateAutoScalingGroupRequest built
   */
  protected abstract CreateAutoScalingGroupRequest buildRequest(
      Task task, String taskPhase, String asgName, AsgConfiguration cfg);

  /**
   * Build and launch an ASG.
   *
   * @return the ASG name
   */
  public String build(Task task, String taskPhase, String asgName, AsgConfiguration cfg) {
    return createAsg(task, taskPhase, buildRequest(task, taskPhase, asgName, cfg), cfg);
  }

  /**
   * Build partial CreateAutoScalingGroupRequest. All parameters except launchConfiguration /
   * launchTemplate are configured.
   *
   * @return CreateAutoScalingGroupRequest with all but 1 parameter configured
   */
  protected CreateAutoScalingGroupRequest buildPartialRequest(
      Task task, String taskPhase, String name, AsgConfiguration cfg) {
    CreateAutoScalingGroupRequest request =
        new CreateAutoScalingGroupRequest()
            .withAutoScalingGroupName(name)
            .withMinSize(cfg.getMinInstances())
            .withMaxSize(cfg.getMaxInstances())
            .withDesiredCapacity(cfg.getDesiredInstances())
            .withLoadBalancerNames(cfg.getClassicLoadBalancers())
            .withTargetGroupARNs(cfg.getTargetGroupArns())
            .withDefaultCooldown(cfg.getCooldown())
            .withHealthCheckGracePeriod(cfg.getHealthCheckGracePeriod())
            .withHealthCheckType(cfg.getHealthCheckType())
            .withTerminationPolicies(cfg.getTerminationPolicies());

    // all configured tags propagate to instances launched by the ASG
    if (cfg.getTags() != null && !cfg.getTags().isEmpty()) {
      task.updateStatus(taskPhase, "Adding tags for " + name);
      cfg.getTags().entrySet().stream()
          .forEach(
              e ->
                  request.withTags(
                      new Tag()
                          .withKey(e.getKey())
                          .withValue(e.getValue())
                          .withPropagateAtLaunch(true)));
    }

    // if we have explicitly specified subnetIds, don't require that they are tagged with a
    // subnetType/purpose
    boolean filterForSubnetPurposeTags = cfg.getSubnetIds() == null || cfg.getSubnetIds().isEmpty();

    // favor subnetIds over availability zones
    final String subnetIds =
        String.join(
            ",",
            getSubnetIds(
                getSubnets(
                    filterForSubnetPurposeTags, cfg.getSubnetType(), cfg.getAvailabilityZones()),
                cfg.getSubnetIds(),
                cfg.getAvailabilityZones()));

    List<Subnet> subnets = getSubnets(true, cfg.getSubnetType(), cfg.getAvailabilityZones());
    if (StringUtils.isNotEmpty(subnetIds)) {
      task.updateStatus(taskPhase, " > Deploying to subnetIds: " + subnetIds);
      request.withVPCZoneIdentifier(subnetIds);
    } else if (StringUtils.isNotEmpty(cfg.getSubnetType())
        && (subnets == null || subnets.isEmpty())) {
      // a subnet purpose was requested but nothing matched it — fail loudly
      throw new RuntimeException(
          String.format(
              "No suitable subnet was found for internal subnet purpose '%s'!",
              cfg.getSubnetType()));
    } else {
      task.updateStatus(taskPhase, "Deploying to availabilityZones: " + cfg.getAvailabilityZones());
      request.withAvailabilityZones(cfg.getAvailabilityZones());
    }

    // configure capacity rebalance
    if (cfg.getCapacityRebalance() != null) {
      task.updateStatus(
          taskPhase, "Setting capacity rebalance to " + cfg.getCapacityRebalance() + " for " + name);
      request.withCapacityRebalance(cfg.getCapacityRebalance());
    }

    return request;
  }

  /**
   * Creates the ASG described by {@code request}, tolerating a pre-existing group that already
   * matches the desired state, then attaches lifecycle hooks, suspends processes, enables metrics
   * collection and applies the final min/max/desired sizes. Each AWS call is retried.
   *
   * @return the ASG name
   */
  private String createAsg(
      Task task, String taskPhase, CreateAutoScalingGroupRequest request, AsgConfiguration cfg) {
    final String asgName = request.getAutoScalingGroupName();

    // create ASG
    final RuntimeException ex =
        retrySupport.retry(
            () -> {
              try {
                autoScaling.createAutoScalingGroup(request);
                return null;
              } catch (AlreadyExistsException e) {
                if (!shouldProceedWithExistingState(
                    autoScaling, asgName, request, task, taskPhase)) {
                  return e;
                }
                log.debug("Determined pre-existing ASG is desired state, continuing...", e);
                return null;
              }
            },
            10,
            1000,
            false);
    if (ex != null) {
      throw ex;
    }

    // configure lifecycle hooks
    if (cfg.getLifecycleHooks() != null && !cfg.getLifecycleHooks().isEmpty()) {
      final Exception e =
          retrySupport.retry(
              () -> {
                task.updateStatus(taskPhase, "Creating lifecycle hooks for: " + asgName);
                asgLifecycleHookWorker.attach(task, cfg.getLifecycleHooks(), asgName);
                return null;
              },
              10,
              1000,
              false);

      // hook attachment failure is reported but does not abort the deploy
      if (e != null) {
        task.updateStatus(
            taskPhase,
            "Unable to attach lifecycle hooks to ASG (" + asgName + "): " + e.getMessage());
      }
    }

    // suspend auto scaling processes
    if (cfg.getSuspendedProcesses() != null && !cfg.getSuspendedProcesses().isEmpty()) {
      task.updateStatus(taskPhase, "Suspending processes for: " + asgName);
      retrySupport.retry(
          () ->
              autoScaling.suspendProcesses(
                  new SuspendProcessesRequest()
                      .withAutoScalingGroupName(asgName)
                      .withScalingProcesses(cfg.getSuspendedProcesses())),
          10,
          1000,
          false);
    }

    // enable metrics and monitoring
    if (cfg.getEnabledMetrics() != null
        && !cfg.getEnabledMetrics().isEmpty()
        && cfg.getInstanceMonitoring() != null
        && cfg.getInstanceMonitoring()) {
      task.updateStatus(taskPhase, "Enabling metrics collection for: " + asgName);
      retrySupport.retry(
          () ->
              autoScaling.enableMetricsCollection(
                  new EnableMetricsCollectionRequest()
                      .withAutoScalingGroupName(asgName)
                      .withGranularity("1Minute")
                      .withMetrics(cfg.getEnabledMetrics())),
          10,
          1000,
          false);
    }

    // update ASG sizes (also corrects a pre-existing group's capacity)
    retrySupport.retry(
        () -> {
          task.updateStatus(
              taskPhase,
              String.format(
                  "Setting size of %s in %s/%s to [min=%s, max=%s, desired=%s]",
                  asgName,
                  cfg.getCredentials().getName(),
                  cfg.getRegion(),
                  cfg.getMinInstances(),
                  cfg.getMaxInstances(),
                  cfg.getDesiredInstances()));
          autoScaling.updateAutoScalingGroup(
              new UpdateAutoScalingGroupRequest()
                  .withAutoScalingGroupName(asgName)
                  .withMinSize(cfg.getMinInstances())
                  .withMaxSize(cfg.getMaxInstances())
                  .withDesiredCapacity(cfg.getDesiredInstances()));
          return true;
        },
        10,
        1000,
        false);

    task.updateStatus(taskPhase, "Deployed EC2 server group named " + asgName);
    return asgName;
  }

  /**
   * Decides whether an AlreadyExistsException can be ignored: returns true only when the existing
   * group's settings match the request on every compared field AND the group was created within
   * the last hour (the idempotency safety window). Otherwise the caller rethrows.
   */
  private boolean shouldProceedWithExistingState(
      AmazonAutoScaling autoScaling,
      String asgName,
      CreateAutoScalingGroupRequest request,
      Task task,
      String taskPhase) {
    final DescribeAutoScalingGroupsResult result =
        autoScaling.describeAutoScalingGroups(
            new DescribeAutoScalingGroupsRequest().withAutoScalingGroupNames(asgName));
    if (result.getAutoScalingGroups().isEmpty()) {
      // This will only happen if we get an AlreadyExistsException from AWS, then immediately after
      // describing it, we
      // don't get a result back. We'll continue with trying to create because who knows may as well
      // try.
      log.error("Attempted to find pre-existing ASG but none was found: " + asgName);
      return true;
    }
    final AutoScalingGroup existingAsg = result.getAutoScalingGroups().get(0);

    // build predicates and identify failed ones
    List<String> existingAsgSubnetIds = null;
    if (StringUtils.isNotEmpty(existingAsg.getVPCZoneIdentifier())) {
      existingAsgSubnetIds = sortList(Arrays.asList(existingAsg.getVPCZoneIdentifier().split(",")));
    }
    List<String> requestedSubnetIds = null;
    if (StringUtils.isNotEmpty(request.getVPCZoneIdentifier())) {
      requestedSubnetIds = sortList(Arrays.asList(request.getVPCZoneIdentifier().split(",")));
    }

    // each entry maps a human-readable field name to "existing matches requested"
    Map<String, Boolean> predicates =
        ImmutableMap.<String, Boolean>builder()
            .put(
                "launch configuration",
                Objects.equals(
                    existingAsg.getLaunchConfigurationName(), request.getLaunchConfigurationName()))
            .put(
                "launch template",
                Objects.equals(existingAsg.getLaunchTemplate(), request.getLaunchTemplate()))
            .put(
                "mixed instances policy",
                Objects.equals(
                    existingAsg.getMixedInstancesPolicy(), request.getMixedInstancesPolicy()))
            .put(
                "availability zones",
                Objects.equals(
                    sortList(existingAsg.getAvailabilityZones()),
                    sortList(request.getAvailabilityZones())))
            .put("subnets", Objects.equals(existingAsgSubnetIds, requestedSubnetIds))
            .put(
                "load balancers",
                Objects.equals(
                    sortList(existingAsg.getLoadBalancerNames()),
                    sortList(request.getLoadBalancerNames())))
            .put(
                "target groups",
                Objects.equals(
                    sortList(existingAsg.getTargetGroupARNs()),
                    sortList(request.getTargetGroupARNs())))
            .put("cooldown", existingAsg.getDefaultCooldown() == request.getDefaultCooldown())
            .put(
                "health check grace period",
                existingAsg.getHealthCheckGracePeriod() == request.getHealthCheckGracePeriod())
            .put(
                "health check type",
                Objects.equals(existingAsg.getHealthCheckType(), request.getHealthCheckType()))
            .put(
                "termination policies",
                Objects.equals(
                    sortList(existingAsg.getTerminationPolicies()),
                    sortList(request.getTerminationPolicies())))
            .build();

    final Set<String> failedPredicates =
        predicates.entrySet().stream()
            .filter(p -> !p.getValue())
            .map(Map.Entry::getKey)
            .collect(Collectors.toSet());
    if (!failedPredicates.isEmpty()) {
      task.updateStatus(
          taskPhase,
          String.format(
              "%s already exists and does not seem to match desired state on: %s",
              asgName, String.join(",", failedPredicates)));
      log.debug("Failed predicates: " + predicates);
      return false;
    }

    if (existingAsg
        .getCreatedTime()
        .toInstant()
        .isBefore(Instant.now().minus(1, ChronoUnit.HOURS))) {
      task.updateStatus(
          taskPhase,
          asgName
              + " already exists and appears to be valid, but falls outside of safety window for idempotent deploy (1 hour)");
      return false;
    }

    return true;
  }

  /**
   * This is an obscure rule that Subnets are tagged at Amazon with a data structure, which defines
   * their purpose and what type of resources (elb or ec2) are able to make use of them. We also
   * need to ensure that the Subnet IDs that we provide back are able to be deployed to based off of
   * the supplied availability zones.
   *
   * @return list of subnet ids applicable to this deployment.
   */
  private List<String> getSubnetIds(
      List<Subnet> allSubnetsForTypeAndAvailabilityZone,
      List<String> subnetIds,
      List<String> availabilityZones) {
    final List<String> allSubnetIds =
        allSubnetsForTypeAndAvailabilityZone.stream()
            .map(s -> s.getSubnetId())
            .collect(Collectors.toList());

    // explicitly requested subnet ids must be a subset of the valid ones
    List<String> invalidSubnetIds = null;
    if (subnetIds != null && !subnetIds.isEmpty()) {
      invalidSubnetIds =
          subnetIds.stream().filter(it -> !allSubnetIds.contains(it)).collect(Collectors.toList());
    }

    if (invalidSubnetIds != null && !invalidSubnetIds.isEmpty()) {
      throw new IllegalStateException(
          String.format(
              "One or more subnet ids are not valid (invalidSubnetIds: %s, availabilityZones: %s)",
              String.join(",", invalidSubnetIds), String.join(",", availabilityZones)));
    }

    return (subnetIds != null && !subnetIds.isEmpty()) ? subnetIds : allSubnetIds;
  }

  /**
   * Lists subnets restricted to the given availability zones; when
   * {@code filterForSubnetPurposeTags} is set, additionally keeps only subnets whose purpose tag
   * equals {@code subnetType} and whose target is EC2 (or unset).
   */
  private List<Subnet> getSubnets(
      boolean filterForSubnetPurposeTags, String subnetType, List<String> availabilityZones) {
    if (StringUtils.isEmpty(subnetType)) {
      return Collections.emptyList();
    }

    final DescribeSubnetsResult result = ec2.describeSubnets();
    List<Subnet> mySubnets = new ArrayList<>();
    for (Subnet subnet : result.getSubnets()) {
      if (availabilityZones != null
          && !availabilityZones.isEmpty()
          && !availabilityZones.contains(subnet.getAvailabilityZone())) {
        continue;
      }
      if (filterForSubnetPurposeTags) {
        final SubnetData sd = SubnetData.from(subnet);
        if ((sd.getPurpose() != null && sd.getPurpose().equals(subnetType))
            && (sd.getTarget() == null || sd.getTarget() == SubnetTarget.EC2)) {
          mySubnets.add(subnet);
        }
      } else {
        mySubnets.add(subnet);
      }
    }
    return mySubnets;
  }

  /** Returns a new list with the elements in natural order (null input not expected). */
  private List<String> sortList(List<String> list) {
    return list.stream().sorted(Comparator.naturalOrder()).collect(Collectors.toList());
  }
}
apache-2.0
chris-wood/ORAM3P
src/sprout/oram/Tree.java
20386
package sprout.oram; import java.io.FileNotFoundException; import java.io.IOException; import java.io.RandomAccessFile; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import sprout.crypto.Random; import sprout.util.Util; import sprout.util.RC; public class Tree { // Tree parameters private int level; private int fanout; private int bucketSize; private int bucketDepth; private int tupleSize; private int numTuples; private int numLevels; private int leafExpansion; private long N; // Tuple params for the tree private int lBits; private int lBytes; private int nBytes; private int dBytes; // Disk stuff private String dbfile = null; private int offset = 0; /** * Create an empty tree with the specified number of leaves. * * @param numLeaves */ public Tree(long offset, int level, String dbfile, ForestMetadata metadata) { this.level = level; this.offset = (int)offset; this.dbfile = dbfile; this.fanout = metadata.getFanout(); this.bucketSize = metadata.getTupleSizeInBytes(level) * metadata.getBucketDepth(); this.bucketDepth = metadata.getBucketDepth(); this.tupleSize = metadata.getTupleSizeInBytes(level); this.numLevels = (int)(Math.log(metadata.getNumLeaves(level)) / Math.log(fanout)); this.N = metadata.getNumLeaves(level); this.leafExpansion = metadata.getLeafExpansion(); // this is the factor 4, for example this.lBits = metadata.getTupleBitsL(level); this.lBytes = metadata.getTupleBytesL(level); this.nBytes = metadata.getTupleBytesN(level); this.dBytes = metadata.getDataSize(); this.numTuples = bucketDepth * (int)((N * leafExpansion) + (N - 1)); // complete, balanced k-ary tree with leaves expanded (e.g., to 4) Util.disp("Tuple size = " + tupleSize); Util.disp("Tree #tuples = " + numTuples); Util.disp("Tree size in bytes = " + (tupleSize * numTuples)); Util.disp("Num levels = " + numLevels); Util.disp("Database file offset = " + offset); Util.disp("***************************************************"); Util.disp("level:\t" + level); 
Util.disp("fanout:\t" + fanout); Util.disp("bucketSize:\t" + bucketSize); Util.disp("bucketDepth:\t" + bucketDepth); Util.disp("tupleSize:\t" + tupleSize); Util.disp("numTuples:\t" + numTuples); Util.disp("numLevels:\t" + numLevels); Util.disp("leafExpansion:\t" + leafExpansion); Util.disp("N:\t" + N); Util.disp("lBits:\t" + lBits); Util.disp("lBytes:\t" + lBytes); Util.disp("nBytes:\t" + nBytes); Util.disp("dBytes:\t" + dBytes); Util.disp("***************************************************"); } public int getTreeLevel() { return level; } public int getNumberOfNonExpandedBuckets() { return bucketDepth * (int)((N + (N - 1))); } public int getNumberOfBuckets() { return (numTuples / bucketDepth); } public int getNumberOfTuples() { return numTuples; } public int getBucketDepth() { return bucketDepth; } public int getBucketSize() { return bucketSize; } public int getFanout() { return fanout; } public int getNumLevels() { return numLevels; } public long getNumLeaves() { return N; } public int getLeafExpansion() { return leafExpansion; } public boolean getSlotStatus(int slot) throws TreeException { if (slot < 0 || slot >= numTuples) { throw new TreeException("Slot index out of bounds: " + slot + " > " + numTuples); } byte[] raw = new byte[tupleSize]; RC ret = readTuple(raw, slot); if (ret != RC.SUCCESS) { throw new TreeException("Error reading tuple."); } Tuple t = new Tuple(raw, lBytes, nBytes, dBytes); return t.isOccupied(); } /** * Retrieve the size of the tree in bytes. * * @return tree size in bytes */ public long getSizeInBytes() { return numTuples * tupleSize; } public int getLBytes() { return lBytes; } public int getNBytes() { return nBytes; } public int getDBytes() { return dBytes; } /** * Find the tuple in the tree that matches the specified tag. 
* @param tag * @return * @throws TreeException */ public Tuple findTupleByTag(byte[] tag) throws TreeException { for (int i = 0; i < numTuples; i++) { byte[] raw = new byte[tupleSize]; RC ret = readTuple(raw, i); if (ret != RC.SUCCESS) { throw new TreeException("Error reading tuple."); } Tuple other = new Tuple(raw, lBytes, nBytes, dBytes); if (level == 1 && raw[0] == 1) { // System.out.println("Comparing with: " + other); } if (other.isOccupied() && other.matchesRawTag(tag)) { return other; } } return null; } /** * Read/return the bytes in the specified bucket. * * @param bucketNum * @return bucket bytes */ public byte[] readBucket(int bucketNum) { byte[] buffer = new byte[bucketSize]; try { RandomAccessFile ro = new RandomAccessFile(dbfile, "r"); ro.seek(offset + (bucketNum * bucketSize)); ro.read(buffer, 0, bucketSize); ro.close(); return buffer; } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } return buffer; } /** * Write the new contents of a bucket to disk. * * @param newBucket * @param pos * @return RC.SUCCESS on success, something else otherwise */ public RC writeBucket(byte[] bucket, int pos) { if (pos > 0 && pos < N) { if (bucket.length != bucketSize) { return RC.INVALID_BUCKET_SIZE; } try { RandomAccessFile ro = new RandomAccessFile(dbfile, "rw"); ro.seek(offset + (pos * bucketSize)); ro.write(bucket, 0, bucket.length); ro.close(); return RC.SUCCESS; } catch (IOException e) { e.printStackTrace(); } return RC.IO_ERROR; } return RC.TREE_INVALID_BUCKET_INDEX; } /** * Read/return the bytes in the specified tuple. 
* * @param bucketNum * @return bucket bytes */ public RC readTuple(byte[] buffer, int tupleNum) { RC ret = RC.SUCCESS; try { RandomAccessFile ro = new RandomAccessFile(dbfile, "r"); ro.seek(offset + (tupleNum * tupleSize)); ro.read(buffer, 0, tupleSize); ro.close(); } catch (FileNotFoundException e) { e.printStackTrace(); ret = RC.IO_ERROR; } catch (IOException e) { e.printStackTrace(); ret = RC.IO_ERROR; } return ret; } public Tuple getTuple(int tupleNum) throws TreeException { byte[] buffer = new byte[tupleSize]; RC ret = readTuple(buffer, tupleNum); if (ret != RC.SUCCESS) { throw new TreeException("Error getting tuple " + tupleNum); } return new Tuple(buffer, lBytes, nBytes, dBytes); } /** * Write the specified tuple in place in the tree. * * @param tuple - tuple to save * @param slot - slot index in the tree (NOT THE INDEX WITHIN A BUCKET) * @return */ public RC writeTuple(byte[] tuple, int slot) { if (slot > 0 && slot < numTuples) { if (tupleSize != tuple.length) { return RC.INVALID_TUPLE_SIZE; } try { RandomAccessFile ro = new RandomAccessFile(dbfile, "rw"); ro.seek(offset + (slot * tupleSize)); ro.write(tuple, 0, tuple.length); ro.close(); return RC.SUCCESS; } catch (IOException e) { e.printStackTrace(); } } return RC.TREE_INVALID_SLOT_INDEX; } /** * Determine if the bucket at position pos is full. * (Loop over every tuple in the bucket and check to see if it's in use) * * @param pos - target bucket * @return true if full, false otherwise * @throws TreeException */ private boolean isBucketFull(int pos) throws TreeException { int start = pos * bucketDepth; int len = bucketDepth; for (int i = start; i < start + len; i++) { byte[] raw = new byte[tupleSize]; RC ret = readTuple(raw, i); if (ret != RC.SUCCESS) { throw new TreeException("Error reading tuple."); } Tuple t = new Tuple(raw, lBytes, nBytes, dBytes); if (!t.isOccupied()) return false; // found an empty } return true; } /** * Retrieve a list of tuple indices that are full in the specified bucket. 
* * @param bucketPos - target bucket * @return * @throws TreeException */ private List<Integer> getFullSlots(int bucketPos) throws TreeException { List<Integer> indices = new ArrayList<Integer>(); int start = bucketPos * bucketDepth; int len = bucketDepth; for (int i = start; i < start + len; i++) { byte[] raw = new byte[tupleSize]; RC ret = readTuple(raw, i); if (ret != RC.SUCCESS) { throw new TreeException("Error reading tuple."); } Tuple t = new Tuple(raw, lBytes, nBytes, dBytes); if (t.isOccupied()) indices.add(i); } return indices; } /** * Retrieve a list of tuple indices that are empty in the specified bucket. * * @param bucketPos - target bucket * @return * @throws TreeException */ private List<Integer> getEmptySlots(int bucketPos) throws TreeException { List<Integer> indices = new ArrayList<Integer>(); int start = bucketPos * bucketDepth; int len = bucketDepth; for (int i = start; i < start + len; i++) { byte[] raw = new byte[tupleSize]; RC ret = readTuple(raw, i); if (ret != RC.SUCCESS) { throw new TreeException("Error reading tuple."); } Tuple t = new Tuple(raw, lBytes, nBytes, dBytes); if (!t.isOccupied()) indices.add(i); } return indices; } /** * Retrieve the indices of non-empty buckets at the specified level in the tree. * * @param level * @return * @throws TreeException */ private List<Integer> getNonEmptyBucketsAtLevel(int level) throws TreeException { List<Integer> nonempty = new ArrayList<Integer>(); int len = (int)Math.pow(fanout, level); int low = len - 1; for (int i = low; i < low + len; i++) { if (!getFullSlots(i).isEmpty()) { nonempty.add(i); } } return nonempty; } /** * Retrieve the tuple slots that are not occupied (empty) at the given level in the tree. 
* * @param level * @return * @throws TreeException */ private List<Integer> getEmptyTuplesAtLevel(int level) throws TreeException { List<Integer> empties = new ArrayList<Integer>(); int len = (int)Math.pow(fanout, level); int low = len - 1; for (int i = low; i < low + len; i++) { empties.addAll(getEmptySlots(i)); } return empties; } /** * Retrieve the tuple slots that are occupied (full) at the given level in the tree. * * @param level * @return * @throws TreeException */ private List<Integer> getFullTuplesAtLevel(int level) throws TreeException { List<Integer> filled = new ArrayList<Integer>(); int len = (int)Math.pow(fanout, level); int low = len - 1; for (int i = low; i < low + len; i++) { filled.addAll(getFullSlots(i)); } return filled; } /** * Determine the child index at the next lower layer in the tree * based on the leaf encoding L and the specified level in the tree. * * @param t - tuple under consideration * @param level - current level in the tree * @return the child index in the next level of the tree */ private int nextChildrenIndices(Tuple t, int level) { String rep = t.getKAryRep(fanout, lBits); Util.debug(rep + ", " + level); int child = Integer.parseInt("" + rep.charAt(level)); // levels are 0-based return child; } /** * Select a random non-empty tuple from some node in the level and push it * down towards the right child. 
* * @param level (!= last level of leaves) * @return success or error * @throws TreeException */ private RC pushDown(int level) throws TreeException { List<Integer> fullIndices = getFullTuplesAtLevel(level); if (fullIndices.isEmpty()) { // If there are no full tuples at this level (rare), do nothing Util.disp("Nothing to move at level " + level); return RC.SUCCESS; } // Select random full tuple at level and empty tuple at level+1 List<Integer> nonEmptyBuckets = getNonEmptyBucketsAtLevel(level); if (nonEmptyBuckets.isEmpty()) { return RC.SUCCESS; } int sourceBucket = nonEmptyBuckets.get(Random.generateRandomInt(0, nonEmptyBuckets.size() - 1)); List<Integer> filled = getFullSlots(sourceBucket); if (filled.isEmpty()) { return RC.SUCCESS; } int sourceTupleIndex = filled.get(Random.generateRandomInt(0, filled.size() - 1)); byte[] src = new byte[tupleSize]; RC ret = readTuple(src, sourceTupleIndex); if (ret != RC.SUCCESS) { throw new TreeException("Error reading tuple."); } Tuple srcTuple = new Tuple(src, lBytes, nBytes, dBytes); if (srcTuple.isOccupied() == false) { throw new TreeException("Tuple state corrupt: " + sourceTupleIndex); } // Move to an empty tuple slot in the appropriate bucket (based on leaf address) int child = nextChildrenIndices(srcTuple, level + 1); int targetBucket = (fanout * sourceBucket) + child + 1; List<Integer> empties = null; // If we've overflown into the leaves, take the leaf expansion into account // Choose a random bucket in the leaf to which this tuple will be placed if (targetBucket >= (N - 1)) { int base = (int) (N + ((targetBucket - N) * leafExpansion)); int offset = Random.generateRandomInt(0, leafExpansion); targetBucket = base + offset; empties = getEmptySlots(targetBucket); while (empties.isEmpty()) { offset = Random.generateRandomInt(0, leafExpansion); targetBucket = base + offset; empties = getEmptySlots(targetBucket); } } else { empties = getEmptySlots(targetBucket); } if (empties.isEmpty()) { throw new TreeException("No room in 
bucket " + targetBucket + " to move (" + sourceBucket + "," + sourceTupleIndex + ")"); } int targetTupleIndex = empties.get(Random.generateRandomInt(0, empties.size() - 1));; // Push to level+1 (copy into buffer) and overwrite the destination tuple writeTuple(src, targetTupleIndex); byte[] zeros = new byte[tupleSize]; Arrays.fill(zeros, (byte)0); writeTuple(zeros, sourceTupleIndex); // this will zero out the full bit for the source tuple // TODO: replace this complete zero-out with a single byte write that zeros out the prepended tuple metadata return RC.SUCCESS; } /** * Insert a new tuple into the tree during initialization. * * NOTE: ONLY INVOKED DURING TREE CREATION! * * @param t - tuple to be inserted * @return SUCCESS if successful, something else otherwise * @throws TreeException */ public RC initialInsertTuple(Tuple t) throws TreeException { RC ret = RC.SUCCESS; Util.disp("ORAM-" + level + " inserting: " + t); // Put in one children of the root, and then call push down on every level // => child c is at index: 1 + c int targetBucketIndex = nextChildrenIndices(t, 1) + 1; // Pick a random tuple in the target bucket List<Integer> emptySpots = getEmptySlots(targetBucketIndex); if (emptySpots.isEmpty()) { throw new TreeException("No empty spots in the target bucket: " + targetBucketIndex); } int targetTupleIndex = emptySpots.get(Random.generateRandomInt(0, emptySpots.size() - 1)); // Store the tuple and mark the slot as in use byte[] raw = t.toArray(); ret = writeTuple(raw, targetTupleIndex); if (ret != RC.SUCCESS) { System.out.println(ret.toString()); throw new TreeException("Failed to write tuple at slot (" + targetBucketIndex + "," + targetTupleIndex + ")"); } // Sanity check... 
raw = new byte[tupleSize]; ret = readTuple(raw, targetTupleIndex); if (ret != RC.SUCCESS) { return ret; } Tuple copy = new Tuple(raw, lBytes, nBytes, dBytes); if (!copy.isOccupied()) { Util.disp(t.toString()); Util.disp(copy.toString()); throw new TreeException("Write error."); } // Fetch the number of occupied entries to perform sanity check after push int numOccupied = getNumOccupiedTuples(); // Call push down at every level (in reverse order) for (int i = numLevels - 1; i >= 1; i--) { pushDown(i); } // Post-push down occupancy check if (numOccupied != getNumOccupiedTuples()) { ret = RC.TREE_PUSH_DOWN_ERROR; } return ret; } private List<Integer> getTupleIndicesOnPathToLeaf(long leafNum) { List<Integer> indices = new ArrayList<Integer>(); // The root is always included in the path for (int i = 0; i < this.bucketDepth; i++) { indices.add(i); } // Add the tuple indices in the bucket based on the leaf k-ary representation for (int l = 1; l < numLevels; l++) { String rep = Util.toKaryString(leafNum, fanout, lBits); int bucketPos = (fanout * l) + 1 + Integer.parseInt("" + rep.charAt(l)); // levels are 0-based int start = bucketPos * bucketDepth; int len = bucketDepth; for (int i = start; i < start + len; i++) { indices.add(i); } } return indices; } public List<Integer> getBucketIndicesOnPathToLeaf(long leafNum) { List<Integer> indices = new ArrayList<Integer>(); // The root is always included in the path indices.add(0); // non-leaf buckets for (int l = 1; l < numLevels; l++) { String rep = Util.toKaryString(leafNum, fanout, lBits); int bucketPos = (fanout * l) + 1 + Integer.parseInt("" + rep.charAt(l)); // levels are 0-based indices.add(bucketPos); } // leaf buckets int leafBucket = (int) (leafNum * bucketDepth + Math.pow(2, numLevels)); for (int i=0; i<bucketDepth; i++) { indices.add(leafBucket + i); } return indices; } /** * Retrieve a list of tuples along the path from the root to the leaf. 
* * @param leafNum * @return * @throws TreeException */ public List<Tuple> getPathToLeaf(long leafNum) throws TreeException { List<Tuple> path = new ArrayList<Tuple>(); for (Integer i : getTupleIndicesOnPathToLeaf(leafNum)) { byte[] buffer = new byte[tupleSize]; RC ret = readTuple(buffer, i); if (ret != RC.SUCCESS) { throw new TreeException("Error extracting root-to-leaf " + leafNum + " tuples given tuple " + i); } path.add(new Tuple(buffer, lBytes, nBytes, dBytes)); } return path; } /** * Blindly update the root-to-leaf path with the new set of tuples. * * @param tuples * @param leafNum * @return */ public RC updatePathToLeaf(List<Tuple> tuples, long leafNum) { RC ret = RC.SUCCESS; int tupleIndex = 0; List<Integer> indices = getTupleIndicesOnPathToLeaf(leafNum); if (indices.size() != tuples.size()) { return RC.TREE_INVALID_PATH_LENGTH; } for (Integer i : getTupleIndicesOnPathToLeaf(leafNum)) { ret = writeTuple(tuples.get(tupleIndex++).toArray(), i); if (ret != RC.SUCCESS) { return ret; } } return ret; } /** * Display all information about the tree * * @return human-readable (?) string representation */ @Override public String toString() { StringBuilder builder = new StringBuilder(); for (int i = 0; i < numTuples; i++) { byte[] raw = new byte[tupleSize]; RC ret = readTuple(raw, i); if (ret != RC.SUCCESS) { try { throw new TreeException("Error reading tuple."); } catch (TreeException e) { e.printStackTrace(); } } Tuple t = new Tuple(raw, lBytes, nBytes, dBytes); builder.append(i + ": " + t.isOccupied() + "\n"); } return builder.toString(); } /** * Retrieve a list of the tuples currently in use. 
* * @return in-use tuples * @throws TreeException */ public List<Integer> inUseList() throws TreeException { List<Integer> tuples = new ArrayList<Integer>(); for (int i = 0; i < numTuples; i++) { byte[] raw = new byte[tupleSize]; RC ret = readTuple(raw, i); if (ret != RC.SUCCESS) { throw new TreeException("Error reading tuple."); } Tuple t = new Tuple(raw, lBytes, nBytes, dBytes); if (t.isOccupied()) tuples.add(i); } return tuples; } /** * Retrieve the number of occupied tuples in the tree. * @return * @throws TreeException */ public int getNumOccupiedTuples() throws TreeException { int count = 0; for (int i = 0; i < numTuples; i++) { byte[] raw = new byte[tupleSize]; RC ret = readTuple(raw, i); if (ret != RC.SUCCESS) { throw new TreeException("Error reading tuple."); } Tuple other = new Tuple(raw, lBytes, nBytes, dBytes); if (other.isOccupied()) { count++; } } return count; } }
apache-2.0
saki4510t/libcommon
common/src/main/java/com/serenegiant/media/IRecycleParent.java
1210
package com.serenegiant.media; /* * libcommon * utility/helper classes for myself * * Copyright (c) 2014-2021 saki t_saki@serenegiant.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import androidx.annotation.NonNull; /** * IRecycleBufferの親となるオブジェクトが最低限実装すべきメソッドをしめすインターフェース */ public interface IRecycleParent<T extends IRecycleBuffer> { /** * バッファを再利用可能にする * @param buffer * @return true:リサイクルできたとき, false:リサイクルできなかった時(プールがの最大保持数を超えた時など) */ public boolean recycle(@NonNull final T buffer); }
apache-2.0
dagnir/aws-sdk-java
aws-java-sdk-simpleworkflow/src/main/java/com/amazonaws/services/simpleworkflow/model/transform/WorkflowExecutionFailedEventAttributesMarshaller.java
2891
/* * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.simpleworkflow.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.services.simpleworkflow.model.*; import com.amazonaws.protocol.*; import com.amazonaws.annotation.SdkInternalApi; /** * WorkflowExecutionFailedEventAttributesMarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class WorkflowExecutionFailedEventAttributesMarshaller { private static final MarshallingInfo<String> REASON_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("reason").build(); private static final MarshallingInfo<String> DETAILS_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("details").build(); private static final MarshallingInfo<Long> DECISIONTASKCOMPLETEDEVENTID_BINDING = MarshallingInfo.builder(MarshallingType.LONG) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("decisionTaskCompletedEventId").build(); private static final WorkflowExecutionFailedEventAttributesMarshaller instance = new WorkflowExecutionFailedEventAttributesMarshaller(); public static WorkflowExecutionFailedEventAttributesMarshaller getInstance() { return instance; } /** * Marshall the given parameter object. 
*/ public void marshall(WorkflowExecutionFailedEventAttributes workflowExecutionFailedEventAttributes, ProtocolMarshaller protocolMarshaller) { if (workflowExecutionFailedEventAttributes == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(workflowExecutionFailedEventAttributes.getReason(), REASON_BINDING); protocolMarshaller.marshall(workflowExecutionFailedEventAttributes.getDetails(), DETAILS_BINDING); protocolMarshaller.marshall(workflowExecutionFailedEventAttributes.getDecisionTaskCompletedEventId(), DECISIONTASKCOMPLETEDEVENTID_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
apache-2.0
jbossorg/borg
src/main/java/org/jboss/planet/model/PostFilter.java
399
/* * JBoss, Home of Professional Open Source * Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors * as indicated by the @authors tag. All rights reserved. */ package org.jboss.planet.model; import java.io.Serializable; /** * * @author Adam Warski (adam at warski dot org) * */ public interface PostFilter extends Serializable { public boolean filter(Post post); }
apache-2.0
android-opensource-library-56/android-opensource-library-56
09-04-Robolectric/RobolectricSample/test/jp/mydns/sys1yagi/android/robolectricsample/RobolectricSampleActivityTest.java
1980
package jp.mydns.sys1yagi.android.robolectricsample; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.junit.Assert.assertThat; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.Robolectric; import org.robolectric.RobolectricTestRunner; import org.robolectric.shadows.ShadowActivity; import org.robolectric.shadows.ShadowWebView; import android.app.Activity; import android.webkit.WebView; import android.widget.TextView; @RunWith(RobolectricTestRunner.class) public class RobolectricSampleActivityTest { @Before public void setUp() throws Exception { } @After public void tearDown() throws Exception { } @Test public void 画面に表示するTextViewの文字列を取得してチェックする() { Activity activity = Robolectric .buildActivity(RobolectricSampleActivity.class).create().get(); TextView textView = (TextView) activity.findViewById(R.id.text); assertThat(textView, notNullValue()); assertThat(textView.getText().toString(), is(activity.getString(R.string.hello_world))); } @Test public void WebViewで読み込んだURLのテスト() { Activity activity = Robolectric .buildActivity(RobolectricSampleActivity.class).create().get(); WebView webView = (WebView) activity.findViewById(R.id.webview); ShadowWebView shadowWebView = Robolectric.shadowOf((WebView) webView); assertThat("http://robolectric.org/index.html", is(shadowWebView.getLastLoadedUrl())); } @Test public void lifecycle(){ RobolectricSampleActivity activity = Robolectric .buildActivity(RobolectricSampleActivity.class) .create() .start() .resume() .get(); } }
apache-2.0
masonmei/apm-agent
profiler/src/main/java/com/baidu/oped/apm/profiler/interceptor/bci/MultipleClassPool.java
776
/* * Copyright 2014 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.baidu.oped.apm.profiler.interceptor.bci; /** * @author emeroad */ public interface MultipleClassPool { NamedClassPool getClassPool(ClassLoader classLoader); }
apache-2.0
ismael-sarmento-jr/hapi-fhir
hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java
2049
package ca.uhn.fhir.util; /* * #%L * HAPI FHIR - Core Library * %% * Copyright (C) 2014 - 2016 University Health Network * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.ArrayList; import java.util.List; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseResource; import ca.uhn.fhir.context.BaseRuntimeChildDefinition; import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; /** * Fetch resources from a bundle */ public class BundleUtil { /** * Extract all of the resources from a given bundle */ public static List<IBaseResource> toListOfResources(FhirContext theContext, IBaseBundle theBundle) { List<IBaseResource> retVal = new ArrayList<IBaseResource>(); RuntimeResourceDefinition def = theContext.getResourceDefinition(theBundle); BaseRuntimeChildDefinition entryChild = def.getChildByName("entry"); List<IBase> entries = entryChild.getAccessor().getValues(theBundle); BaseRuntimeElementCompositeDefinition<?> entryChildElem = (BaseRuntimeElementCompositeDefinition<?>) entryChild.getChildByName("entry"); BaseRuntimeChildDefinition resourceChild = entryChildElem.getChildByName("resource"); for (IBase nextEntry : entries) { for (IBase next : resourceChild.getAccessor().getValues(nextEntry)) { retVal.add((IBaseResource) next); } } return retVal; } }
apache-2.0
li9766090/coolweatherData
app/src/main/java/com/example/lijing/coolwwath/db/DBHelper.java
1840
package com.example.lijing.coolwwath.db; import android.content.Context; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; /** * Created by lijing on 15/6/8. */ //用来装载天气信息的数据库 public class DBHelper extends SQLiteOpenHelper { // // 数据库表名 // private static final String DB_NAME = "coolWeath.db"; // // 数据版本 // private static final int VERSION = 1; // // 创建省份数据库 private static final String CREATE_PROVINCE = "create table Province(_id integer primary key autoincrement," + "province_id integer,province_name text,province_code text)"; // 市 private static final String CREATE_CITY = "create table City(_id integer primary key autoincrement," + "city_id integer,city_name text,city_code text,province_id integer)"; // 县 private static final String CREATE_COUNTY = "create table County(_id integer primary key autoincrement," + "county_id integer,county_name text,county_code text,city_id integer)"; public DBHelper(Context context, String name, SQLiteDatabase.CursorFactory factory, int version) { super(context, name, factory, version); } // // 静态对象的引用 // private static DBHelper dbHelper = null; // // // 获得类的唯一对象 // public static DBHelper getInstance(Context context) { // if (dbHelper == null) { // dbHelper = new DBHelper(context); // } // return dbHelper; // } @Override public void onCreate(SQLiteDatabase db) { db.execSQL(CREATE_PROVINCE); db.execSQL(CREATE_CITY); db.execSQL(CREATE_COUNTY); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { } }
apache-2.0
MPLauncher/MPLauncher
src/main/java/pl/mplauncher/launcher/bootstrap/MPLauncherBootstrap.java
1021
/* Copyright 2017 MPLauncher Team Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package pl.mplauncher.launcher.bootstrap; import javafx.application.Application; import javafx.stage.Stage; import pl.mplauncher.launcher.MPLauncher; public class MPLauncherBootstrap extends Application { public static void main(String[] args) { launch(args); } @Override public void start(Stage stage) throws Exception { MPLauncher launcher = new MPLauncher(); // TODO: init } }
apache-2.0
hasancelik/hazelcast-stabilizer
simulator/src/main/java/com/hazelcast/simulator/protocol/processors/AgentOperationProcessor.java
8416
/* * Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.simulator.protocol.processors; import com.hazelcast.simulator.agent.Agent; import com.hazelcast.simulator.agent.workerjvm.WorkerJvmLauncher; import com.hazelcast.simulator.agent.workerjvm.WorkerJvmManager; import com.hazelcast.simulator.agent.workerjvm.WorkerJvmSettings; import com.hazelcast.simulator.protocol.core.Response; import com.hazelcast.simulator.protocol.core.ResponseFuture; import com.hazelcast.simulator.protocol.core.ResponseType; import com.hazelcast.simulator.protocol.core.SimulatorAddress; import com.hazelcast.simulator.protocol.exception.ExceptionLogger; import com.hazelcast.simulator.protocol.operation.CreateWorkerOperation; import com.hazelcast.simulator.protocol.operation.InitTestSuiteOperation; import com.hazelcast.simulator.protocol.operation.IntegrationTestOperation; import com.hazelcast.simulator.protocol.operation.LogOperation; import com.hazelcast.simulator.protocol.operation.OperationType; import com.hazelcast.simulator.protocol.operation.SimulatorOperation; import com.hazelcast.simulator.worker.WorkerType; import org.apache.log4j.Logger; import java.io.File; import java.util.ArrayList; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import static com.hazelcast.simulator.protocol.core.ResponseType.SUCCESS; import static 
com.hazelcast.simulator.protocol.core.ResponseType.UNSUPPORTED_OPERATION_ON_THIS_PROCESSOR; import static com.hazelcast.simulator.utils.FileUtils.ensureExistingDirectory; import static com.hazelcast.simulator.utils.FileUtils.getSimulatorHome; import static java.lang.String.format; /** * An {@link OperationProcessor} implementation to process {@link SimulatorOperation} instances on a Simulator Agent. */ public class AgentOperationProcessor extends OperationProcessor { private static final Logger LOGGER = Logger.getLogger(AgentOperationProcessor.class); private final Agent agent; private final WorkerJvmManager workerJvmManager; private final ExecutorService executorService; public AgentOperationProcessor(ExceptionLogger exceptionLogger, Agent agent, WorkerJvmManager workerJvmManager, ExecutorService executorService) { super(exceptionLogger); this.agent = agent; this.workerJvmManager = workerJvmManager; this.executorService = executorService; } @Override protected ResponseType processOperation(OperationType operationType, SimulatorOperation operation, SimulatorAddress sourceAddress) throws Exception { switch (operationType) { case INTEGRATION_TEST: return processIntegrationTest((IntegrationTestOperation) operation, sourceAddress); case INIT_TEST_SUITE: processInitTestSuite((InitTestSuiteOperation) operation); break; case CREATE_WORKER: return processCreateWorker((CreateWorkerOperation) operation); case START_TIMEOUT_DETECTION: processStartTimeoutDetection(); break; case STOP_TIMEOUT_DETECTION: processStopTimeoutDetection(); break; default: return UNSUPPORTED_OPERATION_ON_THIS_PROCESSOR; } return SUCCESS; } private ResponseType processIntegrationTest(IntegrationTestOperation operation, SimulatorAddress sourceAddress) throws Exception { SimulatorOperation nestedOperation; Response response; ResponseFuture future; switch (operation.getType()) { case NESTED_SYNC: nestedOperation = new LogOperation("Sync nested integration test message"); response = 
agent.getAgentConnector().write(sourceAddress, nestedOperation); LOGGER.debug("Got response for sync nested message: " + response); return response.getFirstErrorResponseType(); case NESTED_ASYNC: nestedOperation = new LogOperation("Async nested integration test message"); future = agent.getAgentConnector().submit(sourceAddress, nestedOperation); response = future.get(); LOGGER.debug("Got response for async nested message: " + response); return response.getFirstErrorResponseType(); case DEEP_NESTED_SYNC: nestedOperation = new LogOperation("Sync deep nested integration test message"); response = agent.getAgentConnector().write(SimulatorAddress.COORDINATOR, nestedOperation); LOGGER.debug("Got response for sync deep nested message: " + response); return response.getFirstErrorResponseType(); case DEEP_NESTED_ASYNC: nestedOperation = new LogOperation("Sync deep nested integration test message"); future = agent.getAgentConnector().submit(SimulatorAddress.COORDINATOR, nestedOperation); response = future.get(); LOGGER.debug("Got response for async deep nested message: " + response); return response.getFirstErrorResponseType(); default: return UNSUPPORTED_OPERATION_ON_THIS_PROCESSOR; } } private void processInitTestSuite(InitTestSuiteOperation operation) { agent.setTestSuite(operation.getTestSuite()); File workersHome = ensureExistingDirectory(getSimulatorHome(), "workers"); File testSuiteDir = ensureExistingDirectory(workersHome, operation.getTestSuite().getId()); ensureExistingDirectory(testSuiteDir, "lib"); } private ResponseType processCreateWorker(CreateWorkerOperation operation) throws Exception { ArrayList<Future<Boolean>> futures = new ArrayList<Future<Boolean>>(); for (WorkerJvmSettings workerJvmSettings : operation.getWorkerJvmSettings()) { WorkerJvmLauncher launcher = new WorkerJvmLauncher(agent, workerJvmManager, workerJvmSettings); Future<Boolean> future = executorService.submit(new LaunchWorkerCallable(launcher, workerJvmSettings)); futures.add(future); } for 
(Future<Boolean> future : futures) { if (!future.get()) { return ResponseType.EXCEPTION_DURING_OPERATION_EXECUTION; } } return SUCCESS; } private void processStartTimeoutDetection() { agent.getWorkerJvmFailureMonitor().startTimeoutDetection(); } private void processStopTimeoutDetection() { agent.getWorkerJvmFailureMonitor().stopTimeoutDetection(); } private final class LaunchWorkerCallable implements Callable<Boolean> { private final WorkerJvmLauncher launcher; private final WorkerJvmSettings workerJvmSettings; private LaunchWorkerCallable(WorkerJvmLauncher launcher, WorkerJvmSettings workerJvmSettings) { this.launcher = launcher; this.workerJvmSettings = workerJvmSettings; } @Override public Boolean call() { try { launcher.launch(); int workerIndex = workerJvmSettings.getWorkerIndex(); int workerPort = agent.getPort() + workerIndex; SimulatorAddress workerAddress = agent.getAgentConnector().addWorker(workerIndex, "127.0.0.1", workerPort); WorkerType workerType = workerJvmSettings.getWorkerType(); agent.getCoordinatorLogger().debug(format("Created %s Worker %s", workerType, workerAddress)); return true; } catch (Exception e) { return false; } } } }
apache-2.0
mike10004/appengine-imaging
gaecompat-awt-imaging/src/common/com/gaecompat/repackaged/com/google/common/collect/Sets.java
60237
/* * Copyright (C) 2007 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gaecompat.repackaged.com.google.common.collect; import static com.gaecompat.repackaged.com.google.common.base.Preconditions.checkArgument; import static com.gaecompat.repackaged.com.google.common.base.Preconditions.checkNotNull; import com.gaecompat.repackaged.com.google.common.annotations.GwtCompatible; import com.gaecompat.repackaged.com.google.common.annotations.GwtIncompatible; import com.gaecompat.repackaged.com.google.common.base.Predicate; import com.gaecompat.repackaged.com.google.common.base.Predicates; import com.gaecompat.repackaged.com.google.common.collect.Collections2.FilteredCollection; import java.io.Serializable; import java.util.AbstractSet; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.NavigableSet; import java.util.NoSuchElementException; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArraySet; import javax.annotation.Nullable; /** * Static utility methods pertaining to {@link Set} instances. Also see this * class's counterparts {@link Lists}, {@link Maps} and {@link Queues}. 
* * <p>See the Guava User Guide article on <a href= * "http://code.google.com/p/guava-libraries/wiki/CollectionUtilitiesExplained#Sets"> * {@code Sets}</a>. * * @author Kevin Bourrillion * @author Jared Levy * @author Chris Povirk * @since 2.0 (imported from Google Collections Library) */ @GwtCompatible(emulated = true) public final class Sets { private Sets() {} /** * {@link AbstractSet} substitute without the potentially-quadratic * {@code removeAll} implementation. */ abstract static class ImprovedAbstractSet<E> extends AbstractSet<E> { @Override public boolean removeAll(Collection<?> c) { return removeAllImpl(this, c); } @Override public boolean retainAll(Collection<?> c) { return super.retainAll(checkNotNull(c)); // GWT compatibility } } /** * Returns an immutable set instance containing the given enum elements. * Internally, the returned set will be backed by an {@link EnumSet}. * * <p>The iteration order of the returned set follows the enum's iteration * order, not the order in which the elements are provided to the method. * * @param anElement one of the elements the set should contain * @param otherElements the rest of the elements the set should contain * @return an immutable set containing those elements, minus duplicates */ // http://code.google.com/p/google-web-toolkit/issues/detail?id=3028 @GwtCompatible(serializable = true) public static <E extends Enum<E>> ImmutableSet<E> immutableEnumSet( E anElement, E... otherElements) { return ImmutableEnumSet.asImmutable(EnumSet.of(anElement, otherElements)); } /** * Returns an immutable set instance containing the given enum elements. * Internally, the returned set will be backed by an {@link EnumSet}. * * <p>The iteration order of the returned set follows the enum's iteration * order, not the order in which the elements appear in the given collection. 
* * @param elements the elements, all of the same {@code enum} type, that the * set should contain * @return an immutable set containing those elements, minus duplicates */ // http://code.google.com/p/google-web-toolkit/issues/detail?id=3028 @GwtCompatible(serializable = true) public static <E extends Enum<E>> ImmutableSet<E> immutableEnumSet( Iterable<E> elements) { if (elements instanceof ImmutableEnumSet) { return (ImmutableEnumSet<E>) elements; } else if (elements instanceof Collection) { Collection<E> collection = (Collection<E>) elements; if (collection.isEmpty()) { return ImmutableSet.of(); } else { return ImmutableEnumSet.asImmutable(EnumSet.copyOf(collection)); } } else { Iterator<E> itr = elements.iterator(); if (itr.hasNext()) { EnumSet<E> enumSet = EnumSet.of(itr.next()); Iterators.addAll(enumSet, itr); return ImmutableEnumSet.asImmutable(enumSet); } else { return ImmutableSet.of(); } } } /** * Returns a new {@code EnumSet} instance containing the given elements. * Unlike {@link EnumSet#copyOf(Collection)}, this method does not produce an * exception on an empty collection, and it may be called on any iterable, not * just a {@code Collection}. */ public static <E extends Enum<E>> EnumSet<E> newEnumSet(Iterable<E> iterable, Class<E> elementType) { EnumSet<E> set = EnumSet.noneOf(elementType); Iterables.addAll(set, iterable); return set; } // HashSet /** * Creates a <i>mutable</i>, empty {@code HashSet} instance. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableSet#of()} instead. * * <p><b>Note:</b> if {@code E} is an {@link Enum} type, use {@link * EnumSet#noneOf} instead. * * @return a new, empty {@code HashSet} */ public static <E> HashSet<E> newHashSet() { return new HashSet<E>(); } /** * Creates a <i>mutable</i> {@code HashSet} instance containing the given * elements in unspecified order. 
* * <p><b>Note:</b> if mutability is not required and the elements are * non-null, use an overload of {@link ImmutableSet#of()} (for varargs) or * {@link ImmutableSet#copyOf(Object[])} (for an array) instead. * * <p><b>Note:</b> if {@code E} is an {@link Enum} type, use {@link * EnumSet#of(Enum, Enum[])} instead. * * @param elements the elements that the set should contain * @return a new {@code HashSet} containing those elements (minus duplicates) */ public static <E> HashSet<E> newHashSet(E... elements) { HashSet<E> set = newHashSetWithExpectedSize(elements.length); Collections.addAll(set, elements); return set; } /** * Creates a {@code HashSet} instance, with a high enough "initial capacity" * that it <i>should</i> hold {@code expectedSize} elements without growth. * This behavior cannot be broadly guaranteed, but it is observed to be true * for OpenJDK 1.6. It also can't be guaranteed that the method isn't * inadvertently <i>oversizing</i> the returned set. * * @param expectedSize the number of elements you expect to add to the * returned set * @return a new, empty {@code HashSet} with enough capacity to hold {@code * expectedSize} elements without resizing * @throws IllegalArgumentException if {@code expectedSize} is negative */ public static <E> HashSet<E> newHashSetWithExpectedSize(int expectedSize) { return new HashSet<E>(Maps.capacity(expectedSize)); } /** * Creates a <i>mutable</i> {@code HashSet} instance containing the given * elements in unspecified order. * * <p><b>Note:</b> if mutability is not required and the elements are * non-null, use {@link ImmutableSet#copyOf(Iterable)} instead. * * <p><b>Note:</b> if {@code E} is an {@link Enum} type, use * {@link #newEnumSet(Iterable, Class)} instead. * * @param elements the elements that the set should contain * @return a new {@code HashSet} containing those elements (minus duplicates) */ public static <E> HashSet<E> newHashSet(Iterable<? extends E> elements) { return (elements instanceof Collection) ? 
new HashSet<E>(Collections2.cast(elements)) : newHashSet(elements.iterator()); } /** * Creates a <i>mutable</i> {@code HashSet} instance containing the given * elements in unspecified order. * * <p><b>Note:</b> if mutability is not required and the elements are * non-null, use {@link ImmutableSet#copyOf(Iterable)} instead. * * <p><b>Note:</b> if {@code E} is an {@link Enum} type, you should create an * {@link EnumSet} instead. * * @param elements the elements that the set should contain * @return a new {@code HashSet} containing those elements (minus duplicates) */ public static <E> HashSet<E> newHashSet(Iterator<? extends E> elements) { HashSet<E> set = newHashSet(); Iterators.addAll(set, elements); return set; } /** * Creates a thread-safe set backed by a hash map. The set is backed by a * {@link ConcurrentHashMap} instance, and thus carries the same concurrency * guarantees. * * <p>Unlike {@code HashSet}, this class does NOT allow {@code null} to be * used as an element. The set is serializable. * * @return a new, empty thread-safe {@code Set} * @since 15.0 */ public static <E> Set<E> newConcurrentHashSet() { return newSetFromMap(new ConcurrentHashMap<E, Boolean>()); } /** * Creates a thread-safe set backed by a hash map and containing the given * elements. The set is backed by a {@link ConcurrentHashMap} instance, and * thus carries the same concurrency guarantees. * * <p>Unlike {@code HashSet}, this class does NOT allow {@code null} to be * used as an element. The set is serializable. * * @param elements the elements that the set should contain * @return a new thread-safe set containing those elements (minus duplicates) * @throws NullPointerException if {@code elements} or any of its contents is * null * @since 15.0 */ public static <E> Set<E> newConcurrentHashSet( Iterable<? 
extends E> elements) { Set<E> set = newConcurrentHashSet(); Iterables.addAll(set, elements); return set; } // LinkedHashSet /** * Creates a <i>mutable</i>, empty {@code LinkedHashSet} instance. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableSet#of()} instead. * * @return a new, empty {@code LinkedHashSet} */ public static <E> LinkedHashSet<E> newLinkedHashSet() { return new LinkedHashSet<E>(); } /** * Creates a {@code LinkedHashSet} instance, with a high enough "initial * capacity" that it <i>should</i> hold {@code expectedSize} elements without * growth. This behavior cannot be broadly guaranteed, but it is observed to * be true for OpenJDK 1.6. It also can't be guaranteed that the method isn't * inadvertently <i>oversizing</i> the returned set. * * @param expectedSize the number of elements you expect to add to the * returned set * @return a new, empty {@code LinkedHashSet} with enough capacity to hold * {@code expectedSize} elements without resizing * @throws IllegalArgumentException if {@code expectedSize} is negative * @since 11.0 */ public static <E> LinkedHashSet<E> newLinkedHashSetWithExpectedSize( int expectedSize) { return new LinkedHashSet<E>(Maps.capacity(expectedSize)); } /** * Creates a <i>mutable</i> {@code LinkedHashSet} instance containing the * given elements in order. * * <p><b>Note:</b> if mutability is not required and the elements are * non-null, use {@link ImmutableSet#copyOf(Iterable)} instead. * * @param elements the elements that the set should contain, in order * @return a new {@code LinkedHashSet} containing those elements (minus * duplicates) */ public static <E> LinkedHashSet<E> newLinkedHashSet( Iterable<? 
extends E> elements) { if (elements instanceof Collection) { return new LinkedHashSet<E>(Collections2.cast(elements)); } LinkedHashSet<E> set = newLinkedHashSet(); Iterables.addAll(set, elements); return set; } // TreeSet /** * Creates a <i>mutable</i>, empty {@code TreeSet} instance sorted by the * natural sort ordering of its elements. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableSortedSet#of()} instead. * * @return a new, empty {@code TreeSet} */ public static <E extends Comparable> TreeSet<E> newTreeSet() { return new TreeSet<E>(); } /** * Creates a <i>mutable</i> {@code TreeSet} instance containing the given * elements sorted by their natural ordering. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableSortedSet#copyOf(Iterable)} instead. * * <p><b>Note:</b> If {@code elements} is a {@code SortedSet} with an explicit * comparator, this method has different behavior than * {@link TreeSet#TreeSet(SortedSet)}, which returns a {@code TreeSet} with * that comparator. * * @param elements the elements that the set should contain * @return a new {@code TreeSet} containing those elements (minus duplicates) */ public static <E extends Comparable> TreeSet<E> newTreeSet( Iterable<? extends E> elements) { TreeSet<E> set = newTreeSet(); Iterables.addAll(set, elements); return set; } /** * Creates a <i>mutable</i>, empty {@code TreeSet} instance with the given * comparator. * * <p><b>Note:</b> if mutability is not required, use {@code * ImmutableSortedSet.orderedBy(comparator).build()} instead. * * @param comparator the comparator to use to sort the set * @return a new, empty {@code TreeSet} * @throws NullPointerException if {@code comparator} is null */ public static <E> TreeSet<E> newTreeSet(Comparator<? super E> comparator) { return new TreeSet<E>(checkNotNull(comparator)); } /** * Creates an empty {@code Set} that uses identity to determine equality. 
It * compares object references, instead of calling {@code equals}, to * determine whether a provided object matches an element in the set. For * example, {@code contains} returns {@code false} when passed an object that * equals a set member, but isn't the same instance. This behavior is similar * to the way {@code IdentityHashMap} handles key lookups. * * @since 8.0 */ public static <E> Set<E> newIdentityHashSet() { return Sets.newSetFromMap(Maps.<E, Boolean>newIdentityHashMap()); } /** * Creates an empty {@code CopyOnWriteArraySet} instance. * * <p><b>Note:</b> if you need an immutable empty {@link Set}, use * {@link Collections#emptySet} instead. * * @return a new, empty {@code CopyOnWriteArraySet} * @since 12.0 */ @GwtIncompatible("CopyOnWriteArraySet") public static <E> CopyOnWriteArraySet<E> newCopyOnWriteArraySet() { return new CopyOnWriteArraySet<E>(); } /** * Creates a {@code CopyOnWriteArraySet} instance containing the given elements. * * @param elements the elements that the set should contain, in order * @return a new {@code CopyOnWriteArraySet} containing those elements * @since 12.0 */ @GwtIncompatible("CopyOnWriteArraySet") public static <E> CopyOnWriteArraySet<E> newCopyOnWriteArraySet( Iterable<? extends E> elements) { // We copy elements to an ArrayList first, rather than incurring the // quadratic cost of adding them to the COWAS directly. Collection<? extends E> elementsCollection = (elements instanceof Collection) ? Collections2.cast(elements) : Lists.newArrayList(elements); return new CopyOnWriteArraySet<E>(elementsCollection); } /** * Creates an {@code EnumSet} consisting of all enum values that are not in * the specified collection. If the collection is an {@link EnumSet}, this * method has the same behavior as {@link EnumSet#complementOf}. Otherwise, * the specified collection must contain at least one element, in order to * determine the element type. 
If the collection could be empty, use * {@link #complementOf(Collection, Class)} instead of this method. * * @param collection the collection whose complement should be stored in the * enum set * @return a new, modifiable {@code EnumSet} containing all values of the enum * that aren't present in the given collection * @throws IllegalArgumentException if {@code collection} is not an * {@code EnumSet} instance and contains no elements */ public static <E extends Enum<E>> EnumSet<E> complementOf( Collection<E> collection) { if (collection instanceof EnumSet) { return EnumSet.complementOf((EnumSet<E>) collection); } checkArgument(!collection.isEmpty(), "collection is empty; use the other version of this method"); Class<E> type = collection.iterator().next().getDeclaringClass(); return makeComplementByHand(collection, type); } /** * Creates an {@code EnumSet} consisting of all enum values that are not in * the specified collection. This is equivalent to * {@link EnumSet#complementOf}, but can act on any input collection, as long * as the elements are of enum type. * * @param collection the collection whose complement should be stored in the * {@code EnumSet} * @param type the type of the elements in the set * @return a new, modifiable {@code EnumSet} initially containing all the * values of the enum not present in the given collection */ public static <E extends Enum<E>> EnumSet<E> complementOf( Collection<E> collection, Class<E> type) { checkNotNull(collection); return (collection instanceof EnumSet) ? EnumSet.complementOf((EnumSet<E>) collection) : makeComplementByHand(collection, type); } private static <E extends Enum<E>> EnumSet<E> makeComplementByHand( Collection<E> collection, Class<E> type) { EnumSet<E> result = EnumSet.allOf(type); result.removeAll(collection); return result; } /** * Returns a set backed by the specified map. The resulting set displays * the same ordering, concurrency, and performance characteristics as the * backing map. 
In essence, this factory method provides a {@link Set} * implementation corresponding to any {@link Map} implementation. There is no * need to use this method on a {@link Map} implementation that already has a * corresponding {@link Set} implementation (such as {@link java.util.HashMap} * or {@link java.util.TreeMap}). * * <p>Each method invocation on the set returned by this method results in * exactly one method invocation on the backing map or its {@code keySet} * view, with one exception. The {@code addAll} method is implemented as a * sequence of {@code put} invocations on the backing map. * * <p>The specified map must be empty at the time this method is invoked, * and should not be accessed directly after this method returns. These * conditions are ensured if the map is created empty, passed directly * to this method, and no reference to the map is retained, as illustrated * in the following code fragment: <pre> {@code * * Set<Object> identityHashSet = Sets.newSetFromMap( * new IdentityHashMap<Object, Boolean>());}</pre> * * <p>This method has the same behavior as the JDK 6 method * {@code Collections.newSetFromMap()}. The returned set is serializable if * the backing map is. * * @param map the backing map * @return the set backed by the map * @throws IllegalArgumentException if {@code map} is not empty */ public static <E> Set<E> newSetFromMap(Map<E, Boolean> map) { return Platform.newSetFromMap(map); } /** * An unmodifiable view of a set which may be backed by other sets; this view * will change as the backing sets do. Contains methods to copy the data into * a new set which will then remain stable. There is usually no reason to * retain a reference of type {@code SetView}; typically, you either use it * as a plain {@link Set}, or immediately invoke {@link #immutableCopy} or * {@link #copyInto} and forget the {@code SetView} itself. 
* * @since 2.0 (imported from Google Collections Library) */ public abstract static class SetView<E> extends AbstractSet<E> { private SetView() {} // no subclasses but our own /** * Returns an immutable copy of the current contents of this set view. * Does not support null elements. * * <p><b>Warning:</b> this may have unexpected results if a backing set of * this view uses a nonstandard notion of equivalence, for example if it is * a {@link TreeSet} using a comparator that is inconsistent with {@link * Object#equals(Object)}. */ public ImmutableSet<E> immutableCopy() { return ImmutableSet.copyOf(this); } /** * Copies the current contents of this set view into an existing set. This * method has equivalent behavior to {@code set.addAll(this)}, assuming that * all the sets involved are based on the same notion of equivalence. * * @return a reference to {@code set}, for convenience */ // Note: S should logically extend Set<? super E> but can't due to either // some javac bug or some weirdness in the spec, not sure which. public <S extends Set<E>> S copyInto(S set) { set.addAll(this); return set; } } /** * Returns an unmodifiable <b>view</b> of the union of two sets. The returned * set contains all elements that are contained in either backing set. * Iterating over the returned set iterates first over all the elements of * {@code set1}, then over each element of {@code set2}, in order, that is not * contained in {@code set1}. * * <p>Results are undefined if {@code set1} and {@code set2} are sets based on * different equivalence relations (as {@link HashSet}, {@link TreeSet}, and * the {@link Map#keySet} of an {@code IdentityHashMap} all are). * * <p><b>Note:</b> The returned view performs better when {@code set1} is the * smaller of the two sets. If you have reason to believe one of your sets * will generally be smaller than the other, pass it first. * * <p>Further, note that the current implementation is not suitable for nested * {@code union} views, i.e. 
the following should be avoided when in a loop: * {@code union = Sets.union(union, anotherSet);}, since iterating over the resulting * set has a cubic complexity to the depth of the nesting. */ public static <E> SetView<E> union( final Set<? extends E> set1, final Set<? extends E> set2) { checkNotNull(set1, "set1"); checkNotNull(set2, "set2"); final Set<? extends E> set2minus1 = difference(set2, set1); return new SetView<E>() { @Override public int size() { return set1.size() + set2minus1.size(); } @Override public boolean isEmpty() { return set1.isEmpty() && set2.isEmpty(); } @Override public Iterator<E> iterator() { return Iterators.unmodifiableIterator( Iterators.concat(set1.iterator(), set2minus1.iterator())); } @Override public boolean contains(Object object) { return set1.contains(object) || set2.contains(object); } @Override public <S extends Set<E>> S copyInto(S set) { set.addAll(set1); set.addAll(set2); return set; } @Override public ImmutableSet<E> immutableCopy() { return new ImmutableSet.Builder<E>() .addAll(set1).addAll(set2).build(); } }; } /** * Returns an unmodifiable <b>view</b> of the intersection of two sets. The * returned set contains all elements that are contained by both backing sets. * The iteration order of the returned set matches that of {@code set1}. * * <p>Results are undefined if {@code set1} and {@code set2} are sets based * on different equivalence relations (as {@code HashSet}, {@code TreeSet}, * and the keySet of an {@code IdentityHashMap} all are). * * <p><b>Note:</b> The returned view performs slightly better when {@code * set1} is the smaller of the two sets. If you have reason to believe one of * your sets will generally be smaller than the other, pass it first. * Unfortunately, since this method sets the generic type of the returned set * based on the type of the first set passed, this could in rare cases force * you to make a cast, for example: <pre> {@code * * Set<Object> aFewBadObjects = ... 
* Set<String> manyBadStrings = ... * * // impossible for a non-String to be in the intersection * SuppressWarnings("unchecked") * Set<String> badStrings = (Set) Sets.intersection( * aFewBadObjects, manyBadStrings);}</pre> * * <p>This is unfortunate, but should come up only very rarely. */ public static <E> SetView<E> intersection( final Set<E> set1, final Set<?> set2) { checkNotNull(set1, "set1"); checkNotNull(set2, "set2"); final Predicate<Object> inSet2 = Predicates.in(set2); return new SetView<E>() { @Override public Iterator<E> iterator() { return Iterators.filter(set1.iterator(), inSet2); } @Override public int size() { return Iterators.size(iterator()); } @Override public boolean isEmpty() { return !iterator().hasNext(); } @Override public boolean contains(Object object) { return set1.contains(object) && set2.contains(object); } @Override public boolean containsAll(Collection<?> collection) { return set1.containsAll(collection) && set2.containsAll(collection); } }; } /** * Returns an unmodifiable <b>view</b> of the difference of two sets. The * returned set contains all elements that are contained by {@code set1} and * not contained by {@code set2}. {@code set2} may also contain elements not * present in {@code set1}; these are simply ignored. The iteration order of * the returned set matches that of {@code set1}. * * <p>Results are undefined if {@code set1} and {@code set2} are sets based * on different equivalence relations (as {@code HashSet}, {@code TreeSet}, * and the keySet of an {@code IdentityHashMap} all are). 
*/ public static <E> SetView<E> difference( final Set<E> set1, final Set<?> set2) { checkNotNull(set1, "set1"); checkNotNull(set2, "set2"); final Predicate<Object> notInSet2 = Predicates.not(Predicates.in(set2)); return new SetView<E>() { @Override public Iterator<E> iterator() { return Iterators.filter(set1.iterator(), notInSet2); } @Override public int size() { return Iterators.size(iterator()); } @Override public boolean isEmpty() { return set2.containsAll(set1); } @Override public boolean contains(Object element) { return set1.contains(element) && !set2.contains(element); } }; } /** * Returns an unmodifiable <b>view</b> of the symmetric difference of two * sets. The returned set contains all elements that are contained in either * {@code set1} or {@code set2} but not in both. The iteration order of the * returned set is undefined. * * <p>Results are undefined if {@code set1} and {@code set2} are sets based * on different equivalence relations (as {@code HashSet}, {@code TreeSet}, * and the keySet of an {@code IdentityHashMap} all are). * * @since 3.0 */ public static <E> SetView<E> symmetricDifference( Set<? extends E> set1, Set<? extends E> set2) { checkNotNull(set1, "set1"); checkNotNull(set2, "set2"); // TODO(kevinb): Replace this with a more efficient implementation return difference(union(set1, set2), intersection(set1, set2)); } /** * Returns the elements of {@code unfiltered} that satisfy a predicate. The * returned set is a live view of {@code unfiltered}; changes to one affect * the other. * * <p>The resulting set's iterator does not support {@code remove()}, but all * other set methods are supported. When given an element that doesn't satisfy * the predicate, the set's {@code add()} and {@code addAll()} methods throw * an {@link IllegalArgumentException}. When methods such as {@code * removeAll()} and {@code clear()} are called on the filtered set, only * elements that satisfy the filter will be removed from the underlying set. 
* * <p>The returned set isn't threadsafe or serializable, even if * {@code unfiltered} is. * * <p>Many of the filtered set's methods, such as {@code size()}, iterate * across every element in the underlying set and determine which elements * satisfy the filter. When a live view is <i>not</i> needed, it may be faster * to copy {@code Iterables.filter(unfiltered, predicate)} and use the copy. * * <p><b>Warning:</b> {@code predicate} must be <i>consistent with equals</i>, * as documented at {@link Predicate#apply}. Do not provide a predicate such * as {@code Predicates.instanceOf(ArrayList.class)}, which is inconsistent * with equals. (See {@link Iterables#filter(Iterable, Class)} for related * functionality.) */ // TODO(kevinb): how to omit that last sentence when building GWT javadoc? public static <E> Set<E> filter( Set<E> unfiltered, Predicate<? super E> predicate) { if (unfiltered instanceof SortedSet) { return filter((SortedSet<E>) unfiltered, predicate); } if (unfiltered instanceof FilteredSet) { // Support clear(), removeAll(), and retainAll() when filtering a filtered // collection. FilteredSet<E> filtered = (FilteredSet<E>) unfiltered; Predicate<E> combinedPredicate = Predicates.<E>and(filtered.predicate, predicate); return new FilteredSet<E>( (Set<E>) filtered.unfiltered, combinedPredicate); } return new FilteredSet<E>( checkNotNull(unfiltered), checkNotNull(predicate)); } private static class FilteredSet<E> extends FilteredCollection<E> implements Set<E> { FilteredSet(Set<E> unfiltered, Predicate<? super E> predicate) { super(unfiltered, predicate); } @Override public boolean equals(@Nullable Object object) { return equalsImpl(this, object); } @Override public int hashCode() { return hashCodeImpl(this); } } /** * Returns the elements of a {@code SortedSet}, {@code unfiltered}, that * satisfy a predicate. The returned set is a live view of {@code unfiltered}; * changes to one affect the other. 
* * <p>The resulting set's iterator does not support {@code remove()}, but all * other set methods are supported. When given an element that doesn't satisfy * the predicate, the set's {@code add()} and {@code addAll()} methods throw * an {@link IllegalArgumentException}. When methods such as * {@code removeAll()} and {@code clear()} are called on the filtered set, * only elements that satisfy the filter will be removed from the underlying * set. * * <p>The returned set isn't threadsafe or serializable, even if * {@code unfiltered} is. * * <p>Many of the filtered set's methods, such as {@code size()}, iterate across * every element in the underlying set and determine which elements satisfy * the filter. When a live view is <i>not</i> needed, it may be faster to copy * {@code Iterables.filter(unfiltered, predicate)} and use the copy. * * <p><b>Warning:</b> {@code predicate} must be <i>consistent with equals</i>, * as documented at {@link Predicate#apply}. Do not provide a predicate such as * {@code Predicates.instanceOf(ArrayList.class)}, which is inconsistent with * equals. (See {@link Iterables#filter(Iterable, Class)} for related * functionality.) * * @since 11.0 */ public static <E> SortedSet<E> filter( SortedSet<E> unfiltered, Predicate<? super E> predicate) { return Platform.setsFilterSortedSet(unfiltered, predicate); } static <E> SortedSet<E> filterSortedIgnoreNavigable( SortedSet<E> unfiltered, Predicate<? super E> predicate) { if (unfiltered instanceof FilteredSet) { // Support clear(), removeAll(), and retainAll() when filtering a filtered // collection. 
FilteredSet<E> filtered = (FilteredSet<E>) unfiltered; Predicate<E> combinedPredicate = Predicates.<E>and(filtered.predicate, predicate); return new FilteredSortedSet<E>( (SortedSet<E>) filtered.unfiltered, combinedPredicate); } return new FilteredSortedSet<E>( checkNotNull(unfiltered), checkNotNull(predicate)); } private static class FilteredSortedSet<E> extends FilteredSet<E> implements SortedSet<E> { FilteredSortedSet(SortedSet<E> unfiltered, Predicate<? super E> predicate) { super(unfiltered, predicate); } @Override public Comparator<? super E> comparator() { return ((SortedSet<E>) unfiltered).comparator(); } @Override public SortedSet<E> subSet(E fromElement, E toElement) { return new FilteredSortedSet<E>(((SortedSet<E>) unfiltered).subSet(fromElement, toElement), predicate); } @Override public SortedSet<E> headSet(E toElement) { return new FilteredSortedSet<E>(((SortedSet<E>) unfiltered).headSet(toElement), predicate); } @Override public SortedSet<E> tailSet(E fromElement) { return new FilteredSortedSet<E>(((SortedSet<E>) unfiltered).tailSet(fromElement), predicate); } @Override public E first() { return iterator().next(); } @Override public E last() { SortedSet<E> sortedUnfiltered = (SortedSet<E>) unfiltered; while (true) { E element = sortedUnfiltered.last(); if (predicate.apply(element)) { return element; } sortedUnfiltered = sortedUnfiltered.headSet(element); } } } /** * Returns the elements of a {@code NavigableSet}, {@code unfiltered}, that * satisfy a predicate. The returned set is a live view of {@code unfiltered}; * changes to one affect the other. * * <p>The resulting set's iterator does not support {@code remove()}, but all * other set methods are supported. When given an element that doesn't satisfy * the predicate, the set's {@code add()} and {@code addAll()} methods throw * an {@link IllegalArgumentException}. 
When methods such as * {@code removeAll()} and {@code clear()} are called on the filtered set, * only elements that satisfy the filter will be removed from the underlying * set. * * <p>The returned set isn't threadsafe or serializable, even if * {@code unfiltered} is. * * <p>Many of the filtered set's methods, such as {@code size()}, iterate across * every element in the underlying set and determine which elements satisfy * the filter. When a live view is <i>not</i> needed, it may be faster to copy * {@code Iterables.filter(unfiltered, predicate)} and use the copy. * * <p><b>Warning:</b> {@code predicate} must be <i>consistent with equals</i>, * as documented at {@link Predicate#apply}. Do not provide a predicate such as * {@code Predicates.instanceOf(ArrayList.class)}, which is inconsistent with * equals. (See {@link Iterables#filter(Iterable, Class)} for related * functionality.) * * @since 14.0 */ @GwtIncompatible("NavigableSet") @SuppressWarnings("unchecked") public static <E> NavigableSet<E> filter( NavigableSet<E> unfiltered, Predicate<? super E> predicate) { if (unfiltered instanceof FilteredSet) { // Support clear(), removeAll(), and retainAll() when filtering a filtered // collection. FilteredSet<E> filtered = (FilteredSet<E>) unfiltered; Predicate<E> combinedPredicate = Predicates.<E>and(filtered.predicate, predicate); return new FilteredNavigableSet<E>( (NavigableSet<E>) filtered.unfiltered, combinedPredicate); } return new FilteredNavigableSet<E>( checkNotNull(unfiltered), checkNotNull(predicate)); } @GwtIncompatible("NavigableSet") private static class FilteredNavigableSet<E> extends FilteredSortedSet<E> implements NavigableSet<E> { FilteredNavigableSet(NavigableSet<E> unfiltered, Predicate<? 
super E> predicate) { super(unfiltered, predicate); } NavigableSet<E> unfiltered() { return (NavigableSet<E>) unfiltered; } @Override @Nullable public E lower(E e) { return Iterators.getNext(headSet(e, false).descendingIterator(), null); } @Override @Nullable public E floor(E e) { return Iterators.getNext(headSet(e, true).descendingIterator(), null); } @Override public E ceiling(E e) { return Iterables.getFirst(tailSet(e, true), null); } @Override public E higher(E e) { return Iterables.getFirst(tailSet(e, false), null); } @Override public E pollFirst() { return Iterables.removeFirstMatching(unfiltered(), predicate); } @Override public E pollLast() { return Iterables.removeFirstMatching(unfiltered().descendingSet(), predicate); } @Override public NavigableSet<E> descendingSet() { return Sets.filter(unfiltered().descendingSet(), predicate); } @Override public Iterator<E> descendingIterator() { return Iterators.filter(unfiltered().descendingIterator(), predicate); } @Override public E last() { return descendingIterator().next(); } @Override public NavigableSet<E> subSet( E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) { return filter( unfiltered().subSet(fromElement, fromInclusive, toElement, toInclusive), predicate); } @Override public NavigableSet<E> headSet(E toElement, boolean inclusive) { return filter(unfiltered().headSet(toElement, inclusive), predicate); } @Override public NavigableSet<E> tailSet(E fromElement, boolean inclusive) { return filter(unfiltered().tailSet(fromElement, inclusive), predicate); } } /** * Returns every possible list that can be formed by choosing one element * from each of the given sets in order; the "n-ary * <a href="http://en.wikipedia.org/wiki/Cartesian_product">Cartesian * product</a>" of the sets. 
For example: <pre> {@code * * Sets.cartesianProduct(ImmutableList.of( * ImmutableSet.of(1, 2), * ImmutableSet.of("A", "B", "C")))}</pre> * * <p>returns a set containing six lists: * * <ul> * <li>{@code ImmutableList.of(1, "A")} * <li>{@code ImmutableList.of(1, "B")} * <li>{@code ImmutableList.of(1, "C")} * <li>{@code ImmutableList.of(2, "A")} * <li>{@code ImmutableList.of(2, "B")} * <li>{@code ImmutableList.of(2, "C")} * </ul> * * <p>The result is guaranteed to be in the "traditional", lexicographical * order for Cartesian products that you would get from nesting for loops: * <pre> {@code * * for (B b0 : sets.get(0)) { * for (B b1 : sets.get(1)) { * ... * ImmutableList<B> tuple = ImmutableList.of(b0, b1, ...); * // operate on tuple * } * }}</pre> * * <p>Note that if any input set is empty, the Cartesian product will also be * empty. If no sets at all are provided (an empty list), the resulting * Cartesian product has one element, an empty list (counter-intuitive, but * mathematically consistent). * * <p><i>Performance notes:</i> while the cartesian product of sets of size * {@code m, n, p} is a set of size {@code m x n x p}, its actual memory * consumption is much smaller. When the cartesian set is constructed, the * input sets are merely copied. Only as the resulting set is iterated are the * individual lists created, and these are not retained after iteration. * * @param sets the sets to choose elements from, in the order that * the elements chosen from those sets should appear in the resulting * lists * @param <B> any common base class shared by all axes (often just {@link * Object}) * @return the Cartesian product, as an immutable set containing immutable * lists * @throws NullPointerException if {@code sets}, any one of the {@code sets}, * or any element of a provided set is null * @since 2.0 */ public static <B> Set<List<B>> cartesianProduct( List<? extends Set<? 
extends B>> sets) { return CartesianSet.create(sets); } /** * Returns every possible list that can be formed by choosing one element * from each of the given sets in order; the "n-ary * <a href="http://en.wikipedia.org/wiki/Cartesian_product">Cartesian * product</a>" of the sets. For example: <pre> {@code * * Sets.cartesianProduct( * ImmutableSet.of(1, 2), * ImmutableSet.of("A", "B", "C"))}</pre> * * <p>returns a set containing six lists: * * <ul> * <li>{@code ImmutableList.of(1, "A")} * <li>{@code ImmutableList.of(1, "B")} * <li>{@code ImmutableList.of(1, "C")} * <li>{@code ImmutableList.of(2, "A")} * <li>{@code ImmutableList.of(2, "B")} * <li>{@code ImmutableList.of(2, "C")} * </ul> * * <p>The result is guaranteed to be in the "traditional", lexicographical * order for Cartesian products that you would get from nesting for loops: * <pre> {@code * * for (B b0 : sets.get(0)) { * for (B b1 : sets.get(1)) { * ... * ImmutableList<B> tuple = ImmutableList.of(b0, b1, ...); * // operate on tuple * } * }}</pre> * * <p>Note that if any input set is empty, the Cartesian product will also be * empty. If no sets at all are provided (an empty list), the resulting * Cartesian product has one element, an empty list (counter-intuitive, but * mathematically consistent). * * <p><i>Performance notes:</i> while the cartesian product of sets of size * {@code m, n, p} is a set of size {@code m x n x p}, its actual memory * consumption is much smaller. When the cartesian set is constructed, the * input sets are merely copied. Only as the resulting set is iterated are the * individual lists created, and these are not retained after iteration. 
* * @param sets the sets to choose elements from, in the order that * the elements chosen from those sets should appear in the resulting * lists * @param <B> any common base class shared by all axes (often just {@link * Object}) * @return the Cartesian product, as an immutable set containing immutable * lists * @throws NullPointerException if {@code sets}, any one of the {@code sets}, * or any element of a provided set is null * @since 2.0 */ public static <B> Set<List<B>> cartesianProduct( Set<? extends B>... sets) { return cartesianProduct(Arrays.asList(sets)); } private static final class CartesianSet<E> extends ForwardingCollection<List<E>> implements Set<List<E>> { private transient final ImmutableList<ImmutableSet<E>> axes; private transient final CartesianList<E> delegate; static <E> Set<List<E>> create(List<? extends Set<? extends E>> sets) { ImmutableList.Builder<ImmutableSet<E>> axesBuilder = new ImmutableList.Builder<ImmutableSet<E>>(sets.size()); for (Set<? extends E> set : sets) { ImmutableSet<E> copy = ImmutableSet.copyOf(set); if (copy.isEmpty()) { return ImmutableSet.of(); } axesBuilder.add(copy); } final ImmutableList<ImmutableSet<E>> axes = axesBuilder.build(); ImmutableList<List<E>> listAxes = new ImmutableList<List<E>>() { @Override public int size() { return axes.size(); } @Override public List<E> get(int index) { return axes.get(index).asList(); } @Override boolean isPartialView() { return true; } }; return new CartesianSet<E>(axes, new CartesianList<E>(listAxes)); } private CartesianSet( ImmutableList<ImmutableSet<E>> axes, CartesianList<E> delegate) { this.axes = axes; this.delegate = delegate; } @Override protected Collection<List<E>> delegate() { return delegate; } @Override public boolean equals(@Nullable Object object) { // Warning: this is broken if size() == 0, so it is critical that we // substitute an empty ImmutableSet to the user in place of this if (object instanceof CartesianSet) { CartesianSet<?> that = (CartesianSet<?>) object; 
return this.axes.equals(that.axes); } return super.equals(object); } @Override public int hashCode() { // Warning: this is broken if size() == 0, so it is critical that we // substitute an empty ImmutableSet to the user in place of this // It's a weird formula, but tests prove it works. int adjust = size() - 1; for (int i = 0; i < axes.size(); i++) { adjust *= 31; adjust = ~~adjust; // in GWT, we have to deal with integer overflow carefully } int hash = 1; for (Set<E> axis : axes) { hash = 31 * hash + (size() / axis.size() * axis.hashCode()); hash = ~~hash; } hash += adjust; return ~~hash; } } /** * Returns the set of all possible subsets of {@code set}. For example, * {@code powerSet(ImmutableSet.of(1, 2))} returns the set {@code {{}, * {1}, {2}, {1, 2}}}. * * <p>Elements appear in these subsets in the same iteration order as they * appeared in the input set. The order in which these subsets appear in the * outer set is undefined. Note that the power set of the empty set is not the * empty set, but a one-element set containing the empty set. * * <p>The returned set and its constituent sets use {@code equals} to decide * whether two elements are identical, even if the input set uses a different * concept of equivalence. * * <p><i>Performance notes:</i> while the power set of a set with size {@code * n} is of size {@code 2^n}, its memory usage is only {@code O(n)}. When the * power set is constructed, the input set is merely copied. Only as the * power set is iterated are the individual subsets created, and these subsets * themselves occupy only a small constant amount of memory. 
* * @param set the set of elements to construct a power set from * @return the power set, as an immutable set of immutable sets * @throws IllegalArgumentException if {@code set} has more than 30 unique * elements (causing the power set size to exceed the {@code int} range) * @throws NullPointerException if {@code set} is or contains {@code null} * @see <a href="http://en.wikipedia.org/wiki/Power_set">Power set article at * Wikipedia</a> * @since 4.0 */ @GwtCompatible(serializable = false) public static <E> Set<Set<E>> powerSet(Set<E> set) { return new PowerSet<E>(set); } private static final class SubSet<E> extends AbstractSet<E> { private final ImmutableMap<E, Integer> inputSet; private final int mask; SubSet(ImmutableMap<E, Integer> inputSet, int mask) { this.inputSet = inputSet; this.mask = mask; } @Override public Iterator<E> iterator() { return new UnmodifiableIterator<E>() { final ImmutableList<E> elements = inputSet.keySet().asList(); int remainingSetBits = mask; @Override public boolean hasNext() { return remainingSetBits != 0; } @Override public E next() { int index = Integer.numberOfTrailingZeros(remainingSetBits); if (index == 32) { throw new NoSuchElementException(); } remainingSetBits &= ~(1 << index); return elements.get(index); } }; } @Override public int size() { return Integer.bitCount(mask); } @Override public boolean contains(@Nullable Object o) { Integer index = inputSet.get(o); return index != null && (mask & (1 << index)) != 0; } } private static final class PowerSet<E> extends AbstractSet<Set<E>> { final ImmutableMap<E, Integer> inputSet; PowerSet(Set<E> input) { ImmutableMap.Builder<E, Integer> builder = ImmutableMap.builder(); int i = 0; for (E e : checkNotNull(input)) { builder.put(e, i++); } this.inputSet = builder.build(); checkArgument(inputSet.size() <= 30, "Too many elements to create power set: %s > 30", inputSet.size()); } @Override public int size() { return 1 << inputSet.size(); } @Override public boolean isEmpty() { return false; 
} @Override public Iterator<Set<E>> iterator() { return new AbstractIndexedListIterator<Set<E>>(size()) { @Override protected Set<E> get(final int setBits) { return new SubSet<E>(inputSet, setBits); } }; } @Override public boolean contains(@Nullable Object obj) { if (obj instanceof Set) { Set<?> set = (Set<?>) obj; return inputSet.keySet().containsAll(set); } return false; } @Override public boolean equals(@Nullable Object obj) { if (obj instanceof PowerSet) { PowerSet<?> that = (PowerSet<?>) obj; return inputSet.equals(that.inputSet); } return super.equals(obj); } @Override public int hashCode() { /* * The sum of the sums of the hash codes in each subset is just the sum of * each input element's hash code times the number of sets that element * appears in. Each element appears in exactly half of the 2^n sets, so: */ return inputSet.keySet().hashCode() << (inputSet.size() - 1); } @Override public String toString() { return "powerSet(" + inputSet + ")"; } } /** * An implementation for {@link Set#hashCode()}. */ static int hashCodeImpl(Set<?> s) { int hashCode = 0; for (Object o : s) { hashCode += o != null ? o.hashCode() : 0; hashCode = ~~hashCode; // Needed to deal with unusual integer overflow in GWT. } return hashCode; } /** * An implementation for {@link Set#equals(Object)}. */ static boolean equalsImpl(Set<?> s, @Nullable Object object) { if (s == object) { return true; } if (object instanceof Set) { Set<?> o = (Set<?>) object; try { return s.size() == o.size() && s.containsAll(o); } catch (NullPointerException ignored) { return false; } catch (ClassCastException ignored) { return false; } } return false; } /** * Returns an unmodifiable view of the specified navigable set. This method * allows modules to provide users with "read-only" access to internal * navigable sets. 
Query operations on the returned set "read through" to the * specified set, and attempts to modify the returned set, whether direct or * via its collection views, result in an * {@code UnsupportedOperationException}. * * <p>The returned navigable set will be serializable if the specified * navigable set is serializable. * * @param set the navigable set for which an unmodifiable view is to be * returned * @return an unmodifiable view of the specified navigable set * @since 12.0 */ @GwtIncompatible("NavigableSet") public static <E> NavigableSet<E> unmodifiableNavigableSet( NavigableSet<E> set) { if (set instanceof ImmutableSortedSet || set instanceof UnmodifiableNavigableSet) { return set; } return new UnmodifiableNavigableSet<E>(set); } @GwtIncompatible("NavigableSet") static final class UnmodifiableNavigableSet<E> extends ForwardingSortedSet<E> implements NavigableSet<E>, Serializable { private final NavigableSet<E> delegate; UnmodifiableNavigableSet(NavigableSet<E> delegate) { this.delegate = checkNotNull(delegate); } @Override protected SortedSet<E> delegate() { return Collections.unmodifiableSortedSet(delegate); } @Override public E lower(E e) { return delegate.lower(e); } @Override public E floor(E e) { return delegate.floor(e); } @Override public E ceiling(E e) { return delegate.ceiling(e); } @Override public E higher(E e) { return delegate.higher(e); } @Override public E pollFirst() { throw new UnsupportedOperationException(); } @Override public E pollLast() { throw new UnsupportedOperationException(); } private transient UnmodifiableNavigableSet<E> descendingSet; @Override public NavigableSet<E> descendingSet() { UnmodifiableNavigableSet<E> result = descendingSet; if (result == null) { result = descendingSet = new UnmodifiableNavigableSet<E>( delegate.descendingSet()); result.descendingSet = this; } return result; } @Override public Iterator<E> descendingIterator() { return Iterators.unmodifiableIterator(delegate.descendingIterator()); } @Override public 
NavigableSet<E> subSet( E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) { return unmodifiableNavigableSet(delegate.subSet( fromElement, fromInclusive, toElement, toInclusive)); } @Override public NavigableSet<E> headSet(E toElement, boolean inclusive) { return unmodifiableNavigableSet(delegate.headSet(toElement, inclusive)); } @Override public NavigableSet<E> tailSet(E fromElement, boolean inclusive) { return unmodifiableNavigableSet( delegate.tailSet(fromElement, inclusive)); } private static final long serialVersionUID = 0; } /** * Returns a synchronized (thread-safe) navigable set backed by the specified * navigable set. In order to guarantee serial access, it is critical that * <b>all</b> access to the backing navigable set is accomplished * through the returned navigable set (or its views). * * <p>It is imperative that the user manually synchronize on the returned * sorted set when iterating over it or any of its {@code descendingSet}, * {@code subSet}, {@code headSet}, or {@code tailSet} views. <pre> {@code * * NavigableSet<E> set = synchronizedNavigableSet(new TreeSet<E>()); * ... * synchronized (set) { * // Must be in the synchronized block * Iterator<E> it = set.iterator(); * while (it.hasNext()) { * foo(it.next()); * } * }}</pre> * * <p>or: <pre> {@code * * NavigableSet<E> set = synchronizedNavigableSet(new TreeSet<E>()); * NavigableSet<E> set2 = set.descendingSet().headSet(foo); * ... * synchronized (set) { // Note: set, not set2!!! * // Must be in the synchronized block * Iterator<E> it = set2.descendingIterator(); * while (it.hasNext()) * foo(it.next()); * } * }}</pre> * * <p>Failure to follow this advice may result in non-deterministic behavior. * * <p>The returned navigable set will be serializable if the specified * navigable set is serializable. * * @param navigableSet the navigable set to be "wrapped" in a synchronized * navigable set. * @return a synchronized view of the specified navigable set. 
* @since 13.0 */ @GwtIncompatible("NavigableSet") public static <E> NavigableSet<E> synchronizedNavigableSet( NavigableSet<E> navigableSet) { return Synchronized.navigableSet(navigableSet); } /** * Remove each element in an iterable from a set. */ static boolean removeAllImpl(Set<?> set, Iterator<?> iterator) { boolean changed = false; while (iterator.hasNext()) { changed |= set.remove(iterator.next()); } return changed; } static boolean removeAllImpl(Set<?> set, Collection<?> collection) { checkNotNull(collection); // for GWT if (collection instanceof Multiset) { collection = ((Multiset<?>) collection).elementSet(); } /* * AbstractSet.removeAll(List) has quadratic behavior if the list size * is just less than the set's size. We augment the test by * assuming that sets have fast contains() performance, and other * collections don't. See * http://code.google.com/p/guava-libraries/issues/detail?id=1013 */ if (collection instanceof Set && collection.size() > set.size()) { return Iterators.removeAll(set.iterator(), collection); } else { return removeAllImpl(set, collection.iterator()); } } @GwtIncompatible("NavigableSet") static class DescendingSet<E> extends ForwardingNavigableSet<E> { private final NavigableSet<E> forward; DescendingSet(NavigableSet<E> forward) { this.forward = forward; } @Override protected NavigableSet<E> delegate() { return forward; } @Override public E lower(E e) { return forward.higher(e); } @Override public E floor(E e) { return forward.ceiling(e); } @Override public E ceiling(E e) { return forward.floor(e); } @Override public E higher(E e) { return forward.lower(e); } @Override public E pollFirst() { return forward.pollLast(); } @Override public E pollLast() { return forward.pollFirst(); } @Override public NavigableSet<E> descendingSet() { return forward; } @Override public Iterator<E> descendingIterator() { return forward.iterator(); } @Override public NavigableSet<E> subSet( E fromElement, boolean fromInclusive, E toElement, boolean 
toInclusive) { return forward.subSet(toElement, toInclusive, fromElement, fromInclusive).descendingSet(); } @Override public NavigableSet<E> headSet(E toElement, boolean inclusive) { return forward.tailSet(toElement, inclusive).descendingSet(); } @Override public NavigableSet<E> tailSet(E fromElement, boolean inclusive) { return forward.headSet(fromElement, inclusive).descendingSet(); } @SuppressWarnings("unchecked") @Override public Comparator<? super E> comparator() { Comparator<? super E> forwardComparator = forward.comparator(); if (forwardComparator == null) { return (Comparator) Ordering.natural().reverse(); } else { return reverse(forwardComparator); } } // If we inline this, we get a javac error. private static <T> Ordering<T> reverse(Comparator<T> forward) { return Ordering.from(forward).reverse(); } @Override public E first() { return forward.last(); } @Override public SortedSet<E> headSet(E toElement) { return standardHeadSet(toElement); } @Override public E last() { return forward.first(); } @Override public SortedSet<E> subSet(E fromElement, E toElement) { return standardSubSet(fromElement, toElement); } @Override public SortedSet<E> tailSet(E fromElement) { return standardTailSet(fromElement); } @Override public Iterator<E> iterator() { return forward.descendingIterator(); } @Override public Object[] toArray() { return standardToArray(); } @Override public <T> T[] toArray(T[] array) { return standardToArray(array); } @Override public String toString() { return standardToString(); } } }
apache-2.0
joel-costigliola/assertj-core
src/test/java/org/assertj/core/internal/ThrowablesBaseTest.java
1565
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * Copyright 2012-2020 the original author or authors.
 */
package org.assertj.core.internal;

import static org.mockito.Mockito.spy;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;

/**
 * Base class for {@link Throwables} tests.
 * <p>
 * Is in <code>org.assertj.core.internal</code> package to be able to set {@link Throwables#failures} appropriately
 * (the field is package-visible, so the injection below only compiles from this package).
 *
 * @author Joel Costigliola
 */
public class ThrowablesBaseTest {

  // Mockito spy wrapping a real Failures: subclasses can verify failure-reporting
  // interactions while still getting real Failures behavior.
  protected Failures failures;
  protected Throwables throwables;
  // Shared fixture throwable; static because it is built once for all tests in setUpOnce().
  protected static Throwable actual;

  @BeforeAll
  public static void setUpOnce() {
    actual = new NullPointerException("Throwable message");
  }

  @BeforeEach
  public void setUp() {
    // Order matters: the spy must exist before it is injected into the two collaborators.
    failures = spy(new Failures());
    throwables = new Throwables();
    // Route all failure reporting (both Throwables' own and the shared Objects
    // instance's) through the same verifiable spy.
    throwables.failures = failures;
    Objects.instance().failures = failures;
  }

  @AfterEach
  public void tearDown() {
    // Objects.instance() looks like a process-wide singleton (set in setUp, restored
    // here) — put the default Failures back so the spy does not leak into other
    // test classes. NOTE(review): confirm singleton scope against Objects.instance().
    Objects.instance().failures = Failures.instance();
  }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-customerprofiles/src/main/java/com/amazonaws/services/customerprofiles/model/ListTagsForResourceResult.java
4866
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.customerprofiles.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result of a ListTagsForResource call: carries the resource's tag map.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/customer-profiles-2020-08-15/ListTagsForResource"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListTagsForResourceResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * The tags used to organize, track, or control access for this resource.
     * </p>
     */
    private java.util.Map<String, String> tags;

    /**
     * <p>
     * The tags used to organize, track, or control access for this resource.
     * </p>
     *
     * @return The tags used to organize, track, or control access for this resource.
     */
    public java.util.Map<String, String> getTags() {
        return tags;
    }

    /**
     * <p>
     * The tags used to organize, track, or control access for this resource.
     * </p>
     *
     * @param tags
     *        The tags used to organize, track, or control access for this resource.
     */
    public void setTags(java.util.Map<String, String> tags) {
        this.tags = tags;
    }

    /**
     * <p>
     * The tags used to organize, track, or control access for this resource.
     * </p>
     *
     * @param tags
     *        The tags used to organize, track, or control access for this resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListTagsForResourceResult withTags(java.util.Map<String, String> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Add a single Tags entry.
     *
     * @param key
     *        the tag key; must not already be present.
     * @param value
     *        the tag value.
     * @throws IllegalArgumentException
     *         if {@code key} has already been added.
     * @see ListTagsForResourceResult#withTags
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListTagsForResourceResult addTagsEntry(String key, String value) {
        if (null == this.tags) {
            this.tags = new java.util.HashMap<String, String>();
        }
        if (this.tags.containsKey(key))
            // key is already a String; no toString() needed (and a null key must not NPE here).
            throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
        this.tags.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into Tags.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListTagsForResourceResult clearTagsEntries() {
        this.tags = null;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getTags() != null)
            sb.append("Tags: ").append(getTags());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (!(obj instanceof ListTagsForResourceResult))
            return false;
        ListTagsForResourceResult other = (ListTagsForResourceResult) obj;
        // Equal when both tag maps are null, or both are non-null and equal.
        if (other.getTags() == null ^ this.getTags() == null)
            return false;
        if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
        return hashCode;
    }

    @Override
    public ListTagsForResourceResult clone() {
        try {
            return (ListTagsForResourceResult) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable; preserve the cause anyway.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
apache-2.0
matthieuvernier/Kelluwen-NLP
Kelluwen-NLP/src/main/java/cl/uach/kelluwen/nlp/engines/geolocation/GeonamesCluesSpotter.java
8485
package cl.uach.kelluwen.nlp.engines.geolocation; import org.apache.uima.UimaContext; import org.apache.uima.analysis_component.JCasAnnotator_ImplBase; import org.apache.uima.analysis_engine.AnalysisEngineProcessException; import org.apache.uima.cas.FSIterator; import org.apache.uima.cas.text.AnnotationIndex; import org.apache.uima.jcas.JCas; import org.apache.uima.resource.ResourceInitializationException; import org.bson.Document; import com.mongodb.MongoClient; import com.mongodb.client.FindIterable; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoDatabase; import static com.mongodb.client.model.Filters.eq; import java.util.ArrayList; import cl.uach.kelluwen.nlp.types.Token; import cl.uach.kelluwen.nlp.types.geonames.GeoClue; import cl.uach.kelluwen.nlp.types.location.City; public class GeonamesCluesSpotter extends JCasAnnotator_ImplBase { private MongoClient mongoClient; private MongoDatabase database; private MongoCollection<Document> collectionA_Country; private MongoCollection<Document> collectionH_Stream; private MongoCollection<Document> collectionL_Park; private MongoCollection<Document> collectionP_City; private MongoCollection<Document> collectionR_Road; private MongoCollection<Document> collectionS_Spot; private MongoCollection<Document> collectionT_Mountain; private MongoCollection<Document> collectionU_Undersea; private MongoCollection<Document> collectionV_Forest; private int cpt_query=0; @Override public void initialize(UimaContext context) throws ResourceInitializationException { super.initialize(context); mongoClient = new MongoClient("localhost" , 27017); database = mongoClient.getDatabase("geonames"); collectionA_Country= database.getCollection("A_Country"); collectionH_Stream= database.getCollection("H_Stream"); collectionL_Park= database.getCollection("L_Park"); collectionP_City= database.getCollection("P_City"); collectionR_Road= database.getCollection("R_Road"); 
collectionS_Spot= database.getCollection("S_Spot"); collectionT_Mountain= database.getCollection("T_Mountain"); collectionU_Undersea= database.getCollection("U_Undersea"); collectionV_Forest= database.getCollection("V_Forest"); } @Override public void process(JCas jcas) throws AnalysisEngineProcessException { //ArrayList to store all tokens and manipulate them more easily for this task (especially to manage various window sizes) ArrayList<Token> tokenList = new ArrayList<Token>(); // Browse all the tokens AnnotationIndex<Token> idxToken = jcas.getAnnotationIndex(Token.type); FSIterator<Token> itToken = idxToken.iterator(); while (itToken.hasNext()) { tokenList.add( (Token) itToken.next()); } /*Iterate tokens with a windows size=5*/ if (tokenList.size()>=5){ int window5Begin=0; int window5End=4; while (window5End<tokenList.size()){ String window5Value = ""; for (int i=0;i<5;i++){ if (i==0){window5Value = tokenList.get(window5Begin+i).getCoveredText();} else {window5Value = window5Value + " " + tokenList.get(window5Begin+i).getCoveredText();} } /*Test if this window5Value is in Geonames*/ annotateWithGeoname(window5Value,tokenList.get(window5Begin).getBegin(),tokenList.get(window5End).getEnd(),jcas); /*Move the window*/ window5Begin=window5Begin+1; window5End=window5End+1; } } /*Iterate tokens with a windows size4*/ if (tokenList.size()>=4){ int window4Begin=0; int window4End=3; while (window4End<tokenList.size()){ String window4Value = ""; for (int i=0;i<4;i++){ if (i==0){window4Value = tokenList.get(window4Begin+i).getCoveredText();} else {window4Value = window4Value + " " + tokenList.get(window4Begin+i).getCoveredText();} } /*Test if this window4Value is in Geonames*/ annotateWithGeoname(window4Value,tokenList.get(window4Begin).getBegin(),tokenList.get(window4End).getEnd(),jcas); /*Move the window*/ window4Begin=window4Begin+1; window4End=window4End+1; } } /*Iterate tokens with a windows size 3*/ if (tokenList.size()>=3){ int window3Begin=0; int window3End=2; 
while (window3End<tokenList.size()){ String window3Value = ""; for (int i=0;i<3;i++){ if (i==0){window3Value = tokenList.get(window3Begin+i).getCoveredText();} else {window3Value = window3Value + " " + tokenList.get(window3Begin+i).getCoveredText();} } /*Test if this window3Value is in Geonames*/ annotateWithGeoname(window3Value,tokenList.get(window3Begin).getBegin(),tokenList.get(window3End).getEnd(),jcas); /*Move the window*/ window3Begin=window3Begin+1; window3End=window3End+1; } } /*Iterate tokens with a windows size 2*/ if (tokenList.size()>=2){ int window2Begin=0; int window2End=1; while (window2End<tokenList.size()){ String window2Value = ""; for (int i=0;i<2;i++){ if (i==0){window2Value = tokenList.get(window2Begin+i).getCoveredText();} else {window2Value = window2Value + " " + tokenList.get(window2Begin+i).getCoveredText();} } /*Test if this window2Value is in Geonames*/ annotateWithGeoname(window2Value,tokenList.get(window2Begin).getBegin(),tokenList.get(window2End).getEnd(),jcas); /*Move the window*/ window2Begin=window2Begin+1; window2End=window2End+1; } } /*Iterate tokens*/ if (tokenList.size()>=1){ int token=0; while (token<tokenList.size()){ String tokenValue = tokenList.get(token).getCoveredText(); /*Test if this token is in Geonames*/ annotateWithGeoname(tokenValue,tokenList.get(token).getBegin(),tokenList.get(token).getEnd(),jcas); token=token+1; } } } private void annotateWithGeoname(String query, int begin, int end, JCas jcas){ //System.out.println("query 1"); MongoCursor<Document> cursor = collectionA_Country.find(eq("spanishname", query)).iterator(); cpt_query=cpt_query+1; try { while (cursor.hasNext()){ Document doc = cursor.next(); if (doc!=null){ //ADD ANNOTATION GeoClue geoClueAnnotation = new GeoClue(jcas); geoClueAnnotation.setBegin(begin); geoClueAnnotation.setEnd(end); geoClueAnnotation.addToIndexes(); //geoClueAnnotation.setGeonameid(doc.getInteger("geonameid")); geoClueAnnotation.setName(doc.get("name").toString()); 
geoClueAnnotation.setAsciiname(doc.get("asciiname").toString()); geoClueAnnotation.setSpanishname(doc.get("spanishname").toString()); geoClueAnnotation.setLatitude(doc.get("latitude").toString()); geoClueAnnotation.setLongitude(doc.get("longitude").toString()); geoClueAnnotation.setFeature_class(doc.get("feature_class").toString()); geoClueAnnotation.setFeature_code(doc.get("feature_code").toString()); geoClueAnnotation.setCountry_code(doc.get("country_code").toString()); geoClueAnnotation.setCountry_name(doc.get("country_name").toString()); //System.out.println("city: "+query); } } } finally {cursor.close();} //System.out.println("query 2"); MongoCursor<Document> cursor2 = collectionP_City.find(eq("spanishname", query)).iterator(); cpt_query=cpt_query+1; try { while (cursor2.hasNext()){ Document doc = cursor2.next(); if (doc!=null){ //ADD ANNOTATION GeoClue geoClueAnnotation = new GeoClue(jcas); geoClueAnnotation.setBegin(begin); geoClueAnnotation.setEnd(end); geoClueAnnotation.addToIndexes(); //geoClueAnnotation.setGeonameid(doc.getLong("geonameid")); geoClueAnnotation.setName(doc.get("name").toString()); geoClueAnnotation.setAsciiname(doc.get("asciiname").toString()); geoClueAnnotation.setSpanishname(doc.get("spanishname").toString()); geoClueAnnotation.setLatitude(doc.get("latitude").toString()); geoClueAnnotation.setLongitude(doc.get("longitude").toString()); geoClueAnnotation.setFeature_class(doc.get("feature_class").toString()); geoClueAnnotation.setFeature_code(doc.get("feature_code").toString()); geoClueAnnotation.setCountry_code(doc.get("country_code").toString()); geoClueAnnotation.setCountry_name(doc.get("country_name").toString()); // System.out.println("city: "+query); } } } finally {cursor2.close();} } @Override public void collectionProcessComplete() throws AnalysisEngineProcessException { mongoClient.close(); System.out.println("nb mongo queries by geonamesCluesSpotter :"+cpt_query); } }
apache-2.0
kubernetes-client/java
fluent-gen/src/main/java/io/kubernetes/client/fluent/Config.java
871
/* Copyright 2020 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package io.kubernetes.client.fluent; import io.sundr.builder.annotations.ExternalBuildables; @ExternalBuildables( editableEnabled = false, generateBuilderPackage = true, builderPackage = "io.kubernetes.client.fluent", value = {"io.kubernetes.client.openapi.models"}) public class Config {}
apache-2.0
ChinaQuants/OG-Platform
projects/OG-Financial/src/test/java/com/opengamma/financial/analytics/fudgemsg/ModelVolatilitySurfaceTest.java
6490
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.financial.analytics.fudgemsg;

import static org.testng.AssertJUnit.assertEquals;
import static org.testng.internal.junit.ArrayAsserts.assertArrayEquals;

import java.util.HashMap;
import java.util.Map;

import org.testng.annotations.Test;

import com.opengamma.analytics.financial.model.interestrate.curve.ForwardCurve;
import com.opengamma.analytics.financial.model.volatility.smile.fitting.sabr.StandardSmileSurfaceDataBundle;
import com.opengamma.analytics.financial.model.volatility.surface.BlackVolatilitySurfaceMoneyness;
import com.opengamma.analytics.financial.model.volatility.surface.BlackVolatilitySurfaceMoneynessFcnBackedByGrid;
import com.opengamma.analytics.financial.model.volatility.surface.VolatilitySurface;
import com.opengamma.analytics.financial.model.volatility.surface.VolatilitySurfaceInterpolator;
import com.opengamma.analytics.math.interpolation.GridInterpolator2D;
import com.opengamma.analytics.math.interpolation.Interpolator1D;
import com.opengamma.analytics.math.interpolation.Interpolator2D;
import com.opengamma.analytics.math.interpolation.LinearInterpolator1D;
import com.opengamma.analytics.math.surface.ConstantDoublesSurface;
import com.opengamma.analytics.math.surface.InterpolatedDoublesSurface;
import com.opengamma.util.test.TestGroup;
import com.opengamma.util.tuple.DoublesPair;

/**
 * Test ConstantVolatilitySurface/InterpolatedVolatilitySurface.
 *
 * Each test serializes an object through a Fudge encoding cycle (via
 * {@code cycleObject}, inherited from {@code AnalyticsTestBase}) and checks
 * that the round-tripped object is equivalent to the original.
 */
@Test(groups = TestGroup.UNIT)
public class ModelVolatilitySurfaceTest extends AnalyticsTestBase {

  /** Round-trips a surface with a single constant volatility level. */
  @Test
  public void testConstantVolatilitySurface() {
    final VolatilitySurface vs1 = new VolatilitySurface(ConstantDoublesSurface.from(0.2));
    final VolatilitySurface vs2 = cycleObject(VolatilitySurface.class, vs1);
    assertEquals(vs1, vs2);
  }

  // Disabled because new implementation of InterpolatedDoublesSurface may have
  // different actual arrays but have same semantic meaning
  @Test(enabled = false)
  public void testInterpolatedVolatilitySurface() {
    final double sigma = 0.4;
    final Interpolator1D linear = new LinearInterpolator1D();
    final Interpolator2D interpolator = new GridInterpolator2D(linear, linear);
    // Flat surface defined on the four corners of the unit square.
    final Map<DoublesPair, Double> data = new HashMap<>();
    data.put(DoublesPair.of(0., 1.), sigma);
    data.put(DoublesPair.of(1., 0.), sigma);
    data.put(DoublesPair.of(0., 0.), sigma);
    data.put(DoublesPair.of(1., 1.), sigma);
    final VolatilitySurface vs1 = new VolatilitySurface(InterpolatedDoublesSurface.from(data, interpolator));
    final VolatilitySurface vs2 = cycleObject(VolatilitySurface.class, vs1);
    assertEquals(vs1, vs2);
  }

  /**
   * Round-trips a moneyness surface, both as constructed directly and as
   * rebuilt through the copy constructor.
   */
  @Test
  public void testMoneynessSurface() {
    final ConstantDoublesSurface surface = ConstantDoublesSurface.from(0.5);
    final ForwardCurve curve = new ForwardCurve(1);
    final BlackVolatilitySurfaceMoneyness moneyness1 = new BlackVolatilitySurfaceMoneyness(surface, curve);
    BlackVolatilitySurfaceMoneyness moneyness2 = cycleObject(BlackVolatilitySurfaceMoneyness.class, moneyness1);
    assertEquals(moneyness1, moneyness2);
    // Same check, but cycling a copy-constructed instance.
    moneyness2 = cycleObject(BlackVolatilitySurfaceMoneyness.class, new BlackVolatilitySurfaceMoneyness(moneyness1));
    assertEquals(moneyness1, moneyness2);
  }

  /**
   * Round-trips a grid-backed moneyness surface. The grid data has no usable
   * equals(), so each component (expiries, forwards, strikes, vols, curves,
   * interpolator, surface) is compared field by field instead.
   */
  @Test
  public void testMoneynessSurfaceBackedByGrid() {
    final ConstantDoublesSurface surface = ConstantDoublesSurface.from(0.5);
    final ForwardCurve curve = new ForwardCurve(1);
    // Small 3-expiry x 2-strike smile grid used as the backing data.
    final StandardSmileSurfaceDataBundle gridData = new StandardSmileSurfaceDataBundle(100.0, new double[] {101,102,103}, new double[] {1,2,3}, new double[][] {{80,80},{100,100},{120,120}}, new double[][] {{.3,.25},{.2,.2},{.3,.25}}, new LinearInterpolator1D() );
    final VolatilitySurfaceInterpolator interpolator = new VolatilitySurfaceInterpolator();
    final BlackVolatilitySurfaceMoneynessFcnBackedByGrid moneyness1 = new BlackVolatilitySurfaceMoneynessFcnBackedByGrid(surface, curve, gridData, interpolator);
    BlackVolatilitySurfaceMoneynessFcnBackedByGrid moneyness2 = cycleObject(BlackVolatilitySurfaceMoneynessFcnBackedByGrid.class, moneyness1);
    assertArrayEquals(moneyness1.getGridData().getExpiries(), moneyness2.getGridData().getExpiries(), 0);
    assertArrayEquals(moneyness1.getGridData().getForwards(), moneyness2.getGridData().getForwards(), 0);
    assert2DArrayEquals(moneyness1.getGridData().getStrikes(), moneyness2.getGridData().getStrikes(), 0);
    assert2DArrayEquals(moneyness1.getGridData().getVolatilities(), moneyness2.getGridData().getVolatilities(), 0);
    assertCurveEquals(moneyness1.getGridData().getForwardCurve(), moneyness2.getGridData().getForwardCurve());
    assertCurveEquals(moneyness1.getForwardCurve(), moneyness2.getForwardCurve());
    assertEquals(moneyness1.getInterpolator(), moneyness2.getInterpolator());
    assertEquals(moneyness1.getSurface(), moneyness2.getSurface());
    // Same component-wise checks after cycling a copy-constructed instance.
    moneyness2 = cycleObject(BlackVolatilitySurfaceMoneynessFcnBackedByGrid.class, new BlackVolatilitySurfaceMoneynessFcnBackedByGrid(moneyness1));
    assertArrayEquals(moneyness1.getGridData().getExpiries(), moneyness2.getGridData().getExpiries(), 0);
    assertArrayEquals(moneyness1.getGridData().getForwards(), moneyness2.getGridData().getForwards(), 0);
    assert2DArrayEquals(moneyness1.getGridData().getStrikes(), moneyness2.getGridData().getStrikes(), 0);
    assert2DArrayEquals(moneyness1.getGridData().getVolatilities(), moneyness2.getGridData().getVolatilities(), 0);
    assertCurveEquals(moneyness1.getGridData().getForwardCurve(), moneyness2.getGridData().getForwardCurve());
    assertCurveEquals(moneyness1.getForwardCurve(), moneyness2.getForwardCurve());
    assertEquals(moneyness1.getInterpolator(), moneyness2.getInterpolator());
    assertEquals(moneyness1.getSurface(), moneyness2.getSurface());
  }

  /** Element-wise comparison of two rectangular double matrices. */
  private void assert2DArrayEquals(final double[][] a1, final double[][] a2, final double eps) {
    assertEquals(a1.length, a2.length);
    for (int i = 0; i < a1.length; i++) {
      assertArrayEquals(a1[i], a2[i], eps);
    }
  }

  /**
   * Compares two forward curves by spot and by sampling forward/drift values
   * over [0.1, 3.0); sampling is skipped for the identical instance.
   */
  private void assertCurveEquals(final ForwardCurve c1, final ForwardCurve c2) {
    assertEquals(c1.getSpot(), c2.getSpot());
    if (c1 != c2) {
      for (double x = 0.1; x < 3.0; x += 0.02) {
        assertEquals(c1.getForward(x), c2.getForward(x));
        assertEquals(c1.getDrift(x), c2.getDrift(x));
      }
    }
  }
}
apache-2.0
sarah-happy/happy-archive
archive/src/main/java/org/yi/happy/archive/IndexSearchNextMain.java
1667
package org.yi.happy.archive; import java.io.PrintStream; import java.util.List; import java.util.Set; import org.yi.happy.archive.commandLine.UsesArgs; import org.yi.happy.archive.commandLine.UsesIndexStore; import org.yi.happy.archive.commandLine.UsesInput; import org.yi.happy.archive.commandLine.UsesOutput; import org.yi.happy.archive.index.IndexEntry; import org.yi.happy.archive.index.IndexSearch; import org.yi.happy.archive.key.LocatorKey; import com.google.inject.Inject; @UsesIndexStore @UsesInput("key-list") @UsesOutput("result") @UsesArgs({ "volume-set", "volume-name" }) public class IndexSearchNextMain implements MainCommand { private List<String> args; private PrintStream out; private IndexSearch index; @Inject public IndexSearchNextMain(@EnvArgs List<String> args, IndexSearch index) { this.args = args; this.out = System.out; this.index = index; } @Override public void run() throws Exception { String volumeSet = args.get(0); String lastVolumeName = args.get(1); Set<LocatorKey> want = IndexSearchMain.loadKeyList(); for (String volumeName : index.listVolumeNames(volumeSet)) { if (volumeName.compareTo(lastVolumeName) <= 0) { continue; } List<IndexEntry> found = index.searchOne(volumeSet, volumeName, want); if (found.isEmpty()) { continue; } for (IndexEntry result : found) { out.println(volumeSet + "\t" + volumeName + "\t" + result.getName() + "\t" + result.getKey()); } break; } } }
apache-2.0
koenighotze/Hotel-Reservation-Tool
guest/src/test/java/org/koenighotze/jee7hotel/guest/business/GuestAtomFeedTest.java
1647
package org.koenighotze.jee7hotel.guest.business; import org.apache.abdera.Abdera; import org.apache.abdera.model.Entry; import org.apache.abdera.model.Feed; import org.apache.abdera.parser.stax.FOMFeed; import org.junit.Test; import org.junit.runner.RunWith; import org.koenighotze.jee7hotel.guest.domain.Guest; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import javax.ws.rs.core.UriInfo; import java.util.List; import static java.net.URI.create; import static java.time.LocalDateTime.now; import static java.util.Collections.singletonList; import static org.fest.assertions.Assertions.assertThat; import static org.mockito.Mockito.when; /** * @author David Schmitz */ @RunWith(MockitoJUnitRunner.class) public class GuestAtomFeedTest { @Mock private GuestService guestService; @Mock private Abdera abdera; @Mock private UriInfo uriInfo; @Test public void the_feed_exposes_the_publicid() { Guest guest = new Guest("123", "foo", "foo@bar.de"); guest.setLastUpdate(now()); when(abdera.newFeed()).thenReturn(new FOMFeed()); when(uriInfo.getBaseUri()).thenReturn(create("http://localhost/")); when(guestService.getAllGuests()).thenReturn(singletonList(guest)); GuestAtomFeed guestAtomFeed = new GuestAtomFeed(guestService, abdera); Feed guestFeed = guestAtomFeed.getGuestFeed(uriInfo); List<Entry> entries = guestFeed.getEntries(); assertThat(entries).isNotEmpty(); Entry entry = entries.get(0); assertThat(entry.getLinks().get(0).getHref().toASCIIString()).endsWith("/guests/123"); } }
apache-2.0
mariusj/org.openntf.domino
domino/junit4xpages/src/main/java/org/hamcrest/TypeSafeDiagnosingMatcher.java
2261
package org.hamcrest; import org.hamcrest.internal.ReflectiveTypeFinder; /** * Convenient base class for Matchers that require a non-null value of a specific type and that will report why the received value has been * rejected. This implements the null check, checks the type and then casts. To use, implement * * <pre> * matchesSafely() * </pre> * * . * * @author Neil Dunn * @author Nat Pryce * @author Steve Freeman */ public abstract class TypeSafeDiagnosingMatcher<T> extends BaseMatcher<T> { private static final ReflectiveTypeFinder TYPE_FINDER = new ReflectiveTypeFinder("matchesSafely", 2, 0); private final Class<?> expectedType; /** * Subclasses should implement this. The item will already have been checked for the specific type and will never be null. */ protected abstract boolean matchesSafely(T item, Description mismatchDescription); /** * Use this constructor if the subclass that implements <code>matchesSafely</code> is <em>not</em> the class that binds &lt;T&gt; to a * type. * * @param expectedType * The expectedType of the actual value. */ protected TypeSafeDiagnosingMatcher(final Class<?> expectedType) { this.expectedType = expectedType; } /** * Use this constructor if the subclass that implements <code>matchesSafely</code> is <em>not</em> the class that binds &lt;T&gt; to a * type. 
* * @param typeFinder * A type finder to extract the type */ protected TypeSafeDiagnosingMatcher(final ReflectiveTypeFinder typeFinder) { this.expectedType = typeFinder.findExpectedType(getClass()); } /** * The default constructor for simple sub types */ protected TypeSafeDiagnosingMatcher() { this(TYPE_FINDER); } @Override @SuppressWarnings("unchecked") public final boolean matches(final Object item) { return item != null && expectedType.isInstance(item) && matchesSafely((T) item, new Description.NullDescription()); } @SuppressWarnings("unchecked") @Override public final void describeMismatch(final Object item, final Description mismatchDescription) { if (item == null || !expectedType.isInstance(item)) { super.describeMismatch(item, mismatchDescription); } else { matchesSafely((T) item, mismatchDescription); } } }
apache-2.0
yavski/fab-speed-dial
samples/src/main/java/io/github/yavski/fabspeeddial/samples/EventsSampleActivity.java
1776
/* * Copyright 2016 Yavor Ivanov * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.github.yavski.fabspeeddial.samples; import android.os.Bundle; import android.support.design.widget.Snackbar; import android.support.v7.widget.Toolbar; import android.view.MenuItem; import io.github.yavski.fabmenu.samples.R; import io.github.yavski.fabspeeddial.FabSpeedDial; import io.github.yavski.fabspeeddial.SimpleMenuListenerAdapter; public class EventsSampleActivity extends BaseSampleActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_events_sample); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); FabSpeedDial fabSpeedDial = (FabSpeedDial) findViewById(R.id.fab_speed_dial); fabSpeedDial.setMenuListener(new SimpleMenuListenerAdapter() { @Override public boolean onMenuItemSelected(MenuItem menuItem) { Snackbar.make(findViewById(R.id.rootView), getString(R.string.selected_menu_item, menuItem.getTitle()), Snackbar.LENGTH_SHORT).show(); return false; } }); } }
apache-2.0
McLeodMoores/starling
projects/analytics/src/main/java/com/mcleodmoores/analytics/math/statistics/descriptive/WeightFunction.java
445
/** * Copyright (C) 2017 - present McLeod Moores Software Limited. All rights reserved. */ package com.mcleodmoores.analytics.math.statistics.descriptive; /** * An interface for functions that provide a stream of weights. This stream could be finite or * infinite. * @param <T> the type of the weight that is returned */ public interface WeightFunction<T> { /** * Gets the next weight. * @return the weight */ T get(); }
apache-2.0
chelu/jdal
vaadin/src/main/java/org/jdal/vaadin/data/BeanWrapperItem.java
2358
/* * Copyright 2009-2015 Jose Luis Martin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jdal.vaadin.data; import java.beans.PropertyDescriptor; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanWrapper; import org.springframework.beans.PropertyAccessorFactory; import com.vaadin.data.Item; import com.vaadin.data.Property; /** * An adapter to use beans as {@link Item Items} delegating work to * Spring {@link BeanWrapper} * * @author Jose Luis Martin * @since 2.1 */ public class BeanWrapperItem implements Item { private List<String> properties = new ArrayList<String>(); private BeanWrapper beanWrapper; public BeanWrapperItem (Object bean) { this(bean, null); } public BeanWrapperItem(Object bean, List<String> properties) { this.beanWrapper = PropertyAccessorFactory.forBeanPropertyAccess(bean); if (properties == null) { for (PropertyDescriptor pd : BeanUtils.getPropertyDescriptors(bean.getClass())) this.properties.add(pd.getName()); } else { this.properties = properties; } } @Override @SuppressWarnings("rawtypes") public Property getItemProperty(Object id) { return new BeanWrapperProperty(this.beanWrapper, (String) id); } @Override public Collection<?> getItemPropertyIds() { return this.properties; } @Override @SuppressWarnings("rawtypes") public boolean addItemProperty(Object id, Property property) throws UnsupportedOperationException { 
this.properties.add((String) id); return true; } @Override public boolean removeItemProperty(Object id) throws UnsupportedOperationException { return this.properties.remove((String) id); } public Object getBean() { return this.beanWrapper.getWrappedInstance(); } }
apache-2.0
zhigangtan/cole
cole-web/src/main/java/cn/tanziquan/produce/cole/configure/WebConfig.java
564
package cn.tanziquan.produce.cole.configure; import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainerFactory; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; /** * WebConfig * * @version 0.0.1 */ @Configuration public class WebConfig { @Bean public JettyEmbeddedServletContainerFactory embeddedServletContainerFactory() { JettyEmbeddedServletContainerFactory factory = new JettyEmbeddedServletContainerFactory(9010); return factory; } }
apache-2.0
paulseawa/p4ic4idea
plugin/src/net/groboclown/idea/p4ic/server/exceptions/VcsInterruptedException.java
950
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.groboclown.idea.p4ic.server.exceptions; import com.intellij.openapi.vcs.VcsException; import org.jetbrains.annotations.NotNull; // FIXME find cases of InterruptedException and use this instead. public class VcsInterruptedException extends VcsException { public VcsInterruptedException(@NotNull InterruptedException ex) { super(ex); } }
apache-2.0
xiao-chen/hadoop
hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerData.java
12234
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.ozone.container.common.impl; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import java.io.IOException; import java.nio.charset.Charset; import java.util.List; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos. 
ContainerType; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos .ContainerDataProto; import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils; import org.apache.hadoop.ozone.container.common.volume.HddsVolume; import java.util.Collections; import java.util.Map; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicLong; import org.yaml.snakeyaml.Yaml; import static org.apache.hadoop.ozone.OzoneConsts.CHECKSUM; import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_ID; import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_TYPE; import static org.apache.hadoop.ozone.OzoneConsts.LAYOUTVERSION; import static org.apache.hadoop.ozone.OzoneConsts.MAX_SIZE; import static org.apache.hadoop.ozone.OzoneConsts.METADATA; import static org.apache.hadoop.ozone.OzoneConsts.STATE; /** * ContainerData is the in-memory representation of container metadata and is * represented on disk by the .container file. */ public abstract class ContainerData { //Type of the container. // For now, we support only KeyValueContainer. private final ContainerType containerType; // Unique identifier for the container private final long containerID; // Layout version of the container data private final int layOutVersion; // Metadata of the container will be a key value pair. // This can hold information like volume name, owner etc., private final Map<String, String> metadata; // State of the Container private ContainerDataProto.State state; private final long maxSize; /** parameters for read/write statistics on the container. 
**/ private final AtomicLong readBytes; private final AtomicLong writeBytes; private final AtomicLong readCount; private final AtomicLong writeCount; private final AtomicLong bytesUsed; private final AtomicLong keyCount; private HddsVolume volume; private String checksum; public static final Charset CHARSET_ENCODING = Charset.forName("UTF-8"); private static final String DUMMY_CHECKSUM = new String(new byte[64], CHARSET_ENCODING); // Common Fields need to be stored in .container file. protected static final List<String> YAML_FIELDS = Collections.unmodifiableList(Lists.newArrayList( CONTAINER_TYPE, CONTAINER_ID, LAYOUTVERSION, STATE, METADATA, MAX_SIZE, CHECKSUM)); /** * Creates a ContainerData Object, which holds metadata of the container. * @param type - ContainerType * @param containerId - ContainerId * @param size - container maximum size in bytes */ protected ContainerData(ContainerType type, long containerId, long size) { this(type, containerId, ChunkLayOutVersion.getLatestVersion().getVersion(), size); } /** * Creates a ContainerData Object, which holds metadata of the container. * @param type - ContainerType * @param containerId - ContainerId * @param layOutVersion - Container layOutVersion * @param size - Container maximum size in bytes */ protected ContainerData(ContainerType type, long containerId, int layOutVersion, long size) { Preconditions.checkNotNull(type); this.containerType = type; this.containerID = containerId; this.layOutVersion = layOutVersion; this.metadata = new TreeMap<>(); this.state = ContainerDataProto.State.OPEN; this.readCount = new AtomicLong(0L); this.readBytes = new AtomicLong(0L); this.writeCount = new AtomicLong(0L); this.writeBytes = new AtomicLong(0L); this.bytesUsed = new AtomicLong(0L); this.keyCount = new AtomicLong(0L); this.maxSize = size; setChecksumTo0ByteArray(); } /** * Returns the containerID. */ public long getContainerID() { return containerID; } /** * Returns the path to base dir of the container. 
* @return Path to base dir. */ public abstract String getContainerPath(); /** * Returns the type of the container. * @return ContainerType */ public ContainerType getContainerType() { return containerType; } /** * Returns the state of the container. * @return ContainerLifeCycleState */ public synchronized ContainerDataProto.State getState() { return state; } /** * Set the state of the container. * @param state */ public synchronized void setState(ContainerDataProto.State state) { this.state = state; } /** * Return's maximum size of the container in bytes. * @return maxSize in bytes */ public long getMaxSize() { return maxSize; } /** * Returns the layOutVersion of the actual container data format. * @return layOutVersion */ public int getLayOutVersion() { return ChunkLayOutVersion.getChunkLayOutVersion(layOutVersion).getVersion(); } /** * Add/Update metadata. * We should hold the container lock before updating the metadata as this * will be persisted on disk. Unless, we are reconstructing ContainerData * from protoBuf or from on disk .container file in which case lock is not * required. */ public void addMetadata(String key, String value) { metadata.put(key, value); } /** * Retuns metadata of the container. * @return metadata */ public Map<String, String> getMetadata() { return Collections.unmodifiableMap(this.metadata); } /** * Set metadata. * We should hold the container lock before updating the metadata as this * will be persisted on disk. Unless, we are reconstructing ContainerData * from protoBuf or from on disk .container file in which case lock is not * required. */ public void setMetadata(Map<String, String> metadataMap) { metadata.clear(); metadata.putAll(metadataMap); } /** * checks if the container is open. * @return - boolean */ public synchronized boolean isOpen() { return ContainerDataProto.State.OPEN == state; } /** * checks if the container is invalid. 
* @return - boolean */ public synchronized boolean isValid() { return !(ContainerDataProto.State.INVALID == state); } /** * checks if the container is closed. * @return - boolean */ public synchronized boolean isClosed() { return ContainerDataProto.State.CLOSED == state; } /** * Marks this container as closed. */ public synchronized void closeContainer() { setState(ContainerDataProto.State.CLOSED); } /** * Get the number of bytes read from the container. * @return the number of bytes read from the container. */ public long getReadBytes() { return readBytes.get(); } /** * Increase the number of bytes read from the container. * @param bytes number of bytes read. */ public void incrReadBytes(long bytes) { this.readBytes.addAndGet(bytes); } /** * Get the number of times the container is read. * @return the number of times the container is read. */ public long getReadCount() { return readCount.get(); } /** * Increase the number of container read count by 1. */ public void incrReadCount() { this.readCount.incrementAndGet(); } /** * Get the number of bytes write into the container. * @return the number of bytes write into the container. */ public long getWriteBytes() { return writeBytes.get(); } /** * Increase the number of bytes write into the container. * @param bytes the number of bytes write into the container. */ public void incrWriteBytes(long bytes) { this.writeBytes.addAndGet(bytes); } /** * Get the number of writes into the container. * @return the number of writes into the container. */ public long getWriteCount() { return writeCount.get(); } /** * Increase the number of writes into the container by 1. */ public void incrWriteCount() { this.writeCount.incrementAndGet(); } /** * Sets the number of bytes used by the container. * @param used */ public void setBytesUsed(long used) { this.bytesUsed.set(used); } /** * Get the number of bytes used by the container. * @return the number of bytes used by the container. 
*/ public long getBytesUsed() { return bytesUsed.get(); } /** * Increase the number of bytes used by the container. * @param used number of bytes used by the container. * @return the current number of bytes used by the container afert increase. */ public long incrBytesUsed(long used) { return this.bytesUsed.addAndGet(used); } /** * Decrease the number of bytes used by the container. * @param reclaimed the number of bytes reclaimed from the container. * @return the current number of bytes used by the container after decrease. */ public long decrBytesUsed(long reclaimed) { return this.bytesUsed.addAndGet(-1L * reclaimed); } /** * Set the Volume for the Container. * This should be called only from the createContainer. * @param hddsVolume */ public void setVolume(HddsVolume hddsVolume) { this.volume = hddsVolume; } /** * Returns the volume of the Container. * @return HddsVolume */ public HddsVolume getVolume() { return volume; } /** * Increments the number of keys in the container. */ public void incrKeyCount() { this.keyCount.incrementAndGet(); } /** * Decrements number of keys in the container. */ public void decrKeyCount() { this.keyCount.decrementAndGet(); } /** * Returns number of keys in the container. * @return key count */ public long getKeyCount() { return this.keyCount.get(); } /** * Set's number of keys in the container. * @param count */ public void setKeyCount(long count) { this.keyCount.set(count); } public void setChecksumTo0ByteArray() { this.checksum = DUMMY_CHECKSUM; } public void setChecksum(String checkSum) { this.checksum = checkSum; } public String getChecksum() { return this.checksum; } /** * Compute the checksum for ContainerData using the specified Yaml (based * on ContainerType) and set the checksum. * * Checksum of ContainerData is calculated by setting the * {@link ContainerData#checksum} field to a 64-byte array with all 0's - * {@link ContainerData#DUMMY_CHECKSUM}. 
After the checksum is calculated, * the checksum field is updated with this value. * * @param yaml Yaml for ContainerType to get the ContainerData as Yaml String * @throws IOException */ public void computeAndSetChecksum(Yaml yaml) throws IOException { // Set checksum to dummy value - 0 byte array, to calculate the checksum // of rest of the data. setChecksumTo0ByteArray(); // Dump yaml data into a string to compute its checksum String containerDataYamlStr = yaml.dump(this); this.checksum = ContainerUtils.getChecksum(containerDataYamlStr); } /** * Returns a ProtoBuf Message from ContainerData. * * @return Protocol Buffer Message */ public abstract ContainerProtos.ContainerDataProto getProtoBufMessage(); }
apache-2.0
spinscale/dropwizard-jobs
dropwizard-jobs-spring/src/test/java/io/dropwizard/jobs/EveryTestJob.java
476
package io.dropwizard.jobs;

import io.dropwizard.jobs.annotations.Every;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

import java.util.concurrent.CountDownLatch;

/**
 * Test job scheduled to fire every 50 milliseconds.
 *
 * <p>Each execution counts the latch down once; a test can await the latch to
 * verify the job really ran the expected number of times (five).
 */
@Every("50ms")
public class EveryTestJob extends Job {

    // Package-visible so the test that schedules this job can await it.
    // Reaches zero after five executions.
    final CountDownLatch latch = new CountDownLatch(5);

    @Override
    public void doJob(JobExecutionContext context) throws JobExecutionException {
        latch.countDown();
    }
}
apache-2.0
schleichardt/java-8-playground
test/java8/ch1lambdaexpressions/P_1_3_FunctionalInterfaces.java
2589
package java8.ch1lambdaexpressions;

import base.Demo;
import com.google.common.base.Strings;
import org.junit.Test;

import java.io.IOException;
import java.util.function.Function;

import static org.fest.assertions.Assertions.assertThat;

public class P_1_3_FunctionalInterfaces {

    /*
     Places where lambdas can be used: wherever an object of an interface with a
     single abstract method is expected.
     => backwards compatible with Java 7
     => an interface with a single ABSTRACT method is a "functional interface";
        since Java 8, interfaces may also carry (default) implementations
     => examples: Runnable, Comparator, F.Function.X (for example F.Function in
        F.Promise.map), F.CallbackX
     Lambdas can only be used on functional interfaces. The interfaces themselves
     can be used as parameters or return values, so lambdas are just syntactic
     sugar.

     http://docs.oracle.com/javase/8/docs/api/java/util/function/package-frame.html
     java.util.function provides a lot of functional interfaces with default
     methods. It looks functional.

     TODO: Function Consumer Predicate Supplier
     */

    @Test
    public void functionAsParameter() throws Exception {
        // The lambda is the implementation of Function<String, Integer>.
        assertThat(compute("hello", s -> s.length())).isEqualTo(5);
    }

    public int compute(final String string, final Function<String, Integer> function) {
        return function.apply(string);
    }

    @Test
    public void functionAsReturnValueAndVariable() throws Exception {
        final Function<Integer, String> function = createFunction();
        assertThat(function.apply(5)).isEqualTo("*****");
    }

    public Function<Integer, String> createFunction() {
        return i -> Strings.repeat("*", i);
    }

    // A function stored as a member (the same works for a local variable).
    final Function<String, Integer> function = s -> s.length();

    /*
     With the @FunctionalInterface annotation the compiler checks that the type
     has exactly one abstract method. In addition it generates Javadoc like this:
     "This is a functional interface and can therefore be used as the assignment
     target for a lambda expression or method reference."
     */
    @FunctionalInterface
    static interface Closeable {
        void close();
    }

    @Test
    public void checkedExceptionsNeedToBeCatchedIfInterfaceDoesNotThrowIt() throws Exception {
        // Runnable.run() declares no checked exceptions, so the lambda body has
        // to catch IOException itself (here: rethrown as an unchecked exception).
        final Runnable task = () -> {
            try {
                throw new IOException("x");
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        };
        // does not work in Java, but Scala:
        // final Runnable task = () -> throw new IOException("x");
    }
}
apache-2.0
99soft/guartz
src/main/java/org/nnsoft/guice/guartz/SchedulerConfigurationBuilder.java
1113
package org.nnsoft.guice.guartz; /* * Copyright 2009-2012 The 99 Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.Properties; /** * Contains methods to change scheduler configuration by subclasses of QuartzModule. * * @since 1.1 */ public interface SchedulerConfigurationBuilder { SchedulerConfigurationBuilder withManualStart(); /** * @since 1.3 * @param properties * @return */ SchedulerConfigurationBuilder withProperties( Properties properties ); }
apache-2.0
xalfonso/res_java
JavaRea/src/main/java/eas/com/util/criteria/CriteriaAuthor.java
1158
package eas.com.util.criteria; import eas.com.entity.Author; import javax.ws.rs.QueryParam; /** * Created by eduardo on 12/3/2016. */ public class CriteriaAuthor implements Criteria<Author> { @QueryParam("country") private String country; @QueryParam("genre") private String genre; public CriteriaAuthor(String country, String genre) { this.country = country; this.genre = genre; } /** * This constructor is needed for injecting the @QueryParam */ public CriteriaAuthor() { } @Override public boolean emptyParam() { return (this.country == null || this.country.isEmpty()) && (genre == null || genre.isEmpty()); } @Override public boolean assertCriteria(Author author) { if (this.country != null && !this.country.isEmpty()) { if (!author.getCountry().equalsIgnoreCase(this.country)) return false; } if (this.genre != null && !this.genre.isEmpty()) { if (!author.getGenre().equalsIgnoreCase(this.genre)) return false; } return true; } }
apache-2.0
CenPC434/java-tools
en16931-edifact-to-xml/src/main/java/com/altova/text/tablelike/csv/ParserStateInsideQuotedField.java
1790
//////////////////////////////////////////////////////////////////////// // // ParserStateInsideQuotedField.java // // This file was generated by MapForce 2017sp2. // // YOU SHOULD NOT MODIFY THIS FILE, BECAUSE IT WILL BE // OVERWRITTEN WHEN YOU RE-RUN CODE GENERATION. // // Refer to the MapForce Documentation for further details. // http://www.altova.com/mapforce // //////////////////////////////////////////////////////////////////////// package com.altova.text.tablelike.csv; class ParserStateInsideQuotedField extends ParserState { public ParserStateInsideQuotedField(Parser owner, ParserStateFactory states) { super(owner, states); } public ParserState process(char current) { super.getOwner().appendCharacterToToken(current); super.getOwner().moveNext(); return this; } public ParserState processFieldDelimiter(char current) { super.getOwner().appendCharacterToToken(current); super.getOwner().moveNext(); return this; } public ParserState processRecordDelimiter(char current) { super.getOwner().appendCharacterToToken(current); super.getOwner().moveNext(); return this; } public ParserState processQuoteCharacter(char current) { ParserState result = this; super.getOwner().moveNext(); if (super.getOwner().isEndOfBuffer()) { super.getOwner().notifyAboutTokenComplete(); result = super.getStates().getWaitingForField(); } else if (super.getOwner().getCurrentCharacter() == current) { super.getOwner().appendCharacterToToken(current); super.getOwner().moveNext(); } else { result = super.getStates().getInsideField(); } return result; } }
apache-2.0
caalador/VNE
vne/src/main/java/org/percepta/mgrankvi/vne/client/MyComponentWidget.java
2701
package org.percepta.mgrankvi.vne.client; import com.google.gwt.media.client.Audio; import com.google.gwt.user.client.Timer; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.ui.Label; import com.vaadin.client.Util; import org.percepta.mgrankvi.preloader.client.image.ImageLoadEvent; import org.percepta.mgrankvi.preloader.client.image.ImageLoadHandler; import org.percepta.mgrankvi.preloader.client.image.ImagePreloader; import org.percepta.mgrankvi.preloader.client.music.MusicLoadEvent; import org.percepta.mgrankvi.preloader.client.music.MusicLoadHandler; import org.percepta.mgrankvi.preloader.client.music.MusicPreloader; // Extend any GWT Widget public class MyComponentWidget extends Label implements ImageLoadHandler, MusicLoadHandler { ImagePreloader preloader = new ImagePreloader(); MusicPreloader musicPreloader = new MusicPreloader(); int loaded = 0; int toLoad = 0; int musicLoaded = 0; int musicToLoad = 0; public MyComponentWidget() { // CSS class-name should not be v- prefixed setStyleName("vne"); // State is set to widget in MyComponentConnector preloader.addImageLoadListener(this); musicPreloader.addMusicLoadHandler(this); } private void updateText() { setText("Loaded: " + loaded + "/" + toLoad + " – Music: " + musicLoaded + "/" + musicToLoad); } public void preloadImage(String url) { preloader.preloadImage(Util.getAbsoluteUrl("./VAADIN/"+url)); toLoad++; updateText(); } public void preloadMusic(String url){ musicPreloader.preloadAudio(Util.getAbsoluteUrl("./VAADIN/"+url)); musicToLoad++; updateText(); } @Override public void imageLoaded(ImageLoadEvent event) { if (event.isSuccess()) loaded++; else Window.alert("Failed to load image " + event.getFile()); updateText(); } @Override public void musicLoaded(MusicLoadEvent event) { if(event.isSuccess()){ musicLoaded++; event.getAudio().play(); final Audio audio = event.getAudio(); final double duration = audio.getDuration(); Timer t = new Timer(){ @Override public void run() { 
setText("Loaded: " + loaded + "/" + toLoad + " – Music: " + musicLoaded + "/" + musicToLoad + "\n" + audio.getCurrentTime() + "/" + duration); if(audio.hasEnded()){ this.cancel(); } } }; t.scheduleRepeating(100); } updateText(); } }
apache-2.0
orientechnologies/orientdb
distributed/src/main/java/com/orientechnologies/orient/server/distributed/impl/task/OLockKeySource.java
463
package com.orientechnologies.orient.server.distributed.impl.task;

import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.tx.OTransactionId;
import com.orientechnologies.orient.server.distributed.impl.task.transaction.OTransactionUniqueKey;
import java.util.SortedSet;

/**
 * Source of the keys a distributed task needs to lock: the record ids and
 * unique-index keys it touches, plus the owning transaction id. The sorted
 * sets presumably guarantee a stable lock-acquisition order across nodes —
 * confirm with callers.
 */
public interface OLockKeySource {

  /** Record ids involved, in sorted order. */
  SortedSet<ORID> getRids();

  /** Unique-index keys involved, in sorted order. */
  SortedSet<OTransactionUniqueKey> getUniqueKeys();

  /** Id of the transaction these keys belong to. */
  OTransactionId getTransactionId();
}
apache-2.0
cretz/statmantis
src/org/statmantis/annotation/XmlTopLevel.java
969
/* * Copyright 2010 Chad Retz * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.statmantis.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * TODO: doc this * * @author Chad Retz */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) public @interface XmlTopLevel { }
apache-2.0
mnovillodiaz/Miwok_App
app/src/main/java/com/example/android/miwok/Word.java
2014
package com.example.android.miwok; /** * Created by mdiaz on 17/05/17. */ public class Word { private static final int NO_IMAGE_PROVIDED = -1; //String default translation private String defaultTranslation; //String miwok translation private String miwokTranslation; //String sound private int sound; //image private int imageResourceId = NO_IMAGE_PROVIDED; /** Constructor to create a new Word object (without image) @param defaultTranslation @param miwokTranslation @param sound * */ public Word(String defaultTranslation, String miwokTranslation, int sound) { this.defaultTranslation = defaultTranslation; this.miwokTranslation = miwokTranslation; this.sound = sound; } /** Constructor to create a new Word object with image @param defaultTranslation @param miwokTranslation @param imageResourceId @param sound * */ public Word(String defaultTranslation, String miwokTranslation, int imageResourceId, int sound) { this.defaultTranslation = defaultTranslation; this.miwokTranslation = miwokTranslation; this.imageResourceId = imageResourceId; this.sound = sound; } //getters public String getDefaultTranslation() { return defaultTranslation; } public String getMiwokTranslation(){ return miwokTranslation; } public int getImageResourceId(){ return imageResourceId; } public int getSound() { return sound; } public boolean hasImage() { return imageResourceId != NO_IMAGE_PROVIDED; } @Override public String toString() { return "Word{" + "mDefaultTranslation='" + defaultTranslation + '\'' + ", mMiwokTranslation='" + miwokTranslation + '\'' + ", mSound=" + sound + ", mImageResourceId=" + imageResourceId + '}'; } }
apache-2.0
codenergic/theskeleton
src/main/java/org/codenergic/theskeleton/post/PostReactionRepository.java
1292
/*
 * Copyright 2018 original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.codenergic.theskeleton.post;

import java.util.Optional;

import org.codenergic.theskeleton.core.data.AuditingEntityRepository;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Repository;

/**
 * Spring Data repository for post reactions. Query implementations are
 * derived from the method names by Spring Data.
 */
@Repository
public interface PostReactionRepository extends AuditingEntityRepository<PostReactionEntity> {

	/** Number of reactions of the given type on the given post. */
	long countByPostIdAndReactionType(String postId, PostReactionType reactionType);

	/** Page of reactions of the given type on the given post. */
	Page<PostReactionEntity> findByPostIdAndReactionType(String postId, PostReactionType reactionType, Pageable pageable);

	/** The given user's reaction to the given post, if any. */
	Optional<PostReactionEntity> findByUserIdAndPostId(String userId, String postId);
}
apache-2.0
RWTH-i5-IDSG/jamocha
src/main/java/org/jamocha/function/impls/sideeffects/ExportGv.java
3930
/*
 * Copyright 2002-2016 The Jamocha Team
 *
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.jamocha.org/
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
 * the specific language governing permissions and limitations under the License.
 */
package org.jamocha.function.impls.sideeffects;

import static org.jamocha.util.ToArray.toArray;

import java.io.FileWriter;
import java.io.IOException;
import java.util.Arrays;

import org.jamocha.dn.NetworkToDot;
import org.jamocha.dn.SideEffectFunctionToNetwork;
import org.jamocha.dn.memory.SlotType;
import org.jamocha.function.Function;
import org.jamocha.function.FunctionDictionary;
import org.jamocha.function.impls.FunctionVisitor;
import org.jamocha.languages.common.ScopeStack.Symbol;

/**
 * Side-effect function {@code export-gv}: writes the current network as a
 * Graphviz dot file. Two forms are registered in the static initializer below:
 * a fixed-arity one taking only the target file name, and a varargs one taking
 * additional symbol arguments that are passed through to {@link NetworkToDot}
 * (presumably restricting the exported view — confirm in NetworkToDot).
 *
 * @author Fabian Ohler &lt;fabian.ohler1@rwth-aachen.de&gt;
 */
public abstract class ExportGv implements Function<Object> {

	public static final String IN_CLIPS = "export-gv";

	// Fixed-arity form takes a single STRING argument: the output file name.
	static final SlotType[] PARAM_TYPES = {SlotType.STRING};

	@Override
	public String inClips() {
		return IN_CLIPS;
	}

	@Override
	public SlotType getReturnType() {
		// Pure side-effect function: there is no meaningful return value.
		return SlotType.NIL;
	}

	@Override
	public <V extends FunctionVisitor> V accept(final V visitor) {
		visitor.visit(this);
		return visitor;
	}

	@Override
	public SlotType[] getParamTypes() {
		return PARAM_TYPES;
	}

	static {
		// Fixed-arity form: (export-gv "file.gv") — exports the whole network.
		FunctionDictionary.addFixedArgsGeneratorWithSideEffects(IN_CLIPS, PARAM_TYPES,
				(final SideEffectFunctionToNetwork network, final SlotType[] paramTypes) -> new ExportGv() {
					@Override
					public Object evaluate(final Function<?>... params) {
						final String fileName = (String) params[0].evaluate();
						try (final FileWriter fileWriter = new FileWriter(fileName)) {
							fileWriter.write(new NetworkToDot(network).toString());
						} catch (final IOException e) {
							// NOTE(review): failures are only printed; the
							// function still returns null to the caller.
							e.printStackTrace();
						}
						return null;
					}
				});
		// Varargs form: (export-gv "file.gv" sym1 sym2 ...) — the symbol images
		// are handed to NetworkToDot alongside the network.
		FunctionDictionary.addVarArgsGeneratorWithSideEffects(IN_CLIPS,
				(final SideEffectFunctionToNetwork network, final SlotType[] paramTypes) -> {
					// Only matches when the first argument is a STRING (file
					// name) and every following argument is a SYMBOL.
					if (paramTypes.length < 2 || paramTypes[0] != SlotType.STRING) {
						return null;
					}
					for (int i = 1; i < paramTypes.length; ++i) {
						if (paramTypes[i] != SlotType.SYMBOL) {
							return null;
						}
					}
					return new ExportGv() {
						@Override
						public Object evaluate(final Function<?>... params) {
							final String fileName = (String) params[0].evaluate();
							try (final FileWriter fileWriter = new FileWriter(fileName)) {
								// Evaluate params[1..n] to symbols and pass
								// their images as a String[] to NetworkToDot.
								fileWriter.write(new NetworkToDot(network,
										toArray(Arrays.stream(params, 1, params.length)
												.map(f -> ((Symbol) f.evaluate()).getImage()), String[]::new))
										.toString());
							} catch (final IOException e) {
								e.printStackTrace();
							}
							return null;
						}
					};
				});
	}
}
apache-2.0
a1705164/Restaurante
Restaurante/Web/src/domain/Prato.java
3268
package domain;

import java.io.Serializable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.validation.constraints.Digits;
import javax.validation.constraints.NotNull;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;

/**
 * JPA entity for a dish ("prato"): name, sale price, preparation time and the
 * products it is composed of via the PRODUTO_PRATO join table.
 */
@Entity
@Table(name="PRATO")
@NamedQueries({
    @NamedQuery(name = "Prato.findAll", query = "SELECT p FROM Prato p"),
    @NamedQuery(name = "Prato.findById", query = "SELECT p FROM Prato p WHERE p.id = :id"),
    @NamedQuery(name = "Prato.findByNome", query = "SELECT p FROM Prato p WHERE p.nome = :nome"),
    @NamedQuery(name = "Prato.findByPrecoVenda", query = "SELECT p FROM Prato p WHERE p.precoVenda = :precoVenda"),
    @NamedQuery(name = "Prato.findByTempoPreparo", query = "SELECT p FROM Prato p WHERE p.tempoPreparo = :tempoPreparo")})
public class Prato implements Serializable {

    private static final long serialVersionUID = 1L;

    /** No-arg constructor required by JPA. */
    public Prato(){
    }

    /** Creates a dish without an id (the id is assigned from the sequence on persist). */
    public Prato(String nome, BigDecimal precoVenda, BigInteger tempoPreparo, List<Produto> produtos) {
        this.nome = nome;
        this.precoVenda = precoVenda;
        this.tempoPreparo = tempoPreparo;
        this.produtos = produtos;
    }

    /** Creates a dish with an explicit id. */
    public Prato(BigInteger id, String nome, BigDecimal precoVenda, BigInteger tempoPreparo, List<Produto> produtos) {
        this.id = id;
        this.nome = nome;
        this.precoVenda = precoVenda;
        this.tempoPreparo = tempoPreparo;
        this.produtos = produtos;
    }

    // Primary key, generated from the SEQ_PRATO database sequence.
    @Id
    @SequenceGenerator(name="pk_sequence", sequenceName="SEQ_PRATO", allocationSize=1)
    @GeneratedValue(strategy=GenerationType.SEQUENCE, generator="pk_sequence")
    private BigInteger id;

    // Dish name (required).
    @NotNull
    @Column(name="NOME")
    private String nome;

    // Sale price (required): at most 8 integer digits and 2 decimals.
    @NotNull
    @Digits(integer=8, fraction=2)
    @Column(name="PRECO_VENDA")
    private BigDecimal precoVenda;

    // Preparation time; unit is not specified here — presumably minutes, confirm with callers.
    @Column(name="TEMPO_PREPARO")
    private BigInteger tempoPreparo;

    // Products composing this dish, mapped through the PRODUTO_PRATO join table.
    @JoinTable(name = "PRODUTO_PRATO", joinColumns = {
        @JoinColumn(name = "id_prato", referencedColumnName = "id")}, inverseJoinColumns = {
        @JoinColumn(name = "id_produto", referencedColumnName = "id")})
    @ManyToMany
    private List<Produto> produtos;

    public BigInteger getId() {
        return id;
    }

    public void setId(BigInteger id) {
        this.id = id;
    }

    public String getNome() {
        return nome;
    }

    public void setNome(String nome) {
        this.nome = nome;
    }

    public BigDecimal getPrecoVenda() {
        return precoVenda;
    }

    public void setPrecoVenda(BigDecimal precoVenda) {
        this.precoVenda = precoVenda;
    }

    public BigInteger getTempoPreparo() {
        return tempoPreparo;
    }

    public void setTempoPreparo(BigInteger tempoPreparo) {
        this.tempoPreparo = tempoPreparo;
    }

    public List<Produto> getProdutos() {
        return produtos;
    }

    public void setProdutos(List<Produto> produtos) {
        this.produtos = produtos;
    }
}
apache-2.0
cloudiator/common
domain/src/main/java/de/uniulm/omi/cloudiator/domain/LoginNameSuppliers.java
1028
/*
 * Copyright 2017 University of Ulm
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package de.uniulm.omi.cloudiator.domain;

/**
 * Static factory methods for {@link LoginNameSupplier} implementations.
 *
 * Created by daniel on 22.06.16.
 */
public class LoginNameSuppliers {

    private LoginNameSuppliers() {
        // Utility class: never instantiated.
        throw new AssertionError("static class");
    }

    /** A supplier without a login name (backed by NullLoginNameSupplier). */
    public static LoginNameSupplier nullSupplier() {
        return new NullLoginNameSupplier();
    }

    /** A supplier backed by the given fixed login name. */
    public static LoginNameSupplier staticSupplier(String loginName) {
        return new StaticLoginNameSupplier(loginName);
    }
}
apache-2.0
CesarPantoja/jena
jena-base/src/main/java/org/apache/jena/atlas/lib/ListUtils.java
3472
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.atlas.lib; import static java.util.Arrays.stream; import static java.util.stream.Collectors.joining; import java.util.ArrayList ; import java.util.List ; import java.util.stream.Collectors ; import java.util.stream.Stream ; import org.apache.jena.atlas.io.IndentedWriter ; import org.apache.jena.atlas.logging.Log ; /** Various things for lists */ public class ListUtils { private ListUtils() {} public static <T> List<T> unique(List<T> list) { return toList(list.stream().distinct()) ; } public static List<Integer> asList(int... 
values) { List<Integer> x = new ArrayList<>(values.length) ; for ( int v : values ) x.add(v) ; return x ; } // This is commonly needed public static <T> List<T> toList(Stream<T> stream) { return stream.collect(Collectors.toList()) ; } public static <T> String str(T[] array) { return stream(array).map(String::valueOf).collect(joining(", ", "[", "]")); } public static String str(int[] array) { return stream(array).mapToObj(String::valueOf).collect(joining(", ", "[", "]")); } public static String str(long[] array) { return stream(array).mapToObj(String::valueOf).collect(joining(", ", "[", "]")); } public static <T> void print(IndentedWriter out, List<T> list) { print(out, list, " ") ; } public static <T> void print(final IndentedWriter out, List<T> list, final CharSequence sep) { out.print(list.stream().map(String::valueOf).collect(joining(sep))); } /** Return a list of lists of all the elements of collection in every order * Easy to run out of heap memory. * * See also {@code org.apache.jena.ext.com.google.common.collect.Collections2#permutations} */ static public <T> List<List<T>> permute(List<T> c) { if ( c.size() > 5 ) { Log.warn(ListUtils.class, "Attempt to permute more than 5 items - think again") ; return null ; } List<List<T>> x = new ArrayList<>() ; if ( c.size() == 1 ) { x.add(c) ; return x ; } for ( T obj : c ) { List<T> c2 = new ArrayList<>(c) ; c2.remove(obj) ; List<List<T>> x2 = permute(c2) ; // For each list returned for ( List<T> x3 : x2 ) { // Gives a more expected ordering x3.add(0,obj) ; x.add(x3) ; } } return x ; } }
apache-2.0
intel-hadoop/chimera
src/test/java/com/intel/chimera/stream/CBCPKCS5PaddingCryptoStreamTest.java
1129
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intel.chimera.stream;

import java.io.IOException;

import com.intel.chimera.cipher.CipherTransformation;

/**
 * Runs the shared crypto stream test suite against the AES/CBC/PKCS5Padding
 * transformation.
 */
public class CBCPKCS5PaddingCryptoStreamTest extends AbstractCryptoStreamTest {

  // Selects the transformation under test before the inherited tests run.
  // NOTE(review): no @Override in the original — confirm the base class
  // declares setUp with this exact signature before adding one.
  public void setUp() throws IOException {
    transformation = CipherTransformation
        .AES_CBC_PKCS5PADDING;
  }
}
apache-2.0
paulseawa/p4ic4idea
plugin/src/net/groboclown/idea/p4ic/v2/ui/alerts/LoginFailedHandler.java
998
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.groboclown.idea.p4ic.v2.ui.alerts;

import net.groboclown.idea.p4ic.v2.server.connection.CriticalErrorHandler;
import org.jetbrains.annotations.NotNull;

import java.util.Date;

/**
 * Critical-error handler for login failures.
 *
 * Currently a stub: every invocation throws (see the FIXME below), so any
 * caller reaching this path fails loudly until real handling is implemented.
 */
public class LoginFailedHandler implements CriticalErrorHandler {

    @Override
    public void handleError(@NotNull final Date when) {
        // FIXME
        // Deliberate placeholder, not dead code: fail fast rather than
        // silently ignore a login failure.
        throw new IllegalStateException("not implemented");
    }
}
apache-2.0
youzigege2048/sharelove
MyApplication/app/src/main/java/youzi/com/sharelove/view/fragment_about.java
600
package youzi.com.sharelove.view; import android.os.Bundle; import android.support.v4.app.Fragment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.TextView; import youzi.com.sharelove.R; /** * Created by youzi 2016/5/28. */ public class fragment_about extends Fragment { @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View rootView = inflater.inflate(R.layout.about, container, false); return rootView; } }
apache-2.0
sbt/ivy
src/java/org/apache/ivy/Ivy.java
31026
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.ivy; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import org.apache.ivy.core.IvyContext; import org.apache.ivy.core.LogOptions; import org.apache.ivy.core.cache.ResolutionCacheManager; import org.apache.ivy.core.check.CheckEngine; import org.apache.ivy.core.deliver.DeliverEngine; import org.apache.ivy.core.deliver.DeliverOptions; import org.apache.ivy.core.event.EventManager; import org.apache.ivy.core.install.InstallEngine; import org.apache.ivy.core.install.InstallOptions; import org.apache.ivy.core.module.descriptor.ModuleDescriptor; import org.apache.ivy.core.module.id.ModuleId; import org.apache.ivy.core.module.id.ModuleRevisionId; import org.apache.ivy.core.publish.PublishEngine; import org.apache.ivy.core.publish.PublishOptions; import org.apache.ivy.core.report.ResolveReport; import org.apache.ivy.core.repository.RepositoryManagementEngine; import org.apache.ivy.core.resolve.ResolveData; import 
org.apache.ivy.core.resolve.ResolveEngine; import org.apache.ivy.core.resolve.ResolveOptions; import org.apache.ivy.core.resolve.ResolvedModuleRevision; import org.apache.ivy.core.retrieve.RetrieveEngine; import org.apache.ivy.core.retrieve.RetrieveOptions; import org.apache.ivy.core.retrieve.RetrieveReport; import org.apache.ivy.core.search.ModuleEntry; import org.apache.ivy.core.search.OrganisationEntry; import org.apache.ivy.core.search.RevisionEntry; import org.apache.ivy.core.search.SearchEngine; import org.apache.ivy.core.settings.IvySettings; import org.apache.ivy.core.sort.SortEngine; import org.apache.ivy.core.sort.SortOptions; import org.apache.ivy.plugins.matcher.PatternMatcher; import org.apache.ivy.plugins.repository.TransferEvent; import org.apache.ivy.plugins.repository.TransferListener; import org.apache.ivy.plugins.resolver.BasicResolver; import org.apache.ivy.plugins.resolver.DependencyResolver; import org.apache.ivy.plugins.trigger.Trigger; import org.apache.ivy.util.DateUtil; import org.apache.ivy.util.HostUtil; import org.apache.ivy.util.Message; import org.apache.ivy.util.MessageLoggerEngine; /** * <a href="http://ant.apache.org/ivy/">Ivy</a> is a free java based dependency manager. 
* * This class is the main class of Ivy, which acts as a Facade to all services offered by Ivy: * <ul> * <li>resolve dependencies</li> * <li>retrieve artifacts to a local location</li> * <li>deliver and publish modules</li> * <li>repository search and listing</li> * </ul> * Here is one typical usage: * * <pre> * Ivy ivy = Ivy.newInstance(); * ivy.configure(new URL(&quot;ivysettings.xml&quot;)); * ivy.resolve(new URL(&quot;ivy.xml&quot;)); * </pre> * * <h2>Using Ivy engines directly</h2> * * If the methods offered by the {@link Ivy} class are not flexible enough and you want to use Ivy * engines directly, you need to call the methods within a single {@link IvyContext} associated to * the {@link Ivy} instance you use.<br> To do so, it is recommended to use the * {@link #execute(org.apache.ivy.Ivy.IvyCallback)} method like this: * <pre> * Ivy ivy = Ivy.newInstance(); * ivy.execute(new IvyCallback() { * public Object doInIvyContext(Ivy ivy, IvyContext context) { * // obviously we can use regular Ivy methods in the callback * ivy.configure(new URL(&quot;ivysettings.xml&quot;)); * // and we can safely use Ivy engines too * ivy.getResolveEngine().resolve(new URL(&quot;ivy.xml&quot;)); * return null; * } * }); * </pre> */ public class Ivy { /** * Callback used to execute a set of Ivy related methods within an {@link IvyContext}. * * @see Ivy#execute(org.apache.ivy.Ivy.IvyCallback) */ public static interface IvyCallback { /** * Executes Ivy related job within an {@link IvyContext} * * @param ivy * the {@link Ivy} instance to which this callback is related * @param context * the {@link IvyContext} in which this callback is executed * @return the result of this job, <code>null</code> if there is no result */ public Object doInIvyContext(Ivy ivy, IvyContext context); } private static final int KILO = 1024; /** * @deprecated Use the {@link DateUtil} utility class instead. 
*/ public static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat(DateUtil.DATE_FORMAT_PATTERN); /** * the current version of Ivy, as displayed on the console when * Ivy is initialized */ private static final String IVY_VERSION; /** * the date at which this version of Ivy has been built. * May be empty if unknown. */ private static final String IVY_DATE; static { // initialize IVY_VERSION and IVY_DATE Properties props = new Properties(); URL moduleURL = Message.class.getResource("/module.properties"); if (moduleURL != null) { try { InputStream module = moduleURL.openStream(); props.load(module); module.close(); } catch (IOException e) { // ignore this exception, we will initialize with default values } } IVY_VERSION = props.getProperty("version", "non official version"); IVY_DATE = props.getProperty("date", ""); } /** * Returns the current version of Ivy, as displayed on the console when * Ivy is initialized. * * @return the current version of Ivy */ public static String getIvyVersion() { return IVY_VERSION; } /** * Returns the date at which this version of Ivy has been built. * <p> * May be empty if unknown. * * @return the date at which this version of Ivy has been built */ public static String getIvyDate() { return IVY_DATE; } /** * Returns the URL at which Ivy web site can be found. * @return the URL at which Ivy web site can be found */ public static String getIvyHomeURL() { return "http://ant.apache.org/ivy/"; } public static Ivy newInstance() { Ivy ivy = new Ivy(); ivy.bind(); return ivy; } public static Ivy newInstance(IvySettings settings) { Ivy ivy = new Ivy(); ivy.setSettings(settings); ivy.bind(); return ivy; } /** * True if the current processing has been requested to be interrupted, false otherwise */ private boolean interrupted; /** * True if this instance of Ivy has already been bound to its dependencies, false otherwise. 
* * @see bind() */ private boolean bound; /* * Following are dependencies of the Ivy instance on instances of engines and manager which * actually do the work The attributes can be set either manually using the corresponding * setters, or all at once with the default implementations using the bind method */ private IvySettings settings; private EventManager eventManager; private SortEngine sortEngine; private SearchEngine searchEngine; private CheckEngine checkEngine; private ResolveEngine resolveEngine; private RetrieveEngine retrieveEngine; private DeliverEngine deliverEngine; private PublishEngine publishEngine; private InstallEngine installEngine; private RepositoryManagementEngine repositoryEngine; /** * The logger engine to use to log messages when using this Ivy instance. */ private MessageLoggerEngine loggerEngine = new MessageLoggerEngine(); /** * The default constructor of Ivy allows to create an instance of Ivy with none of its * dependencies (engines, settings, ...) created. If you use this constructor, it's your * responsibility to set the dependencies of Ivy using the appropriate setters * (setResolveEngine, ...). You can also call the bind method to set all the dependencies except * those that you have provided using the setters. If you want to get an instance ready to use, * prefer the use of Ivy.newInstance(). */ public Ivy() { } /** * This method is used to bind this Ivy instance to required dependencies, i.e. instance of * settings, engines, and so on. * <p> * After this call Ivy is still not configured, which means that * the settings object is still empty. 
* </p> */ public void bind() { pushContext(); try { if (settings == null) { settings = new IvySettings(); } if (eventManager == null) { eventManager = new EventManager(); } if (sortEngine == null) { sortEngine = new SortEngine(settings); } if (searchEngine == null) { searchEngine = new SearchEngine(settings); } if (resolveEngine == null) { resolveEngine = new ResolveEngine(settings, eventManager, sortEngine); } if (retrieveEngine == null) { retrieveEngine = new RetrieveEngine(settings, eventManager); } if (deliverEngine == null) { deliverEngine = new DeliverEngine(settings); } if (publishEngine == null) { publishEngine = new PublishEngine(settings, eventManager); } if (installEngine == null) { installEngine = new InstallEngine( settings, searchEngine, resolveEngine); } if (repositoryEngine == null) { repositoryEngine = new RepositoryManagementEngine( settings, searchEngine, resolveEngine); } eventManager.addTransferListener(new TransferListener() { public void transferProgress(TransferEvent evt) { ResolveData resolve; switch (evt.getEventType()) { case TransferEvent.TRANSFER_PROGRESS: resolve = IvyContext.getContext().getResolveData(); if (resolve == null || !LogOptions.LOG_QUIET.equals( resolve.getOptions().getLog())) { Message.progress(); } break; case TransferEvent.TRANSFER_COMPLETED: resolve = IvyContext.getContext().getResolveData(); if (resolve == null || !LogOptions.LOG_QUIET.equals( resolve.getOptions().getLog())) { Message.endProgress(" (" + (evt.getTotalLength() / KILO) + "kB)"); } break; default: break; } } }); bound = true; } finally { popContext(); } } /** * Executes the given callback in the context of this Ivy instance. 
* * Alternatively you can use the {@link #pushContext()} and {@link #popContext()} methods, but * this is not recommended: * * <pre> * Object result = null; * pushContext(); * try { * result = callback.doInIvyContext(this, IvyContext.getContext()); * } finally { * popContext(); * } * doSomethingWithResult(result); * </pre> * * @param callback * @return */ public Object execute(IvyCallback callback) { pushContext(); try { return callback.doInIvyContext(this, IvyContext.getContext()); } finally { popContext(); } } /** * Pushes a new IvyContext bound to this Ivy instance if the current context is not already * bound to this Ivy instance. If the current context is already bound to this Ivy instance, it * pushes the current context on the context stack, so that you can (and must) always call * {@link #popContext()} when you're done. * <p> * Alternatively, you can use the {@link #execute(org.apache.ivy.Ivy.IvyCallback)} method which * takes care of everything for you. * </p> */ public void pushContext() { if (IvyContext.getContext().peekIvy() != this) { // the current Ivy context is associated with another Ivy instance, we push a new // instance IvyContext.pushNewContext(); IvyContext.getContext().setIvy(this); } else { // the current Ivy context is already associated with this Ivy instance, we only push it // for popping consistency IvyContext.pushContext(IvyContext.getContext()); } } /** * Pops the current Ivy context. * <p> * You must call this method once and only once for each call to {@link #pushContext()}, when * you're done with the your Ivy related work. * </p> * <p> * Alternatively, you can use the {@link #execute(org.apache.ivy.Ivy.IvyCallback)} method which * takes care of everything for you. 
* </p> */ public void popContext() { IvyContext.popContext(); } // /////////////////////////////////////////////////////////////////////// // LOAD SETTINGS // /////////////////////////////////////////////////////////////////////// public void configure(File settingsFile) throws ParseException, IOException { pushContext(); try { assertBound(); settings.load(settingsFile); postConfigure(); } finally { popContext(); } } public void configure(URL settingsURL) throws ParseException, IOException { pushContext(); try { assertBound(); settings.load(settingsURL); postConfigure(); } finally { popContext(); } } public void configureDefault() throws ParseException, IOException { pushContext(); try { assertBound(); settings.loadDefault(); postConfigure(); } finally { popContext(); } } /** * Configures Ivy with 1.4 compatible default settings */ public void configureDefault14() throws ParseException, IOException { pushContext(); try { assertBound(); settings.loadDefault14(); postConfigure(); } finally { popContext(); } } // /////////////////////////////////////////////////////////////////////// // CHECK // /////////////////////////////////////////////////////////////////////// public boolean check(URL ivyFile, String resolvername) { pushContext(); try { return checkEngine.check(ivyFile, resolvername); } finally { popContext(); } } // /////////////////////////////////////////////////////////////////////// // RESOLVE // /////////////////////////////////////////////////////////////////////// public ResolveReport resolve(File ivySource) throws ParseException, IOException { pushContext(); try { return resolveEngine.resolve(ivySource); } finally { popContext(); } } public ResolveReport resolve(URL ivySource) throws ParseException, IOException { pushContext(); try { return resolveEngine.resolve(ivySource); } finally { popContext(); } } public ResolveReport resolve(ModuleRevisionId mrid, ResolveOptions options, boolean changing) throws ParseException, IOException { pushContext(); try { 
return resolveEngine.resolve(mrid, options, changing); } finally { popContext(); } } public ResolveReport resolve(URL ivySource, ResolveOptions options) throws ParseException, IOException { pushContext(); try { return resolveEngine.resolve(ivySource, options); } finally { popContext(); } } public ResolveReport resolve(ModuleDescriptor md, ResolveOptions options) throws ParseException, IOException { pushContext(); try { return resolveEngine.resolve(md, options); } finally { popContext(); } } // /////////////////////////////////////////////////////////////////////// // INSTALL // /////////////////////////////////////////////////////////////////////// public ResolveReport install(ModuleRevisionId mrid, String from, String to, InstallOptions options) throws IOException { pushContext(); try { return installEngine.install(mrid, from, to, options); } finally { popContext(); } } // /////////////////////////////////////////////////////////////////////// // RETRIEVE // /////////////////////////////////////////////////////////////////////// public int retrieve(ModuleRevisionId mrid, String destFilePattern, RetrieveOptions options) throws IOException { pushContext(); try { return retrieveEngine.retrieve(mrid, destFilePattern, options); } finally { popContext(); } } public RetrieveReport retrieve(ModuleRevisionId mrid, RetrieveOptions options) throws IOException { pushContext(); try { return retrieveEngine.retrieve(mrid, options); } finally { popContext(); } } // /////////////////////////////////////////////////////////////////////// // DELIVER // /////////////////////////////////////////////////////////////////////// public void deliver(ModuleRevisionId mrid, String revision, String destIvyPattern) throws IOException, ParseException { pushContext(); try { deliverEngine.deliver( mrid, revision, destIvyPattern, DeliverOptions.newInstance(settings)); } finally { popContext(); } } public void deliver(String revision, String destIvyPattern, DeliverOptions options) throws 
IOException, ParseException { pushContext(); try { deliverEngine.deliver(revision, destIvyPattern, options); } finally { popContext(); } } /** * Example of use: deliver(mrid, "1.5", "target/ivy/ivy-[revision].xml", * DeliverOptions.newInstance(settings).setStatus("release").setValidate(false)); * * @param mrid * @param revision * @param destIvyPattern * @param options * @throws IOException * @throws ParseException */ public void deliver(ModuleRevisionId mrid, String revision, String destIvyPattern, DeliverOptions options) throws IOException, ParseException { pushContext(); try { deliverEngine.deliver(mrid, revision, destIvyPattern, options); } finally { popContext(); } } // /////////////////////////////////////////////////////////////////////// // PUBLISH // /////////////////////////////////////////////////////////////////////// public Collection publish(ModuleRevisionId mrid, Collection srcArtifactPattern, String resolverName, PublishOptions options) throws IOException { pushContext(); try { return publishEngine.publish(mrid, srcArtifactPattern, resolverName, options); } finally { popContext(); } } // /////////////////////////////////////////////////////////////////////// // SORT // /////////////////////////////////////////////////////////////////////// /** * Sorts the collection of IvyNode from the less dependent to the more dependent */ public List sortNodes(Collection nodes, SortOptions options) { pushContext(); try { return getSortEngine().sortNodes(nodes, options); } finally { popContext(); } } /** * Sorts the given ModuleDescriptors from the less dependent to the more dependent. This sort * ensures that a ModuleDescriptor is always found in the list before all ModuleDescriptors * depending directly on it. * * @param moduleDescriptors * a Collection of ModuleDescriptor to sort * @param options * Options to use to sort the descriptors. 
* @return a List of sorted ModuleDescriptors */ public List sortModuleDescriptors(Collection moduleDescriptors, SortOptions options) { pushContext(); try { return getSortEngine().sortModuleDescriptors(moduleDescriptors, options); } finally { popContext(); } } // /////////////////////////////////////////////////////////////////////// // SEARCH // /////////////////////////////////////////////////////////////////////// public ResolvedModuleRevision findModule(ModuleRevisionId mrid) { pushContext(); try { ResolveOptions options = new ResolveOptions(); options.setValidate(false); return resolveEngine.findModule(mrid, options); } finally { popContext(); } } public ModuleEntry[] listModuleEntries(OrganisationEntry org) { pushContext(); try { return searchEngine.listModuleEntries(org); } finally { popContext(); } } public ModuleId[] listModules(ModuleId criteria, PatternMatcher matcher) { pushContext(); try { return searchEngine.listModules(criteria, matcher); } finally { popContext(); } } public ModuleRevisionId[] listModules(ModuleRevisionId criteria, PatternMatcher matcher) { pushContext(); try { return searchEngine.listModules(criteria, matcher); } finally { popContext(); } } public String[] listModules(String org) { pushContext(); try { return searchEngine.listModules(org); } finally { popContext(); } } public OrganisationEntry[] listOrganisationEntries() { pushContext(); try { return searchEngine.listOrganisationEntries(); } finally { popContext(); } } public String[] listOrganisations() { pushContext(); try { return searchEngine.listOrganisations(); } finally { popContext(); } } public RevisionEntry[] listRevisionEntries(ModuleEntry module) { pushContext(); try { return searchEngine.listRevisionEntries(module); } finally { popContext(); } } public String[] listRevisions(String org, String module) { pushContext(); try { return searchEngine.listRevisions(org, module); } finally { popContext(); } } public String[] listTokenValues(String token, Map otherTokenValues) { 
pushContext(); try { return searchEngine.listTokenValues(token, otherTokenValues); } finally { popContext(); } } // /////////////////////////////////////////////////////////////////////// // INTERRUPTIONS // /////////////////////////////////////////////////////////////////////// /** * Interrupts the current running operation, no later than interruptTimeout milliseconds after * the call */ public void interrupt() { Thread operatingThread = IvyContext.getContext().getOperatingThread(); interrupt(operatingThread); } /** * Interrupts the current running operation in the given operating thread, no later than * interruptTimeout milliseconds after the call */ public void interrupt(Thread operatingThread) { if (operatingThread != null && operatingThread.isAlive()) { if (operatingThread == Thread.currentThread()) { throw new IllegalStateException("cannot call interrupt from ivy operating thread"); } Message.verbose("interrupting operating thread..."); operatingThread.interrupt(); synchronized (this) { interrupted = true; } try { Message.verbose("waiting clean interruption of operating thread"); operatingThread.join(settings.getInterruptTimeout()); } catch (InterruptedException e) { // reset thread interrupt status Thread.currentThread().interrupt(); } if (operatingThread.isAlive()) { Message.warn("waited clean interruption for too long: stopping operating thread"); operatingThread.stop(); } synchronized (this) { interrupted = false; } } } public synchronized boolean isInterrupted() { return interrupted; } /** * Check if the current operation has been interrupted, and if it is the case, throw a runtime * exception */ public void checkInterrupted() { if (isInterrupted()) { Message.info("operation interrupted"); throw new RuntimeException("operation interrupted"); } } public static String getWorkingRevision() { return "working@" + HostUtil.getLocalHostName(); } public ResolutionCacheManager getResolutionCacheManager() { return settings.getResolutionCacheManager(); } private 
void assertBound() { if (!bound) { bind(); } } private void postConfigure() { Collection triggers = settings.getTriggers(); for (Iterator iter = triggers.iterator(); iter.hasNext();) { Trigger trigger = (Trigger) iter.next(); eventManager.addIvyListener(trigger, trigger.getEventFilter()); } for (Iterator iter = settings.getResolvers().iterator(); iter.hasNext();) { DependencyResolver resolver = (DependencyResolver) iter.next(); if (resolver instanceof BasicResolver) { ((BasicResolver) resolver).setEventManager(eventManager); } } } public String getVariable(String name) { pushContext(); try { assertBound(); return settings.getVariable(name); } finally { popContext(); } } public String substitute(String str) { pushContext(); try { assertBound(); return settings.substitute(str); } finally { popContext(); } } public void setVariable(String varName, String value) { pushContext(); try { assertBound(); settings.setVariable(varName, value); } finally { popContext(); } } // /////////////////////////////////////////////////////////////////// // GETTERS / SETTERS // /////////////////////////////////////////////////////////////////// public IvySettings getSettings() { return settings; } public EventManager getEventManager() { return eventManager; } public CheckEngine getCheckEngine() { return checkEngine; } public void setCheckEngine(CheckEngine checkEngine) { this.checkEngine = checkEngine; } public DeliverEngine getDeliverEngine() { return deliverEngine; } public void setDeliverEngine(DeliverEngine deliverEngine) { this.deliverEngine = deliverEngine; } public InstallEngine getInstallEngine() { return installEngine; } public void setInstallEngine(InstallEngine installEngine) { this.installEngine = installEngine; } public PublishEngine getPublishEngine() { return publishEngine; } public void setPublishEngine(PublishEngine publishEngine) { this.publishEngine = publishEngine; } public ResolveEngine getResolveEngine() { return resolveEngine; } public void 
setResolveEngine(ResolveEngine resolveEngine) { this.resolveEngine = resolveEngine; } public RetrieveEngine getRetrieveEngine() { return retrieveEngine; } public void setRetrieveEngine(RetrieveEngine retrieveEngine) { this.retrieveEngine = retrieveEngine; } public SearchEngine getSearchEngine() { return searchEngine; } public void setSearchEngine(SearchEngine searchEngine) { this.searchEngine = searchEngine; } public SortEngine getSortEngine() { return sortEngine; } public void setSortEngine(SortEngine sortEngine) { this.sortEngine = sortEngine; } public RepositoryManagementEngine getRepositoryEngine() { return repositoryEngine; } public void setRepositoryEngine(RepositoryManagementEngine repositoryEngine) { this.repositoryEngine = repositoryEngine; } public void setEventManager(EventManager eventManager) { this.eventManager = eventManager; } public void setSettings(IvySettings settings) { this.settings = settings; } public MessageLoggerEngine getLoggerEngine() { return loggerEngine; } }
apache-2.0
denis-colliot/fscf-contacts
src/main/java/fr/fscf/contacts/client/ui/view/zone/AuthenticationZoneView.java
2072
package fr.fscf.contacts.client.ui.view.zone; import com.google.gwt.core.client.GWT; import com.google.gwt.uibinder.client.UiBinder; import com.google.gwt.uibinder.client.UiField; import com.google.gwt.uibinder.client.UiTemplate; import com.google.gwt.user.client.ui.IsWidget; import com.google.gwt.user.client.ui.Widget; import fr.fscf.contacts.client.ui.presenter.zone.AuthenticationZonePresenter; import fr.fscf.contacts.client.ui.view.base.AbstractView; import org.gwtbootstrap3.client.ui.AnchorButton; import org.gwtbootstrap3.client.ui.AnchorListItem; import org.gwtbootstrap3.client.ui.ListDropDown; import org.gwtbootstrap3.client.ui.constants.Toggle; /** * Authentication zone view (not a real view, just a widgets set). * * @author Denis */ public class AuthenticationZoneView extends AbstractView implements AuthenticationZonePresenter.View { /** * {@link UiBinder} interface adapted to {@link AuthenticationZoneView}. */ @UiTemplate("AuthenticationZoneView.ui.xml") interface ViewUiBinder extends UiBinder<Widget, AuthenticationZoneView> { } @UiField protected AnchorButton usernameLabel; @UiField protected ListDropDown authenticatedMenu; @UiField protected AnchorListItem loginLink; @UiField protected AnchorListItem logoutLink; @UiField protected AnchorListItem email; @Override public void initialize() { final ViewUiBinder binder = GWT.create(ViewUiBinder.class); initWidget(binder.createAndBindUi(this)); } @Override public IsWidget getAuthenticatedMenu() { return authenticatedMenu; } @Override public void setUsername(final String username) { usernameLabel.setText(username); usernameLabel.setDataToggle(Toggle.DROPDOWN); } @Override public AnchorListItem getLoginHandler() { return loginLink; } @Override public AnchorListItem getLogoutHandler() { return logoutLink; } @Override public AnchorListItem getEmail() { return email; } }
apache-2.0
jparsec/jparsec
jparsec-examples/src/main/java/org/jparsec/examples/java/ast/declaration/ConstructorDef.java
2717
/***************************************************************************** * Copyright (C) jparsec.org * * ------------------------------------------------------------------------- * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * *****************************************************************************/ package org.jparsec.examples.java.ast.declaration; import java.util.List; import org.jparsec.examples.common.Strings; import org.jparsec.examples.common.ValueObject; import org.jparsec.examples.java.ast.statement.BlockStatement; import org.jparsec.examples.java.ast.statement.Modifier; import org.jparsec.examples.java.ast.statement.ParameterDef; import org.jparsec.examples.java.ast.type.TypeLiteral; /** * Represents a constructor definition. 
* * @author benyu */ public final class ConstructorDef extends ValueObject implements Member { public final List<Modifier> modifiers; public final String name; public final List<ParameterDef> parameters; public final List<TypeLiteral> exceptions; public final BlockStatement body; public ConstructorDef(List<Modifier> modifiers, String name, List<ParameterDef> parameters, List<TypeLiteral> exceptions, BlockStatement body) { this.modifiers = modifiers; this.name = name; this.parameters = parameters; this.exceptions = exceptions; this.body = body; } @Override public String toString() { StringBuilder builder = new StringBuilder(); for (Modifier modifier : modifiers) { builder.append(modifier).append(' '); } builder.append(name).append('('); Strings.join(builder, ", ", parameters); builder.append(')'); if (exceptions != null) { builder.append(" throws "); Strings.join(builder, ", ", exceptions); } builder.append(' ').append(body); return builder.toString(); } }
apache-2.0
jdeparser/jdeparser2
src/main/java/org/jboss/jdeparser/SuccessorJVarDeclaration.java
2223
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.jdeparser; import java.lang.annotation.Annotation; /** * @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a> */ class SuccessorJVarDeclaration implements JVarDeclaration { private final FirstJVarDeclaration first; private final String name; private final JExpr value; SuccessorJVarDeclaration(final FirstJVarDeclaration first, final String name, final JExpr value) { this.first = first; this.name = name; this.value = value; } public String name() { return name; } JExpr getValue() { return value; } public JComment blockComment() { return first.blockComment(); } public JComment lineComment() { return first.lineComment(); } public JComment deprecated() { return first.deprecated(); } public JDocComment docComment() { return first.docComment(); } public JAnnotation annotate(final Class<? extends Annotation> type) { return first.annotate(type); } public JAnnotation annotate(final JType type) { return first.annotate(type); } public JAnnotation annotate(final String type) { return first.annotate(type); } public JVarDeclaration add(final String name) { return first.add(name); } public JVarDeclaration add(final String name, final JExpr init) { return first.add(name, init); } public JType type() { return first.type(); } }
apache-2.0
palessandro/activejdbc
activejdbc/src/test/java/org/javalite/activejdbc/test_models/Item.java
813
/* Copyright 2009-2016 Igor Polevoy Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.javalite.activejdbc.test_models; import org.javalite.activejdbc.Model; import org.javalite.activejdbc.annotations.VersionColumn; /** * @author Igor Polevoy */ @VersionColumn("lock_version") public class Item extends Model { }
apache-2.0
quarkusio/quarkus
extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/xml/orm/OrmXmlAnnotationOverrideTest.java
1767
package io.quarkus.hibernate.orm.xml.orm; import static org.assertj.core.api.Assertions.assertThat; import javax.inject.Inject; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.transaction.Transactional; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import io.quarkus.hibernate.orm.SchemaUtil; import io.quarkus.hibernate.orm.SmokeTestUtils; import io.quarkus.test.QuarkusUnitTest; /** * Test that assigning an orm.xml mapping file explicitly to override annotations * works as expected. */ public class OrmXmlAnnotationOverrideTest { @RegisterExtension static QuarkusUnitTest runner = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClass(SmokeTestUtils.class) .addClass(SchemaUtil.class) .addClass(AnnotatedEntity.class) .addAsResource("application-mapping-files-my-orm-xml.properties", "application.properties") .addAsResource("META-INF/orm-override.xml", "my-orm.xml")); @Inject EntityManagerFactory entityManagerFactory; @Inject EntityManager entityManager; @Test @Transactional public void ormXmlTakenIntoAccount() { assertThat(SchemaUtil.getColumnNames(entityManagerFactory, AnnotatedEntity.class)) .contains("thename") .doesNotContain("name"); } @Test @Transactional public void smokeTest() { SmokeTestUtils.testSimplePersistRetrieveUpdateDelete(entityManager, AnnotatedEntity.class, AnnotatedEntity::new, AnnotatedEntity::getId, AnnotatedEntity::setName, AnnotatedEntity::getName); } }
apache-2.0
ceylon/ceylon
tools/src/org/eclipse/ceylon/tools/new_/CeylonNewTool.java
14363
/********************************************************************************
 * Copyright (c) 2011-2017 Red Hat Inc. and/or its affiliates and others
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * SPDX-License-Identifier: Apache-2.0
 ********************************************************************************/
package org.eclipse.ceylon.tools.new_;

import java.io.File;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.eclipse.ceylon.common.Constants;
import org.eclipse.ceylon.common.Versions;
import org.eclipse.ceylon.common.config.CeylonConfig;
import org.eclipse.ceylon.common.config.DefaultToolOptions;
import org.eclipse.ceylon.common.tool.Argument;
import org.eclipse.ceylon.common.tool.CeylonBaseTool;
import org.eclipse.ceylon.common.tool.Description;
import org.eclipse.ceylon.common.tool.Hidden;
import org.eclipse.ceylon.common.tool.OptionArgument;
import org.eclipse.ceylon.common.tool.Rest;
import org.eclipse.ceylon.common.tool.Subtool;
import org.eclipse.ceylon.common.tool.Summary;
import org.eclipse.ceylon.common.tool.ToolModel;
import org.eclipse.ceylon.common.tools.CeylonTool;

/**
 * The {@code ceylon new} command-line tool. Generates a new Ceylon project by
 * copying a template directory (under {@code $CEYLON_HOME/templates/<subtool>})
 * into a target directory, substituting template variables that are either
 * given on the command line or prompted for interactively.
 */
@Summary("Generates a new Ceylon project")
@Description("Generates a new project, prompting for information as necessary")
public class CeylonNewTool extends CeylonBaseTool {

    // Model of this tool, used to look up the selected subtool's name.
    private ToolModel<CeylonNewTool> model;

    // The selected project template subtool (Simple, HelloWorld, JavaInterop, Module).
    private NewSubTool project;

    // Root directory containing the project templates; defaults to
    // $CEYLON_HOME/templates, overridable with the hidden --from option.
    private File from = new File(System.getProperty(Constants.PROP_CEYLON_HOME_DIR), "templates");

    // Extra "--key=value" arguments collected from the command line tail.
    private Map<String, String> rest = new HashMap<String, String>();

    public void setToolModel(ToolModel<CeylonNewTool> model) {
        this.model = model;
    }

    @Subtool(argumentName="name", classes={Simple.class, HelloWorld.class, JavaInterop.class, Module.class}, order=2)
    public void setProject(NewSubTool project) {
        this.project = project;
    }

    @Hidden
    @OptionArgument(argumentName="dir")
    public void setFrom(File from) {
        this.from = from;
    }

    /**
     * Collects trailing "--option" / "--option=arg" arguments into {@link #rest};
     * anything not starting with "--" is silently ignored.
     */
    @Rest
    public void setRest(List<String> rest) {
        for (String optionArg : rest) {
            if (optionArg.startsWith("--")) {
                int idx = optionArg.indexOf("=");
                String option;
                String arg;
                if (idx == -1) {
                    // Bare flag: value is the empty string.
                    option = optionArg.substring(2);
                    arg = "";
                } else {
                    option = optionArg.substring(2, idx);
                    arg = optionArg.substring(idx + 1);
                }
                this.rest.put(option, arg);
            }
        }
    }

    @Override
    public void initialize(CeylonTool mainTool) {
    }

    /**
     * Validates the template directory, resolves all template variables
     * (prompting where needed), then creates the project directory and copies
     * the template resources into it.
     */
    @Override
    public void run() throws Exception {
        File fromDir = getFromDir();
        if (!fromDir.exists() || !fromDir.isDirectory()) {
            throw new IllegalArgumentException(Messages.msg("from.nonexistent.or.nondir", fromDir));
        }
        Environment env = buildPromptedEnv();
        List<Copy> resources = project.getResources(env);
        // Create base dir only once all the prompting has been done
        project.mkBaseDir(getCwd());
        for (Copy copy : resources) {
            copy.run(env);
        }
    }

    // Name of the selected subtool, which doubles as the template folder name.
    private String getProjectName() {
        String projectName = model.getSubtoolModel().getToolLoader().getToolName(project.getClass().getName());
        return projectName;
    }

    // Template source directory for the selected project type.
    private File getFromDir() {
        return new File(applyCwd(from), getProjectName());
    }

    /**
     * Builds the substitution environment: seeds well-known keys (ceylon.home,
     * version info, base.dir), then walks the project's variable tree,
     * prompting for values as needed, and finally resolves the source folder
     * from any existing Ceylon configuration.
     */
    private Environment buildPromptedEnv() {
        Environment env = new Environment();
        // TODO Tidy up how we create and what's in this initial environment
        if (project.getDirectory() != null) {
            env.put("base.dir", applyCwd(project.getDirectory()).getAbsolutePath());
        }
        env.put("ceylon.home", System.getProperty(Constants.PROP_CEYLON_HOME_DIR));
        //env.put("ceylon.system.repo", System.getProperty("ceylon.system.repo"));
        env.put("ceylon.version.number", Versions.CEYLON_VERSION_NUMBER);
        env.put("ceylon.version.major", Integer.toString(Versions.CEYLON_VERSION_MAJOR));
        env.put("ceylon.version.minor", Integer.toString(Versions.CEYLON_VERSION_MINOR));
        env.put("ceylon.version.release", Integer.toString(Versions.CEYLON_VERSION_RELEASE));
        env.put("ceylon.version.name", Versions.CEYLON_VERSION_NAME);
        // Walk the variable list as a work queue; each variable may prepend
        // follow-up variables when initialized. Seeing the same key twice
        // means the variables form a cycle rather than a tree.
        Set<String> seenKeys = new HashSet<>();
        List<Variable> vars = new LinkedList<>(project.getVariables());
        while (!vars.isEmpty()) {
            Variable var = vars.remove(0);
            if (seenKeys.contains(var.getKey())) {
                throw new RuntimeException("Variables for project do not form a tree");
            }
            seenKeys.add(var.getKey());
            // TODO Use the value from rest if there is one, only prompting if
            // there is not
            // TODO The problem with this is: "How does the user figure out
            // what option they need to specify on the command line
            // in order to avoid being prompted for it interactively?"
            // Each subtool could provide their own getters and setters
            // but they requires we write a subtool for each project
            // It would be nice if we didn't have to do that, but could just
            // drive the whole thing from a script in the templates dir.
            vars.addAll(0, var.initialize(getProjectName(), env));
        }
        String sourceFolder = Constants.DEFAULT_SOURCE_DIR;
        String baseDir = env.get("base.dir");
        if (project.getDirectory() == null) {
            project.setDirectory(new File(baseDir));
        }
        try {
            // Prefer the source directory configured in an existing
            // .ceylon/config under the base dir, if any.
            CeylonConfig config = CeylonConfig.createFromLocalDir(new File(baseDir));
            List<File> srcs = DefaultToolOptions.getCompilerSourceDirs(config);
            if (!srcs.isEmpty()) {
                sourceFolder = srcs.get(0).getPath();
            } else {
                sourceFolder = Constants.DEFAULT_SOURCE_DIR;
            }
        } catch (Exception e) {
            // Ignore any errors
        }
        env.put("source.folder", sourceFolder);
        log(env);
        return env;
    }

    /**
     * Creates a Copy that copies the single template file {@code src} (matched
     * by path suffix) from the template subfolder {@code cwd} to {@code dst},
     * substituting variables.
     */
    public Copy substituting(final String cwd, final String src, final String dst) {
        return substituting(cwd, new PathMatcher() {
            @Override
            public boolean matches(Path path) {
                return path.endsWith(src);
            }
        }, dst);
    }

    /**
     * Creates a Copy from the template subfolder {@code cwd} to {@code dst}
     * under the project directory, copying files that match
     * {@code pathMatcher} with variable substitution enabled.
     */
    public Copy substituting(final String cwd, PathMatcher pathMatcher, String dst) {
        return new Copy(new File(getFromDir(), cwd), applyCwd(project.getDirectory()), pathMatcher, dst, true);
    }

    // Verbose-only logging; 'verbose' is inherited from CeylonBaseTool.
    private void log(Object msg) {
        if (verbose != null) {
            System.out.println(msg);
        }
    }

    /**
     * Shared base for the directory-creating project subtools (Simple,
     * HelloWorld, JavaInterop). Defines the common variables (base dir, module
     * name/version, optional Eclipse project and Ant build file) and the
     * common resource-copying logic.
     */
    public abstract class BaseProject extends Project {

        private final Variable baseDir;
        private final Variable moduleName;
        private final Variable moduleVersion;
        private final Variable eclipseProjectName;
        private final Variable eclipse;
        private final Variable ant;

        public BaseProject(String defBaseDir, String defModName) {
            // If a project already exists in the target, default to "." so we
            // don't create a nested directory.
            baseDir = Variable.directory("base.dir", shouldCreateProject() ? defBaseDir : ".");
            moduleName = Variable.moduleName("module.name", defModName);
            moduleVersion = Variable.moduleVersion("module.version", "1.0.0");
            // Eclipse project name defaults to the module name via template reference.
            eclipseProjectName = new Variable("eclipse.project.name", null, new PromptedValue("eclipse.project.name", "@[module.name]"));
            // Answering "yes" to eclipse triggers the follow-up project-name prompt.
            eclipse = Variable.yesNo("eclipse", Messages.msg("mnemonic.yes"), eclipseProjectName);
            ant = Variable.yesNo("ant", Messages.msg("mnemonic.yes"));
        }

        @OptionArgument
        @Description("Specifies the name for the new module.")
        public void setModuleName(String moduleName) {
            this.moduleName.setVariableValue(new GivenValue(moduleName));
        }

        @OptionArgument
        @Description("Specifies the version for the new module.")
        public void setModuleVersion(String moduleVersion) {
            this.moduleVersion.setVariableValue(new GivenValue(moduleVersion));
        }

        @OptionArgument
        @Description("Indicates if an Eclipse project should be generated or not.")
        public void setEclipse(boolean eclipse) {
            this.eclipse.setVariableValue(new GivenValue(Boolean.toString(eclipse)));
        }

        @OptionArgument
        @Description("Specifies the name for the Eclipse project.")
        public void setEclipseProjectName(String eclipseProjectName) {
            this.eclipseProjectName.setVariableValue(new GivenValue(eclipseProjectName));
        }

        @OptionArgument
        @Description("Indicates if an Ant build file should be generated or not.")
        public void setAnt(boolean ant) {
            this.ant.setVariableValue(new GivenValue(Boolean.toString(ant)));
        }

        @Override
        public List<Variable> getVariables() {
            List<Variable> result = new ArrayList<>();
            if (getDirectory() == null) {
                result.add(baseDir);
            }
            result.add(moduleName);
            // Derived variables computed from module.name.
            result.add(Variable.moduleQuotedName("module.quoted.name", "module.name"));
            result.add(Variable.moduleDir("module.dir", "module.name"));
            result.add(moduleVersion);
            if (shouldCreateProject()) {
                result.add(eclipse);
                result.add(ant);
            }
            return result;
        }

        // Check if the target directory is not a project
        protected boolean shouldCreateProject() {
            // If it has a ".ceylon" folder we assume a project exists
            File ceylonDir = new File(applyCwd(getDirectory()), ".ceylon");
            if (ceylonDir.isDirectory()) {
                return false;
            }
            // If there's no ".ceylon" folder but there is a "source" folder
            // we also assume a project exists
            File sourcesDir = new File(applyCwd(getDirectory()), Constants.DEFAULT_SOURCE_DIR);
            if (sourcesDir.isDirectory()) {
                return false;
            }
            return true;
        }

        @Override
        public List<Copy> getResources(Environment env) {
            FileSystem fs = FileSystems.getDefault();
            List<Copy> result = new ArrayList<>();
            // Always copy the template's source tree into the module directory.
            result.add(substituting("source", fs.getPathMatcher("glob:**"), new Template("@[source.folder]/@[module.dir]").eval(env)));
            if ("true".equals(env.get("ant"))) {
                result.add(substituting("ant", "build.xml", "."));
            }
            if ("true".equals(env.get("eclipse"))) {
                result.add(substituting("eclipse", fs.getPathMatcher("glob:**"), "."));
            }
            return result;
        }
    }

    @Description("Generates a 'Hello World' style project." +
            "\n\n" +
            "Takes a `dir` argument to indicate in which directory the new project should be created.")
    public class HelloWorld extends BaseProject {
        public HelloWorld() {
            super("helloworld", "com.example.helloworld");
        }
    }

    @Description("Generates a very simple empty project" +
            "\n\n" +
            "Takes a `dir` argument to indicate in which directory the new project should be created.")
    public class Simple extends BaseProject {
        public Simple() {
            super("simple", "com.example.simple");
        }
    }

    @Description("Generates a project that is able to use Java legacy code" +
            "\n\n" +
            "Takes a `dir` argument to indicate in which directory the new project should be created.")
    public class JavaInterop extends BaseProject {
        public JavaInterop() {
            super("javainterop", "com.example.javainterop");
        }
    }

    /**
     * Subtool that adds a new module to an existing project (no project
     * scaffolding, no Eclipse/Ant options); module name comes from a required
     * positional argument.
     */
    public class Module extends NewSubTool {

        private final Variable moduleName;
        private final Variable moduleVersion;

        public Module() {
            // Placeholder values; the real ones are set via the positional
            // arguments below before the variables are initialized.
            moduleName = new Variable("module.name", Variable.moduleNameValidator, new GivenValue("dummy"));
            moduleVersion = new Variable("module.version", Variable.moduleVersionValidator, new GivenValue("1.0.0"));
        }

        @Argument(argumentName="module", multiplicity = "1", order=1)
        public void setModuleName(String moduleName) {
            this.moduleName.setVariableValue(new GivenValue(moduleName));
        }

        @Argument(argumentName="version", multiplicity = "?", order=2)
        public void setModuleVersion(String moduleVersion) {
            this.moduleVersion.setVariableValue(new GivenValue(moduleVersion));
        }

        @Override
        public List<Variable> getVariables() {
            List<Variable> result = new ArrayList<>();
            // Modules are always created relative to the current directory.
            result.add(new Variable("base.dir", null, new GivenValue(".")));
            result.add(moduleName);
            result.add(Variable.moduleQuotedName("module.quoted.name", "module.name"));
            result.add(Variable.moduleDir("module.dir", "module.name"));
            result.add(moduleVersion);
            return result;
        }

        @Override
        public List<Copy> getResources(Environment env) {
            FileSystem fs = FileSystems.getDefault();
            List<Copy> result = new ArrayList<>();
            result.add(substituting("source", fs.getPathMatcher("glob:**"), new Template("@[source.folder]/@[module.dir]").eval(env)));
            return result;
        }
    }
}
apache-2.0
firejack-open/Firejack-Platform
core/src/main/java/net/firejack/platform/core/validation/service/RuleService.java
2229
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package net.firejack.platform.core.validation.service; import com.sun.jersey.spi.resource.Singleton; import net.firejack.platform.core.broker.rule.ReadRuleBroker; import net.firejack.platform.core.domain.SimpleIdentifier; import net.firejack.platform.core.request.ServiceRequest; import net.firejack.platform.core.response.ServiceResponse; import net.firejack.platform.core.validation.constraint.vo.Field; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Component; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; @Component("ruleService") @Singleton @Path("rule") public class RuleService { @Autowired @Qualifier("readRuleBroker") private ReadRuleBroker readRuleBroker; /** * @param constraintSourceId * @return */ @GET @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) public ServiceResponse<Field> formConstructor(@QueryParam(value = "id") String constraintSourceId) { SimpleIdentifier<String> simpleIdentifier = new SimpleIdentifier<String>(constraintSourceId); return 
readRuleBroker.execute(new ServiceRequest<SimpleIdentifier<String>>(simpleIdentifier)); } }
apache-2.0