repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
kasungayan/carbon-analytics-common
components/event-publisher/event-output-adapters/org.wso2.carbon.event.output.adapter.wso2event/src/main/java/org/wso2/carbon/event/output/adapter/wso2event/WSO2EventAdapter.java
8571
/*
 * Copyright (c) 2005 - 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed
 * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.wso2.carbon.event.output.adapter.wso2event;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.databridge.agent.AgentHolder;
import org.wso2.carbon.databridge.agent.DataPublisher;
import org.wso2.carbon.databridge.agent.exception.DataEndpointAgentConfigurationException;
import org.wso2.carbon.databridge.agent.exception.DataEndpointAuthenticationException;
import org.wso2.carbon.databridge.agent.exception.DataEndpointConfigurationException;
import org.wso2.carbon.databridge.agent.exception.DataEndpointException;
import org.wso2.carbon.databridge.commons.Event;
import org.wso2.carbon.databridge.commons.exception.TransportException;
import org.wso2.carbon.event.output.adapter.core.OutputEventAdapter;
import org.wso2.carbon.event.output.adapter.core.OutputEventAdapterConfiguration;
import org.wso2.carbon.event.output.adapter.core.exception.ConnectionUnavailableException;
import org.wso2.carbon.event.output.adapter.core.exception.OutputEventAdapterRuntimeException;
import org.wso2.carbon.event.output.adapter.core.exception.TestConnectionNotSupportedException;
import org.wso2.carbon.event.output.adapter.wso2event.internal.util.WSO2EventAdapterConstants;

import java.util.Map;

import static org.wso2.carbon.event.output.adapter.wso2event.internal.util.WSO2EventAdapterConstants.*;

/**
 * Output event adapter that forwards WSO2 events to a remote Data Bridge
 * receiver through a {@link DataPublisher}. Publishing is either blocking
 * ({@code publish}) or best-effort with a timeout ({@code tryPublish}),
 * selected by the adapter's static "publishing mode" property.
 */
public final class WSO2EventAdapter implements OutputEventAdapter {

    private static final Log log = LogFactory.getLog(WSO2EventAdapter.class);

    private final OutputEventAdapterConfiguration eventAdapterConfiguration;
    private final Map<String, String> globalProperties;

    // Created lazily in connect(); null until the adapter has connected.
    private DataPublisher dataPublisher = null;
    private boolean isBlockingMode = false;
    // Timeout (ms) for tryPublish(); only used in non-blocking mode.
    private long timeout = 0;
    // "streamName:streamVersion" identifier stamped onto every outgoing event.
    private String streamId;

    public WSO2EventAdapter(OutputEventAdapterConfiguration eventAdapterConfiguration,
                            Map<String, String> globalProperties) {
        this.eventAdapterConfiguration = eventAdapterConfiguration;
        this.globalProperties = globalProperties;
    }

    /**
     * Initialises the adapter: derives the target stream id from the static
     * configuration and, when configured, points the agent at an alternative
     * data-bridge configuration file.
     */
    @Override
    public void init() {
        streamId = eventAdapterConfiguration.getStaticProperties().get(
                WSO2EventAdapterConstants.ADAPTER_STATIC_CONFIG_STREAM_NAME) + ":"
                + eventAdapterConfiguration.getStaticProperties().get(
                WSO2EventAdapterConstants.ADAPTER_STATIC_CONFIG_STREAM_VERSION);

        String configPath = globalProperties.get(ADAPTOR_CONF_PATH);
        if (configPath != null) {
            AgentHolder.setConfigPath(configPath);
        }
    }

    @Override
    public void testConnect() throws TestConnectionNotSupportedException {
        // A successful connect() is the connectivity test; failures surface as
        // ConnectionUnavailableException / OutputEventAdapterRuntimeException.
        connect();
    }

    @Override
    public synchronized void connect() {
        String userName = eventAdapterConfiguration.getStaticProperties().get(ADAPTER_CONF_WSO2EVENT_PROP_USER_NAME);
        String password = eventAdapterConfiguration.getStaticProperties().get(ADAPTER_CONF_WSO2EVENT_PROP_PASSWORD);
        String authUrl = eventAdapterConfiguration.getStaticProperties()
                .get(ADAPTER_CONF_WSO2EVENT_PROP_AUTHENTICATOR_URL);
        String receiverUrl = eventAdapterConfiguration.getStaticProperties()
                .get(ADAPTER_CONF_WSO2EVENT_PROP_RECEIVER_URL);
        String protocol = eventAdapterConfiguration.getStaticProperties().get(ADAPTER_CONF_WSO2EVENT_PROP_PROTOCOL);
        String publishingMode = eventAdapterConfiguration.getStaticProperties()
                .get(ADAPTER_CONF_WSO2EVENT_PROP_PUBLISHING_MODE);
        String timeoutString = eventAdapterConfiguration.getStaticProperties()
                .get(ADAPTER_CONF_WSO2EVENT_PROP_PUBLISH_TIMEOUT_MS);

        // Constant-first comparison avoids an NPE when the publishing-mode
        // property is absent; a missing/unknown mode falls through to
        // non-blocking mode with an explicit timeout.
        if (ADAPTER_PUBLISHING_MODE_BLOCKING.equalsIgnoreCase(publishingMode)) {
            isBlockingMode = true;
        } else {
            try {
                timeout = Long.parseLong(timeoutString);
            } catch (RuntimeException e) {
                throwRuntimeException(receiverUrl, authUrl, protocol, userName, e);
            }
        }

        try {
            // An empty authenticator URL means "derive it from the receiver URL".
            String effectiveAuthUrl = (authUrl != null && !authUrl.isEmpty()) ? authUrl : null;
            dataPublisher = new DataPublisher(protocol, receiverUrl, effectiveAuthUrl, userName, password);
        } catch (DataEndpointAgentConfigurationException | DataEndpointConfigurationException e) {
            // Configuration problems are programming/deployment errors.
            throwRuntimeException(receiverUrl, authUrl, protocol, userName, e);
        } catch (DataEndpointException | DataEndpointAuthenticationException | TransportException e) {
            // Transient connectivity problems: the caller may retry.
            throwConnectionException(receiverUrl, authUrl, protocol, userName, e);
        }
    }

    @Override
    public void publish(Object message, Map<String, String> dynamicProperties) {
        Event event = (Event) message;
        event.setStreamId(streamId);
        if (isBlockingMode) {
            dataPublisher.publish(event);
        } else {
            dataPublisher.tryPublish(event, timeout);
        }
    }

    @Override
    public void disconnect() {
        if (dataPublisher != null) {
            try {
                dataPublisher.shutdown();
            } catch (DataEndpointException e) {
                String userName = eventAdapterConfiguration.getStaticProperties()
                        .get(ADAPTER_CONF_WSO2EVENT_PROP_USER_NAME);
                String authUrl = eventAdapterConfiguration.getStaticProperties()
                        .get(ADAPTER_CONF_WSO2EVENT_PROP_AUTHENTICATOR_URL);
                String receiverUrl = eventAdapterConfiguration.getStaticProperties()
                        .get(ADAPTER_CONF_WSO2EVENT_PROP_RECEIVER_URL);
                String protocol = eventAdapterConfiguration.getStaticProperties()
                        .get(ADAPTER_CONF_WSO2EVENT_PROP_PROTOCOL);
                // Shutdown failures are logged, not rethrown: disconnect must not fail.
                logException("Error in shutting down the data publisher", receiverUrl, authUrl, protocol,
                        userName, e);
            }
        }
    }

    @Override
    public void destroy() {
        // Nothing to release beyond what disconnect() already handles.
    }

    /** Wraps {@code e} in an {@link OutputEventAdapterRuntimeException} with full endpoint context. */
    private void throwRuntimeException(String receiverUrl, String authUrl, String protocol, String userName,
                                       Exception e) {
        throw new OutputEventAdapterRuntimeException(
                "Error in data-bridge config for adaptor " + eventAdapterConfiguration.getName()
                        + " with the receiverUrl:" + receiverUrl + " authUrl:" + authUrl
                        + " protocol:" + protocol + " and userName:" + userName + "," + e.getMessage(), e);
    }

    /** Logs {@code e} with full endpoint context without interrupting the caller. */
    private void logException(String message, String receiverUrl, String authUrl, String protocol,
                              String userName, Exception e) {
        log.error(message + " for adaptor " + eventAdapterConfiguration.getName()
                + " with the receiverUrl:" + receiverUrl + " authUrl:" + authUrl
                + " protocol:" + protocol + " and userName:" + userName + "," + e.getMessage(), e);
    }

    /** Wraps {@code e} in a {@link ConnectionUnavailableException} with full endpoint context. */
    private void throwConnectionException(String receiverUrl, String authUrl, String protocol, String userName,
                                          Exception e) {
        throw new ConnectionUnavailableException(
                "Connection not available for adaptor " + eventAdapterConfiguration.getName()
                        + " with the receiverUrl:" + receiverUrl + " authUrl:" + authUrl
                        + " protocol:" + protocol + " and userName:" + userName + "," + e.getMessage(), e);
    }
}
apache-2.0
codescale/logging-log4j2
log4j-core/src/test/java/org/apache/logging/log4j/core/pattern/PatternParserTest.java
13711
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache license, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the license for the specific language governing permissions and
 * limitations under the license.
 */
package org.apache.logging.log4j.core.pattern;

import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.MarkerManager;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.Logger;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.NullConfiguration;
import org.apache.logging.log4j.core.impl.Log4jLogEvent;
import org.apache.logging.log4j.core.util.DummyNanoClock;
import org.apache.logging.log4j.core.util.SystemNanoClock;
import org.apache.logging.log4j.message.SimpleMessage;
import org.apache.logging.log4j.util.Strings;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Tests for {@link PatternParser}: default/custom patterns, truncation,
 * malformed patterns, highlight nesting, and nano-clock side effects.
 */
public class PatternParserTest {

    static String OUTPUT_FILE = "output/PatternParser";
    static String WITNESS_FILE = "witness/PatternParser";
    LoggerContext ctx = LoggerContext.getContext();
    Logger root = ctx.getRootLogger();

    private static String msgPattern = "%m%n";
    private final String mdcMsgPattern1 = "%m : %X%n";
    private final String mdcMsgPattern2 = "%m : %X{key1}%n";
    private final String mdcMsgPattern3 = "%m : %X{key2}%n";
    private final String mdcMsgPattern4 = "%m : %X{key3}%n";
    private final String mdcMsgPattern5 = "%m : %X{key1},%X{key2},%X{key3}%n";

    // Unterminated '{' in the date option: the parser must still cope.
    private static String badPattern = "[%d{yyyyMMdd HH:mm:ss,SSS] %-5p [%c{10}] - %m%n";
    private static String customPattern = "[%d{yyyyMMdd HH:mm:ss,SSS}] %-5p [%-25.25c{1}:%-4L] - %m%n";
    private static String patternTruncateFromEnd = "%d; %-5p %5.-5c %m%n";
    private static String patternTruncateFromBeginning = "%d; %-5p %5.5c %m%n";
    private static String nestedPatternHighlight =
            "%highlight{%d{dd MMM yyyy HH:mm:ss,SSS}{GMT+0} [%t] %-5level: %msg%n%throwable}";

    private static final String KEY = "Converter";
    private PatternParser parser;

    @Before
    public void setup() {
        parser = new PatternParser(KEY);
    }

    /**
     * Asserts that the converter at {@code index} has the given name.
     * Note: JUnit's assertEquals takes (message, expected, actual).
     */
    private void validateConverter(final List<PatternFormatter> formatter, final int index, final String name) {
        final PatternConverter pc = formatter.get(index).getConverter();
        assertEquals("Incorrect converter " + pc.getName() + " at index " + index + " expected " + name,
                name, pc.getName());
    }

    /**
     * Test the default pattern
     */
    @Test
    public void defaultPattern() {
        final List<PatternFormatter> formatters = parser.parse(msgPattern);
        assertNotNull(formatters);
        assertEquals(2, formatters.size());
        validateConverter(formatters, 0, "Message");
        validateConverter(formatters, 1, "Line Sep");
    }

    /**
     * Test the custom pattern
     */
    @Test
    public void testCustomPattern() {
        final List<PatternFormatter> formatters = parser.parse(customPattern);
        assertNotNull(formatters);
        final Map<String, String> mdc = new HashMap<>();
        mdc.put("loginId", "Fred");
        final Throwable t = new Throwable();
        final StackTraceElement[] elements = t.getStackTrace();
        final Log4jLogEvent event = Log4jLogEvent.newBuilder() //
                .setLoggerName("org.apache.logging.log4j.PatternParserTest") //
                .setMarker(MarkerManager.getMarker("TEST")) //
                .setLoggerFqcn(Logger.class.getName()) //
                .setLevel(Level.INFO) //
                .setMessage(new SimpleMessage("Hello, world")) //
                .setContextMap(mdc) //
                .setThreadName("Thread1") //
                .setSource(elements[0])
                .setTimeMillis(System.currentTimeMillis()).build();
        final StringBuilder buf = new StringBuilder();
        for (final PatternFormatter formatter : formatters) {
            formatter.format(event, buf);
        }
        final String str = buf.toString();
        final String expected = "INFO  [PatternParserTest        :100 ] - Hello, world" + Strings.LINE_SEPARATOR;
        assertTrue("Expected to end with: " + expected + ". Actual: " + str, str.endsWith(expected));
    }

    @Test
    public void testPatternTruncateFromBeginning() {
        final List<PatternFormatter> formatters = parser.parse(patternTruncateFromBeginning);
        assertNotNull(formatters);
        final LogEvent event = Log4jLogEvent.newBuilder() //
                .setLoggerName("org.apache.logging.log4j.PatternParserTest") //
                .setLoggerFqcn(Logger.class.getName()) //
                .setLevel(Level.INFO) //
                .setMessage(new SimpleMessage("Hello, world")) //
                .setThreadName("Thread1") //
                .setTimeMillis(System.currentTimeMillis()) //
                .build();
        final StringBuilder buf = new StringBuilder();
        for (final PatternFormatter formatter : formatters) {
            formatter.format(event, buf);
        }
        final String str = buf.toString();
        final String expected = "INFO  rTest Hello, world" + Strings.LINE_SEPARATOR;
        assertTrue("Expected to end with: " + expected + ". Actual: " + str, str.endsWith(expected));
    }

    @Test
    public void testPatternTruncateFromEnd() {
        final List<PatternFormatter> formatters = parser.parse(patternTruncateFromEnd);
        assertNotNull(formatters);
        final LogEvent event = Log4jLogEvent.newBuilder() //
                .setLoggerName("org.apache.logging.log4j.PatternParserTest") //
                .setLoggerFqcn(Logger.class.getName()) //
                .setLevel(Level.INFO) //
                .setMessage(new SimpleMessage("Hello, world")) //
                .setThreadName("Thread1") //
                .setTimeMillis(System.currentTimeMillis()) //
                .build();
        final StringBuilder buf = new StringBuilder();
        for (final PatternFormatter formatter : formatters) {
            formatter.format(event, buf);
        }
        final String str = buf.toString();
        final String expected = "INFO  org.a Hello, world" + Strings.LINE_SEPARATOR;
        assertTrue("Expected to end with: " + expected + ". Actual: " + str, str.endsWith(expected));
    }

    @Test
    public void testBadPattern() {
        final Calendar cal = Calendar.getInstance();
        cal.set(2001, Calendar.FEBRUARY, 3, 4, 5, 6);
        cal.set(Calendar.MILLISECOND, 789);
        final long timestamp = cal.getTimeInMillis();

        final List<PatternFormatter> formatters = parser.parse(badPattern);
        assertNotNull(formatters);
        final Throwable t = new Throwable();
        final StackTraceElement[] elements = t.getStackTrace();
        final LogEvent event = Log4jLogEvent.newBuilder() //
                .setLoggerName("a.b.c") //
                .setLoggerFqcn(Logger.class.getName()) //
                .setLevel(Level.INFO) //
                .setMessage(new SimpleMessage("Hello, world")) //
                .setThreadName("Thread1") //
                .setSource(elements[0]) //
                .setTimeMillis(timestamp) //
                .build();
        final StringBuilder buf = new StringBuilder();
        for (final PatternFormatter formatter : formatters) {
            formatter.format(event, buf);
        }
        final String str = buf.toString();

        // eats all characters until the closing '}' character
        final String expected = "[2001-02-03 04:05:06,789] - Hello, world";
        assertTrue("Expected to start with: " + expected + ". Actual: " + str, str.startsWith(expected));
    }

    @Test
    public void testNestedPatternHighlight() {
        testNestedPatternHighlight(Level.TRACE, "\u001B[30m");
        testNestedPatternHighlight(Level.DEBUG, "\u001B[36m");
        testNestedPatternHighlight(Level.INFO, "\u001B[32m");
        testNestedPatternHighlight(Level.WARN, "\u001B[33m");
        testNestedPatternHighlight(Level.ERROR, "\u001B[1;31m");
        testNestedPatternHighlight(Level.FATAL, "\u001B[1;31m");
    }

    private void testNestedPatternHighlight(final Level level, final String expectedStart) {
        final List<PatternFormatter> formatters = parser.parse(nestedPatternHighlight);
        assertNotNull(formatters);
        final Throwable t = new Throwable();
        t.getStackTrace();
        final LogEvent event = Log4jLogEvent.newBuilder() //
                .setLoggerName("org.apache.logging.log4j.PatternParserTest") //
                .setMarker(MarkerManager.getMarker("TEST")) //
                .setLoggerFqcn(Logger.class.getName()) //
                .setLevel(level) //
                .setMessage(new SimpleMessage("Hello, world")) //
                .setThreadName("Thread1") //
                .setSource(/*stackTraceElement[0]*/ null) //
                .setTimeMillis(System.currentTimeMillis()) //
                .build();
        final StringBuilder buf = new StringBuilder();
        for (final PatternFormatter formatter : formatters) {
            formatter.format(event, buf);
        }
        final String str = buf.toString();
        final String expectedEnd = String.format("] %-5s: Hello, world%s\u001B[m", level, Strings.LINE_SEPARATOR);
        assertTrue("Expected to start with: " + expectedStart + ". Actual: " + str, str.startsWith(expectedStart));
        assertTrue("Expected to end with: \"" + expectedEnd + "\". Actual: \"" + str + "\"", str.endsWith(expectedEnd));
    }

    @Test
    public void testNanoPatternShort() {
        final List<PatternFormatter> formatters = parser.parse("%N");
        assertNotNull(formatters);
        assertEquals(1, formatters.size());
        assertTrue(formatters.get(0).getConverter() instanceof NanoTimePatternConverter);
    }

    @Test
    public void testNanoPatternLong() {
        final List<PatternFormatter> formatters = parser.parse("%nano");
        assertNotNull(formatters);
        assertEquals(1, formatters.size());
        assertTrue(formatters.get(0).getConverter() instanceof NanoTimePatternConverter);
    }

    @Test
    public void testThreadNamePattern() {
        testThreadNamePattern("%thread");
    }

    @Test
    public void testThreadNameFullPattern() {
        testThreadNamePattern("%threadName");
    }

    @Test
    public void testThreadIdFullPattern() {
        testThreadIdPattern("%threadId");
    }

    @Test
    public void testThreadIdShortPattern1() {
        testThreadIdPattern("%tid");
    }

    @Test
    public void testThreadIdShortPattern2() {
        testThreadIdPattern("%T");
    }

    @Test
    public void testThreadPriorityShortPattern() {
        testThreadPriorityPattern("%tp");
    }

    @Test
    public void testThreadPriorityFullPattern() {
        testThreadPriorityPattern("%threadPriority");
    }

    private void testThreadIdPattern(final String pattern) {
        testFirstConverter(pattern, ThreadIdPatternConverter.class);
    }

    private void testThreadNamePattern(final String pattern) {
        testFirstConverter(pattern, ThreadNamePatternConverter.class);
    }

    private void testThreadPriorityPattern(final String pattern) {
        testFirstConverter(pattern, ThreadPriorityPatternConverter.class);
    }

    /** Parses {@code pattern} and asserts the single resulting converter is a {@code checkClass}. */
    private void testFirstConverter(final String pattern, final Class<?> checkClass) {
        final List<PatternFormatter> formatters = parser.parse(pattern);
        assertNotNull(formatters);
        final String msg = formatters.toString();
        assertEquals(msg, 1, formatters.size());
        assertTrue(msg, checkClass.isInstance(formatters.get(0).getConverter()));
    }

    @Test
    public void testThreadNameShortPattern() {
        testThreadNamePattern("%t");
    }

    @Test
    public void testNanoPatternShortChangesConfigurationNanoClock() {
        final Configuration config = new NullConfiguration();
        assertTrue(config.getNanoClock() instanceof DummyNanoClock);

        final PatternParser pp = new PatternParser(config, KEY, null);
        assertTrue(config.getNanoClock() instanceof DummyNanoClock);

        pp.parse("%m");
        assertTrue(config.getNanoClock() instanceof DummyNanoClock);

        pp.parse("%nano"); // this changes the config clock
        assertTrue(config.getNanoClock() instanceof SystemNanoClock);
    }

    @Test
    public void testNanoPatternLongChangesNanoClockFactoryMode() {
        final Configuration config = new NullConfiguration();
        assertTrue(config.getNanoClock() instanceof DummyNanoClock);

        final PatternParser pp = new PatternParser(config, KEY, null);
        assertTrue(config.getNanoClock() instanceof DummyNanoClock);

        pp.parse("%m");
        assertTrue(config.getNanoClock() instanceof DummyNanoClock);

        pp.parse("%N");
        assertTrue(config.getNanoClock() instanceof SystemNanoClock);
    }
}
apache-2.0
ptupitsyn/ignite
modules/core/src/main/java/org/apache/ignite/internal/visor/verify/VisorValidateIndexesJobResult.java
4517
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.visor.verify;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import org.apache.ignite.internal.processors.cache.verify.PartitionKey;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.internal.visor.VisorDataTransferObject;
import org.jetbrains.annotations.NotNull;

/**
 * Per-node result of an index validation job: partition results, reverse-index
 * results and integrity-check failures. Serialized with a versioned protocol
 * (V2 added {@code idxRes}, V3 added {@code integrityCheckFailures}), so fields
 * may be {@code null} when read from an older node.
 */
public class VisorValidateIndexesJobResult extends VisorDataTransferObject {
    /** */
    private static final long serialVersionUID = 0L;

    /** Results of indexes validation from node. */
    private Map<PartitionKey, ValidateIndexesPartitionResult> partRes;

    /** Results of reverse indexes validation from node. */
    private Map<String, ValidateIndexesPartitionResult> idxRes;

    /** Integrity check issues. */
    private Collection<IndexIntegrityCheckIssue> integrityCheckFailures;

    /**
     * @param partRes Results of indexes validation from node.
     * @param idxRes Results of reverse indexes validation from node.
     * @param integrityCheckFailures Collection of indexes integrity check failures.
     */
    public VisorValidateIndexesJobResult(
        @NotNull Map<PartitionKey, ValidateIndexesPartitionResult> partRes,
        @NotNull Map<String, ValidateIndexesPartitionResult> idxRes,
        @NotNull Collection<IndexIntegrityCheckIssue> integrityCheckFailures
    ) {
        this.partRes = partRes;
        this.idxRes = idxRes;
        this.integrityCheckFailures = integrityCheckFailures;
    }

    /**
     * For externalization only.
     */
    public VisorValidateIndexesJobResult() {
    }

    /** {@inheritDoc} */
    @Override public byte getProtocolVersion() {
        return V3;
    }

    /**
     * @return Results of indexes validation from node.
     */
    public Map<PartitionKey, ValidateIndexesPartitionResult> partitionResult() {
        return partRes;
    }

    /**
     * @return Results of reverse indexes validation from node.
     */
    public Map<String, ValidateIndexesPartitionResult> indexResult() {
        return idxRes == null ? Collections.emptyMap() : idxRes;
    }

    /**
     * @return Collection of failed integrity checks.
     */
    public Collection<IndexIntegrityCheckIssue> integrityCheckFailures() {
        return integrityCheckFailures == null ? Collections.emptyList() : integrityCheckFailures;
    }

    /**
     * @return {@code true} If any indexes issues found on node, otherwise returns {@code false}.
     */
    public boolean hasIssues() {
        // values() is sufficient here: only the per-partition results are inspected, not the keys.
        return (integrityCheckFailures != null && !integrityCheckFailures.isEmpty()) ||
            (partRes != null && partRes.values().stream().anyMatch(res -> !res.issues().isEmpty())) ||
            (idxRes != null && idxRes.values().stream().anyMatch(res -> !res.issues().isEmpty()));
    }

    /** {@inheritDoc} */
    @Override protected void writeExternalData(ObjectOutput out) throws IOException {
        U.writeMap(out, partRes);
        U.writeMap(out, idxRes);
        U.writeCollection(out, integrityCheckFailures);
    }

    /** {@inheritDoc} */
    @Override protected void readExternalData(byte protoVer, ObjectInput in) throws IOException, ClassNotFoundException {
        partRes = U.readMap(in);

        // Older protocol versions did not carry these fields; leave them null.
        if (protoVer >= V2)
            idxRes = U.readMap(in);

        if (protoVer >= V3)
            integrityCheckFailures = U.readCollection(in);
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(VisorValidateIndexesJobResult.class, this);
    }
}
apache-2.0
CloudSlang/cs-actions
cs-active-directory/src/main/java/io/cloudslang/content/active_directory/entities/CreateUserInputInterface.java
1364
/* * (c) Copyright 2021 Micro Focus * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License v2.0 which accompany this distribution. * * The Apache License is available at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.cloudslang.content.active_directory.entities; import java.util.List; public interface CreateUserInputInterface { String getHost(); String getDistinguishedName(); String getUserCommonName(); String getUserPassword(); String getSAMAccountName(); String getUsername(); String getPassword(); String getProtocol(); Boolean getTrustAllRoots(); String getTrustKeystore(); String getTrustPassword(); Boolean getEscapeChars(); String getTimeout(); String getProxyHost(); int getProxyPort(); String getProxyUsername(); String getProxyPassword(); String getX509HostnameVerifier(); String getTlsVersion(); List<String> getAllowedCiphers(); }
apache-2.0
huihoo/olat
olat7.8/src/main/java/org/olat/presentation/group/area/BGAreaEditController.java
10780
/**
 * OLAT - Online Learning and Training<br>
 * http://www.olat.org
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); <br>
 * you may not use this file except in compliance with the License.<br>
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing,<br>
 * software distributed under the License is distributed on an "AS IS" BASIS, <br>
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
 * See the License for the specific language governing permissions and <br>
 * limitations under the License.
 * <p>
 * Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
 * University of Zurich, Switzerland.
 * <p>
 */
package org.olat.presentation.group.area;

import java.util.Iterator;
import java.util.List;

import org.apache.commons.lang.StringEscapeUtils;
import org.olat.data.group.BusinessGroup;
import org.olat.data.group.area.BGArea;
import org.olat.data.group.area.BGAreaDao;
import org.olat.data.group.area.BGAreaDaoImpl;
import org.olat.data.group.context.BGContext;
import org.olat.data.group.context.BGContextDao;
import org.olat.data.group.context.BGContextDaoImpl;
import org.olat.lms.activitylogging.LoggingResourceable;
import org.olat.lms.activitylogging.ThreadLocalUserActivityLogger;
import org.olat.lms.group.BusinessGroupService;
import org.olat.lms.group.GroupLoggingAction;
import org.olat.presentation.framework.core.UserRequest;
import org.olat.presentation.framework.core.components.Component;
import org.olat.presentation.framework.core.components.choice.Choice;
import org.olat.presentation.framework.core.components.tabbedpane.TabbedPane;
import org.olat.presentation.framework.core.components.velocity.VelocityContainer;
import org.olat.presentation.framework.core.control.Controller;
import org.olat.presentation.framework.core.control.WindowControl;
import org.olat.presentation.framework.core.control.controller.BasicController;
import org.olat.presentation.framework.core.translator.PackageTranslator;
import org.olat.presentation.framework.core.translator.PackageUtil;
import org.olat.presentation.framework.core.translator.Translator;
import org.olat.system.event.Event;
import org.olat.system.spring.CoreSpringFactory;

/**
 * Description:<BR>
 * This controller can be used to edit the business grou area metadata and associate business groups to the business group area.
 * <P>
 * Initial Date: Aug 30, 2004
 *
 * @author gnaegi
 */
public class BGAreaEditController extends BasicController {

    private static final String PACKAGE = PackageUtil.getPackageName(BGAreaEditController.class);
    private static final String VELOCITY_ROOT = PackageUtil.getPackageVelocityRoot(PACKAGE);

    // helpers
    private final Translator trans;
    // GUI components
    private final TabbedPane tabbedPane;
    private VelocityContainer editVC, detailsTabVC, groupsTabVC;
    private BGAreaFormController areaController;
    private GroupsToAreaDataModel groupsDataModel;
    private Choice groupsChoice;
    // area, context and group references
    private BGArea area;
    private final BGContext bgContext;
    // all groups of the context, and the subset currently associated with the area
    private List<BusinessGroup> allGroups, inAreaGroups;
    // managers
    private final BGAreaDao areaManager;
    private final BGContextDao contextManager;

    /**
     * Constructor for the business group area edit controller
     *
     * @param ureq
     *            The user request
     * @param wControl
     *            The window control
     * @param area
     *            The business group area
     */
    public BGAreaEditController(final UserRequest ureq, final WindowControl wControl, final BGArea area) {
        super(ureq, wControl);
        this.trans = new PackageTranslator(PACKAGE, ureq.getLocale());
        this.area = area;
        this.areaManager = BGAreaDaoImpl.getInstance();
        this.bgContext = area.getGroupContext();
        this.contextManager = BGContextDaoImpl.getInstance();
        // tabbed pane
        tabbedPane = new TabbedPane("tabbedPane", ureq.getLocale());
        tabbedPane.addListener(this);
        // details tab
        initAndAddDetailsTab(ureq, wControl);
        // groups tab
        initAndAddGroupsTab();
        // initialize main view
        initEditVC();
        putInitialPanel(this.editVC);
    }

    /**
     * initialize the main velocity wrapper container
     */
    private void initEditVC() {
        editVC = new VelocityContainer("edit", VELOCITY_ROOT + "/edit.html", trans, this);
        editVC.put("tabbedpane", tabbedPane);
        // escape the area name: it is user-provided and rendered into HTML
        editVC.contextPut("title",
                trans.translate("area.edit.title", new String[] { StringEscapeUtils.escapeHtml(this.area.getName()).toString() }));
    }

    /**
     * initialize the area details tab
     */
    private void initAndAddDetailsTab(final UserRequest ureq, final WindowControl wControl) {
        this.detailsTabVC = new VelocityContainer("detailstab", VELOCITY_ROOT + "/detailstab.html", this.trans, this);
        // TODO:pb: refactor BGControllerFactory.create..AreaController to be
        // usefull here
        if (this.areaController != null) {
            removeAsListenerAndDispose(this.areaController);
        }
        this.areaController = new BGAreaFormController(ureq, wControl, this.area, false);
        listenTo(this.areaController);
        this.detailsTabVC.put("areaForm", this.areaController.getInitialComponent());
        this.tabbedPane.addTab(this.trans.translate("tab.details"), this.detailsTabVC);
    }

    /**
     * initalize the group to area association tab
     */
    private void initAndAddGroupsTab() {
        groupsTabVC = new VelocityContainer("groupstab", VELOCITY_ROOT + "/groupstab.html", trans, this);
        tabbedPane.addTab(trans.translate("tab.groups"), groupsTabVC);

        this.allGroups = contextManager.getGroupsOfBGContext(this.bgContext);
        this.inAreaGroups = areaManager.findBusinessGroupsOfArea(this.area);
        this.groupsDataModel = new GroupsToAreaDataModel(this.allGroups, this.inAreaGroups);

        groupsChoice = new Choice("groupsChoice", trans);
        groupsChoice.setSubmitKey("submit");
        groupsChoice.setCancelKey("cancel");
        groupsChoice.setTableDataModel(groupsDataModel);
        groupsChoice.addListener(this);
        groupsTabVC.put(groupsChoice);
        groupsTabVC.contextPut("noGroupsFound", Boolean.valueOf(allGroups.isEmpty()));
    }

    /** */
    @Override
    protected void event(final UserRequest ureq, final Component source, final Event event) {
        if (source == this.groupsChoice) {
            if (event == Choice.EVNT_VALIDATION_OK) {
                doUpdateGroupAreaRelations();
                // do logging
                if (this.inAreaGroups.isEmpty()) {
                    ThreadLocalUserActivityLogger.log(GroupLoggingAction.BGAREA_UPDATED_NOW_EMPTY, getClass());
                } else {
                    for (final BusinessGroup aGroup : this.inAreaGroups) {
                        ThreadLocalUserActivityLogger.log(GroupLoggingAction.BGAREA_UPDATED_MEMBER_GROUP, getClass(),
                                LoggingResourceable.wrap(aGroup));
                    }
                }
            }
        }
    }

    @Override
    protected void event(final UserRequest ureq, final Controller source, final Event event) {
        if (source == this.areaController) {
            if (event == Event.DONE_EVENT) {
                final BGArea updatedArea = doAreaUpdate();
                if (updatedArea == null) {
                    // null signals a name collision: keep the old name in the form
                    this.areaController.resetAreaName();
                    getWindowControl().setWarning(this.trans.translate("error.area.name.exists"));
                } else {
                    this.area = updatedArea;
                    this.editVC.contextPut("title",
                            this.trans.translate("area.edit.title", new String[] { StringEscapeUtils.escapeHtml(this.area.getName()).toString() }));
                }
            } else if (event == Event.CANCELLED_EVENT) {
                // area might have been changed, reload from db
                this.area = this.areaManager.reloadArea(this.area);
                // TODO:pb: refactor BGControllerFactory.create..AreaController to be
                // usefull here
                if (this.areaController != null) {
                    removeAsListenerAndDispose(this.areaController);
                }
                this.areaController = new BGAreaFormController(ureq, getWindowControl(), this.area, false);
                listenTo(this.areaController);
                this.detailsTabVC.put("areaForm", this.areaController.getInitialComponent());
            }
        }
    }

    /**
     * Update a group area
     *
     * @return the updated area
     */
    public BGArea doAreaUpdate() {
        this.area.setName(this.areaController.getAreaName());
        this.area.setDescription(this.areaController.getAreaDescription());
        return this.areaManager.updateBGArea(this.area);
    }

    /**
     * Update the groups associated to this area: remove and add groups
     */
    private void doUpdateGroupAreaRelations() {
        BusinessGroupService businessGroupService = (BusinessGroupService) CoreSpringFactory.getBean(BusinessGroupService.class);
        // 1) add groups to area
        final List addedGroups = groupsChoice.getAddedRows();
        for (final Iterator iterator = addedGroups.iterator(); iterator.hasNext();) {
            final Integer position = (Integer) iterator.next();
            BusinessGroup group = groupsDataModel.getGroup(position.intValue());
            // refresh group to prevent stale object exception and context proxy
            // issues
            group = businessGroupService.loadBusinessGroup(group);
            // refresh group also in table model
            this.allGroups.set(position.intValue(), group);
            // add group now to area and update in area group list
            areaManager.addBGToBGArea(group, area);
            this.inAreaGroups.add(group);
        }
        // 2) remove groups from area
        final List removedGroups = groupsChoice.getRemovedRows();
        for (final Iterator iterator = removedGroups.iterator(); iterator.hasNext();) {
            final Integer position = (Integer) iterator.next();
            final BusinessGroup group = groupsDataModel.getGroup(position.intValue());
            areaManager.removeBGFromArea(group, area);
            this.inAreaGroups.remove(group);
        }
    }

    /** */
    @Override
    protected void doDispose() {
        // don't dispose anything
    }
}
apache-2.0
pfirmstone/JGDMS
qa/src/org/apache/river/test/spec/javaspace/conformance/TransactionWriteTakeIfExistsTest.java
3650
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.river.test.spec.javaspace.conformance; import java.util.logging.Level; // net.jini import net.jini.core.transaction.Transaction; // org.apache.river import org.apache.river.qa.harness.TestException; import org.apache.river.qa.harness.QAConfig; /** * TransactionWriteTakeIfExistsTest asserts that if the entry is written and * after that is taken by takeIfExists method within the non null * transaction, the entry will never be visible outside the transaction and * will not be added to the space when the transaction commits. * * @author Mikhail A. Markov */ public class TransactionWriteTakeIfExistsTest extends TransactionTest { /** * This method asserts that if the entry is written and * after that is taken by takeIfExists method within the non null * transaction, the entry will never be visible outside the transaction and * will not be added to the space when the transaction commits. 
* * <P>Notes:<BR>For more information see the JavaSpaces specification * section 3.1</P> */ public void run() throws Exception { SimpleEntry sampleEntry1 = new SimpleEntry("TestEntry #1", 1); SimpleEntry sampleEntry2 = new SimpleEntry("TestEntry #2", 2); SimpleEntry result; Transaction txn; // first check that space is empty if (!checkSpace(space)) { throw new TestException( "Space is not empty in the beginning."); } // create the non null transaction txn = getTransaction(); /* * write 1-st sample and 2-nd sample entries twice * to the space within the transaction */ space.write(sampleEntry1, txn, leaseForeverTime); space.write(sampleEntry1, txn, leaseForeverTime); space.write(sampleEntry2, txn, leaseForeverTime); space.write(sampleEntry2, txn, leaseForeverTime); /* * takeIfExists all written entries from the space * within the transaction */ space.takeIfExists(sampleEntry1, txn, checkTime); space.takeIfExists(sampleEntry1, txn, checkTime); space.takeIfExists(sampleEntry2, txn, checkTime); space.takeIfExists(sampleEntry2, txn, checkTime); // commit the transaction txnCommit(txn); // check that there are no entries in the space result = (SimpleEntry) space.read(null, null, checkTime); if (result != null) { throw new TestException( "there is " + result + " still available in the" + " space after transaction's committing" + " but null is expected."); } logDebugText("There are no entries in the space after" + " transaction's committing, as expected."); } }
apache-2.0
bitgilde/HyperImage3
hi3-editor/src/org/hyperimage/connector/fedora3/ws/ObjectFactory.java
21077
package org.hyperimage.connector.fedora3.ws;

import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlElementDecl;
import javax.xml.bind.annotation.XmlRegistry;
import javax.xml.namespace.QName;

/**
 * This object contains factory methods for each
 * Java content interface and Java element interface
 * generated in the org.hyperimage.connector.fedora3.ws package.
 * <p>An ObjectFactory allows you to programmatically
 * construct new instances of the Java representation
 * for XML content. The Java representation of XML
 * content can consist of schema derived interfaces
 * and classes representing the binding of schema
 * type definitions, element declarations and model
 * groups. Factory methods for each of these are
 * provided in this class.
 *
 * <p>NOTE(review): this class has the shape of JAXB tooling output
 * (presumably generated by wsimport/xjc from the service WSDL — confirm
 * before hand-editing anything other than comments).
 */
@XmlRegistry
public class ObjectFactory {

    // QNames for elements in the generic connector namespace (string payloads).
    private final static QName _AssetURN_QNAME = new QName("http://connector.ws.hyperimage.org/", "assetURN");
    private final static QName _Token_QNAME = new QName("http://connector.ws.hyperimage.org/", "token");
    private final static QName _GetAssetPreviewDataResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getAssetPreviewDataResponse");
    private final static QName _ParentURN_QNAME = new QName("http://connector.ws.hyperimage.org/", "parentURN");
    private final static QName _Username_QNAME = new QName("http://connector.ws.hyperimage.org/", "username");
    private final static QName _GetAssetData_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getAssetData");
    private final static QName _GetAssetPreviewData_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getAssetPreviewData");
    private final static QName _GetHierarchyLevelResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getHierarchyLevelResponse");
    private final static QName _Authenticate_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "authenticate");
    private final static QName _HIWSLoggedException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSLoggedException");
    private final static QName _GetMetadataRecord_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getMetadataRecord");
    private final static QName _HIWSNotBinaryException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSNotBinaryException");
    private final static QName _Session_QNAME = new QName("http://connector.ws.hyperimage.org/", "session");
    private final static QName _HIWSDCMetadataException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSDCMetadataException");
    private final static QName _HIWSAuthException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSAuthException");
    private final static QName _HIWSAssetNotFoundException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSAssetNotFoundException");
    private final static QName _GetWSVersion_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getWSVersion");
    private final static QName _GetMetadataRecordResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getMetadataRecordResponse");
    private final static QName _HIWSUTF8EncodingException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSUTF8EncodingException");
    private final static QName _GetWSVersionResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getWSVersionResponse");
    private final static QName _GetReposInfo_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getReposInfo");
    private final static QName _HIWSXMLParserException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSXMLParserException");
    private final static QName _AuthenticateResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "authenticateResponse");
    private final static QName _GetAssetDataResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getAssetDataResponse");
    private final static QName _GetHierarchyLevel_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getHierarchyLevel");
    private final static QName _GetReposInfoResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getReposInfoResponse");
    // Unqualified local element "return" scoped inside getAssetPreviewDataResponse.
    private final static QName _GetAssetPreviewDataResponseReturn_QNAME = new QName("", "return");

    /**
     * Create a new ObjectFactory that can be used to create new instances of schema derived classes for package: org.hyperimage.connector.fedora3.ws
     *
     */
    public ObjectFactory() {
    }

    /**
     * Create an instance of {@link HIWSDCMetadataException }
     *
     */
    public HIWSDCMetadataException createHIWSDCMetadataException() {
        return new HIWSDCMetadataException();
    }

    /**
     * Create an instance of {@link GetAssetDataResponse }
     *
     */
    public GetAssetDataResponse createGetAssetDataResponse() {
        return new GetAssetDataResponse();
    }

    /**
     * Create an instance of {@link HIWSAuthException }
     *
     */
    public HIWSAuthException createHIWSAuthException() {
        return new HIWSAuthException();
    }

    /**
     * Create an instance of {@link HIWSAssetNotFoundException }
     *
     */
    public HIWSAssetNotFoundException createHIWSAssetNotFoundException() {
        return new HIWSAssetNotFoundException();
    }

    /**
     * Create an instance of {@link HIWSNotBinaryException }
     *
     */
    public HIWSNotBinaryException createHIWSNotBinaryException() {
        return new HIWSNotBinaryException();
    }

    /**
     * Create an instance of {@link GetHierarchyLevelResponse }
     *
     */
    public GetHierarchyLevelResponse createGetHierarchyLevelResponse() {
        return new GetHierarchyLevelResponse();
    }

    /**
     * Create an instance of {@link Authenticate }
     *
     */
    public Authenticate createAuthenticate() {
        return new Authenticate();
    }

    /**
     * Create an instance of {@link HiHierarchyLevel }
     *
     */
    public HiHierarchyLevel createHiHierarchyLevel() {
        return new HiHierarchyLevel();
    }

    /**
     * Create an instance of {@link HIWSLoggedException }
     *
     */
    public HIWSLoggedException createHIWSLoggedException() {
        return new HIWSLoggedException();
    }

    /**
     * Create an instance of {@link GetHierarchyLevel }
     *
     */
    public GetHierarchyLevel createGetHierarchyLevel() {
        return new GetHierarchyLevel();
    }

    /**
     * Create an instance of {@link AuthenticateResponse }
     *
     */
    public AuthenticateResponse createAuthenticateResponse() {
        return new AuthenticateResponse();
    }

    /**
     * Create an instance of {@link GetReposInfoResponse }
     *
     */
    public GetReposInfoResponse createGetReposInfoResponse() {
        return new GetReposInfoResponse();
    }

    /**
     * Create an instance of {@link GetAssetPreviewDataResponse }
     *
     */
    public GetAssetPreviewDataResponse createGetAssetPreviewDataResponse() {
        return new GetAssetPreviewDataResponse();
    }

    /**
     * Create an instance of {@link GetWSVersion }
     *
     */
    public GetWSVersion createGetWSVersion() {
        return new GetWSVersion();
    }

    /**
     * Create an instance of {@link GetMetadataRecordResponse }
     *
     */
    public GetMetadataRecordResponse createGetMetadataRecordResponse() {
        return new GetMetadataRecordResponse();
    }

    /**
     * Create an instance of {@link HiMetadataRecord }
     *
     */
    public HiMetadataRecord createHiMetadataRecord() {
        return new HiMetadataRecord();
    }

    /**
     * Create an instance of {@link HiTypedDatastream }
     *
     */
    public HiTypedDatastream createHiTypedDatastream() {
        return new HiTypedDatastream();
    }

    /**
     * Create an instance of {@link HIWSXMLParserException }
     *
     */
    public HIWSXMLParserException createHIWSXMLParserException() {
        return new HIWSXMLParserException();
    }

    /**
     * Create an instance of {@link GetMetadataRecord }
     *
     */
    public GetMetadataRecord createGetMetadataRecord() {
        return new GetMetadataRecord();
    }

    /**
     * Create an instance of {@link GetAssetPreviewData }
     *
     */
    public GetAssetPreviewData createGetAssetPreviewData() {
        return new GetAssetPreviewData();
    }

    /**
     * Create an instance of {@link HIWSUTF8EncodingException }
     *
     */
    public HIWSUTF8EncodingException createHIWSUTF8EncodingException() {
        return new HIWSUTF8EncodingException();
    }

    /**
     * Create an instance of {@link GetReposInfo }
     *
     */
    public GetReposInfo createGetReposInfo() {
        return new GetReposInfo();
    }

    /**
     * Create an instance of {@link GetWSVersionResponse }
     *
     */
    public GetWSVersionResponse createGetWSVersionResponse() {
        return new GetWSVersionResponse();
    }

    /**
     * Create an instance of {@link GetAssetData }
     *
     */
    public GetAssetData createGetAssetData() {
        return new GetAssetData();
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "assetURN")
    public JAXBElement<String> createAssetURN(String value) {
        return new JAXBElement<String>(_AssetURN_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "token")
    public JAXBElement<String> createToken(String value) {
        return new JAXBElement<String>(_Token_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetAssetPreviewDataResponse }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getAssetPreviewDataResponse")
    public JAXBElement<GetAssetPreviewDataResponse> createGetAssetPreviewDataResponse(GetAssetPreviewDataResponse value) {
        return new JAXBElement<GetAssetPreviewDataResponse>(_GetAssetPreviewDataResponse_QNAME, GetAssetPreviewDataResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "parentURN")
    public JAXBElement<String> createParentURN(String value) {
        return new JAXBElement<String>(_ParentURN_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "username")
    public JAXBElement<String> createUsername(String value) {
        return new JAXBElement<String>(_Username_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetAssetData }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getAssetData")
    public JAXBElement<GetAssetData> createGetAssetData(GetAssetData value) {
        return new JAXBElement<GetAssetData>(_GetAssetData_QNAME, GetAssetData.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetAssetPreviewData }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getAssetPreviewData")
    public JAXBElement<GetAssetPreviewData> createGetAssetPreviewData(GetAssetPreviewData value) {
        return new JAXBElement<GetAssetPreviewData>(_GetAssetPreviewData_QNAME, GetAssetPreviewData.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetHierarchyLevelResponse }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getHierarchyLevelResponse")
    public JAXBElement<GetHierarchyLevelResponse> createGetHierarchyLevelResponse(GetHierarchyLevelResponse value) {
        return new JAXBElement<GetHierarchyLevelResponse>(_GetHierarchyLevelResponse_QNAME, GetHierarchyLevelResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link Authenticate }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "authenticate")
    public JAXBElement<Authenticate> createAuthenticate(Authenticate value) {
        return new JAXBElement<Authenticate>(_Authenticate_QNAME, Authenticate.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSLoggedException }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSLoggedException")
    public JAXBElement<HIWSLoggedException> createHIWSLoggedException(HIWSLoggedException value) {
        return new JAXBElement<HIWSLoggedException>(_HIWSLoggedException_QNAME, HIWSLoggedException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetMetadataRecord }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getMetadataRecord")
    public JAXBElement<GetMetadataRecord> createGetMetadataRecord(GetMetadataRecord value) {
        return new JAXBElement<GetMetadataRecord>(_GetMetadataRecord_QNAME, GetMetadataRecord.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSNotBinaryException }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSNotBinaryException")
    public JAXBElement<HIWSNotBinaryException> createHIWSNotBinaryException(HIWSNotBinaryException value) {
        return new JAXBElement<HIWSNotBinaryException>(_HIWSNotBinaryException_QNAME, HIWSNotBinaryException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "session")
    public JAXBElement<String> createSession(String value) {
        return new JAXBElement<String>(_Session_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSDCMetadataException }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSDCMetadataException")
    public JAXBElement<HIWSDCMetadataException> createHIWSDCMetadataException(HIWSDCMetadataException value) {
        return new JAXBElement<HIWSDCMetadataException>(_HIWSDCMetadataException_QNAME, HIWSDCMetadataException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSAuthException }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSAuthException")
    public JAXBElement<HIWSAuthException> createHIWSAuthException(HIWSAuthException value) {
        return new JAXBElement<HIWSAuthException>(_HIWSAuthException_QNAME, HIWSAuthException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSAssetNotFoundException }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSAssetNotFoundException")
    public JAXBElement<HIWSAssetNotFoundException> createHIWSAssetNotFoundException(HIWSAssetNotFoundException value) {
        return new JAXBElement<HIWSAssetNotFoundException>(_HIWSAssetNotFoundException_QNAME, HIWSAssetNotFoundException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetWSVersion }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getWSVersion")
    public JAXBElement<GetWSVersion> createGetWSVersion(GetWSVersion value) {
        return new JAXBElement<GetWSVersion>(_GetWSVersion_QNAME, GetWSVersion.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetMetadataRecordResponse }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getMetadataRecordResponse")
    public JAXBElement<GetMetadataRecordResponse> createGetMetadataRecordResponse(GetMetadataRecordResponse value) {
        return new JAXBElement<GetMetadataRecordResponse>(_GetMetadataRecordResponse_QNAME, GetMetadataRecordResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSUTF8EncodingException }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSUTF8EncodingException")
    public JAXBElement<HIWSUTF8EncodingException> createHIWSUTF8EncodingException(HIWSUTF8EncodingException value) {
        return new JAXBElement<HIWSUTF8EncodingException>(_HIWSUTF8EncodingException_QNAME, HIWSUTF8EncodingException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetWSVersionResponse }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getWSVersionResponse")
    public JAXBElement<GetWSVersionResponse> createGetWSVersionResponse(GetWSVersionResponse value) {
        return new JAXBElement<GetWSVersionResponse>(_GetWSVersionResponse_QNAME, GetWSVersionResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetReposInfo }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getReposInfo")
    public JAXBElement<GetReposInfo> createGetReposInfo(GetReposInfo value) {
        return new JAXBElement<GetReposInfo>(_GetReposInfo_QNAME, GetReposInfo.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSXMLParserException }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSXMLParserException")
    public JAXBElement<HIWSXMLParserException> createHIWSXMLParserException(HIWSXMLParserException value) {
        return new JAXBElement<HIWSXMLParserException>(_HIWSXMLParserException_QNAME, HIWSXMLParserException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link AuthenticateResponse }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "authenticateResponse")
    public JAXBElement<AuthenticateResponse> createAuthenticateResponse(AuthenticateResponse value) {
        return new JAXBElement<AuthenticateResponse>(_AuthenticateResponse_QNAME, AuthenticateResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetAssetDataResponse }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getAssetDataResponse")
    public JAXBElement<GetAssetDataResponse> createGetAssetDataResponse(GetAssetDataResponse value) {
        return new JAXBElement<GetAssetDataResponse>(_GetAssetDataResponse_QNAME, GetAssetDataResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetHierarchyLevel }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getHierarchyLevel")
    public JAXBElement<GetHierarchyLevel> createGetHierarchyLevel(GetHierarchyLevel value) {
        return new JAXBElement<GetHierarchyLevel>(_GetHierarchyLevel_QNAME, GetHierarchyLevel.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetReposInfoResponse }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getReposInfoResponse")
    public JAXBElement<GetReposInfoResponse> createGetReposInfoResponse(GetReposInfoResponse value) {
        return new JAXBElement<GetReposInfoResponse>(_GetReposInfoResponse_QNAME, GetReposInfoResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link byte[]}{@code >}}
     *
     */
    @XmlElementDecl(namespace = "", name = "return", scope = GetAssetPreviewDataResponse.class)
    public JAXBElement<byte[]> createGetAssetPreviewDataResponseReturn(byte[] value) {
        return new JAXBElement<byte[]>(_GetAssetPreviewDataResponseReturn_QNAME, byte[].class, GetAssetPreviewDataResponse.class, ((byte[]) value));
    }

}
apache-2.0
apache/pig
src/org/apache/pig/backend/hadoop/hbase/HBaseTableInputFormat.java
7754
/* Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.pig.backend.hadoop.hbase;

import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.List;
import java.util.ListIterator;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableRecordReader;
import org.apache.hadoop.hbase.mapreduce.TableSplit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.InputSplit;

/**
 * A {@link TableInputFormat} variant that can prune whole regions from the
 * split list based on optional row-key bounds (gt/gte/lt/lte) and cap the
 * number of records read per split (limit), via the nested record reader.
 */
public class HBaseTableInputFormat extends TableInputFormat {

    private static final Log LOG = LogFactory.getLog(HBaseTableInputFormat.class);

    // Optional row-key bounds; null means "no bound of that kind".
    protected final byte[] gt_;
    protected final byte[] gte_;
    protected final byte[] lt_;
    protected final byte[] lte_;

    /** No bounds, no record limit (-1 disables the limit in the reader). */
    public HBaseTableInputFormat() {
        this(-1, null, null, null, null);
    }

    /**
     * @param limit max records per split (&lt;= 0 means unlimited)
     * @param gt exclusive lower bound on row keys, or null
     * @param gte inclusive lower bound on row keys, or null
     * @param lt exclusive upper bound on row keys, or null
     * @param lte inclusive upper bound on row keys, or null
     */
    protected HBaseTableInputFormat(long limit, byte[] gt, byte[] gte, byte[] lt, byte[] lte) {
        super();
        setTableRecordReader(new HBaseTableRecordReader(limit));
        gt_ = gt;
        gte_ = gte;
        lt_ = lt;
        lte_ = lte;
    }

    /** Fluent builder for {@link HBaseTableInputFormat}. */
    public static class HBaseTableIFBuilder {
        protected byte[] gt_;
        protected byte[] gte_;
        protected byte[] lt_;
        protected byte[] lte_;
        protected long limit_;
        protected Configuration conf_;

        public HBaseTableIFBuilder withGt(byte[] gt) { gt_ = gt; return this; }
        public HBaseTableIFBuilder withGte(byte[] gte) { gte_ = gte; return this; }
        public HBaseTableIFBuilder withLt(byte[] lt) { lt_ = lt; return this; }
        public HBaseTableIFBuilder withLte(byte[] lte) { lte_ = lte; return this; }
        public HBaseTableIFBuilder withLimit(long limit) { limit_ = limit; return this; }
        public HBaseTableIFBuilder withConf(Configuration conf) { conf_ = conf; return this; }

        public HBaseTableInputFormat build() {
            HBaseTableInputFormat inputFormat = new HBaseTableInputFormat(limit_, gt_, gte_, lt_, lte_);
            if (conf_ != null) inputFormat.setConf(conf_);
            return inputFormat;
        }
    }

    /**
     * Returns the parent's splits minus any region that lies entirely outside
     * the configured gt/gte/lt/lte bounds (each bound is checked against the
     * region's start or end key as appropriate).
     */
    @Override
    public List<InputSplit> getSplits(org.apache.hadoop.mapreduce.JobContext context)
    throws IOException {
        List<InputSplit> splits = super.getSplits(context);
        ListIterator<InputSplit> splitIter = splits.listIterator();
        while (splitIter.hasNext()) {
            TableSplit split = (TableSplit) splitIter.next();
            byte[] startKey = split.getStartRow();
            byte[] endKey = split.getEndRow();
            // Skip if the region doesn't satisfy configured options.
            if ((skipRegion(CompareOp.LESS, startKey, lt_)) ||
                    (skipRegion(CompareOp.GREATER, endKey, gt_)) ||
                    (skipRegion(CompareOp.GREATER, endKey, gte_)) ||
                    (skipRegion(CompareOp.LESS_OR_EQUAL, startKey, lte_)) )  {
                splitIter.remove();
            }
        }
        return splits;
    }

    /**
     * Decides whether a region boundary key fails the given bound.
     * Delegates the comparison to an HBase {@link RowFilter}: the filter's
     * filterRowKey returns true when the key does NOT satisfy {@code op}
     * relative to {@code option}, i.e. when the region can be skipped.
     * An empty key (first/last region boundary) or a null option never skips.
     */
    private boolean skipRegion(CompareOp op, byte[] key, byte[] option ) throws IOException {

        if (key.length == 0 || option == null)
            return false;

        BinaryComparator comp = new BinaryComparator(option);
        RowFilter rowFilter = new RowFilter(op, comp);
        return rowFilter.filterRowKey(key, 0, key.length);
    }

    /**
     * Record reader that enforces the per-split record limit and estimates
     * progress by interpreting padded row keys as big integers within the
     * scan's [startRow, stopRow) range.
     */
    protected class HBaseTableRecordReader extends TableRecordReader {

        private long recordsSeen = 0;      // rows returned so far (for limit_)
        private final long limit_;         // max rows per split; <= 0 disables
        private byte[] startRow_;          // scan start row (may be empty)
        private byte[] endRow_;            // scan stop row (may be empty = open-ended)
        private transient byte[] currRow_; // last row key returned

        private int maxRowLength;          // pad target so keys compare at equal length
        private BigInteger bigStart_;      // numeric value of padded start row
        private BigInteger bigEnd_;        // numeric value of padded stop row
        private BigDecimal bigRange_;      // bigEnd_ - bigStart_ (can be zero)

        private transient float progressSoFar_ = 0;

        public HBaseTableRecordReader(long limit) {
            limit_ = limit;
        }

        @Override
        public void setScan(Scan scan) {
            super.setScan(scan);

            startRow_ = scan.getStartRow();
            endRow_ = scan.getStopRow();
            byte[] startPadded;
            byte[] endPadded;
            // Right-pad the shorter boundary with zero bytes so both keys
            // have the same length before the numeric conversion below.
            if (startRow_.length < endRow_.length) {
                startPadded = Bytes.padTail(startRow_, endRow_.length - startRow_.length);
                endPadded = endRow_;
            } else if (endRow_.length < startRow_.length) {
                startPadded = startRow_;
                endPadded = Bytes.padTail(endRow_, startRow_.length - endRow_.length);
            } else {
                startPadded = startRow_;
                endPadded = endRow_;
            }
            currRow_ = startRow_;
            // Prepending {1, 0} makes the most significant byte 0x01, so the
            // BigInteger is always positive and keys with a high leading bit
            // or leading zeros still convert monotonically.
            byte [] prependHeader = {1, 0};
            bigStart_ = new BigInteger(Bytes.add(prependHeader, startPadded));
            bigEnd_ = new BigInteger(Bytes.add(prependHeader, endPadded));
            bigRange_ = new BigDecimal(bigEnd_.subtract(bigStart_));
            maxRowLength = endRow_.length > startRow_.length ? endRow_.length : startRow_.length;
            LOG.info("setScan with ranges: " + bigStart_ + " - " + bigEnd_ + " ( " + bigRange_ + ")");
        }

        /**
         * Returns false once the per-split limit is reached; otherwise
         * delegates to the parent and tracks the current row key for
         * {@link #getProgress()}.
         */
        @Override
        public boolean nextKeyValue() throws IOException, InterruptedException {
            if (limit_ > 0 && ++recordsSeen > limit_) {
                return false;
            }
            boolean hasMore = super.nextKeyValue();
            if (hasMore) {
                currRow_ = getCurrentKey().get();
            }
            return hasMore;
        }

        /**
         * Estimates progress as (currRow - startRow) / (stopRow - startRow)
         * using the padded-BigInteger encoding built in {@link #setScan(Scan)}.
         * Returns 0 for open-ended scans where the fraction is undefined.
         * NOTE(review): {@code endRow_ == HConstants.LAST_ROW} is a reference
         * comparison; it is redundant with the preceding length check only if
         * LAST_ROW is the empty array — confirm against the HBase version in use.
         */
        @Override
        public float getProgress() {
            if (currRow_ == null || currRow_.length == 0 || endRow_.length == 0 || endRow_ == HConstants.LAST_ROW) {
                return 0;
            }
            byte[] lastPadded = currRow_;
            if (maxRowLength > currRow_.length) {
                lastPadded = Bytes.padTail(currRow_, maxRowLength - currRow_.length);
            }
            byte [] prependHeader = {1, 0};
            BigInteger bigLastRow = new BigInteger(Bytes.add(prependHeader, lastPadded));
            // Past the stop row (can happen with padding): freeze at the last value.
            if (bigLastRow.compareTo(bigEnd_) > 0) {
                return progressSoFar_;
            }
            BigDecimal processed = new BigDecimal(bigLastRow.subtract(bigStart_));
            try {
                BigDecimal progress = processed.setScale(3).divide(bigRange_, BigDecimal.ROUND_HALF_DOWN);
                progressSoFar_ = progress.floatValue();
                return progressSoFar_;
            } catch (java.lang.ArithmeticException e) {
                // bigRange_ is zero (start == stop): no meaningful fraction.
                return 0;
            }
        }

    }
}
apache-2.0
NiteshKant/RxJava
src/main/java/io/reactivex/internal/operators/maybe/MaybeSwitchIfEmptySingle.java
3840
/**
 * Copyright (c) 2016-present, RxJava Contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */

package io.reactivex.internal.operators.maybe;

import io.reactivex.*;
import io.reactivex.disposables.Disposable;
import io.reactivex.internal.disposables.DisposableHelper;
import io.reactivex.internal.fuseable.HasUpstreamMaybeSource;

import java.util.concurrent.atomic.AtomicReference;

/**
 * Subscribes to the other source if the main source is empty.
 *
 * @param <T> the value type
 */
public final class MaybeSwitchIfEmptySingle<T> extends Single<T> implements HasUpstreamMaybeSource<T> {

    /** The main Maybe source; its success value is relayed as-is. */
    final MaybeSource<T> source;

    /** The fallback Single, subscribed only when {@code source} completes empty. */
    final SingleSource<? extends T> other;

    public MaybeSwitchIfEmptySingle(MaybeSource<T> source, SingleSource<? extends T> other) {
        this.source = source;
        this.other = other;
    }

    @Override
    public MaybeSource<T> source() {
        return source;
    }

    @Override
    protected void subscribeActual(SingleObserver<? super T> observer) {
        source.subscribe(new SwitchIfEmptyMaybeObserver<T>(observer, other));
    }

    /**
     * Observer of the main Maybe. The inherited AtomicReference holds the
     * current upstream Disposable (first the Maybe's, then — after an empty
     * completion — the fallback Single's), so disposing this observer
     * cancels whichever source is currently active.
     */
    static final class SwitchIfEmptyMaybeObserver<T>
    extends AtomicReference<Disposable>
    implements MaybeObserver<T>, Disposable {

        private static final long serialVersionUID = 4603919676453758899L;

        final SingleObserver<? super T> downstream;

        final SingleSource<? extends T> other;

        SwitchIfEmptyMaybeObserver(SingleObserver<? super T> actual, SingleSource<? extends T> other) {
            this.downstream = actual;
            this.other = other;
        }

        @Override
        public void dispose() {
            DisposableHelper.dispose(this);
        }

        @Override
        public boolean isDisposed() {
            return DisposableHelper.isDisposed(get());
        }

        @Override
        public void onSubscribe(Disposable d) {
            if (DisposableHelper.setOnce(this, d)) {
                downstream.onSubscribe(this);
            }
        }

        @Override
        public void onSuccess(T value) {
            downstream.onSuccess(value);
        }

        @Override
        public void onError(Throwable e) {
            downstream.onError(e);
        }

        @Override
        public void onComplete() {
            // Main source was empty: hand the AtomicReference slot over to the
            // fallback Single. The CAS from the Maybe's disposable to null only
            // succeeds if we haven't been disposed concurrently (DISPOSED is
            // checked first and would also make the CAS fail), so a disposed
            // observer never subscribes to `other`.
            Disposable d = get();
            if (d != DisposableHelper.DISPOSED) {
                if (compareAndSet(d, null)) {
                    other.subscribe(new OtherSingleObserver<T>(downstream, this));
                }
            }
        }

        /**
         * Observer of the fallback Single; stores its Disposable back into the
         * parent's AtomicReference (via setOnce on the null slot left by
         * onComplete) and relays the terminal events downstream.
         */
        static final class OtherSingleObserver<T> implements SingleObserver<T> {

            final SingleObserver<? super T> downstream;

            final AtomicReference<Disposable> parent;
            OtherSingleObserver(SingleObserver<? super T> actual, AtomicReference<Disposable> parent) {
                this.downstream = actual;
                this.parent = parent;
            }

            @Override
            public void onSubscribe(Disposable d) {
                DisposableHelper.setOnce(parent, d);
            }

            @Override
            public void onSuccess(T value) {
                downstream.onSuccess(value);
            }

            @Override
            public void onError(Throwable e) {
                downstream.onError(e);
            }
        }

    }
}
apache-2.0
gocd/gocd
plugin-infra/go-plugin-config-repo/src/test/java/com/thoughtworks/go/plugin/configrepo/contract/material/CRConfigMaterialTest.java
2880
/* * Copyright 2022 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.plugin.configrepo.contract.material; import com.google.gson.JsonObject; import com.thoughtworks.go.plugin.configrepo.contract.AbstractCRTest; import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.List; import java.util.Map; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; public class CRConfigMaterialTest extends AbstractCRTest<CRConfigMaterial> { private final CRConfigMaterial named; private final CRConfigMaterial namedDest; private final CRConfigMaterial materialWithIgnores; private final CRConfigMaterial invalidList; public CRConfigMaterialTest() { named = new CRConfigMaterial("primary", null,null); namedDest = new CRConfigMaterial("primary", "folder",null); List<String> patterns = new ArrayList<>(); patterns.add("externals"); patterns.add("tools"); materialWithIgnores = new CRConfigMaterial("primary", "folder",new CRFilter(patterns,false)); CRFilter badFilter = new CRFilter(patterns,false); badFilter.setIncludesNoCheck(patterns); invalidList = new CRConfigMaterial("primary", "folder",badFilter); } @Override public void addGoodExamples(Map<String, CRConfigMaterial> examples) { examples.put("namedExample", named); examples.put("namedDest", namedDest); examples.put("ignoreFilter", materialWithIgnores); } @Override public void addBadExamples(Map<String, CRConfigMaterial> examples) { 
examples.put("invalidList",invalidList); } @Test public void shouldAppendTypeFieldWhenSerializingMaterials() { CRMaterial value = named; JsonObject jsonObject = (JsonObject)gson.toJsonTree(value); assertThat(jsonObject.get("type").getAsString(), is(CRConfigMaterial.TYPE_NAME)); } @Test public void shouldHandlePolymorphismWhenDeserializing() { CRMaterial value = named; String json = gson.toJson(value); CRConfigMaterial deserializedValue = (CRConfigMaterial)gson.fromJson(json,CRMaterial.class); assertThat("Deserialized value should equal to value before serialization", deserializedValue,is(value)); } }
apache-2.0
Gaduo/hapi-fhir
hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/model/codesystems/ObservationStatusEnumFactory.java
3374
package org.hl7.fhir.dstu3.model.codesystems; /* Copyright (c) 2011+, HL7, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ // Generated on Tue, Dec 6, 2016 09:42-0500 for FHIR v1.8.0 import org.hl7.fhir.dstu3.model.EnumFactory; public class ObservationStatusEnumFactory implements EnumFactory<ObservationStatus> { public ObservationStatus fromCode(String codeString) throws IllegalArgumentException { if (codeString == null || "".equals(codeString)) return null; if ("registered".equals(codeString)) return ObservationStatus.REGISTERED; if ("preliminary".equals(codeString)) return ObservationStatus.PRELIMINARY; if ("final".equals(codeString)) return ObservationStatus.FINAL; if ("amended".equals(codeString)) return ObservationStatus.AMENDED; if ("cancelled".equals(codeString)) return ObservationStatus.CANCELLED; if ("entered-in-error".equals(codeString)) return ObservationStatus.ENTEREDINERROR; if ("unknown".equals(codeString)) return ObservationStatus.UNKNOWN; throw new IllegalArgumentException("Unknown ObservationStatus code '"+codeString+"'"); } public String toCode(ObservationStatus code) { if (code == ObservationStatus.REGISTERED) return "registered"; if (code == ObservationStatus.PRELIMINARY) return "preliminary"; if (code == ObservationStatus.FINAL) return "final"; if (code == ObservationStatus.AMENDED) return "amended"; if (code == ObservationStatus.CANCELLED) return "cancelled"; if (code == ObservationStatus.ENTEREDINERROR) return "entered-in-error"; if (code == ObservationStatus.UNKNOWN) return "unknown"; return "?"; } public String toSystem(ObservationStatus code) { return code.getSystem(); } }
apache-2.0
gsheldon/optaplanner
optaplanner-examples/src/test/java/org/optaplanner/examples/nqueens/app/BrokenNQueensBenchmarkTest.java
2156
/* * Copyright 2013 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.examples.nqueens.app; import java.io.File; import org.junit.Test; import org.optaplanner.benchmark.api.PlannerBenchmarkException; import org.optaplanner.benchmark.api.PlannerBenchmarkFactory; import org.optaplanner.benchmark.config.PlannerBenchmarkConfig; import org.optaplanner.examples.common.app.PlannerBenchmarkTest; public class BrokenNQueensBenchmarkTest extends PlannerBenchmarkTest { @Override protected String createBenchmarkConfigResource() { return "org/optaplanner/examples/nqueens/benchmark/nqueensBenchmarkConfig.xml"; } @Override protected PlannerBenchmarkFactory buildPlannerBenchmarkFactory(File unsolvedDataFile) { PlannerBenchmarkFactory benchmarkFactory = super.buildPlannerBenchmarkFactory(unsolvedDataFile); PlannerBenchmarkConfig benchmarkConfig = benchmarkFactory.getPlannerBenchmarkConfig(); benchmarkConfig.setWarmUpSecondsSpentLimit(0L); benchmarkConfig.getInheritedSolverBenchmarkConfig().getSolverConfig().getTerminationConfig() .setStepCountLimit(-100); // Intentionally crash the solver return benchmarkFactory; } // ************************************************************************ // Tests // ************************************************************************ @Test(timeout = 100000, expected = PlannerBenchmarkException.class) public void benchmarkBroken8queens() { runBenchmarkTest(new 
File("data/nqueens/unsolved/8queens.xml")); } }
apache-2.0
adamjshook/accumulo
server/monitor/src/main/java/org/apache/accumulo/monitor/util/celltypes/CellType.java
1314
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.accumulo.monitor.util.celltypes; import java.io.Serializable; import java.util.Comparator; public abstract class CellType<T> implements Comparator<T>, Serializable { private static final long serialVersionUID = 1L; private boolean sortable = true; abstract public String alignment(); abstract public String format(Object obj); public final void setSortable(boolean sortable) { this.sortable = sortable; } public final boolean isSortable() { return sortable; } }
apache-2.0
cloudnautique/cloud-cattle
code/iaas/logic/src/main/java/io/cattle/platform/process/dao/impl/AccountDaoImpl.java
551
package io.cattle.platform.process.dao.impl; import static io.cattle.platform.core.model.tables.AccountTable.*; import io.cattle.platform.core.model.Account; import io.cattle.platform.db.jooq.dao.impl.AbstractJooqDao; import io.cattle.platform.process.dao.AccountDao; public class AccountDaoImpl extends AbstractJooqDao implements AccountDao { @Override public Account findByUuid(String uuid) { return create() .selectFrom(ACCOUNT) .where(ACCOUNT.UUID.eq(uuid)) .fetchOne(); } }
apache-2.0
mantal/Qbar
content/logistic/src/main/java/net/ros/client/render/ModelPipeInventory.java
4971
package net.ros.client.render; import com.google.common.collect.ImmutableList; import net.minecraft.block.state.IBlockState; import net.minecraft.client.Minecraft; import net.minecraft.client.renderer.block.model.BakedQuad; import net.minecraft.client.renderer.block.model.IBakedModel; import net.minecraft.client.renderer.block.model.ItemCameraTransforms; import net.minecraft.client.renderer.block.model.ItemOverrideList; import net.minecraft.client.renderer.texture.TextureAtlasSprite; import net.minecraft.entity.EntityLivingBase; import net.minecraft.item.ItemStack; import net.minecraft.util.EnumFacing; import net.minecraft.world.World; import net.ros.client.render.model.ModelCacheManager; import net.ros.client.render.model.obj.PipeOBJStates; import net.ros.client.render.model.obj.ROSOBJState; import net.ros.common.block.BlockPipeBase; import org.apache.commons.lang3.tuple.Pair; import javax.annotation.Nonnull; import javax.vecmath.Matrix4f; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; public class ModelPipeInventory implements IBakedModel { private final Map<ROSOBJState, CompositeBakedModel> CACHE = new HashMap<>(); private final BlockPipeBase pipeBlock; public ModelPipeInventory(BlockPipeBase pipeBlock) { this.pipeBlock = pipeBlock; } @Nonnull @Override public List<BakedQuad> getQuads(IBlockState state, EnumFacing face, long rand) { return Collections.emptyList(); } private CompositeBakedModel getModel(ROSOBJState pipeState) { if (CACHE.containsKey(pipeState)) return CACHE.get(pipeState); else { CompositeBakedModel model = new CompositeBakedModel(ModelCacheManager.getPipeQuads(pipeBlock, pipeState), Minecraft.getMinecraft().getBlockRendererDispatcher() .getModelForState(pipeBlock.getDefaultState())); CACHE.put(pipeState, model); return model; } } @Nonnull @Override public ItemOverrideList getOverrides() { return itemHandler; } @Override public boolean isAmbientOcclusion() { return false; } @Override public 
boolean isGui3d() { return true; } @Override public boolean isBuiltInRenderer() { return false; } @Nonnull @Override public TextureAtlasSprite getParticleTexture() { return Minecraft.getMinecraft().getTextureMapBlocks().getAtlasSprite("minecraft:blocks/dirt"); } @Nonnull @Override public ItemCameraTransforms getItemCameraTransforms() { return ItemCameraTransforms.DEFAULT; } private static class CompositeBakedModel implements IBakedModel { private IBakedModel pipeModel; private final List<BakedQuad> genQuads; CompositeBakedModel(List<BakedQuad> pipeQuads, IBakedModel pipeModel) { this.pipeModel = pipeModel; ImmutableList.Builder<BakedQuad> genBuilder = ImmutableList.builder(); genBuilder.addAll(pipeQuads); genQuads = genBuilder.build(); } @Nonnull @Override public List<BakedQuad> getQuads(IBlockState state, EnumFacing face, long rand) { return face == null ? genQuads : Collections.emptyList(); } @Override public boolean isAmbientOcclusion() { return pipeModel.isAmbientOcclusion(); } @Override public boolean isGui3d() { return pipeModel.isGui3d(); } @Override public boolean isBuiltInRenderer() { return pipeModel.isBuiltInRenderer(); } @Nonnull @Override public TextureAtlasSprite getParticleTexture() { return pipeModel.getParticleTexture(); } @Nonnull @Override public ItemOverrideList getOverrides() { return ItemOverrideList.NONE; } @Override public Pair<? extends IBakedModel, Matrix4f> handlePerspective(ItemCameraTransforms.TransformType cameraTransformType) { return Pair.of(this, pipeModel.handlePerspective(cameraTransformType).getRight()); } } private final ItemOverrideList itemHandler = new ItemOverrideList(ImmutableList.of()) { @Nonnull @Override public IBakedModel handleItemState(@Nonnull IBakedModel model, ItemStack stack, World world, EntityLivingBase entity) { return ModelPipeInventory.this.getModel(PipeOBJStates.getVisibilityState( pipeBlock.getPipeType().getSize(), EnumFacing.WEST, EnumFacing.EAST)); } }; }
apache-2.0
mdaniel/intellij-community
plugins/kotlin/fir-low-level-api/test/org/jetbrains/kotlin/idea/fir/low/level/api/sessions/SessionsInvalidationTestGenerated.java
2951
/* * Copyright 2010-2020 JetBrains s.r.o. and Kotlin Programming Language contributors. * Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file. */ package org.jetbrains.kotlin.idea.fir.low.level.api.sessions; import com.intellij.testFramework.TestDataPath; import org.jetbrains.kotlin.test.JUnit3RunnerWithInners; import org.jetbrains.kotlin.test.KotlinTestUtils; import org.jetbrains.kotlin.test.TestMetadata; import org.junit.runner.RunWith; import java.io.File; import java.util.regex.Pattern; /** This class is generated by {@link org.jetbrains.kotlin.generators.tests.TestsPackage}. DO NOT MODIFY MANUALLY */ @SuppressWarnings("all") @TestMetadata("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation") @TestDataPath("$PROJECT_ROOT") @RunWith(JUnit3RunnerWithInners.class) public class SessionsInvalidationTestGenerated extends AbstractSessionsInvalidationTest { private void runTest(String testDataFilePath) throws Exception { KotlinTestUtils.runTest(this::doTest, this, testDataFilePath); } public void testAllFilesPresentInSessionInvalidation() throws Exception { KotlinTestUtils.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation"), Pattern.compile("^([^\\.]+)$"), null, false); } @TestMetadata("binaryTree") public void testBinaryTree() throws Exception { runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/binaryTree/"); } @TestMetadata("binaryTreeNoInvalidated") public void testBinaryTreeNoInvalidated() throws Exception { runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/binaryTreeNoInvalidated/"); } @TestMetadata("binaryTreeWithAdditionalEdge") public void testBinaryTreeWithAdditionalEdge() throws Exception { runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/binaryTreeWithAdditionalEdge/"); } 
@TestMetadata("binaryTreeWithInvalidInRoot") public void testBinaryTreeWithInvalidInRoot() throws Exception { runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/binaryTreeWithInvalidInRoot/"); } @TestMetadata("linear") public void testLinear() throws Exception { runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/linear/"); } @TestMetadata("rhombus") public void testRhombus() throws Exception { runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/rhombus/"); } @TestMetadata("rhombusWithTwoInvalid") public void testRhombusWithTwoInvalid() throws Exception { runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/rhombusWithTwoInvalid/"); } }
apache-2.0
ifpb-disciplinas-2015-2/locadora-jpa-web
src/main/java/io/github/jass2125/locadora/jpa/EntityManagerJPA.java
740
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package io.github.jass2125.locadora.jpa; import javax.persistence.EntityManager; import javax.persistence.Persistence; /** * * @author Anderson Souza * @email jair_anderson_bs@hotmail.com * @since 2015, Feb 9, 2016 */ public class EntityManagerJPA { private static EntityManager em; private EntityManagerJPA() { } public static EntityManager getEntityManager(){ if(em == null) { em = Persistence.createEntityManagerFactory("default").createEntityManager(); } return em; } }
apache-2.0
futur/usergrid-stack
core/src/main/java/org/usergrid/persistence/query/tree/ContainsOperand.java
1697
/******************************************************************************* * Copyright 2012 Apigee Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package org.usergrid.persistence.query.tree; import org.antlr.runtime.Token; import org.usergrid.persistence.exceptions.PersistenceException; /** * @author tnine * */ public class ContainsOperand extends Operand { /** * @param property * @param literal */ public ContainsOperand(Token t) { super(t); } /* (non-Javadoc) * @see org.usergrid.persistence.query.tree.Operand#visit(org.usergrid.persistence.query.tree.QueryVisitor) */ @Override public void visit(QueryVisitor visitor) throws PersistenceException { visitor.visit(this); } public void setProperty(String name){ setChild(0, new Property(name)); } public void setValue(String value){ setChild(1, new StringLiteral(value)); } public Property getProperty(){ return (Property) this.children.get(0); } public StringLiteral getString(){ return (StringLiteral) this.children.get(1); } }
apache-2.0
apache/directory-server
core-api/src/main/java/org/apache/directory/server/core/api/subtree/SubtreeEvaluator.java
6471
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.server.core.api.subtree; import org.apache.directory.api.ldap.model.entry.Entry; import org.apache.directory.api.ldap.model.exception.LdapException; import org.apache.directory.api.ldap.model.name.Dn; import org.apache.directory.api.ldap.model.schema.SchemaManager; import org.apache.directory.api.ldap.model.subtree.SubtreeSpecification; import org.apache.directory.server.core.api.event.Evaluator; import org.apache.directory.server.core.api.event.ExpressionEvaluator; /** * An evaluator used to determine if an entry is included in the collection * represented by a subtree specification. * * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a> */ public class SubtreeEvaluator { /** A refinement filter evaluator */ private final Evaluator evaluator; /** * Creates a subtreeSpecification evaluatior which can be used to determine * if an entry is included within the collection of a subtree. * * @param schemaManager The server schemaManager */ public SubtreeEvaluator( SchemaManager schemaManager ) { evaluator = new ExpressionEvaluator( schemaManager ); } /** * Determines if an entry is selected by a subtree specification. 
* * @param subtree the subtree specification * @param apDn the distinguished name of the administrative point containing the subentry * @param entryDn the distinguished name of the candidate entry * @param entry The entry to evaluate * @return true if the entry is selected by the specification, false if it is not * @throws LdapException if errors are encountered while evaluating selection */ public boolean evaluate( SubtreeSpecification subtree, Dn apDn, Dn entryDn, Entry entry ) throws LdapException { /* ===================================================================== * NOTE: Regarding the overall approach, we try to narrow down the * possibilities by slowly pruning relative names off of the entryDn. * For example we check first if the entry is a descendant of the AP. * If so we use the relative name thereafter to calculate if it is * a descendant of the base. This means shorter names to compare and * less work to do while we continue to deduce inclusion by the subtree * specification. * ===================================================================== */ // First construct the subtree base, which is the concatenation of the // AP Dn and the subentry base Dn subentryBaseDn = apDn; subentryBaseDn = subentryBaseDn.add( subtree.getBase() ); if ( !entryDn.isDescendantOf( subentryBaseDn ) ) { // The entry Dn is not part of the subtree specification, get out return false; } /* * Evaluate based on minimum and maximum chop values. Here we simply * need to compare the distances respectively with the size of the * baseRelativeRdn. For the max distance entries with a baseRelativeRdn * size greater than the max distance are rejected. For the min distance * entries with a baseRelativeRdn size less than the minimum distance * are rejected. 
*/ int entryRelativeDnSize = entryDn.size() - subentryBaseDn.size(); if ( ( subtree.getMaxBaseDistance() != SubtreeSpecification.UNBOUNDED_MAX ) && ( entryRelativeDnSize > subtree.getMaxBaseDistance() ) ) { return false; } if ( ( subtree.getMinBaseDistance() > 0 ) && ( entryRelativeDnSize < subtree.getMinBaseDistance() ) ) { return false; } /* * For specific exclusions we must iterate through the set and check * if the baseRelativeRdn is a descendant of the exclusion. The * isDescendant() function will return true if the compared names * are equal so for chopAfter exclusions we must check for equality * as well and reject if the relative names are equal. */ // Now, get the entry's relative part if ( !subtree.getChopBeforeExclusions().isEmpty() || !subtree.getChopAfterExclusions().isEmpty() ) { Dn entryRelativeDn = entryDn.getDescendantOf( apDn ).getDescendantOf( subtree.getBase() ); for ( Dn chopBeforeDn : subtree.getChopBeforeExclusions() ) { if ( entryRelativeDn.isDescendantOf( chopBeforeDn ) ) { return false; } } for ( Dn chopAfterDn : subtree.getChopAfterExclusions() ) { if ( entryRelativeDn.isDescendantOf( chopAfterDn ) && !chopAfterDn.equals( entryRelativeDn ) ) { return false; } } } /* * The last remaining step is to check and see if the refinement filter * selects the entry candidate based on objectClass attribute values. * To do this we invoke the refinement evaluator members evaluate() method. */ if ( subtree.getRefinement() != null ) { return evaluator.evaluate( subtree.getRefinement(), entryDn, entry ); } /* * If nothing has rejected the candidate entry and there is no refinement * filter then the entry is included in the collection represented by the * subtree specification so we return true. */ return true; } }
apache-2.0
apache/logging-log4j2
log4j-core/src/test/java/org/apache/logging/log4j/core/appender/SocketAppenderBuilderTest.java
1293
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache license, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the license for the specific language governing permissions and * limitations under the license. */ package org.apache.logging.log4j.core.appender; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertTrue; public class SocketAppenderBuilderTest { /** * Tests https://issues.apache.org/jira/browse/LOG4J2-1620 */ @Test public void testDefaultImmediateFlush() { assertTrue(SocketAppender.newBuilder().isImmediateFlush(), "Regression of LOG4J2-1620: default value for immediateFlush should be true"); } }
apache-2.0
leveyj/ignite
modules/core/src/main/java/org/apache/ignite/marshaller/jdk/JdkMarshaller.java
5576
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.marshaller.jdk; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.util.io.GridByteArrayInputStream; import org.apache.ignite.internal.util.io.GridByteArrayOutputStream; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.marshaller.AbstractNodeNameAwareMarshaller; import org.jetbrains.annotations.Nullable; /** * Implementation of {@link org.apache.ignite.marshaller.Marshaller} based on JDK serialization mechanism. * <p> * <h1 class="header">Configuration</h1> * <h2 class="header">Mandatory</h2> * This marshaller has no mandatory configuration parameters. * <h2 class="header">Java Example</h2> * {@code JdkMarshaller} needs to be explicitly configured to override default {@link org.apache.ignite.marshaller.optimized.OptimizedMarshaller}. 
* <pre name="code" class="java"> * JdkMarshaller marshaller = new JdkMarshaller(); * * IgniteConfiguration cfg = new IgniteConfiguration(); * * // Override default marshaller. * cfg.setMarshaller(marshaller); * * // Starts grid. * G.start(cfg); * </pre> * <h2 class="header">Spring Example</h2> * JdkMarshaller can be configured from Spring XML configuration file: * <pre name="code" class="xml"> * &lt;bean id="grid.custom.cfg" class="org.apache.ignite.configuration.IgniteConfiguration" singleton="true"&gt; * ... * &lt;property name="marshaller"&gt; * &lt;bean class="org.apache.ignite.marshaller.jdk.JdkMarshaller"/&gt; * &lt;/property&gt; * ... * &lt;/bean&gt; * </pre> * <p> * <img src="http://ignite.apache.org/images/spring-small.png"> * <br> * For information about Spring framework visit <a href="http://www.springframework.org/">www.springframework.org</a> */ public class JdkMarshaller extends AbstractNodeNameAwareMarshaller { /** {@inheritDoc} */ @Override protected void marshal0(@Nullable Object obj, OutputStream out) throws IgniteCheckedException { assert out != null; ObjectOutputStream objOut = null; try { objOut = new JdkMarshallerObjectOutputStream(new JdkMarshallerOutputStreamWrapper(out)); // Make sure that we serialize only task, without class loader. 
objOut.writeObject(obj); objOut.flush(); } catch (Exception e) { throw new IgniteCheckedException("Failed to serialize object: " + obj, e); } finally{ U.closeQuiet(objOut); } } /** {@inheritDoc} */ @Override protected byte[] marshal0(@Nullable Object obj) throws IgniteCheckedException { GridByteArrayOutputStream out = null; try { out = new GridByteArrayOutputStream(DFLT_BUFFER_SIZE); marshal(obj, out); return out.toByteArray(); } finally { U.close(out, null); } } /** {@inheritDoc} */ @SuppressWarnings({"unchecked"}) @Override protected <T> T unmarshal0(InputStream in, @Nullable ClassLoader clsLdr) throws IgniteCheckedException { assert in != null; if (clsLdr == null) clsLdr = getClass().getClassLoader(); ObjectInputStream objIn = null; try { objIn = new JdkMarshallerObjectInputStream(new JdkMarshallerInputStreamWrapper(in), clsLdr); return (T)objIn.readObject(); } catch (ClassNotFoundException e) { throw new IgniteCheckedException("Failed to find class with given class loader for unmarshalling " + "(make sure same versions of all classes are available on all nodes or enable peer-class-loading): " + clsLdr, e); } catch (Exception e) { throw new IgniteCheckedException("Failed to deserialize object with given class loader: " + clsLdr, e); } finally{ U.closeQuiet(objIn); } } /** {@inheritDoc} */ @Override protected <T> T unmarshal0(byte[] arr, @Nullable ClassLoader clsLdr) throws IgniteCheckedException { GridByteArrayInputStream in = null; try { in = new GridByteArrayInputStream(arr, 0, arr.length); return unmarshal(in, clsLdr); } finally { U.close(in, null); } } /** {@inheritDoc} */ @Override public void onUndeploy(ClassLoader ldr) { // No-op. } /** {@inheritDoc} */ @Override public String toString() { return S.toString(JdkMarshaller.class, this); } }
apache-2.0
alena1108/cattle
code/iaas/config-item/server/src/main/java/io/cattle/platform/configitem/server/model/impl/AbstractResourceRootConfigItem.java
945
package io.cattle.platform.configitem.server.model.impl; import java.io.IOException; import io.cattle.platform.configitem.server.model.RefreshableConfigItem; import io.cattle.platform.configitem.server.resource.ResourceRoot; import io.cattle.platform.configitem.version.ConfigItemStatusManager; public abstract class AbstractResourceRootConfigItem extends AbstractConfigItem implements RefreshableConfigItem { ResourceRoot resourceRoot; public AbstractResourceRootConfigItem(String name, ConfigItemStatusManager versionManager, ResourceRoot resourceRoot) { super(name, versionManager); this.resourceRoot = resourceRoot; } @Override public String getSourceRevision() { return resourceRoot.getSourceRevision(); } @Override public void refresh() throws IOException { resourceRoot.scan(); } public ResourceRoot getResourceRoot() { return resourceRoot; } }
apache-2.0
objectiser/camel
tooling/camel-util-json/src/main/java/org/apache/camel/util/json/JsonArray.java
19042
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.util.json;

import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;

/**
 * JsonArray is a common non-thread safe data format for a collection of data.
 * The contents of a JsonArray are only validated as JSON values on
 * serialization.
 *
 * @see Jsoner
 * @since 2.0.0
 */
public class JsonArray extends ArrayList<Object> implements Jsonable {

    /**
     * The serialization version this class is compatible with. This value
     * doesn't need to be incremented if and only if the only changes to occur
     * were updating comments, updating javadocs, adding new fields to the
     * class, changing the fields from static to non-static, or changing the
     * fields from transient to non transient. All other changes require this
     * number be incremented.
     */
    private static final long serialVersionUID = 1L;

    /** Instantiates an empty JsonArray. */
    public JsonArray() {
    }

    /**
     * Instantiate a new JsonArray using ArrayList's constructor of the same
     * type.
     *
     * @param collection represents the elements to produce the JsonArray with.
     */
    public JsonArray(final Collection<?> collection) {
        super(collection);
    }

    /**
     * A convenience method that assumes every element of the JsonArray is
     * castable to T before adding it to a collection of Ts.
     *
     * @param <T> represents the type that all of the elements of the JsonArray
     *            should be cast to and the type the collection will contain.
     * @param destination represents where all of the elements of the JsonArray
     *            are added to after being cast to the generic type provided.
     * @throws ClassCastException if the unchecked cast of an element to T
     *             fails.
     */
    @SuppressWarnings("unchecked")
    public <T> void asCollection(final Collection<T> destination) {
        for (final Object o : this) {
            destination.add((T)o);
        }
    }

    /**
     * A convenience method that assumes there is a BigDecimal, Number, or
     * String at the given index. If a Number or String is there it is used to
     * construct a new BigDecimal.
     *
     * @param index representing where the value is expected to be at.
     * @return the value stored at the key or the default provided if the key
     *         doesn't exist.
     * @throws ClassCastException if there was a value but didn't match the
     *             assumed return types.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *             element indexes in the JsonArray.
     * @throws NumberFormatException if a String isn't a valid representation of
     *             a BigDecimal.
     * @see BigDecimal
     * @see Number#doubleValue()
     */
    public BigDecimal getBigDecimal(final int index) {
        Object returnable = this.get(index);
        if (returnable instanceof BigDecimal) {
            /* Success there was a BigDecimal. */
        } else if (returnable instanceof Number) {
            /* A number can be used to construct a BigDecimal.
             * NOTE: goes through toString() rather than doubleValue(), which
             * preserves the Number's printed representation exactly. */
            returnable = new BigDecimal(returnable.toString());
        } else if (returnable instanceof String) {
            /* A number can be used to construct a BigDecimal. */
            returnable = new BigDecimal((String)returnable);
        }
        /* A null element falls through unchanged and is returned as null. */
        return (BigDecimal)returnable;
    }

    /**
     * A convenience method that assumes there is a Boolean or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a boolean.
     * @throws ClassCastException if there was a value but didn't match the
     *             assumed return type.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *             element indexes in the JsonArray.
     */
    public Boolean getBoolean(final int index) {
        Object returnable = this.get(index);
        if (returnable instanceof String) {
            /* Boolean.valueOf yields true only for "true" (case-insensitive). */
            returnable = Boolean.valueOf((String)returnable);
        }
        return (Boolean)returnable;
    }

    /**
     * A convenience method that assumes there is a Number or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a byte.
     * @throws ClassCastException if there was a value but didn't match the
     *             assumed return type.
     * @throws NumberFormatException if a String isn't a valid representation of
     *             a BigDecimal or if the Number represents the double or float
     *             Infinity or NaN.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *             element indexes in the JsonArray.
     * @see Number
     */
    public Byte getByte(final int index) {
        Object returnable = this.get(index);
        if (returnable == null) {
            return null;
        }
        if (returnable instanceof String) {
            /* A String can be used to construct a BigDecimal. */
            returnable = new BigDecimal((String)returnable);
        }
        /* Narrowing conversion: silently truncates values outside byte range. */
        return ((Number)returnable).byteValue();
    }

    /**
     * A convenience method that assumes there is a Collection value at the
     * given index.
     *
     * @param <T> the kind of collection to expect at the index. Note unless
     *            manually added, collection values will be a JsonArray.
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a Collection.
     * @throws ClassCastException if there was a value but didn't match the
     *             assumed return type.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *             element indexes in the JsonArray.
     * @see Collection
     */
    @SuppressWarnings("unchecked")
    public <T extends Collection<?>> T getCollection(final int index) {
        /*
         * The unchecked warning is suppressed because there is no way of
         * guaranteeing at compile time the cast will work.
         */
        return (T)this.get(index);
    }

    /**
     * A convenience method that assumes there is a Number or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a double.
     * @throws ClassCastException if there was a value but didn't match the
     *             assumed return type.
     * @throws NumberFormatException if a String isn't a valid representation of
     *             a BigDecimal or if the Number represents the double or float
     *             Infinity or NaN.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *             element indexes in the JsonArray.
     * @see Number
     */
    public Double getDouble(final int index) {
        Object returnable = this.get(index);
        if (returnable == null) {
            return null;
        }
        if (returnable instanceof String) {
            /* A String can be used to construct a BigDecimal. */
            returnable = new BigDecimal((String)returnable);
        }
        return ((Number)returnable).doubleValue();
    }

    /**
     * A convenience method that assumes there is a String value at the given
     * index representing a fully qualified name in dot notation of an enum.
     *
     * @param index representing where the value is expected to be at.
     * @param <T> the Enum type the value at the index is expected to belong to.
     * @return the enum based on the string found at the index, or null if the
     *         value at the index was null.
     * @throws ClassNotFoundException if the element was a String but the
     *             declaring enum type couldn't be determined with it.
     * @throws ClassCastException if the element at the index was not a String
     *             or if the fully qualified enum name is of the wrong type.
     * @throws IllegalArgumentException if an enum type was dynamically
     *             determined but it doesn't define an enum with the dynamically
     *             determined name.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *             element indexes in the JsonArray.
     * @see Enum#valueOf(Class, String)
     */
    @SuppressWarnings("unchecked")
    public <T extends Enum<T>> T getEnum(final int index) throws ClassNotFoundException {
        /*
         * Supressing the unchecked warning because the returnType is
         * dynamically identified and could lead to a ClassCastException when
         * returnType is cast to Class<T>, which is expected by the method's
         * contract.
         */
        T returnable;
        final String element;
        final String[] splitValues;
        final int numberOfValues;
        final StringBuilder returnTypeName;
        final StringBuilder enumName;
        final Class<T> returnType;
        /* Make sure the element at the index is a String. */
        element = this.getString(index);
        if (element == null) {
            return null;
        }
        /* Get the package, class, and enum names. */
        splitValues = element.split("\\.");
        numberOfValues = splitValues.length;
        returnTypeName = new StringBuilder();
        enumName = new StringBuilder();
        for (int i = 0; i < numberOfValues; i++) {
            if (i == (numberOfValues - 1)) {
                /*
                 * If it is the last split value then it should be the name of
                 * the Enum since dots are not allowed in enum names.
                 */
                enumName.append(splitValues[i]);
            } else if (i == (numberOfValues - 2)) {
                /*
                 * If it is the penultimate split value then it should be the
                 * end of the package/enum type and not need a dot appended to
                 * it.
                 */
                returnTypeName.append(splitValues[i]);
            } else {
                /*
                 * Must be part of the package/enum type and will need a dot
                 * appended to it since they got removed in the split.
                 */
                returnTypeName.append(splitValues[i]);
                returnTypeName.append(".");
            }
        }
        /* Use the package/class and enum names to get the Enum<T>. */
        returnType = (Class<T>)Class.forName(returnTypeName.toString());
        returnable = Enum.valueOf(returnType, enumName.toString());
        return returnable;
    }

    /**
     * A convenience method that assumes there is a Number or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a float.
     * @throws ClassCastException if there was a value but didn't match the
     *             assumed return type.
     * @throws NumberFormatException if a String isn't a valid representation of
     *             a BigDecimal or if the Number represents the double or float
     *             Infinity or NaN.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *             element indexes in the JsonArray.
     * @see Number
     */
    public Float getFloat(final int index) {
        Object returnable = this.get(index);
        if (returnable == null) {
            return null;
        }
        if (returnable instanceof String) {
            /* A String can be used to construct a BigDecimal. */
            returnable = new BigDecimal((String)returnable);
        }
        return ((Number)returnable).floatValue();
    }

    /**
     * A convenience method that assumes there is a Number or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a int.
     * @throws ClassCastException if there was a value but didn't match the
     *             assumed return type.
     * @throws NumberFormatException if a String isn't a valid representation of
     *             a BigDecimal or if the Number represents the double or float
     *             Infinity or NaN.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *             element indexes in the JsonArray.
     * @see Number
     */
    public Integer getInteger(final int index) {
        Object returnable = this.get(index);
        if (returnable == null) {
            return null;
        }
        if (returnable instanceof String) {
            /* A String can be used to construct a BigDecimal. */
            returnable = new BigDecimal((String)returnable);
        }
        return ((Number)returnable).intValue();
    }

    /**
     * A convenience method that assumes there is a Number or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a long.
     * @throws ClassCastException if there was a value but didn't match the
     *             assumed return type.
     * @throws NumberFormatException if a String isn't a valid representation of
     *             a BigDecimal or if the Number represents the double or float
     *             Infinity or NaN.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *             element indexes in the JsonArray.
     * @see Number
     */
    public Long getLong(final int index) {
        Object returnable = this.get(index);
        if (returnable == null) {
            return null;
        }
        if (returnable instanceof String) {
            /* A String can be used to construct a BigDecimal. */
            returnable = new BigDecimal((String)returnable);
        }
        return ((Number)returnable).longValue();
    }

    /**
     * A convenience method that assumes there is a Map value at the given
     * index.
     *
     * @param <T> the kind of map to expect at the index. Note unless manually
     *            added, Map values will be a JsonObject.
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a Map.
     * @throws ClassCastException if there was a value but didn't match the
     *             assumed return type.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *             element indexes in the JsonArray.
     * @see Map
     */
    @SuppressWarnings("unchecked")
    public <T extends Map<?, ?>> T getMap(final int index) {
        /*
         * The unchecked warning is suppressed because there is no way of
         * guaranteeing at compile time the cast will work.
         */
        return (T)this.get(index);
    }

    /**
     * A convenience method that assumes there is a Number or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a short.
     * @throws ClassCastException if there was a value but didn't match the
     *             assumed return type.
     * @throws NumberFormatException if a String isn't a valid representation of
     *             a BigDecimal or if the Number represents the double or float
     *             Infinity or NaN.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *             element indexes in the JsonArray.
     * @see Number
     */
    public Short getShort(final int index) {
        Object returnable = this.get(index);
        if (returnable == null) {
            return null;
        }
        if (returnable instanceof String) {
            /* A String can be used to construct a BigDecimal. */
            returnable = new BigDecimal((String)returnable);
        }
        return ((Number)returnable).shortValue();
    }

    /**
     * A convenience method that assumes there is a Boolean, Number, or String
     * value at the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a String.
     * @throws ClassCastException if there was a value but didn't match the
     *             assumed return type.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *             element indexes in the JsonArray.
     */
    public String getString(final int index) {
        Object returnable = this.get(index);
        if (returnable instanceof Boolean) {
            returnable = returnable.toString();
        } else if (returnable instanceof Number) {
            returnable = returnable.toString();
        }
        return (String)returnable;
    }

    /*
     * (non-Javadoc)
     * @see org.apache.camel.util.json.Jsonable#asJsonString()
     */
    @Override
    public String toJson() {
        final StringWriter writable = new StringWriter();
        try {
            this.toJson(writable);
        } catch (final IOException caught) {
            /* See java.io.StringWriter.
             * Intentionally ignored: StringWriter's write methods never throw
             * IOException, so this branch is unreachable in practice. */
        }
        return writable.toString();
    }

    /*
     * (non-Javadoc)
     * @see org.apache.camel.util.json.Jsonable#toJsonString(java.io.Writer)
     */
    @Override
    public void toJson(final Writer writable) throws IOException {
        /* Serialize as a JSON array: comma-separated serialized elements in brackets. */
        boolean isFirstElement = true;
        final Iterator<Object> elements = this.iterator();
        writable.write('[');
        while (elements.hasNext()) {
            if (isFirstElement) {
                isFirstElement = false;
            } else {
                writable.write(',');
            }
            writable.write(Jsoner.serialize(elements.next()));
        }
        writable.write(']');
    }

}
apache-2.0
Jasig/NotificationPortlet
notification-portlet-webapp/src/main/java/org/jasig/portlet/notice/util/UsernameFinder.java
3039
/* * Licensed to Apereo under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Apereo licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.portlet.notice.util; import javax.portlet.PortletRequest; import javax.servlet.http.HttpServletRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.security.core.Authentication; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.stereotype.Component; @Component("usernameFinder") public final class UsernameFinder { @Value("${UsernameFinder.unauthenticatedUsername}") private String unauthenticatedUsername = "guest"; private Logger logger = LoggerFactory.getLogger(getClass()); /** * @deprecated Prefer interactions that are not based on the Portlet API */ @Deprecated public String findUsername(PortletRequest req) { return req.getRemoteUser() != null ? 
req.getRemoteUser() : unauthenticatedUsername; } /** * @since 4.0 */ public String findUsername(HttpServletRequest request) { final Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); logger.trace("Processing the following Authentication object: {}", authentication); final String rslt = (String) authentication.getPrincipal(); logger.debug("Found username '{}' based on the contents of the SecurityContextHolder", rslt); // Identification based on Spring Security is required to access Servlet-based APIs if (rslt == null) { throw new SecurityException("User not identified"); } return rslt; } /** * @deprecated Prefer interactions that are not based on the Portlet API */ @Deprecated public boolean isAuthenticated(PortletRequest req) { return !findUsername(req).equalsIgnoreCase(unauthenticatedUsername); } public boolean isAuthenticated(HttpServletRequest request) { final Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); logger.trace("Processing the following Authentication object: {}", authentication); return authentication != null && authentication.isAuthenticated(); } }
apache-2.0
gianm/druid
indexing-service/src/main/java/org/apache/druid/indexing/input/DruidInputSource.java
17743
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.indexing.input; import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.collect.FluentIterable; import com.google.common.collect.Iterators; import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.data.input.AbstractInputSource; import org.apache.druid.data.input.InputFileAttribute; import org.apache.druid.data.input.InputFormat; import org.apache.druid.data.input.InputRowSchema; import org.apache.druid.data.input.InputSourceReader; import org.apache.druid.data.input.InputSplit; import org.apache.druid.data.input.MaxSizeSplitHintSpec; import org.apache.druid.data.input.SegmentsSplitHintSpec; import org.apache.druid.data.input.SplitHintSpec; import org.apache.druid.data.input.impl.InputEntityIteratingReader; import org.apache.druid.data.input.impl.SplittableInputSource; import org.apache.druid.indexing.common.ReingestionTimelineUtils; import 
org.apache.druid.indexing.common.RetryPolicy; import org.apache.druid.indexing.common.RetryPolicyFactory; import org.apache.druid.indexing.common.SegmentLoaderFactory; import org.apache.druid.indexing.firehose.WindowedSegmentId; import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.guava.Comparators; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.query.filter.DimFilter; import org.apache.druid.segment.IndexIO; import org.apache.druid.segment.loading.SegmentLoader; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.TimelineObjectHolder; import org.apache.druid.timeline.VersionedIntervalTimeline; import org.apache.druid.timeline.partition.PartitionChunk; import org.apache.druid.timeline.partition.PartitionHolder; import org.apache.druid.utils.Streams; import org.joda.time.Duration; import org.joda.time.Interval; import javax.annotation.Nullable; import java.io.File; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.ThreadLocalRandom; import java.util.stream.Stream; public class DruidInputSource extends AbstractInputSource implements SplittableInputSource<List<WindowedSegmentId>> { private static final Logger LOG = new Logger(DruidInputSource.class); /** * A Comparator that orders {@link WindowedSegmentId} mainly by segmentId (which is important), and then by intervals * (which is arbitrary, and only here for totality of ordering). 
*/ private static final Comparator<WindowedSegmentId> WINDOWED_SEGMENT_ID_COMPARATOR = Comparator.comparing(WindowedSegmentId::getSegmentId) .thenComparing(windowedSegmentId -> windowedSegmentId.getIntervals().size()) .thenComparing( (WindowedSegmentId a, WindowedSegmentId b) -> { // Same segmentId, same intervals list size. Compare each interval. int cmp = 0; for (int i = 0; i < a.getIntervals().size(); i++) { cmp = Comparators.intervalsByStartThenEnd() .compare(a.getIntervals().get(i), b.getIntervals().get(i)); if (cmp != 0) { return cmp; } } return cmp; } ); private final String dataSource; // Exactly one of interval and segmentIds should be non-null. Typically 'interval' is specified directly // by the user creating this firehose and 'segmentIds' is used for sub-tasks if it is split for parallel // batch ingestion. @Nullable private final Interval interval; @Nullable private final List<WindowedSegmentId> segmentIds; private final DimFilter dimFilter; private final List<String> dimensions; private final List<String> metrics; private final IndexIO indexIO; private final CoordinatorClient coordinatorClient; private final SegmentLoaderFactory segmentLoaderFactory; private final RetryPolicyFactory retryPolicyFactory; @JsonCreator public DruidInputSource( @JsonProperty("dataSource") final String dataSource, @JsonProperty("interval") @Nullable Interval interval, // Specifying "segments" is intended only for when this FirehoseFactory has split itself, // not for direct end user use. 
@JsonProperty("segments") @Nullable List<WindowedSegmentId> segmentIds, @JsonProperty("filter") DimFilter dimFilter, @Nullable @JsonProperty("dimensions") List<String> dimensions, @Nullable @JsonProperty("metrics") List<String> metrics, @JacksonInject IndexIO indexIO, @JacksonInject CoordinatorClient coordinatorClient, @JacksonInject SegmentLoaderFactory segmentLoaderFactory, @JacksonInject RetryPolicyFactory retryPolicyFactory ) { Preconditions.checkNotNull(dataSource, "dataSource"); if ((interval == null && segmentIds == null) || (interval != null && segmentIds != null)) { throw new IAE("Specify exactly one of 'interval' and 'segments'"); } this.dataSource = dataSource; this.interval = interval; this.segmentIds = segmentIds; this.dimFilter = dimFilter; this.dimensions = dimensions; this.metrics = metrics; this.indexIO = Preconditions.checkNotNull(indexIO, "null IndexIO"); this.coordinatorClient = Preconditions.checkNotNull(coordinatorClient, "null CoordinatorClient"); this.segmentLoaderFactory = Preconditions.checkNotNull(segmentLoaderFactory, "null SegmentLoaderFactory"); this.retryPolicyFactory = Preconditions.checkNotNull(retryPolicyFactory, "null RetryPolicyFactory"); } @JsonProperty public String getDataSource() { return dataSource; } @Nullable @JsonProperty public Interval getInterval() { return interval; } @Nullable @JsonProperty("segments") @JsonInclude(Include.NON_NULL) public List<WindowedSegmentId> getSegmentIds() { return segmentIds; } @JsonProperty("filter") public DimFilter getDimFilter() { return dimFilter; } @JsonProperty public List<String> getDimensions() { return dimensions; } @JsonProperty public List<String> getMetrics() { return metrics; } @Override protected InputSourceReader fixedFormatReader(InputRowSchema inputRowSchema, @Nullable File temporaryDirectory) { final SegmentLoader segmentLoader = segmentLoaderFactory.manufacturate(temporaryDirectory); final List<TimelineObjectHolder<String, DataSegment>> timeline = createTimeline(); final 
Iterator<DruidSegmentInputEntity> entityIterator = FluentIterable .from(timeline) .transformAndConcat(holder -> { //noinspection ConstantConditions final PartitionHolder<DataSegment> partitionHolder = holder.getObject(); //noinspection ConstantConditions return FluentIterable .from(partitionHolder) .transform(chunk -> new DruidSegmentInputEntity(segmentLoader, chunk.getObject(), holder.getInterval())); }).iterator(); final List<String> effectiveDimensions = ReingestionTimelineUtils.getDimensionsToReingest( dimensions, inputRowSchema.getDimensionsSpec(), timeline ); List<String> effectiveMetrics; if (metrics == null) { effectiveMetrics = ReingestionTimelineUtils.getUniqueMetrics(timeline); } else { effectiveMetrics = metrics; } final DruidSegmentInputFormat inputFormat = new DruidSegmentInputFormat( indexIO, dimFilter, effectiveDimensions, effectiveMetrics ); return new InputEntityIteratingReader( inputRowSchema, inputFormat, entityIterator, temporaryDirectory ); } private List<TimelineObjectHolder<String, DataSegment>> createTimeline() { if (interval == null) { return getTimelineForSegmentIds(coordinatorClient, dataSource, segmentIds); } else { return getTimelineForInterval(coordinatorClient, retryPolicyFactory, dataSource, interval); } } @Override public Stream<InputSplit<List<WindowedSegmentId>>> createSplits( InputFormat inputFormat, @Nullable SplitHintSpec splitHintSpec ) { // segmentIds is supposed to be specified by the supervisor task during the parallel indexing. // If it's not null, segments are already split by the supervisor task and further split won't happen. if (segmentIds == null) { return Streams.sequentialStreamFrom( createSplits( coordinatorClient, retryPolicyFactory, dataSource, interval, splitHintSpec == null ? 
SplittableInputSource.DEFAULT_SPLIT_HINT_SPEC : splitHintSpec ) ); } else { return Stream.of(new InputSplit<>(segmentIds)); } } @Override public int estimateNumSplits(InputFormat inputFormat, @Nullable SplitHintSpec splitHintSpec) { // segmentIds is supposed to be specified by the supervisor task during the parallel indexing. // If it's not null, segments are already split by the supervisor task and further split won't happen. if (segmentIds == null) { return Iterators.size( createSplits( coordinatorClient, retryPolicyFactory, dataSource, interval, splitHintSpec == null ? SplittableInputSource.DEFAULT_SPLIT_HINT_SPEC : splitHintSpec ) ); } else { return 1; } } @Override public SplittableInputSource<List<WindowedSegmentId>> withSplit(InputSplit<List<WindowedSegmentId>> split) { return new DruidInputSource( dataSource, null, split.get(), dimFilter, dimensions, metrics, indexIO, coordinatorClient, segmentLoaderFactory, retryPolicyFactory ); } @Override public boolean needsFormat() { return false; } public static Iterator<InputSplit<List<WindowedSegmentId>>> createSplits( CoordinatorClient coordinatorClient, RetryPolicyFactory retryPolicyFactory, String dataSource, Interval interval, SplitHintSpec splitHintSpec ) { final SplitHintSpec convertedSplitHintSpec; if (splitHintSpec instanceof SegmentsSplitHintSpec) { final SegmentsSplitHintSpec segmentsSplitHintSpec = (SegmentsSplitHintSpec) splitHintSpec; convertedSplitHintSpec = new MaxSizeSplitHintSpec( segmentsSplitHintSpec.getMaxInputSegmentBytesPerTask(), segmentsSplitHintSpec.getMaxNumSegments() ); } else { convertedSplitHintSpec = splitHintSpec; } final List<TimelineObjectHolder<String, DataSegment>> timelineSegments = getTimelineForInterval( coordinatorClient, retryPolicyFactory, dataSource, interval ); final Map<WindowedSegmentId, Long> segmentIdToSize = createWindowedSegmentIdFromTimeline(timelineSegments); //noinspection ConstantConditions return Iterators.transform( convertedSplitHintSpec.split( // 
segmentIdToSize is sorted by segment ID; useful for grouping up segments from the same time chunk into // the same input split. segmentIdToSize.keySet().iterator(), segmentId -> new InputFileAttribute( Preconditions.checkNotNull(segmentIdToSize.get(segmentId), "segment size for [%s]", segmentId) ) ), InputSplit::new ); } /** * Returns a map of {@link WindowedSegmentId} to size, sorted by {@link WindowedSegmentId#getSegmentId()}. */ private static SortedMap<WindowedSegmentId, Long> createWindowedSegmentIdFromTimeline( List<TimelineObjectHolder<String, DataSegment>> timelineHolders ) { Map<DataSegment, WindowedSegmentId> windowedSegmentIds = new HashMap<>(); for (TimelineObjectHolder<String, DataSegment> holder : timelineHolders) { for (PartitionChunk<DataSegment> chunk : holder.getObject()) { windowedSegmentIds.computeIfAbsent( chunk.getObject(), segment -> new WindowedSegmentId(segment.getId().toString(), new ArrayList<>()) ).addInterval(holder.getInterval()); } } // It is important to create this map after windowedSegmentIds is completely filled, because WindowedSegmentIds // can be updated while being constructed. (Intervals are added.) SortedMap<WindowedSegmentId, Long> segmentSizeMap = new TreeMap<>(WINDOWED_SEGMENT_ID_COMPARATOR); windowedSegmentIds.forEach((segment, segmentId) -> segmentSizeMap.put(segmentId, segment.getSize())); return segmentSizeMap; } public static List<TimelineObjectHolder<String, DataSegment>> getTimelineForInterval( CoordinatorClient coordinatorClient, RetryPolicyFactory retryPolicyFactory, String dataSource, Interval interval ) { Preconditions.checkNotNull(interval); // This call used to use the TaskActionClient, so for compatibility we use the same retry configuration // as TaskActionClient. 
final RetryPolicy retryPolicy = retryPolicyFactory.makeRetryPolicy(); Collection<DataSegment> usedSegments; while (true) { try { usedSegments = coordinatorClient.fetchUsedSegmentsInDataSourceForIntervals( dataSource, Collections.singletonList(interval) ); break; } catch (Throwable e) { LOG.warn(e, "Exception getting database segments"); final Duration delay = retryPolicy.getAndIncrementRetryDelay(); if (delay == null) { throw e; } else { final long sleepTime = jitter(delay.getMillis()); LOG.info("Will try again in [%s].", new Duration(sleepTime).toString()); try { Thread.sleep(sleepTime); } catch (InterruptedException e2) { throw new RuntimeException(e2); } } } } return VersionedIntervalTimeline.forSegments(usedSegments).lookup(interval); } public static List<TimelineObjectHolder<String, DataSegment>> getTimelineForSegmentIds( CoordinatorClient coordinatorClient, String dataSource, List<WindowedSegmentId> segmentIds ) { final SortedMap<Interval, TimelineObjectHolder<String, DataSegment>> timeline = new TreeMap<>( Comparators.intervalsByStartThenEnd() ); for (WindowedSegmentId windowedSegmentId : Preconditions.checkNotNull(segmentIds, "segmentIds")) { final DataSegment segment = coordinatorClient.fetchUsedSegment( dataSource, windowedSegmentId.getSegmentId() ); for (Interval interval : windowedSegmentId.getIntervals()) { final TimelineObjectHolder<String, DataSegment> existingHolder = timeline.get(interval); if (existingHolder != null) { if (!existingHolder.getVersion().equals(segment.getVersion())) { throw new ISE("Timeline segments with the same interval should have the same version: " + "existing version[%s] vs new segment[%s]", existingHolder.getVersion(), segment); } existingHolder.getObject().add(segment.getShardSpec().createChunk(segment)); } else { timeline.put( interval, new TimelineObjectHolder<>( interval, segment.getInterval(), segment.getVersion(), new PartitionHolder<>(segment.getShardSpec().createChunk(segment)) ) ); } } } // Validate that none of the 
given windows overlaps (except for when multiple segments share exactly the // same interval). Interval lastInterval = null; for (Interval interval : timeline.keySet()) { if (lastInterval != null && interval.overlaps(lastInterval)) { throw new IAE( "Distinct intervals in input segments may not overlap: [%s] vs [%s]", lastInterval, interval ); } lastInterval = interval; } return new ArrayList<>(timeline.values()); } private static long jitter(long input) { final double jitter = ThreadLocalRandom.current().nextGaussian() * input / 4.0; long retval = input + (long) jitter; return retval < 0 ? 0 : retval; } }
apache-2.0
joewalnes/idea-community
java/java-tests/testData/codeInsight/completion/style/AfterNew15-out.java
329
import java.io.File; import java.io.FilenameFilter; class A { { new java.io.File("aaa").list(new FilenameFilter() { public boolean accept(File dir, String name) { <selection>return false; //To change body of implemented methods use File | Settings | File Templates.</selection> } }); } }
apache-2.0
adessaigne/camel
core/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithLambdaTest.java
5390
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.processor.interceptor; import org.apache.camel.CamelExecutionException; import org.apache.camel.ContextTestSupport; import org.apache.camel.builder.AdviceWithRouteBuilder; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.model.RouteDefinition; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; public class AdviceWithLambdaTest extends ContextTestSupport { @Test public void testNoAdvised() throws Exception { getMockEndpoint("mock:foo").expectedMessageCount(1); getMockEndpoint("mock:result").expectedMessageCount(1); template.sendBody("direct:start", "Hello World"); assertMockEndpointsSatisfied(); } @Test public void testAdvised() throws Exception { AdviceWithRouteBuilder.adviceWith(context, null, a -> { a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised"); }); getMockEndpoint("mock:foo").expectedMessageCount(0); getMockEndpoint("mock:advised").expectedMessageCount(1); getMockEndpoint("mock:result").expectedMessageCount(1); template.sendBody("direct:start", "Hello World"); assertMockEndpointsSatisfied(); } // END 
SNIPPET: e1 @Test public void testAdvisedNoLog() throws Exception { AdviceWithRouteBuilder.adviceWith(context, null, false, a -> { a.weaveByToUri("mock:result").remove(); a.weaveAddLast().transform().constant("Bye World"); }); getMockEndpoint("mock:foo").expectedMessageCount(1); getMockEndpoint("mock:result").expectedMessageCount(0); Object out = template.requestBody("direct:start", "Hello World"); assertEquals("Bye World", out); assertMockEndpointsSatisfied(); } @Test public void testAdvisedNoNewRoutesAllowed() throws Exception { try { AdviceWithRouteBuilder.adviceWith(context, 0, a -> { a.from("direct:bar").to("mock:bar"); a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised"); }); fail("Should have thrown exception"); } catch (IllegalArgumentException e) { // expected } } @Test public void testAdvisedThrowException() throws Exception { AdviceWithRouteBuilder.adviceWith(context, "myRoute", a -> { a.interceptSendToEndpoint("mock:foo").to("mock:advised").throwException(new IllegalArgumentException("Damn")); }); getMockEndpoint("mock:foo").expectedMessageCount(0); getMockEndpoint("mock:advised").expectedMessageCount(1); getMockEndpoint("mock:result").expectedMessageCount(0); try { template.sendBody("direct:start", "Hello World"); fail("Should have thrown exception"); } catch (CamelExecutionException e) { assertIsInstanceOf(IllegalArgumentException.class, e.getCause()); assertEquals("Damn", e.getCause().getMessage()); } assertMockEndpointsSatisfied(); } @Test public void testAdvisedRouteDefinition() throws Exception { AdviceWithRouteBuilder.adviceWith(context, context.getRouteDefinitions().get(0), a -> { a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised"); }); getMockEndpoint("mock:foo").expectedMessageCount(0); getMockEndpoint("mock:advised").expectedMessageCount(1); getMockEndpoint("mock:result").expectedMessageCount(1); template.sendBody("direct:start", "Hello World"); 
assertMockEndpointsSatisfied(); } @Test public void testAdvisedEmptyRouteDefinition() throws Exception { try { AdviceWithRouteBuilder.adviceWith(context, new RouteDefinition(), a -> { a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised"); }); fail("Should throw exception"); } catch (IllegalArgumentException e) { // expected } } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { from("direct:start").id("myRoute").to("mock:foo").to("mock:result"); } }; } }
apache-2.0
YAJATapps/FlickLauncher
src/com/android/launcher3/FolderInfo.java
4146
/* * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.launcher3; import android.content.ContentValues; import android.content.Context; import com.android.launcher3.compat.UserHandleCompat; import java.util.ArrayList; /** * Represents a folder containing shortcuts or apps. */ public class FolderInfo extends ItemInfo { public static final int NO_FLAGS = 0x00000000; /** * The folder is locked in sorted mode */ public static final int FLAG_ITEMS_SORTED = 0x00000001; /** * It is a work folder */ public static final int FLAG_WORK_FOLDER = 0x00000002; /** * The multi-page animation has run for this folder */ public static final int FLAG_MULTI_PAGE_ANIMATION = 0x00000004; /** * Whether this folder has been opened */ public boolean opened; public int options; /** * The apps and shortcuts */ public ArrayList<ShortcutInfo> contents = new ArrayList<ShortcutInfo>(); ArrayList<FolderListener> listeners = new ArrayList<FolderListener>(); public FolderInfo() { itemType = LauncherSettings.Favorites.ITEM_TYPE_FOLDER; user = UserHandleCompat.myUserHandle(); } /** * Add an app or shortcut * * @param item */ public void add(ShortcutInfo item, boolean animate) { contents.add(item); for (int i = 0; i < listeners.size(); i++) { listeners.get(i).onAdd(item); } itemsChanged(animate); } /** * Remove an app or shortcut. Does not change the DB. 
* * @param item */ public void remove(ShortcutInfo item, boolean animate) { contents.remove(item); for (int i = 0; i < listeners.size(); i++) { listeners.get(i).onRemove(item); } itemsChanged(animate); } public void setTitle(CharSequence title) { this.title = title; for (int i = 0; i < listeners.size(); i++) { listeners.get(i).onTitleChanged(title); } } @Override void onAddToDatabase(Context context, ContentValues values) { super.onAddToDatabase(context, values); values.put(LauncherSettings.Favorites.TITLE, title.toString()); values.put(LauncherSettings.Favorites.OPTIONS, options); } public void addListener(FolderListener listener) { listeners.add(listener); } public void removeListener(FolderListener listener) { listeners.remove(listener); } public void itemsChanged(boolean animate) { for (int i = 0; i < listeners.size(); i++) { listeners.get(i).onItemsChanged(animate); } } public interface FolderListener { public void onAdd(ShortcutInfo item); public void onRemove(ShortcutInfo item); public void onTitleChanged(CharSequence title); public void onItemsChanged(boolean animate); } public boolean hasOption(int optionFlag) { return (options & optionFlag) != 0; } /** * @param option flag to set or clear * @param isEnabled whether to set or clear the flag * @param context if not null, save changes to the db. */ public void setOption(int option, boolean isEnabled, Context context) { int oldOptions = options; if (isEnabled) { options |= option; } else { options &= ~option; } if (context != null && oldOptions != options) { LauncherModel.updateItemInDatabase(context, this); } } }
apache-2.0
BrightTag/agathon
agathon-manager/src/main/java/com/brighttag/agathon/dao/CassandraInstanceDao.java
2133
/* * Copyright 2014 BrightTag, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.brighttag.agathon.dao; import javax.annotation.Nullable; import com.google.common.collect.ImmutableSet; import com.brighttag.agathon.model.CassandraInstance; /** * DAO for Cassandra Instances. * * @author codyaray * @since 5/12/2012 */ public interface CassandraInstanceDao { /** * Returns the set of Cassandra instances in a ring. * * @param ring name of the Cassandra ring * @return set of Cassandra instances in the ring * @throws BackingStoreException if there was a problem communicating with the backing store. */ ImmutableSet<CassandraInstance> findAll(String ring) throws BackingStoreException; /** * Returns the Cassandra instance with the given {@code id} or {@code null} if not found. * * @param ring name of the Cassandra ring * @param id the Cassandra instance ID * @return the Cassandra instance or {@code null} if not found * @throws BackingStoreException if there was a problem communicating with the backing store. */ @Nullable CassandraInstance findById(String ring, int id) throws BackingStoreException; /** * Saves the Cassandra {@code instance}. * * @param ring name of the Cassandra ring * @param instance the Cassandra instance */ void save(String ring, CassandraInstance instance); /** * Deletes the Cassandra {@code instance}. * * @param ring name of the Cassandra ring * @param instance the Cassandra instance */ void delete(String ring, CassandraInstance instance); }
apache-2.0
yadihaoku/android-sliding-layer-lib
SlidingLayerSample/src/main/java/com/wunderlist/slidinglayersample/MainActivity.java
6768
/* * MainActivity.java * * Copyright (C) 2013 6 Wunderkinder GmbH. * * @author Jose L Ugia - @Jl_Ugia * @author Antonio Consuegra - @aconsuegra * @author Cesar Valiente - @CesarValiente * @author Benedikt Lehnert - @blehnert * @author Timothy Achumba - @iam_timm * @version 1.0 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.wunderlist.slidinglayersample; import android.annotation.SuppressLint; import android.app.Activity; import android.content.SharedPreferences; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Bundle; import android.preference.PreferenceManager; import android.view.KeyEvent; import android.view.MenuItem; import android.view.View; import android.widget.RelativeLayout.LayoutParams; import android.widget.TextView; import com.wunderlist.slidinglayer.LayerTransformer; import com.wunderlist.slidinglayer.SlidingLayer; import com.wunderlist.slidinglayer.transformer.AlphaTransformer; import com.wunderlist.slidinglayer.transformer.RotationTransformer; import com.wunderlist.slidinglayer.transformer.SlideJoyTransformer; public class MainActivity extends Activity { private SlidingLayer mSlidingLayer; private TextView swipeText; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); bindViews(); initState(); } @SuppressLint("NewApi") @Override protected void onResume() { super.onResume(); if (Build.VERSION.SDK_INT >= 
Build.VERSION_CODES.HONEYCOMB) { getActionBar().setDisplayHomeAsUpEnabled(true); } } /** * View binding */ private void bindViews() { mSlidingLayer = (SlidingLayer) findViewById(R.id.slidingLayer1); swipeText = (TextView) findViewById(R.id.swipeText); } /** * Initializes the origin state of the layer */ private void initState() { SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this); setupSlidingLayerPosition(prefs.getString("layer_location", "right")); setupSlidingLayerTransform(prefs.getString("layer_transform", "none")); setupShadow(prefs.getBoolean("layer_has_shadow", false)); setupLayerOffset(prefs.getBoolean("layer_has_offset", false)); setupPreviewMode(prefs.getBoolean("preview_mode_enabled", false)); } private void setupSlidingLayerPosition(String layerPosition) { LayoutParams rlp = (LayoutParams) mSlidingLayer.getLayoutParams(); int textResource; Drawable d; switch (layerPosition) { case "right": textResource = R.string.swipe_right_label; d = getResources().getDrawable(R.drawable.container_rocket_right); mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_RIGHT); break; case "left": textResource = R.string.swipe_left_label; d = getResources().getDrawable(R.drawable.container_rocket_left); mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_LEFT); break; case "top": textResource = R.string.swipe_up_label; d = getResources().getDrawable(R.drawable.container_rocket); mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_TOP); rlp.width = LayoutParams.MATCH_PARENT; rlp.height = getResources().getDimensionPixelSize(R.dimen.layer_size); break; default: textResource = R.string.swipe_down_label; d = getResources().getDrawable(R.drawable.container_rocket); mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_BOTTOM); rlp.width = LayoutParams.MATCH_PARENT; rlp.height = getResources().getDimensionPixelSize(R.dimen.layer_size); } d.setBounds(0, 0, d.getIntrinsicWidth(), d.getIntrinsicHeight()); swipeText.setCompoundDrawables(null, d, null, null); 
swipeText.setText(getResources().getString(textResource)); mSlidingLayer.setLayoutParams(rlp); } private void setupSlidingLayerTransform(String layerTransform) { LayerTransformer transformer; switch (layerTransform) { case "alpha": transformer = new AlphaTransformer(); break; case "rotation": transformer = new RotationTransformer(); break; case "slide": transformer = new SlideJoyTransformer(); break; default: return; } mSlidingLayer.setLayerTransformer(transformer); } private void setupShadow(boolean enabled) { if (enabled) { mSlidingLayer.setShadowSizeRes(R.dimen.shadow_size); mSlidingLayer.setShadowDrawable(R.drawable.sidebar_shadow); } else { mSlidingLayer.setShadowSize(0); mSlidingLayer.setShadowDrawable(null); } } private void setupLayerOffset(boolean enabled) { int offsetDistance = enabled ? getResources().getDimensionPixelOffset(R.dimen.offset_distance) : 0; mSlidingLayer.setOffsetDistance(offsetDistance); } private void setupPreviewMode(boolean enabled) { int previewOffset = enabled ? getResources().getDimensionPixelOffset(R.dimen.preview_offset_distance) : -1; mSlidingLayer.setPreviewOffsetDistance(previewOffset); } public void buttonClicked(View v) { switch (v.getId()) { case R.id.buttonOpen: mSlidingLayer.openLayer(true); break; case R.id.buttonClose: mSlidingLayer.closeLayer(true); break; } } @Override public boolean onKeyDown(int keyCode, KeyEvent event) { switch (keyCode) { case KeyEvent.KEYCODE_BACK: if (mSlidingLayer.isOpened()) { mSlidingLayer.closeLayer(true); return true; } default: return super.onKeyDown(keyCode, event); } } @Override public boolean onOptionsItemSelected(MenuItem item) { finish(); return true; } }
apache-2.0
intalio/axis2
modules/adb/test/org/apache/axis2/databinding/ClientInfo.java
6169
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /** * ClientInfo.java * * This file was auto-generated from WSDL * by the Apache Axis2 version: #axisVersion# #today# */ package org.apache.axis2.databinding; import org.apache.axiom.om.OMFactory; import org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter; import javax.xml.namespace.QName; import javax.xml.stream.XMLStreamException; /** ClientInfo bean class */ public class ClientInfo implements org.apache.axis2.databinding.ADBBean { /* This type was generated from the piece of schema that had name = ClientInfo Namespace URI = http://www.wso2.com/types Namespace Prefix = ns1 */ public ClientInfo(String localName, String localSsn) { this.localName = localName; this.localSsn = localSsn; } public ClientInfo() { } /** field for Name */ protected java.lang.String localName; /** * Auto generated getter method * * @return java.lang.String */ public java.lang.String getName() { return localName; } /** * Auto generated setter method * * @param param Name */ public void setName(java.lang.String param) { this.localName = param; } /** field for Ssn */ protected java.lang.String localSsn; /** * Auto generated getter method * * @return java.lang.String */ public java.lang.String getSsn() { 
return localSsn; } /** * Auto generated setter method * * @param param Ssn */ public void setSsn(java.lang.String param) { this.localSsn = param; } /** databinding method to get an XML representation of this object */ public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName) { java.util.ArrayList elementList = new java.util.ArrayList(); java.util.ArrayList attribList = new java.util.ArrayList(); elementList.add(new javax.xml.namespace.QName("http://www.wso2.com/types", "name")); elementList .add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localName)); elementList.add(new javax.xml.namespace.QName("http://www.wso2.com/types", "ssn")); elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localSsn)); return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl (qName, elementList.toArray(), attribList.toArray()); } public void serialize(final QName parentQName, final OMFactory factory, MTOMAwareXMLStreamWriter xmlWriter) throws XMLStreamException, ADBException { serialize(parentQName,factory,xmlWriter,false); } public void serialize(final QName parentQName, final OMFactory factory, MTOMAwareXMLStreamWriter xmlWriter, boolean serializeType) throws XMLStreamException, ADBException { throw new UnsupportedOperationException("Un implemented method"); } /** Factory class that keeps the parse method */ public static class Factory { /** static method to create the object */ public static ClientInfo parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception { ClientInfo object = new ClientInfo(); try { int event = reader.getEventType(); int count = 0; int argumentCount = 2; boolean done = false; //event better be a START_ELEMENT. 
if not we should go up to the start element here while (!reader.isStartElement()) { event = reader.next(); } while (!done) { if (javax.xml.stream.XMLStreamConstants.START_ELEMENT == event) { if ("name".equals(reader.getLocalName())) { String content = reader.getElementText(); object.setName( org.apache.axis2.databinding.utils.ConverterUtil.convertToString( content)); count++; } if ("ssn".equals(reader.getLocalName())) { String content = reader.getElementText(); object.setSsn( org.apache.axis2.databinding.utils.ConverterUtil.convertToString( content)); count++; } } if (argumentCount == count) { done = true; } if (!done) { event = reader.next(); } } } catch (javax.xml.stream.XMLStreamException e) { throw new java.lang.Exception(e); } return object; } }//end of factory class }
apache-2.0
deeplearning4j/deeplearning4j
datavec/datavec-api/src/main/java/org/datavec/api/transform/transform/time/DeriveColumnsFromTimeTransform.java
15108
/*******************************************************************************
 * Copyright (c) 2015-2018 Skymind, Inc.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/

package org.datavec.api.transform.transform.time;

import lombok.Data;
import lombok.EqualsAndHashCode;
import org.datavec.api.transform.ColumnType;
import org.datavec.api.transform.Transform;
import org.datavec.api.transform.metadata.ColumnMetaData;
import org.datavec.api.transform.metadata.IntegerMetaData;
import org.datavec.api.transform.metadata.StringMetaData;
import org.datavec.api.transform.metadata.TimeMetaData;
import org.datavec.api.transform.schema.Schema;
import org.datavec.api.util.jackson.DateTimeFieldTypeDeserializer;
import org.datavec.api.util.jackson.DateTimeFieldTypeSerializer;
import org.datavec.api.writable.IntWritable;
import org.datavec.api.writable.Text;
import org.datavec.api.writable.Writable;
import org.joda.time.DateTime;
import org.joda.time.DateTimeFieldType;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.nd4j.shade.jackson.annotation.JsonIgnore;
import org.nd4j.shade.jackson.annotation.JsonIgnoreProperties;
import org.nd4j.shade.jackson.annotation.JsonInclude;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import org.nd4j.shade.jackson.databind.annotation.JsonDeserialize;
import org.nd4j.shade.jackson.databind.annotation.JsonSerialize;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

/**
 * Create a number of new columns by deriving their values from a Time column.
 * Can be used for example to create new columns with the year, month, day, hour, minute, second etc.
 *
 * @author Alex Black
 */
// inputSchema/insertAfterIdx/deriveFromIdx are runtime state (re-derived via setInputSchema),
// so they are excluded from both JSON serialization and equality.
@JsonIgnoreProperties({"inputSchema", "insertAfterIdx", "deriveFromIdx"})
@EqualsAndHashCode(exclude = {"inputSchema", "insertAfterIdx", "deriveFromIdx"})
@Data
public class DeriveColumnsFromTimeTransform implements Transform {

    // Name of the source time column the new columns are derived from.
    private final String columnName;
    // Name of the column after which the derived columns are inserted.
    private final String insertAfter;
    // Time zone used when deriving integer fields; taken from the source column's
    // TimeMetaData in setInputSchema (may also arrive via JSON).
    private DateTimeZone inputTimeZone;
    private final List<DerivedColumn> derivedColumns;
    private Schema inputSchema;
    // Indices into the input schema; -1 until setInputSchema is called.
    private int insertAfterIdx = -1;
    private int deriveFromIdx = -1;

    private DeriveColumnsFromTimeTransform(Builder builder) {
        this.derivedColumns = builder.derivedColumns;
        this.columnName = builder.columnName;
        this.insertAfter = builder.insertAfter;
    }

    /** JSON deserialization constructor. */
    public DeriveColumnsFromTimeTransform(@JsonProperty("columnName") String columnName,
                    @JsonProperty("insertAfter") String insertAfter,
                    @JsonProperty("inputTimeZone") DateTimeZone inputTimeZone,
                    @JsonProperty("derivedColumns") List<DerivedColumn> derivedColumns) {
        this.columnName = columnName;
        this.insertAfter = insertAfter;
        this.inputTimeZone = inputTimeZone;
        this.derivedColumns = derivedColumns;
    }

    /**
     * Build the output schema: a copy of the input schema with the derived columns'
     * metadata inserted immediately after the {@code insertAfter} column.
     */
    @Override
    public Schema transform(Schema inputSchema) {
        List<ColumnMetaData> oldMeta = inputSchema.getColumnMetaData();
        List<ColumnMetaData> newMeta = new ArrayList<>(oldMeta.size() + derivedColumns.size());

        List<String> oldNames = inputSchema.getColumnNames();

        for (int i = 0; i < oldMeta.size(); i++) {
            String current = oldNames.get(i);
            newMeta.add(oldMeta.get(i));

            if (insertAfter.equals(current)) {
                //Insert the derived columns here
                for (DerivedColumn d : derivedColumns) {
                    switch (d.columnType) {
                        case String:
                            newMeta.add(new StringMetaData(d.columnName));
                            break;
                        case Integer:
                            newMeta.add(new IntegerMetaData(d.columnName)); //TODO: ranges... if it's a day, we know it must be 1 to 31, etc...
                            break;
                        default:
                            throw new IllegalStateException("Unexpected column type: " + d.columnType);
                    }
                }
            }
        }

        return inputSchema.newSchema(newMeta);
    }

    /**
     * Cache the input schema and resolve the column indices used by {@link #map(List)}.
     * Also picks up the time zone from the source column's {@link TimeMetaData}.
     *
     * @throws IllegalStateException if either named column is absent, or the source
     *         column is not a time column
     */
    @Override
    public void setInputSchema(Schema inputSchema) {
        insertAfterIdx = inputSchema.getColumnNames().indexOf(insertAfter);
        if (insertAfterIdx == -1) {
            throw new IllegalStateException(
                            "Invalid schema/insert after column: input schema does not contain column \"" + insertAfter
                                            + "\"");
        }

        deriveFromIdx = inputSchema.getColumnNames().indexOf(columnName);
        if (deriveFromIdx == -1) {
            throw new IllegalStateException(
                            "Invalid source column: input schema does not contain column \"" + columnName + "\"");
        }

        this.inputSchema = inputSchema;

        if (!(inputSchema.getMetaData(columnName) instanceof TimeMetaData))
            throw new IllegalStateException("Invalid state: input column \"" + columnName
                            + "\" is not a time column. Is: " + inputSchema.getMetaData(columnName));
        TimeMetaData meta = (TimeMetaData) inputSchema.getMetaData(columnName);
        inputTimeZone = meta.getTimeZone();
    }

    @Override
    public Schema getInputSchema() {
        return inputSchema;
    }

    /**
     * Map one record: copy all input writables, inserting the derived values
     * immediately after position {@code insertAfterIdx}. The source writable is
     * read via {@code toLong()} (epoch millis, per the time column's metadata).
     */
    @Override
    public List<Writable> map(List<Writable> writables) {
        if (writables.size() != inputSchema.numColumns()) {
            throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size()
                            + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns()
                            + "). Transform = " + toString());
        }

        int i = 0;
        Writable source = writables.get(deriveFromIdx);
        List<Writable> list = new ArrayList<>(writables.size() + derivedColumns.size());
        for (Writable w : writables) {
            list.add(w);
            // i++ == insertAfterIdx: compares the index of the element just copied,
            // so derived values land directly after the insertAfter column.
            if (i++ == insertAfterIdx) {
                for (DerivedColumn d : derivedColumns) {
                    switch (d.columnType) {
                        case String:
                            list.add(new Text(d.dateTimeFormatter.print(source.toLong())));
                            break;
                        case Integer:
                            DateTime dt = new DateTime(source.toLong(), inputTimeZone);
                            list.add(new IntWritable(dt.get(d.fieldType)));
                            break;
                        default:
                            throw new IllegalStateException("Unexpected column type: " + d.columnType);
                    }
                }
            }
        }
        return list;
    }

    /** Apply {@link #map(List)} to every time step of a sequence. */
    @Override
    public List<List<Writable>> mapSequence(List<List<Writable>> sequence) {
        List<List<Writable>> out = new ArrayList<>(sequence.size());
        for (List<Writable> step : sequence) {
            out.add(map(step));
        }
        return out;
    }

    /**
     * Transform an object
     * in to another object
     *
     * @param input the record to transform (expected to be an epoch-millis {@code Long};
     *              a ClassCastException results otherwise)
     * @return the transformed writable
     */
    @Override
    public Object map(Object input) {
        List<Object> ret = new ArrayList<>();
        Long l = (Long) input;
        for (DerivedColumn d : derivedColumns) {
            switch (d.columnType) {
                case String:
                    ret.add(d.dateTimeFormatter.print(l));
                    break;
                case Integer:
                    DateTime dt = new DateTime(l, inputTimeZone);
                    ret.add(dt.get(d.fieldType));
                    break;
                default:
                    throw new IllegalStateException("Unexpected column type: " + d.columnType);
            }
        }
        return ret;
    }

    /**
     * Transform a sequence
     *
     * @param sequence expected to be a {@code List<Long>} of epoch-millis timestamps
     */
    @Override
    public Object mapSequence(Object sequence) {
        List<Long> longs = (List<Long>) sequence;
        List<List<Object>> ret = new ArrayList<>();
        for (Long l : longs)
            ret.add((List<Object>) map(l));
        return ret;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("DeriveColumnsFromTimeTransform(timeColumn=\"").append(columnName).append("\",insertAfter=\"")
                        .append(insertAfter).append("\",derivedColumns=(");

        boolean first = true;
        for (DerivedColumn d : derivedColumns) {
            if (!first)
                sb.append(",");
            sb.append(d);
            first = false;
        }

        sb.append("))");
        return sb.toString();
    }

    /**
     * The output column name
     * after the operation has been applied
     *
     * @return the output column name
     */
    @Override
    public String outputColumnName() {
        return outputColumnNames()[0];
    }

    /**
     * The output column names
     * This will often be the same as the input
     *
     * @return the output column names
     */
    @Override
    public String[] outputColumnNames() {
        String[] ret = new String[derivedColumns.size()];
        for (int i = 0; i < ret.length; i++)
            ret[i] = derivedColumns.get(i).columnName;
        return ret;
    }

    /**
     * Returns column names
     * this op is meant to run on
     *
     * @return
     */
    @Override
    public String[] columnNames() {
        return new String[] {columnName()};
    }

    /**
     * Returns a singular column name
     * this op is meant to run on
     *
     * @return
     */
    @Override
    public String columnName() {
        return columnName;
    }

    /** Fluent builder for {@link DeriveColumnsFromTimeTransform}. */
    public static class Builder {

        private final String columnName;
        private String insertAfter;
        private final List<DerivedColumn> derivedColumns = new ArrayList<>();


        /**
         * @param timeColumnName The name of the time column from which to derive the new values
         */
        public Builder(String timeColumnName) {
            this.columnName = timeColumnName;
            // By default derived columns are inserted right after the source column itself.
            this.insertAfter = timeColumnName;
        }

        /**
         * Where should the new columns be inserted?
         * By default, they will be inserted after the source column
         *
         * @param columnName Name of the column to insert the derived columns after
         */
        public Builder insertAfter(String columnName) {
            this.insertAfter = columnName;
            return this;
        }

        /**
         * Add a String column (for example, human readable format), derived from the time
         *
         * @param columnName Name of the new/derived column
         * @param format     Joda time format, as per <a href="http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html">http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html</a>
         * @param timeZone   Timezone to use for formatting
         */
        public Builder addStringDerivedColumn(String columnName, String format, DateTimeZone timeZone) {
            derivedColumns.add(new DerivedColumn(columnName, ColumnType.String, format, timeZone, null));
            return this;
        }

        /**
         * Add an integer derived column - for example, the hour of day, etc. Uses timezone from the time column metadata
         *
         * @param columnName Name of the column
         * @param type       Type of field (for example, DateTimeFieldType.hourOfDay() etc)
         */
        public Builder addIntegerDerivedColumn(String columnName, DateTimeFieldType type) {
            derivedColumns.add(new DerivedColumn(columnName, ColumnType.Integer, null, null, type));
            return this;
        }

        /**
         * Create the transform instance
         */
        public DeriveColumnsFromTimeTransform build() {
            return new DeriveColumnsFromTimeTransform(this);
        }
    }

    /**
     * Specification of a single derived column: either a formatted String column
     * (format + zone) or an Integer column (a Joda {@link DateTimeFieldType}).
     */
    @JsonInclude(JsonInclude.Include.NON_NULL)
    @EqualsAndHashCode(exclude = "dateTimeFormatter")
    @Data
    @JsonIgnoreProperties({"dateTimeFormatter"})
    public static class DerivedColumn implements Serializable {

        private final String columnName;
        private final ColumnType columnType;
        private final String format;
        private final DateTimeZone dateTimeZone;
        // DateTimeFieldType has no default Jackson support; custom (de)serializers are used.
        @JsonSerialize(using = DateTimeFieldTypeSerializer.class)
        @JsonDeserialize(using = DateTimeFieldTypeDeserializer.class)
        private final DateTimeFieldType fieldType;
        // Rebuilt on demand (constructor / readObject); DateTimeFormatter itself is not Serializable.
        private transient DateTimeFormatter dateTimeFormatter;

        //        public DerivedColumn(String columnName, ColumnType columnType, String format, DateTimeZone dateTimeZone, DateTimeFieldType fieldType) {
        public DerivedColumn(@JsonProperty("columnName") String columnName,
                        @JsonProperty("columnType") ColumnType columnType, @JsonProperty("format") String format,
                        @JsonProperty("dateTimeZone") DateTimeZone dateTimeZone,
                        @JsonProperty("fieldType") DateTimeFieldType fieldType) {
            this.columnName = columnName;
            this.columnType = columnType;
            this.format = format;
            this.dateTimeZone = dateTimeZone;
            this.fieldType = fieldType;
            if (format != null)
                dateTimeFormatter = DateTimeFormat.forPattern(this.format).withZone(dateTimeZone);
        }

        @Override
        public String toString() {
            return "(name=" + columnName + ",type=" + columnType + ",derived=" + (format != null ? format : fieldType)
                            + ")";
        }

        //Custom serialization methods, because Joda Time doesn't allow DateTimeFormatter objects to be serialized :(
        private void writeObject(ObjectOutputStream out) throws IOException {
            out.defaultWriteObject();
        }

        private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
            in.defaultReadObject();
            // Restore the transient formatter after default deserialization.
            if (format != null)
                dateTimeFormatter = DateTimeFormat.forPattern(format).withZone(dateTimeZone);
        }
    }
}
apache-2.0
Humbinal/java-items
hum-web/hum-ssm/src/test/java/com/humbinal/ssm/test/User.java
616
package com.humbinal.ssm.test; public class User { private long user_Id; private String user_name; private int user_age; public User() { } public long getUser_Id() { return user_Id; } public void setUser_Id(long user_Id) { this.user_Id = user_Id; } public String getUser_name() { return user_name; } public void setUser_name(String user_name) { this.user_name = user_name; } public int getUser_age() { return user_age; } public void setUser_age(int user_age) { this.user_age = user_age; } }
apache-2.0
apache/tapestry3
tapestry-examples/VlibBeans/src/org/apache/tapestry/vlib/ejb/impl/OperationsBean.java
33019
// Copyright 2004 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.apache.tapestry.vlib.ejb.impl;

import java.rmi.RemoteException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.ejb.CreateException;
import javax.ejb.FinderException;
import javax.ejb.RemoveException;
import javax.ejb.SessionBean;
import javax.ejb.SessionContext;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.rmi.PortableRemoteObject;
import javax.sql.DataSource;

import org.apache.tapestry.Tapestry;
import org.apache.tapestry.contrib.ejb.XCreateException;
import org.apache.tapestry.contrib.ejb.XEJBException;
import org.apache.tapestry.contrib.ejb.XRemoveException;
import org.apache.tapestry.contrib.jdbc.IStatement;
import org.apache.tapestry.contrib.jdbc.StatementAssembly;
import org.apache.tapestry.vlib.ejb.Book;
import org.apache.tapestry.vlib.ejb.BorrowException;
import org.apache.tapestry.vlib.ejb.IBook;
import org.apache.tapestry.vlib.ejb.IBookHome;
import org.apache.tapestry.vlib.ejb.IPerson;
import org.apache.tapestry.vlib.ejb.IPersonHome;
import org.apache.tapestry.vlib.ejb.IPublisher;
import org.apache.tapestry.vlib.ejb.IPublisherHome;
import org.apache.tapestry.vlib.ejb.LoginException;
import org.apache.tapestry.vlib.ejb.Person;
import org.apache.tapestry.vlib.ejb.Publisher;
import org.apache.tapestry.vlib.ejb.RegistrationException;
import org.apache.tapestry.vlib.ejb.SortColumn;
import org.apache.tapestry.vlib.ejb.SortOrdering;

/**
 *  Implementation of the {@link org.apache.tapestry.vlib.ejb.IOperations}
 *  stateless session bean.
 *
 *  <p>Implements a number of stateless operations for the front end.
 *  Mixes entity-bean access (for updates) with direct JDBC queries
 *  (for reads), using {@link StatementAssembly} to build the SQL.
 *
 *  @version $Id$
 *  @author Howard Lewis Ship
 *
 **/

public class OperationsBean implements SessionBean
{
    private SessionContext _context;

    // JNDI environment context (java:comp/env); transient because stateless
    // session bean state is not passivated meaningfully.
    private transient Context _environment;
    // Lazily-looked-up EJB home interfaces; see getBookHome() etc.
    private transient IBookHome _bookHome;
    private transient IPersonHome _personHome;
    private transient IPublisherHome _publisherHome;

    /**
     *  Data source, retrieved from the ENC property
     *  "jdbc/dataSource".
     *
     **/

    private transient DataSource _dataSource;

    /**
     *  Sets up the bean.  Locates the {@link DataSource} for the bean
     *  as <code>jdbc/dataSource</code> within the ENC; this data source is
     *  later used by {@link #getConnection()}.
     *
     **/

    public void ejbCreate()
    {
        Context initial;

        try
        {
            initial = new InitialContext();
            _environment = (Context) initial.lookup("java:comp/env");
        }
        catch (NamingException e)
        {
            throw new XEJBException("Could not lookup environment.", e);
        }

        try
        {
            _dataSource = (DataSource) _environment.lookup("jdbc/dataSource");
        }
        catch (NamingException e)
        {
            e.printStackTrace();

            throw new XEJBException("Could not lookup data source.", e);
        }
    }

    public void ejbRemove()
    {
    }

    /**
     *  Does nothing, not invoked in stateless session beans.
     **/

    public void ejbPassivate()
    {
    }

    public void setSessionContext(SessionContext value)
    {
        _context = value;
    }

    /**
     *  Does nothing, not invoked in stateless session beans.
     *
     **/

    public void ejbActivate()
    {
    }

    /**
     *  Finds the book and borrower (by their primary keys) and updates the book.
     *
     *  <p>The {@link Book} value object is returned.
     *
     **/

    public Book borrowBook(Integer bookId, Integer borrowerId)
        throws FinderException, RemoteException, BorrowException
    {
        IBookHome bookHome = getBookHome();
        IPersonHome personHome = getPersonHome();

        IBook book = bookHome.findByPrimaryKey(bookId);

        if (!book.getLendable())
            throw new BorrowException("Book may not be borrowed.");

        // Verify that the borrower exists.

        personHome.findByPrimaryKey(borrowerId);

        // TBD:  Check that borrower has authenticated

        // findByPrimaryKey() throws an exception if the EJB doesn't exist,
        // so we're safe.

        personHome.findByPrimaryKey(book.getOwnerId());

        // Here's the real work; just setting the holder of the book
        // to be the borrower.

        book.setHolderId(borrowerId);

        return getBook(bookId);
    }

    /**
     *  Adds a new book, verifying that the publisher and holder actually exist.
     *  Stamps the book with the current time as its dateAdded attribute.
     *
     **/

    public Integer addBook(Map attributes) throws CreateException, RemoteException
    {
        IBookHome home = getBookHome();

        attributes.put("dateAdded", new Timestamp(System.currentTimeMillis()));

        IBook book = home.create(attributes);

        return (Integer) book.getPrimaryKey();
    }

    /**
     *  Adds a book, which will be owned and held by the specified owner.
     *
     * <p>The publisherName may either be the name of a known publisher, or
     * a new name.  A new {@link IPublisher} will be created as necessary.
     *
     * <p>Returns the newly created book, as a {@link Map} of attributes.
     *
     **/

    public Integer addBook(Map attributes, String publisherName)
        throws CreateException, RemoteException
    {
        IPublisher publisher = null;
        IPublisherHome publisherHome = getPublisherHome();

        // Find or create the publisher.

        try
        {
            publisher = publisherHome.findByName(publisherName);
        }
        catch (FinderException e)
        {
            // Ignore, means that no publisher with the given name already exists.
        }

        if (publisher == null)
            publisher = publisherHome.create(publisherName);

        attributes.put("publisherId", publisher.getPrimaryKey());

        return addBook(attributes);
    }

    /**
     *  Updates a book's entity attributes in place.
     *
     *  @param bookId The primary key of the book to update.
     *
     **/

    public void updateBook(Integer bookId, Map attributes)
        throws FinderException, RemoteException
    {
        IBookHome bookHome = getBookHome();

        IBook book = bookHome.findByPrimaryKey(bookId);

        book.updateEntityAttributes(attributes);
    }

    /**
     *  Updates a book, adding a new Publisher at the same time.
     *
     *
     *  @param bookId The primary key of the book to update.
     *  @param attributes attributes to change
     *  @param publisherName The name of the new publisher.
     *  @throws FinderException if the book, holder or publisher can not be located.
     *  @throws CreateException if the {@link IPublisher} can not be created.
     **/

    public void updateBook(Integer bookId, Map attributes, String publisherName)
        throws CreateException, FinderException, RemoteException
    {
        IPublisher publisher = null;

        IPublisherHome publisherHome = getPublisherHome();

        try
        {
            publisher = publisherHome.findByName(publisherName);
        }
        catch (FinderException e)
        {
            // Ignore, means we need to create the Publisher
        }

        if (publisher == null)
            publisher = publisherHome.create(publisherName);

        // Don't duplicate all that other code!

        attributes.put("publisherId", publisher.getPrimaryKey());

        updateBook(bookId, attributes);
    }

    /**
     *  Updates a person's entity attributes in place.
     *
     **/

    public void updatePerson(Integer personId, Map attributes)
        throws FinderException, RemoteException
    {
        IPersonHome home = getPersonHome();

        IPerson person = home.findByPrimaryKey(personId);

        person.updateEntityAttributes(attributes);
    }

    /**
     *  Fetches all publishers via direct JDBC, sorted by name.
     *
     **/

    public Publisher[] getPublishers()
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        List list = new ArrayList();

        try
        {
            connection = getConnection();

            StatementAssembly assembly = new StatementAssembly();

            assembly.newLine("SELECT PUBLISHER_ID, NAME");
            assembly.newLine("FROM PUBLISHER");
            assembly.newLine("ORDER BY NAME");

            statement = assembly.createStatement(connection);

            set = statement.executeQuery();

            while (set.next())
            {
                Integer primaryKey = (Integer) set.getObject(1);
                String name = set.getString(2);

                list.add(new Publisher(primaryKey, name));
            }
        }
        catch (SQLException ex)
        {
            ex.printStackTrace();

            throw new XEJBException("Could not fetch all Publishers.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }

        // Convert from List to Publisher[]

        return (Publisher[]) list.toArray(new Publisher[list.size()]);
    }

    /**
     *  Fetches all {@link IPerson} beans in the database and converts them
     *  to {@link Person} objects.
     *
     *  Returns the {@link Person}s sorted by last name, then first.
     **/

    public Person[] getPersons()
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        List list = new ArrayList();

        try
        {
            connection = getConnection();

            StatementAssembly assembly = buildBasePersonQuery();
            assembly.newLine("ORDER BY LAST_NAME, FIRST_NAME");

            statement = assembly.createStatement(connection);

            set = statement.executeQuery();

            // Column buffer is reused across rows; convertRowToPerson fills it fresh each time.
            Object[] columns = new Object[Person.N_COLUMNS];

            while (set.next())
            {
                list.add(convertRowToPerson(set, columns));
            }
        }
        catch (SQLException ex)
        {
            throw new XEJBException("Could not fetch all Persons.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }

        return (Person[]) list.toArray(new Person[list.size()]);
    }

    /**
     *  Gets the {@link Person} for primary key.
     *
     *  @throws FinderException if the Person does not exist.
     **/

    public Person getPerson(Integer personId) throws FinderException
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        Person result = null;

        try
        {
            connection = getConnection();

            StatementAssembly assembly = buildBasePersonQuery();
            assembly.newLine("WHERE ");
            assembly.add("PERSON_ID = ");
            assembly.addParameter(personId);
            assembly.newLine("ORDER BY LAST_NAME, FIRST_NAME");

            statement = assembly.createStatement(connection);

            set = statement.executeQuery();

            if (!set.next())
                throw new FinderException("Person #" + personId + " does not exist.");

            Object[] columns = new Object[Person.N_COLUMNS];
            result = convertRowToPerson(set, columns);
        }
        catch (SQLException ex)
        {
            throw new XEJBException("Unable to perform database query.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }

        return result;
    }

    /**
     *  Authenticates a user by e-mail address and password, updating their
     *  last-access timestamp on success.
     *
     *  @throws LoginException on unknown address, bad password, or locked-out user.
     **/

    public Person login(String email, String password) throws RemoteException, LoginException
    {
        IPersonHome home = getPersonHome();
        IPerson person = null;
        Person result = null;

        try
        {
            person = home.findByEmail(email);
        }
        catch (FinderException ex)
        {
            throw new LoginException("Unknown e-mail address.", false);
        }

        if (!person.getPassword().equals(password))
            throw new LoginException("Invalid password.", true);

        try
        {
            result = getPerson((Integer) person.getPrimaryKey());
        }
        catch (FinderException ex)
        {
            throw new LoginException("Could not read person.", false);
        }

        if (result.isLockedOut())
            throw new LoginException("You have been locked out of the Virtual Library.", false);

        // Set the last access time for any subsequent login.

        person.setLastAccess(new Timestamp(System.currentTimeMillis()));

        return result;
    }

    /**
     *  Returns the raw entity-attribute {@link Map} for a person.
     *
     **/

    public Map getPersonAttributes(Integer personId) throws FinderException, RemoteException
    {
        IPersonHome home = getPersonHome();

        IPerson person = home.findByPrimaryKey(personId);

        return person.getEntityAttributes();
    }

    /**
     *  Retrieves a single {@link Book} by its primary key.
     *
     *  @throws FinderException if the Book does not exist.
     *
     **/

    public Book getBook(Integer bookId) throws FinderException
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        Book result = null;

        try
        {
            connection = getConnection();

            StatementAssembly assembly = buildBaseBookQuery();
            assembly.addSep(" AND ");
            assembly.add("book.BOOK_ID = ");
            assembly.addParameter(bookId);

            statement = assembly.createStatement(connection);

            set = statement.executeQuery();

            if (!set.next())
                throw new FinderException("Book " + bookId + " does not exist.");

            Object[] columns = new Object[Book.N_COLUMNS];
            result = convertRowToBook(set, columns);
        }
        catch (SQLException ex)
        {
            throw new XEJBException("Unable to perform database query.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }

        return result;
    }

    /**
     *  Returns the raw entity-attribute {@link Map} for a book.
     *
     **/

    public Map getBookAttributes(Integer bookId) throws FinderException, RemoteException
    {
        IBookHome home = getBookHome();

        IBook book = home.findByPrimaryKey(bookId);

        return book.getEntityAttributes();
    }

    /**
     *  Attempts to register a new user, first checking that the
     *  e-mail and names are unique.  Returns the primary key of the
     *  new {@link IPerson}.
     *
     **/

    public Person registerNewUser(String firstName, String lastName, String email, String password)
        throws RegistrationException, CreateException, RemoteException
    {
        IPersonHome home;

        if (password == null || password.trim().length() == 0)
            throw new RegistrationException("Must specify a password.");

        validateUniquePerson(firstName, lastName, email);

        home = getPersonHome();

        Map attributes = new HashMap();

        attributes.put("lastName", lastName.trim());
        attributes.put("firstName", firstName.trim());
        attributes.put("email", email.trim());
        attributes.put("password", password.trim());
        attributes.put("lastAccess", new Timestamp(System.currentTimeMillis()));

        IPerson person = home.create(attributes);

        Integer personId = (Integer) person.getPrimaryKey();

        try
        {
            return getPerson(personId);
        }
        catch (FinderException ex)
        {
            throw new XCreateException("Unable to find newly created Person.", ex);
        }
    }

    /**
     *  Removes a book, returning its last {@link Book} snapshot.
     *
     **/

    public Book deleteBook(Integer bookId) throws RemoveException, RemoteException
    {
        IBookHome home = getBookHome();
        Book result = null;

        try
        {
            // Snapshot the book before it is removed, so the caller gets its final state.
            result = getBook(bookId);
        }
        catch (FinderException ex)
        {
            throw new XRemoveException(ex);
        }

        home.remove(bookId);

        return result;
    }

    /**
     *  Transfers a number of books to a new owner.
     *
     **/

    public void transferBooks(Integer newOwnerId, Integer[] bookIds)
        throws FinderException, RemoteException
    {
        if (bookIds == null)
            throw new RemoteException("Must supply non-null list of books to transfer.");

        if (newOwnerId == null)
            throw new RemoteException("Must provide an owner for the books.");

        // Verify that the new owner exists.

        IPersonHome personHome = getPersonHome();
        personHome.findByPrimaryKey(newOwnerId);

        // Direct SQL would be more efficient, but this'll probably do.

        IBookHome home = getBookHome();

        for (int i = 0; i < bookIds.length; i++)
        {
            IBook book = home.findByPrimaryKey(bookIds[i]);
            book.setOwnerId(newOwnerId);
        }
    }

    /**
     *  Applies publisher renames and deletions.  Either array may be null.
     *
     **/

    public void updatePublishers(Publisher[] updated, Integer[] deleted)
        throws FinderException, RemoveException, RemoteException
    {
        IPublisherHome home = getPublisherHome();

        if (updated != null)
        {
            for (int i = 0; i < updated.length; i++)
            {
                IPublisher publisher = home.findByPrimaryKey(updated[i].getId());

                publisher.setName(updated[i].getName());
            }
        }

        if (deleted != null)
        {
            for (int i = 0; i < deleted.length; i++)
            {
                home.remove(deleted[i]);
            }
        }
    }

    /**
     *  Bulk person maintenance: updates admin/locked-out flags, resets passwords,
     *  and deletes users (moving their books to the given admin first).
     *
     **/

    public void updatePersons(
        Person[] updated,
        Integer[] resetPassword,
        String newPassword,
        Integer[] deleted,
        Integer adminId)
        throws FinderException, RemoveException, RemoteException
    {
        IPersonHome home = getPersonHome();

        int count = Tapestry.size(updated);

        for (int i = 0; i < count; i++)
        {
            Person u = updated[i];

            IPerson person = home.findByPrimaryKey(u.getId());

            person.setAdmin(u.isAdmin());
            person.setLockedOut(u.isLockedOut());
        }

        count = Tapestry.size(resetPassword);

        for (int i = 0; i < count; i++)
        {
            IPerson person = home.findByPrimaryKey(resetPassword[i]);

            person.setPassword(newPassword);
        }

        count = Tapestry.size(deleted);

        if (count > 0)
        {
            // Books held by deleted users go back to their owners; books owned
            // by deleted users are reassigned to the administrator.
            returnBooksFromDeletedPersons(deleted);

            moveBooksFromDeletedPersons(deleted, adminId);
        }

        for (int i = 0; i < count; i++)
            home.remove(deleted[i]);
    }

    /**
     *  Invoked to update all books owned by people about to be deleted, to
     *  reassign the books holder back to the owner.
     *
     **/

    private void returnBooksFromDeletedPersons(Integer deletedPersonIds[]) throws RemoveException
    {
        StatementAssembly assembly = new StatementAssembly();

        assembly.add("UPDATE BOOK");
        assembly.newLine("SET HOLDER_ID = OWNER_ID");
        assembly.newLine("WHERE HOLDER_ID IN (");
        assembly.addParameterList(deletedPersonIds, ", ");
        assembly.add(")");

        executeUpdate(assembly);
    }

    /**
     *  Invoked to execute a bulk update that moves books to the new admin.
     *
     **/

    private void moveBooksFromDeletedPersons(Integer deletedPersonIds[], Integer adminId)
        throws RemoveException
    {
        StatementAssembly assembly = new StatementAssembly();

        assembly.add("UPDATE BOOK");
        assembly.newLine("SET OWNER_ID = ");
        assembly.addParameter(adminId);
        assembly.newLine("WHERE OWNER_ID IN (");
        assembly.addParameterList(deletedPersonIds, ", ");
        assembly.add(")");

        executeUpdate(assembly);
    }

    /**
     *  Runs an assembled UPDATE statement, wrapping any SQL failure in
     *  {@link XRemoveException} (callers are delete-path operations).
     *
     **/

    private void executeUpdate(StatementAssembly assembly) throws XRemoveException
    {
        Connection connection = null;
        IStatement statement = null;

        try
        {
            connection = getConnection();

            statement = assembly.createStatement(connection);

            statement.executeUpdate();

            statement.close();
            statement = null;

            connection.close();
            connection = null;
        }
        catch (SQLException ex)
        {
            throw new XRemoveException(
                "Unable to execute " + assembly + ": " + ex.getMessage(),
                ex);
        }
        finally
        {
            // No-op on the happy path (both already closed and nulled above).
            close(connection, statement, null);
        }
    }

    /**
     *  Translates the next row from the result set into a {@link Book}.
     *
     *  <p>This works with queries generated by {@link #buildBaseBookQuery()}.
     *
     **/

    protected Book convertRowToBook(ResultSet set, Object[] columns) throws SQLException
    {
        int column = 1;

        columns[Book.ID_COLUMN] = set.getObject(column++);
        columns[Book.TITLE_COLUMN] = set.getString(column++);
        columns[Book.DESCRIPTION_COLUMN] = set.getString(column++);
        columns[Book.ISBN_COLUMN] = set.getString(column++);
        columns[Book.OWNER_ID_COLUMN] = set.getObject(column++);
        columns[Book.OWNER_NAME_COLUMN] = buildName(set.getString(column++), set.getString(column++));
        columns[Book.HOLDER_ID_COLUMN] = set.getObject(column++);
        columns[Book.HOLDER_NAME_COLUMN] =
            buildName(set.getString(column++), set.getString(column++));
        columns[Book.PUBLISHER_ID_COLUMN] = set.getObject(column++);
        columns[Book.PUBLISHER_NAME_COLUMN] = set.getString(column++);
        columns[Book.AUTHOR_COLUMN] = set.getString(column++);
        columns[Book.HIDDEN_COLUMN] = getBoolean(set, column++);
        columns[Book.LENDABLE_COLUMN] = getBoolean(set, column++);
        columns[Book.DATE_ADDED_COLUMN] = set.getTimestamp(column++);

        return new Book(columns);
    }

    // Formats "first last", or just the last name when first is null.
    private String buildName(String firstName, String lastName)
    {
        if (firstName == null)
            return lastName;

        return firstName + " " + lastName;
    }

    /**
     *  All queries must use this exact set of select columns, so that
     *  {@link #convertRowToBook(ResultSet, Object[])} can build
     *  the correct {@link Book} from each row.
     *
     **/

    private static final String[] BOOK_SELECT_COLUMNS =
        {
            "book.BOOK_ID",
            "book.TITLE",
            "book.DESCRIPTION",
            "book.ISBN",
            "owner.PERSON_ID",
            "owner.FIRST_NAME",
            "owner.LAST_NAME",
            "holder.PERSON_ID",
            "holder.FIRST_NAME",
            "holder.LAST_NAME",
            "publisher.PUBLISHER_ID",
            "publisher.NAME",
            "book.AUTHOR",
            "book.HIDDEN",
            "book.LENDABLE",
            "book.DATE_ADDED" };

    private static final String[] BOOK_ALIAS_COLUMNS =
        { "BOOK book", "PERSON owner", "PERSON holder", "PUBLISHER publisher" };

    private static final String[] BOOK_JOINS =
        {
            "book.OWNER_ID = owner.PERSON_ID",
            "book.HOLDER_ID = holder.PERSON_ID",
            "book.PUBLISHER_ID = publisher.PUBLISHER_ID" };

    // Maps SortColumn -> ORDER BY fragment, one map per direction.
    private static final Map BOOK_SORT_ASCENDING = new HashMap();
    private static final Map BOOK_SORT_DESCENDING = new HashMap();

    static {
        BOOK_SORT_ASCENDING.put(SortColumn.TITLE, "book.TITLE");
        BOOK_SORT_ASCENDING.put(SortColumn.HOLDER, "holder.LAST_NAME, holder.FIRST_NAME");
        BOOK_SORT_ASCENDING.put(SortColumn.OWNER, "owner.FIRST_NAME, owner.LAST_NAME");
        BOOK_SORT_ASCENDING.put(SortColumn.PUBLISHER, "publisher.NAME");
        BOOK_SORT_ASCENDING.put(SortColumn.AUTHOR, "book.AUTHOR");

        BOOK_SORT_DESCENDING.put(SortColumn.TITLE, "book.TITLE DESC");
        BOOK_SORT_DESCENDING.put(
            SortColumn.HOLDER,
            "holder.LAST_NAME DESC, holder.FIRST_NAME DESC");
        BOOK_SORT_DESCENDING.put(SortColumn.OWNER, "owner.FIRST_NAME DESC, owner.LAST_NAME DESC");
        BOOK_SORT_DESCENDING.put(SortColumn.PUBLISHER, "publisher.NAME DESC");
        BOOK_SORT_DESCENDING.put(SortColumn.AUTHOR, "book.AUTHOR DESC");
    }

    /**
     *  Builds the shared SELECT/FROM/WHERE(joins) skeleton for all book queries;
     *  callers append additional WHERE terms and ordering.
     *
     **/

    protected StatementAssembly buildBaseBookQuery()
    {
        StatementAssembly result = new StatementAssembly();

        result.newLine("SELECT ");
        result.addList(BOOK_SELECT_COLUMNS, ", ");
        result.newLine("FROM ");
        result.addList(BOOK_ALIAS_COLUMNS, ", ");
        result.newLine("WHERE ");
        result.addList(BOOK_JOINS, " AND ");

        return result;
    }

    /**
     *  Adds a sort ordering clause to the statement.  If ordering is null,
     *  orders by book title.
     *
     *  @param assembly to update
     *  @param ordering defines the column to sort on, and the order (ascending or descending)
     *  @since 3.0
     *
     *
     **/

    protected void addSortOrdering(StatementAssembly assembly, SortOrdering ordering)
    {
        if (ordering == null)
        {
            assembly.newLine("ORDER BY book.TITLE");
            return;
        }

        Map sorts = ordering.isDescending() ? BOOK_SORT_DESCENDING : BOOK_SORT_ASCENDING;

        String term = (String) sorts.get(ordering.getColumn());

        assembly.newLine("ORDER BY ");
        assembly.add(term);
    }

    /**
     *  Appends a case-insensitive LIKE term for the given column; blank or
     *  null values are ignored.
     *
     **/

    protected void addSubstringSearch(StatementAssembly assembly, String column, String value)
    {
        if (value == null)
            return;

        String trimmed = value.trim();
        if (trimmed.length() == 0)
            return;

        // Here's the McKoi dependency:  LOWER() is a database-specific
        // SQL function.

        assembly.addSep(" AND LOWER(");
        assembly.add(column);
        assembly.add(") LIKE");
        assembly.addParameter("%" + trimmed.toLowerCase() + "%");
    }

    /**
     *  Closes the resultSet (if not null), then the statement (if not null),
     *  then the Connection (if not null).  Exceptions are written to System.out.
     *
     **/

    protected void close(Connection connection, IStatement statement, ResultSet resultSet)
    {
        if (resultSet != null)
        {
            try
            {
                resultSet.close();
            }
            catch (SQLException ex)
            {
                System.out.println("Exception closing result set.");
                ex.printStackTrace();
            }
        }

        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException ex)
            {
                System.out.println("Exception closing statement.");
                ex.printStackTrace();
            }
        }

        if (connection != null)
        {
            try
            {
                connection.close();
            }
            catch (SQLException ex)
            {
                System.out.println("Exception closing connection.");
                ex.printStackTrace();
            }
        }
    }

    // Lazily looks up and caches the Person home interface from the ENC.
    private IPersonHome getPersonHome()
    {
        if (_personHome == null)
        {
            try
            {
                Object raw = _environment.lookup("ejb/Person");

                _personHome = (IPersonHome) PortableRemoteObject.narrow(raw, IPersonHome.class);
            }
            catch (NamingException ex)
            {
                throw new XEJBException("Could not lookup Person home interface.", ex);
            }

        }

        return _personHome;
    }

    // Lazily looks up and caches the Publisher home interface from the ENC.
    private IPublisherHome getPublisherHome()
    {
        if (_publisherHome == null)
        {
            try
            {
                Object raw = _environment.lookup("ejb/Publisher");

                _publisherHome =
                    (IPublisherHome) PortableRemoteObject.narrow(raw, IPublisherHome.class);
            }
            catch (NamingException e)
            {
                throw new XEJBException("Could not lookup Publisher home interface.", e);
            }
        }

        return _publisherHome;
    }

    // Lazily looks up and caches the Book home interface from the ENC.
    private IBookHome getBookHome()
    {
        if (_bookHome == null)
        {
            try
            {
                Object raw = _environment.lookup("ejb/Book");

                _bookHome = (IBookHome) PortableRemoteObject.narrow(raw, IBookHome.class);
            }
            catch (NamingException e)
            {
                throw new XEJBException("Could not lookup Book home interface.", e);
            }
        }

        return _bookHome;
    }

    /**
     *  Gets a new connection from the data source.
     *
     **/

    protected Connection getConnection()
    {
        try
        {
            return _dataSource.getConnection();
        }
        catch (SQLException e)
        {
            throw new XEJBException("Unable to get database connection from pool.", e);
        }
    }

    /**
     *  Builds the shared SELECT/FROM skeleton for person queries; column order
     *  must match {@link #convertRowToPerson(ResultSet, Object[])}.
     *
     **/

    protected StatementAssembly buildBasePersonQuery()
    {
        StatementAssembly result;

        result = new StatementAssembly();

        result.newLine("SELECT PERSON_ID, FIRST_NAME, LAST_NAME, EMAIL, ");
        result.newLine("  LOCKED_OUT, ADMIN, LAST_ACCESS");
        result.newLine("FROM PERSON");

        return result;
    }

    /**
     *  Translates the next row from the result set into a {@link Person}.
     *
     *  <p>This works with queries generated by {@link #buildBasePersonQuery()}.
     *
     **/

    protected Person convertRowToPerson(ResultSet set, Object[] columns) throws SQLException
    {
        int column = 1;

        columns[Person.ID_COLUMN] = set.getObject(column++);
        columns[Person.FIRST_NAME_COLUMN] = set.getString(column++);
        columns[Person.LAST_NAME_COLUMN] = set.getString(column++);
        columns[Person.EMAIL_COLUMN] = set.getString(column++);
        columns[Person.LOCKED_OUT_COLUMN] = getBoolean(set, column++);
        columns[Person.ADMIN_COLUMN] = getBoolean(set, column++);
        columns[Person.LAST_ACCESS_COLUMN] = set.getTimestamp(column++);

        return new Person(columns);
    }

    // Avoids autoboxing a new Boolean per row (pre-Java 5 code base).
    private Boolean getBoolean(ResultSet set, int index) throws SQLException
    {
        return set.getBoolean(index) ? Boolean.TRUE : Boolean.FALSE;
    }

    /**
     *  Ensures no existing person shares the given e-mail (case-insensitive)
     *  or the given first+last name pair.
     *
     *  @throws RegistrationException if either uniqueness check fails or the
     *          database cannot be reached.
     **/

    private void validateUniquePerson(String firstName, String lastName, String email)
        throws RegistrationException
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;

        String trimmedEmail = email.trim().toLowerCase();
        String trimmedLastName = lastName.trim().toLowerCase();
        String trimmedFirstName = firstName.trim().toLowerCase();

        try
        {
            connection = getConnection();

            StatementAssembly assembly = new StatementAssembly();

            assembly.newLine("SELECT PERSON_ID");
            assembly.newLine("FROM PERSON");
            assembly.newLine("WHERE ");
            assembly.add("LOWER(EMAIL) = ");
            assembly.addParameter(trimmedEmail);

            statement = assembly.createStatement(connection);

            set = statement.executeQuery();

            if (set.next())
                throw new RegistrationException("Email address is already in use by another user.");

            // Close the first statement/result set before reusing the variables
            // for the name-uniqueness query (connection stays open).
            close(null, statement, set);

            assembly = new StatementAssembly();

            assembly.newLine("SELECT PERSON_ID");
            assembly.newLine("FROM PERSON");
            assembly.newLine("WHERE ");
            assembly.add("LOWER(FIRST_NAME) = ");
            assembly.addParameter(trimmedFirstName);
            assembly.addSep(" AND ");
            assembly.add("LOWER(LAST_NAME) = ");
            assembly.addParameter(trimmedLastName);

            statement = assembly.createStatement(connection);

            set = statement.executeQuery();

            if (set.next())
                throw new RegistrationException("Name provided is already in use by another user.");

        }
        catch (SQLException e)
        {
            throw new RegistrationException("Could not access database: " + e.getMessage(), e);
        }
        finally
        {
            close(connection, statement, set);
        }
    }

    /**
     *  Returns a borrowed book to its owner (holder is reset to the owner),
     *  and returns the updated {@link Book} snapshot.
     *
     **/

    public Book returnBook(Integer bookId) throws RemoteException, FinderException
    {
        IBookHome bookHome = getBookHome();

        IBook book = bookHome.findByPrimaryKey(bookId);

        Integer ownerPK = book.getOwnerId();

        book.setHolderId(ownerPK);

        return getBook(bookId);
    }
}
apache-2.0
niklasteichmann/gradoop
gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/matching/transactional/function/package-info.java
802
/* * Copyright © 2014 - 2018 Leipzig University (Database Research Group) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Contains implementations of graph pattern matching on a single input graph. */ package org.gradoop.flink.model.impl.operators.matching.transactional.function;
apache-2.0
marktriggs/nyu-sakai-10.4
scorm/scorm-tool/src/java/org/sakaiproject/scorm/ui/player/behaviors/SCORM13API.java
3425
package org.sakaiproject.scorm.ui.player.behaviors; import org.adl.api.ecmascript.SCORM13APIInterface; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.scorm.model.api.ScoBean; import org.sakaiproject.scorm.model.api.SessionBean; import org.sakaiproject.scorm.navigation.INavigable; import org.sakaiproject.scorm.navigation.INavigationEvent; import org.sakaiproject.scorm.service.api.ScormApplicationService; import org.sakaiproject.scorm.service.api.ScormSequencingService; public abstract class SCORM13API implements SCORM13APIInterface { private static Log log = LogFactory.getLog(SCORM13API.class); // String value of FALSE for JavaScript returns. protected static final String STRING_FALSE = "false"; // String value of TRUE for JavaScript returns. protected static final String STRING_TRUE = "true"; public abstract SessionBean getSessionBean(); public abstract ScormApplicationService getApplicationService(); public abstract ScormSequencingService getSequencingService(); public abstract ScoBean getScoBean(); public abstract INavigable getAgent(); public abstract Object getTarget(); // Implementation of SCORM13APIInterface public String Commit(String parameter) { // TODO: Disable UI controls -- or throttle them on server -- don't mess with js // Assume failure String result = STRING_FALSE; if (null == getSessionBean()) { log.error("Null run state!"); } if (getApplicationService().commit(parameter, getSessionBean(), getScoBean())) result = STRING_TRUE; // TODO: Enable UI controls return result; } public String GetDiagnostic(String errorCode) { return getApplicationService().getDiagnostic(errorCode, getSessionBean()); } public String GetErrorString(String errorCode) { return getApplicationService().getErrorString(errorCode, getSessionBean()); } public String GetLastError() { return getApplicationService().getLastError(getSessionBean()); } public String GetValue(String parameter) { return 
getApplicationService().getValue(parameter, getSessionBean(), getScoBean()); } public String Initialize(String parameter) { // Assume failure String result = STRING_FALSE; if (getApplicationService().initialize(parameter, getSessionBean(), getScoBean())) result = STRING_TRUE; return result; } public String SetValue(String dataModelElement, String value) { // Assume failure String result = STRING_FALSE; if (getApplicationService().setValue(dataModelElement, value, getSessionBean(), getScoBean())) { result = STRING_TRUE; } return result; } public String Terminate(String parameter) { // Assume failure String result = STRING_FALSE; if (null == getSessionBean()) { log.error("Null run state!"); return result; } INavigationEvent navigationEvent = getApplicationService().newNavigationEvent(); boolean isSuccessful = getApplicationService().terminate(parameter, navigationEvent, getSessionBean(), getScoBean()); if (isSuccessful) { result = STRING_TRUE; if (navigationEvent.isChoiceEvent()) { getSequencingService().navigate(navigationEvent.getChoiceEvent(), getSessionBean(), getAgent(), getTarget()); } else { getSequencingService().navigate(navigationEvent.getEvent(), getSessionBean(), getAgent(), getTarget()); } } return result; } }
apache-2.0
chicagozer/rheosoft
camel-core/src/test/java/org/apache/camel/util/CaseInsensitiveMapTest.java
17444
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.util; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import junit.framework.TestCase; /** * @version */ public class CaseInsensitiveMapTest extends TestCase { public void testLookupCaseAgnostic() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); map.put("foo", "cheese"); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals("cheese", map.get("FOO")); } public void testLookupCaseAgnosticAddHeader() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); map.put("foo", "cheese"); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals("cheese", map.get("FOO")); assertNull(map.get("unknown")); map.put("bar", 
"beer"); assertEquals("beer", map.get("bar")); assertEquals("beer", map.get("Bar")); assertEquals("beer", map.get("BAR")); assertNull(map.get("unknown")); } public void testLookupCaseAgnosticAddHeader2() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); map.put("foo", "cheese"); assertEquals("cheese", map.get("FOO")); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertNull(map.get("unknown")); map.put("bar", "beer"); assertEquals("beer", map.get("BAR")); assertEquals("beer", map.get("bar")); assertEquals("beer", map.get("Bar")); assertNull(map.get("unknown")); } public void testLookupCaseAgnosticAddHeaderRemoveHeader() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); map.put("foo", "cheese"); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals("cheese", map.get("FOO")); assertNull(map.get("unknown")); map.put("bar", "beer"); assertEquals("beer", map.get("bar")); assertEquals("beer", map.get("Bar")); assertEquals("beer", map.get("BAR")); assertNull(map.get("unknown")); map.remove("bar"); assertNull(map.get("bar")); assertNull(map.get("unknown")); } public void testSetWithDifferentCase() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); map.put("foo", "cheese"); map.put("Foo", "bar"); assertEquals("bar", map.get("FOO")); assertEquals("bar", map.get("foo")); assertEquals("bar", map.get("Foo")); } public void testRemoveWithDifferentCase() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); map.put("foo", "cheese"); map.put("Foo", "bar"); assertEquals("bar", map.get("FOO")); assertEquals("bar", map.get("foo")); assertEquals("bar", map.get("Foo")); map.remove("FOO"); assertEquals(null, map.get("foo")); assertEquals(null, map.get("Foo")); assertEquals(null, map.get("FOO")); assertTrue(map.isEmpty()); } public void testPutAll() { Map<String, Object> map = new 
CaseInsensitiveMap(); assertNull(map.get("foo")); Map<String, Object> other = new CaseInsensitiveMap(); other.put("Foo", "cheese"); other.put("bar", 123); map.putAll(other); assertEquals("cheese", map.get("FOO")); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals(123, map.get("BAR")); assertEquals(123, map.get("bar")); assertEquals(123, map.get("BaR")); // key case should be preserved Map<String, Object> keys = new HashMap<String, Object>(); keys.putAll(map); assertEquals("cheese", keys.get("Foo")); assertNull(keys.get("foo")); assertNull(keys.get("FOO")); assertEquals(123, keys.get("bar")); assertNull(keys.get("Bar")); assertNull(keys.get("BAR")); } public void testPutAllOther() { Map<String, Object> map = new CaseInsensitiveMap(); assertNull(map.get("foo")); Map<String, Object> other = new HashMap<String, Object>(); other.put("Foo", "cheese"); other.put("bar", 123); map.putAll(other); assertEquals("cheese", map.get("FOO")); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals(123, map.get("BAR")); assertEquals(123, map.get("bar")); assertEquals(123, map.get("BaR")); } public void testPutAllEmpty() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("foo", "cheese"); Map<String, Object> other = new HashMap<String, Object>(); map.putAll(other); assertEquals("cheese", map.get("FOO")); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals(1, map.size()); } public void testConstructFromOther() { Map<String, Object> other = new HashMap<String, Object>(); other.put("Foo", "cheese"); other.put("bar", 123); Map<String, Object> map = new CaseInsensitiveMap(other); assertEquals("cheese", map.get("FOO")); assertEquals("cheese", map.get("foo")); assertEquals("cheese", map.get("Foo")); assertEquals(123, map.get("BAR")); assertEquals(123, map.get("bar")); assertEquals(123, map.get("BaR")); } public void testKeySet() { Map<String, Object> map = new 
CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", 123); map.put("baZ", "beer"); Set keys = map.keySet(); // we should be able to lookup no matter what case assertTrue(keys.contains("Foo")); assertTrue(keys.contains("foo")); assertTrue(keys.contains("FOO")); assertTrue(keys.contains("BAR")); assertTrue(keys.contains("bar")); assertTrue(keys.contains("Bar")); assertTrue(keys.contains("baZ")); assertTrue(keys.contains("baz")); assertTrue(keys.contains("Baz")); assertTrue(keys.contains("BAZ")); } public void testRetainKeysCopyToAnotherMap() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", 123); map.put("baZ", "beer"); Map<String, Object> other = new HashMap<String, Object>(map); // we should retain the cases of the original keys // when its copied to another map assertTrue(other.containsKey("Foo")); assertFalse(other.containsKey("foo")); assertFalse(other.containsKey("FOO")); assertTrue(other.containsKey("BAR")); assertFalse(other.containsKey("bar")); assertFalse(other.containsKey("Bar")); assertTrue(other.containsKey("baZ")); assertFalse(other.containsKey("baz")); assertFalse(other.containsKey("Baz")); assertFalse(other.containsKey("BAZ")); } public void testValues() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", "123"); map.put("baZ", "Beer"); Iterator it = map.values().iterator(); // should be String values assertEquals("String", it.next().getClass().getSimpleName()); assertEquals("String", it.next().getClass().getSimpleName()); assertEquals("String", it.next().getClass().getSimpleName()); Collection values = map.values(); assertEquals(3, values.size()); assertTrue(values.contains("cheese")); assertTrue(values.contains("123")); assertTrue(values.contains("Beer")); } public void testRomeks() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("foo", "cheese"); assertEquals(1, map.size()); assertEquals("cheese", map.get("fOo")); assertEquals(true, 
map.containsKey("foo")); assertEquals(true, map.containsKey("FOO")); assertEquals(true, map.keySet().contains("FOO")); map.put("FOO", "cake"); assertEquals(1, map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("FOO")); assertEquals("cake", map.get("fOo")); } public void testRomeksUsingRegularHashMap() { Map<String, Object> map = new HashMap<String, Object>(); map.put("foo", "cheese"); assertEquals(1, map.size()); assertEquals(null, map.get("fOo")); assertEquals(true, map.containsKey("foo")); assertEquals(false, map.containsKey("FOO")); assertEquals(false, map.keySet().contains("FOO")); map.put("FOO", "cake"); assertEquals(2, map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("FOO")); assertEquals(null, map.get("fOo")); assertEquals("cheese", map.get("foo")); assertEquals("cake", map.get("FOO")); } public void testRomeksTransferredToHashMapAfterwards() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("FOO", "cake"); assertEquals(1, map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("FOO")); Map<String, Object> other = new HashMap<String, Object>(map); assertEquals(false, other.containsKey("foo")); assertEquals(true, other.containsKey("FOO")); assertEquals(1, other.size()); } public void testSerialization() throws Exception { CaseInsensitiveMap testMap = new CaseInsensitiveMap(); testMap.put("key", "value"); // force entry set to be created which could cause the map to be non serializable testMap.entrySet(); ByteArrayOutputStream bStream = new ByteArrayOutputStream(); ObjectOutputStream objStream = new ObjectOutputStream(bStream); objStream.writeObject(testMap); ObjectInputStream inStream = new ObjectInputStream(new ByteArrayInputStream(bStream.toByteArray())); CaseInsensitiveMap testMapCopy = (CaseInsensitiveMap) inStream.readObject(); assertTrue(testMapCopy.containsKey("key")); } public void 
testCopyToAnotherMapPreserveKeyCaseEntrySet() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", "cake"); assertEquals(2, map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("bar")); Map<String, Object> other = new HashMap<String, Object>(); for (Map.Entry<String, Object> entry : map.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); other.put(key, value); } assertEquals(false, other.containsKey("foo")); assertEquals(true, other.containsKey("Foo")); assertEquals(false, other.containsKey("bar")); assertEquals(true, other.containsKey("BAR")); assertEquals(2, other.size()); } public void testCopyToAnotherMapPreserveKeyCasePutAll() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", "cake"); assertEquals(2, map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("bar")); Map<String, Object> other = new HashMap<String, Object>(); other.putAll(map); assertEquals(false, other.containsKey("foo")); assertEquals(true, other.containsKey("Foo")); assertEquals(false, other.containsKey("bar")); assertEquals(true, other.containsKey("BAR")); assertEquals(2, other.size()); } public void testCopyToAnotherMapPreserveKeyCaseCtr() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", "cake"); assertEquals(2, map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("bar")); Map<String, Object> other = new HashMap<String, Object>(map); assertEquals(false, other.containsKey("foo")); assertEquals(true, other.containsKey("Foo")); assertEquals(false, other.containsKey("bar")); assertEquals(true, other.containsKey("BAR")); assertEquals(2, other.size()); } public void testCopyToAnotherMapPreserveKeyKeySet() { Map<String, Object> map = new CaseInsensitiveMap(); map.put("Foo", "cheese"); map.put("BAR", "cake"); assertEquals(2, 
map.size()); assertEquals(true, map.containsKey("foo")); assertEquals(true, map.containsKey("bar")); Map<String, Object> other = new HashMap<String, Object>(); // this is wrong!!! you should use entrySet for (String key : map.keySet()) { Object value = map.get(key); other.put(key, value); } // now the keys will be in lower case assertEquals(true, other.containsKey("foo")); assertEquals(false, other.containsKey("Foo")); assertEquals(true, other.containsKey("bar")); assertEquals(false, other.containsKey("BAR")); assertEquals(2, other.size()); } public void testConcurrent() throws Exception { ExecutorService service = Executors.newFixedThreadPool(5); final CountDownLatch latch = new CountDownLatch(1000); final Map<String, Object> map = new CaseInsensitiveMap(); // do some stuff concurrently for (int i = 0; i < 1000; i++) { final int count = i; service.submit(new Runnable() { public void run() { Map<String, Object> foo = new CaseInsensitiveMap(); foo.put("counter" + count, count); foo.put("foo", 123); foo.put("bar", 456); foo.put("cake", "cheese"); // copy foo to map as map is a shared resource map.putAll(foo); latch.countDown(); } }); } latch.await(10, TimeUnit.SECONDS); assertEquals(1003, map.size()); assertEquals(true, map.containsKey("counter0")); assertEquals(true, map.containsKey("counter500")); assertEquals(true, map.containsKey("counter999")); assertEquals(123, map.get("FOO")); assertEquals(456, map.get("Bar")); assertEquals("cheese", map.get("cAKe")); service.shutdownNow(); } public void testCopyMapWithCamelHeadersTest() throws Exception { Map<String, Object> map = new CaseInsensitiveMap(); map.put("CamelA", "A"); map.put("CamelB", "B"); map.put("CamelC", "C"); // retain maps so we can profile that the map doesn't duplicate // camel keys as they are intern List<Map> maps = new ArrayList<Map>(); for (int i = 0; i < 10000; i++) { Map<String, Object> copy = new CaseInsensitiveMap(map); assertEquals(3, copy.size()); assertEquals("A", copy.get("CamelA")); 
assertEquals("B", copy.get("CamelB")); assertEquals("C", copy.get("CamelC")); maps.add(copy); } assertEquals(10000, maps.size()); assertEquals(3, map.size()); assertEquals("A", map.get("CamelA")); assertEquals("B", map.get("CamelB")); assertEquals("C", map.get("CamelC")); // use a memory profiler to see memory allocation // often you may want to give it time to run so you // have chance to capture memory snapshot in profiler // Thread.sleep(9999999); } }
apache-2.0
fcamblor/dbmaintain-maven-plugin
dbmaintain/src/main/java/org/dbmaintain/script/parser/impl/DefaultScriptParser.java
5339
/*
 * Copyright DbMaintain.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.dbmaintain.script.parser.impl;

import org.dbmaintain.script.parser.ScriptParser;
import org.dbmaintain.script.parser.parsingstate.ParsingState;
import org.dbmaintain.util.DbMaintainException;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.Properties;

/**
 * A class for parsing statements out of sql scripts.
 * <p/>
 * All statements should be separated with a semicolon (;). The last statement will be
 * added even if it does not end with a semicolon. The semicolons will not be included in the returned statements.
 * <p/>
 * This parser also takes quoted literals, double quoted text and in-line (--comment) and block (/ * comment * /)
 * into account when parsing the statements.
 *
 * @author Tim Ducheyne
 * @author Filip Neven
 * @author Stefan Bangels
 */
public class DefaultScriptParser implements ScriptParser {

    /**
     * The reader for the script content stream (wrapped in a BufferedReader)
     */
    protected Reader scriptReader;

    /**
     * Whether backslash escaping is enabled
     */
    protected boolean backSlashEscapingEnabled;

    /**
     * Parameters that must be replaced in the script. Null if there are no such parameters
     */
    protected Properties scriptParameters;

    /**
     * The starting state
     */
    protected ParsingState initialParsingState;

    /**
     * True if the script has ended
     */
    protected boolean endOfScriptReached = false;

    /**
     * The current parsed character and a one-character look-ahead; null means end of stream
     */
    protected Character currentChar, nextChar;


    /**
     * Constructor for DefaultScriptParser.
     *
     * @param scriptReader             the reader that will provide the script content, not null
     * @param initialParsingState      the inial state when starting to parse a script, not null
     * @param backSlashEscapingEnabled true if backslash escaping is enabled
     * @param scriptParameters         parameters that must be replaced in the script. null if there are no such parameters.
     */
    public DefaultScriptParser(Reader scriptReader, ParsingState initialParsingState, boolean backSlashEscapingEnabled,
                               Properties scriptParameters) {
        this.backSlashEscapingEnabled = backSlashEscapingEnabled;
        this.initialParsingState = initialParsingState;
        this.scriptParameters = scriptParameters;
        // bug fix: the field was previously assigned twice (first the raw reader,
        // then the buffered wrapper); only the buffered assignment is kept
        this.scriptReader = new BufferedReader(scriptReader);
    }


    /**
     * Parses the next statement out of the given script stream.
     *
     * @return the statements, null if no more statements
     */
    public String getNextStatement() {
        try {
            return getNextStatementImpl();
        } catch (IOException e) {
            throw new DbMaintainException("Unable to parse next statement from script.", e);
        }
    }


    /**
     * Actual implementation of getNextStatement.
     *
     * @return the statements, null if no more statements
     * @throws IOException if a problem occurs reading the script from the file system
     */
    protected String getNextStatementImpl() throws IOException {
        StatementBuilder statementBuilder = createStatementBuilder();

        // Make sure that we read currentChar when we start reading a new script. If not null, currentChar was already
        // set to the first character of the next statement when we read the previous statement.
        if (currentChar == null) {
            currentChar = readNextCharacter();
        }
        while (!endOfScriptReached) {
            if (currentChar == null) {
                endOfScriptReached = true;
            }
            nextChar = readNextCharacter();
            statementBuilder.addCharacter(currentChar, nextChar);
            currentChar = nextChar;
            if (statementBuilder.isComplete()) {
                if (statementBuilder.hasExecutableContent()) {
                    return statementBuilder.buildStatement();
                }
                // statement had no executable content (e.g. only comments): discard and start a new one
                statementBuilder = createStatementBuilder();
            }
        }
        if (!statementBuilder.isComplete() && statementBuilder.hasExecutableContent()) {
            throw new DbMaintainException("Last statement in script was not ended correctly.");
        }
        return null;
    }


    /**
     * Reads one character from the script stream.
     *
     * @return the character, null when the end of the stream is reached
     * @throws IOException if a problem occurs reading the script
     */
    protected Character readNextCharacter() throws IOException {
        int charAsInt = scriptReader.read();
        return charAsInt == -1 ? null : (char) charAsInt;
    }


    /**
     * Factory method for the statement builder.
     *
     * @return The statement builder, not null
     */
    protected StatementBuilder createStatementBuilder() {
        return new StatementBuilder(initialParsingState, scriptParameters);
    }
}
apache-2.0
ptupitsyn/ignite
modules/core/src/main/java/org/apache/ignite/internal/processors/platform/PlatformProcessorImpl.java
29379
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.platform; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteAtomicSequence; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteDataStreamer; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.cluster.BaselineNode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.NearCacheConfiguration; import org.apache.ignite.configuration.PlatformConfiguration; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.binary.BinaryRawReaderEx; import org.apache.ignite.internal.binary.BinaryRawWriterEx; import org.apache.ignite.internal.cluster.DetachedClusterNode; import org.apache.ignite.internal.logger.platform.PlatformLogger; import org.apache.ignite.internal.processors.GridProcessorAdapter; import org.apache.ignite.internal.processors.cache.IgniteCacheProxy; import org.apache.ignite.internal.processors.datastreamer.DataStreamerImpl; import org.apache.ignite.internal.processors.datastructures.GridCacheAtomicLongImpl; import 
org.apache.ignite.internal.processors.platform.binary.PlatformBinaryProcessor;
import org.apache.ignite.internal.processors.platform.cache.PlatformCache;
import org.apache.ignite.internal.processors.platform.cache.PlatformCacheExtension;
import org.apache.ignite.internal.processors.platform.cache.affinity.PlatformAffinity;
import org.apache.ignite.internal.processors.platform.cache.store.PlatformCacheStore;
import org.apache.ignite.internal.processors.platform.cluster.PlatformClusterGroup;
import org.apache.ignite.internal.processors.platform.datastreamer.PlatformDataStreamer;
import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicLong;
import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicReference;
import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicSequence;
import org.apache.ignite.internal.processors.platform.dotnet.PlatformDotNetCacheStore;
import org.apache.ignite.internal.processors.platform.memory.PlatformMemory;
import org.apache.ignite.internal.processors.platform.memory.PlatformOutputStream;
import org.apache.ignite.internal.processors.platform.transactions.PlatformTransactions;
import org.apache.ignite.internal.processors.platform.utils.PlatformConfigurationUtils;
import org.apache.ignite.internal.processors.platform.utils.PlatformUtils;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteFuture;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import static org.apache.ignite.internal.processors.platform.PlatformAbstractTarget.FALSE;
import static org.apache.ignite.internal.processors.platform.PlatformAbstractTarget.TRUE;
import static org.apache.ignite.internal.processors.platform.client.ClientConnectionContext.CURRENT_VER;

/**
 * GridGain platform processor.
 * <p>
 * Bridges the Java node and the native platform (e.g. .NET) side: it receives
 * integer-coded operations from the native runtime and dispatches them to the
 * corresponding Ignite APIs, returning {@link PlatformTarget} wrappers that the
 * native side can keep calling into.
 */
@SuppressWarnings({"unchecked"})
public class PlatformProcessorImpl extends GridProcessorAdapter implements PlatformProcessor, PlatformTarget {
    /** Operation: get an existing cache. */
    private static final int OP_GET_CACHE = 1;

    /** Operation: create a cache by name. */
    private static final int OP_CREATE_CACHE = 2;

    /** Operation: get or create a cache by name. */
    private static final int OP_GET_OR_CREATE_CACHE = 3;

    /** Operation: create a cache from a serialized configuration. */
    private static final int OP_CREATE_CACHE_FROM_CONFIG = 4;

    /** Operation: get or create a cache from a serialized configuration. */
    private static final int OP_GET_OR_CREATE_CACHE_FROM_CONFIG = 5;

    /** Operation: destroy a cache. */
    private static final int OP_DESTROY_CACHE = 6;

    /** Operation: get cache affinity. */
    private static final int OP_GET_AFFINITY = 7;

    /** Operation: get a data streamer. */
    private static final int OP_GET_DATA_STREAMER = 8;

    /** Operation: get transactions facade (with label). */
    private static final int OP_GET_TRANSACTIONS = 9;

    /** Operation: get cluster group. */
    private static final int OP_GET_CLUSTER_GROUP = 10;

    /** Operation: get a registered platform plugin extension. */
    private static final int OP_GET_EXTENSION = 11;

    /** Operation: get an atomic long. */
    private static final int OP_GET_ATOMIC_LONG = 12;

    /** Operation: get an atomic reference. */
    private static final int OP_GET_ATOMIC_REFERENCE = 13;

    /** Operation: get an atomic sequence. */
    private static final int OP_GET_ATOMIC_SEQUENCE = 14;

    /** Operation: write the Ignite configuration to the native side. */
    private static final int OP_GET_IGNITE_CONFIGURATION = 15;

    /** Operation: write all cache names to the native side. */
    private static final int OP_GET_CACHE_NAMES = 16;

    /** Operation: create a near cache. */
    private static final int OP_CREATE_NEAR_CACHE = 17;

    /** Operation: get or create a near cache. */
    private static final int OP_GET_OR_CREATE_NEAR_CACHE = 18;

    /** Operation: check whether a logger level is enabled. */
    private static final int OP_LOGGER_IS_LEVEL_ENABLED = 19;

    /** Operation: log a message through the Ignite logger. */
    private static final int OP_LOGGER_LOG = 20;

    /** Operation: get the binary processor. */
    private static final int OP_GET_BINARY_PROCESSOR = 21;

    /** Operation: release the start latch. */
    private static final int OP_RELEASE_START = 22;

    /** Operation: add a cache configuration template. */
    private static final int OP_ADD_CACHE_CONFIGURATION = 23;

    /** Operation: set baseline topology by version. */
    private static final int OP_SET_BASELINE_TOPOLOGY_VER = 24;

    /** Operation: set baseline topology by explicit node list. */
    private static final int OP_SET_BASELINE_TOPOLOGY_NODES = 25;

    /** Operation: write the current baseline topology to the native side. */
    private static final int OP_GET_BASELINE_TOPOLOGY = 26;

    /** Operation: disable WAL for a cache. */
    private static final int OP_DISABLE_WAL = 27;

    /** Operation: enable WAL for a cache. */
    private static final int OP_ENABLE_WAL = 28;

    /** Operation: check whether WAL is enabled for a cache. */
    private static final int OP_IS_WAL_ENABLED = 29;

    /** Operation: set transaction timeout on partition map exchange. */
    private static final int OP_SET_TX_TIMEOUT_ON_PME = 30;

    /** Start latch. Released by {@link #releaseStart()} or on kernal stop. */
    private final CountDownLatch startLatch = new CountDownLatch(1);

    /** Stores pending initialization. Stores registered before {@code start()} are parked here. */
    private final Collection<StoreInfo> pendingStores =
        Collections.newSetFromMap(new ConcurrentHashMap<StoreInfo, Boolean>());

    /** Lock for store lifecycle operations. Write-locked by start(), read-locked by registerStore(). */
    private final ReadWriteLock storeLock = new ReentrantReadWriteLock();

    /** Logger. */
    @SuppressWarnings("FieldCanBeLocal")
    private final IgniteLogger log;

    /** Context. */
    private final PlatformContext platformCtx;

    /** Interop configuration. */
    private final PlatformConfigurationEx interopCfg;

    /** Extensions. Indexed by extension ID; may contain null gaps. */
    private final PlatformPluginExtension[] extensions;

    /** Whether processor is started. Guarded by {@link #storeLock}. */
    private boolean started;

    /** Whether processor if stopped (or stopping). */
    private volatile boolean stopped;

    /** Cache extensions. */
    private final PlatformCacheExtension[] cacheExts;

    /** Cluster restart flag for the reconnect callback. */
    private volatile boolean clusterRestarted;

    /**
     * Constructor.
     *
     * @param ctx Kernal context.
     */
    public PlatformProcessorImpl(GridKernalContext ctx) {
        super(ctx);

        log = ctx.log(PlatformProcessorImpl.class);

        PlatformConfiguration interopCfg0 = ctx.config().getPlatformConfiguration();

        assert interopCfg0 != null : "Must be checked earlier during component creation.";

        if (!(interopCfg0 instanceof PlatformConfigurationEx))
            throw new IgniteException("Unsupported platform configuration: " + interopCfg0.getClass().getName());

        interopCfg = (PlatformConfigurationEx)interopCfg0;

        if (!F.isEmpty(interopCfg.warnings())) {
            for (String w : interopCfg.warnings())
                U.warn(log, w);
        }

        platformCtx = new PlatformContextImpl(ctx, interopCfg.gate(), interopCfg.memory(), interopCfg.platform());

        // Initialize cache extensions (if any).
        cacheExts = prepareCacheExtensions(interopCfg.cacheExtensions());

        if (interopCfg.logger() != null)
            interopCfg.logger().setContext(platformCtx);

        // Initialize extensions (if any).
        extensions = prepareExtensions(ctx.plugins().extensions(PlatformPluginExtension.class));
    }

    /** {@inheritDoc} */
    @Override public void start() throws IgniteCheckedException {
        // Notify the native gateway that the node has started, passing the instance name.
        try (PlatformMemory mem = platformCtx.memory().allocate()) {
            PlatformOutputStream out = mem.output();

            BinaryRawWriterEx writer = platformCtx.writer(out);

            writer.writeString(ctx.igniteInstanceName());

            out.synchronize();

            platformCtx.gateway().onStart(new PlatformTargetProxyImpl(this, platformCtx), mem.pointer());
        }

        // At this moment all necessary native libraries must be loaded, so we can process with store creation.
        storeLock.writeLock().lock();

        try {
            for (StoreInfo store : pendingStores)
                registerStore0(store.store, store.convertBinary);

            pendingStores.clear();

            started = true;
        }
        finally {
            storeLock.writeLock().unlock();
        }

        // Add Interop node attributes.
        ctx.addNodeAttribute(PlatformUtils.ATTR_PLATFORM, interopCfg.platform());
    }

    /** {@inheritDoc} */
    @Override public void onKernalStop(boolean cancel) {
        // Unblock any thread still waiting in awaitStart().
        startLatch.countDown();
    }

    /** {@inheritDoc} */
    @Override public void stop(boolean cancel) throws IgniteCheckedException {
        if (platformCtx != null) {
            stopped = true;

            platformCtx.gateway().onStop();
        }
    }

    /** {@inheritDoc} */
    @Override public Ignite ignite() {
        return ctx.grid();
    }

    /** {@inheritDoc} */
    @Override public long environmentPointer() {
        return platformCtx.gateway().environmentPointer();
    }

    /** {@inheritDoc} */
    @Override public void releaseStart() {
        startLatch.countDown();
    }

    /** {@inheritDoc} */
    @Override public void awaitStart() throws IgniteCheckedException {
        U.await(startLatch);
    }

    /** {@inheritDoc} */
    @Override public PlatformContext context() {
        return platformCtx;
    }

    /** {@inheritDoc} */
    @Override public void registerStore(PlatformCacheStore store, boolean convertBinary)
        throws IgniteCheckedException {
        storeLock.readLock().lock();

        try {
            if (stopped)
                throw new IgniteCheckedException("Failed to initialize interop store because node is stopping: "
                    + store);

            // Before start() completes the native libraries may not be loaded yet,
            // so the store is parked in pendingStores and initialized by start().
            if (started)
                registerStore0(store, convertBinary);
            else
                pendingStores.add(new StoreInfo(store, convertBinary));
        }
        finally {
            storeLock.readLock().unlock();
        }
    }

    /** {@inheritDoc} */
    @Override public void onDisconnected(IgniteFuture<?> reconnectFut) throws IgniteCheckedException {
        platformCtx.gateway().onClientDisconnected();

        // 1) onReconnected is called on all grid components.
        // 2) After all of grid components have completed their reconnection, reconnectFut is completed.
        reconnectFut.listen(new CI1<IgniteFuture<?>>() {
            @Override public void apply(IgniteFuture<?> future) {
                platformCtx.gateway().onClientReconnected(clusterRestarted);
            }
        });
    }

    /** {@inheritDoc} */
    @Override public IgniteInternalFuture<?> onReconnected(boolean clusterRestarted) throws IgniteCheckedException {
        // Save the flag value for callback of reconnectFut.
        this.clusterRestarted = clusterRestarted;

        return null;
    }

    /**
     * Creates new platform cache.
     */
    private PlatformTarget createPlatformCache(IgniteCacheProxy cache) {
        assert cache != null;

        return new PlatformCache(platformCtx, cache, false, cacheExts);
    }

    /**
     * Checks whether logger level is enabled.
     *
     * @param level Level.
     * @return Result.
     */
    private boolean loggerIsLevelEnabled(int level) {
        IgniteLogger log = ctx.grid().log();

        switch (level) {
            case PlatformLogger.LVL_TRACE:
                return log.isTraceEnabled();
            case PlatformLogger.LVL_DEBUG:
                return log.isDebugEnabled();
            case PlatformLogger.LVL_INFO:
                return log.isInfoEnabled();
            case PlatformLogger.LVL_WARN:
                return true;   // Warnings and errors are always enabled.
            case PlatformLogger.LVL_ERROR:
                return true;
            default:
                assert false;
        }

        return false;
    }

    /**
     * Logs to the Ignite logger.
     *
     * @param level Level.
     * @param message Message.
     * @param category Category.
     * @param errorInfo Exception.
     */
    private void loggerLog(int level, String message, String category, String errorInfo) {
        IgniteLogger log = ctx.grid().log();

        if (category != null)
            log = log.getLogger(category);

        // Native-side error text is wrapped so it travels through the logger API as a Throwable.
        Throwable err = errorInfo == null ? null : new IgniteException("Platform error:" + errorInfo);

        switch (level) {
            case PlatformLogger.LVL_TRACE:
                log.trace(message);
                break;
            case PlatformLogger.LVL_DEBUG:
                log.debug(message);
                break;
            case PlatformLogger.LVL_INFO:
                log.info(message);
                break;
            case PlatformLogger.LVL_WARN:
                log.warning(message, err);
                break;
            case PlatformLogger.LVL_ERROR:
                log.error(message, err);
                break;
            default:
                assert false;
        }
    }

    /** {@inheritDoc} */
    @Override public long processInLongOutLong(int type, long val) throws IgniteCheckedException {
        switch (type) {
            case OP_LOGGER_IS_LEVEL_ENABLED: {
                return loggerIsLevelEnabled((int) val) ? TRUE : FALSE;
            }

            case OP_RELEASE_START: {
                releaseStart();

                return 0;
            }

            case OP_SET_BASELINE_TOPOLOGY_VER: {
                ctx.grid().cluster().setBaselineTopology(val);

                return 0;
            }
        }

        return PlatformAbstractTarget.throwUnsupported(type);
    }

    /** {@inheritDoc} */
    @Override public long processInStreamOutLong(int type, BinaryRawReaderEx reader)
        throws IgniteCheckedException {
        switch (type) {
            case OP_DESTROY_CACHE: {
                ctx.grid().destroyCache(reader.readString());

                return 0;
            }

            case OP_LOGGER_LOG: {
                loggerLog(reader.readInt(), reader.readString(), reader.readString(), reader.readString());

                return 0;
            }

            case OP_SET_BASELINE_TOPOLOGY_NODES: {
                int cnt = reader.readInt();

                Collection<BaselineNode> nodes = new ArrayList<>(cnt);

                for (int i = 0; i < cnt; i++) {
                    Object consId = reader.readObjectDetached();

                    Map<String, Object> attrs = PlatformUtils.readNodeAttributes(reader);

                    nodes.add(new DetachedClusterNode(consId, attrs));
                }

                ctx.grid().cluster().setBaselineTopology(nodes);

                return 0;
            }

            case OP_ADD_CACHE_CONFIGURATION:
                CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader, CURRENT_VER);

                ctx.grid().addCacheConfiguration(cfg);

                return 0;

            case OP_DISABLE_WAL:
                ctx.grid().cluster().disableWal(reader.readString());

                return 0;

            case OP_ENABLE_WAL:
                ctx.grid().cluster().enableWal(reader.readString());

                return 0;

            case OP_SET_TX_TIMEOUT_ON_PME:
                ctx.grid().cluster().setTxTimeoutOnPartitionMapExchange(reader.readLong());

                return 0;

            case OP_IS_WAL_ENABLED:
                return ctx.grid().cluster().isWalEnabled(reader.readString()) ? TRUE : FALSE;
        }

        return PlatformAbstractTarget.throwUnsupported(type);
    }

    /** {@inheritDoc} */
    @Override public long processInStreamOutLong(int type, BinaryRawReaderEx reader, PlatformMemory mem)
        throws IgniteCheckedException {
        return processInStreamOutLong(type, reader);
    }

    /** {@inheritDoc} */
    @Override public void processInStreamOutStream(int type, BinaryRawReaderEx reader, BinaryRawWriterEx writer)
        throws IgniteCheckedException {
        PlatformAbstractTarget.throwUnsupported(type);
    }

    /** {@inheritDoc} */
    @Override public PlatformTarget processInStreamOutObject(int type, BinaryRawReaderEx reader)
        throws IgniteCheckedException {
        switch (type) {
            case OP_GET_CACHE: {
                String name = reader.readString();

                IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().cache(name);

                if (cache == null)
                    throw new IllegalArgumentException("Cache doesn't exist: " + name);

                return createPlatformCache(cache);
            }

            case OP_CREATE_CACHE: {
                String name = reader.readString();

                IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().createCache(name);

                return createPlatformCache(cache);
            }

            case OP_GET_OR_CREATE_CACHE: {
                String name = reader.readString();

                IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().getOrCreateCache(name);

                return createPlatformCache(cache);
            }

            case OP_CREATE_CACHE_FROM_CONFIG: {
                CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader, CURRENT_VER);

                // The boolean flag tells whether a near-cache configuration follows in the stream.
                IgniteCacheProxy cache = reader.readBoolean()
                    ? (IgniteCacheProxy)ctx.grid().createCache(cfg,
                        PlatformConfigurationUtils.readNearConfiguration(reader))
                    : (IgniteCacheProxy)ctx.grid().createCache(cfg);

                return createPlatformCache(cache);
            }

            case OP_GET_OR_CREATE_CACHE_FROM_CONFIG: {
                CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader, CURRENT_VER);

                IgniteCacheProxy cache = reader.readBoolean()
                    ? (IgniteCacheProxy)ctx.grid().getOrCreateCache(cfg,
                        PlatformConfigurationUtils.readNearConfiguration(reader))
                    : (IgniteCacheProxy)ctx.grid().getOrCreateCache(cfg);

                return createPlatformCache(cache);
            }

            case OP_GET_AFFINITY: {
                return new PlatformAffinity(platformCtx, ctx, reader.readString());
            }

            case OP_GET_DATA_STREAMER: {
                String cacheName = reader.readString();
                boolean keepBinary = reader.readBoolean();

                IgniteDataStreamer ldr = ctx.dataStream().dataStreamer(cacheName);

                // NOTE(review): the streamer itself is always put in keepBinary mode; the
                // keepBinary flag read above only configures the platform-side wrapper —
                // confirm against PlatformDataStreamer semantics.
                ldr.keepBinary(true);

                return new PlatformDataStreamer(platformCtx, cacheName, (DataStreamerImpl)ldr, keepBinary);
            }

            case OP_GET_EXTENSION: {
                int id = reader.readInt();

                if (extensions != null && id < extensions.length) {
                    PlatformPluginExtension ext = extensions[id];

                    if (ext != null) {
                        return ext.createTarget();
                    }
                }

                throw new IgniteException("Platform extension is not registered [id=" + id + ']');
            }

            case OP_GET_ATOMIC_LONG: {
                String name = reader.readString();
                long initVal = reader.readLong();
                boolean create = reader.readBoolean();

                GridCacheAtomicLongImpl atomicLong =
                    (GridCacheAtomicLongImpl)ignite().atomicLong(name, initVal, create);

                if (atomicLong == null)
                    return null;

                return new PlatformAtomicLong(platformCtx, atomicLong);
            }

            case OP_GET_ATOMIC_REFERENCE: {
                String name = reader.readString();
                Object initVal = reader.readObjectDetached();
                boolean create = reader.readBoolean();

                return PlatformAtomicReference.createInstance(platformCtx, name, initVal, create);
            }

            case OP_GET_ATOMIC_SEQUENCE: {
                String name = reader.readString();
                long initVal = reader.readLong();
                boolean create = reader.readBoolean();

                IgniteAtomicSequence atomicSeq = ignite().atomicSequence(name, initVal, create);

                if (atomicSeq == null)
                    return null;

                return new PlatformAtomicSequence(platformCtx, atomicSeq);
            }

            case OP_CREATE_NEAR_CACHE: {
                String cacheName = reader.readString();

                NearCacheConfiguration cfg = PlatformConfigurationUtils.readNearConfiguration(reader);

                IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().createNearCache(cacheName, cfg);

                return createPlatformCache(cache);
            }

            case OP_GET_OR_CREATE_NEAR_CACHE: {
                String cacheName = reader.readString();

                NearCacheConfiguration cfg = PlatformConfigurationUtils.readNearConfiguration(reader);

                IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().getOrCreateNearCache(cacheName, cfg);

                return createPlatformCache(cache);
            }

            case OP_GET_TRANSACTIONS: {
                String lbl = reader.readString();

                return new PlatformTransactions(platformCtx, lbl);
            }
        }

        return PlatformAbstractTarget.throwUnsupported(type);
    }

    /** {@inheritDoc} */
    @Override public PlatformTarget processInObjectStreamOutObjectStream(int type, @Nullable PlatformTarget arg,
        BinaryRawReaderEx reader, BinaryRawWriterEx writer) throws IgniteCheckedException {
        return PlatformAbstractTarget.throwUnsupported(type);
    }

    /** {@inheritDoc} */
    @Override public void processOutStream(int type, BinaryRawWriterEx writer) throws IgniteCheckedException {
        switch (type) {
            case OP_GET_IGNITE_CONFIGURATION: {
                PlatformConfigurationUtils.writeIgniteConfiguration(writer, ignite().configuration(), CURRENT_VER);

                return;
            }

            case OP_GET_CACHE_NAMES: {
                Collection<String> names = ignite().cacheNames();

                writer.writeInt(names.size());

                for (String name : names)
                    writer.writeString(name);

                return;
            }

            case OP_GET_BASELINE_TOPOLOGY: {
                Collection<BaselineNode> blt = ignite().cluster().currentBaselineTopology();

                writer.writeInt(blt.size());

                for (BaselineNode n : blt) {
                    writer.writeObjectDetached(n.consistentId());
                    PlatformUtils.writeNodeAttributes(writer, n.attributes());
                }

                return;
            }
        }

        PlatformAbstractTarget.throwUnsupported(type);
    }

    /** {@inheritDoc} */
    @Override public PlatformTarget processOutObject(int type) throws IgniteCheckedException {
        switch (type) {
            case OP_GET_TRANSACTIONS:
                return new PlatformTransactions(platformCtx);

            case OP_GET_CLUSTER_GROUP:
                return new PlatformClusterGroup(platformCtx, ctx.grid().cluster());

            case OP_GET_BINARY_PROCESSOR: {
                return new PlatformBinaryProcessor(platformCtx);
            }
        }

        return PlatformAbstractTarget.throwUnsupported(type);
    }

    /** {@inheritDoc} */
    @Override public PlatformAsyncResult processInStreamAsync(int type, BinaryRawReaderEx reader)
        throws IgniteCheckedException {
        return PlatformAbstractTarget.throwUnsupported(type);
    }

    /** {@inheritDoc} */
    @Override public Exception convertException(Exception e) {
        return e;
    }

    /**
     * Internal store initialization routine.
     *
     * @param store Store.
     * @param convertBinary Convert binary flag.
     * @throws IgniteCheckedException If failed.
     */
    private void registerStore0(PlatformCacheStore store, boolean convertBinary) throws IgniteCheckedException {
        if (store instanceof PlatformDotNetCacheStore) {
            PlatformDotNetCacheStore store0 = (PlatformDotNetCacheStore)store;

            store0.initialize(ctx, convertBinary);
        }
        else
            throw new IgniteCheckedException("Unsupported interop store: " + store);
    }

    /**
     * Prepare cache extensions.
     * <p>
     * Validates IDs (non-null, non-negative, unique) and returns an array indexed by
     * extension ID, possibly with null gaps.
     *
     * @param cacheExts Original extensions.
     * @return Prepared extensions.
     */
    private static PlatformCacheExtension[] prepareCacheExtensions(Collection<PlatformCacheExtension> cacheExts) {
        if (!F.isEmpty(cacheExts)) {
            int maxExtId = 0;

            Map<Integer, PlatformCacheExtension> idToExt = new HashMap<>();

            for (PlatformCacheExtension cacheExt : cacheExts) {
                if (cacheExt == null)
                    throw new IgniteException("Platform cache extension cannot be null.");

                if (cacheExt.id() < 0)
                    throw new IgniteException("Platform cache extension ID cannot be negative: " + cacheExt);

                PlatformCacheExtension oldCacheExt = idToExt.put(cacheExt.id(), cacheExt);

                if (oldCacheExt != null)
                    throw new IgniteException("Platform cache extensions cannot have the same ID [" +
                        "id=" + cacheExt.id() + ", first=" + oldCacheExt + ", second=" + cacheExt + ']');

                if (cacheExt.id() > maxExtId)
                    maxExtId = cacheExt.id();
            }

            PlatformCacheExtension[] res = new PlatformCacheExtension[maxExtId + 1];

            for (PlatformCacheExtension cacheExt : cacheExts)
                res[cacheExt.id()] = cacheExt;

            return res;
        }
        else
            //noinspection ZeroLengthArrayAllocation
            return new PlatformCacheExtension[0];
    }

    /**
     * Prepare extensions.
     * <p>
     * Same validation as {@link #prepareCacheExtensions}: IDs must be non-null,
     * non-negative and unique; result is indexed by extension ID.
     *
     * @param exts Original extensions.
     * @return Prepared extensions.
     */
    private static PlatformPluginExtension[] prepareExtensions(PlatformPluginExtension[] exts) {
        if (!F.isEmpty(exts)) {
            int maxExtId = 0;

            Map<Integer, PlatformPluginExtension> idToExt = new HashMap<>();

            for (PlatformPluginExtension ext : exts) {
                if (ext == null)
                    throw new IgniteException("Platform extension cannot be null.");

                if (ext.id() < 0)
                    throw new IgniteException("Platform extension ID cannot be negative: " + ext);

                PlatformPluginExtension oldCacheExt = idToExt.put(ext.id(), ext);

                if (oldCacheExt != null)
                    throw new IgniteException("Platform extensions cannot have the same ID [" +
                        "id=" + ext.id() + ", first=" + oldCacheExt + ", second=" + ext + ']');

                if (ext.id() > maxExtId)
                    maxExtId = ext.id();
            }

            PlatformPluginExtension[] res = new PlatformPluginExtension[maxExtId + 1];

            for (PlatformPluginExtension ext : exts)
                res[ext.id()] = ext;

            return res;
        }
        else
            //noinspection ZeroLengthArrayAllocation
            return new PlatformPluginExtension[0];
    }

    /**
     * Store and manager pair.
     */
    private static class StoreInfo {
        /** Store. */
        private final PlatformCacheStore store;

        /** Convert binary flag. */
        private final boolean convertBinary;

        /**
         * Constructor.
         *
         * @param store Store.
         * @param convertBinary Convert binary flag.
         */
        private StoreInfo(PlatformCacheStore store, boolean convertBinary) {
            this.store = store;
            this.convertBinary = convertBinary;
        }
    }
}
apache-2.0
ChrisCanCompute/assertj-core
src/main/java/org/assertj/core/util/diff/DeleteDelta.java
1820
/** * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2017 the original author or authors. */ package org.assertj.core.util.diff; import java.util.List; /** * Initially copied from https://code.google.com/p/java-diff-utils/. * <p> * Describes the delete-delta between original and revised texts. * * @author <a href="dm.naumenko@gmail.com">Dmitry Naumenko</a> * @param <T> The type of the compared elements in the 'lines'. */ public class DeleteDelta<T> extends Delta<T> { /** * Creates a change delta with the two given chunks. * * @param original * The original chunk. Must not be {@code null}. * @param revised * The original chunk. Must not be {@code null}. */ public DeleteDelta(Chunk<T> original, Chunk<T> revised) { super(original, revised); } /** * {@inheritDoc} */ @Override public void applyTo(List<T> target) throws IllegalStateException { verify(target); int position = getOriginal().getPosition(); int size = getOriginal().size(); for (int i = 0; i < size; i++) { target.remove(position); } } @Override public TYPE getType() { return Delta.TYPE.DELETE; } @Override public void verify(List<T> target) throws IllegalStateException { getOriginal().verify(target); } }
apache-2.0
Fabryprog/camel
components/camel-jms/src/test/java/org/apache/camel/component/jms/ConsumeJmsMapMessageTest.java
4265
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.jms;

import java.util.HashMap;
import java.util.Map;

import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.Session;

import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.support.ExchangeHelper;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Before;
import org.junit.Test;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.core.MessageCreator;

import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge;

/**
 * Verifies that the camel-jms component converts a JMS {@link MapMessage}
 * to a {@code Map} body, both when the message is produced by a raw
 * {@link JmsTemplate} and when it is sent through a Camel route.
 */
public class ConsumeJmsMapMessageTest extends CamelTestSupport {
    // Raw Spring template used to send a MapMessage outside of Camel.
    protected JmsTemplate jmsTemplate;

    // Mock endpoint the routes deliver to; populated in setUp().
    private MockEndpoint endpoint;

    /** Sends a MapMessage directly via JmsTemplate and checks Camel consumes it as a Map. */
    @Test
    public void testConsumeMapMessage() throws Exception {
        endpoint.expectedMessageCount(1);

        // Queue (point-to-point), not topic.
        jmsTemplate.setPubSubDomain(false);
        jmsTemplate.send("test.map", new MessageCreator() {
            public Message createMessage(Session session) throws JMSException {
                MapMessage mapMessage = session.createMapMessage();
                mapMessage.setString("foo", "abc");
                mapMessage.setString("bar", "xyz");
                return mapMessage;
            }
        });

        endpoint.assertIsSatisfied();
        assertCorrectMapReceived();
    }

    /** Asserts the single received exchange carries the expected two-entry map. */
    protected void assertCorrectMapReceived() {
        Exchange exchange = endpoint.getReceivedExchanges().get(0);
        // This should be a JMS Exchange
        assertNotNull(ExchangeHelper.getBinding(exchange, JmsBinding.class));
        JmsMessage in = (JmsMessage) exchange.getIn();
        assertNotNull(in);

        Map<?, ?> map = exchange.getIn().getBody(Map.class);
        log.info("Received map: " + map);

        assertNotNull("Should have received a map message!", map);
        assertIsInstanceOf(MapMessage.class, in.getJmsMessage());
        assertEquals("map.foo", "abc", map.get("foo"));
        assertEquals("map.bar", "xyz", map.get("bar"));
        assertEquals("map.size", 2, map.size());
    }

    /** Sends a java.util.Map through a Camel route and checks it arrives as a MapMessage. */
    @Test
    public void testSendMapMessage() throws Exception {
        endpoint.expectedMessageCount(1);

        Map<String, String> map = new HashMap<>();
        map.put("foo", "abc");
        map.put("bar", "xyz");

        template.sendBody("direct:test", map);

        endpoint.assertIsSatisfied();
        assertCorrectMapReceived();
    }

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        endpoint = getMockEndpoint("mock:result");
    }

    /** Wires an embedded ActiveMQ connection factory into both Camel and the raw JmsTemplate. */
    protected CamelContext createCamelContext() throws Exception {
        CamelContext camelContext = super.createCamelContext();

        ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
        jmsTemplate = new JmsTemplate(connectionFactory);
        camelContext.addComponent("activemq", jmsComponentAutoAcknowledge(connectionFactory));

        return camelContext;
    }

    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            public void configure() throws Exception {
                // Consumer route under test, plus a direct route used by testSendMapMessage().
                from("activemq:test.map").to("mock:result");
                from("direct:test").to("activemq:test.map");
            }
        };
    }
}
apache-2.0
psiroky/optaplanner
optaplanner-core/src/main/java/org/optaplanner/core/impl/constructionheuristic/greedyFit/decider/ConstructionHeuristicPickEarlyType.java
779
/* * Copyright 2010 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.core.impl.constructionheuristic.greedyFit.decider; public enum ConstructionHeuristicPickEarlyType { NEVER, FIRST_LAST_STEP_SCORE_EQUAL_OR_IMPROVING; }
apache-2.0
dhalperi/beam
runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java
18014
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.flink.translation.wrappers.streaming.io; import com.google.common.annotations.VisibleForTesting; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import org.apache.beam.runners.flink.metrics.FlinkMetricContainer; import org.apache.beam.runners.flink.metrics.ReaderInvocationUtil; import org.apache.beam.runners.flink.translation.types.CoderTypeInformation; import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.KvCoder; import org.apache.beam.sdk.coders.SerializableCoder; import org.apache.beam.sdk.io.UnboundedSource; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.TypeDescriptor; import org.apache.beam.sdk.values.ValueWithRecordId; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.functions.StoppableFunction; import 
org.apache.flink.api.common.state.ListState; import org.apache.flink.api.common.state.ListStateDescriptor; import org.apache.flink.api.common.state.OperatorStateStore; import org.apache.flink.configuration.Configuration; import org.apache.flink.runtime.state.CheckpointListener; import org.apache.flink.runtime.state.DefaultOperatorStateBackend; import org.apache.flink.runtime.state.FunctionInitializationContext; import org.apache.flink.runtime.state.FunctionSnapshotContext; import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction; import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction; import org.apache.flink.streaming.api.operators.StreamingRuntimeContext; import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback; import org.joda.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Wrapper for executing {@link UnboundedSource UnboundedSources} as a Flink Source. */ public class UnboundedSourceWrapper< OutputT, CheckpointMarkT extends UnboundedSource.CheckpointMark> extends RichParallelSourceFunction<WindowedValue<ValueWithRecordId<OutputT>>> implements ProcessingTimeCallback, StoppableFunction, CheckpointListener, CheckpointedFunction { private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceWrapper.class); private final String stepName; /** * Keep the options so that we can initialize the localReaders. */ private final SerializedPipelineOptions serializedOptions; /** * For snapshot and restore. */ private final KvCoder< ? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> checkpointCoder; /** * The split sources. We split them in the constructor to ensure that all parallel * sources are consistent about the split sources. */ private final List<? extends UnboundedSource<OutputT, CheckpointMarkT>> splitSources; /** * The local split sources. 
Assigned at runtime when the wrapper is executed in parallel. */ private transient List<UnboundedSource<OutputT, CheckpointMarkT>> localSplitSources; /** * The local split readers. Assigned at runtime when the wrapper is executed in parallel. * Make it a field so that we can access it in {@link #onProcessingTime(long)} for * emitting watermarks. */ private transient List<UnboundedSource.UnboundedReader<OutputT>> localReaders; /** * Flag to indicate whether the source is running. * Initialize here and not in run() to prevent races where we cancel a job before run() is * ever called or run() is called after cancel(). */ private volatile boolean isRunning = true; /** * Make it a field so that we can access it in {@link #onProcessingTime(long)} for registering new * triggers. */ private transient StreamingRuntimeContext runtimeContext; /** * Make it a field so that we can access it in {@link #onProcessingTime(long)} for emitting * watermarks. */ private transient SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> context; /** * Pending checkpoints which have not been acknowledged yet. */ private transient LinkedHashMap<Long, List<CheckpointMarkT>> pendingCheckpoints; /** * Keep a maximum of 32 checkpoints for {@code CheckpointMark.finalizeCheckpoint()}. */ private static final int MAX_NUMBER_PENDING_CHECKPOINTS = 32; private transient ListState<KV<? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT>> stateForCheckpoint; /** * false if checkpointCoder is null or no restore state by starting first. 
*/ private transient boolean isRestored = false; @SuppressWarnings("unchecked") public UnboundedSourceWrapper( String stepName, PipelineOptions pipelineOptions, UnboundedSource<OutputT, CheckpointMarkT> source, int parallelism) throws Exception { this.stepName = stepName; this.serializedOptions = new SerializedPipelineOptions(pipelineOptions); if (source.requiresDeduping()) { LOG.warn("Source {} requires deduping but Flink runner doesn't support this yet.", source); } Coder<CheckpointMarkT> checkpointMarkCoder = source.getCheckpointMarkCoder(); if (checkpointMarkCoder == null) { LOG.info("No CheckpointMarkCoder specified for this source. Won't create snapshots."); checkpointCoder = null; } else { Coder<? extends UnboundedSource<OutputT, CheckpointMarkT>> sourceCoder = (Coder) SerializableCoder.of(new TypeDescriptor<UnboundedSource>() { }); checkpointCoder = KvCoder.of(sourceCoder, checkpointMarkCoder); } // get the splits early. we assume that the generated splits are stable, // this is necessary so that the mapping of state to source is correct // when restoring splitSources = source.split(parallelism, pipelineOptions); } /** * Initialize and restore state before starting execution of the source. */ @Override public void open(Configuration parameters) throws Exception { runtimeContext = (StreamingRuntimeContext) getRuntimeContext(); // figure out which split sources we're responsible for int subtaskIndex = runtimeContext.getIndexOfThisSubtask(); int numSubtasks = runtimeContext.getNumberOfParallelSubtasks(); localSplitSources = new ArrayList<>(); localReaders = new ArrayList<>(); pendingCheckpoints = new LinkedHashMap<>(); if (isRestored) { // restore the splitSources from the checkpoint to ensure consistent ordering for (KV<? 
extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> restored: stateForCheckpoint.get()) { localSplitSources.add(restored.getKey()); localReaders.add(restored.getKey().createReader( serializedOptions.getPipelineOptions(), restored.getValue())); } } else { // initialize localReaders and localSources from scratch for (int i = 0; i < splitSources.size(); i++) { if (i % numSubtasks == subtaskIndex) { UnboundedSource<OutputT, CheckpointMarkT> source = splitSources.get(i); UnboundedSource.UnboundedReader<OutputT> reader = source.createReader(serializedOptions.getPipelineOptions(), null); localSplitSources.add(source); localReaders.add(reader); } } } LOG.info("Unbounded Flink Source {}/{} is reading from sources: {}", subtaskIndex, numSubtasks, localSplitSources); } @Override public void run(SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> ctx) throws Exception { context = ctx; FlinkMetricContainer metricContainer = new FlinkMetricContainer(getRuntimeContext()); ReaderInvocationUtil<OutputT, UnboundedSource.UnboundedReader<OutputT>> readerInvoker = new ReaderInvocationUtil<>( stepName, serializedOptions.getPipelineOptions(), metricContainer); if (localReaders.size() == 0) { // do nothing, but still look busy ... 
// also, output a Long.MAX_VALUE watermark since we know that we're not // going to emit anything // we can't return here since Flink requires that all operators stay up, // otherwise checkpointing would not work correctly anymore ctx.emitWatermark(new Watermark(Long.MAX_VALUE)); // wait until this is canceled final Object waitLock = new Object(); while (isRunning) { try { // Flink will interrupt us at some point //noinspection SynchronizationOnLocalVariableOrMethodParameter synchronized (waitLock) { // don't wait indefinitely, in case something goes horribly wrong waitLock.wait(1000); } } catch (InterruptedException e) { if (!isRunning) { // restore the interrupted state, and fall through the loop Thread.currentThread().interrupt(); } } } } else if (localReaders.size() == 1) { // the easy case, we just read from one reader UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(0); boolean dataAvailable = readerInvoker.invokeStart(reader); if (dataAvailable) { emitElement(ctx, reader); } setNextWatermarkTimer(this.runtimeContext); while (isRunning) { dataAvailable = readerInvoker.invokeAdvance(reader); if (dataAvailable) { emitElement(ctx, reader); } else { Thread.sleep(50); } } } else { // a bit more complicated, we are responsible for several localReaders // loop through them and sleep if none of them had any data int numReaders = localReaders.size(); int currentReader = 0; // start each reader and emit data if immediately available for (UnboundedSource.UnboundedReader<OutputT> reader : localReaders) { boolean dataAvailable = readerInvoker.invokeStart(reader); if (dataAvailable) { emitElement(ctx, reader); } } // a flag telling us whether any of the localReaders had data // if no reader had data, sleep for bit boolean hadData = false; while (isRunning) { UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(currentReader); boolean dataAvailable = readerInvoker.invokeAdvance(reader); if (dataAvailable) { emitElement(ctx, reader); hadData = 
true; } currentReader = (currentReader + 1) % numReaders; if (currentReader == 0 && !hadData) { Thread.sleep(50); } else if (currentReader == 0) { hadData = false; } } } } /** * Emit the current element from the given Reader. The reader is guaranteed to have data. */ private void emitElement( SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> ctx, UnboundedSource.UnboundedReader<OutputT> reader) { // make sure that reader state update and element emission are atomic // with respect to snapshots synchronized (ctx.getCheckpointLock()) { OutputT item = reader.getCurrent(); byte[] recordId = reader.getCurrentRecordId(); Instant timestamp = reader.getCurrentTimestamp(); WindowedValue<ValueWithRecordId<OutputT>> windowedValue = WindowedValue.of(new ValueWithRecordId<>(item, recordId), timestamp, GlobalWindow.INSTANCE, PaneInfo.NO_FIRING); ctx.collectWithTimestamp(windowedValue, timestamp.getMillis()); } } @Override public void close() throws Exception { super.close(); if (localReaders != null) { for (UnboundedSource.UnboundedReader<OutputT> reader: localReaders) { reader.close(); } } } @Override public void cancel() { isRunning = false; } @Override public void stop() { isRunning = false; } // ------------------------------------------------------------------------ // Checkpoint and restore // ------------------------------------------------------------------------ @Override public void snapshotState(FunctionSnapshotContext functionSnapshotContext) throws Exception { if (!isRunning) { LOG.debug("snapshotState() called on closed source"); } else { if (checkpointCoder == null) { // no checkpoint coder available in this source return; } stateForCheckpoint.clear(); long checkpointId = functionSnapshotContext.getCheckpointId(); // we checkpoint the sources along with the CheckpointMarkT to ensure // than we have a correct mapping of checkpoints to sources when // restoring List<CheckpointMarkT> checkpointMarks = new ArrayList<>(localSplitSources.size()); for (int i = 0; 
i < localSplitSources.size(); i++) { UnboundedSource<OutputT, CheckpointMarkT> source = localSplitSources.get(i); UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(i); @SuppressWarnings("unchecked") CheckpointMarkT mark = (CheckpointMarkT) reader.getCheckpointMark(); checkpointMarks.add(mark); KV<UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> kv = KV.of(source, mark); stateForCheckpoint.add(kv); } // cleanup old pending checkpoints and add new checkpoint int diff = pendingCheckpoints.size() - MAX_NUMBER_PENDING_CHECKPOINTS; if (diff >= 0) { for (Iterator<Long> iterator = pendingCheckpoints.keySet().iterator(); diff >= 0; diff--) { iterator.next(); iterator.remove(); } } pendingCheckpoints.put(checkpointId, checkpointMarks); } } @Override public void initializeState(FunctionInitializationContext context) throws Exception { if (checkpointCoder == null) { // no checkpoint coder available in this source return; } OperatorStateStore stateStore = context.getOperatorStateStore(); CoderTypeInformation< KV<? 
extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT>> typeInformation = (CoderTypeInformation) new CoderTypeInformation<>(checkpointCoder); stateForCheckpoint = stateStore.getOperatorState( new ListStateDescriptor<>(DefaultOperatorStateBackend.DEFAULT_OPERATOR_STATE_NAME, typeInformation.createSerializer(new ExecutionConfig()))); if (context.isRestored()) { isRestored = true; LOG.info("Having restore state in the UnbounedSourceWrapper."); } else { LOG.info("No restore state for UnbounedSourceWrapper."); } } @Override public void onProcessingTime(long timestamp) throws Exception { if (this.isRunning) { synchronized (context.getCheckpointLock()) { // find minimum watermark over all localReaders long watermarkMillis = Long.MAX_VALUE; for (UnboundedSource.UnboundedReader<OutputT> reader: localReaders) { Instant watermark = reader.getWatermark(); if (watermark != null) { watermarkMillis = Math.min(watermark.getMillis(), watermarkMillis); } } context.emitWatermark(new Watermark(watermarkMillis)); } setNextWatermarkTimer(this.runtimeContext); } } private void setNextWatermarkTimer(StreamingRuntimeContext runtime) { if (this.isRunning) { long watermarkInterval = runtime.getExecutionConfig().getAutoWatermarkInterval(); long timeToNextWatermark = getTimeToNextWatermark(watermarkInterval); runtime.getProcessingTimeService().registerTimer(timeToNextWatermark, this); } } private long getTimeToNextWatermark(long watermarkInterval) { return System.currentTimeMillis() + watermarkInterval; } /** * Visible so that we can check this in tests. Must not be used for anything else. */ @VisibleForTesting public List<? extends UnboundedSource<OutputT, CheckpointMarkT>> getSplitSources() { return splitSources; } /** * Visible so that we can check this in tests. Must not be used for anything else. */ @VisibleForTesting public List<? 
extends UnboundedSource<OutputT, CheckpointMarkT>> getLocalSplitSources() { return localSplitSources; } @Override public void notifyCheckpointComplete(long checkpointId) throws Exception { List<CheckpointMarkT> checkpointMarks = pendingCheckpoints.get(checkpointId); if (checkpointMarks != null) { // remove old checkpoints including the current one Iterator<Long> iterator = pendingCheckpoints.keySet().iterator(); long currentId; do { currentId = iterator.next(); iterator.remove(); } while (currentId != checkpointId); // confirm all marks for (CheckpointMarkT mark : checkpointMarks) { mark.finalizeCheckpoint(); } } } }
apache-2.0
susinda/product-mss
carbon-mss/components/org.wso2.carbon.mss/src/test/java/org/wso2/carbon/mss/internal/mime/MimeMapperTest.java
1232
/*
 * Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.mss.internal.mime;

import org.junit.Assert;
import org.junit.Test;

/**
 * Unit tests for {@link MimeMapper}: resolution of MIME types from file extensions.
 */
public class MimeMapperTest {

    /** A registered extension must resolve to its well-known MIME type. */
    @Test
    public void testMimeMappingForKnownExtension() throws MimeMappingException {
        Assert.assertEquals("image/png", MimeMapper.getMimeType("png"));
    }

    /** An extension with no registered mapping must raise {@link MimeMappingException}. */
    @Test(expected = MimeMappingException.class)
    public void testMimeMappingForUnknownExtension() throws MimeMappingException {
        MimeMapper.getMimeType("unknownext");
    }
}
apache-2.0
dangdangdotcom/elastic-job
elasticjob-lite/elasticjob-lite-spring/elasticjob-lite-spring-namespace/src/test/java/org/apache/shardingsphere/elasticjob/lite/spring/namespace/job/AbstractOneOffJobSpringIntegrateTest.java
3370
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.shardingsphere.elasticjob.lite.spring.namespace.job; import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.elasticjob.infra.concurrent.BlockUtils; import org.apache.shardingsphere.elasticjob.lite.api.bootstrap.impl.OneOffJobBootstrap; import org.apache.shardingsphere.elasticjob.lite.internal.schedule.JobRegistry; import org.apache.shardingsphere.elasticjob.lite.spring.namespace.fixture.job.DataflowElasticJob; import org.apache.shardingsphere.elasticjob.lite.spring.namespace.fixture.job.FooSimpleElasticJob; import org.apache.shardingsphere.elasticjob.lite.spring.namespace.test.AbstractZookeeperJUnit4SpringContextTests; import org.apache.shardingsphere.elasticjob.reg.base.CoordinatorRegistryCenter; import org.junit.After; import org.junit.Before; import org.junit.Test; import javax.annotation.Resource; import static org.junit.Assert.assertTrue; @RequiredArgsConstructor public abstract class AbstractOneOffJobSpringIntegrateTest extends AbstractZookeeperJUnit4SpringContextTests { private final String simpleJobName; private final String throughputDataflowJobName; @Resource private CoordinatorRegistryCenter regCenter; @Before @After public void reset() { 
FooSimpleElasticJob.reset(); DataflowElasticJob.reset(); } @After public void tearDown() { JobRegistry.getInstance().shutdown(simpleJobName); JobRegistry.getInstance().shutdown(throughputDataflowJobName); } @Test public void assertSpringJobBean() { assertSimpleElasticJobBean(); assertThroughputDataflowElasticJobBean(); } private void assertSimpleElasticJobBean() { OneOffJobBootstrap bootstrap = applicationContext.getBean(simpleJobName, OneOffJobBootstrap.class); bootstrap.execute(); while (!FooSimpleElasticJob.isCompleted()) { BlockUtils.waitingShortTime(); } assertTrue(FooSimpleElasticJob.isCompleted()); assertTrue(regCenter.isExisted("/" + simpleJobName + "/sharding")); } private void assertThroughputDataflowElasticJobBean() { OneOffJobBootstrap bootstrap = applicationContext.getBean(throughputDataflowJobName, OneOffJobBootstrap.class); bootstrap.execute(); while (!DataflowElasticJob.isCompleted()) { BlockUtils.waitingShortTime(); } assertTrue(DataflowElasticJob.isCompleted()); assertTrue(regCenter.isExisted("/" + throughputDataflowJobName + "/sharding")); } }
apache-2.0
liveontologies/elk-reasoner
elk-reasoner/src/main/java/org/semanticweb/elk/reasoner/entailments/impl/DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl.java
2181
/*- * #%L * ELK Reasoner Core * $Id:$ * $HeadURL:$ * %% * Copyright (C) 2011 - 2016 Department of Computer Science, University of Oxford * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.semanticweb.elk.reasoner.entailments.impl; import java.util.Collections; import java.util.List; import org.semanticweb.elk.owl.interfaces.ElkObjectPropertyAssertionAxiom; import org.semanticweb.elk.reasoner.entailments.model.DerivedClassInclusionEntailsObjectPropertyAssertionAxiom; import org.semanticweb.elk.reasoner.entailments.model.Entailment; import org.semanticweb.elk.reasoner.entailments.model.EntailmentInference; import org.semanticweb.elk.reasoner.entailments.model.ObjectPropertyAssertionAxiomEntailment; import org.semanticweb.elk.reasoner.saturation.conclusions.model.SubClassInclusionComposed; public class DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl extends AbstractAxiomEntailmentInference<ElkObjectPropertyAssertionAxiom, ObjectPropertyAssertionAxiomEntailment> implements DerivedClassInclusionEntailsObjectPropertyAssertionAxiom { private final SubClassInclusionComposed reason_; public DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl( final ObjectPropertyAssertionAxiomEntailment conclusion, final SubClassInclusionComposed reason) { super(conclusion); this.reason_ = reason; } @Override public List<? 
extends Entailment> getPremises() { return Collections.emptyList(); } @Override public SubClassInclusionComposed getReason() { return reason_; } @Override public <O> O accept(final EntailmentInference.Visitor<O> visitor) { return visitor.visit(this); } }
apache-2.0
OSS-TheWeatherCompany/dasein-cloud-core
src/main/java/org/dasein/cloud/AbstractCloud.java
3068
/** * Copyright (C) 2009-2014 Dell, Inc. * See annotations for authorship information * * ==================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== */ package org.dasein.cloud; import org.dasein.cloud.admin.AdminServices; import org.dasein.cloud.ci.CIServices; import org.dasein.cloud.compute.ComputeServices; import org.dasein.cloud.identity.IdentityServices; import org.dasein.cloud.network.NetworkServices; import org.dasein.cloud.platform.PlatformServices; import javax.annotation.Nonnull; import javax.annotation.Nullable; /** * Simple base implementation of a cloud provider bootstrap object that defaults all services to <code>null</code>. * @author George Reese * @version 2013.07 added javadoc, fixed annotations on data center services, made it return an NPE * @since unknown */ public abstract class AbstractCloud extends CloudProvider { /** * Constructs a cloud provider instance. */ public AbstractCloud() { } @Override public @Nullable AdminServices getAdminServices() { return null; } @Override public @Nullable ComputeServices getComputeServices() { CloudProvider compute = getComputeCloud(); return (compute == null ? 
null : compute.getComputeServices()); } @Override public @Nonnull ContextRequirements getContextRequirements() { return new ContextRequirements( new ContextRequirements.Field("apiKeys", ContextRequirements.FieldType.KEYPAIR), new ContextRequirements.Field("x509", ContextRequirements.FieldType.KEYPAIR, false) ); } @Override public @Nullable CIServices getCIServices() { CloudProvider compute = getComputeCloud(); return (compute == null ? null : compute.getCIServices()); } @Override public @Nullable IdentityServices getIdentityServices() { CloudProvider compute = getComputeCloud(); return (compute == null ? null : compute.getIdentityServices()); } @Override public @Nullable NetworkServices getNetworkServices() { CloudProvider compute = getComputeCloud(); return (compute == null ? null : compute.getNetworkServices()); } @Override public @Nullable PlatformServices getPlatformServices() { CloudProvider compute = getComputeCloud(); return ( compute == null ? null : compute.getPlatformServices() ); } }
apache-2.0
apetrucci/katharsis-framework
katharsis-jpa/src/test/java/io/katharsis/jpa/meta/MetaPrimitiveTypeTest.java
3187
package io.katharsis.jpa.meta;

import java.io.Serializable;
import java.util.UUID;

import org.junit.Assert;
import org.junit.Test;

import io.katharsis.meta.model.MetaPrimitiveType;

/**
 * Smoke tests for {@link MetaPrimitiveType}: each test verifies that
 * {@code setImplementationType} accepts the given implementation class
 * without throwing. No return values are asserted because the setter is
 * void; a test passes if construction and assignment complete normally.
 */
public class MetaPrimitiveTypeTest {

	@Test
	public void testString() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(String.class);
	}

	@Test
	public void testInteger() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Integer.class);
	}

	@Test
	public void testShort() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Short.class);
	}

	@Test
	public void testLong() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Long.class);
	}

	@Test
	public void testFloat() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Float.class);
	}

	@Test
	public void testDouble() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Double.class);
	}

	@Test
	public void testBoolean() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Boolean.class);
	}

	@Test
	public void testByte() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(Byte.class);
	}

	@Test
	public void testUUID() {
		// The previously generated random UUID local was unused; only the class literal matters.
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(UUID.class);
	}

	enum TestEnum {
		A
	}

	@Test
	public void testEnum() {
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(TestEnum.class);
	}

	/** Fixture type resolvable through a static {@code parse(String)} factory. */
	public static class TestObjectWithParse {

		int value;

		public static TestObjectWithParse parse(String value) {
			TestObjectWithParse parser = new TestObjectWithParse();
			parser.value = Integer.parseInt(value);
			return parser;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null || getClass() != obj.getClass())
				return false;
			return value == ((TestObjectWithParse) obj).value;
		}

		// equals() is overridden, so hashCode() must be consistent with it.
		@Override
		public int hashCode() {
			return value;
		}
	}

	/** Fixture type resolvable through a {@code String}-accepting constructor. */
	public static class TestObjectWithConstructor implements Serializable {

		private static final long serialVersionUID = 1L;

		int value;

		public TestObjectWithConstructor() {
		}

		public TestObjectWithConstructor(String value) {
			this.value = Integer.parseInt(value);
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null || getClass() != obj.getClass())
				return false;
			return value == ((TestObjectWithConstructor) obj).value;
		}

		// equals() is overridden, so hashCode() must be consistent with it.
		@Override
		public int hashCode() {
			return value;
		}
	}

	@Test
	public void testParse() {
		// Dead local fixture instance removed; the setter only needs the class literal.
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(TestObjectWithParse.class);
	}

	@Test
	public void testOther() {
		// Dead local fixture instance removed; the setter only needs the class literal.
		MetaPrimitiveType type = new MetaPrimitiveType();
		type.setImplementationType(TestObjectWithConstructor.class);
	}
}
apache-2.0
Atos-FiwareOps/sla-framework
sla-core/sla-repository/src/main/java/eu/atos/sla/dao/jpa/TemplateDAOJpa.java
5164
package eu.atos.sla.dao.jpa; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.EntityNotFoundException; import javax.persistence.NoResultException; import javax.persistence.PersistenceContext; import javax.persistence.Query; import javax.persistence.TypedQuery; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Repository; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import eu.atos.sla.dao.ITemplateDAO; import eu.atos.sla.datamodel.ITemplate; import eu.atos.sla.datamodel.bean.Template; @Repository("TemplateRepository") public class TemplateDAOJpa implements ITemplateDAO { private static Logger logger = LoggerFactory.getLogger(TemplateDAOJpa.class); private EntityManager entityManager; @PersistenceContext(unitName = "slarepositoryDB") public void setEntityManager(EntityManager entityManager) { this.entityManager = entityManager; } public EntityManager getEntityManager() { return entityManager; } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public Template getById(Long id) { return entityManager.find(Template.class, id); } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public Template getByUuid(String uuid) { try { Query query = entityManager .createNamedQuery(Template.QUERY_FIND_BY_UUID); query.setParameter("uuid", uuid); Template template = null; template = (Template) query.getSingleResult(); return template; } catch (NoResultException e) { logger.debug("No Result found: " + e); return null; } } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public List<ITemplate> search(String providerId, String []serviceIds) { TypedQuery<ITemplate> query = entityManager.createNamedQuery( Template.QUERY_SEARCH, ITemplate.class); query.setParameter("providerId", providerId); 
query.setParameter("serviceIds", (serviceIds!=null)?Arrays.asList(serviceIds):null); query.setParameter("flagServiceIds", (serviceIds!=null)?"flag":null); logger.debug("providerId:{} - serviceIds:{}" , providerId, (serviceIds!=null)?Arrays.asList(serviceIds):null); List<ITemplate> templates = new ArrayList<ITemplate>(); templates = (List<ITemplate>) query.getResultList(); if (templates != null) { logger.debug("Number of templates:" + templates.size()); } else { logger.debug("No Result found."); } return templates; } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public List<ITemplate> getByAgreement(String agreement) { TypedQuery<ITemplate> query = entityManager.createNamedQuery( Template.QUERY_FIND_BY_AGREEMENT, ITemplate.class); query.setParameter("agreement", agreement); List<ITemplate> templates = new ArrayList<ITemplate>(); templates = (List<ITemplate>) query.getResultList(); if (templates != null) { logger.debug("Number of templates:" + templates.size()); } else { logger.debug("No Result found."); } return templates; } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public List<ITemplate> getAll() { TypedQuery<ITemplate> query = entityManager.createNamedQuery( Template.QUERY_FIND_ALL, ITemplate.class); List<ITemplate> templates = new ArrayList<ITemplate>(); templates = (List<ITemplate>) query.getResultList(); if (templates != null) { logger.debug("Number of templates:" + templates.size()); } else { logger.debug("No Result found."); } return templates; } @Override @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public ITemplate save(ITemplate template) { logger.info("template.getUuid() "+template.getUuid()); entityManager.persist(template); entityManager.flush(); return template; } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public boolean update(String uuid, ITemplate template) { Template templateDB = null; try { Query query = 
entityManager.createNamedQuery(Template.QUERY_FIND_BY_UUID); query.setParameter("uuid", uuid); templateDB = (Template)query.getSingleResult(); } catch (NoResultException e) { logger.debug("No Result found: " + e); } if (templateDB!=null){ template.setId(templateDB.getId()); logger.info("template to update with id"+template.getId()); entityManager.merge(template); entityManager.flush(); }else return false; return true; } @Transactional(readOnly = false, propagation = Propagation.REQUIRED) public boolean delete(ITemplate template) { try { Template templateDeleted = entityManager.getReference(Template.class, template.getId()); entityManager.remove(templateDeleted); entityManager.flush(); return true; } catch (EntityNotFoundException e) { logger.debug("Template[{}] not found", template.getId()); return false; } } }
apache-2.0
gabedwrds/cas
support/cas-server-support-oauth/src/main/java/org/apereo/cas/ticket/code/DefaultOAuthCodeFactory.java
1592
package org.apereo.cas.ticket.code; import org.apereo.cas.authentication.Authentication; import org.apereo.cas.authentication.principal.Service; import org.apereo.cas.ticket.ExpirationPolicy; import org.apereo.cas.ticket.Ticket; import org.apereo.cas.ticket.TicketFactory; import org.apereo.cas.ticket.UniqueTicketIdGenerator; import org.apereo.cas.util.DefaultUniqueTicketIdGenerator; /** * Default OAuth code factory. * * @author Jerome Leleu * @since 5.0.0 */ public class DefaultOAuthCodeFactory implements OAuthCodeFactory { /** Default instance for the ticket id generator. */ protected final UniqueTicketIdGenerator oAuthCodeIdGenerator; /** ExpirationPolicy for refresh tokens. */ protected final ExpirationPolicy expirationPolicy; public DefaultOAuthCodeFactory(final ExpirationPolicy expirationPolicy) { this(new DefaultUniqueTicketIdGenerator(), expirationPolicy); } public DefaultOAuthCodeFactory(final UniqueTicketIdGenerator refreshTokenIdGenerator, final ExpirationPolicy expirationPolicy) { this.oAuthCodeIdGenerator = refreshTokenIdGenerator; this.expirationPolicy = expirationPolicy; } @Override public OAuthCode create(final Service service, final Authentication authentication) { final String codeId = this.oAuthCodeIdGenerator.getNewTicketId(OAuthCode.PREFIX); return new OAuthCodeImpl(codeId, service, authentication, this.expirationPolicy); } @Override public <T extends TicketFactory> T get(final Class<? extends Ticket> clazz) { return (T) this; } }
apache-2.0
jwren/intellij-community
platform/lang-impl/src/com/intellij/codeInsight/daemon/impl/PassExecutorService.java
25837
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.codeInsight.daemon.impl; import com.intellij.codeHighlighting.EditorBoundHighlightingPass; import com.intellij.codeHighlighting.HighlightingPass; import com.intellij.codeHighlighting.TextEditorHighlightingPass; import com.intellij.codeHighlighting.TextEditorHighlightingPassRegistrar; import com.intellij.concurrency.Job; import com.intellij.concurrency.JobLauncher; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.application.ex.ApplicationManagerEx; import com.intellij.openapi.application.ex.ApplicationUtil; import com.intellij.openapi.application.impl.ApplicationImpl; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.fileEditor.FileEditor; import com.intellij.openapi.fileEditor.TextEditor; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.util.Functions; import com.intellij.util.containers.CollectionFactory; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.HashingStrategy; import com.intellij.util.ui.UIUtil; import it.unimi.dsi.fastutil.ints.Int2ObjectMap; import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; 
import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Matcher; import java.util.regex.Pattern; final class PassExecutorService implements Disposable { static final Logger LOG = Logger.getInstance(PassExecutorService.class); private static final boolean CHECK_CONSISTENCY = ApplicationManager.getApplication().isUnitTestMode(); private final Map<ScheduledPass, Job<Void>> mySubmittedPasses = new ConcurrentHashMap<>(); private final Project myProject; private volatile boolean isDisposed; private final AtomicInteger nextAvailablePassId; // used to assign random id to a pass if not set PassExecutorService(@NotNull Project project) { myProject = project; nextAvailablePassId = ((TextEditorHighlightingPassRegistrarImpl)TextEditorHighlightingPassRegistrar.getInstance(myProject)).getNextAvailableId(); } @Override public void dispose() { cancelAll(true); // some workers could, although idle, still retain some thread references for some time causing leak hunter to frown ForkJoinPool.commonPool().awaitQuiescence(1, TimeUnit.SECONDS); isDisposed = true; } void cancelAll(boolean waitForTermination) { for (Map.Entry<ScheduledPass, Job<Void>> entry : mySubmittedPasses.entrySet()) { Job<Void> job = entry.getValue(); ScheduledPass pass = entry.getKey(); pass.myUpdateProgress.cancel(); job.cancel(); } try { if (waitForTermination) { while (!waitFor(50)) { int i = 0; } } } catch (ProcessCanceledException ignored) { } catch (Error | RuntimeException e) { throw e; } catch (Throwable throwable) { LOG.error(throwable); } finally { mySubmittedPasses.clear(); } } void submitPasses(@NotNull Map<FileEditor, HighlightingPass[]> passesMap, // a list of opened FileEditors for each Document. 
The first FileEditor in the list is the preferred one @NotNull Map<Document, List<FileEditor>> documentToEditors, @NotNull DaemonProgressIndicator updateProgress) { if (isDisposed()) return; Map<FileEditor, List<TextEditorHighlightingPass>> documentBoundPasses = new HashMap<>(); Map<FileEditor, List<EditorBoundHighlightingPass>> editorBoundPasses = new HashMap<>(); Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass = new HashMap<>(); List<ScheduledPass> freePasses = new ArrayList<>(documentToEditors.size() * 5); AtomicInteger threadsToStartCountdown = new AtomicInteger(0); for (Map.Entry<FileEditor, HighlightingPass[]> entry : passesMap.entrySet()) { FileEditor fileEditor = entry.getKey(); HighlightingPass[] passes = entry.getValue(); for (HighlightingPass pass : passes) { Int2ObjectMap<TextEditorHighlightingPass> thisEditorId2Pass = id2Pass.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(30)); if (pass instanceof EditorBoundHighlightingPass) { EditorBoundHighlightingPass editorPass = (EditorBoundHighlightingPass)pass; // have to make ids unique for this document assignUniqueId(editorPass, thisEditorId2Pass); editorBoundPasses.computeIfAbsent(fileEditor, __->new ArrayList<>()).add(editorPass); } else if (pass instanceof TextEditorHighlightingPass) { TextEditorHighlightingPass tePass = (TextEditorHighlightingPass)pass; assignUniqueId(tePass, thisEditorId2Pass); documentBoundPasses.computeIfAbsent(fileEditor, __->new ArrayList<>()).add(tePass); } else { // generic HighlightingPass, run all of them concurrently freePasses.add(new ScheduledPass(fileEditor, pass, updateProgress, threadsToStartCountdown)); } } } List<ScheduledPass> dependentPasses = new ArrayList<>(documentToEditors.size() * 10); // fileEditor-> (passId -> created pass) Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted = new HashMap<>(passesMap.size()); for (Map.Entry<Document, List<FileEditor>> entry : documentToEditors.entrySet()) { List<FileEditor> fileEditors = 
entry.getValue(); FileEditor preferredFileEditor = fileEditors.get(0); // assumption: the preferred fileEditor is stored first List<TextEditorHighlightingPass> passes = documentBoundPasses.get(preferredFileEditor); if (passes == null || passes.isEmpty()) { continue; } sortById(passes); for (TextEditorHighlightingPass pass : passes) { createScheduledPass(preferredFileEditor, pass, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown); } } for (Map.Entry<FileEditor, List<EditorBoundHighlightingPass>> entry : editorBoundPasses.entrySet()) { FileEditor fileEditor = entry.getKey(); Collection<EditorBoundHighlightingPass> createdEditorBoundPasses = entry.getValue(); for (EditorBoundHighlightingPass pass : createdEditorBoundPasses) { createScheduledPass(fileEditor, pass, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown); } } if (CHECK_CONSISTENCY && !ApplicationManagerEx.isInStressTest()) { assertConsistency(freePasses, toBeSubmitted, threadsToStartCountdown); } if (LOG.isDebugEnabled()) { Set<VirtualFile> vFiles = ContainerUtil.map2Set(passesMap.keySet(), FileEditor::getFile); log(updateProgress, null, vFiles + " ----- starting " + threadsToStartCountdown.get(), freePasses); } for (ScheduledPass dependentPass : dependentPasses) { mySubmittedPasses.put(dependentPass, Job.nullJob()); } for (ScheduledPass freePass : freePasses) { submit(freePass); } } private void assignUniqueId(@NotNull TextEditorHighlightingPass pass, @NotNull Int2ObjectMap<TextEditorHighlightingPass> id2Pass) { int id = pass.getId(); if (id == -1 || id == 0) { id = nextAvailablePassId.incrementAndGet(); pass.setId(id); } TextEditorHighlightingPass prevPass = id2Pass.put(id, pass); if (prevPass != null) { LOG.error("Duplicate pass id found: "+id+". 
Both passes returned the same getId(): "+prevPass+" ("+prevPass.getClass() +") and "+pass+" ("+pass.getClass()+")"); } } private void assertConsistency(@NotNull List<ScheduledPass> freePasses, @NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted, @NotNull AtomicInteger threadsToStartCountdown) { assert threadsToStartCountdown.get() == toBeSubmitted.values().stream().mapToInt(m->m.size()).sum(); Map<ScheduledPass, Pair<ScheduledPass, Integer>> id2Visits = CollectionFactory.createCustomHashingStrategyMap(new HashingStrategy<>() { @Override public int hashCode(@Nullable PassExecutorService.ScheduledPass sp) { if (sp == null) return 0; return ((TextEditorHighlightingPass)sp.myPass).getId() * 31 + sp.myFileEditor.hashCode(); } @Override public boolean equals(@Nullable PassExecutorService.ScheduledPass sp1, @Nullable PassExecutorService.ScheduledPass sp2) { if (sp1 == null || sp2 == null) return sp1 == sp2; int id1 = ((TextEditorHighlightingPass)sp1.myPass).getId(); int id2 = ((TextEditorHighlightingPass)sp2.myPass).getId(); return id1 == id2 && sp1.myFileEditor == sp2.myFileEditor; } }); for (ScheduledPass freePass : freePasses) { HighlightingPass pass = freePass.myPass; if (pass instanceof TextEditorHighlightingPass) { id2Visits.put(freePass, Pair.create(freePass, 0)); checkConsistency(freePass, id2Visits); } } for (Map.Entry<ScheduledPass, Pair<ScheduledPass, Integer>> entry : id2Visits.entrySet()) { int count = entry.getValue().second; assert count == 0 : entry.getKey(); } assert id2Visits.size() == threadsToStartCountdown.get() : "Expected "+threadsToStartCountdown+" but got "+id2Visits.size()+": "+id2Visits; } private void checkConsistency(@NotNull ScheduledPass pass, Map<ScheduledPass, Pair<ScheduledPass, Integer>> id2Visits) { for (ScheduledPass succ : ContainerUtil.concat(pass.mySuccessorsOnCompletion, pass.mySuccessorsOnSubmit)) { Pair<ScheduledPass, Integer> succPair = id2Visits.get(succ); if (succPair == null) { succPair = Pair.create(succ, 
succ.myRunningPredecessorsCount.get()); id2Visits.put(succ, succPair); } int newPred = succPair.second - 1; id2Visits.put(succ, Pair.create(succ, newPred)); assert newPred >= 0; if (newPred == 0) { checkConsistency(succ, id2Visits); } } } @NotNull private ScheduledPass createScheduledPass(@NotNull FileEditor fileEditor, @NotNull TextEditorHighlightingPass pass, @NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted, @NotNull Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass, @NotNull List<ScheduledPass> freePasses, @NotNull List<ScheduledPass> dependentPasses, @NotNull DaemonProgressIndicator updateProgress, @NotNull AtomicInteger threadsToStartCountdown) { Int2ObjectMap<ScheduledPass> thisEditorId2ScheduledPass = toBeSubmitted.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(20)); Int2ObjectMap<TextEditorHighlightingPass> thisEditorId2Pass = id2Pass.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(20)); int passId = pass.getId(); ScheduledPass scheduledPass = thisEditorId2ScheduledPass.get(passId); if (scheduledPass != null) return scheduledPass; scheduledPass = new ScheduledPass(fileEditor, pass, updateProgress, threadsToStartCountdown); threadsToStartCountdown.incrementAndGet(); thisEditorId2ScheduledPass.put(passId, scheduledPass); for (int predecessorId : pass.getCompletionPredecessorIds()) { ScheduledPass predecessor = findOrCreatePredecessorPass(fileEditor, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown, predecessorId, thisEditorId2ScheduledPass, thisEditorId2Pass); if (predecessor != null) { predecessor.addSuccessorOnCompletion(scheduledPass); } } for (int predecessorId : pass.getStartingPredecessorIds()) { ScheduledPass predecessor = findOrCreatePredecessorPass(fileEditor, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown, predecessorId, thisEditorId2ScheduledPass, thisEditorId2Pass); if (predecessor != null) { 
predecessor.addSuccessorOnSubmit(scheduledPass); } } if (scheduledPass.myRunningPredecessorsCount.get() == 0 && !freePasses.contains(scheduledPass)) { freePasses.add(scheduledPass); } else if (!dependentPasses.contains(scheduledPass)) { dependentPasses.add(scheduledPass); } if (pass.isRunIntentionPassAfter() && fileEditor instanceof TextEditor) { Editor editor = ((TextEditor)fileEditor).getEditor(); VirtualFile virtualFile = fileEditor.getFile(); PsiFile psiFile = virtualFile == null ? null : ReadAction.compute(() -> PsiManager.getInstance(myProject).findFile(virtualFile)); if (psiFile != null) { ShowIntentionsPass ip = new ShowIntentionsPass(psiFile, editor, false); assignUniqueId(ip, thisEditorId2Pass); ip.setCompletionPredecessorIds(new int[]{passId}); createScheduledPass(fileEditor, ip, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown); } } return scheduledPass; } private ScheduledPass findOrCreatePredecessorPass(@NotNull FileEditor fileEditor, @NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted, @NotNull Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass, @NotNull List<ScheduledPass> freePasses, @NotNull List<ScheduledPass> dependentPasses, @NotNull DaemonProgressIndicator updateProgress, @NotNull AtomicInteger myThreadsToStartCountdown, int predecessorId, @NotNull Int2ObjectMap<ScheduledPass> thisEditorId2ScheduledPass, @NotNull Int2ObjectMap<? extends TextEditorHighlightingPass> thisEditorId2Pass) { ScheduledPass predecessor = thisEditorId2ScheduledPass.get(predecessorId); if (predecessor == null) { TextEditorHighlightingPass textEditorPass = thisEditorId2Pass.get(predecessorId); predecessor = textEditorPass == null ? 
null : createScheduledPass(fileEditor, textEditorPass, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, myThreadsToStartCountdown); } return predecessor; } private void submit(@NotNull ScheduledPass pass) { if (!pass.myUpdateProgress.isCanceled()) { Job<Void> job = JobLauncher.getInstance().submitToJobThread(pass, future -> { try { if (!future.isCancelled()) { // for canceled task .get() generates CancellationException which is expensive future.get(); } } catch (CancellationException | InterruptedException ignored) { } catch (ExecutionException e) { LOG.error(e.getCause()); } }); mySubmittedPasses.put(pass, job); } } private final class ScheduledPass implements Runnable { private final FileEditor myFileEditor; private final HighlightingPass myPass; private final AtomicInteger myThreadsToStartCountdown; private final AtomicInteger myRunningPredecessorsCount = new AtomicInteger(0); private final List<ScheduledPass> mySuccessorsOnCompletion = new ArrayList<>(); private final List<ScheduledPass> mySuccessorsOnSubmit = new ArrayList<>(); @NotNull private final DaemonProgressIndicator myUpdateProgress; private ScheduledPass(@NotNull FileEditor fileEditor, @NotNull HighlightingPass pass, @NotNull DaemonProgressIndicator progressIndicator, @NotNull AtomicInteger threadsToStartCountdown) { myFileEditor = fileEditor; myPass = pass; myThreadsToStartCountdown = threadsToStartCountdown; myUpdateProgress = progressIndicator; } @Override public void run() { ((ApplicationImpl)ApplicationManager.getApplication()).executeByImpatientReader(() -> { try { doRun(); } catch (ApplicationUtil.CannotRunReadActionException e) { myUpdateProgress.cancel(); } catch (RuntimeException | Error e) { saveException(e, myUpdateProgress); throw e; } }); } private void doRun() { if (myUpdateProgress.isCanceled()) return; log(myUpdateProgress, myPass, "Started. 
"); for (ScheduledPass successor : mySuccessorsOnSubmit) { int predecessorsToRun = successor.myRunningPredecessorsCount.decrementAndGet(); if (predecessorsToRun == 0) { submit(successor); } } ProgressManager.getInstance().executeProcessUnderProgress(() -> { boolean success = ApplicationManagerEx.getApplicationEx().tryRunReadAction(() -> { try { if (DumbService.getInstance(myProject).isDumb() && !DumbService.isDumbAware(myPass)) { return; } if (!myUpdateProgress.isCanceled() && !myProject.isDisposed()) { myPass.collectInformation(myUpdateProgress); } } catch (ProcessCanceledException e) { log(myUpdateProgress, myPass, "Canceled "); if (!myUpdateProgress.isCanceled()) { myUpdateProgress.cancel(e); //in case when some smart asses throw PCE just for fun } } catch (RuntimeException | Error e) { myUpdateProgress.cancel(e); LOG.error(e); throw e; } }); if (!success) { myUpdateProgress.cancel(); } }, myUpdateProgress); log(myUpdateProgress, myPass, "Finished. "); if (!myUpdateProgress.isCanceled()) { applyInformationToEditorsLater(myFileEditor, myPass, myUpdateProgress, myThreadsToStartCountdown, ()->{ for (ScheduledPass successor : mySuccessorsOnCompletion) { int predecessorsToRun = successor.myRunningPredecessorsCount.decrementAndGet(); if (predecessorsToRun == 0) { submit(successor); } } }); } } @NonNls @Override public String toString() { return "SP: " + myPass; } private void addSuccessorOnCompletion(@NotNull ScheduledPass successor) { mySuccessorsOnCompletion.add(successor); successor.myRunningPredecessorsCount.incrementAndGet(); } private void addSuccessorOnSubmit(@NotNull ScheduledPass successor) { mySuccessorsOnSubmit.add(successor); successor.myRunningPredecessorsCount.incrementAndGet(); } } private void applyInformationToEditorsLater(@NotNull FileEditor fileEditor, @NotNull HighlightingPass pass, @NotNull DaemonProgressIndicator updateProgress, @NotNull AtomicInteger threadsToStartCountdown, @NotNull Runnable callbackOnApplied) { 
ApplicationManager.getApplication().invokeLater(() -> { if (isDisposed() || !fileEditor.isValid()) { updateProgress.cancel(); } if (updateProgress.isCanceled()) { log(updateProgress, pass, " is canceled during apply, sorry"); return; } try { if (UIUtil.isShowing(fileEditor.getComponent())) { pass.applyInformationToEditor(); repaintErrorStripeAndIcon(fileEditor); if (pass instanceof TextEditorHighlightingPass) { FileStatusMap fileStatusMap = DaemonCodeAnalyzerEx.getInstanceEx(myProject).getFileStatusMap(); Document document = ((TextEditorHighlightingPass)pass).getDocument(); int passId = ((TextEditorHighlightingPass)pass).getId(); fileStatusMap.markFileUpToDate(document, passId); } log(updateProgress, pass, " Applied"); } } catch (ProcessCanceledException e) { log(updateProgress, pass, "Error " + e); throw e; } catch (RuntimeException e) { VirtualFile file = fileEditor.getFile(); FileType fileType = file == null ? null : file.getFileType(); String message = "Exception while applying information to " + fileEditor + "("+fileType+")"; log(updateProgress, pass, message + e); throw new RuntimeException(message, e); } if (threadsToStartCountdown.decrementAndGet() == 0) { HighlightingSessionImpl.waitForAllSessionsHighlightInfosApplied(updateProgress); log(updateProgress, pass, "Stopping "); updateProgress.stopIfRunning(); clearStaleEntries(); } else { log(updateProgress, pass, "Finished but there are passes in the queue: " + threadsToStartCountdown.get()); } callbackOnApplied.run(); }, updateProgress.getModalityState(), pass.getExpiredCondition()); } private void clearStaleEntries() { mySubmittedPasses.keySet().removeIf(pass -> pass.myUpdateProgress.isCanceled()); } private void repaintErrorStripeAndIcon(@NotNull FileEditor fileEditor) { if (fileEditor instanceof TextEditor) { DefaultHighlightInfoProcessor.repaintErrorStripeAndIcon(((TextEditor)fileEditor).getEditor(), myProject); } } private boolean isDisposed() { return isDisposed || myProject.isDisposed(); } @NotNull 
List<HighlightingPass> getAllSubmittedPasses() { List<HighlightingPass> result = new ArrayList<>(mySubmittedPasses.size()); for (ScheduledPass scheduledPass : mySubmittedPasses.keySet()) { if (!scheduledPass.myUpdateProgress.isCanceled()) { result.add(scheduledPass.myPass); } } return result; } private static void sortById(@NotNull List<? extends TextEditorHighlightingPass> result) { ContainerUtil.quickSort(result, Comparator.comparingInt(TextEditorHighlightingPass::getId)); } private static int getThreadNum() { Matcher matcher = Pattern.compile("JobScheduler FJ pool (\\d*)/(\\d*)").matcher(Thread.currentThread().getName()); String num = matcher.matches() ? matcher.group(1) : null; return StringUtil.parseInt(num, 0); } static void log(ProgressIndicator progressIndicator, HighlightingPass pass, @NonNls Object @NotNull ... info) { if (LOG.isDebugEnabled()) { Document document = pass instanceof TextEditorHighlightingPass ? ((TextEditorHighlightingPass)pass).getDocument() : null; CharSequence docText = document == null ? "" : ": '" + StringUtil.first(document.getCharsSequence(), 10, true)+ "'"; synchronized (PassExecutorService.class) { String infos = StringUtil.join(info, Functions.TO_STRING(), " "); String message = StringUtil.repeatSymbol(' ', getThreadNum() * 4) + " " + pass + " " + infos + "; progress=" + (progressIndicator == null ? null : progressIndicator.hashCode()) + " " + (progressIndicator == null ? "?" : progressIndicator.isCanceled() ? 
"X" : "V") + docText; LOG.debug(message); //System.out.println(message); } } } private static final Key<Throwable> THROWABLE_KEY = Key.create("THROWABLE_KEY"); static void saveException(@NotNull Throwable e, @NotNull DaemonProgressIndicator indicator) { indicator.putUserDataIfAbsent(THROWABLE_KEY, e); } @TestOnly static Throwable getSavedException(@NotNull DaemonProgressIndicator indicator) { return indicator.getUserData(THROWABLE_KEY); } // return true if terminated boolean waitFor(int millis) throws Throwable { try { for (Job<Void> job : mySubmittedPasses.values()) { job.waitForCompletion(millis); } return true; } catch (TimeoutException ignored) { return false; } catch (InterruptedException e) { return true; } catch (ExecutionException e) { throw e.getCause(); } } }
apache-2.0
hpehl/hal.next
app/src/main/java/org/jboss/hal/client/runtime/subsystem/elytron/wizardpassword/PasswordState.java
787
/* * Copyright 2015-2016 Red Hat, Inc, and individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.hal.client.runtime.subsystem.elytron.wizardpassword; public enum PasswordState { CHOOSE_PASSWORD_TYPE, CONFIGURATION, REVIEW }
apache-2.0
kidaa/rave
rave-components/rave-jpa/src/main/java/org/apache/rave/portal/repository/impl/JpaApplicationDataRepository.java
7525
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.rave.portal.repository.impl;

import org.apache.rave.exception.NotSupportedException;
import org.apache.commons.lang3.StringUtils;
import org.apache.rave.exception.DataSerializationException;
import org.apache.rave.model.ApplicationData;
import org.apache.rave.portal.model.JpaApplicationData;
import org.apache.rave.portal.model.conversion.JpaApplicationDataConverter;
import org.apache.rave.portal.repository.ApplicationDataRepository;
import org.apache.rave.util.CollectionUtils;
import org.apache.rave.util.JsonUtils;
import org.json.JSONException;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Lob;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.apache.rave.persistence.jpa.util.JpaUtil.getSingleResult;
import static org.apache.rave.persistence.jpa.util.JpaUtil.saveOrUpdate;

/**
 * JPA implementation of {@link ApplicationDataRepository}. The appdata map is
 * persisted as a JSON string via the {@link JpaSerializableApplicationData}
 * subclass, so every entity loaded from the database must have
 * {@code deserializeData()} called before it is handed to callers, and every
 * entity must have {@code serializeData()} called before it is saved.
 */
@Repository
public class JpaApplicationDataRepository implements ApplicationDataRepository {

    @PersistenceContext
    private EntityManager manager;

    @Autowired
    private JpaApplicationDataConverter converter;

    @Override
    public Class<? extends ApplicationData> getType() {
        return JpaApplicationData.class;
    }

    /**
     * Loads a single ApplicationData row by its numeric id.
     *
     * @param id entity id as a decimal string; a non-numeric id will surface a
     *           NumberFormatException to the caller
     * @return the deserialized entity, or null if no row exists
     */
    @Override
    public ApplicationData get(String id) {
        // NOTE(review): this cast assumes every persisted row is a
        // JpaSerializableApplicationData (the only variant this repository
        // writes); a plain JpaApplicationData row would cause a ClassCastException.
        JpaSerializableApplicationData applicationData = (JpaSerializableApplicationData)
                manager.find(JpaApplicationData.class, Long.parseLong(id));
        if (applicationData != null) {
            // inflate the JSON column back into the data map
            applicationData.deserializeData();
        }
        return applicationData;
    }

    /**
     * Persists the given item, serializing its data map to JSON first.
     *
     * @param item the application data to save (converted to the JPA model if needed)
     * @return the managed, saved entity
     */
    @Override
    @Transactional
    public JpaApplicationData save(ApplicationData item) {
        JpaApplicationData jpaAppData = converter.convert(item);
        JpaSerializableApplicationData jpaSerializableApplicationData =
                getJpaSerializableApplicationData(jpaAppData);
        // snapshot the data map into the serialized JSON column before flushing
        jpaSerializableApplicationData.serializeData();
        return saveOrUpdate(jpaSerializableApplicationData.getEntityId(), manager,
                jpaSerializableApplicationData);
    }

    @Override
    public void delete(ApplicationData item) {
        // re-fetch through get() so we always remove a managed JPA entity
        manager.remove(item instanceof JpaApplicationData ? item : get(item.getId()));
    }

    @Override
    public List<ApplicationData> getAll() {
        throw new NotSupportedException("This function is not yet implemented for this class.");
    }

    @Override
    public List<ApplicationData> getLimitedList(int offset, int limit) {
        throw new NotSupportedException("This function is not yet implemented for this class.");
    }

    @Override
    public int getCountAll() {
        throw new NotSupportedException("This function is not yet implemented for this class.");
    }

    /**
     * Returns the application data for the given app belonging to any of the
     * given users. Each returned entity has its data map deserialized.
     *
     * @param userIds user ids to match; a singleton list is delegated to the
     *                more efficient single-user query
     * @param appId   application (app URL) identifier
     * @return matching entities; empty list when nothing matches
     */
    @Override
    public List<ApplicationData> getApplicationData(List<String> userIds, String appId) {
        // if the call is only looking for data for a single user use the more
        // efficient single user variant transparently
        if (userIds.size() == 1) {
            List<ApplicationData> data = new ArrayList<ApplicationData>();
            ApplicationData applicationData = getApplicationData(userIds.get(0), appId);
            if (applicationData != null) {
                data.add(applicationData);
            }
            return data;
        }

        TypedQuery<JpaSerializableApplicationData> query = manager.createNamedQuery(
                JpaApplicationData.FIND_BY_USER_IDS_AND_APP_ID, JpaSerializableApplicationData.class);
        query.setParameter(JpaApplicationData.USER_IDS_PARAM, userIds);
        query.setParameter(JpaApplicationData.APP_URL_PARAM, appId);
        List<JpaSerializableApplicationData> results = query.getResultList();
        for (JpaSerializableApplicationData applicationData : results) {
            applicationData.deserializeData();
        }
        return CollectionUtils.<ApplicationData>toBaseTypedList(results);
    }

    /**
     * Returns the application data for a single user and app, or null.
     *
     * @param personId user id to match
     * @param appId    application (app URL) identifier
     * @return the deserialized entity, or null if no row exists
     */
    @Override
    public JpaApplicationData getApplicationData(String personId, String appId) {
        TypedQuery<JpaSerializableApplicationData> query = manager.createNamedQuery(
                JpaApplicationData.FIND_BY_USER_ID_AND_APP_ID, JpaSerializableApplicationData.class);
        query.setParameter(JpaApplicationData.USER_ID_PARAM, personId);
        query.setParameter(JpaApplicationData.APP_URL_PARAM, appId);
        JpaSerializableApplicationData applicationData = getSingleResult(query.getResultList());
        if (applicationData != null) {
            applicationData.deserializeData();
        }
        return applicationData;
    }

    /** Widens a plain JpaApplicationData into the serializable variant used for persistence. */
    private JpaSerializableApplicationData getJpaSerializableApplicationData(
            JpaApplicationData applicationData) {
        if (applicationData instanceof JpaSerializableApplicationData) {
            return (JpaSerializableApplicationData) applicationData;
        }
        return new JpaSerializableApplicationData(applicationData.getEntityId(),
                applicationData.getUserId(), applicationData.getAppUrl(), applicationData.getData());
    }

    /**
     * This class is here so that the details of the persistence strategy in use for serializing the appdata map to a
     * JSON string doesnt end up being reflected in any public API of the ApplicationData object itself.
     * <p/>
     * This allows the public API of this repository to deal in clean ApplicationData models, but under the covers it
     * uses this model for the actual persistence to the database.
     */
    @Entity
    public static class JpaSerializableApplicationData extends JpaApplicationData {

        /** JSON snapshot of the data map; the only column this subclass adds. */
        @Lob
        @Column(name = "serialized_data")
        private String serializedData;

        public JpaSerializableApplicationData() {
            super();
        }

        public JpaSerializableApplicationData(Long entityId, String userId, String appUrl,
                                              Map<String, Object> data) {
            super(entityId, userId, appUrl, data);
        }

        /** Serializes the in-memory data map into the JSON column; no-op when the map is null. */
        public void serializeData() {
            Map<String, Object> data = this.getData();
            if (data != null) {
                serializedData = JsonUtils.stringify(data);
            }
        }

        /** Inflates the JSON column back into the data map; no-op when the column is blank. */
        @SuppressWarnings("unchecked")
        public void deserializeData() {
            // StringUtils.isNotBlank() is already null-safe, so the former
            // explicit "serializedData != null &&" guard was redundant.
            if (StringUtils.isNotBlank(serializedData)) {
                this.setData(JsonUtils.parse(serializedData, Map.class));
            }
        }
    }
}
apache-2.0
dimone-kun/cuba
modules/web-widgets/src/com/haulmont/cuba/web/widgets/client/addons/dragdroplayouts/ui/tabsheet/DDTabSheetState.java
1315
/*
 * Copyright 2015 John Ahlroos
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.tabsheet;

import com.vaadin.shared.ui.tabsheet.TabsheetState;

import com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.interfaces.DDLayoutState;
import com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.interfaces.DragAndDropAwareState;

/**
 * Vaadin shared state for the drag-and-drop aware tab sheet: plain public
 * fields serialized between server and client, extending the stock
 * {@link TabsheetState} with drag-and-drop configuration.
 */
public class DDTabSheetState extends TabsheetState implements DragAndDropAwareState {

    /** Default value for {@link #tabLeftRightDropRatio}. */
    public static final float DEFAULT_HORIZONTAL_DROP_RATIO = 0.2f;

    // Horizontal drop ratio for tabs; presumably the fraction of a tab's width
    // that counts as a "drop left/right of this tab" zone — the consuming drop
    // handler lives client-side, so confirm there before relying on this.
    public float tabLeftRightDropRatio = DEFAULT_HORIZONTAL_DROP_RATIO;

    // Drag-and-drop state common to all DD layouts in this add-on.
    public DDLayoutState ddState = new DDLayoutState();

    /** Exposes the common DD state as required by {@link DragAndDropAwareState}. */
    @Override
    public DDLayoutState getDragAndDropState() {
        return ddState;
    }
}
apache-2.0
apache/pdfbox
fontbox/src/main/java/org/apache/fontbox/ttf/HorizontalMetricsTable.java
4062
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.fontbox.ttf;

import java.io.IOException;

/**
 * A table in a true type font.
 *
 * @author Ben Litchfield
 */
public class HorizontalMetricsTable extends TTFTable
{
    /**
     * A tag that identifies this table type.
     */
    public static final String TAG = "hmtx";

    // Advance widths for the first numHMetrics glyphs (one hMetric record each).
    private int[] advanceWidth;
    // Left side bearings paired with the advance widths above.
    private short[] leftSideBearing;
    // Left side bearings for glyphs beyond numHMetrics (these share the last
    // advance width instead of carrying their own hMetric record).
    private short[] nonHorizontalLeftSideBearing;
    // Number of full hMetric records, taken from the hhea table.
    private int numHMetrics;

    HorizontalMetricsTable(TrueTypeFont font)
    {
        super(font);
    }

    /**
     * This will read the required data from the stream.
     *
     * @param ttf The font that is being read.
     * @param data The stream to read the data from.
     * @throws IOException If there is an error reading the data.
     */
    @Override
    void read(TrueTypeFont ttf, TTFDataStream data) throws IOException
    {
        HorizontalHeaderTable hHeader = ttf.getHorizontalHeader();
        if (hHeader == null)
        {
            throw new IOException("Could not get hmtx table");
        }
        numHMetrics = hHeader.getNumberOfHMetrics();
        int numGlyphs = ttf.getNumberOfGlyphs();

        // Each hMetric record is 4 bytes: unsigned 16-bit advance width
        // followed by signed 16-bit left side bearing.
        int bytesRead = 0;
        advanceWidth = new int[ numHMetrics ];
        leftSideBearing = new short[ numHMetrics ];
        for( int i=0; i<numHMetrics; i++ )
        {
            advanceWidth[i] = data.readUnsignedShort();
            leftSideBearing[i] = data.readSignedShort();
            bytesRead += 4;
        }

        int numberNonHorizontal = numGlyphs - numHMetrics;

        // handle bad fonts with too many hmetrics
        if (numberNonHorizontal < 0)
        {
            numberNonHorizontal = numGlyphs;
        }

        // make sure that table is never null and correct size, even with bad fonts that have no
        // "leftSideBearing" table although they should
        nonHorizontalLeftSideBearing = new short[numberNonHorizontal];

        // Remaining glyphs carry only a 2-byte left side bearing each; the
        // per-element bytesRead check stops reading if the table is truncated,
        // leaving the rest of the array as zeros.
        if (bytesRead < getLength())
        {
            for( int i=0; i<numberNonHorizontal; i++ )
            {
                if (bytesRead < getLength())
                {
                    nonHorizontalLeftSideBearing[i] = data.readSignedShort();
                    bytesRead += 2;
                }
            }
        }

        initialized = true;
    }

    /**
     * Returns the advance width for the given GID.
     *
     * @param gid GID
     */
    public int getAdvanceWidth(int gid)
    {
        if (advanceWidth.length == 0)
        {
            // fallback for broken fonts with an empty table; 250 appears to be
            // an arbitrary default width — TODO confirm against callers
            return 250;
        }
        if (gid < numHMetrics)
        {
            return advanceWidth[gid];
        }
        else
        {
            // monospaced fonts may not have a width for every glyph
            // the last one is for subsequent glyphs
            return advanceWidth[advanceWidth.length -1];
        }
    }

    /**
     * Returns the left side bearing for the given GID.
     *
     * @param gid GID
     */
    public int getLeftSideBearing(int gid)
    {
        if (leftSideBearing.length == 0)
        {
            // broken font with no metrics at all
            return 0;
        }
        if (gid < numHMetrics)
        {
            return leftSideBearing[gid];
        }
        else
        {
            // glyphs past numHMetrics store their bearing in the trailing array
            return nonHorizontalLeftSideBearing[gid - numHMetrics];
        }
    }
}
apache-2.0
apache/incubator-asterixdb
asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/base/JComplexObject.java
1245
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.asterix.external.library.java.base;

import org.apache.asterix.external.api.IJObject;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.util.container.IObjectPool;

/**
 * Base class for complex (non-scalar) {@link IJObject} implementations.
 * Adds an injectable object pool keyed by {@link IAType}; how the pool is
 * used (presumably to allocate/recycle nested member objects) is left to
 * concrete subclasses.
 */
public abstract class JComplexObject<T> implements IJObject<T> {

    // Pool injected via setPool(); available to subclasses for member allocation.
    protected IObjectPool<IJObject, IAType> pool;

    /**
     * Injects the object pool this instance may draw nested objects from.
     *
     * @param pool pool of IJObject instances keyed by their IAType
     */
    public void setPool(IObjectPool<IJObject, IAType> pool) {
        this.pool = pool;
    }
}
apache-2.0
twitter/cloudhopper-commons
ch-commons-charset/src/test/java/com/cloudhopper/commons/charset/demo/Charset5Main.java
1291
package com.cloudhopper.commons.charset.demo;

/*
 * #%L
 * ch-commons-charset
 * %%
 * Copyright (C) 2012 Cloudhopper by Twitter
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import com.cloudhopper.commons.charset.CharsetUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Demo: normalizes a string containing non-Latin characters
 * (U+6025, U+20AC) against the UTF-8 charset and logs both forms.
 *
 * @author joelauer
 */
public class Charset5Main {
    private static final Logger logger = LoggerFactory.getLogger(Charset5Main.class);

    public static void main(String[] args) throws Exception {
        final String source = "h\u6025\u20ACllo";
        final String normalized = CharsetUtil.normalize(source, CharsetUtil.CHARSET_UTF_8);

        logger.debug("source string: " + source);
        logger.debug("target string: " + normalized);
    }
}
apache-2.0
rLadia/AttacknidPatch
decompiled_src/Procyon/org/anddev/andengine/opengl/texture/BuildableTexture.java
4239
package org.anddev.andengine.opengl.texture; import java.util.*; import org.anddev.andengine.opengl.texture.source.*; import org.anddev.andengine.util.*; import org.anddev.andengine.opengl.texture.builder.*; import android.graphics.*; public class BuildableTexture extends Texture { private final ArrayList<TextureSourceWithWithLocationCallback> mTextureSourcesToPlace; public BuildableTexture(final int n, final int n2) { super(n, n2, TextureOptions.DEFAULT, null); this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>(); } public BuildableTexture(final int n, final int n2, final ITextureStateListener textureStateListener) { super(n, n2, TextureOptions.DEFAULT, textureStateListener); this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>(); } public BuildableTexture(final int n, final int n2, final TextureOptions textureOptions) throws IllegalArgumentException { super(n, n2, textureOptions, null); this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>(); } public BuildableTexture(final int n, final int n2, final TextureOptions textureOptions, final ITextureStateListener textureStateListener) throws IllegalArgumentException { super(n, n2, textureOptions, textureStateListener); this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>(); } @Deprecated @Override public TextureSourceWithLocation addTextureSource(final ITextureSource textureSource, final int n, final int n2) { return super.addTextureSource(textureSource, n, n2); } public void addTextureSource(final ITextureSource textureSource, final Callback<TextureSourceWithLocation> callback) { this.mTextureSourcesToPlace.add(new TextureSourceWithWithLocationCallback(textureSource, callback)); } public void build(final ITextureBuilder textureBuilder) throws ITextureBuilder.TextureSourcePackingException { textureBuilder.pack(this, this.mTextureSourcesToPlace); this.mTextureSourcesToPlace.clear(); 
this.mUpdateOnHardwareNeeded = true; } @Override public void clearTextureSources() { super.clearTextureSources(); this.mTextureSourcesToPlace.clear(); } public void removeTextureSource(final ITextureSource textureSource) { final ArrayList<TextureSourceWithWithLocationCallback> mTextureSourcesToPlace = this.mTextureSourcesToPlace; for (int i = -1 + mTextureSourcesToPlace.size(); i >= 0; --i) { if (mTextureSourcesToPlace.get(i).mTextureSource == textureSource) { mTextureSourcesToPlace.remove(i); this.mUpdateOnHardwareNeeded = true; return; } } } public static class TextureSourceWithWithLocationCallback implements ITextureSource { private final Callback<TextureSourceWithLocation> mCallback; private final ITextureSource mTextureSource; public TextureSourceWithWithLocationCallback(final ITextureSource mTextureSource, final Callback<TextureSourceWithLocation> mCallback) { super(); this.mTextureSource = mTextureSource; this.mCallback = mCallback; } @Override public TextureSourceWithWithLocationCallback clone() { return null; } public Callback<TextureSourceWithLocation> getCallback() { return this.mCallback; } @Override public int getHeight() { return this.mTextureSource.getHeight(); } public ITextureSource getTextureSource() { return this.mTextureSource; } @Override public int getWidth() { return this.mTextureSource.getWidth(); } @Override public Bitmap onLoadBitmap() { return this.mTextureSource.onLoadBitmap(); } @Override public String toString() { return this.mTextureSource.toString(); } } }
apache-2.0
stefan-ziel/Activiti
modules/activiti-dmn-engine/src/main/java/org/activiti/dmn/engine/impl/parser/DmnParse.java
6382
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.activiti.dmn.engine.impl.parser; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; import org.activiti.dmn.engine.ActivitiDmnException; import org.activiti.dmn.engine.DmnEngineConfiguration; import org.activiti.dmn.engine.impl.context.Context; import org.activiti.dmn.engine.impl.io.InputStreamSource; import org.activiti.dmn.engine.impl.io.ResourceStreamSource; import org.activiti.dmn.engine.impl.io.StreamSource; import org.activiti.dmn.engine.impl.io.StringStreamSource; import org.activiti.dmn.engine.impl.io.UrlStreamSource; import org.activiti.dmn.engine.impl.persistence.entity.DecisionTableEntity; import org.activiti.dmn.engine.impl.persistence.entity.DmnDeploymentEntity; import org.activiti.dmn.model.Decision; import org.activiti.dmn.model.DmnDefinition; import org.activiti.dmn.xml.constants.DmnXMLConstants; import org.activiti.dmn.xml.converter.DmnXMLConverter; import org.activiti.dmn.xml.exception.DmnXMLException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Specific parsing of one BPMN 2.0 XML file, created by the {@link DmnParse}. 
* * @author Tijs Rademakers * @author Joram Barrez */ public class DmnParse implements DmnXMLConstants { protected static final Logger LOGGER = LoggerFactory.getLogger(DmnParse.class); protected String name; protected boolean validateSchema = true; protected StreamSource streamSource; protected String sourceSystemId; protected DmnDefinition dmnDefinition; protected String targetNamespace; /** The deployment to which the parsed decision tables will be added. */ protected DmnDeploymentEntity deployment; /** The end result of the parsing: a list of decision tables. */ protected List<DecisionTableEntity> decisionTables = new ArrayList<DecisionTableEntity>(); public DmnParse deployment(DmnDeploymentEntity deployment) { this.deployment = deployment; return this; } public DmnParse execute(DmnEngineConfiguration dmnEngineConfig) { try { DmnXMLConverter converter = new DmnXMLConverter(); boolean enableSafeDmnXml = dmnEngineConfig.isEnableSafeDmnXml(); String encoding = dmnEngineConfig.getXmlEncoding(); if (encoding != null) { dmnDefinition = converter.convertToDmnModel(streamSource, validateSchema, enableSafeDmnXml, encoding); } else { dmnDefinition = converter.convertToDmnModel(streamSource, validateSchema, enableSafeDmnXml); } if (dmnDefinition != null && dmnDefinition.getDecisions() != null) { for (Decision decision : dmnDefinition.getDecisions()) { DecisionTableEntity decisionTableEntity = Context.getDmnEngineConfiguration().getDecisionTableEntityManager().create(); decisionTableEntity.setKey(decision.getId()); decisionTableEntity.setName(decision.getName()); decisionTableEntity.setResourceName(name); decisionTableEntity.setDeploymentId(deployment.getId()); decisionTableEntity.setParentDeploymentId(deployment.getParentDeploymentId()); decisionTableEntity.setDescription(decision.getDescription()); decisionTables.add(decisionTableEntity); } } } catch (Exception e) { if (e instanceof ActivitiDmnException) { throw (ActivitiDmnException) e; } else if (e instanceof 
DmnXMLException) { throw (DmnXMLException) e; } else { throw new ActivitiDmnException("Error parsing XML", e); } } return this; } public DmnParse name(String name) { this.name = name; return this; } public DmnParse sourceInputStream(InputStream inputStream) { if (name == null) { name("inputStream"); } setStreamSource(new InputStreamSource(inputStream)); return this; } public DmnParse sourceUrl(URL url) { if (name == null) { name(url.toString()); } setStreamSource(new UrlStreamSource(url)); return this; } public DmnParse sourceUrl(String url) { try { return sourceUrl(new URL(url)); } catch (MalformedURLException e) { throw new ActivitiDmnException("malformed url: " + url, e); } } public DmnParse sourceResource(String resource) { if (name == null) { name(resource); } setStreamSource(new ResourceStreamSource(resource)); return this; } public DmnParse sourceString(String string) { if (name == null) { name("string"); } setStreamSource(new StringStreamSource(string)); return this; } protected void setStreamSource(StreamSource streamSource) { if (this.streamSource != null) { throw new ActivitiDmnException("invalid: multiple sources " + this.streamSource + " and " + streamSource); } this.streamSource = streamSource; } public String getSourceSystemId() { return sourceSystemId; } public DmnParse setSourceSystemId(String sourceSystemId) { this.sourceSystemId = sourceSystemId; return this; } /* * ------------------- GETTERS AND SETTERS ------------------- */ public boolean isValidateSchema() { return validateSchema; } public void setValidateSchema(boolean validateSchema) { this.validateSchema = validateSchema; } public List<DecisionTableEntity> getDecisionTables() { return decisionTables; } public String getTargetNamespace() { return targetNamespace; } public DmnDeploymentEntity getDeployment() { return deployment; } public void setDeployment(DmnDeploymentEntity deployment) { this.deployment = deployment; } public DmnDefinition getDmnDefinition() { return dmnDefinition; } 
public void setDmnDefinition(DmnDefinition dmnDefinition) { this.dmnDefinition = dmnDefinition; } }
apache-2.0
apache/derby
java/build/org/apache/derbyBuild/MessageVetter.java
7276
/*

   Derby - Class org.apache.derbyBuild.MessageVetter

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.derbyBuild;

import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Pattern;

/**
 * Class that checks the message files for common problems.
 */
public class MessageVetter {
    /**
     * <p>
     * Check all the message translations in the specified directories for
     * common problems. Assume that all properties files in the directories
     * are message translations.
     * </p>
     *
     * <p>
     * If a problem is found, an error will be raised.
     * </p>
     *
     * @param args names of the directories to check
     * @throws IOException if a directory cannot be listed or a file cannot
     * be read
     */
    public static void main(String[] args) throws IOException {
        FileFilter filter = new FileFilter() {
            public boolean accept(File pathname) {
                return pathname.getName().endsWith(".properties");
            }
        };
        for (String directory : args) {
            // listFiles() returns null for a nonexistent or unreadable
            // directory; fail with a clear message instead of an NPE.
            File[] files = new File(directory).listFiles(filter);
            if (files == null) {
                throw new IOException(
                        "Could not list properties files in " + directory);
            }
            for (File file : files) {
                new MessageVetter(file).vet();
            }
        }
    }

    /**
     * A regular expression that matches a single-quote character that is
     * neither preceeded nor followed by another single-quote character. Used
     * by {@link #checkSingleQuotes(java.lang.String, java.lang.String)} to
     * verify that messages contain two single-quotes in order to produce a
     * single apostrophe (dictated by {@code java.text.MessageFormat}).
     */
    private static final Pattern LONE_QUOTE_PATTERN =
            Pattern.compile("^'[^']|[^']'[^']|[^']'$");

    /**
     * A regular expression that matches a single-quote character that have
     * no adjacent single-quote or curly brace character. Used by
     * {@link #checkSingleQuotes(java.lang.String, java.lang.String)} to
     * verify that all single-quotes are either correctly formatted
     * apostrophes or used for quoting curly braces, as required by
     * {@code java.text.MessageFormat}.
     */
    private static final Pattern LONE_QUOTE_ALLOWED_PATTERN =
            Pattern.compile("^'[^'{}]|[^'{}]'[^'{}]|[^'{}]'$");

    /**
     * A set of message identifiers in whose messages single-quotes may
     * legally appear with no adjacent single-quote character. This will be
     * messages where the single-quotes are needed to quote curly braces that
     * should appear literally in the message text.
     */
    private static final Set<String> LONE_QUOTE_ALLOWED =
            new HashSet<String>();

    static {
        // The IJ help text contains curly braces that need quoting.
        LONE_QUOTE_ALLOWED.add("IJ_HelpText");
        // Some of the DRDA usage messages contain the text {on|off}, which
        // needs quoting.
        LONE_QUOTE_ALLOWED.add("DRDA_Usage8.I");
        LONE_QUOTE_ALLOWED.add("DRDA_Usage11.I");
        LONE_QUOTE_ALLOWED.add("PE_HelpText");
    }

    /** The message file to check. */
    private final File file;

    /** The properties found in the message file. */
    private final Properties properties;

    /**
     * Create a new {@code MessageVetter} instance.
     *
     * @param file the file with the messages to check
     * @throws IOException if the file cannot be loaded
     */
    private MessageVetter(File file) throws IOException {
        this.file = file;
        properties = new Properties();
        FileInputStream in = new FileInputStream(file);
        try {
            properties.load(in);
        } finally {
            in.close();
        }
    }

    /**
     * Vet the messages in this file. An error will be raised if an
     * ill-formatted message is found.
     */
    private void vet() {
        // stringPropertyNames() avoids the raw Enumeration and casts of
        // propertyNames().
        for (String key : properties.stringPropertyNames()) {
            vetMessage(key, properties.getProperty(key));
        }
    }

    /**
     * Vet a specific message. Raise an error if it is not well-formed.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void vetMessage(String key, String message) {
        checkSingleQuotes(key, message);
        checkValidMessageFormat(key, message);
    }

    /**
     * Check that single-quote characters are doubled, as required by
     * {@code java.text.MessageFormat}. Raise an error otherwise.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void checkSingleQuotes(String key, String message) {
        Pattern p;
        if (LONE_QUOTE_ALLOWED.contains(key)) {
            // In some messages we allow lone single-quote characters, but
            // only if they are used to quote curly braces. Use a regular
            // expression that finds all single-quotes that aren't adjacent
            // to another single-quote or a curly brace character.
            p = LONE_QUOTE_ALLOWED_PATTERN;
        } else {
            // Otherwise, we don't allow lone single-quote characters at all.
            p = LONE_QUOTE_PATTERN;
        }

        if (p.matcher(message).find()) {
            throw new AssertionError("Lone single-quote in message " + key
                    + " in " + file + ".\nThis is OK if it is used for quoting "
                    + "special characters in the message. If this is what the "
                    + "character is used for, add an exception in "
                    + getClass().getName() + ".LONE_QUOTE_ALLOWED.");
        }
    }

    /**
     * Check that a message format specifier is valid. Raise an error if it
     * is not.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void checkValidMessageFormat(String key, String message) {
        try {
            // See if a MessageFormat instance can be produced based on this
            // message format specifier.
            new MessageFormat(message);
        } catch (Exception e) {
            AssertionError ae = new AssertionError(
                    "Message " + key + " in " + file + " isn't a valid "
                    + "java.text.MessageFormat pattern.");
            ae.initCause(e);
            throw ae;
        }
    }
}
apache-2.0
IAMTJW/Tomcat-8.5.20
tomcat-8.5.20/java/org/apache/tomcat/util/http/parser/MediaTypeCache.java
2331
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tomcat.util.http.parser; import java.io.IOException; import java.io.StringReader; import org.apache.tomcat.util.collections.ConcurrentCache; /** * Caches the results of parsing content-type headers. */ public class MediaTypeCache { private final ConcurrentCache<String,String[]> cache; public MediaTypeCache(int size) { cache = new ConcurrentCache<>(size); } /** * Looks in the cache and returns the cached value if one is present. If no * match exists in the cache, a new parser is created, the input parsed and * the results placed in the cache and returned to the user. * * @param input The content-type header value to parse * @return The results are provided as a two element String array. The * first element is the media type less the charset and * the second element is the charset */ public String[] parse(String input) { String[] result = cache.get(input); if (result != null) { return result; } MediaType m = null; try { m = MediaType.parseMediaType(new StringReader(input)); } catch (IOException e) { // Ignore - return null } if (m != null) { result = new String[] {m.toStringNoCharset(), m.getCharset()}; cache.put(input, result); } return result; } }
apache-2.0
AbleOne/link-rest
agrest/src/test/java/io/agrest/it/fixture/cayenne/E15E1.java
183
package io.agrest.it.fixture.cayenne;

import io.agrest.it.fixture.cayenne.auto._E15E1;

/**
 * Concrete persistent class for the E15E1 test entity; all mapped state is
 * inherited from the generated {@code _E15E1} superclass.
 */
public class E15E1 extends _E15E1 {

    private static final long serialVersionUID = 1L;
}
apache-2.0
xasx/assertj-core
src/test/java/org/assertj/core/error/ShouldOnlyHaveFields_create_Test.java
8163
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2019 the original author or authors. */ package org.assertj.core.error; import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.error.ShouldOnlyHaveFields.shouldOnlyHaveDeclaredFields; import static org.assertj.core.error.ShouldOnlyHaveFields.shouldOnlyHaveFields; import static org.assertj.core.util.Sets.newLinkedHashSet; import java.util.LinkedHashSet; import org.assertj.core.description.Description; import org.assertj.core.description.TextDescription; import org.assertj.core.presentation.Representation; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.test.Player; import org.assertj.core.util.Sets; import org.junit.jupiter.api.Test; /** * Tests for * <code>{@link ShouldOnlyHaveFields#create(Description, Representation)}</code> * * @author Filip Hrisafov */ public class ShouldOnlyHaveFields_create_Test { private static final LinkedHashSet<String> EMPTY_STRING_SET = Sets.<String> newLinkedHashSet(); @Test public void should_create_error_message_for_fields() { ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class, newLinkedHashSet("name", "team"), newLinkedHashSet("nickname"), newLinkedHashSet("address")); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to 
only have the following public accessible fields:%n" + " <[\"name\", \"team\"]>%n" + "fields not found:%n" + " <[\"nickname\"]>%n" + "and fields not expected:%n" + " <[\"address\"]>")); } @Test public void should_not_display_unexpected_fields_when_there_are_none_for_fields() { ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class, newLinkedHashSet("name", "team"), newLinkedHashSet("nickname"), EMPTY_STRING_SET); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following public accessible fields:%n" + " <[\"name\", \"team\"]>%n" + "but could not find the following fields:%n" + " <[\"nickname\"]>")); } @Test public void should_not_display_fields_not_found_when_there_are_none_for_fields() { ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class, newLinkedHashSet("name", "team"), EMPTY_STRING_SET, newLinkedHashSet("address")); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following public accessible fields:%n" + " <[\"name\", \"team\"]>%n" + "but the following fields were unexpected:%n" + " <[\"address\"]>")); } @Test public void should_create_error_message_for_declared_fields() { ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class, newLinkedHashSet("name", "team"), newLinkedHashSet("nickname"), newLinkedHashSet("address")); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following declared fields:%n" + " <[\"name\", \"team\"]>%n" + "fields not found:%n" + " <[\"nickname\"]>%n" + "and fields not expected:%n" + " <[\"address\"]>")); } 
@Test public void should_not_display_unexpected_fields_when_there_are_none_for_declared_fields() { ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class, newLinkedHashSet("name", "team"), newLinkedHashSet("nickname"), EMPTY_STRING_SET); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following declared fields:%n" + " <[\"name\", \"team\"]>%n" + "but could not find the following fields:%n" + " <[\"nickname\"]>")); } @Test public void should_not_display_fields_not_found_when_there_are_none_for_declared_fields() { ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class, newLinkedHashSet("name", "team"), EMPTY_STRING_SET, newLinkedHashSet("address")); String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); assertThat(message).isEqualTo(String.format("[Test] %n" + "Expecting%n" + " <org.assertj.core.test.Player>%n" + "to only have the following declared fields:%n" + " <[\"name\", \"team\"]>%n" + "but the following fields were unexpected:%n" + " <[\"address\"]>")); } }
apache-2.0
goodwinnk/intellij-community
platform/platform-impl/src/com/intellij/openapi/diff/impl/settings/DiffMergeSettingsAction.java
3098
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.diff.impl.settings; import com.intellij.icons.AllIcons; import com.intellij.idea.ActionsBundle; import com.intellij.openapi.actionSystem.ActionGroup; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.ToggleAction; import com.intellij.openapi.editor.Editor; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Collection; /** * The "gear" action allowing to configure merge tool visual preferences, such as displaying whitespaces, line numbers and soft wraps. * * @see DiffMergeSettings */ public class DiffMergeSettingsAction extends ActionGroup { @NotNull private final Collection<Editor> myEditors; @NotNull private final DiffMergeSettings mySettings; public DiffMergeSettingsAction(@NotNull Collection<Editor> editors, @NotNull DiffMergeSettings settings) { super("Settings", null, AllIcons.General.GearPlain); setPopup(true); myEditors = editors; mySettings = settings; } @NotNull @Override public AnAction[] getChildren(@Nullable AnActionEvent e) { return new AnAction[] { new DiffMergeToggleAction("EditorToggleShowWhitespaces", DiffMergeEditorSetting.WHITESPACES, myEditors, mySettings), new DiffMergeToggleAction("EditorToggleShowLineNumbers", DiffMergeEditorSetting.LINE_NUMBERS, myEditors, mySettings), new DiffMergeToggleAction("EditorToggleShowIndentLines", DiffMergeEditorSetting.INDENT_LINES, myEditors, mySettings), new DiffMergeToggleAction("EditorToggleUseSoftWraps", DiffMergeEditorSetting.SOFT_WRAPS, myEditors, mySettings) }; } private static class DiffMergeToggleAction extends ToggleAction { @NotNull private final DiffMergeEditorSetting mySetting; @NotNull private final Collection<Editor> myEditors; @NotNull private final DiffMergeSettings mySettings; 
private DiffMergeToggleAction(@NotNull String actionId, @NotNull DiffMergeEditorSetting setting, @NotNull Collection<Editor> editors, @NotNull DiffMergeSettings settings) { super(ActionsBundle.actionText(actionId), ActionsBundle.actionDescription(actionId), null); mySetting = setting; myEditors = editors; mySettings = settings; } @Override public boolean isSelected(@NotNull AnActionEvent e) { return getPreference(mySetting); } @Override public void setSelected(@NotNull AnActionEvent e, boolean state) { setPreference(mySetting, state); for (Editor editor : myEditors) { mySetting.apply(editor, state); } } private void setPreference(DiffMergeEditorSetting preference, boolean state) { mySettings.setPreference(preference, state); } private boolean getPreference(DiffMergeEditorSetting preference) { return mySettings.getPreference(preference); } } }
apache-2.0
stuartwdouglas/undertow
servlet/src/test/java/io/undertow/servlet/test/response/cookies/ResponseCookiesTestCase.java
7421
/* * JBoss, Home of Professional Open Source. * Copyright 2019 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.servlet.test.response.cookies; import java.util.Arrays; import java.util.Comparator; import javax.servlet.ServletException; import io.undertow.servlet.api.ServletInfo; import io.undertow.servlet.test.util.DeploymentUtils; import io.undertow.testutils.DefaultServer; import io.undertow.testutils.HttpClientUtils; import io.undertow.testutils.TestHttpClient; import io.undertow.util.StatusCodes; import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; /** * Test for response.addCookie * * @author Flavia Rainone */ @RunWith(DefaultServer.class) public class ResponseCookiesTestCase { @BeforeClass public static void setup() throws ServletException { DeploymentUtils.setupServlet( new ServletInfo("add-cookies", AddCookiesServlet.class) .addMapping("/add-cookies"), new ServletInfo("duplicate-cookies", DuplicateCookiesServlet.class) .addMapping("/duplicate-cookies"), new ServletInfo("overwrite-cookies", OverwriteCookiesServlet.class) .addMapping("/overwrite-cookies"), new ServletInfo("jsessionid-cookies", JSessionIDCookiesServlet.class) 
.addMapping("/jsessionid-cookies")); } @Test public void addCookies() throws Exception { final TestHttpClient client = new TestHttpClient(); try { final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/add-cookies"); final HttpResponse result = client.execute(get); assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode()); final String response = HttpClientUtils.readResponse(result); assertEquals("Served at: /servletContext", response); final Header[] setCookieHeaders = result.getHeaders("Set-Cookie"); assertEquals(2, setCookieHeaders.length); assertEquals("test1=test1; path=/test", setCookieHeaders[0].getValue()); assertEquals("test2=test2", setCookieHeaders[1].getValue()); } finally { client.getConnectionManager().shutdown(); } } @Test public void duplicateCookies() throws Exception { final TestHttpClient client = new TestHttpClient(); try { final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/duplicate-cookies"); final HttpResponse result = client.execute(get); assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode()); final String response = HttpClientUtils.readResponse(result); assertEquals("Served at: /servletContext", response); final Header[] setCookieHeaders = result.getHeaders("Set-Cookie"); assertEquals(7, setCookieHeaders.length); Arrays.sort(setCookieHeaders, Comparator.comparing(Object::toString)); assertEquals("test1=test1; path=/test1_1", setCookieHeaders[0].getValue()); assertEquals("test1=test1; path=/test1_2", setCookieHeaders[1].getValue()); assertEquals("test2=test2; path=/test2", setCookieHeaders[2].getValue()); assertEquals("test2=test2; path=/test2; domain=www.domain2.com", setCookieHeaders[3].getValue()); assertEquals("test3=test3", setCookieHeaders[4].getValue()); assertEquals("test3=test3; domain=www.domain3-1.com", setCookieHeaders[5].getValue()); assertEquals("test3=test3; domain=www.domain3-2.com", setCookieHeaders[6].getValue()); } finally { 
client.getConnectionManager().shutdown(); } } @Test public void overwriteCookies() throws Exception { final TestHttpClient client = new TestHttpClient(); try { final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/overwrite-cookies"); final HttpResponse result = client.execute(get); assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode()); final String response = HttpClientUtils.readResponse(result); assertEquals("Served at: /servletContext", response); final Header[] setCookieHeaders = result.getHeaders("Set-Cookie"); assertEquals(5, setCookieHeaders.length); Arrays.sort(setCookieHeaders, Comparator.comparing(Object::toString)); assertTrue("Header " + setCookieHeaders[0] + "didn't match expected regex", setCookieHeaders[0].getValue().matches("JSESSIONID=.*; path=/servletContext")); assertEquals("test=test10; domain=www.domain.com", setCookieHeaders[1].getValue()); assertEquals("test=test2; path=/test", setCookieHeaders[2].getValue()); assertEquals("test=test5", setCookieHeaders[3].getValue()); assertEquals("test=test8; path=/test; domain=www.domain.com", setCookieHeaders[4].getValue()); } finally { client.getConnectionManager().shutdown(); } } @Test public void jsessionIdCookies() throws Exception { final TestHttpClient client = new TestHttpClient(); try { final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/jsessionid-cookies"); final HttpResponse result = client.execute(get); assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode()); final String response = HttpClientUtils.readResponse(result); assertEquals("Served at: /servletContext", response); final Header[] setCookieHeaders = result.getHeaders("Set-Cookie"); assertEquals(3, setCookieHeaders.length); assertTrue("Header " + setCookieHeaders[0] + "didn't start with expected prefix", setCookieHeaders[0].getValue().startsWith("JSESSIONID=_bug_fix; path=/path3; Max-Age=500; Expires=")); assertTrue("Header " + 
setCookieHeaders[1] + "didn't start with expected prefix", setCookieHeaders[1].getValue().startsWith("JSESSIONID=_bug_fix; path=/path4; Max-Age=1000; Expires=")); assertTrue("Header " + setCookieHeaders[2] + "didn't match expected regex", setCookieHeaders[2].getValue().matches("JSESSIONID=.*; path=/servletContext")); } finally { client.getConnectionManager().shutdown(); } } }
apache-2.0
asolntsev/selenium
java/server/test/org/openqa/selenium/grid/node/local/CreateSessionTest.java
7428
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package org.openqa.selenium.grid.node.local;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;

import org.junit.Test;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.ImmutableCapabilities;
import org.openqa.selenium.events.local.GuavaEventBus;
import org.openqa.selenium.grid.data.CreateSessionRequest;
import org.openqa.selenium.grid.data.CreateSessionResponse;
import org.openqa.selenium.grid.data.Session;
import org.openqa.selenium.grid.node.Node;
import org.openqa.selenium.grid.testing.TestSessionFactory;
import org.openqa.selenium.json.Json;
import org.openqa.selenium.remote.Dialect;
import org.openqa.selenium.remote.ErrorCodes;
import org.openqa.selenium.remote.tracing.DefaultTestTracer;

import java.net.URI;
import java.net.URISyntaxException;
import java.util.Map;
import java.util.Set;

import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;
import static org.openqa.selenium.json.Json.MAP_TYPE;
import static org.openqa.selenium.remote.Dialect.OSS;
import static org.openqa.selenium.remote.Dialect.W3C;

/**
 * Checks that a {@link LocalNode} encodes new-session responses in the dialect
 * (JSON Wire Protocol vs W3C) advertised by the downstream end.
 */
public class CreateSessionTest {

  private final Json json = new Json();
  private final Capabilities stereotype = new ImmutableCapabilities("cheese", "brie");

  @Test
  public void shouldAcceptAW3CPayload() throws URISyntaxException {
    Map<String, Object> all = createSession(ImmutableSet.of(W3C));

    // Ensure that there's no status field (as this is used by the protocol handshake to determine
    // whether the session is using the JWP or the W3C dialect).
    assertThat(all.containsKey("status")).isFalse();

    // Now check the fields required by the spec.
    Map<?, ?> value = (Map<?, ?>) all.get("value");
    assertThat(value.get("sessionId")).isInstanceOf(String.class);
    assertThat(value.get("capabilities")).isInstanceOf(Map.class);
  }

  @Test
  public void shouldOnlyAcceptAJWPPayloadIfConfiguredTo() {
    // TODO: implement shouldOnlyAcceptAJWPPayloadIfConfiguredTo test
  }

  @Test
  public void ifOnlyW3CPayloadSentAndRemoteEndIsJWPOnlyFailSessionCreationIfJWPNotConfigured() {
    // TODO: implement ifOnlyW3CPayloadSentAndRemoteEndIsJWPOnlyFailSessionCreationIfJWPNotConfigured test
  }

  @Test
  public void ifOnlyJWPPayloadSentResponseShouldBeJWPOnlyIfJWPConfigured() throws URISyntaxException {
    Map<String, Object> all = createSession(ImmutableSet.of(OSS));

    // The status field is used by local ends to determine whether or not the session is a JWP one.
    assertThat(all.get("status")).matches(obj -> ((Number) obj).intValue() == ErrorCodes.SUCCESS);

    // The session id is a top level field.
    assertThat(all.get("sessionId")).isInstanceOf(String.class);

    // And the value should contain the capabilities.
    assertThat(all.get("value")).isInstanceOf(Map.class);
  }

  @Test
  public void shouldPreferUsingTheW3CProtocol() throws URISyntaxException {
    // Advertise BOTH dialects: the original test only advertised W3C, which made the
    // "preference" assertion vacuous. With both available, the node must pick W3C.
    Map<String, Object> all = createSession(ImmutableSet.of(OSS, W3C));

    // Ensure that there's no status field (as this is used by the protocol handshake to determine
    // whether the session is using the JWP or the W3C dialect).
    assertThat(all.containsKey("status")).isFalse();

    // Now check the fields required by the spec.
    Map<?, ?> value = (Map<?, ?>) all.get("value");
    assertThat(value.get("sessionId")).isInstanceOf(String.class);
    assertThat(value.get("capabilities")).isInstanceOf(Map.class);
  }

  @Test
  public void sessionDataShouldBeCorrectRegardlessOfPayloadProtocol() {
    // TODO: implement sessionDataShouldBeCorrectRegardlessOfPayloadProtocol test
  }

  @Test
  public void shouldSupportProtocolConversion() {
    // TODO: implement shouldSupportProtocolConversion test
  }

  /**
   * Builds a single-slot local node for {@link #stereotype}, requests a new session
   * advertising the given downstream dialects, and returns the downstream-encoded
   * response parsed as a JSON map.
   *
   * @param downstreamDialects dialects the (simulated) local end claims to speak
   * @return the decoded new-session response body
   * @throws URISyntaxException never in practice (the URI literal is valid)
   */
  private Map<String, Object> createSession(Set<Dialect> downstreamDialects)
      throws URISyntaxException {
    URI uri = new URI("http://example.com");
    Node node = LocalNode.builder(
        DefaultTestTracer.createTracer(),
        new GuavaEventBus(),
        uri,
        uri,
        null)
        .add(stereotype, new TestSessionFactory((id, caps) -> new Session(id, uri, caps)))
        .build();

    CreateSessionResponse sessionResponse = node.newSession(
        new CreateSessionRequest(
            downstreamDialects,
            stereotype,
            ImmutableMap.of()))
        .orElseThrow(() -> new AssertionError("Unable to create session"));

    return json.toType(
        new String(sessionResponse.getDownstreamEncodedResponse(), UTF_8), MAP_TYPE);
  }
}
apache-2.0
FRC-Team5333/2015-RecycleRush
FRC2015/src/main/java/frc/team5333/lib/RobotData.java
455
package frc.team5333.lib;

import java.util.HashMap;

/**
 * A static class that contains all kinds of Launch data for the robot,
 * such as network ports, current state and more.
 *
 * <p>All state is held in static fields; this class is never instantiated.</p>
 *
 * @author Jaci
 */
public class RobotData {

    /**
     * A blackboard containing objects that are common throughout the
     * program, along with their String Identifier.
     *
     * Keys are free-form string identifiers chosen by the writers; values are
     * arbitrary shared objects which callers must cast back to their real type.
     */
    // NOTE(review): plain HashMap, not synchronized — assumes single-threaded
    // access (or external locking); confirm if accessed from multiple threads.
    public static HashMap<String, Object> blackboard = new HashMap<String, Object>();
}
apache-2.0
nicolaferraro/camel
components/camel-kubernetes/src/main/java/org/apache/camel/component/openshift/build_configs/OpenshiftBuildConfigsProducer.java
6392
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.openshift.build_configs;

import java.util.Collections;
import java.util.Map;

import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.BuildConfigList;
import io.fabric8.openshift.client.OpenShiftClient;
import org.apache.camel.Exchange;
import org.apache.camel.component.kubernetes.AbstractKubernetesEndpoint;
import org.apache.camel.component.kubernetes.KubernetesConstants;
import org.apache.camel.component.kubernetes.KubernetesOperations;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.support.MessageHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Producer that executes OpenShift BuildConfig operations (list, list-by-labels,
 * get) against the cluster reachable through the endpoint's Kubernetes client.
 */
public class OpenshiftBuildConfigsProducer extends DefaultProducer {

    private static final Logger LOG = LoggerFactory.getLogger(OpenshiftBuildConfigsProducer.class);

    public OpenshiftBuildConfigsProducer(AbstractKubernetesEndpoint endpoint) {
        super(endpoint);
    }

    @Override
    public AbstractKubernetesEndpoint getEndpoint() {
        return (AbstractKubernetesEndpoint) super.getEndpoint();
    }

    /**
     * Dispatches to the concrete operation. The operation comes from the endpoint
     * configuration when set there, otherwise from the message header.
     */
    @Override
    public void process(Exchange exchange) throws Exception {
        String operation;

        if (ObjectHelper.isEmpty(getEndpoint().getKubernetesConfiguration().getOperation())) {
            operation = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_OPERATION, String.class);
        } else {
            operation = getEndpoint().getKubernetesConfiguration().getOperation();
        }

        switch (operation) {
            case KubernetesOperations.LIST_BUILD_CONFIGS:
                doList(exchange, operation);
                break;

            case KubernetesOperations.LIST_BUILD_CONFIGS_BY_LABELS_OPERATION:
                doListBuildConfigsByLabels(exchange, operation);
                break;

            case KubernetesOperations.GET_BUILD_CONFIG_OPERATION:
                doGetBuildConfig(exchange, operation);
                break;

            default:
                throw new IllegalArgumentException("Unsupported operation " + operation);
        }
    }

    /** Lists all build configs across every namespace. */
    protected void doList(Exchange exchange, String operation) throws Exception {
        BuildConfigList buildConfigsList
                = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs().inAnyNamespace().list();
        exchange.getOut().setBody(buildConfigsList.getItems());
    }

    /**
     * Lists build configs matching the labels supplied in the
     * {@code KUBERNETES_BUILD_CONFIGS_LABELS} header, optionally scoped to the
     * namespace in {@code KUBERNETES_NAMESPACE_NAME}.
     */
    protected void doListBuildConfigsByLabels(Exchange exchange, String operation) throws Exception {
        Map<String, String> labels
                = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_BUILD_CONFIGS_LABELS, Map.class);
        String namespaceName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);

        // Guard against a missing labels header: the original code dereferenced it
        // unconditionally and would throw an NPE.
        if (labels == null) {
            labels = Collections.emptyMap();
        }

        // BUG FIX: the original looped calling withLabel(...) and discarded the
        // returned (filtered) operation, so the label filter could be lost.
        // withLabels(map) applies the whole selector in one call on the returned
        // filterable, whose result is then listed directly.
        BuildConfigList buildConfigsList;
        if (!ObjectHelper.isEmpty(namespaceName)) {
            buildConfigsList = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class)
                    .buildConfigs().inNamespace(namespaceName).withLabels(labels).list();
        } else {
            buildConfigsList = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class)
                    .buildConfigs().inAnyNamespace().withLabels(labels).list();
        }

        MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
        exchange.getOut().setBody(buildConfigsList.getItems());
    }

    /**
     * Fetches a single build config by name and namespace; both the
     * {@code KUBERNETES_BUILD_CONFIG_NAME} and {@code KUBERNETES_NAMESPACE_NAME}
     * headers are required.
     */
    protected void doGetBuildConfig(Exchange exchange, String operation) throws Exception {
        String buildConfigName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_BUILD_CONFIG_NAME, String.class);
        String namespaceName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);

        if (ObjectHelper.isEmpty(buildConfigName)) {
            LOG.error("Get a specific Build Config require specify a Build Config name");
            throw new IllegalArgumentException("Get a specific Build Config require specify a Build Config name");
        }
        if (ObjectHelper.isEmpty(namespaceName)) {
            LOG.error("Get a specific Build Config require specify a namespace name");
            throw new IllegalArgumentException("Get a specific Build Config require specify a namespace name");
        }

        BuildConfig buildConfig = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class)
                .buildConfigs().inNamespace(namespaceName).withName(buildConfigName).get();

        MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
        exchange.getOut().setBody(buildConfig);
    }
}
apache-2.0
harinigunabalan/PerformanceHat
cw-feedback-handler/src/main/java/CloudWave/CloudWaveJNI.java
3270
/*******************************************************************************
 * Copyright 2015 Software Evolution and Architecture Lab, University of Zurich
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package CloudWave;

/**
 * Singleton JNI bridge to the native "cloudwavejni" library.
 *
 * <p>Implemented as a single-element enum ({@code instance}), which gives a
 * lazily-initialized, serialization-safe singleton. The native library is
 * loaded once, when the enum constant is first referenced.</p>
 */
public enum CloudWaveJNI {
    instance;

    // Name passed to System.loadLibrary (platform prefix/suffix added by the JVM).
    public static final String CLOUDWAVE_LIB = "cloudwavejni";

    // Loading here ties library load to enum initialization; an UnsatisfiedLinkError
    // at this point surfaces as ExceptionInInitializerError to the first caller.
    CloudWaveJNI() {System.loadLibrary(CLOUDWAVE_LIB);}

    public static CloudWaveJNI getInstance(){return instance;}

    /**
     * Initializes the native side.
     *
     * @throws CloudWaveException if the native initJNI call reports failure
     *         (a negative return code, echoed to stderr).
     */
    public void init() throws CloudWaveException{
        int r = initJNI();
        if (r<0) {
            System.err.println("initJNI returned " + r);
            throw new CloudWaveException();
        }
    }

    /** Releases the native-side resources acquired by {@link #init()}. */
    public void free(){
        freeJNI();
    }

    // Handler invoked when the native layer posts an event; guarded by `this`.
    protected IEventHandler eventHandler;

    public IEventHandler getEventHandler() {
        return eventHandler;
    }

    /** Installs the event handler; synchronized so it never races doEvent. */
    public void setEventHandler(IEventHandler eh) {
        synchronized(this){ eventHandler = eh;}
    }

    /** Forwards an event string to the current handler, if one is installed. */
    public void doEvent(String event){
        synchronized(this) {
            if (eventHandler!=null)
                eventHandler.doEvent(event);
        }
    }

    // Entry point called FROM native code (resolved by name via JNI); keep the
    // signature stable — renaming it would break the native lookup.
    protected synchronized static void callback(String event){
        instance.doEvent(event);
    }

    //#: Init/Free
    public native int initJNI();
    protected native int freeJNI();
    //:#

    //#: Log
    // Return codes: presumably 0/positive on success, negative on failure —
    // TODO confirm against the native implementation.
    protected native int initLog();
    protected native int freeLog();
    protected native int setLogId(String id);
    protected native String getLogId();
    protected native int recordLog(int level, String message);
    protected native int recordLogL(int level, String message, long id);
    //:#

    //#: Metric
    // L/D/S suffixes select the value type: long, double, or String.
    protected native int initMetric();
    protected native int freeMetric();
    protected native int recordMetricL(int source, String name, String mdata, String munit, int type, long value);
    protected native int recordMetricD(int source, String name, String mdata, String munit, int type, double value);
    protected native int recordMetricS(int source, String name, String mdata, String munit, int type, String value);
    protected native int recordEventL(int source, String name, String mdata, String munit, int type, long value);
    protected native int recordEventD(int source, String name, String mdata, String munit, int type, double value);
    protected native int recordEventS(int source, String name, String mdata, String munit, int type, String value);
    //:#

    //#: Events
    protected native int initEvent();
    protected native int freeEvent();
    protected native int postEvent(String event_json);
    // subscribe returns an opaque native handle to pass back to unsubscribe.
    protected native long subscribe(String event_id);
    protected native int unsubscribe(long id);
    //:#
}
apache-2.0
wattale/carbon-identity
components/identity/org.wso2.carbon.identity.entitlement.filter/src/main/java/org/wso2/carbon/identity/entitlement/filter/callback/BasicAuthCallBackHandler.java
1973
/* * Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.entitlement.filter.callback; import org.apache.commons.codec.binary.Base64; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.identity.entitlement.filter.exception.EntitlementFilterException; import javax.servlet.http.HttpServletRequest; public class BasicAuthCallBackHandler extends EntitlementFilterCallBackHandler { private static final Log log = LogFactory.getLog(BasicAuthCallBackHandler.class); public BasicAuthCallBackHandler(HttpServletRequest request) throws EntitlementFilterException { String authHeaderEn = null; if (!(request.getHeader("Authorization") == null || request.getHeader("Authorization").equals("null"))) { authHeaderEn = request.getHeader("Authorization"); String tempArr[] = authHeaderEn.split(" "); if (tempArr.length == 2) { String authHeaderDc = new String(Base64.decodeBase64(tempArr[1].getBytes())); tempArr = authHeaderDc.split(":"); if (tempArr.length == 2) { setUserName(tempArr[0]); } } throw new EntitlementFilterException("Unable to retrieve username from Authorization header"); } } }
apache-2.0
vakninr/spring-boot
spring-boot-project/spring-boot/src/test/java/org/springframework/boot/logging/DeferredLogTests.java
4690
/* * Copyright 2012-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.logging; import org.apache.commons.logging.Log; import org.junit.Test; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyZeroInteractions; /** * Tests for {@link DeferredLog}. * * @author Phillip Webb */ public class DeferredLogTests { private DeferredLog deferredLog = new DeferredLog(); private Object message = "Message"; private Throwable throwable = new IllegalStateException(); private Log log = mock(Log.class); @Test public void isTraceEnabled() throws Exception { assertThat(this.deferredLog.isTraceEnabled()).isTrue(); } @Test public void isDebugEnabled() throws Exception { assertThat(this.deferredLog.isDebugEnabled()).isTrue(); } @Test public void isInfoEnabled() throws Exception { assertThat(this.deferredLog.isInfoEnabled()).isTrue(); } @Test public void isWarnEnabled() throws Exception { assertThat(this.deferredLog.isWarnEnabled()).isTrue(); } @Test public void isErrorEnabled() throws Exception { assertThat(this.deferredLog.isErrorEnabled()).isTrue(); } @Test public void isFatalEnabled() throws Exception { assertThat(this.deferredLog.isFatalEnabled()).isTrue(); } @Test public void trace() throws Exception { this.deferredLog.trace(this.message); 
this.deferredLog.replayTo(this.log); verify(this.log).trace(this.message, null); } @Test public void traceWithThrowable() throws Exception { this.deferredLog.trace(this.message, this.throwable); this.deferredLog.replayTo(this.log); verify(this.log).trace(this.message, this.throwable); } @Test public void debug() throws Exception { this.deferredLog.debug(this.message); this.deferredLog.replayTo(this.log); verify(this.log).debug(this.message, null); } @Test public void debugWithThrowable() throws Exception { this.deferredLog.debug(this.message, this.throwable); this.deferredLog.replayTo(this.log); verify(this.log).debug(this.message, this.throwable); } @Test public void info() throws Exception { this.deferredLog.info(this.message); this.deferredLog.replayTo(this.log); verify(this.log).info(this.message, null); } @Test public void infoWithThrowable() throws Exception { this.deferredLog.info(this.message, this.throwable); this.deferredLog.replayTo(this.log); verify(this.log).info(this.message, this.throwable); } @Test public void warn() throws Exception { this.deferredLog.warn(this.message); this.deferredLog.replayTo(this.log); verify(this.log).warn(this.message, null); } @Test public void warnWithThrowable() throws Exception { this.deferredLog.warn(this.message, this.throwable); this.deferredLog.replayTo(this.log); verify(this.log).warn(this.message, this.throwable); } @Test public void error() throws Exception { this.deferredLog.error(this.message); this.deferredLog.replayTo(this.log); verify(this.log).error(this.message, null); } @Test public void errorWithThrowable() throws Exception { this.deferredLog.error(this.message, this.throwable); this.deferredLog.replayTo(this.log); verify(this.log).error(this.message, this.throwable); } @Test public void fatal() throws Exception { this.deferredLog.fatal(this.message); this.deferredLog.replayTo(this.log); verify(this.log).fatal(this.message, null); } @Test public void fatalWithThrowable() throws Exception { 
this.deferredLog.fatal(this.message, this.throwable); this.deferredLog.replayTo(this.log); verify(this.log).fatal(this.message, this.throwable); } @Test public void clearsOnReplayTo() throws Exception { this.deferredLog.info("1"); this.deferredLog.fatal("2"); Log log2 = mock(Log.class); this.deferredLog.replayTo(this.log); this.deferredLog.replayTo(log2); verify(this.log).info("1", null); verify(this.log).fatal("2", null); verifyNoMoreInteractions(this.log); verifyZeroInteractions(log2); } }
apache-2.0
mariofusco/droolsjbpm-integration
drools-pipeline/src/main/java/org/drools/runtime/pipeline/Transformer.java
836
/*
 * Copyright 2010 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.runtime.pipeline;

/**
 * Marker interface for a pipeline stage that both receives and emits data,
 * transforming payloads as they pass through. It declares no methods of its
 * own; the contract is the union of {@link Emitter}, {@link Receiver} and
 * {@link Stage}.
 *
 * <p>This api is experimental and thus the classes and the interfaces returned
 * are subject to change.</p>
 */
public interface Transformer extends Emitter, Receiver, Stage {

}
apache-2.0
antelder/tool.lars
server/src/test/java/com/ibm/ws/lars/rest/TestUtils.java
2655
/*******************************************************************************
 * Copyright (c) 2015 IBM Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package com.ibm.ws.lars.rest;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;

import com.ibm.ws.lars.rest.model.Asset;
import com.ibm.ws.lars.rest.model.AssetList;

/**
 * Static helper methods shared by the REST test suite.
 */
public class TestUtils {

    // Utility class: static methods only, never instantiated.
    private TestUtils() {
    }

    /**
     * Reads the specified InputStream and returns a byte array containing all the bytes read.
     *
     * <p>The stream is read to EOF but deliberately not closed — that remains the
     * caller's responsibility.</p>
     *
     * @param is the stream to drain
     * @return every byte read from the stream, in order
     * @throws IOException if reading fails
     */
    public static byte[] slurp(InputStream is) throws IOException {
        byte[] buffer = new byte[1024];
        int length;
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        while ((length = is.read(buffer)) != -1) {
            baos.write(buffer, 0, length);
        }
        return baos.toByteArray();
    }

    /**
     * Assert that an AssetList contains exactly the given list of assets.
     * <p>
     * This method assumes that all assets have an ID and there are no duplicates in the asset list.
     *
     * @param list the actual assets returned by the server
     * @param assets the complete set of expected assets
     * @throws AssertionError if the expected assets contain a duplicate id, if the list
     *             contains an unexpected asset, or if any expected asset is missing
     */
    public static void assertAssetList(AssetList list, Asset... assets) {
        // Index the expected assets by id, failing fast on duplicate expectations.
        Map<String, Asset> assetIdMap = new HashMap<>();
        for (Asset asset : assets) {
            if (assetIdMap.put(asset.get_id(), asset) != null) {
                throw new AssertionError("Duplicate found in list of expected assets:\n" + asset.toJson());
            }
        }

        // Each actual asset must consume exactly one expectation.
        for (Asset asset : list) {
            if (assetIdMap.remove(asset.get_id()) == null) {
                throw new AssertionError("Unexpected asset found in the asset list:\n" + asset.toJson());
            }
        }

        // Anything left over was expected but never seen.
        if (!assetIdMap.isEmpty()) {
            StringBuilder message = new StringBuilder("Assets missing from asset list:\n");
            for (Asset asset : assetIdMap.values()) {
                message.append(asset.toJson());
                message.append("\n");
            }
            throw new AssertionError(message.toString());
        }
    }
}
apache-2.0
mdogan/hazelcast
hazelcast/src/test/java/com/hazelcast/topic/impl/reliable/SubscriptionMigrationTest.java
5092
/*
 * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.topic.impl.reliable;

import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.partition.MigrationState;
import com.hazelcast.partition.MigrationListener;
import com.hazelcast.partition.ReplicaMigrationEvent;
import com.hazelcast.ringbuffer.impl.RingbufferService;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.OverridePropertyRule;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import com.hazelcast.topic.ITopic;
import com.hazelcast.topic.Message;
import com.hazelcast.topic.MessageListener;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicInteger;

import static org.junit.Assert.assertTrue;

/**
 * Verifies that reliable-topic message listeners keep working after the
 * partition hosting the topic's backing ringbuffer migrates to another member
 * and then back again.
 */
@Category({QuickTest.class, ParallelJVMTest.class})
@RunWith(HazelcastParallelClassRunner.class)
public class SubscriptionMigrationTest extends HazelcastTestSupport {

    // Only 2 partitions, so each topic below can be pinned to a distinct partition
    // and every migration is easy to count.
    @Rule
    public OverridePropertyRule overridePropertyRule = OverridePropertyRule.set("hazelcast.partition.count", "2");

    // gh issue: https://github.com/hazelcast/hazelcast/issues/13602
    @Test
    public void testListenerReceivesMessagesAfterPartitionIsMigratedBack() {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
        HazelcastInstance instance1 = factory.newHazelcastInstance();

        // One reliable topic pinned to each of the two partitions.
        final String rtNameOnPartition0 = generateReliableTopicNameForPartition(instance1, 0);
        final String rtNameOnPartition1 = generateReliableTopicNameForPartition(instance1, 1);

        ITopic<String> topic0 = instance1.getReliableTopic(rtNameOnPartition0);
        ITopic<String> topic1 = instance1.getReliableTopic(rtNameOnPartition1);

        final CountingMigrationListener migrationListener = new CountingMigrationListener();
        instance1.getPartitionService().addMigrationListener(migrationListener);

        final PayloadMessageListener<String> listener0 = new PayloadMessageListener<String>();
        final PayloadMessageListener<String> listener1 = new PayloadMessageListener<String>();
        topic0.addMessageListener(listener0);
        topic1.addMessageListener(listener1);

        // Publish before the second member joins, so these land pre-migration.
        topic0.publish("itemA");
        topic1.publish("item1");

        // A second member triggers rebalancing away from instance1.
        HazelcastInstance instance2 = factory.newHazelcastInstance();

        // 1 primary, 1 backup migration
        assertEqualsEventually(2, migrationListener.partitionMigrationCount);

        // Shutting it down migrates the partition back to instance1.
        instance2.shutdown();

        assertEqualsEventually(3, migrationListener.partitionMigrationCount);

        // Publish after the round-trip migration; listeners must still fire.
        topic0.publish("itemB");
        topic1.publish("item2");

        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                assertTrue(listener0.isReceived("itemA"));
                assertTrue(listener0.isReceived("itemB"));
                assertTrue(listener1.isReceived("item1"));
                assertTrue(listener1.isReceived("item2"));
            }
        });
    }

    /** Records every payload it receives so tests can assert on delivery. */
    public class PayloadMessageListener<V> implements MessageListener<V> {

        private Collection<V> receivedMessages = new HashSet<V>();

        @Override
        public void onMessage(Message<V> message) {
            receivedMessages.add(message.getMessageObject());
        }

        boolean isReceived(V message) {
            return receivedMessages.contains(message);
        }
    }

    /** Counts completed replica migrations (primary and backup alike). */
    public class CountingMigrationListener implements MigrationListener {

        AtomicInteger partitionMigrationCount = new AtomicInteger();

        @Override
        public void migrationStarted(MigrationState state) {
        }

        @Override
        public void migrationFinished(MigrationState state) {
        }

        @Override
        public void replicaMigrationCompleted(ReplicaMigrationEvent event) {
            partitionMigrationCount.incrementAndGet();
        }

        @Override
        public void replicaMigrationFailed(ReplicaMigrationEvent event) {
        }
    }

    // Builds a topic name whose backing ringbuffer key hashes to the requested partition.
    private String generateReliableTopicNameForPartition(HazelcastInstance instance, int partitionId) {
        return generateKeyForPartition(instance, RingbufferService.TOPIC_RB_PREFIX, partitionId);
    }
}
apache-2.0
pdxrunner/geode
geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/CountDurableCQEventsCommand.java
4255
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.management.internal.cli.commands;

import java.util.List;
import java.util.Set;

import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;

import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.management.cli.CliMetaData;
import org.apache.geode.management.cli.ConverterHint;
import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.CliUtil;
import org.apache.geode.management.internal.cli.domain.SubscriptionQueueSizeResult;
import org.apache.geode.management.internal.cli.functions.GetSubscriptionQueueSizeFunction;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.result.ResultBuilder;
import org.apache.geode.management.internal.security.ResourceOperation;
import org.apache.geode.security.ResourcePermission;

/**
 * gfsh command that counts the events pending in a durable client's
 * subscription queue, optionally narrowed to a single durable CQ.
 */
public class CountDurableCQEventsCommand extends InternalGfshCommand {

  /** Builds the gfsh table result from the per-member queue sizes. */
  private final DurableClientCommandsResultBuilder builder =
      new DurableClientCommandsResultBuilder();

  /**
   * Executes {@link GetSubscriptionQueueSizeFunction} on the targeted members and
   * renders the aggregated queue sizes as a table.
   *
   * @param durableClientId durable client whose queue is inspected (mandatory)
   * @param cqName optional durable CQ name; when absent the whole client queue is counted
   * @param memberNameOrId optional member filter
   * @param group optional group filter
   * @return table result on success, user-error result when no members match,
   *         GemFire-error result if the function execution throws
   */
  @CliCommand(value = CliStrings.COUNT_DURABLE_CQ_EVENTS,
      help = CliStrings.COUNT_DURABLE_CQ_EVENTS__HELP)
  @CliMetaData()
  @ResourceOperation(resource = ResourcePermission.Resource.CLUSTER,
      operation = ResourcePermission.Operation.READ)
  public Result countDurableCqEvents(
      @CliOption(key = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CLIENT__ID, mandatory = true,
          help = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CLIENT__ID__HELP) final String durableClientId,
      @CliOption(key = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CQ__NAME,
          help = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CQ__NAME__HELP) final String cqName,
      @CliOption(key = {CliStrings.MEMBER, CliStrings.MEMBERS},
          help = CliStrings.COUNT_DURABLE_CQ_EVENTS__MEMBER__HELP,
          optionContext = ConverterHint.MEMBERIDNAME) final String[] memberNameOrId,
      @CliOption(key = {CliStrings.GROUP, CliStrings.GROUPS},
          help = CliStrings.COUNT_DURABLE_CQ_EVENTS__GROUP__HELP,
          optionContext = ConverterHint.MEMBERGROUP) final String[] group) {
    Result result;
    try {
      Set<DistributedMember> targetMembers = findMembers(group, memberNameOrId);

      if (targetMembers.isEmpty()) {
        return ResultBuilder.createUserErrorResult(CliStrings.NO_MEMBERS_FOUND_MESSAGE);
      }

      String[] params = new String[] {durableClientId, cqName};
      final ResultCollector<?, ?> rc =
          CliUtil.executeFunction(new GetSubscriptionQueueSizeFunction(), params, targetMembers);
      // GetSubscriptionQueueSizeFunction returns one SubscriptionQueueSizeResult per member
      @SuppressWarnings("unchecked")
      final List<SubscriptionQueueSizeResult> funcResults =
          (List<SubscriptionQueueSizeResult>) rc.getResult();

      // NOTE(review): both branches format the same ..__CLIENT message key; the CQ branch
      // possibly should use a CQ-specific key — confirm against CliStrings before changing.
      String queueSizeColumnName;
      if (cqName != null && !cqName.isEmpty()) {
        queueSizeColumnName = CliStrings
            .format(CliStrings.COUNT_DURABLE_CQ_EVENTS__SUBSCRIPTION__QUEUE__SIZE__CLIENT, cqName);
      } else {
        queueSizeColumnName = CliStrings.format(
            CliStrings.COUNT_DURABLE_CQ_EVENTS__SUBSCRIPTION__QUEUE__SIZE__CLIENT, durableClientId);
      }
      result = builder.buildTableResultForQueueSize(funcResults, queueSizeColumnName);
    } catch (Exception e) {
      // boundary of the gfsh command: surface any failure as a GemFire error result
      result = ResultBuilder.createGemFireErrorResult(e.getMessage());
    }
    return result;
  }
}
apache-2.0
Alex0889/oprunyak
chapter_001/src/test/java/ru/job4j/loop/package-info.java
110
/** * @author Oleksandr Prunyak (987456987p@gmail.com) * @version $Id$ * @since 0.1 */ package ru.job4j.loop;
apache-2.0
Rikkola/kie-wb-common
kie-wb-common-stunner/kie-wb-common-stunner-sets/kie-wb-common-stunner-bpmn/kie-wb-common-stunner-bpmn-marshalling/src/test/java/org/kie/workbench/common/stunner/bpmn/client/marshall/converters/fromstunner/properties/EmbeddedSubprocessPropertyWriterTest.java
1772
/* * Copyright 2019 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.stunner.bpmn.client.marshall.converters.fromstunner.properties; import org.junit.Test; import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.customproperties.CustomElement; import org.kie.workbench.common.stunner.bpmn.definition.property.general.SLADueDate; import static junit.framework.TestCase.assertTrue; import static org.kie.workbench.common.stunner.bpmn.client.marshall.converters.fromstunner.Factories.bpmn2; public class EmbeddedSubprocessPropertyWriterTest { private SubProcessPropertyWriter tested = new SubProcessPropertyWriter(bpmn2.createSubProcess(), new FlatVariableScope()); @Test public void testSetIsAsync() { tested.setAsync(Boolean.TRUE); assertTrue(CustomElement.async.of(tested.getFlowElement()).get()); } @Test public void testSetSlaDueDate() { String slaDueDate = "12/25/1983"; tested.setSlaDueDate(new SLADueDate(slaDueDate)); assertTrue(CustomElement.slaDueDate.of(tested.getFlowElement()).get().contains(slaDueDate)); } }
apache-2.0
Digaku/closure-template
java/src/com/google/template/soy/exprtree/OperatorNodes.java
7510
/*
 * Copyright 2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.template.soy.exprtree;

/**
 * Container of nodes representing operators.
 *
 * <p>Each nested class follows the same shape: a no-arg constructor that passes its
 * {@link Operator} to {@link AbstractOperatorNode}, a protected copy constructor used by
 * {@code clone()}, a {@code getKind()} that returns the matching {@code Kind}, and a
 * covariant {@code clone()} implemented via the copy constructor.
 *
 * <p> Important: Do not use outside of Soy code (treat as superpackage-private).
 *
 * @author Kai Huang
 */
public class OperatorNodes {

  // Non-instantiable container class.
  private OperatorNodes() {}

  /**
   * Node representing the unary '-' (negative) operator.
   */
  public static class NegativeOpNode extends AbstractOperatorNode {
    public NegativeOpNode() { super(Operator.NEGATIVE); }
    protected NegativeOpNode(NegativeOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.NEGATIVE_OP_NODE; }
    @Override public NegativeOpNode clone() { return new NegativeOpNode(this); }
  }

  /**
   * Node representing the 'not' operator.
   */
  public static class NotOpNode extends AbstractOperatorNode {
    public NotOpNode() { super(Operator.NOT); }
    protected NotOpNode(NotOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.NOT_OP_NODE; }
    @Override public NotOpNode clone() { return new NotOpNode(this); }
  }

  /**
   * Node representing the '*' (times) operator.
   */
  public static class TimesOpNode extends AbstractOperatorNode {
    public TimesOpNode() { super(Operator.TIMES); }
    protected TimesOpNode(TimesOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.TIMES_OP_NODE; }
    @Override public TimesOpNode clone() { return new TimesOpNode(this); }
  }

  /**
   * Node representing the '/' (divide by) operator.
   */
  public static class DivideByOpNode extends AbstractOperatorNode {
    public DivideByOpNode() { super(Operator.DIVIDE_BY); }
    protected DivideByOpNode(DivideByOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.DIVIDE_BY_OP_NODE; }
    @Override public DivideByOpNode clone() { return new DivideByOpNode(this); }
  }

  /**
   * Node representing the '%' (mod) operator.
   */
  public static class ModOpNode extends AbstractOperatorNode {
    public ModOpNode() { super(Operator.MOD); }
    protected ModOpNode(ModOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.MOD_OP_NODE; }
    @Override public ModOpNode clone() { return new ModOpNode(this); }
  }

  /**
   * Node representing the '+' (plus) operator.
   */
  public static class PlusOpNode extends AbstractOperatorNode {
    public PlusOpNode() { super(Operator.PLUS); }
    protected PlusOpNode(PlusOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.PLUS_OP_NODE; }
    @Override public PlusOpNode clone() { return new PlusOpNode(this); }
  }

  /**
   * Node representing the binary '-' (minus) operator.
   */
  public static class MinusOpNode extends AbstractOperatorNode {
    public MinusOpNode() { super(Operator.MINUS); }
    protected MinusOpNode(MinusOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.MINUS_OP_NODE; }
    @Override public MinusOpNode clone() { return new MinusOpNode(this); }
  }

  /**
   * Node representing the '&lt;' (less than) operator.
   */
  public static class LessThanOpNode extends AbstractOperatorNode {
    public LessThanOpNode() { super(Operator.LESS_THAN); }
    protected LessThanOpNode(LessThanOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.LESS_THAN_OP_NODE; }
    @Override public LessThanOpNode clone() { return new LessThanOpNode(this); }
  }

  /**
   * Node representing the '&gt;' (greater than) operator.
   */
  public static class GreaterThanOpNode extends AbstractOperatorNode {
    public GreaterThanOpNode() { super(Operator.GREATER_THAN); }
    protected GreaterThanOpNode(GreaterThanOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.GREATER_THAN_OP_NODE; }
    @Override public GreaterThanOpNode clone() { return new GreaterThanOpNode(this); }
  }

  /**
   * Node representing the '&lt;=' (less than or equal) operator.
   */
  public static class LessThanOrEqualOpNode extends AbstractOperatorNode {
    public LessThanOrEqualOpNode() { super(Operator.LESS_THAN_OR_EQUAL); }
    protected LessThanOrEqualOpNode(LessThanOrEqualOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.LESS_THAN_OR_EQUAL_OP_NODE; }
    @Override public LessThanOrEqualOpNode clone() { return new LessThanOrEqualOpNode(this); }
  }

  /**
   * Node representing the '&gt;=' (greater than or equal) operator.
   */
  public static class GreaterThanOrEqualOpNode extends AbstractOperatorNode {
    public GreaterThanOrEqualOpNode() { super(Operator.GREATER_THAN_OR_EQUAL); }
    protected GreaterThanOrEqualOpNode(GreaterThanOrEqualOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.GREATER_THAN_OR_EQUAL_OP_NODE; }
    @Override public GreaterThanOrEqualOpNode clone() { return new GreaterThanOrEqualOpNode(this); }
  }

  /**
   * Node representing the '==' (equal) operator.
   */
  public static class EqualOpNode extends AbstractOperatorNode {
    public EqualOpNode() { super(Operator.EQUAL); }
    protected EqualOpNode(EqualOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.EQUAL_OP_NODE; }
    @Override public EqualOpNode clone() { return new EqualOpNode(this); }
  }

  /**
   * Node representing the '!=' (not equal) operator.
   */
  public static class NotEqualOpNode extends AbstractOperatorNode {
    public NotEqualOpNode() { super(Operator.NOT_EQUAL); }
    protected NotEqualOpNode(NotEqualOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.NOT_EQUAL_OP_NODE; }
    @Override public NotEqualOpNode clone() { return new NotEqualOpNode(this); }
  }

  /**
   * Node representing the 'and' operator.
   */
  public static class AndOpNode extends AbstractOperatorNode {
    public AndOpNode() { super(Operator.AND); }
    protected AndOpNode(AndOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.AND_OP_NODE; }
    @Override public AndOpNode clone() { return new AndOpNode(this); }
  }

  /**
   * Node representing the 'or' operator.
   */
  public static class OrOpNode extends AbstractOperatorNode {
    public OrOpNode() { super(Operator.OR); }
    protected OrOpNode(OrOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.OR_OP_NODE; }
    @Override public OrOpNode clone() { return new OrOpNode(this); }
  }

  /**
   * Node representing the ternary '? :' (conditional) operator.
   */
  public static class ConditionalOpNode extends AbstractOperatorNode {
    public ConditionalOpNode() { super(Operator.CONDITIONAL); }
    protected ConditionalOpNode(ConditionalOpNode orig) { super(orig); }
    @Override public Kind getKind() { return Kind.CONDITIONAL_OP_NODE; }
    @Override public ConditionalOpNode clone() { return new ConditionalOpNode(this); }
  }
}
apache-2.0
gocd/gocd
common/src/main/java/com/thoughtworks/go/remote/work/artifact/ArtifactRequestProcessor.java
5906
/*
 * Copyright 2022 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.remote.work.artifact;

import com.thoughtworks.go.plugin.access.artifact.ArtifactExtensionConstants;
import com.thoughtworks.go.plugin.api.request.GoApiRequest;
import com.thoughtworks.go.plugin.api.response.DefaultGoApiResponse;
import com.thoughtworks.go.plugin.api.response.GoApiResponse;
import com.thoughtworks.go.plugin.infra.GoPluginApiRequestProcessor;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import com.thoughtworks.go.remote.work.artifact.ConsoleLogMessage.LogLevel;
import com.thoughtworks.go.util.command.*;
import com.thoughtworks.go.work.GoPublisher;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import static java.lang.String.format;

/**
 * Handles API requests made by artifact plugins (currently only console-log
 * forwarding), routing plugin log lines to the build console with the tag
 * appropriate to the current operation (fetch vs publish) while masking secrets.
 */
public class ArtifactRequestProcessor implements GoPluginApiRequestProcessor {
    private static final List<String> goSupportedVersions = ArtifactExtensionConstants.SUPPORTED_VERSIONS;
    private final SafeOutputStreamConsumer safeOutputStreamConsumer;
    private final ProcessType processType;

    /** Whether this processor serves a fetch-artifact or publish-artifact task. */
    private enum ProcessType {
        FETCH, PUBLISH
    }

    private static final Map<LogLevel, String> FETCH_ARTIFACT_LOG_LEVEL_TAG = new HashMap<LogLevel, String>() {{
        put(LogLevel.INFO, TaggedStreamConsumer.OUT);
        put(LogLevel.ERROR, TaggedStreamConsumer.ERR);
    }};
    private static final Map<LogLevel, String> PUBLISH_ARTIFACT_LOG_LEVEL_TAG = new HashMap<LogLevel, String>() {{
        put(LogLevel.INFO, TaggedStreamConsumer.PUBLISH);
        put(LogLevel.ERROR, TaggedStreamConsumer.PUBLISH_ERR);
    }};

    private ArtifactRequestProcessor(GoPublisher publisher, ProcessType processType, EnvironmentVariableContext environmentVariableContext) {
        CompositeConsumer errorStreamConsumer = new CompositeConsumer(CompositeConsumer.ERR, publisher);
        CompositeConsumer outputStreamConsumer = new CompositeConsumer(CompositeConsumer.OUT, publisher);
        this.safeOutputStreamConsumer = new SafeOutputStreamConsumer(new ProcessOutputStreamConsumer(errorStreamConsumer, outputStreamConsumer));
        // mask any secret environment variable values before they reach the console
        safeOutputStreamConsumer.addSecrets(environmentVariableContext.secrets());
        this.processType = processType;
    }

    public static ArtifactRequestProcessor forFetchArtifact(GoPublisher goPublisher, EnvironmentVariableContext environmentVariableContext) {
        return new ArtifactRequestProcessor(goPublisher, ProcessType.FETCH, environmentVariableContext);
    }

    public static ArtifactRequestProcessor forPublishArtifact(GoPublisher goPublisher, EnvironmentVariableContext environmentVariableContext) {
        return new ArtifactRequestProcessor(goPublisher, ProcessType.PUBLISH, environmentVariableContext);
    }

    /**
     * Dispatches a plugin API request. Unknown API names yield an error
     * response; unsupported API versions raise a RuntimeException.
     */
    @Override
    public GoApiResponse process(GoPluginDescriptor pluginDescriptor, GoApiRequest request) {
        validatePluginRequest(request);
        // Request.fromString returns null for unrecognized names; switching on a
        // null enum would throw NPE, so reject it explicitly instead.
        Request requestType = Request.fromString(request.api());
        if (requestType == null) {
            return DefaultGoApiResponse.error("Illegal api request");
        }
        switch (requestType) {
            case CONSOLE_LOG:
                return processConsoleLogRequest(pluginDescriptor, request);
            default:
                return DefaultGoApiResponse.error("Illegal api request");
        }
    }

    /** Writes one plugin console-log line, prefixed with the plugin id, at the mapped tag. */
    private GoApiResponse processConsoleLogRequest(GoPluginDescriptor pluginDescriptor, GoApiRequest request) {
        final ConsoleLogMessage consoleLogMessage = ConsoleLogMessage.fromJSON(request.requestBody());
        final String message = format("[%s] %s", pluginDescriptor.id(), consoleLogMessage.getMessage());

        Optional<String> parsedTag = parseTag(processType, consoleLogMessage.getLogLevel());
        if (parsedTag.isPresent()) {
            safeOutputStreamConsumer.taggedStdOutput(parsedTag.get(), message);
            return DefaultGoApiResponse.success(null);
        }
        return DefaultGoApiResponse.error(format("Unsupported log level `%s`.", consoleLogMessage.getLogLevel()));
    }

    /** Maps a log level to the console tag for the given operation, empty if unmapped. */
    private Optional<String> parseTag(ProcessType requestType, LogLevel logLevel) {
        switch (requestType) {
            case FETCH:
                return Optional.ofNullable(FETCH_ARTIFACT_LOG_LEVEL_TAG.get(logLevel));
            case PUBLISH:
                return Optional.ofNullable(PUBLISH_ARTIFACT_LOG_LEVEL_TAG.get(logLevel));
        }
        return Optional.empty();
    }

    /** Rejects requests whose extension API version the server does not support. */
    private void validatePluginRequest(GoApiRequest goPluginApiRequest) {
        if (!goSupportedVersions.contains(goPluginApiRequest.apiVersion())) {
            throw new RuntimeException(format("Unsupported '%s' API version: %s. Supported versions: %s",
                    goPluginApiRequest.api(), goPluginApiRequest.apiVersion(), goSupportedVersions));
        }
    }

    /** API names this processor understands. */
    public enum Request {
        CONSOLE_LOG("go.processor.artifact.console-log");

        private final String requestName;

        Request(String requestName) {
            this.requestName = requestName;
        }

        /** Case-insensitive lookup; returns null when the name is unknown. */
        public static Request fromString(String requestName) {
            if (requestName != null) {
                for (Request request : Request.values()) {
                    if (requestName.equalsIgnoreCase(request.requestName)) {
                        return request;
                    }
                }
            }
            return null;
        }

        public String requestName() {
            return requestName;
        }
    }
}
apache-2.0
javasoze/sensei
sensei-core/src/main/java/com/senseidb/search/node/impl/AbstractJsonQueryBuilderFactory.java
774
package com.senseidb.search.node.impl; import org.json.JSONObject; import com.senseidb.search.node.SenseiQueryBuilder; import com.senseidb.search.node.SenseiQueryBuilderFactory; import com.senseidb.search.req.SenseiQuery; import com.senseidb.util.JSONUtil.FastJSONObject; public abstract class AbstractJsonQueryBuilderFactory implements SenseiQueryBuilderFactory { @Override public SenseiQueryBuilder getQueryBuilder(SenseiQuery query) throws Exception { JSONObject jsonQuery = null; if (query != null) { byte[] bytes = query.toBytes(); jsonQuery = new FastJSONObject(new String(bytes, SenseiQuery.utf8Charset)); } return buildQueryBuilder(jsonQuery); } public abstract SenseiQueryBuilder buildQueryBuilder(JSONObject jsonQuery); }
apache-2.0
visallo/vertexium
core/src/main/java/org/vertexium/util/VerticesToEdgeIdsIterable.java
855
package org.vertexium.util;

import org.vertexium.Authorizations;
import org.vertexium.Direction;
import org.vertexium.Vertex;

import java.util.Iterator;

/**
 * Lazily flattens a collection of vertices into the ids of every edge
 * (both incoming and outgoing) attached to each vertex, filtered by the
 * supplied authorizations.
 */
public class VerticesToEdgeIdsIterable implements Iterable<String> {
    private final Iterable<? extends Vertex> vertices;
    private final Authorizations authorizations;

    public VerticesToEdgeIdsIterable(Iterable<? extends Vertex> vertices, Authorizations authorizations) {
        this.vertices = vertices;
        this.authorizations = authorizations;
    }

    @Override
    public Iterator<String> iterator() {
        // one sub-iterable of edge ids per vertex, concatenated lazily
        SelectManyIterable<Vertex, String> edgeIdsPerVertex =
                new SelectManyIterable<Vertex, String>(this.vertices) {
                    @Override
                    public Iterable<String> getIterable(Vertex vertex) {
                        return vertex.getEdgeIds(Direction.BOTH, authorizations);
                    }
                };
        return edgeIdsPerVertex.iterator();
    }
}
apache-2.0
BigAppOS/BigApp_Discuz_Android
libs/ZUtils/src/com/kit/db/Obj.java
43
package com.kit.db;

/**
 * Empty placeholder type in the db package; declares no members.
 * NOTE(review): appears to be unused scaffolding — confirm callers before removing.
 */
public class Obj {
}
apache-2.0
apache/incubator-systemml
src/test/java/org/apache/sysds/test/functions/recompile/FunctionRecompileTest.java
4614
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.sysds.test.functions.recompile;

import java.util.HashMap;

import org.junit.Assert;
import org.junit.Test;
import org.apache.sysds.conf.CompilerConfig;
import org.apache.sysds.hops.OptimizerUtils;
import org.apache.sysds.runtime.matrix.data.MatrixValue.CellIndex;
import org.apache.sysds.test.AutomatedTestBase;
import org.apache.sysds.test.TestConfiguration;
import org.apache.sysds.test.TestUtils;
import org.apache.sysds.utils.Statistics;

/**
 * Exercises dynamic recompilation inside DML functions across the four
 * combinations of {dynamic recompile on/off} x {inter-procedural analysis
 * on/off}, checking the number of compiled/executed distributed jobs and
 * comparing the DML result against an R reference script.
 */
public class FunctionRecompileTest extends AutomatedTestBase {
	private final static String TEST_NAME1 = "funct_recompile";
	private final static String TEST_DIR = "functions/recompile/";
	private final static String TEST_CLASS_DIR = TEST_DIR + FunctionRecompileTest.class.getSimpleName() + "/";

	// tolerance for the DML-vs-R matrix comparison
	private final static double eps = 1e-10;
	// input matrix dimensions; dense (sparsity 1.0)
	private final static int rows = 20;
	private final static int cols = 10;
	private final static double sparsity = 1.0;

	@Override
	public void setUp() {
		TestUtils.clearAssertionInformation();
		addTestConfiguration(TEST_NAME1,
			new TestConfiguration(TEST_CLASS_DIR, TEST_NAME1, new String[] { "Rout" }) );
	}

	@Test
	public void testFunctionWithoutRecompileWithoutIPA() {
		runFunctionTest(false, false);
	}

	@Test
	public void testFunctionWithoutRecompileWithIPA() {
		runFunctionTest(false, true);
	}

	@Test
	public void testFunctionWithRecompileWithoutIPA() {
		runFunctionTest(true, false);
	}

	@Test
	public void testFunctionWithRecompileWithIPA() {
		runFunctionTest(true, true);
	}

	/**
	 * Runs the DML script and its R reference under the given compiler flags,
	 * asserting the expected job counts and result equivalence.
	 *
	 * Note: the global flags are mutated for the duration of the run and
	 * restored in the finally block, so tests in this class must not run
	 * concurrently within the same JVM.
	 *
	 * @param recompile enable dynamic recompilation
	 * @param IPA enable inter-procedural analysis
	 */
	private void runFunctionTest( boolean recompile, boolean IPA ) {
		boolean oldFlagRecompile = CompilerConfig.FLAG_DYN_RECOMPILE;
		boolean oldFlagIPA = OptimizerUtils.ALLOW_INTER_PROCEDURAL_ANALYSIS;

		try {
			TestConfiguration config = getTestConfiguration(TEST_NAME1);
			config.addVariable("rows", rows);
			config.addVariable("cols", cols);
			loadTestConfiguration(config);

			String HOME = SCRIPT_DIR + TEST_DIR;
			fullDMLScriptName = HOME + TEST_NAME1 + ".dml";
			programArgs = new String[]{"-args", input("V"),
				Integer.toString(rows), Integer.toString(cols), output("R") };

			fullRScriptName = HOME + TEST_NAME1 + ".R";
			rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();

			// fresh random input per run; seed not fixed, result compared against R on same data
			long seed = System.nanoTime();
			double[][] V = getRandomMatrix(rows, cols, 0, 1, sparsity, seed);
			writeInputMatrix("V", V, true);

			CompilerConfig.FLAG_DYN_RECOMPILE = recompile;
			OptimizerUtils.ALLOW_INTER_PROCEDURAL_ANALYSIS = IPA;

			boolean exceptionExpected = false;
			runTest(true, exceptionExpected, null, -1);
			runRScript(true);

			//note: change from previous version due to fix in op selection (unknown size XtX and mapmult)

			// NOTE(review): the comments below speak of "MR jobs" but the assertions read
			// Statistics.getNoOfCompiledSPInst()/getNoOfExecutedSPInst() (Spark instructions)
			// — likely a leftover from the MR-to-Spark migration; confirm intended counters.

			//CHECK compiled MR jobs
			int expectNumCompiled = -1;
			if( IPA ) expectNumCompiled = 1; //reblock
			else      expectNumCompiled = 5; //reblock, GMR,GMR,GMR,GMR (last two should piggybacked)
			Assert.assertEquals("Unexpected number of compiled MR jobs.",
				expectNumCompiled, Statistics.getNoOfCompiledSPInst());

			//CHECK executed MR jobs
			int expectNumExecuted = -1;
			if( recompile ) expectNumExecuted = 0;
			else if( IPA )  expectNumExecuted = 1; //reblock
			else            expectNumExecuted = 41; //reblock, 10*(GMR,GMR,GMR, GMR) (last two should piggybacked)
			Assert.assertEquals("Unexpected number of executed MR jobs.",
				expectNumExecuted, Statistics.getNoOfExecutedSPInst());

			//compare matrices
			HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromOutputDir("R");
			HashMap<CellIndex, Double> rfile = readRMatrixFromExpectedDir("Rout");
			TestUtils.compareMatrices(dmlfile, rfile, eps, "DML", "R");
		}
		finally {
			// always restore the global compiler flags for subsequent tests
			CompilerConfig.FLAG_DYN_RECOMPILE = oldFlagRecompile;
			OptimizerUtils.ALLOW_INTER_PROCEDURAL_ANALYSIS = oldFlagIPA;
		}
	}
}
apache-2.0
hwstreaming/flink
flink-runtime/src/test/java/org/apache/flink/runtime/jobmanager/JobSubmitTest.java
7730
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.jobmanager;

import akka.actor.ActorSystem;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.akka.ListeningBehaviour;
import org.apache.flink.runtime.blob.BlobClient;
import org.apache.flink.runtime.blob.BlobKey;
import org.apache.flink.runtime.client.JobExecutionException;
import org.apache.flink.runtime.instance.ActorGateway;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings;
import org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings;
import org.apache.flink.runtime.jobgraph.tasks.JobCheckpointingSettings;
import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
import org.apache.flink.runtime.messages.JobManagerMessages;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.runtime.testtasks.NoOpInvokable;
import org.apache.flink.runtime.util.LeaderRetrievalUtils;
import org.apache.flink.util.NetUtils;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import scala.Tuple2;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests that the JobManager handles Jobs correctly that fail in
 * the initialization during the submit phase.
 *
 * <p>A single local JobManager actor system is started once per class and
 * shared by all tests; each test submits a job and inspects the failure mode.
 */
public class JobSubmitTest {

	// ask-timeout for all actor interactions in this test
	private static final FiniteDuration timeout = new FiniteDuration(60000, TimeUnit.MILLISECONDS);

	private static ActorSystem jobManagerSystem;
	private static ActorGateway jmGateway;
	private static Configuration jmConfig;

	/**
	 * Starts a local JobManager (no ResourceManager) on a free port and
	 * resolves its leader gateway for use by the tests.
	 */
	@BeforeClass
	public static void setupJobManager() {
		jmConfig = new Configuration();

		int port = NetUtils.getAvailablePort();

		jmConfig.setString(ConfigConstants.JOB_MANAGER_IPC_ADDRESS_KEY, "localhost");
		jmConfig.setInteger(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY, port);

		scala.Option<Tuple2<String, Object>> listeningAddress = scala.Option.apply(new Tuple2<String, Object>("localhost", port));
		jobManagerSystem = AkkaUtils.createActorSystem(jmConfig, listeningAddress);

		// only start JobManager (no ResourceManager)
		JobManager.startJobManagerActors(
			jmConfig,
			jobManagerSystem,
			TestingUtils.defaultExecutor(),
			TestingUtils.defaultExecutor(),
			JobManager.class,
			MemoryArchivist.class)._1();

		try {
			LeaderRetrievalService lrs = LeaderRetrievalUtils.createLeaderRetrievalService(jmConfig);

			jmGateway = LeaderRetrievalUtils.retrieveLeaderGateway(
					lrs,
					jobManagerSystem,
					timeout
			);
		} catch (Exception e) {
			fail("Could not retrieve the JobManager gateway. " + e.getMessage());
		}
	}

	@AfterClass
	public static void teardownJobmanager() {
		if (jobManagerSystem != null) {
			jobManagerSystem.shutdown();
		}
	}

	/**
	 * Submits a job referencing a blob that was deleted before submission and
	 * expects the submission to fail with an IOException cause.
	 */
	@Test
	public void testFailureWhenJarBlobsMissing() {
		try {
			// create a simple job graph
			JobVertex jobVertex = new JobVertex("Test Vertex");
			jobVertex.setInvokableClass(NoOpInvokable.class);
			JobGraph jg = new JobGraph("test job", jobVertex);

			// request the blob port from the job manager
			Future<Object> future = jmGateway.ask(JobManagerMessages.getRequestBlobManagerPort(), timeout);
			int blobPort = (Integer) Await.result(future, timeout);

			// upload two dummy bytes and add their keys to the job graph as dependencies
			BlobKey key1, key2;
			BlobClient bc = new BlobClient(new InetSocketAddress("localhost", blobPort), jmConfig);
			try {
				key1 = bc.put(new byte[10]);
				key2 = bc.put(new byte[10]);

				// delete one of the blobs to make sure that the startup failed
				bc.delete(key2);
			}
			finally {
				bc.close();
			}

			jg.addBlob(key1);
			jg.addBlob(key2);

			// submit the job
			Future<Object> submitFuture = jmGateway.ask(
					new JobManagerMessages.SubmitJob(
							jg,
							ListeningBehaviour.EXECUTION_RESULT),
					timeout);
			try {
				Await.result(submitFuture, timeout);
			}
			catch (JobExecutionException e) {
				// that is what we expect
				assertTrue(e.getCause() instanceof IOException);
			}
			catch (Exception e) {
				fail("Wrong exception type");
			}
		}
		catch (Exception e) {
			e.printStackTrace();
			fail(e.getMessage());
		}
	}

	/**
	 * Verifies a correct error message when vertices with master initialization
	 * (input formats / output formats) fail.
	 */
	@Test
	public void testFailureWhenInitializeOnMasterFails() {
		try {
			// create a simple job graph
			JobVertex jobVertex = new JobVertex("Vertex that fails in initializeOnMaster") {

				private static final long serialVersionUID = -3540303593784587652L;

				@Override
				public void initializeOnMaster(ClassLoader loader) throws Exception {
					throw new RuntimeException("test exception");
				}
			};

			jobVertex.setInvokableClass(NoOpInvokable.class);
			JobGraph jg = new JobGraph("test job", jobVertex);

			// submit the job
			Future<Object> submitFuture = jmGateway.ask(
					new JobManagerMessages.SubmitJob(
							jg,
							ListeningBehaviour.EXECUTION_RESULT),
					timeout);
			try {
				Await.result(submitFuture, timeout);
			}
			catch (JobExecutionException e) {
				// that is what we expect
				// test that the exception nesting is not too deep
				assertTrue(e.getCause() instanceof RuntimeException);
			}
			catch (Exception e) {
				fail("Wrong exception type");
			}
		}
		catch (Exception e) {
			e.printStackTrace();
			fail(e.getMessage());
		}
	}

	/**
	 * Submitting a job whose savepoint restore path does not exist must answer
	 * with a JobResultFailure (detached submission).
	 */
	@Test
	public void testAnswerFailureWhenSavepointReadFails() throws Exception {
		// create a simple job graph
		JobGraph jg = createSimpleJobGraph();
		jg.setSavepointRestoreSettings(SavepointRestoreSettings.forPath("pathThatReallyDoesNotExist..."));

		// submit the job
		Future<Object> submitFuture = jmGateway.ask(
				new JobManagerMessages.SubmitJob(jg, ListeningBehaviour.DETACHED), timeout);
		Object result = Await.result(submitFuture, timeout);
		assertEquals(JobManagerMessages.JobResultFailure.class, result.getClass());
	}

	/** Builds a one-vertex no-op job graph with checkpointing enabled. */
	private JobGraph createSimpleJobGraph() {
		JobVertex jobVertex = new JobVertex("Vertex");

		jobVertex.setInvokableClass(NoOpInvokable.class);
		List<JobVertexID> vertexIdList = Collections.singletonList(jobVertex.getID());

		JobGraph jg = new JobGraph("test job", jobVertex);
		jg.setSnapshotSettings(new JobCheckpointingSettings(vertexIdList, vertexIdList, vertexIdList,
				5000, 5000, 0L, 10, ExternalizedCheckpointSettings.none(), null, true));
		return jg;
	}
}
apache-2.0
ngs-mtech/drools
drools-core/src/main/java/org/drools/core/base/accumulators/CountAccumulateFunction.java
2915
/*
 * Copyright 2010 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.core.base.accumulators;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;

/**
 * An implementation of an accumulator capable of counting occurrences.
 */
public class CountAccumulateFunction extends AbstractAccumulateFunction<CountAccumulateFunction.CountData> {

    /** Stateless function: nothing to read. */
    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    }

    /** Stateless function: nothing to write. */
    public void writeExternal(ObjectOutput out) throws IOException {
    }

    /**
     * Accumulation context holding the running count. Externalizable so that
     * partially-accumulated sessions can be serialized.
     */
    protected static class CountData implements Externalizable {
        public long count = 0;

        public CountData() {}

        public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            count = in.readLong();
        }

        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeLong(count);
        }
    }

    /* (non-Javadoc)
     * @see org.kie.base.accumulators.AccumulateFunction#createContext()
     */
    public CountData createContext() {
        return new CountData();
    }

    /* (non-Javadoc)
     * @see org.kie.base.accumulators.AccumulateFunction#init(java.lang.Object)
     */
    public void init(CountData data) {
        data.count = 0;
    }

    /* (non-Javadoc)
     * @see org.kie.base.accumulators.AccumulateFunction#accumulate(java.lang.Object, java.lang.Object)
     */
    public void accumulate(CountData data, Object value) {
        data.count++;
    }

    /* (non-Javadoc)
     * @see org.kie.base.accumulators.AccumulateFunction#reverse(java.lang.Object, java.lang.Object)
     */
    public void reverse(CountData data, Object value) {
        data.count--;
    }

    /* (non-Javadoc)
     * @see org.kie.base.accumulators.AccumulateFunction#getResult(java.lang.Object)
     */
    public Object getResult(CountData data) {
        // Long.valueOf instead of the deprecated new Long(...) boxing constructor;
        // same Long value is returned, small values come from the Long cache.
        return Long.valueOf( data.count );
    }

    /* (non-Javadoc)
     * @see org.kie.base.accumulators.AccumulateFunction#supportsReverse()
     */
    public boolean supportsReverse() {
        return true;
    }

    /**
     * {@inheritDoc}
     */
    public Class< ? > getResultType() {
        return Long.class;
    }
}
apache-2.0
ptkool/presto
presto-main/src/test/java/com/facebook/presto/type/TestIpPrefixOperators.java
13240
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.type;

import com.facebook.presto.operator.scalar.AbstractTestFunctions;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.google.common.net.InetAddresses;
import io.airlift.slice.Slices;
import org.testng.annotations.Test;

import static com.facebook.presto.spi.function.OperatorType.HASH_CODE;
import static com.facebook.presto.spi.function.OperatorType.INDETERMINATE;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.type.IpAddressType.IPADDRESS;
import static com.facebook.presto.type.IpPrefixType.IPPREFIX;
import static java.lang.System.arraycopy;

/**
 * Tests the SQL operators of the IPPREFIX type: casts to and from VARCHAR and
 * IPADDRESS, equality/ordering comparisons, IS DISTINCT FROM, INDETERMINATE
 * and HASH_CODE. Expected values show that prefixes are canonicalized
 * (host bits zeroed, IPv4-mapped IPv6 rendered as dotted quads).
 */
public class TestIpPrefixOperators
        extends AbstractTestFunctions
{
    /** VARCHAR -> IPPREFIX casts: canonicalization at every prefix length, plus rejection of invalid literals. */
    @Test
    public void testVarcharToIpPrefixCast()
    {
        // IPv4 (including IPv4-mapped IPv6 notation): host bits below the prefix length are zeroed
        assertFunction("CAST('::ffff:1.2.3.4/24' AS IPPREFIX)", IPPREFIX, "1.2.3.0/24");
        assertFunction("CAST('192.168.0.0/24' AS IPPREFIX)", IPPREFIX, "192.168.0.0/24");
        assertFunction("CAST('255.2.3.4/0' AS IPPREFIX)", IPPREFIX, "0.0.0.0/0");
        assertFunction("CAST('255.2.3.4/1' AS IPPREFIX)", IPPREFIX, "128.0.0.0/1");
        assertFunction("CAST('255.2.3.4/2' AS IPPREFIX)", IPPREFIX, "192.0.0.0/2");
        assertFunction("CAST('255.2.3.4/4' AS IPPREFIX)", IPPREFIX, "240.0.0.0/4");
        assertFunction("CAST('1.2.3.4/8' AS IPPREFIX)", IPPREFIX, "1.0.0.0/8");
        assertFunction("CAST('1.2.3.4/16' AS IPPREFIX)", IPPREFIX, "1.2.0.0/16");
        assertFunction("CAST('1.2.3.4/24' AS IPPREFIX)", IPPREFIX, "1.2.3.0/24");
        assertFunction("CAST('1.2.3.255/25' AS IPPREFIX)", IPPREFIX, "1.2.3.128/25");
        assertFunction("CAST('1.2.3.255/26' AS IPPREFIX)", IPPREFIX, "1.2.3.192/26");
        assertFunction("CAST('1.2.3.255/28' AS IPPREFIX)", IPPREFIX, "1.2.3.240/28");
        assertFunction("CAST('1.2.3.255/30' AS IPPREFIX)", IPPREFIX, "1.2.3.252/30");
        assertFunction("CAST('1.2.3.255/32' AS IPPREFIX)", IPPREFIX, "1.2.3.255/32");
        // IPv6: different spellings of the same address canonicalize to the same compressed form
        assertFunction("CAST('2001:0db8:0000:0000:0000:ff00:0042:8329/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
        assertFunction("CAST('2001:db8::ff00:42:8329/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
        assertFunction("CAST('2001:db8:0:0:1:0:0:1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
        assertFunction("CAST('2001:db8:0:0:1::1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
        assertFunction("CAST('2001:db8::1:0:0:1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
        assertFunction("CAST('2001:DB8::FF00:ABCD:12EF/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:abcd:12ef/128");
        // IPv6 all-ones address truncated at every interesting prefix length
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/0' AS IPPREFIX)", IPPREFIX, "::/0");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/1' AS IPPREFIX)", IPPREFIX, "8000::/1");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/2' AS IPPREFIX)", IPPREFIX, "c000::/2");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/4' AS IPPREFIX)", IPPREFIX, "f000::/4");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/8' AS IPPREFIX)", IPPREFIX, "ff00::/8");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/16' AS IPPREFIX)", IPPREFIX, "ffff::/16");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/32' AS IPPREFIX)", IPPREFIX, "ffff:ffff::/32");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/48' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff::/48");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/64' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff::/64");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/80' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff::/80");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/96' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff::/96");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/112' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:0/112");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/120' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ff00/120");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/124' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0/124");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/126' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffc/126");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/127' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/127");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128");
        // IPPREFIX literal syntax
        assertFunction("IPPREFIX '10.0.0.0/32'", IPPREFIX, "10.0.0.0/32");
        assertFunction("IPPREFIX '64:ff9b::10.0.0.0/128'", IPPREFIX, "64:ff9b::a00:0/128");
        // invalid literals: host names, malformed IPv6, out-of-range octets, missing or oversized prefix length
        assertInvalidCast("CAST('facebook.com/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: facebook.com/32");
        assertInvalidCast("CAST('localhost/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: localhost/32");
        assertInvalidCast("CAST('2001:db8::1::1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 2001:db8::1::1/128");
        assertInvalidCast("CAST('2001:zxy::1::1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 2001:zxy::1::1/128");
        assertInvalidCast("CAST('789.1.1.1/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 789.1.1.1/32");
        assertInvalidCast("CAST('192.1.1.1' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 192.1.1.1");
        assertInvalidCast("CAST('192.1.1.1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 192.1.1.1/128");
    }

    /** IPPREFIX -> VARCHAR casts render the canonical (compressed / dotted-quad) form. */
    @Test
    public void testIpPrefixToVarcharCast()
    {
        assertFunction("CAST(IPPREFIX '::ffff:1.2.3.4/32' AS VARCHAR)", VARCHAR, "1.2.3.4/32");
        assertFunction("CAST(IPPREFIX '::ffff:102:304/32' AS VARCHAR)", VARCHAR, "1.2.3.4/32");
        assertFunction("CAST(IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' AS VARCHAR)", VARCHAR, "2001:db8::ff00:42:8329/128");
        assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/128' AS VARCHAR)", VARCHAR, "2001:db8::ff00:42:8329/128");
        assertFunction("CAST(IPPREFIX '2001:db8:0:0:1:0:0:1/128' AS VARCHAR)", VARCHAR, "2001:db8::1:0:0:1/128");
        assertFunction("CAST(CAST('1.2.3.4/32' AS IPPREFIX) AS VARCHAR)", VARCHAR, "1.2.3.4/32");
        assertFunction("CAST(CAST('2001:db8:0:0:1::1/128' AS IPPREFIX) AS VARCHAR)", VARCHAR, "2001:db8::1:0:0:1/128");
        assertFunction("CAST(CAST('64:ff9b::10.0.0.0/128' AS IPPREFIX) AS VARCHAR)", VARCHAR, "64:ff9b::a00:0/128");
    }

    /** IPPREFIX -> IPADDRESS casts yield the network (lowest) address of the prefix. */
    @Test
    public void testIpPrefixToIpAddressCast()
    {
        assertFunction("CAST(IPPREFIX '1.2.3.4/32' AS IPADDRESS)", IPADDRESS, "1.2.3.4");
        assertFunction("CAST(IPPREFIX '1.2.3.4/24' AS IPADDRESS)", IPADDRESS, "1.2.3.0");
        assertFunction("CAST(IPPREFIX '::1/128' AS IPADDRESS)", IPADDRESS, "::1");
        assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/128' AS IPADDRESS)", IPADDRESS, "2001:db8::ff00:42:8329");
        assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/64' AS IPADDRESS)", IPADDRESS, "2001:db8::");
    }

    /** IPADDRESS -> IPPREFIX casts produce a single-address prefix (/32 IPv4, /128 IPv6). */
    @Test
    public void testIpAddressToIpPrefixCast()
    {
        assertFunction("CAST(IPADDRESS '1.2.3.4' AS IPPREFIX)", IPPREFIX, "1.2.3.4/32");
        assertFunction("CAST(IPADDRESS '::ffff:102:304' AS IPPREFIX)", IPPREFIX, "1.2.3.4/32");
        assertFunction("CAST(IPADDRESS '::1' AS IPPREFIX)", IPPREFIX, "::1/128");
        assertFunction("CAST(IPADDRESS '2001:db8::ff00:42:8329' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
    }

    /** Equality compares canonicalized prefixes: same network + same length, regardless of input spelling. */
    @Test
    public void testEquals()
    {
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' = IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, true);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) = CAST('::ffff:1.2.3.4/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("IPPREFIX '192.168.0.0/32' = IPPREFIX '::ffff:192.168.0.0/32'", BOOLEAN, true);
        assertFunction("IPPREFIX '10.0.0.0/32' = IPPREFIX '::ffff:a00:0/32'", BOOLEAN, true);
        // equal because both canonicalize to the same /24 network
        assertFunction("CAST('1.2.3.4/24' AS IPPREFIX) = IPPREFIX '1.2.3.5/24'", BOOLEAN, true);
        assertFunction("IPPREFIX '2001:db8::ff00:42:8329/128' = IPPREFIX '2001:db8::ff00:42:8300/128'", BOOLEAN, false);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) = IPPREFIX '1.2.3.5/32'", BOOLEAN, false);
        // same network address but different prefix length is not equal
        assertFunction("CAST('1.2.0.0/24' AS IPPREFIX) = IPPREFIX '1.2.0.0/25'", BOOLEAN, false);
    }

    /** IS DISTINCT FROM: like !=, but NULL-safe (two NULLs are not distinct). */
    @Test
    public void testDistinctFrom()
    {
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, false);
        assertFunction("CAST(NULL AS IPPREFIX) IS DISTINCT FROM CAST(NULL AS IPPREFIX)", BOOLEAN, false);
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8328/128'", BOOLEAN, true);
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM CAST(NULL AS IPPREFIX)", BOOLEAN, true);
        assertFunction("CAST(NULL AS IPPREFIX) IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8328/128'", BOOLEAN, true);
    }

    /** Both inequality spellings (!= and <>) behave identically. */
    @Test
    public void testNotEquals()
    {
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' != IPPREFIX '1.2.3.4/32'", BOOLEAN, true);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) <> CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) != IPPREFIX '1.2.3.4/32'", BOOLEAN, false);
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' <> IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, false);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) <> CAST('::ffff:1.2.3.4/32' AS IPPREFIX)", BOOLEAN, false);
    }

    /** Ordering: <, <=, >, >=, BETWEEN; a shorter prefix of the same network sorts before a longer one. */
    @Test
    public void testOrderOperators()
    {
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' > IPPREFIX '1.2.3.4/32'", BOOLEAN, true);
        assertFunction("IPPREFIX '1.2.3.4/32' > IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128'", BOOLEAN, false);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) < CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("CAST('1.2.3.5/32' AS IPPREFIX) < CAST('1.2.3.4/32' AS IPPREFIX)", BOOLEAN, false);
        assertFunction("CAST('1.2.0.0/24' AS IPPREFIX) < CAST('1.2.0.0/25' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("IPPREFIX '::1/128' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("IPPREFIX '1.2.3.5/32' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("IPPREFIX '1.2.3.6/32' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, false);
        assertFunction("IPPREFIX '::1/128' >= IPPREFIX '::/128'", BOOLEAN, true);
        assertFunction("IPPREFIX '::1/128' >= IPPREFIX '::1/128'", BOOLEAN, true);
        assertFunction("IPPREFIX '::/128' >= IPPREFIX '::1/128'", BOOLEAN, false);
        assertFunction("IPPREFIX '::1/128' BETWEEN IPPREFIX '::/128' AND IPPREFIX '::1234/128'", BOOLEAN, true);
        assertFunction("IPPREFIX '::2222/128' BETWEEN IPPREFIX '::/128' AND IPPREFIX '::1234/128'", BOOLEAN, false);
    }

    /** INDETERMINATE is true only for NULL values. */
    @Test
    public void testIndeterminate()
    {
        assertOperator(INDETERMINATE, "CAST(null AS IPPREFIX)", BOOLEAN, true);
        assertOperator(INDETERMINATE, "IPPREFIX '::2222/128'", BOOLEAN, false);
    }

    /** HASH_CODE of NULL is NULL; a non-null value hashes like its raw storage slice. */
    @Test
    public void testHash()
    {
        assertOperator(HASH_CODE, "CAST(null AS IPPREFIX)", BIGINT, null);
        assertOperator(HASH_CODE, "IPPREFIX '::2222/128'", BIGINT, hashFromType("::2222/128"));
    }

    /**
     * Builds the IPPREFIX storage representation (16 address bytes followed by the
     * prefix length in the final byte) for {@code address} and returns the type's
     * hash of that value, for comparison against the HASH_CODE operator.
     */
    private static long hashFromType(String address)
    {
        BlockBuilder blockBuilder = IPPREFIX.createBlockBuilder(null, 1);
        String[] parts = address.split("/");
        byte[] bytes = new byte[IPPREFIX.getFixedSize()];
        byte[] addressBytes = InetAddresses.forString(parts[0]).getAddress();
        // NOTE(review): copies 16 bytes unconditionally, so this helper assumes an IPv6
        // literal; an IPv4 input would yield a 4-byte array and throw
        // ArrayIndexOutOfBoundsException here. The only caller (testHash) passes IPv6 —
        // confirm before reusing with IPv4.
        arraycopy(addressBytes, 0, bytes, 0, 16);
        bytes[IPPREFIX.getFixedSize() - 1] = (byte) Integer.parseInt(parts[1]);
        IPPREFIX.writeSlice(blockBuilder, Slices.wrappedBuffer(bytes));
        Block block = blockBuilder.build();
        return IPPREFIX.hash(block, 0);
    }
}
apache-2.0
plasma-framework/plasma
plasma-provisioning/src/main/java/org/plasma/provisioning/rdb/mysql/v5_5/query/QTableColumnConstraint.java
3301
package org.plasma.provisioning.rdb.mysql.v5_5.query;

import org.plasma.provisioning.rdb.mysql.v5_5.TableColumnConstraint;
import org.plasma.query.DataProperty;
import org.plasma.query.Expression;
import org.plasma.query.dsl.DataNode;
import org.plasma.query.dsl.DomainRoot;
import org.plasma.query.dsl.PathNode;
import org.plasma.sdo.helper.PlasmaTypeHelper;

/**
 * Generated Domain Specific Language (DSL) implementation class representing
 * the domain model entity <b>TableColumnConstraint</b>.
 *
 * <p>
 * </p>
 * <b>Data Store Mapping:</b> Corresponds to the physical data store entity
 * <b>REFERENTIAL_CONSTRAINTS</b>.
 *
 */
public class QTableColumnConstraint extends DomainRoot {

  // Private no-arg constructor: roots are created via newQuery().
  private QTableColumnConstraint() {
    super(PlasmaTypeHelper.INSTANCE.getType(TableColumnConstraint.class));
  }

  /**
   * Constructor which instantiates a domain query path node. A path may span
   * multiple namespaces and therefore Java implementation packages based on the
   * <a href=
   * "http://docs.plasma-sdo.org/api/org/plasma/config/PlasmaConfiguration.html"
   * >Configuration</a>. Note: while this constructor is public, it is not for
   * application use!
   *
   * @param source
   *          the source path node
   * @param sourceProperty
   *          the source property logical name
   */
  public QTableColumnConstraint(PathNode source, String sourceProperty) {
    super(source, sourceProperty);
  }

  /**
   * Constructor which instantiates a domain query path node. A path may span
   * multiple namespaces and therefore Java implementation packages based on the
   * <a href=
   * "http://docs.plasma-sdo.org/api/org/plasma/config/PlasmaConfiguration.html"
   * >Configuration</a>. Note: while this constructor is public, it is not for
   * application use!
   *
   * @param source
   *          the source path node
   * @param sourceProperty
   *          the source property logical name
   * @param expr
   *          the path predicate expression
   */
  public QTableColumnConstraint(PathNode source, String sourceProperty, Expression expr) {
    super(source, sourceProperty, expr);
  }

  /**
   * Returns a new DSL query for <a
   * href="http://docs.plasma-sdo.org/api/org/plasma/sdo/PlasmaType.html"
   * >Type</a> <b>TableColumnConstraint</b> which can be used either as a query
   * root or as the start (entry point) for a new path predicate expression.
   *
   * @return a new DSL query
   */
  public static QTableColumnConstraint newQuery() {
    return new QTableColumnConstraint();
  }

  /**
   * Returns a DSL data element for property, <b>name</b>.
   *
   * @return a DSL data element for property, <b>name</b>.
   */
  public DataProperty name() {
    return new DataNode(this, TableColumnConstraint.PROPERTY.name.name());
  }

  /**
   * Returns a DSL data element for property, <b>owner</b>.
   *
   * @return a DSL data element for property, <b>owner</b>.
   */
  public DataProperty owner() {
    return new DataNode(this, TableColumnConstraint.PROPERTY.owner.name());
  }

  /**
   * Returns a DSL query element for reference property, <b>table</b>.
   *
   * @return a DSL query element for reference property, <b>table</b>.
   */
  public QTable table() {
    return new QTable(this, TableColumnConstraint.PROPERTY.table.name());
  }
}
apache-2.0
adufilie/flex-falcon
compiler/src/org/apache/flex/compiler/problems/EmbedMultipleMetaTagsProblem.java
1453
/* * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.flex.compiler.problems; import org.apache.flex.compiler.common.ISourceLocation; /** * This problem gets created when a variable has two Embed meta data tags * associated with it. */ public final class EmbedMultipleMetaTagsProblem extends CompilerProblem { public static final String DESCRIPTION = "A variable can only only have one [${EMBED}] metadata tag"; public static final int errorCode = 1344; public EmbedMultipleMetaTagsProblem(ISourceLocation site) { super(site); } // Prevent these from being localized. public final String EMBED = "Embed"; }
apache-2.0
Skarlso/gocd
server/src/main/java/com/thoughtworks/go/server/materials/SCMMaterialSource.java
6425
/*
 * Copyright 2022 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.server.materials;

import com.thoughtworks.go.config.CruiseConfig;
import com.thoughtworks.go.config.PipelineConfig;
import com.thoughtworks.go.config.materials.dependency.DependencyMaterial;
import com.thoughtworks.go.config.remote.ConfigRepoConfig;
import com.thoughtworks.go.domain.materials.Material;
import com.thoughtworks.go.domain.packagerepository.PackageDefinition;
import com.thoughtworks.go.domain.packagerepository.PackageRepository;
import com.thoughtworks.go.domain.scm.SCM;
import com.thoughtworks.go.listener.ConfigChangedListener;
import com.thoughtworks.go.listener.EntityConfigChangedListener;
import com.thoughtworks.go.server.service.GoConfigService;
import com.thoughtworks.go.server.service.MaterialConfigConverter;
import com.thoughtworks.go.util.SystemEnvironment;
import org.slf4j.Logger;
import org.joda.time.DateTimeUtils;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

/**
 * Provides a list of unique SCMMaterials to be updated which will be consumed by MaterialUpdateService
 */
@Component
public class SCMMaterialSource extends EntityConfigChangedListener<ConfigRepoConfig> implements ConfigChangedListener, MaterialSource, MaterialUpdateCompleteListener {
    private static final Logger LOGGER = LoggerFactory.getLogger(SCMMaterialSource.class);

    private final GoConfigService goConfigService;
    // Per-material timestamp (epoch millis) of the last completed update; read to throttle polling.
    private ConcurrentMap<Material, Long> materialLastUpdateTimeMap = new ConcurrentHashMap<>();
    private final MaterialConfigConverter materialConfigConverter;
    private final MaterialUpdateService materialUpdateService;
    // Minimum idle time (millis) between two updates of the same material.
    private final long materialUpdateInterval;
    // Lazily built cache of materials eligible for scheduling; rebuilt on config changes.
    private Set<Material> schedulableMaterials;

    @Autowired
    public SCMMaterialSource(GoConfigService goConfigService, SystemEnvironment systemEnvironment,
                             MaterialConfigConverter materialConfigConverter,
                             MaterialUpdateService materialUpdateService) {
        this.goConfigService = goConfigService;
        this.materialConfigConverter = materialConfigConverter;
        this.materialUpdateService = materialUpdateService;
        this.materialUpdateInterval = systemEnvironment.getMaterialUpdateIdleInterval();
    }

    /**
     * Wires this source into the config service (for config-change notifications) and
     * the material update service (as a source of materials and a completion listener).
     */
    public void initialize() {
        goConfigService.register(this);
        goConfigService.register(new InternalConfigChangeListener() {
            @Override
            public void onEntityConfigChange(Object entity) {
                // any pipeline/package/SCM entity change invalidates the cached material set
                updateSchedulableMaterials(true);
            }
        });

        materialUpdateService.registerMaterialSources(this);
        materialUpdateService.registerMaterialUpdateCompleteListener(this);
    }

    /**
     * Returns the schedulable materials whose idle interval has elapsed since their
     * last completed update (materials never updated are always included).
     */
    @Override
    public Set<Material> materialsForUpdate() {
        updateSchedulableMaterials(false);

        return materialsWithUpdateIntervalElapsed();
    }

    /** Records the completion time of an update; dependency materials are not throttled here. */
    @Override
    public void onMaterialUpdate(Material material) {
        if (!(material instanceof DependencyMaterial)) {
            updateLastUpdateTimeForScmMaterial(material);
        }
    }

    /** Full config change: force a rebuild of the schedulable material cache. */
    @Override
    public void onConfigChange(CruiseConfig newCruiseConfig) {
        updateSchedulableMaterials(true);
    }

    /** Config-repo entity change: force a rebuild of the schedulable material cache. */
    @Override
    public void onEntityConfigChange(ConfigRepoConfig entity) {
        updateSchedulableMaterials(true);
    }

    /** Adapter that funnels pipeline-config entity changes into the full-config-change path. */
    protected EntityConfigChangedListener<PipelineConfig> pipelineConfigChangedListener() {
        final SCMMaterialSource self = this;
        return new EntityConfigChangedListener<PipelineConfig>() {
            @Override
            public void onEntityConfigChange(PipelineConfig pipelineConfig) {
                self.onConfigChange(null);
            }
        };
    }

    /** Filters the cached schedulable materials down to those whose idle interval has elapsed. */
    private Set<Material> materialsWithUpdateIntervalElapsed() {
        Set<Material> materialsForUpdate = new HashSet<>();
        for (Material material : schedulableMaterials) {
            if (hasUpdateIntervalElapsedForScmMaterial(material)) {
                materialsForUpdate.add(material);
            }
        }

        return materialsForUpdate;
    }

    /**
     * True when the material has never been updated, or when at least
     * {@code materialUpdateInterval} millis have passed since its last update.
     */
    boolean hasUpdateIntervalElapsedForScmMaterial(Material material) {
        Long lastMaterialUpdateTime = materialLastUpdateTimeMap.get(material);
        if (lastMaterialUpdateTime != null) {
            boolean shouldUpdateMaterial = (DateTimeUtils.currentTimeMillis() - lastMaterialUpdateTime) >= materialUpdateInterval;
            if (LOGGER.isDebugEnabled() && !shouldUpdateMaterial) {
                LOGGER.debug("[Material Update] Skipping update of material {} which has been last updated at {}", material, new Date(lastMaterialUpdateTime));
            }
            return shouldUpdateMaterial;
        }
        return true;
    }

    // Stamps the material with "now" (joda DateTimeUtils, so tests can fix the clock).
    private void updateLastUpdateTimeForScmMaterial(Material material) {
        materialLastUpdateTimeMap.put(material, DateTimeUtils.currentTimeMillis());
    }

    // Rebuilds the cache when forced or on first use (lazy initialization).
    private void updateSchedulableMaterials(boolean forceLoad) {
        if (forceLoad || schedulableMaterials == null) {
            schedulableMaterials = materialConfigConverter.toMaterials(goConfigService.getSchedulableSCMMaterials());
        }
    }

    /**
     * Listener base that only reacts to the entity types whose changes can affect
     * which SCM materials are schedulable.
     */
    private abstract class InternalConfigChangeListener extends EntityConfigChangedListener<Object> {
        private final List<Class<?>> securityConfigClasses = Arrays.asList(
                PipelineConfig.class,
                PackageDefinition.class,
                PackageRepository.class,
                SCM.class
        );

        @Override
        public boolean shouldCareAbout(Object entity) {
            return securityConfigClasses.stream().anyMatch(aClass -> aClass.isAssignableFrom(entity.getClass()));
        }
    }
}
apache-2.0
ge0ffrey/optaplanner
optaplanner-core/src/main/java/org/optaplanner/core/impl/score/stream/drools/quad/DroolsQuadToTriGroupByCollectorProcessor.java
3217
/*
 * Copyright 2020 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.optaplanner.core.impl.score.stream.drools.quad;

import java.util.function.Function;
import java.util.function.Supplier;

import org.optaplanner.core.api.function.PentaFunction;
import org.optaplanner.core.api.function.QuadFunction;
import org.optaplanner.core.api.score.stream.quad.QuadConstraintCollector;
import org.optaplanner.core.impl.score.stream.drools.common.BiTuple;
import org.optaplanner.core.impl.score.stream.drools.common.DroolsAbstractUniCollectingGroupByCollectorProcessor;
import org.optaplanner.core.impl.score.stream.drools.common.QuadTuple;
import org.optaplanner.core.impl.score.stream.drools.common.TriTuple;

/**
 * Groups quad tuples (A, B, C, D) by a two-part key (NewA, NewB) and folds each
 * group through a {@link QuadConstraintCollector}, producing one
 * (NewA, NewB, NewC) tri tuple per group where NewC is the collector's result.
 */
final class DroolsQuadToTriGroupByCollectorProcessor<A, B, C, D, ResultContainer, NewA, NewB, NewC> extends
        DroolsAbstractUniCollectingGroupByCollectorProcessor<ResultContainer, QuadTuple<A, B, C, D>, BiTuple<NewA, NewB>, TriTuple<NewA, NewB, NewC>> {

    // First and second components of the group key.
    private final QuadFunction<A, B, C, D, NewA> groupKeyAMapping;
    private final QuadFunction<A, B, C, D, NewB> groupKeyBMapping;
    // Collector pieces, captured once so the collector object itself is not retained.
    private final Supplier<ResultContainer> supplier;
    private final PentaFunction<ResultContainer, A, B, C, D, Runnable> accumulator;
    private final Function<ResultContainer, NewC> finisher;

    public DroolsQuadToTriGroupByCollectorProcessor(QuadFunction<A, B, C, D, NewA> groupKeyAMapping,
            QuadFunction<A, B, C, D, NewB> groupKeyBMapping,
            QuadConstraintCollector<A, B, C, D, ResultContainer, NewC> collector) {
        this.groupKeyAMapping = groupKeyAMapping;
        this.groupKeyBMapping = groupKeyBMapping;
        this.supplier = collector.supplier();
        this.accumulator = collector.accumulator();
        this.finisher = collector.finisher();
    }

    @Override
    protected BiTuple<NewA, NewB> toKey(QuadTuple<A, B, C, D> tuple) {
        // Both key parts are derived from the same four facts.
        NewA keyA = groupKeyAMapping.apply(tuple.a, tuple.b, tuple.c, tuple.d);
        NewB keyB = groupKeyBMapping.apply(tuple.a, tuple.b, tuple.c, tuple.d);
        return new BiTuple<>(keyA, keyB);
    }

    @Override
    protected ResultContainer newContainer() {
        return supplier.get();
    }

    @Override
    protected Runnable process(QuadTuple<A, B, C, D> tuple, ResultContainer container) {
        // The returned Runnable undoes this accumulation (used on retraction).
        return accumulator.apply(container, tuple.a, tuple.b, tuple.c, tuple.d);
    }

    @Override
    protected TriTuple<NewA, NewB, NewC> toResult(BiTuple<NewA, NewB> key, ResultContainer container) {
        return new TriTuple<>(key.a, key.b, finisher.apply(container));
    }
}
apache-2.0
rythmengine/rythmengine
src/main/java/org/rythmengine/cache/EhCacheServiceFactory.java
1271
/** * Copyright (C) 2013-2016 The Rythm Engine project * for LICENSE and other details see: * https://github.com/rythmengine/rythmengine */ package org.rythmengine.cache; /*- * #%L * Rythm Template Engine * %% * Copyright (C) 2017 - 2021 OSGL (Open Source General Library) * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.rythmengine.extension.ICacheService; import org.rythmengine.extension.ICacheServiceFactory; /** * Created with IntelliJ IDEA. * User: luog * Date: 2/12/13 * Time: 8:45 AM * To change this template use File | Settings | File Templates. */ class EhCacheServiceFactory implements ICacheServiceFactory { @Override public ICacheService get() { return EhCacheService.INSTANCE; } }
apache-2.0
openengsb-attic/openengsb-api
src/main/java/org/openengsb/core/api/security/OpenEngSBSecurityException.java
1499
/** * Licensed to the Austrian Association for Software Tool Integration (AASTI) * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. The AASTI licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openengsb.core.api.security; /** * Serves as baseclass for all Security-related Exceptions in the OpenEngSB (similar to * {@link java.security.GeneralSecurityException} * */ public abstract class OpenEngSBSecurityException extends Exception { private static final long serialVersionUID = -2939758040088724227L; public OpenEngSBSecurityException() { } public OpenEngSBSecurityException(String message, Throwable cause) { super(message, cause); } public OpenEngSBSecurityException(String message) { super(message); } public OpenEngSBSecurityException(Throwable cause) { super(cause); } }
apache-2.0
andidev/spring-bootstrap-enterprise
src/main/java/org/andidev/applicationname/format/custom/CustomPrinter.java
1468
package org.andidev.applicationname.format.custom; import java.util.Locale; import org.andidev.applicationname.format.annotation.CustomFormat; import org.apache.commons.lang3.StringUtils; import org.springframework.expression.EvaluationContext; import org.springframework.expression.ExpressionParser; import org.springframework.expression.spel.SpelParseException; import org.springframework.expression.spel.standard.SpelExpressionParser; import org.springframework.format.Printer; public class CustomPrinter implements Printer<Object> { private final String spelExpression; private final EvaluationContext evaluationContext; public CustomPrinter(String spelExpression, EvaluationContext evaluationContext) { this.spelExpression = StringUtils.defaultIfBlank(spelExpression, null); this.evaluationContext = evaluationContext; } @Override public String print(Object object, Locale locale) { if (spelExpression == null) { return null; } ExpressionParser parser = new SpelExpressionParser(); try { Object result = parser.parseExpression(spelExpression).getValue(evaluationContext, object); return result.toString(); } catch (SpelParseException e) { throw new CustomFormatException("Could not parse spel expression = \"" + spelExpression + "\" in " + CustomFormat.class.getSimpleName() + " annotation: " + e.getMessage()); } } }
apache-2.0
tabladrum/Hygieia
collectors/test-results/jira-xray/src/main/java/com/capitalone/dashboard/model/TestResultCollector.java
1094
package com.capitalone.dashboard.model;

import com.capitalone.dashboard.util.FeatureCollectorConstants;
import org.springframework.stereotype.Component;

/**
 * Collector implementation for Feature that stores system configuration
 * settings required for source system data connection (e.g., API tokens, etc.)
 */
@Component
public class TestResultCollector extends Collector {

    /**
     * Builds a ready-to-register prototype of this collector, pre-populated
     * with the Jira/Xray name, the Test collector type, and enabled/online
     * flags set for immediate use.
     *
     * @return A configured TestResult Collector prototype
     */
    public static TestResultCollector prototype() {
        TestResultCollector collector = new TestResultCollector();
        collector.setName(FeatureCollectorConstants.JIRA_XRAY);
        collector.setCollectorType(CollectorType.Test);
        // Mark the collector as active from the moment it is registered.
        collector.setEnabled(true);
        collector.setOnline(true);
        collector.setLastExecuted(System.currentTimeMillis());
        return collector;
    }
}
apache-2.0
tudelft-atlarge/granula
granula-modeller/src/main/java/science/atlarge/granula/modeller/rule/derivation/time/ParentalEndTimeDerivation.java
2098
/* * Copyright 2015 - 2017 Atlarge Research Team, * operating at Technische Universiteit Delft * and Vrije Universiteit Amsterdam, the Netherlands. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package science.atlarge.granula.modeller.rule.derivation.time; import science.atlarge.granula.modeller.platform.info.BasicInfo; import science.atlarge.granula.modeller.platform.info.InfoSource; import science.atlarge.granula.modeller.platform.operation.Operation; import science.atlarge.granula.modeller.rule.derivation.DerivationRule; import science.atlarge.granula.modeller.platform.info.Info; import science.atlarge.granula.modeller.platform.info.Source; import java.util.ArrayList; import java.util.List; public class ParentalEndTimeDerivation extends DerivationRule { public ParentalEndTimeDerivation(int level) { super(level); } @Override public boolean execute() { Operation operation = (Operation) entity; Operation parent = operation.getParent(); Info sourceInfo = parent.getInfo("EndTime"); long endTime = Long.parseLong(sourceInfo.getValue()); BasicInfo info = new BasicInfo("EndTime"); List<Source> sources = new ArrayList<>(); sources.add(new InfoSource("ParentalEndTime", sourceInfo)); info.setDescription("The [EndTime] of an (abstract) operation is derived from the largest value of [FilialEndTimes], which are [EndTime]s of all child operations."); info.addInfo(String.valueOf(endTime), sources); operation.addInfo(info); return true; } }
apache-2.0
djodjoni/jus
jus-java/src/main/java/io/apptik/comm/jus/error/AuthError.java
1412
/*
 * Copyright (C) 2015 Apptik Project
 * Copyright (C) 2014 Kalin Maldzhanski
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.apptik.comm.jus.error;

import io.apptik.comm.jus.NetworkResponse;

/**
 * Error indicating that there was an authentication failure when performing a Request.
 *
 * <p>All constructors simply delegate to {@link RequestError}, attaching the
 * failed {@link NetworkResponse} plus an optional message and/or cause.
 */
@SuppressWarnings("serial")
public class AuthError extends RequestError {

    /** Authentication failure carrying only the server response. */
    public AuthError(NetworkResponse response) {
        super(response);
    }

    /** Authentication failure with a human-readable detail message. */
    public AuthError(NetworkResponse response, String exceptionMessage) {
        super(response, exceptionMessage);
    }

    /** Authentication failure with a detail message and an underlying cause. */
    public AuthError(NetworkResponse response, String exceptionMessage, Throwable reason) {
        super(response, exceptionMessage, reason);
    }

    /** Authentication failure wrapping an underlying cause. */
    public AuthError(NetworkResponse response, Throwable reason) {
        super(response, reason);
    }
}
apache-2.0
shudo/dht-access
src/dhtaccess/tools/Remove.java
3313
/* * Copyright 2006-2008 Kazuyuki Shudo. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package dhtaccess.tools; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.util.Properties; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.PosixParser; import dhtaccess.core.DHTAccessor; public class Remove { private static final String COMMAND = "rm"; private static final String ENCODE = "UTF-8"; private static final String DEFAULT_GATEWAY = "http://opendht.nyuld.net:5851/"; private static void usage(String command) { System.out.println("usage: " + command + " [-h] [-g <gateway>] [-t <ttl (sec)>] <key> <value> <secret>"); } public static void main(String[] args) { int ttl = 3600; // parse properties Properties prop = System.getProperties(); String gateway = prop.getProperty("dhtaccess.gateway"); if (gateway == null || gateway.length() <= 0) { gateway = DEFAULT_GATEWAY; } // parse options Options options = new Options(); options.addOption("h", "help", false, "print help"); options.addOption("g", "gateway", true, "gateway URI, list at http://opendht.org/servers.txt"); options.addOption("t", "ttl", true, "how long (in seconds) to store the value"); CommandLineParser parser = new PosixParser(); CommandLine cmd = null; try { cmd = parser.parse(options, args); } catch (ParseException e) { 
System.out.println("There is an invalid option."); e.printStackTrace(); System.exit(1); } String optVal; if (cmd.hasOption('h')) { usage(COMMAND); System.exit(1); } optVal = cmd.getOptionValue('g'); if (optVal != null) { gateway = optVal; } optVal = cmd.getOptionValue('t'); if (optVal != null) { ttl = Integer.parseInt(optVal); } args = cmd.getArgs(); // parse arguments if (args.length < 3) { usage(COMMAND); System.exit(1); } byte[] key = null, value = null, secret = null; try { key = args[0].getBytes(ENCODE); value = args[1].getBytes(ENCODE); secret = args[2].getBytes(ENCODE); } catch (UnsupportedEncodingException e1) { // NOTREACHED } // prepare for RPC DHTAccessor accessor = null; try { accessor = new DHTAccessor(gateway); } catch (MalformedURLException e) { e.printStackTrace(); System.exit(1); } // RPC int res = accessor.remove(key, value, ttl, secret); String resultString; switch (res) { case 0: resultString = "Success"; break; case 1: resultString = "Capacity"; break; case 2: resultString = "Again"; break; default: resultString = "???"; } System.out.println(resultString); } }
apache-2.0
adufilie/flex-falcon
compiler/src/org/apache/flex/compiler/problems/ConflictingInheritedNameInNamespaceProblem.java
1604
/*
 *
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */

package org.apache.flex.compiler.problems;

import org.apache.flex.compiler.tree.as.IASNode;

/**
 * Diagnostic emitted when the code generator detects
 * a definition that conflicts with an inherited definition
 * from a superclass.
 */
public final class ConflictingInheritedNameInNamespaceProblem extends SemanticProblem {
    // Message template; ${declName} and ${nsName} are substituted from the
    // public fields below by the problem-reporting framework.
    public static final String DESCRIPTION =
        "A conflict exists with inherited definition ${declName} in namespace ${nsName}.";

    public static final int errorCode = 1152;

    /**
     * @param site     AST node where the conflicting definition occurs
     * @param declName name of the conflicting declaration
     * @param nsName   namespace the inherited definition lives in
     */
    public ConflictingInheritedNameInNamespaceProblem(IASNode site, String declName, String nsName) {
        super(site);
        this.declName = declName;
        this.nsName = nsName;
    }

    // Exposed as public fields so the framework can substitute them into
    // DESCRIPTION by name.
    public final String declName;
    public final String nsName;
}
apache-2.0
nknize/elasticsearch
modules/lang-painless/src/test/java/org/elasticsearch/painless/FeatureTestObject2.java
1258
package org.elasticsearch.painless; /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /** Currently just a dummy class for testing a few features not yet exposed by whitelist! */ public class FeatureTestObject2 { public FeatureTestObject2() {super();} public static int staticNumberArgument(int injected, int userArgument) { return injected * userArgument; } public static int staticNumberArgument2(int userArgument1, int userArgument2) { return userArgument1 * userArgument2; } }
apache-2.0
dans123456/pnc
rest/src/main/java/org/jboss/pnc/rest/provider/ProductMilestoneReleaseProvider.java
3113
/** * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.pnc.rest.provider; import org.jboss.pnc.model.ProductMilestone; import org.jboss.pnc.model.ProductMilestoneRelease; import org.jboss.pnc.rest.restmodel.ProductMilestoneReleaseRest; import org.jboss.pnc.spi.datastore.repositories.PageInfoProducer; import org.jboss.pnc.spi.datastore.repositories.ProductMilestoneReleaseRepository; import org.jboss.pnc.spi.datastore.repositories.ProductMilestoneRepository; import org.jboss.pnc.spi.datastore.repositories.SortInfoProducer; import org.jboss.pnc.spi.datastore.repositories.api.RSQLPredicateProducer; import javax.ejb.Stateless; import javax.inject.Inject; import java.util.function.Function; @Stateless public class ProductMilestoneReleaseProvider extends AbstractProvider<ProductMilestoneRelease, ProductMilestoneReleaseRest> { private ProductMilestoneRepository milestoneRepository; private ProductMilestoneReleaseRepository releaseRepository; @Inject public ProductMilestoneReleaseProvider(ProductMilestoneReleaseRepository releaseRepository, ProductMilestoneRepository milestoneRepository, RSQLPredicateProducer rsqlPredicateProducer, SortInfoProducer sortInfoProducer, PageInfoProducer pageInfoProducer) { super(releaseRepository, rsqlPredicateProducer, sortInfoProducer, pageInfoProducer); this.releaseRepository = releaseRepository; 
this.milestoneRepository = milestoneRepository; } // needed for EJB/CDI @Deprecated public ProductMilestoneReleaseProvider() { } @Override protected Function<? super ProductMilestoneRelease, ? extends ProductMilestoneReleaseRest> toRESTModel() { return ProductMilestoneReleaseRest::new; } @Override protected Function<? super ProductMilestoneReleaseRest, ? extends ProductMilestoneRelease> toDBModel() { throw new IllegalStateException("ProductMilestoneRelease entity is not to be created via REST"); } public ProductMilestoneReleaseRest latestForMilestone(Integer milestoneId) { ProductMilestone milestone = milestoneRepository.queryById(milestoneId); ProductMilestoneRelease release = milestone == null ? null : releaseRepository.findLatestByMilestone(milestone); return release == null ? null : toRESTModel().apply(release); } }
apache-2.0