repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
luix/android-big-nerd-ranch
GeoQuiz/app/src/test/java/com/microlands/android/geoquiz/ExampleUnitTest.java
1018
/*
 * Copyright (c) 2016. Microlands Systems
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.microlands.android.geoquiz;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Example local unit test, which will execute on the development machine (host)
 * rather than on an Android device or emulator.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {

    /** Sanity check that plain integer arithmetic works on the host JVM. */
    @Test
    public void addition_isCorrect() throws Exception {
        final int expected = 4;
        final int actual = 2 + 2;
        assertEquals(expected, actual);
    }
}
apache-2.0
poxrucker/collaborative-learning-simulation
Simulator/src/allow/simulator/mobility/data/Route.java
5126
package allow.simulator.mobility.data; import java.time.LocalDateTime; import java.time.LocalTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import allow.simulator.core.Simulator; import allow.simulator.mobility.data.TimeTable.Day; import allow.simulator.mobility.data.gtfs.GTFSService; import allow.simulator.mobility.data.gtfs.GTFSServiceException; import allow.simulator.mobility.data.gtfs.GTFSStopTimes; import allow.simulator.world.Street; public class Route { // Id of this route. private String routeId; // Stops of this route. private Map<String, BusStop> stops; // Trips of this route ordered chronological by day. private List<List<PublicTransportationTrip>> trips; private Map<String, PublicTransportationTrip> tripInfo; // Buffer to return. private List<PublicTransportationTrip> tripsToReturn; /** * Constructor. * Creates a new route with given Id and time table. * * @param routeId Id of this route. * @param timeTable Time table of this route. */ public Route(String routeId, TimeTable timeTable, Map<String, BusStop> stops) { this.routeId = routeId; this.stops = stops; timeTableToTrips(timeTable); tripsToReturn = new ArrayList<PublicTransportationTrip>(16); } private static final DateTimeFormatter format = DateTimeFormatter.ofPattern("kk:mm:ss"); private void timeTableToTrips(TimeTable tt) { // For routing. IDataService service = Simulator.Instance().getContext().getDataService(); // Allocate trips structure. trips = new ArrayList<List<PublicTransportationTrip>>(7); tripInfo = new HashMap<String, PublicTransportationTrip>(); for (int i = 0; i < 7; i++) { List<GTFSStopTimes> stoptimes = tt.getTripsOfDay(Day.values()[i]); LinkedList<PublicTransportationTrip> toAdd = new LinkedList<PublicTransportationTrip>(); for (int j = 0; j < stoptimes.size(); j++) { // Current stop info. 
GTFSStopTimes info = stoptimes.get(j); // Allocate lists for times and stops and copy them. List<LocalTime> tripTimes = new ArrayList<LocalTime>(info.getStopIds().length); List<BusStop> tripStops = new ArrayList<BusStop>(info.getStopIds().length); for (int k = 0; k < info.getStopIds().length; k++) { tripStops.add(stops.get(info.getStopIds()[k])); tripTimes.add(LocalTime.parse(info.getDepartureTimes()[k], format)); } // Allocate lists for traces and generate them. List<List<Street>> traces = new ArrayList<List<Street>>(tripStops.size() - 1); for (int l = 0; l < tripStops.size() - 1; l++) { BusStop curr = tripStops.get(l); BusStop next = tripStops.get(l + 1); List<Street> routing = service.getBusstopRouting(curr.getStopId(), next.getStopId()); if (routing == null) routing = new ArrayList<Street>(0); traces.add(routing); } GTFSService serviceId = service.getServiceId(routeId, info.getTripId()); List<GTFSServiceException> exceptions = service.getServiceExceptions(serviceId.getServiceId()); PublicTransportationTrip t = new PublicTransportationTrip(info.getTripId(), serviceId.startDate(), serviceId.endDate(), exceptions, tripStops, tripTimes, traces); toAdd.addLast(t); if (!tripInfo.containsKey(t.getTripId())) tripInfo.put(t.getTripId(), t); } trips.add(toAdd); } } public String getRouteId() { return routeId; } public List<PublicTransportationTrip> getNextTrip(LocalDateTime currentTime) { LinkedList<PublicTransportationTrip> dayTrips = null; tripsToReturn.clear(); switch (currentTime.getDayOfWeek()) { case MONDAY: dayTrips = (LinkedList<PublicTransportationTrip>) trips.get(0); break; case TUESDAY: dayTrips = (LinkedList<PublicTransportationTrip>) trips.get(1); break; case WEDNESDAY: dayTrips = (LinkedList<PublicTransportationTrip>) trips.get(2); break; case THURSDAY: dayTrips = (LinkedList<PublicTransportationTrip>) trips.get(3); break; case FRIDAY: dayTrips = (LinkedList<PublicTransportationTrip>) trips.get(4); break; case SATURDAY: dayTrips = 
(LinkedList<PublicTransportationTrip>) trips.get(5); break; case SUNDAY: dayTrips = (LinkedList<PublicTransportationTrip>) trips.get(6); break; } if (dayTrips.size() == 0) { return tripsToReturn; } // Get starting time of next trip. PublicTransportationTrip nextTrip = dayTrips.peekFirst(); int c = 0; while ((nextTrip != null) && (c < dayTrips.size()) && (nextTrip.getStartingTime().getHour() == currentTime.getHour()) && (nextTrip.getStartingTime().getMinute() == currentTime.getMinute())) { if (nextTrip.isValidThisDay(currentTime.toLocalDate())) { tripsToReturn.add(nextTrip); } dayTrips.pollFirst(); nextTrip = dayTrips.peekFirst(); dayTrips.addLast(nextTrip); c++; } return tripsToReturn; } public BusStop getStop(String stopId) { return stops.get(stopId); } public Trip getTripInformation(String tripId) { return tripInfo.get(tripId); } }
apache-2.0
dennishuo/hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AutoCreatedLeafQueue.java
5945
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;

import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerDynamicEditException;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.QueueEntitlement;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;

/**
 * Leaf queues which are auto created by an underlying implementation of
 * AbstractManagedParentQueue. Eg: PlanQueue for reservations or
 * ManagedParentQueue for auto created dynamic queues
 */
public class AutoCreatedLeafQueue extends AbstractAutoCreatedLeafQueue {

  private static final Logger LOG = LoggerFactory
      .getLogger(AutoCreatedLeafQueue.class);

  /**
   * Creates an auto-created leaf queue under the given managed parent,
   * initialized from the parent's leaf-queue configuration, with all
   * capacities reset to zero until entitlement is explicitly granted.
   */
  public AutoCreatedLeafQueue(CapacitySchedulerContext cs, String queueName,
      ManagedParentQueue parent) throws IOException {
    super(cs, parent.getLeafQueueConfigs(queueName), queueName, parent, null);
    updateCapacitiesToZero();
  }

  /**
   * Reinitializes this queue from a newly parsed configuration under the
   * scheduler write lock, then resets capacities back to zero (the parsed
   * configuration's capacities could overcommit the parent otherwise).
   */
  @Override
  public void reinitialize(CSQueue newlyParsedQueue, Resource clusterResource)
      throws IOException {
    try {
      writeLock.lock();
      validate(newlyParsedQueue);
      ManagedParentQueue managedParentQueue = (ManagedParentQueue) parent;
      super.reinitialize(newlyParsedQueue, clusterResource, managedParentQueue
          .getLeafQueueConfigs(newlyParsedQueue.getQueueName()));
      //Reset capacities to 0 since reinitialize above
      // queueCapacities to initialize to configured capacity which might
      // overcommit resources from parent queue
      updateCapacitiesToZero();
    } finally {
      writeLock.unlock();
    }
  }

  /**
   * Applies the capacities from an auto-created leaf queue template to this
   * queue (capacities only — not a full reinitialize), refreshes the queue's
   * statistics, and activates any pending applications.
   */
  public void reinitializeFromTemplate(AutoCreatedLeafQueueConfig
      leafQueueTemplate) throws SchedulerDynamicEditException, IOException {
    try {
      writeLock.lock();

      // TODO:
      // reinitialize only capacities for now since 0 capacity updates
      // can cause
      // abs capacity related config computations to be incorrect if we go
      // through reinitialize
      QueueCapacities capacities = leafQueueTemplate.getQueueCapacities();

      //update abs capacities
      setupConfigurableCapacities(capacities);

      //reset capacities for the leaf queue
      mergeCapacities(capacities);

      //update queue used capacity for all the node labels
      CSQueueUtils.updateQueueStatistics(resourceCalculator,
          csContext.getClusterResource(), this, labelManager, null);

      //activate applications if any are pending
      activateApplications();
    } finally {
      writeLock.unlock();
    }
  }

  /**
   * Copies per-node-label capacity, absolute capacity, maximum capacity and
   * absolute maximum capacity from the template into this queue, and derives
   * the effective min/max resources for each label from the cluster resource.
   */
  private void mergeCapacities(QueueCapacities capacities) {
    for ( String nodeLabel : capacities.getExistingNodeLabels()) {
      queueCapacities.setCapacity(nodeLabel,
          capacities.getCapacity(nodeLabel));
      queueCapacities.setAbsoluteCapacity(nodeLabel, capacities
          .getAbsoluteCapacity(nodeLabel));
      queueCapacities.setMaximumCapacity(nodeLabel, capacities
          .getMaximumCapacity(nodeLabel));
      queueCapacities.setAbsoluteMaximumCapacity(nodeLabel, capacities
          .getAbsoluteMaximumCapacity(nodeLabel));

      // Effective resources = cluster resource for the label scaled by the
      // absolute (maximum) capacity fractions just merged above.
      Resource resourceByLabel = labelManager.getResourceByLabel(nodeLabel,
          csContext.getClusterResource());
      getQueueResourceQuotas().setEffectiveMinResource(nodeLabel,
          Resources.multiply(resourceByLabel,
              queueCapacities.getAbsoluteCapacity(nodeLabel)));
      getQueueResourceQuotas().setEffectiveMaxResource(nodeLabel,
          Resources.multiply(resourceByLabel, queueCapacities
              .getAbsoluteMaximumCapacity(nodeLabel)));
    }
  }

  /**
   * Validates that every per-label capacity in the template is a fraction in
   * [0, 1].
   *
   * @throws SchedulerDynamicEditException if any capacity is out of range
   */
  public void validateConfigurations(AutoCreatedLeafQueueConfig template)
      throws SchedulerDynamicEditException {
    QueueCapacities capacities = template.getQueueCapacities();
    for (String label : capacities.getExistingNodeLabels()) {
      float capacity = capacities.getCapacity(label);
      if (capacity < 0 || capacity > 1.0f) {
        throw new SchedulerDynamicEditException(
            "Capacity demand is not in the [0,1] range: " + capacity);
      }
    }
  }

  /**
   * Checks that the newly parsed queue is also an AutoCreatedLeafQueue with
   * the same queue path as this one; reinitializing from anything else is an
   * error.
   */
  private void validate(final CSQueue newlyParsedQueue) throws IOException {
    if (!(newlyParsedQueue instanceof AutoCreatedLeafQueue) || !newlyParsedQueue
        .getQueuePath().equals(getQueuePath())) {
      throw new IOException(
          "Error trying to reinitialize " + getQueuePath() + " from "
              + newlyParsedQueue.getQueuePath());
    }
  }

  /**
   * Sets this queue's entitlement to zero capacity (with the parent
   * template's maximum capacity) for every node label known to the parent.
   */
  private void updateCapacitiesToZero() throws IOException {
    try {
      for( String nodeLabel : parent.getQueueCapacities().getExistingNodeLabels
          ()) {
        //TODO - update to use getMaximumCapacity(nodeLabel) in YARN-7574
        setEntitlement(nodeLabel,
            new QueueEntitlement(0.0f,
                parent.getLeafQueueTemplate()
                    .getQueueCapacities()
                    .getMaximumCapacity()));
      }
    } catch (SchedulerDynamicEditException e) {
      throw new IOException(e);
    }
  }
}
apache-2.0
NextGenIntelligence/gerrit
gerrit-lucene/src/main/java/com/google/gerrit/lucene/WrappableSearcherManager.java
8580
package com.google.gerrit.lucene;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ReferenceManager;
import org.apache.lucene.search.SearcherFactory;
import org.apache.lucene.store.Directory;

import java.io.IOException;

/**
 * Utility class to safely share {@link IndexSearcher} instances across multiple
 * threads, while periodically reopening. This class ensures each searcher is
 * closed only once all threads have finished using it.
 *
 * <p>
 * Use {@link #acquire} to obtain the current searcher, and {@link #release} to
 * release it, like this:
 *
 * <pre class="prettyprint">
 * IndexSearcher s = manager.acquire();
 * try {
 *   // Do searching, doc retrieval, etc. with s
 * } finally {
 *   manager.release(s);
 * }
 * // Do not use s after this!
 * s = null;
 * </pre>
 *
 * <p>
 * In addition you should periodically call {@link #maybeRefresh}. While it's
 * possible to call this just before running each query, this is discouraged
 * since it penalizes the unlucky queries that need to refresh. It's better to use
 * a separate background thread, that periodically calls {@link #maybeRefresh}. Finally,
 * be sure to call {@link #close} once you are done.
 *
 * @see SearcherFactory
 *
 * @lucene.experimental
 */
// This file was copied from:
// https://github.com/apache/lucene-solr/blob/lucene_solr_5_0/lucene/core/src/java/org/apache/lucene/search/SearcherManager.java
// The only change (other than class name and import fixes)
// is to skip the check in getSearcher that searcherFactory.newSearcher wraps
// the provided searcher exactly.
final class WrappableSearcherManager
    extends ReferenceManager<IndexSearcher> {

  private final SearcherFactory searcherFactory;

  /**
   * Creates and returns a new SearcherManager from the given
   * {@link IndexWriter}.
   *
   * @param writer
   *          the IndexWriter to open the IndexReader from.
   * @param applyAllDeletes
   *          If <code>true</code>, all buffered deletes will be applied (made
   *          visible) in the {@link IndexSearcher} / {@link DirectoryReader}.
   *          If <code>false</code>, the deletes may or may not be applied, but
   *          remain buffered (in IndexWriter) so that they will be applied in
   *          the future. Applying deletes can be costly, so if your app can
   *          tolerate deleted documents being returned you might gain some
   *          performance by passing <code>false</code>. See
   *          {@link DirectoryReader#openIfChanged(DirectoryReader, IndexWriter, boolean)}.
   * @param searcherFactory
   *          An optional {@link SearcherFactory}. Pass <code>null</code> if you
   *          don't require the searcher to be warmed before going live or other
   *          custom behavior.
   *
   * @throws IOException if there is a low-level I/O error
   */
  public WrappableSearcherManager(IndexWriter writer, boolean applyAllDeletes,
      SearcherFactory searcherFactory) throws IOException {
    if (searcherFactory == null) {
      searcherFactory = new SearcherFactory();
    }
    this.searcherFactory = searcherFactory;
    // "current" is the ReferenceManager-managed reference; the initial
    // searcher is opened directly from the writer (NRT reader).
    current = getSearcher(searcherFactory, DirectoryReader.open(writer, applyAllDeletes));
  }

  /**
   * Creates and returns a new SearcherManager from the given {@link Directory}.
   * @param dir the directory to open the DirectoryReader on.
   * @param searcherFactory An optional {@link SearcherFactory}. Pass
   *        <code>null</code> if you don't require the searcher to be warmed
   *        before going live or other custom behavior.
   *
   * @throws IOException if there is a low-level I/O error
   */
  public WrappableSearcherManager(Directory dir, SearcherFactory searcherFactory)
      throws IOException {
    if (searcherFactory == null) {
      searcherFactory = new SearcherFactory();
    }
    this.searcherFactory = searcherFactory;
    current = getSearcher(searcherFactory, DirectoryReader.open(dir));
  }

  /**
   * Creates and returns a new SearcherManager from an existing {@link DirectoryReader}. Note that
   * this steals the incoming reference.
   *
   * @param reader the DirectoryReader.
   * @param searcherFactory An optional {@link SearcherFactory}. Pass
   *        <code>null</code> if you don't require the searcher to be warmed
   *        before going live or other custom behavior.
   *
   * @throws IOException if there is a low-level I/O error
   */
  public WrappableSearcherManager(DirectoryReader reader, SearcherFactory searcherFactory)
      throws IOException {
    if (searcherFactory == null) {
      searcherFactory = new SearcherFactory();
    }
    this.searcherFactory = searcherFactory;
    this.current = getSearcher(searcherFactory, reader);
  }

  // Reference counting is delegated to the underlying IndexReader: the
  // searcher is "closed" when its reader's refCount drops to zero.
  @Override
  protected void decRef(IndexSearcher reference) throws IOException {
    reference.getIndexReader().decRef();
  }

  @Override
  protected IndexSearcher refreshIfNeeded(IndexSearcher referenceToRefresh)
      throws IOException {
    final IndexReader r = referenceToRefresh.getIndexReader();
    assert r instanceof DirectoryReader: "searcher's IndexReader should be a DirectoryReader, but got " + r;
    // openIfChanged returns null when the index has not changed; in that case
    // the contract of refreshIfNeeded is to return null (keep old reference).
    final IndexReader newReader = DirectoryReader.openIfChanged((DirectoryReader) r);
    if (newReader == null) {
      return null;
    } else {
      return getSearcher(searcherFactory, newReader);
    }
  }

  @Override
  protected boolean tryIncRef(IndexSearcher reference) {
    return reference.getIndexReader().tryIncRef();
  }

  @Override
  protected int getRefCount(IndexSearcher reference) {
    return reference.getIndexReader().getRefCount();
  }

  /**
   * Returns <code>true</code> if no changes have occured since this searcher
   * ie. reader was opened, otherwise <code>false</code>.
   * @see DirectoryReader#isCurrent()
   */
  public boolean isSearcherCurrent() throws IOException {
    final IndexSearcher searcher = acquire();
    try {
      final IndexReader r = searcher.getIndexReader();
      assert r instanceof DirectoryReader: "searcher's IndexReader should be a DirectoryReader, but got " + r;
      return ((DirectoryReader) r).isCurrent();
    } finally {
      release(searcher);
    }
  }

  /**
   * Expert: creates a searcher from the provided {@link
   * IndexReader} using the provided {@link
   * SearcherFactory}. NOTE: this decRefs incoming reader
   * on throwing an exception.
   */
  @SuppressWarnings("resource")
  public static IndexSearcher getSearcher(SearcherFactory searcherFactory,
      IndexReader reader) throws IOException {
    boolean success = false;
    final IndexSearcher searcher;
    try {
      searcher = searcherFactory.newSearcher(reader);
      // Modification for Gerrit: Allow searcherFactory to transitively wrap the
      // provided reader.
      // Unwrap filter readers until we either reach the reader we handed in
      // (accepted) or hit a reader we cannot unwrap further (rejected below).
      IndexReader unwrapped = searcher.getIndexReader();
      while (true) {
        if (unwrapped == reader) {
          break;
        } else if (unwrapped instanceof FilterDirectoryReader) {
          unwrapped = ((FilterDirectoryReader) unwrapped).getDelegate();
        } else if (unwrapped instanceof FilterLeafReader) {
          unwrapped = ((FilterLeafReader) unwrapped).getDelegate();
        } else {
          break;
        }
      }
      if (unwrapped != reader) {
        throw new IllegalStateException(
            "SearcherFactory must wrap the provided reader (got "
                + searcher.getIndexReader() + " but expected " + reader + ")");
      }
      success = true;
    } finally {
      // The factory threw (or wrapped incorrectly): drop the reference we
      // were handed so the reader is not leaked.
      if (!success) {
        reader.decRef();
      }
    }
    return searcher;
  }
}
apache-2.0
Bernardo-MG/Tabletop-Punkapocalyptic-Punkabuilder
src/main/java/com/wandrell/tabletop/punkapocalyptic/punkabuilder/service/DesktopRulesetService.java
1347
package com.wandrell.tabletop.punkapocalyptic.punkabuilder.service; import java.util.Properties; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import com.wandrell.tabletop.punkapocalyptic.model.unit.Gang; import com.wandrell.tabletop.punkapocalyptic.service.DefaultRulesetService; import com.wandrell.tabletop.punkapocalyptic.service.RulesetService; @Service("rulesetService") public final class DesktopRulesetService implements RulesetService { private final RulesetService baseService; @Autowired public DesktopRulesetService(final Properties rulesetConfig) { super(); baseService = new DefaultRulesetService(rulesetConfig); } @Override public final Integer getBulletCost() { return getBaseService().getBulletCost(); } @Override public final Integer getGangValoration(final Gang gang) { return getBaseService().getGangValoration(gang); } @Override public final Integer getMaxAllowedUnits(final Integer valoration) { return getBaseService().getMaxAllowedUnits(valoration); } @Override public final Integer getPackMaxSize() { return getBaseService().getPackMaxSize(); } private final RulesetService getBaseService() { return baseService; } }
apache-2.0
nextreports/nextreports-server
src/ro/nextreports/server/web/dashboard/chart/ChartWidgetDescriptor.java
1274
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ro.nextreports.server.web.dashboard.chart;

import ro.nextreports.server.web.dashboard.WidgetDescriptor;

/**
 * Descriptor for the chart widget: exposes its display name, description,
 * provider and implementing widget class.
 *
 * @author Decebal Suiu
 */
public class ChartWidgetDescriptor implements WidgetDescriptor {

    /** Returns the widget's display name. */
    public String getName() {
        return "Chart";
    }

    /** Returns a short human-readable description of the widget. */
    public String getDescription() {
        return "A chart widget.";
    }

    /** Returns the widget's author/provider. */
    public String getProvider() {
        return "Decebal Suiu";
    }

    /** Returns the fully qualified class name of the widget implementation. */
    public String getWidgetClassName() {
        return ChartWidget.class.getName();
    }
}
apache-2.0
milindaperera/product-ei
product-scenarios/1-integrating-systems-that-communicate-in-heterogeneous-message-formats/1.6-xml-message-enrichment/1.6.1-Modify-payload-by-adding-a-child-using-enrich-mediator/src/test/java/org.wso2.carbon.ei.scenario.test/AddChildToXMLTest.java
9054
/*
 * Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.wso2.carbon.ei.scenario.test;

import org.apache.axiom.om.OMElement;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpResponse;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.wso2.carbon.esb.scenario.test.common.ScenarioConstants;
import org.wso2.carbon.esb.scenario.test.common.ScenarioTestBase;
import org.wso2.carbon.esb.scenario.test.common.http.HTTPUtils;
import org.wso2.carbon.esb.scenario.test.common.http.SOAPClient;
import org.wso2.carbon.esb.scenario.test.common.utils.FileUtils;
import org.wso2.carbon.esb.scenario.test.common.utils.XMLUtils;

import javax.xml.stream.XMLStreamException;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

/**
 * This test class contains tests related to Enrich Mediator being used to enrich payload
 * with child elements.
 */
public class AddChildToXMLTest extends ScenarioTestBase {

    private static final Log log = LogFactory.getLog(AddChildToXMLTest.class);

    // Directory containing the request/response XML fixtures for this scenario.
    private String sourcesFilePath;

    /**
     * Initializes the scenario test base and resolves the fixture directory.
     */
    @BeforeClass(alwaysRun = true)
    public void init() throws Exception {
        super.init();
        sourcesFilePath = testResourcesDir + File.separator + "source_files";
    }

    /**
     * Verifies that inline content configured in the proxy is added as a
     * child element of the payload. Request and expected response are read
     * from fixture files.
     */
    @Test(description = "1.6.1.1")
    public void testAddingInlineContentAsChild() throws IOException, XMLStreamException {
        String url = getProxyServiceURLHttp("1_6_1_1_Proxy_SoapToPoxMsgEnrichWithChild");
        String request = FileUtils.readFile(sourcesFilePath + File.separator + "request_1_6_1_1.xml");
        Map<String, String> headers = new HashMap<>(1);
        headers.put(ScenarioConstants.MESSAGE_ID, "1_6_1_1");
        SOAPClient soapClient = new SOAPClient();
        HttpResponse response = soapClient.sendSimpleSOAPMessage(url, request, "urn:mediate", headers);
        String expectedResponse = FileUtils.readFile(sourcesFilePath + File.separator + "response_1_6_1_1.xml");
        assertResponse(response, expectedResponse);
    }

    /**
     * Verifies that content selected via an XPath expression is added as a
     * child element of the payload. Request and expected response are read
     * from fixture files.
     */
    @Test(description = "1.6.1.2")
    public void testPassThroughProxyTemplate() throws IOException, XMLStreamException {
        String url = getProxyServiceURLHttp("1_6_1_2_Proxy_SoapToPoxEnrichWithXpathAsChild");
        String request = FileUtils.readFile(sourcesFilePath + File.separator + "request_1_6_1_2.xml");
        Map<String, String> headers = new HashMap<>(1);
        headers.put(ScenarioConstants.MESSAGE_ID, "1_6_1_2");
        SOAPClient soapClient = new SOAPClient();
        HttpResponse response = soapClient.sendSimpleSOAPMessage(url, request, "urn:mediate", headers);
        String expectedResponse = FileUtils.readFile(sourcesFilePath + File.separator + "response_1_6_1_2.xml");
        assertResponse(response, expectedResponse);
    }

    /**
     * This test is to verify if payload can be modified by adding current payload
     * as a child to the new payload
     */
    @Test(description = "1.6.1.3")
    public void addCurrentPayloadAsChild() throws IOException, XMLStreamException {
        String url = getProxyServiceURLHttp("1_6_1_3_Proxy_addCurrentPayloadAsChild");
        String testCaseId = "1.6.1.3";
        String request = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\">\n"
                + "   <soapenv:Body>\n"
                + "      <order>\n"
                + "         <price>12</price>\n"
                + "         <productid>IC002</productid>\n"
                + "         <quantity>2</quantity>\n"
                + "         <reference>ref</reference>\n"
                + "      </order>\n"
                + "   </soapenv:Body>\n"
                + "</soapenv:Envelope>";
        String expectedResponse = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\">\n"
                + "   <soapenv:Body>\n"
                + "      <orders>\n"
                + "         <order>\n"
                + "            <price>12</price>\n"
                + "            <productid>IC002</productid>\n"
                + "            <quantity>2</quantity>\n"
                + "            <reference>ref</reference>\n"
                + "         </order>\n"
                + "      </orders>\n"
                + "   </soapenv:Body>\n"
                + "</soapenv:Envelope>";
        HTTPUtils.invokeSoapActionAndAssert(url, request, testCaseId, expectedResponse, 200, "urn:mediate",
                "addCurrentPayloadAsChild");
    }

    /**
     * This test is to verify if payload can be modified by adding payload stored in a property
     * (OM type) as a child to the message body.
     */
    @Test(description = "1.6.1.4")
    public void addPayloadStoredInPropertyAsChild() throws IOException, XMLStreamException {
        String url = getProxyServiceURLHttp("1_6_1_4_Proxy_addPayloadStoredInPropertyAsChild");
        String testCaseId = "1.6.1.4";
        String request = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\">\n"
                + "   <soapenv:Body>\n"
                + "      <Company>\n"
                + "         <companyInfo>\n"
                + "            <name>WSO2</name>\n"
                + "            <location>Colombo Sri Lanka</location>\n"
                + "         </companyInfo>\n"
                + "         <Employees />\n"
                + "      </Company>\n"
                + "   </soapenv:Body>\n"
                + "</soapenv:Envelope>";
        String expectedResponse = "<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\">\n"
                + "   <soapenv:Body>\n"
                + "      <Company>\n"
                + "         <companyInfo>\n"
                + "            <name>WSO2</name>\n"
                + "            <location>Colombo Sri Lanka</location>\n"
                + "         </companyInfo>\n"
                + "         <Employees>\n"
                + "            <employee>\n"
                + "               <firstName>Jacque</firstName>\n"
                + "               <lastName>Kallis</lastName>\n"
                + "               <team>EI</team>\n"
                + "            </employee>\n"
                + "            <employee>\n"
                + "               <firstName>Mark</firstName>\n"
                + "               <lastName>Boucher</lastName>\n"
                + "               <team>STL</team>\n"
                + "            </employee>\n"
                + "         </Employees>\n"
                + "      </Company>\n"
                + "   </soapenv:Body>\n"
                + "</soapenv:Envelope>";
        HTTPUtils.invokeSoapActionAndAssert(url, request, testCaseId, expectedResponse, 200, "urn:mediate",
                "addPayloadStoredInPropertyAsChild");
    }

    // Asserts HTTP 200 and that the response payload equals the expected XML;
    // comparison is done on OMElements, so formatting differences are ignored.
    private void assertResponse(HttpResponse response, String expectedResponse)
            throws IOException, XMLStreamException {
        Assert.assertEquals(HTTPUtils.getHTTPResponseCode(response), 200, "Response failed");
        OMElement respElement = HTTPUtils.getOMFromResponse(response);
        Assert.assertNotNull(respElement, "Invalid response");
        boolean compareResult = XMLUtils.compareOMElements(XMLUtils.StringASOM(expectedResponse), respElement);
        Assert.assertEquals(compareResult, true,
                "expected payload " + expectedResponse + " , but received " + respElement.toString());
    }

    /** Tears down the scenario test base after all tests have run. */
    @AfterClass(description = "Server Cleanup", alwaysRun = true)
    public void cleanup() throws Exception {
        super.cleanup();
    }
}
apache-2.0
samwhitlock/mesos-monitoring
src/examples/java/TestExecutor.java
2656
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.File; import org.apache.mesos.*; import org.apache.mesos.Protos.*; public class TestExecutor implements Executor { @Override public void registered(ExecutorDriver driver, ExecutorInfo executorInfo, FrameworkID frameworkId, FrameworkInfo frameworkInfo, SlaveID slaveId, SlaveInfo slaveInfo) { System.out.println("Registered executor on " + slaveInfo.getHostname()); } @Override public void launchTask(final ExecutorDriver driver, final TaskDescription task) { new Thread() { public void run() { try { TaskStatus status = TaskStatus.newBuilder() .setTaskId(task.getTaskId()) .setState(TaskState.TASK_RUNNING).build(); driver.sendStatusUpdate(status); System.out.println("Running task " + task.getTaskId()); if (task.hasData()) { Thread.sleep(Integer.parseInt(task.getData().toStringUtf8())); } else { Thread.sleep(1000); } status = TaskStatus.newBuilder() .setTaskId(task.getTaskId()) .setState(TaskState.TASK_FINISHED).build(); driver.sendStatusUpdate(status); } catch (Exception e) { e.printStackTrace(); } }}.start(); } @Override public void killTask(ExecutorDriver driver, TaskID taskId) {} @Override public void frameworkMessage(ExecutorDriver driver, byte[] data) {} @Override public void 
shutdown(ExecutorDriver driver) {} @Override public void error(ExecutorDriver driver, int code, String message) {} public static void main(String[] args) throws Exception { MesosExecutorDriver driver = new MesosExecutorDriver(new TestExecutor()); System.exit(driver.run() == Status.OK ? 0 : 1); } }
apache-2.0
hristobakalov/BeFriendy
app/src/main/java/unused/WildcardActivity.java
256
package unused;

// NOTE(review): this activity is entirely commented out and the file lives in
// the "unused" package; it is kept only as a reference skeleton. Confirm with
// the team whether it can be deleted outright.
/*
public class WildcardActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_wildcard);
    }
}
*/
apache-2.0
cowthan/AyoWeibo
ayoview/src/main/java/org/ayo/view/progress/av/indicator/PacmanIndicator.java
3843
package org.ayo.view.progress.av.indicator; import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.RectF; import android.view.animation.LinearInterpolator; import com.nineoldandroids.animation.Animator; import com.nineoldandroids.animation.ValueAnimator; import java.util.ArrayList; import java.util.List; /** * Created by Jack on 2015/10/16. */ public class PacmanIndicator extends BaseIndicatorController{ private float translateX; private int alpha; private float degrees1,degrees2; @Override public void draw(Canvas canvas, Paint paint) { drawPacman(canvas,paint); drawCircle(canvas,paint); } private void drawPacman(Canvas canvas,Paint paint){ float x=getWidth()/2; float y=getHeight()/2; canvas.save(); canvas.translate(x, y); canvas.rotate(degrees1); paint.setAlpha(255); RectF rectF1=new RectF(-x/1.7f,-y/1.7f,x/1.7f,y/1.7f); canvas.drawArc(rectF1, 0, 270, true, paint); canvas.restore(); canvas.save(); canvas.translate(x, y); canvas.rotate(degrees2); paint.setAlpha(255); RectF rectF2=new RectF(-x/1.7f,-y/1.7f,x/1.7f,y/1.7f); canvas.drawArc(rectF2,90,270,true,paint); canvas.restore(); } private void drawCircle(Canvas canvas, Paint paint) { float radius=getWidth()/11; paint.setAlpha(alpha); canvas.drawCircle(translateX, getHeight() / 2, radius, paint); } @Override public List<Animator> createAnimation() { List<Animator> animators=new ArrayList<>(); float startT=getWidth()/11; ValueAnimator translationAnim=ValueAnimator.ofFloat(getWidth()-startT,getWidth()/2); translationAnim.setDuration(650); translationAnim.setInterpolator(new LinearInterpolator()); translationAnim.setRepeatCount(-1); translationAnim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { @Override public void onAnimationUpdate(ValueAnimator animation) { translateX = (float) animation.getAnimatedValue(); postInvalidate(); } }); translationAnim.start(); ValueAnimator alphaAnim=ValueAnimator.ofInt(255,122); alphaAnim.setDuration(650); alphaAnim.setRepeatCount(-1); 
alphaAnim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { @Override public void onAnimationUpdate(ValueAnimator animation) { alpha = (int) animation.getAnimatedValue(); postInvalidate(); } }); alphaAnim.start(); ValueAnimator rotateAnim1=ValueAnimator.ofFloat(0, 45, 0); rotateAnim1.setDuration(650); rotateAnim1.setRepeatCount(-1); rotateAnim1.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { @Override public void onAnimationUpdate(ValueAnimator animation) { degrees1 = (float) animation.getAnimatedValue(); postInvalidate(); } }); rotateAnim1.start(); ValueAnimator rotateAnim2=ValueAnimator.ofFloat(0,-45,0); rotateAnim2.setDuration(650); rotateAnim2.setRepeatCount(-1); rotateAnim2.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { @Override public void onAnimationUpdate(ValueAnimator animation) { degrees2 = (float) animation.getAnimatedValue(); postInvalidate(); } }); rotateAnim2.start(); animators.add(translationAnim); animators.add(alphaAnim); animators.add(rotateAnim1); animators.add(rotateAnim2); return animators; } }
apache-2.0
youxiaxiaomage/yx-platform
yx-platform-sys-service/src/main/java/com/yx/platform/mapper/SysDicMapper.java
350
package com.yx.platform.mapper;

import java.util.List;
import java.util.Map;

import org.apache.ibatis.annotations.Param;

import com.yx.platform.core.base.BaseMapper;
import com.yx.platform.model.SysDic;

/**
 * MyBatis mapper for the {@link SysDic} (system dictionary) entity.
 * Basic CRUD operations are inherited from {@link BaseMapper}; only the paged
 * id lookup is declared here (the SQL presumably lives in the corresponding
 * mapper XML — verify against the XML file).
 */
public interface SysDicMapper extends BaseMapper<SysDic> {

    /**
     * Selects a page of dictionary ids matching the given criteria.
     *
     * @param params query criteria, bound as {@code cm} in the mapper XML
     * @return ids of the matching rows
     */
    List<Long> selectIdPage(@Param("cm") Map<String, Object> params);
}
apache-2.0
learning-layers/SocialSemanticServer
servs/entity/entity.datatypes/src/main/java/at/tugraz/sss/servs/entity/datatype/SSCircleRemoveRet.java
1550
/** * Code contributed to the Learning Layers project * http://www.learning-layers.eu * Development is partly funded by the FP7 Programme of the European Commission under * Grant Agreement FP7-ICT-318209. * Copyright (c) 2015, Graz University of Technology - KTI (Knowledge Technologies Institute). * For a list of contributors see the AUTHORS file at the top-level directory of this distribution. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package at.tugraz.sss.servs.entity.datatype; import at.tugraz.sss.serv.util.*; import at.tugraz.sss.serv.datatype.*; import at.tugraz.sss.serv.datatype.ret.SSServRetI; public class SSCircleRemoveRet extends SSServRetI{ public SSUri circle = null; public String getCircle() throws SSErr{ return SSStrU.removeTrailingSlash(circle); } public static SSCircleRemoveRet get( final SSUri circleUri){ return new SSCircleRemoveRet(circleUri); } private SSCircleRemoveRet( final SSUri circleUri){ super(SSVarNames.circleRemove); this.circle = circleUri; } }
apache-2.0
yangjiandong/sshapp
examples/showcase/src/test/java/org/springside/examples/showcase/unit/jmx/Log4jMBeanTest.java
3539
package org.springside.examples.showcase.unit.jmx;

import static org.junit.Assert.*;

import java.util.List;

import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.springframework.test.context.ContextConfiguration;
import org.springside.modules.jmx.JmxClientTemplate;
import org.springside.modules.log.Log4jMBean;
import org.springside.modules.test.spring.SpringContextTestCase;

/**
 * Test cases for Log4jMBean from springside-extension.
 *
 * All JMX-related test cases use the same @ContextConfiguration so that a
 * single ApplicationContext is shared, avoiding duplicate registration of the
 * JMX port.
 *
 * @author calvin
 */
@ContextConfiguration(locations = { "/applicationContext-test.xml", "/jmx/applicationContext-jmx-server.xml",
		"/jmx/applicationContext-jmx-client.xml", "/log/applicationContext-log.xml" })
public class Log4jMBeanTest extends SpringContextTestCase {

	// JMX client pointed at the RMI connector registered by the server context.
	private JmxClientTemplate jmxClientTemplate;

	@Before
	public void setUp() throws Exception {
		jmxClientTemplate = new JmxClientTemplate("service:jmx:rmi:///jndi/rmi://localhost:1099/jmxrmi");
	}

	@After
	public void tearDown() throws Exception {
		jmxClientTemplate.close();
	}

	@Test
	public void accessRootLoggerLevel() {
		String oldLevel = "WARN";
		String newLevel = "ERROR";

		// Verify the original level.
		assertEquals(oldLevel, jmxClientTemplate.getAttribute(Log4jMBean.LOG4J_MBEAN_NAME, "RootLoggerLevel"));

		// Set the new level.
		jmxClientTemplate.setAttribute(Log4jMBean.LOG4J_MBEAN_NAME, "RootLoggerLevel", newLevel);

		assertEquals(newLevel, Logger.getRootLogger().getLevel().toString());

		// Restore the original level.
		jmxClientTemplate.setAttribute(Log4jMBean.LOG4J_MBEAN_NAME, "RootLoggerLevel", oldLevel);
	}

	@Test
	public void accessLoggerLevel() {
		String loggerName = "foo";
		String oldLevel = "WARN";
		String newLevel = "ERROR";

		// Verify the original level.
		assertEquals(oldLevel, jmxClientTemplate.invoke(Log4jMBean.LOG4J_MBEAN_NAME, "getLoggerLevel",
				new Class[] { String.class }, new Object[] { loggerName }));

		// Set the new level.
		jmxClientTemplate.invoke(Log4jMBean.LOG4J_MBEAN_NAME, "setLoggerLevel", new Class[] { String.class,
				String.class }, new Object[] { loggerName, newLevel });

		assertEquals(newLevel, Logger.getLogger(loggerName).getEffectiveLevel().toString());

		// Restore the original level.
		jmxClientTemplate.invoke(Log4jMBean.LOG4J_MBEAN_NAME, "setLoggerLevel", new Class[] { String.class,
				String.class }, new Object[] { loggerName, oldLevel });
	}

	@Test
	@SuppressWarnings("unchecked")
	public void getLoggerAppenders() {
		// Per the expected values below, appenders contributed by a parent
		// logger are reported with a "(parent)" suffix.
		List<String> list = (List<String>) jmxClientTemplate.invoke(Log4jMBean.LOG4J_MBEAN_NAME, "getLoggerAppenders",
				new Class[] { String.class }, new Object[] { "org.springside" });

		assertEquals(2, list.size());
		assertEquals("Console(parent)", list.get(0));
		assertEquals("RollingFile(parent)", list.get(1));

		list = (List<String>) jmxClientTemplate.invoke(Log4jMBean.LOG4J_MBEAN_NAME, "getLoggerAppenders",
				new Class[] { String.class }, new Object[] { "DBLogExample" });

		assertEquals(2, list.size());
		assertEquals("Console", list.get(0));
		//assertEquals("DBLog", list.get(1));
	}
}
apache-2.0
melthaw/spring-backend-boilerplate
account/rest/src/main/java/in/clouthink/daas/sbb/account/AccountInitializingBean.java
2158
package in.clouthink.daas.sbb.account; import in.clouthink.daas.sbb.account.domain.model.Gender; import in.clouthink.daas.sbb.account.domain.model.SysRole; import in.clouthink.daas.sbb.account.domain.model.User; import in.clouthink.daas.sbb.account.rest.dto.SaveUserParameter; import in.clouthink.daas.sbb.account.service.AccountService; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; /** * @author dz */ public class AccountInitializingBean implements InitializingBean { private static final Log logger = LogFactory.getLog(AccountInitializingBean.class); @Autowired private AccountService accountService; @Autowired private AdministratorAccountProperties administratorAccountProperties; @Override public void afterPropertiesSet() throws Exception { tryCreateAdministrator(); } private void tryCreateAdministrator() { if (StringUtils.isEmpty(administratorAccountProperties.getUsername())) { logger.debug("The administrator user is not pre-configured, we will skip it"); return; } // initialize System Administrator User adminUser = accountService.findByUsername(administratorAccountProperties.getUsername()); if (adminUser != null) { logger.debug("The administrator user is created before, we will skip it"); return; } SaveUserParameter saveSysUserParameter = new SaveUserParameter(); saveSysUserParameter.setUsername(administratorAccountProperties.getUsername()); saveSysUserParameter.setCellphone(administratorAccountProperties.getCellphone()); saveSysUserParameter.setEmail(administratorAccountProperties.getEmail()); saveSysUserParameter.setPassword(administratorAccountProperties.getPassword()); saveSysUserParameter.setGender(Gender.MALE); accountService.createAccount(saveSysUserParameter, SysRole.ROLE_USER, SysRole.ROLE_ADMIN); } }
apache-2.0
alfasoftware/morf
morf-core/src/main/java/org/alfasoftware/morf/jdbc/SqlDialect.java
144949
/* Copyright 2017 Alfa Financial Software * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.alfasoftware.morf.jdbc; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.Date; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import org.alfasoftware.morf.dataset.Record; import org.alfasoftware.morf.metadata.Column; import org.alfasoftware.morf.metadata.DataSetUtils; import org.alfasoftware.morf.metadata.DataSetUtils.RecordBuilder; import org.alfasoftware.morf.metadata.DataType; import org.alfasoftware.morf.metadata.DataValueLookup; import org.alfasoftware.morf.metadata.Index; import org.alfasoftware.morf.metadata.Schema; import org.alfasoftware.morf.metadata.SchemaUtils; import org.alfasoftware.morf.metadata.Table; import org.alfasoftware.morf.metadata.View; import org.alfasoftware.morf.sql.AbstractSelectStatement; import org.alfasoftware.morf.sql.DeleteStatement; import org.alfasoftware.morf.sql.InsertStatement; import org.alfasoftware.morf.sql.MergeStatement; import org.alfasoftware.morf.sql.SelectFirstStatement; import 
org.alfasoftware.morf.sql.SelectStatement; import org.alfasoftware.morf.sql.SelectStatementBuilder; import org.alfasoftware.morf.sql.SetOperator; import org.alfasoftware.morf.sql.SqlElementCallback; import org.alfasoftware.morf.sql.SqlUtils; import org.alfasoftware.morf.sql.Statement; import org.alfasoftware.morf.sql.TruncateStatement; import org.alfasoftware.morf.sql.UnionSetOperator; import org.alfasoftware.morf.sql.UnionSetOperator.UnionStrategy; import org.alfasoftware.morf.sql.UpdateStatement; import org.alfasoftware.morf.sql.element.AliasedField; import org.alfasoftware.morf.sql.element.BlobFieldLiteral; import org.alfasoftware.morf.sql.element.BracketedExpression; import org.alfasoftware.morf.sql.element.CaseStatement; import org.alfasoftware.morf.sql.element.Cast; import org.alfasoftware.morf.sql.element.ConcatenatedField; import org.alfasoftware.morf.sql.element.Criterion; import org.alfasoftware.morf.sql.element.FieldFromSelect; import org.alfasoftware.morf.sql.element.FieldFromSelectFirst; import org.alfasoftware.morf.sql.element.FieldLiteral; import org.alfasoftware.morf.sql.element.FieldReference; import org.alfasoftware.morf.sql.element.Function; import org.alfasoftware.morf.sql.element.Join; import org.alfasoftware.morf.sql.element.JoinType; import org.alfasoftware.morf.sql.element.MathsField; import org.alfasoftware.morf.sql.element.MathsOperator; import org.alfasoftware.morf.sql.element.NullFieldLiteral; import org.alfasoftware.morf.sql.element.Operator; import org.alfasoftware.morf.sql.element.SqlParameter; import org.alfasoftware.morf.sql.element.TableReference; import org.alfasoftware.morf.sql.element.WhenCondition; import org.alfasoftware.morf.sql.element.WindowFunction; import org.alfasoftware.morf.upgrade.ChangeColumn; import org.alfasoftware.morf.util.ObjectTreeTraverser; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.StringUtils; import org.joda.time.LocalDate; import org.joda.time.Months; import 
com.google.common.base.Joiner; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList.Builder; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.hash.Hashing; import com.google.common.io.CharSource; /** * Provides functionality for generating SQL statements. * * @author Copyright (c) Alfa Financial Software 2010 */ public abstract class SqlDialect { /** * */ protected static final String ID_INCREMENTOR_TABLE_COLUMN_VALUE = "value"; /** * */ protected static final String ID_INCREMENTOR_TABLE_COLUMN_NAME = "name"; /** * The width of the id field */ public static final int ID_COLUMN_WIDTH = 19; /** * Label to identify the real name of an object in an SQL comment */ public static final String REAL_NAME_COMMENT_LABEL = "REALNAME"; /** * Empty collection of strings that implementations can return if required. */ public static final Collection<String> NO_STATEMENTS = ImmutableList.of(); /** * IllegalArgumentException message */ private static final String CANNOT_CONVERT_NULL_STATEMENT_TO_SQL = "Cannot convert a null statement to SQL"; /** * Used as the alias for the select statement in merge statements. */ protected static final String MERGE_SOURCE_ALIAS = "xmergesource"; /** * Database schema name. */ private final String schemaName; /** * Returns the database schema name. May be null. * @return The schema name */ public String getSchemaName() { return schemaName; } /** * @param schemaName The schema to use for statements. */ public SqlDialect(String schemaName) { super(); this.schemaName = schemaName; } /** * Creates SQL to deploy a database table and its associated indexes. * * @param table The meta data for the table to deploy. * @return The statements required to deploy the table and its indexes. 
*/ public Collection<String> tableDeploymentStatements(Table table) { Builder<String> statements = ImmutableList.<String>builder(); statements.addAll(internalTableDeploymentStatements(table)); for (Index index : table.indexes()) { statements.addAll(indexDeploymentStatements(table, index)); } return statements.build(); } /** * Creates the SQL to deploy a database table. * * @param table The meta data for the table to deploy. * @return The statements required to deploy the table. */ protected abstract Collection<String> internalTableDeploymentStatements(Table table); /** * Creates SQL to deploy a database view. * * @param view The meta data for the view to deploy. * @return The statements required to deploy the view. */ public Collection<String> viewDeploymentStatements(View view) { List<String> statements = new ArrayList<>(); // Create the table deployment statement StringBuilder createTableStatement = new StringBuilder(); createTableStatement.append("CREATE "); createTableStatement.append("VIEW "); createTableStatement.append(schemaNamePrefix()); createTableStatement.append(view.getName()); createTableStatement.append(" AS ("); createTableStatement.append(convertStatementToSQL(view.getSelectStatement())); createTableStatement.append(")"); statements.add(createTableStatement.toString()); return statements; } /** * Creates SQL script to deploy a database view. * * @param view The meta data for the view to deploy. * @return The statements required to deploy the view joined into a script. */ public String viewDeploymentStatementsAsScript(View view) { final String firstLine = "-- " + getDatabaseType().identifier() + "\n"; return viewDeploymentStatements(view) .stream().collect(Collectors.joining(";\n", firstLine, ";")); } /** * Creates SQL script to deploy a database view. * * @param view The meta data for the view to deploy. * @return The statements required to deploy the view joined into a script and prepared as literals. 
*/ public AliasedField viewDeploymentStatementsAsLiteral(View view) { return SqlUtils.literal(viewDeploymentStatementsAsScript(view)); } /** * Creates SQL to truncate a table (may require DBA rights on some databases * e.g. Oracle). * * @param table The database table. * @return SQL statements required to clear a table and prepare it for data * population. */ public Collection<String> truncateTableStatements(Table table) { return ImmutableList.of("TRUNCATE TABLE " + schemaNamePrefix(table) + table.getName()); } /** * Creates SQL to rename a table. * * @param from - table to rename * @param to - table with new name * @return SQL statements required to change a table name. */ public Collection<String> renameTableStatements(Table from, Table to) { return ImmutableList.of("ALTER TABLE " + schemaNamePrefix(from) + from.getName() + " RENAME TO " + to.getName()); } /** * Creates SQL to rename an index. * * @param table table on which the index exists * @param fromIndexName The index to rename * @param toIndexName The new index name * @return SQL Statements required to rename an index */ public Collection<String> renameIndexStatements(Table table, String fromIndexName, String toIndexName) { return ImmutableList.of("ALTER INDEX " + schemaNamePrefix(table) + fromIndexName + " RENAME TO " + toIndexName); } /** * @param table - table to perform this action on * @param oldPrimaryKeyColumns - the existing primary key columns * @param newPrimaryKeyColumns - the new primary key columns * @return SQL Statements required to change the primary key columns */ public abstract Collection<String> changePrimaryKeyColumns(Table table, List<String> oldPrimaryKeyColumns, List<String> newPrimaryKeyColumns); /** * Creates SQL to delete all records from a table (doesn't use truncate). * * @param table the database table to clear * @return SQL statements required to clear the table. 
*/ public Collection<String> deleteAllFromTableStatements(Table table) { return ImmutableList.of("DELETE FROM " + schemaNamePrefix(table) + table.getName()); } /** * Creates SQL to execute prior to bulk-inserting to a table. * * @param table {@link Table} to be inserted to. * @param insertingUnderAutonumLimit Determines whether we are inserting under an auto-numbering limit. * @return SQL statements to be executed prior to insert. */ @SuppressWarnings("unused") public Collection<String> preInsertWithPresetAutonumStatements(Table table, boolean insertingUnderAutonumLimit) { return ImmutableList.of(); } /** * Creates SQL to execute after bulk-inserting to a table. * * @param table The table that was populated. * @param executor The executor to use * @param connection The connection to use * @param insertingUnderAutonumLimit Determines whether we are inserting under an auto-numbering limit. */ @SuppressWarnings("unused") public void postInsertWithPresetAutonumStatements(Table table, SqlScriptExecutor executor, Connection connection, boolean insertingUnderAutonumLimit) { } /** * Make sure the table provided has its next autonum value set to at least the value specified in the column metadata. * * <p>Generally databases do not need to do anything special here, but MySQL can lose the value.</p> * * @param table The table to repair. * @param executor The executor to use * @param connection The connection to use */ @SuppressWarnings("unused") public void repairAutoNumberStartPosition(Table table, SqlScriptExecutor executor, Connection connection) { } /** * Returns an SQL statement which can be used to test that a connection * remains alive and usable. * <p> * The connection has to be fast to execute and side-effect free. * </p> * * @return A connection test statement, or null if none is available for this * dialect. */ public abstract String connectionTestStatement(); /** * Converts a {@link Statement} to the equivalent SQL text. 
* * @param statement the statement to convert * @param databaseMetadata the database schema. If null, no defaulting is * performed. * @param idTable the id table. If null, no automatic setting of id numbers is * performed. * @return a string containing the SQL to run against the database */ public List<String> convertStatementToSQL(Statement statement, Schema databaseMetadata, Table idTable) { if (statement instanceof InsertStatement) { InsertStatement insert = (InsertStatement) statement; if (databaseMetadata == null || isAutonumbered(insert, databaseMetadata)) { return convertStatementToSQL(insert); } else { return convertStatementToSQL(insert, databaseMetadata, idTable); } } else if (statement instanceof UpdateStatement) { UpdateStatement update = (UpdateStatement) statement; return ImmutableList.of(convertStatementToSQL(update)); } else if (statement instanceof DeleteStatement) { DeleteStatement delete = (DeleteStatement) statement; return ImmutableList.of(convertStatementToSQL(delete)); } else if (statement instanceof TruncateStatement) { TruncateStatement truncateStatement = (TruncateStatement) statement; return ImmutableList.of(convertStatementToSQL(truncateStatement)); } else if (statement instanceof MergeStatement) { MergeStatement merge = (MergeStatement) statement; return ImmutableList.of(convertStatementToSQL(merge)); } else { throw new UnsupportedOperationException("Executed statement operation not supported for [" + statement.getClass() + "]"); } } /** * Checks whether the table the InsertStatement is referring to has any * autonumbered columns * * @param statement The statement to check * @param databaseMetadata the database schema * @return true if autonumbered, false otherwise */ protected boolean isAutonumbered(InsertStatement statement, Schema databaseMetadata) { if (statement.getTable() != null) { Table tableInserting = databaseMetadata.getTable(statement.getTable().getName()); for (Column col : tableInserting.columns()) { if 
(col.isAutoNumbered()) { return true; } } } return false; } /** * Converts a simple parameterised insert statement into a single SQL string. * * @param statement the statement to convert * @param databaseMetadata the database schema. * @return a string containing the SQL to run against the database */ public String convertStatementToSQL(InsertStatement statement, Schema databaseMetadata) { if (!statement.isParameterisedInsert()) { throw new IllegalArgumentException("Non-parameterised insert statements must supply the id table."); } return buildParameterisedInsert(statement, databaseMetadata); } /** * Converts a structured {@link InsertStatement} to the equivalent SQL text. * * @param statement the statement to convert * @return a string containing the SQL to run against the database */ public List<String> convertStatementToSQL(InsertStatement statement) { if (statement == null) { throw new IllegalArgumentException(CANNOT_CONVERT_NULL_STATEMENT_TO_SQL); } expandInnerSelectFields(statement.getSelectStatement()); // If this is a specific values insert then use a standard converter if (statement.isSpecificValuesInsert()) { return buildSpecificValueInsert(statement, null, null); } return getSqlFromInsert(statement, null, null); } /** * Converts a structured {@link InsertStatement} to the equivalent SQL text. * * @param statement the statement to convert * @param databaseMetadata the database schema. If null, no defaulting is * performed. * @param idTable the id table. If null, no automatic setting of id numbers is * performed. 
* @return a string containing the SQL to run against the database */ public List<String> convertStatementToSQL(InsertStatement statement, Schema databaseMetadata, Table idTable) { if (statement == null) { throw new IllegalArgumentException(CANNOT_CONVERT_NULL_STATEMENT_TO_SQL); } expandInnerSelectFields(statement.getSelectStatement()); InsertStatement defaultedStatement = new InsertStatementDefaulter(databaseMetadata).defaultMissingFields(statement); // If this is a parameterised insert then use a standard converter if (statement.isParameterisedInsert()) { return ImmutableList.of(buildParameterisedInsert(defaultedStatement, databaseMetadata)); } // If this is a specific values insert then use a standard converter if (statement.isSpecificValuesInsert()) { return buildSpecificValueInsert(defaultedStatement, databaseMetadata, idTable); } return getSqlFromInsert(expandInsertStatement(defaultedStatement, databaseMetadata), databaseMetadata, idTable); } /** * Creates the fields from any inner selects on the outer select. * * @param statement the select statement to expand. */ private void expandInnerSelectFields(SelectStatement statement) { if (statement == null || !statement.getFields().isEmpty() || statement.getFromSelects().isEmpty()) { return; } for (SelectStatement selectStatement : statement.getFromSelects()) { expandInnerSelectFields(selectStatement); for (AliasedField field : selectStatement.getFields()) { statement.appendFields(new FieldReference(new TableReference(selectStatement.getAlias()), field.getAlias())); } } } /** * Extracts the parameters from a SQL statement. * * @param statement the SQL statement. * @return the list of parameters. */ public List<SqlParameter> extractParameters(SelectStatement statement) { SqlParameterExtractor extractor = new SqlParameterExtractor(); ObjectTreeTraverser.forCallback(extractor).dispatch(statement); return extractor.list; } /** * Extracts the parameters from a SQL statement. * * @param statement the SQL statement. 
* @return the list of parameters. */ public List<SqlParameter> extractParameters(SelectFirstStatement statement) { SqlParameterExtractor extractor = new SqlParameterExtractor(); ObjectTreeTraverser.forCallback(extractor).dispatch(statement); return extractor.list; } /** * Extracts the parameters from a SQL statement. * * @param statement the SQL statement. * @return the list of parameters. */ public List<SqlParameter> extractParameters(MergeStatement statement) { SqlParameterExtractor extractor = new SqlParameterExtractor(); ObjectTreeTraverser.forCallback(extractor).dispatch(statement); return extractor.list; } /** * Extracts the parameters from a SQL statement. * * @param statement the SQL statement. * @return the list of parameters. */ public List<SqlParameter> extractParameters(UpdateStatement statement) { SqlParameterExtractor extractor = new SqlParameterExtractor(); ObjectTreeTraverser.forCallback(extractor).dispatch(statement); return extractor.list; } /** * Extracts the parameters from a SQL statement. * * @param statement the SQL statement. * @return the list of parameters. */ public List<SqlParameter> extractParameters(DeleteStatement statement) { SqlParameterExtractor extractor = new SqlParameterExtractor(); ObjectTreeTraverser.forCallback(extractor).dispatch(statement); return extractor.list; } /** * Extracts the parameters from a SQL statement. * * @param statement the SQL statement. * @return the list of parameters. */ public List<SqlParameter> extractParameters(TruncateStatement statement) { SqlParameterExtractor extractor = new SqlParameterExtractor(); ObjectTreeTraverser.forCallback(extractor).dispatch(statement); return extractor.list; } /** * Extracts the parameters from a SQL statement. * * @param statement the SQL statement. * @return the list of parameters. 
*/ public List<SqlParameter> extractParameters(InsertStatement statement) { SqlParameterExtractor extractor = new SqlParameterExtractor(); ObjectTreeTraverser.forCallback(extractor).dispatch(statement); return extractor.list; } /** * SQL visitor which extracts parameters to a list. * * @author Copyright (c) Alfa Financial Software 2014 */ private static class SqlParameterExtractor extends SqlElementCallback { final List<SqlParameter> list = Lists.newArrayList(); @Override public void visit(AliasedField field) { if (field instanceof SqlParameter) { list.add((SqlParameter)field); } } } /** * Converts a structured {@link UpdateStatement} to the equivalent SQL text. * * @param statement the statement to convert * @return a string containing the SQL to run against the database */ public String convertStatementToSQL(UpdateStatement statement) { if (statement == null) { throw new IllegalArgumentException(CANNOT_CONVERT_NULL_STATEMENT_TO_SQL); } return getSqlFrom(statement); } /** * Converts a structured {@link MergeStatement} to the equivalent SQL text. * * @param statement the statement to convert * @return a string containing the SQL to run against the database */ public String convertStatementToSQL(MergeStatement statement) { if (statement == null) { throw new IllegalArgumentException(CANNOT_CONVERT_NULL_STATEMENT_TO_SQL); } return getSqlFrom(statement); } /** * Converts a structured {@link DeleteStatement} to the equivalent SQL text. * * @param statement the statement to convert * @return a string containing the SQL to run against the database */ public String convertStatementToSQL(DeleteStatement statement) { if (statement == null) { throw new IllegalArgumentException(CANNOT_CONVERT_NULL_STATEMENT_TO_SQL); } return getSqlFrom(statement); } /** * Converts a {@link TruncateStatement} to SQL. 
* * @param statement The statement to convert * @return The SQL represented by the statement */ public String convertStatementToSQL(TruncateStatement statement) { return truncateTableStatements(SchemaUtils.table(statement.getTable().getName())).iterator().next(); } /** * Whether insert statement batching should be used for this dialect to * improve performance. * * @return <var>true</var> if code should use statement batches to reduce * overhead when bulk inserting data. */ public boolean useInsertBatching() { return true; } /** * Different JDBC drivers and platforms have different behaviour for paging results * into a {@link ResultSet} as they are fetched. For example, MySQL defaults * to <a href="http://stackoverflow.com/questions/20496616/fetchsize-in-resultset-set-to-0-by-default">fetching * <em>all</em> records</a> into memory, whereas Oracle defaults to fetching * <a href="https://docs.oracle.com/cd/A87860_01/doc/java.817/a83724/resltse5.htm">10 * records</a> at a time. * * <p>The impact mostly rears its head during bulk loads (when loading large numbers * of records). MySQL starts to run out of memory, and Oracle does not run at * optimal speed due to unnecessary round-trips.</p> * * <p>This provides the ability for us to specify different fetch sizes for bulk loads * on different platforms. Refer to the individual implementations for reasons for * the choices there.</p> * * @return The number of rows to try and fetch at a time (default) when * performing bulk select operations. * @see #fetchSizeForBulkSelectsAllowingConnectionUseDuringStreaming() */ public int fetchSizeForBulkSelects() { return 1; } /** * When using a "streaming" {@link ResultSet} (i.e. any where the fetch size indicates that fewer * than all the records should be returned at a time), MySQL does not permit the connection * to be used for anything else. Therefore we have an alternative fetch size here specifically * for the scenario where this is unavoidable. 
 *
 * <p>In practice this returns the same value except for on MySQL, where we use it to
 * effectively disable streaming if we know the connection will be used. This means
 * certain types of processing are liable to cause high memory usage on MySQL.</p>
 *
 * @return The number of rows to try and fetch at a time (default) when
 *         performing bulk select operations and needing to use the connection while
 *         the {@link ResultSet} is open.
 * @see #fetchSizeForBulkSelects()
 */
public int fetchSizeForBulkSelectsAllowingConnectionUseDuringStreaming() {
  return fetchSizeForBulkSelects();
}


/**
 * @return The schema prefix (including the dot) or blank if the schema's blank.
 */
public String schemaNamePrefix() {
  if (StringUtils.isEmpty(schemaName)) {
    return "";
  }

  // NOTE(review): upper-cases with the default locale — confirm safe for all deployment locales (e.g. Turkish dotless-i).
  return schemaName.toUpperCase() + ".";
}


/**
 * @param table The table for which the schema name will be retrieved
 * @return Base implementation calls {@link #schemaNamePrefix()}.
 */
protected String schemaNamePrefix(@SuppressWarnings("unused") Table table) {
  return schemaNamePrefix();
}


/**
 * @param tableRef The table reference from which the schema name will be extracted
 * @return The schema prefix of the specified table (including the dot), the
 *         dialect's schema prefix or blank if neither is specified (in that order).
 */
protected String schemaNamePrefix(TableReference tableRef) {
  if (StringUtils.isEmpty(tableRef.getSchemaName())) {
    return schemaNamePrefix();
  } else {
    return tableRef.getSchemaName().toUpperCase() + ".";
  }
}


/**
 * Creates SQL to drop the named table.
 *
 * @param table The table to drop
 * @return The SQL statements as strings.
 */
public Collection<String> dropStatements(Table table) {
  return ImmutableList.of("DROP TABLE " + schemaNamePrefix(table) + table.getName());
}


/**
 * Creates SQL to drop the named view.
 *
 * @param view The view to drop
 * @return The SQL statements as strings.
 */
public Collection<String> dropStatements(View view) {
  return ImmutableList.of("DROP VIEW " + schemaNamePrefix() + view.getName() + " IF EXISTS CASCADE");
}


/**
 * Convert a {@link SelectStatement} into standards compliant SQL.
 * <p>
 * For example, the following code:
 * </p>
 * <blockquote>
 *
 * <pre>
 * SelectStatement stmt = new SelectStatement().from(new Table(&quot;agreement&quot;));
 * String result = sqlgen.getSqlFrom(stmt);
 * </pre>
 *
 * </blockquote>
 * <p>
 * Will populate {@code result} with:
 * </p>
 * <blockquote>
 *
 * <pre>
 * SELECT * FROM agreement
 * </pre>
 *
 * </blockquote>
 *
 * @param stmt the select statement to generate SQL for
 * @return a standards compliant SQL SELECT statement
 */
protected String getSqlFrom(SelectStatement stmt) {
  StringBuilder result = new StringBuilder("SELECT ");

  // Any hint directives which should be inserted before the field list
  result.append(selectStatementPreFieldDirectives(stmt));

  // Start by checking if this is a distinct call, then add the field list
  if (stmt.isDistinct()) {
    result.append("DISTINCT ");
  }

  if (stmt.getFields().isEmpty()) {
    // No explicit fields means select everything.
    result.append("*");
  } else {
    boolean firstField = true;
    for (AliasedField currentField : stmt.getFields()) {
      if (!firstField) {
        result.append(", ");
      }
      result.append(getSqlFrom(currentField));
      // Put an alias in, if requested
      appendAlias(result, currentField);
      firstField = false;
    }
  }

  // Assemble the remaining clauses in standard SQL order.
  appendFrom(result, stmt);
  appendJoins(result, stmt, innerJoinKeyword(stmt));
  appendWhere(result, stmt);
  appendGroupBy(result, stmt);
  appendHaving(result, stmt);
  appendUnionSet(result, stmt);
  appendOrderBy(result, stmt);

  if (stmt.isForUpdate()) {
    // Fail fast on combinations H2 cannot execute with FOR UPDATE.
    if (stmt.isDistinct() || !stmt.getGroupBys().isEmpty() || !stmt.getJoins().isEmpty()) {
      throw new IllegalArgumentException("GROUP BY, JOIN or DISTINCT cannot be combined with FOR UPDATE (H2 limitations)");
    }
    result.append(getForUpdateSql());
  }

  // Any hint directives which should be inserted right at the end of the statement
  result.append(selectStatementPostStatementDirectives(stmt));

  return result.toString();
}


/**
 * Returns any SQL code which should be added between a <code>SELECT</code> and the field
 * list for dialect-specific reasons.
 *
 * @param selectStatement The select statement
 * @return Any hint code required.
 */
protected String selectStatementPreFieldDirectives(@SuppressWarnings("unused") SelectStatement selectStatement) {
  return StringUtils.EMPTY;
}


/**
 * Returns any SQL code which should be added between a <code>UPDATE</code> and the table
 * for dialect-specific reasons.
 *
 * @param updateStatement The update statement
 * @return Any hint code required.
 */
protected String updateStatementPreTableDirectives(@SuppressWarnings("unused") UpdateStatement updateStatement) {
  return StringUtils.EMPTY;
}


/**
 * Returns any SQL code which should be added at the end of a statement for dialect-specific reasons.
 *
 * @param selectStatement The select statement
 * @return Any hint code required.
 */
protected String selectStatementPostStatementDirectives(@SuppressWarnings("unused") SelectStatement selectStatement) {
  return StringUtils.EMPTY;
}


/**
 * Default behaviour for FOR UPDATE. Can be overridden.
 *
 * @return The String representation of the FOR UPDATE clause.
 */
protected String getForUpdateSql() {
  return " FOR UPDATE";
}


/**
 * appends alias to the result
 *
 * @param result alias will be appended to this
 * @param currentField field to be aliased
 */
protected void appendAlias(StringBuilder result, AliasedField currentField) {
  if (!StringUtils.isBlank(currentField.getAlias())) {
    result.append(String.format(" AS %s", currentField.getAlias()));
  }
}


/**
 * Convert a {@link SelectFirstStatement} into SQL. This is the same format
 * used for H2, MySQL. Oracle and SqlServer implementation override this
 * function.
* * @param stmt the select statement to generate SQL for * @return SQL string specific for database platform */ protected String getSqlFrom(SelectFirstStatement stmt) { StringBuilder result = new StringBuilder("SELECT "); // Start by adding the field result.append(getSqlFrom(stmt.getFields().get(0))); appendFrom(result, stmt); appendJoins(result, stmt, innerJoinKeyword(stmt)); appendWhere(result, stmt); appendOrderBy(result, stmt); result.append(" LIMIT 0,1"); return result.toString().trim(); } /** * appends joins clauses to the result * * @param result joins will be appended here * @param stmt statement with joins clauses * @param innerJoinKeyword The keyword for INNER JOIN * @param <T> The type of {@link AbstractSelectStatement} */ protected <T extends AbstractSelectStatement<T>> void appendJoins(StringBuilder result, AbstractSelectStatement<T> stmt, String innerJoinKeyword) { for (Join currentJoin : stmt.getJoins()) { appendJoin(result, currentJoin, innerJoinKeyword); } } /** * appends order by clause to the result * * @param result order by clause will be appended here * @param stmt statement with order by clause * @param <T> The type of AbstractSelectStatement */ protected <T extends AbstractSelectStatement<T>> void appendOrderBy(StringBuilder result, AbstractSelectStatement<T> stmt) { if (!stmt.getOrderBys().isEmpty()) { result.append(" ORDER BY "); boolean firstOrderByField = true; for (AliasedField currentOrderByField : stmt.getOrderBys()) { if (!firstOrderByField) { result.append(", "); } result.append(getSqlForOrderByField(currentOrderByField)); firstOrderByField = false; } } } /** * appends union set operators to the result * * @throws UnsupportedOperationException if any other than * {@link UnionSetOperator} set operation found * @param result union set operators will be appended here * @param stmt statement with set operators */ protected void appendUnionSet(StringBuilder result, SelectStatement stmt) { if (stmt.getSetOperators() != null) { for 
(SetOperator operator : stmt.getSetOperators()) { if (operator instanceof UnionSetOperator) { result.append(getSqlFrom((UnionSetOperator) operator)); } else { throw new UnsupportedOperationException("Unsupported set operation"); } } } } /** * appends having clause to the result * * @param result having clause will be appended here * @param stmt statement with having clause */ protected void appendHaving(StringBuilder result, SelectStatement stmt) { if (stmt.getHaving() != null) { result.append(" HAVING "); result.append(getSqlFrom(stmt.getHaving())); } } /** * appends group by clause to the result * * @param result group by clause will be appended here * @param stmt statement with group by clause */ protected void appendGroupBy(StringBuilder result, SelectStatement stmt) { if (stmt.getGroupBys().size() > 0) { result.append(" GROUP BY "); boolean firstGroupByField = true; for (AliasedField currentGroupByField : stmt.getGroupBys()) { if (!firstGroupByField) { result.append(", "); } result.append(getSqlFrom(currentGroupByField)); firstGroupByField = false; } } } /** * appends where clause to the result * * @param result where clause will be appended here * @param stmt statement with where clause * @param <T> The type of AbstractSelectStatement */ protected <T extends AbstractSelectStatement<T>> void appendWhere(StringBuilder result, AbstractSelectStatement<T> stmt) { if (stmt.getWhereCriterion() != null) { result.append(" WHERE "); result.append(getSqlFrom(stmt.getWhereCriterion())); } } /** * appends from clause to the result * * @param result from clause will be appended here * @param stmt statement with from clause * @param <T> The type of AbstractSelectStatement */ protected <T extends AbstractSelectStatement<T>> void appendFrom(StringBuilder result, AbstractSelectStatement<T> stmt) { if (stmt.getTable() != null) { result.append(" FROM "); result.append(schemaNamePrefix(stmt.getTable())); result.append(stmt.getTable().getName()); // Add a table alias if necessary 
if (!stmt.getTable().getAlias().equals("")) { result.append(" "); result.append(stmt.getTable().getAlias()); } } else if (!stmt.getFromSelects().isEmpty()) { result.append(" FROM "); boolean first = true; for (SelectStatement innerSelect : stmt.getFromSelects()) { checkSelectStatementHasNoHints(innerSelect, "Hints not currently permitted on subqueries"); if (!first) { result.append(", "); } first = false; result.append(String.format("(%s)", getSqlFrom(innerSelect))); if (StringUtils.isNotBlank(innerSelect.getAlias())) { result.append(String.format(" %s", innerSelect.getAlias())); } } } else { result.append(getFromDummyTable()); } } /** * gets SQL for field inside order by clause. it covers null values handling and direction. * <p> * If the field if {@link FieldReference} then overloaded method is called, * otherwise default SQL (with default null values handling type and direction) is applied * </p> * * @param currentOrderByField field within order by clause * @return SQL for the field inside order by clause */ protected String getSqlForOrderByField(AliasedField currentOrderByField) { if (currentOrderByField instanceof FieldReference) { return getSqlForOrderByField((FieldReference) currentOrderByField); } StringBuilder result = new StringBuilder(getSqlFrom(currentOrderByField)); result.append(" ").append(defaultNullOrder()); return result.toString().trim(); } /** * When executing order by a database platform may apply its defaults for: - * null values handling (to either put them first or last) - direction * (ascending or descending) * <p> * In order for explicit settings. e.g: order by field asc nulls last. * </p> * <p> * This method is an implementation for H2 and Oracle. For other dialects it required being overridden. 
* </p> * * @param orderByField field reference ordered by * @return sql for order by field */ protected String getSqlForOrderByField(FieldReference orderByField) { StringBuilder result = new StringBuilder(getSqlFrom(orderByField)); switch (orderByField.getDirection()) { case DESCENDING: result.append(" DESC"); break; case ASCENDING: case NONE: default: break; } result.append(getSqlForOrderByFieldNullValueHandling(orderByField)); return result.toString().trim(); } /** * Get the SQL expression for NULL values handling. * @param orderByField The order by clause * @return The resulting SQL String * */ protected String getSqlForOrderByFieldNullValueHandling(FieldReference orderByField) { if (orderByField.getNullValueHandling().isPresent()) { switch (orderByField.getNullValueHandling().get()) { case FIRST: return " NULLS FIRST"; case LAST: return " NULLS LAST"; case NONE: default: return ""; } } else { return " " + defaultNullOrder(); } } /** * An additional clause to use in SELECT statements where there is no select * source, which allows us to include "FROM &lt;dummy table&gt;" on RDBMSes such as * Oracle where selecting from no table is not allowed but the RDBMS provides * a dummy table (such as "dual"). * * @return the additional clause. 
*/ protected String getFromDummyTable() { return StringUtils.EMPTY; } /** * @param result the string builder to append to * @param join the join statement * @param innerJoinKeyword usually an INNER JOIN, but this can be changed for optimisations */ protected void appendJoin(StringBuilder result, Join join, String innerJoinKeyword) { // Put the type in switch (join.getType()) { case INNER_JOIN: result.append(" ").append(innerJoinKeyword).append(" "); break; case LEFT_OUTER_JOIN: result.append(" LEFT OUTER JOIN "); break; case FULL_OUTER_JOIN: result.append(" FULL OUTER JOIN "); break; default: throw new UnsupportedOperationException("Cannot perform join of type [" + join.getType() + "] on database"); } if (join.getTable() == null && (join.getSubSelect() == null || join.getSubSelect().getAlias() == null)) { throw new IllegalArgumentException("Join clause does not specify table or sub-select with an alias"); } if (join.getTable() == null) { result.append('('); result.append(getSqlFrom(join.getSubSelect())); result.append(") "); result.append(join.getSubSelect().getAlias()); } else { // Now add the table name result.append(String.format("%s%s", schemaNamePrefix(join.getTable()), join.getTable().getName())); // And add an alias if necessary if (!join.getTable().getAlias().isEmpty()) { result.append(" ").append(join.getTable().getAlias()); } } if (join.getCriterion() != null) { result.append(" ON "); // Then put the join fields into the output result.append(getSqlFrom(join.getCriterion())); } else if (join.getType() == JoinType.LEFT_OUTER_JOIN || join.getType() == JoinType.FULL_OUTER_JOIN) { throw new IllegalArgumentException(join.getType() + " must have ON criteria"); } else { // MySql supports no ON criteria and ON TRUE, but the other platforms // don't, so just keep things simple. result.append(String.format(" ON 1=1")); } } /** * @param stmt The statement. * @return The keyword to use for an inner join on the specified statement. 
This only differs * in response to hints. */ protected String innerJoinKeyword(@SuppressWarnings("unused") AbstractSelectStatement<?> stmt) { return "INNER JOIN"; } /** * Converts a {@link UnionSetOperator} into SQL. * * @param operator the union to convert. * @return a string representation of the field. */ protected String getSqlFrom(UnionSetOperator operator) { return String.format(" %s %s", operator.getUnionStrategy() == UnionStrategy.ALL ? "UNION ALL" : "UNION", getSqlFrom(operator.getSelectStatement())); } /** * A database platform may need to specify the null order. * <p> * If a null order is not required for a SQL dialect descendant classes need * to implement this method. * </p> * * @return the null order for an SQL dialect */ protected String defaultNullOrder() { return StringUtils.EMPTY; } /** * Convert an {@link InsertStatement} into standards compliant SQL. * <p> * For example, the following code: * </p> * <blockquote> * * <pre> * InsertStatement stmt = new InsertStatement().into(new Table(&quot;agreement&quot;)).from(new Table(&quot;agreement&quot;)); * String result = dialect * .getSqlFrom(stmt); * </pre> * * </blockquote> * <p> * Will populate {@code result} with: * </p> * <blockquote> * * <pre> * INSERT INTO agreement (id, version, ...) SELECT id, version, ... FROM agreement * </pre> * * </blockquote> * * @param stmt the insert statement to generate SQL for * @param metadata the database schema. * @param idTable the ID Table. 
 * @return a standards compliant SQL INSERT statement
 */
protected List<String> getSqlFromInsert(InsertStatement stmt, Schema metadata, Table idTable) {
  if (stmt.getTable() == null) {
    throw new IllegalArgumentException("Cannot specify a null destination table in an insert statement");
  }

  if (stmt.getSelectStatement() == null) {
    throw new IllegalArgumentException("Cannot specify a null for the source select statement in getSqlFrom");
  }

  SelectStatement sourceStatement = stmt.getSelectStatement();

  List<String> result = new LinkedList<>();

  StringBuilder stringBuilder = new StringBuilder();
  stringBuilder.append(getSqlForInsertInto(stmt));
  stringBuilder.append(schemaNamePrefix(stmt.getTable()));
  stringBuilder.append(stmt.getTable().getName());
  stringBuilder.append(" ");

  // Add the destination fields
  if (!stmt.getFields().isEmpty()) {

    // Only check the field count if we're operating with full knowledge of
    // the schema.
    // If we're not, then frankly the code concerned should know what it's
    // doing (e.g.
    // using DB auto-incremement columns or allowing fields to self-default)
    if (metadata != null && stmt.getFields().size() != sourceStatement.getFields().size()) {
      throw new IllegalArgumentException(String.format(
        "Insert statement and source select statement must use the same number of columns. Insert has [%d] but select has [%d].",
        stmt.getFields().size(), sourceStatement.getFields().size()));
    }

    // Use the fields specified by the caller
    stringBuilder.append("(");
    boolean firstRun = true;
    boolean explicitIdColumn = false;
    boolean explicitVersionColumn = false;
    for (AliasedField currentField : stmt.getFields()) {
      if (!(currentField instanceof FieldReference)) {
        throw new IllegalArgumentException("Cannot use a non-field reference in the fields section of an insert statement: [" + currentField.getAlias() + "]");
      }

      FieldReference fieldRef = (FieldReference) currentField;

      if (!firstRun) {
        stringBuilder.append(", ");
      }

      stringBuilder.append(fieldRef.getName());

      // Track if we have an id column (i.e. we don't need to default one in)
      explicitIdColumn |= fieldRef.getName().equalsIgnoreCase("id");

      // Track if we have a version column (i.e. we don't need to default one
      // in)
      explicitVersionColumn |= fieldRef.getName().equalsIgnoreCase("version");

      firstRun = false;
    }

    // Only augment the statement if we have the schema to work from
    if (metadata != null && idTable != null) {
      // If no explicit id was supplied, default one in from the autonumber id table.
      if (!explicitIdColumn && hasColumnNamed(stmt.getTable().getName(), metadata, "id")) {
        result.addAll(buildSimpleAutonumberUpdate(stmt.getTable(), "id", idTable, ID_INCREMENTOR_TABLE_COLUMN_NAME, ID_INCREMENTOR_TABLE_COLUMN_VALUE));

        AliasedField idValue = nextIdValue(stmt.getTable(), stmt.getSelectStatement().getTable(), idTable, ID_INCREMENTOR_TABLE_COLUMN_NAME, ID_INCREMENTOR_TABLE_COLUMN_VALUE);
        stringBuilder.append(", id");

        // Augment the select statement
        sourceStatement = sourceStatement.shallowCopy().fields(idValue).build();
      }

      // If no explicit version was supplied, default it to zero.
      if (!explicitVersionColumn && hasColumnNamed(stmt.getTable().getName(), metadata, "version")) {
        stringBuilder.append(", version");

        // Augment the select statement
        sourceStatement = sourceStatement.shallowCopy().fields(SqlUtils.literal(0).as("version")).build();
      }
    }

    stringBuilder.append(") ");
  }

  // Add the select statement
  stringBuilder.append(getSqlFrom(sourceStatement));

  result.add(stringBuilder.toString());

  return result;
}


/**
 * Checks the schema to see if the {@code tableName} has a named column as
 * provided.
 *
 * @param tableName the table name.
 * @param metadata the schema.
 * @param columnName the column name to check for.
 * @return true if a column with the given name is found (case-insensitive).
 */
private boolean hasColumnNamed(String tableName, Schema metadata, String columnName) {
  for (Column currentColumn : metadata.getTable(tableName).columns()) {
    if (currentColumn.getName().equalsIgnoreCase(columnName)) {
      return true;
    }
  }
  return false;
}


/**
 * Convert a {@link Criterion} to a standards compliant expression.
 *
 * @param criterion the criterion to convert into a standard SQL statement
 * @return the string representation of the criterion
 */
protected String getSqlFrom(Criterion criterion) {
  if (criterion == null) {
    throw new IllegalArgumentException("Cannot get SQL for a null criterion object");
  }

  // Start building the string. Every criterion is wrapped in parentheses so
  // that nested AND/OR trees keep their intended precedence.
  StringBuilder result = new StringBuilder("(");

  boolean firstInList = true;

  switch (criterion.getOperator()) {
    case AND:
    case OR:
      for (Criterion currentCriterion : criterion.getCriteria()) {
        if (!firstInList) {
          result.append(String.format(" %s ", criterion.getOperator()));
        }
        result.append(getSqlFrom(currentCriterion));
        firstInList = false;
      }
      break;
    case EQ:
      result.append(getOperatorLine(criterion, "="));
      break;
    case NEQ:
      result.append(getOperatorLine(criterion, "<>"));
      break;
    case GT:
      result.append(getOperatorLine(criterion, ">"));
      break;
    case GTE:
      result.append(getOperatorLine(criterion, ">="));
      break;
    case LT:
      result.append(getOperatorLine(criterion, "<"));
      break;
    case LTE:
      result.append(getOperatorLine(criterion, "<="));
      break;
    case LIKE:
      // LIKE always carries the dialect's escape suffix so '\' escapes work consistently.
      result.append(getOperatorLine(criterion, "LIKE") + likeEscapeSuffix());
      break;
    case ISNULL:
      result.append(String.format("%s IS NULL", getSqlFrom(criterion.getField())));
      break;
    case ISNOTNULL:
      result.append(String.format("%s IS NOT NULL", getSqlFrom(criterion.getField())));
      break;
    case NOT:
      result.append(String.format("NOT %s", getSqlFrom(criterion.getCriteria().get(0))));
      break;
    case EXISTS:
      result.append(String.format("EXISTS (%s)", getSqlFrom(criterion.getSelectStatement())));
      break;
    case IN:
      // IN may come either as a literal value list or as a sub-select.
      String content;
      if (criterion.getSelectStatement() == null) {
        content = getSqlForCriterionValueList(criterion);
      } else {
        content = getSqlFrom(criterion.getSelectStatement());
      }
      result.append(String.format("%s IN (%s)", getSqlFrom(criterion.getField()), content));
      break;
    default:
      throw new UnsupportedOperationException("Operator of type [" + criterion.getOperator() + "] is not supported in this database");
  }

  result.append(")");

  return result.toString().trim();
}


/**
 * @return The string used to set the SQL LIKE escape character - specified after all LIKE expressions
 */
protected String likeEscapeSuffix() {
  return " ESCAPE '\\'";
}


/**
 * Converts a list of values on a criterion into a comma-separated list.
 *
 * @param criterion The criterion to convert
 * @return The converted criterion as a String
 */
@SuppressWarnings("unchecked")
protected String getSqlForCriterionValueList(Criterion criterion) {
  if (!(criterion.getValue() instanceof List)) {
    throw new IllegalStateException("Invalid parameter for IN criterion");
  }

  StringBuilder builder = new StringBuilder();
  boolean first = true;
  for (Object o : (List<Object>) criterion.getValue()) {
    if (!first) {
      builder.append(", ");
    }
    builder.append(getSqlForCriterionValue(o));
    first = false;
  }
  return builder.toString();
}


/**
 * Convert a {@link AliasedField} into a standard field reference. If the
 * field has a table reference then this will be used as a prefix to the
 * field.
 *
 * @param field the field to generate a reference for
 * @return a string representation of the field
 */
protected String getSqlFrom(AliasedField field) {
  // Dispatch on the concrete field type; order matters where types are related.
  if (field instanceof SqlParameter) {
    return getSqlFrom((SqlParameter)field);
  }
  if (field instanceof BlobFieldLiteral) {
    return getSqlFrom((BlobFieldLiteral)field);
  }
  if (field instanceof FieldLiteral) {
    return getSqlFrom((FieldLiteral) field);
  }
  if (field instanceof Function) {
    return getSqlFrom((Function) field);
  }

  if (field instanceof FieldReference) {
    FieldReference fieldRef = (FieldReference) field;

    String prefix = "";
    // Prefer the table alias over the raw table name as the qualifier.
    if (fieldRef.getTable() != null) {
      if (StringUtils.isEmpty(fieldRef.getTable().getAlias())) {
        prefix = fieldRef.getTable().getName() + ".";
      } else {
        prefix = fieldRef.getTable().getAlias() + ".";
      }
    }

    return prefix + fieldRef.getName();
  }

  // Scalar sub-selects are emitted as parenthesised expressions.
  if (field instanceof FieldFromSelect) {
    return "(" + getSqlFrom((FieldFromSelect) field) + ")";
  }

  if (field instanceof FieldFromSelectFirst) {
    return "(" + getSqlFrom((FieldFromSelectFirst) field) + ")";
  }

  if (field instanceof CaseStatement) {
    return getSqlFrom((CaseStatement) field);
  }

  if (field instanceof ConcatenatedField) {
    return getSqlFrom((ConcatenatedField) field);
  }

  if (field instanceof MathsField) {
    return getSqlFrom((MathsField) field);
  }

  if (field instanceof BracketedExpression) {
    return getSqlFrom((BracketedExpression) field);
  }

  if (field instanceof Cast) {
    return getSqlFrom((Cast) field);
  }

  if(field instanceof WindowFunction) {
    return getSqlFrom((WindowFunction) field);
  }

  if (field instanceof MergeStatement.InputField) {
    return getSqlFrom((MergeStatement.InputField) field);
  }

  throw new IllegalArgumentException("Aliased Field of type [" + field.getClass().getSimpleName() + "] is not supported");
}


/**
 * Convert {@link SqlParameter} into its SQL representation.
 *
 * @param field representing a SQL parameter
 * @return Returns the SQL representation of {@link SqlParameter} (a named
 *         {@code :parameter} placeholder).
 */
protected String getSqlFrom(SqlParameter field) {
  return String.format(":%s", field.getMetadata().getName());
}


/**
 * Convert {@link CaseStatement} into its SQL representation.
 *
 * @param field representing a case statement
 * @return Returns the SQL representation of {@link CaseStatement}.
 * @throws IllegalArgumentException if no when-conditions or no default value are present
 */
protected String getSqlFrom(CaseStatement field) {
  if (field.getWhenConditions().isEmpty()) {
    throw new IllegalArgumentException("Need to specify when conditions for a case statement");
  }

  if (field.getDefaultValue() == null) {
    throw new IllegalArgumentException("default value needs to be specified");
  }

  StringBuilder sqlBuilder = new StringBuilder();
  sqlBuilder.append("CASE");
  for (WhenCondition when : field.getWhenConditions()) {
    sqlBuilder.append(" WHEN ");
    sqlBuilder.append(getSqlFrom(when.getCriterion()));
    sqlBuilder.append(" THEN ");
    sqlBuilder.append(getSqlFrom(when.getValue()));
  }
  sqlBuilder.append(" ELSE ");
  sqlBuilder.append(getSqlFrom(field.getDefaultValue()));
  sqlBuilder.append(" END");
  return sqlBuilder.toString();
}


/**
 * Convert a {@link FieldLiteral} into a standard literal.
 *
 * @param field the field to generate a literal for
 * @return a string representation of the field literal
 */
protected String getSqlFrom(FieldLiteral field) {
  switch (field.getDataType()) {
    case BOOLEAN:
      return getSqlFrom(Boolean.valueOf(field.getValue()));
    case STRING:
      return makeStringLiteral(field.getValue());
    case DATE:
      // This is the ISO standard date literal format
      return String.format("DATE '%s'", field.getValue());
    case DECIMAL:
    case BIG_INTEGER:
    case INTEGER:
    case CLOB:
      // Numeric and CLOB literals are emitted verbatim.
      return field.getValue();
    case NULL:
      if (field.getValue() != null) {
        throw new UnsupportedOperationException("Literals of type NULL must have a null value. Got [" + field.getValue() + "]");
      }
      return "null";
    default:
      throw new UnsupportedOperationException("Cannot convert the specified field literal into an SQL literal: [" + field.getValue() + "]");
  }
}


/**
 * Default implementation will just return the Base64 representation of the binary data, which may not necessarily work with all SQL dialects.
 * Hence appropriate conversions to the appropriate type based on facilities provided by the dialect's SQL vendor implementation should be used.
 *
 * @param field the BLOB field literal
 * @return the SQL construct or base64 string representation of the binary value
 */
protected String getSqlFrom(BlobFieldLiteral field) {
  return String.format("'%s'", field.getValue());
}


/**
 * Turn a string value into an SQL string literal which has that value.
 * <p>
 * We use {@linkplain StringUtils#isEmpty(CharSequence)} because we want to
 * differentiate between a single space and an empty string.
 * </p>
 * <p>
 * This is necessary because char types cannot be null and must contain a
 * single space.
 * <p>
 *
 * @param literalValue the literal value of the string.
 * @return SQL String Literal
 */
protected String makeStringLiteral(String literalValue) {
  if (StringUtils.isEmpty(literalValue)) {
    return "NULL";
  }

  return String.format("'%s'", escapeSql(literalValue));
}


/**
 * Turn a string value into an SQL string literal which has that value.
 * This escapes single quotes as double-single quotes.
 *
 * @param literalValue the value to escape
 * @return escaped value, or null if the input is null
 */
protected String escapeSql(String literalValue) {
  if (literalValue == null) {
    return null;
  }
  return StringUtils.replace(literalValue, "'", "''");
}


/**
 * Convert a {@link ConcatenatedField} into standards compliant sql.
* * @param concatenatedField the field to generate SQL for * @return a string representation of the field literal */ protected String getSqlFrom(ConcatenatedField concatenatedField) { List<String> sql = new ArrayList<>(); for (AliasedField field : concatenatedField.getConcatenationFields()) { // Interpret null values as empty strings sql.add("COALESCE(" + getSqlFrom(field) + ",'')"); } return StringUtils.join(sql, " || "); } /** * Get the name of a function. * * @param function the function to get the name of * @return a string which is the name of the function */ protected String getSqlFrom(Function function) { switch (function.getType()) { case COUNT: if (function.getArguments().isEmpty()) { return getSqlForCount(); } if (function.getArguments().size() == 1) { return getSqlForCount(function); } throw new IllegalArgumentException("The COUNT function should have only have one or zero arguments. This function has " + function.getArguments().size()); case COUNT_DISTINCT: checkSingleArgument(function); return getSqlForCountDistinct(function); case AVERAGE: checkSingleArgument(function); return getSqlForAverage(function); case AVERAGE_DISTINCT: checkSingleArgument(function); return getSqlForAverageDistinct(function); case LENGTH: checkSingleArgument(function); return getSqlforLength(function); case BLOB_LENGTH: checkSingleArgument(function); return getSqlforBlobLength(function); case SOME: checkSingleArgument(function); return getSqlForSome(function); case EVERY: checkSingleArgument(function); return getSqlForEvery(function); case MAX: checkSingleArgument(function); return getSqlForMax(function); case MIN: checkSingleArgument(function); return getSqlForMin(function); case SUM: checkSingleArgument(function); return getSqlForSum(function); case SUM_DISTINCT: checkSingleArgument(function); return getSqlForSumDistinct(function); case IS_NULL: if (function.getArguments().size() != 2) { throw new IllegalArgumentException("The IS_NULL function should have two arguments. 
This function has " + function.getArguments().size()); } return getSqlForIsNull(function); case MOD: if (function.getArguments().size() != 2) { throw new IllegalArgumentException("The MOD function should have two arguments. This function has " + function.getArguments().size()); } return getSqlForMod(function); case SUBSTRING: if (function.getArguments().size() != 3) { throw new IllegalArgumentException("The SUBSTRING function should have three arguments. This function has " + function.getArguments().size()); } return getSqlForSubstring(function); case YYYYMMDD_TO_DATE: if (function.getArguments().size() != 1) { throw new IllegalArgumentException("The YYYYMMDD_TO_DATE function should have one argument. This function has " + function.getArguments().size()); } return getSqlForYYYYMMDDToDate(function); case DATE_TO_YYYYMMDD: if (function.getArguments().size() != 1) { throw new IllegalArgumentException("The DATE_TO_YYYYMMDD function should have one argument. This function has " + function.getArguments().size()); } return getSqlForDateToYyyymmdd(function); case DATE_TO_YYYYMMDDHHMMSS: if (function.getArguments().size() != 1) { throw new IllegalArgumentException("The DATE_TO_YYYYMMDDHHMMSS function should have one argument. This function has " + function.getArguments().size()); } return getSqlForDateToYyyymmddHHmmss(function); case NOW: if (!function.getArguments().isEmpty()) { throw new IllegalArgumentException("The NOW function should have zero arguments. This function has " + function.getArguments().size()); } return getSqlForNow(function); case DAYS_BETWEEN: if (function.getArguments().size() != 2) { throw new IllegalArgumentException("The DAYS_BETWEEN function should have two arguments. 
This function has " + function.getArguments().size()); } return getSqlForDaysBetween(function.getArguments().get(0), function.getArguments().get(1)); case MONTHS_BETWEEN: if (function.getArguments().size() != 2) { throw new IllegalArgumentException("The MONTHS_BETWEEN function should have two arguments. This function has " + function.getArguments().size()); } return getSqlForMonthsBetween(function.getArguments().get(0), function.getArguments().get(1)); case COALESCE: if (function.getArguments().size() == 0) { throw new IllegalArgumentException("The COALESCE function requires at least one argument. This function has " + function.getArguments().size()); } return getSqlForCoalesce(function); case GREATEST: if (function.getArguments().size() == 0) { throw new IllegalArgumentException("The GREATEST function requires at least one argument. This function has " + function.getArguments().size()); } return getSqlForGreatest(function); case LEAST: if (function.getArguments().size() == 0) { throw new IllegalArgumentException("The LEAST function requires at least one argument. This function has " + function.getArguments().size()); } return getSqlForLeast(function); case TRIM: if (function.getArguments().size() != 1) { throw new IllegalArgumentException("The TRIM function should have one argument. This function has " + function.getArguments().size()); } return getSqlForTrim(function); case LEFT_TRIM: if (function.getArguments().size() != 1) { throw new IllegalArgumentException("The LEFT_TRIM function should have one argument. This function has " + function.getArguments().size()); } return getSqlForLeftTrim(function); case RIGHT_TRIM: if (function.getArguments().size() != 1) { throw new IllegalArgumentException("The RIGHT_TRIM function should have one argument. 
This function has " + function.getArguments().size()); } return getSqlForRightTrim(function); case ADD_DAYS: if (function.getArguments().size() != 2) { throw new IllegalArgumentException("The ADD_DAYS function should have two arguments. This function has " + function.getArguments().size()); } return getSqlForAddDays(function); case ADD_MONTHS: if (function.getArguments().size() != 2) { throw new IllegalArgumentException("The ADD_MONTHS function should have two arguments. This function has " + function.getArguments().size()); } return getSqlForAddMonths(function); case ROUND: if (function.getArguments().size() != 2) { throw new IllegalArgumentException("The ROUND function should have two arguments. This function has " + function.getArguments().size()); } return getSqlForRound(function); case FLOOR: if (function.getArguments().size() != 1) { throw new IllegalArgumentException("The FLOOR function should have one argument. This function has " + function.getArguments().size()); } return getSqlForFloor(function); case RANDOM: if (function.getArguments().size() != 0) { throw new IllegalArgumentException("The " + function.getType() + " function should have no arguments. This function has " + function.getArguments().size()); } return getSqlForRandom(); case RANDOM_STRING: if (function.getArguments().size() != 1) { throw new IllegalArgumentException("The RANDOM_STRING function should have one argument. This function has " + function.getArguments().size()); } return getSqlForRandomString(function); case LOWER: if (function.getArguments().size() != 1) { throw new IllegalArgumentException("The LOWER function should have one argument. This function has " + function.getArguments().size()); } return getSqlForLower(function); case UPPER: if (function.getArguments().size() != 1) { throw new IllegalArgumentException("The UPPER function should have one argument. 
This function has " + function.getArguments().size()); } return getSqlForUpper(function); case POWER: if (function.getArguments().size() != 2) { throw new IllegalArgumentException("The POWER function should have two arguments. This function has " + function.getArguments().size()); } return getSqlForPower(function); case LEFT_PAD: if (function.getArguments().size() != 3) { throw new IllegalArgumentException("The LEFT_PAD function should have three arguments. This function has " + function.getArguments().size()); } return getSqlForLeftPad(function.getArguments().get(0), function.getArguments().get(1), function.getArguments().get(2)); case LAST_DAY_OF_MONTH: if (function.getArguments().size() != 1) { throw new IllegalArgumentException("The LAST_DAY_OF_MONTH function should have one argument. This function has " + function.getArguments().size()); } return getSqlForLastDayOfMonth(function.getArguments().get(0)); default: throw new UnsupportedOperationException("This database does not currently support the [" + function.getType() + "] function"); } } private void checkSingleArgument(Function function) { if (function.getArguments().size() != 1) { throw new IllegalArgumentException("The " + function.getType() + " function should have only one argument. This function has " + function.getArguments().size()); } } /** * Converts the count function into SQL. * * @return a string representation of the SQL */ protected String getSqlForCount() { return "COUNT(*)"; } /** * Converts the count function into SQL. * * @param function the function details * @return a string representation of the SQL */ protected String getSqlForCount(Function function) { return "COUNT(" + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Converts the count function into SQL. 
* * @param function the function details * @return a string representation of the SQL */ protected String getSqlForCountDistinct(Function function) { return "COUNT(DISTINCT " + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Converts the average function into SQL. * * @param function the function details * @return a string representation of the SQL */ protected String getSqlForAverage(Function function) { return "AVG(" + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Converts the average function into SQL. * * @param function the function details * @return a string representation of the SQL */ protected String getSqlForAverageDistinct(Function function) { return "AVG(DISTINCT " + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Converts the substring function into SQL. * * @param function the function details * @return a string representation of the SQL */ protected String getSqlForSubstring(Function function) { return getSubstringFunctionName() + "(" + getSqlFrom(function.getArguments().get(0)) + ", " + getSqlFrom(function.getArguments().get(1)) + ", " + getSqlFrom(function.getArguments().get(2)) + ")"; } /** * Converts the coalesce function into SQL. * * @param function the function details * @return a string representation of the SQL */ protected String getSqlForCoalesce(Function function) { StringBuilder expression = new StringBuilder(); expression.append(getCoalesceFunctionName()).append('('); boolean first = true; for (AliasedField f : function.getArguments()) { if (!first) { expression.append(", "); } expression.append(getSqlFrom(f)); first = false; } expression.append(')'); return expression.toString(); } /** * Converts the greatest function into SQL. 
* * @param function the function details * @return a string representation of the SQL */ protected String getSqlForGreatest(Function function) { return getGreatestFunctionName() + '(' + Joiner.on(", ").join(function.getArguments().stream().map(f -> getSqlFrom(f)).iterator()) + ')'; } /** * Converts the least function into SQL. * * @param function the function details * @return a string representation of the SQL */ protected String getSqlForLeast(Function function) { return getLeastFunctionName() + '(' + Joiner.on(", ").join(function.getArguments().stream().map(f -> getSqlFrom(f)).iterator()) + ')'; } /** * Converts the max function into SQL. * * @param function the function details * @return a string representation of the SQL */ protected String getSqlForMax(Function function) { return "MAX(" + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Converts the min function into SQL. * * @param function the function details * @return a string representation of the SQL */ protected String getSqlForMin(Function function) { return "MIN(" + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Converts the sum function into SQL. * * @param function the function details * @return a string representation of the SQL */ protected String getSqlForSum(Function function) { return "SUM(" + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Converts the sum function into SQL. * * @param function the function details * @return a string representation of the SQL */ protected String getSqlForSumDistinct(Function function) { return "SUM(DISTINCT " + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Converts the some function into SQL. * * @param function the function details * @return a string representation of the SQL */ protected String getSqlForSome(Function function) { return getSqlForMax(function); } /** * Converts the every function into SQL. 
* * @param function the function details * @return a string representation of the SQL */ protected String getSqlForEvery(Function function) { return getSqlForMin(function); }; /** * Converts the power function into SQL. * * @param function the function to convert. * @return a string representation of the SQL. * @see org.alfasoftware.morf.sql.element.Function#power(AliasedField, * AliasedField) */ protected String getSqlForPower(Function function) { return String.format("POWER(%s, %s)", getSqlFrom(function.getArguments().get(0)), getSqlFrom(function.getArguments().get(1))); } /** * Converts the mod function into SQL. * * @param function the function to convert. * @return a string representation of the SQL. */ protected String getSqlForMod(Function function) { return String.format("MOD(%s, %s)", getSqlFrom(function.getArguments().get(0)), getSqlFrom(function.getArguments().get(1))); } /** * Converts the ROUND function into SQL. * * @param function the function to convert. * @return a string representation of the SQL. * @see org.alfasoftware.morf.sql.element.Function#round(AliasedField, * AliasedField) */ protected String getSqlForRound(Function function) { return "ROUND(" + getSqlFrom(function.getArguments().get(0)) + ", " + getSqlFrom(function.getArguments().get(1)) + ")"; } /** * Converts the FLOOR function into SQL. * * @param function the function to convert. * @return a string representation of the SQL. * @see org.alfasoftware.morf.sql.element.Function#floor(AliasedField) */ protected String getSqlForFloor(Function function) { return "FLOOR(" + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Converts the LENGTH function into SQL. * * @param function the function to convert. * @return a string representation of the SQL. 
* @see org.alfasoftware.morf.sql.element.Function#length(AliasedField) */ protected String getSqlforLength(Function function) { return String.format("LENGTH(%s)", getSqlFrom(function.getArguments().get(0))); } /** * Converts the function get LENGTH of Blob data or field into SQL. * Use LENGTH instead of OCTET_LENGTH as they are synonymous in MySQl and PostGreSQL. In H2 LENGTH returns the correct * number of bytes, whereas OCTET_LENGTH returns 2 times the byte length. * @param function the function to convert. * @return a string representation of the SQL. * @see org.alfasoftware.morf.sql.element.Function#blobLength(AliasedField) */ protected String getSqlforBlobLength(Function function) { return String.format("LENGTH(%s)", getSqlFrom(function.getArguments().get(0))); } /** * @return The name of the coalesce function */ protected String getCoalesceFunctionName() { return "COALESCE"; } /** * @return The name of the GREATEST function */ protected String getGreatestFunctionName() { return "GREATEST"; } /** * @return The name of the LEAST function */ protected String getLeastFunctionName() { return "LEAST"; } /** * Performs the ANSI SQL date difference returning an interval in days. * * @param toDate The date we are subtracting from * @param fromDate The date we are subtracting * @return a string representation of the SQL */ protected abstract String getSqlForDaysBetween(AliasedField toDate, AliasedField fromDate); /** * The number of whole months between two dates. The logic used is equivalent * to * {@link Months#monthsBetween(org.joda.time.ReadableInstant, org.joda.time.ReadableInstant)} * . * <p> * As an example, assuming two dates are in the same year and the * {@code fromDate} is from two months prior to the {@code toDate} (i.e. 
* {@code MONTH(toDate) - MONTH(fromDate) = 2)} then: * </p> * <ul> * <li>If the {@code toDate} day of the month is greater than or equal to the * {@code fromDate} day of the month, then the difference is two months; * <li>If the {@code toDate} day of the month lies on the end of the month, * then the difference is two months, to account for month length differences * (e.g. 31 Jan &gt; 28 Feb = 1; 30 Jan &gt; 27 Feb = 0); * <li>Otherwise, the difference is one (e.g. if the day of {@code fromDate} * &gt; day of {@code toDate}). * </ul> * * @param toDate The date we are subtracting from * @param fromDate The date we are subtracting * @return a string representation of the SQL */ protected abstract String getSqlForMonthsBetween(AliasedField toDate, AliasedField fromDate); /** * Produce SQL for finding the last day of the month * * @param date the date for which the last day of its month will be found. * @return a string representation of the SQL for finding the last day of the month. */ protected abstract String getSqlForLastDayOfMonth(AliasedField date); /** * Gets the function name required to perform a substring command. * <p> * The default is provided here and should be overridden in child classes as * neccessary. * </p> * * @return The substring function name. */ protected String getSubstringFunctionName() { return "SUBSTRING"; } /** * Converts the cast function into SQL. * * @param cast the cast to convert. * @return a string representation of the SQL. */ protected String getSqlFrom(Cast cast) { return String.format("CAST(%s AS %s)", getSqlFrom(cast.getExpression()), getDataTypeRepresentation(cast.getDataType(), cast.getWidth(), cast.getScale())); } /** * Gets the column representation for the datatype, etc. * * @param dataType the column datatype. * @param width the column width. * @param scale the column scale. * @return a string representation of the column definition. 
*/ protected String getDataTypeRepresentation(DataType dataType, int width, int scale) { return getColumnRepresentation(dataType, width, scale); } /** * Gets the column representation for the datatype, etc. * * @param dataType the column datatype. * @param width the column width. * @param scale the column scale. * @return a string representation of the column definition. */ protected String getColumnRepresentation(DataType dataType, int width, int scale) { switch (dataType) { case STRING: return width == 0 ? "VARCHAR" : String.format("VARCHAR(%d)", width); case DECIMAL: return width == 0 ? "DECIMAL" : String.format("DECIMAL(%d,%d)", width, scale); case DATE: return "DATE"; case BOOLEAN: return "BIT"; case BIG_INTEGER: return "BIGINT"; case INTEGER: return "INTEGER"; case BLOB: return "BLOB"; case CLOB: return "CLOB"; default: throw new UnsupportedOperationException("Cannot map column with type [" + dataType + "]"); } } /** * Converts the isNull function into SQL. * * @param function the function to convert. * @return a string representation of the SQL. */ protected String getSqlForIsNull(Function function) { return getSqlForCoalesce(function); } /** * Converts the DateToYyyymmdd function into SQL. Assumes an 8 digit date is * supplied in YYYYMMDD using the {@linkplain DataType#STRING} format. TODO: * does it? * * @param function the function to convert. * @return a string representation of the SQL. */ protected abstract String getSqlForDateToYyyymmdd(Function function); /** * Converts the DateToYyyymmddHHmmss function into SQL. Assumes an 8 digit * date concatenated with a 6 digit time is supplied in YYYYMMDDHHmmss using * the {@linkplain DataType#STRING} format. * * @param function the function to convert. * @return a string representation of the SQL. */ protected abstract String getSqlForDateToYyyymmddHHmmss(Function function); /** * Converts the YYYYMMDDToDate function into SQL. 
Assumes an 8 digit date is * supplied in YYYYMMDD using the {@linkplain DataType#STRING} format. * * @param function the function to convert. * @return a string representation of the SQL. */ protected abstract String getSqlForYYYYMMDDToDate(Function function); /** * Converts the current time function into SQL and returns the timestamp of the database in UTC. * * @param function the function to convert. * @return a string representation of the SQL. */ protected abstract String getSqlForNow(Function function); /** * Converts the TRIM function into SQL. * * @param function the function to convert. * @return a string representation of the SQL. */ protected String getSqlForTrim(Function function) { return "TRIM(" + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Converts the LEFT_TRIM function into SQL. * * @param function the function to convert. * @return a string representation of the SQL. */ protected String getSqlForLeftTrim(Function function) { return "LTRIM(" + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Converts the RIGHT_TRIM function into SQL. * * @param function the function to convert. * @return a string representation of the SQL. */ protected String getSqlForRightTrim(Function function) { return "RTRIM(" + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Converts the LEFT_PAD function into SQL. This is the same format used for * H2, MySQL and Oracle. SqlServer implementation overrides this function. * * @param field The field to pad * @param length The length of the padding * @param character The character to use for the padding * @return string representation of the SQL. */ protected String getSqlForLeftPad(AliasedField field, AliasedField length, AliasedField character) { return "LPAD(" + getSqlFrom(field) + ", " + getSqlFrom(length) + ", " + getSqlFrom(character) + ")"; } /** * Converts the ADD_DAYS function into SQL. * * @param function the function to convert. * @return a string representation of the SQL. 
*/ protected abstract String getSqlForAddDays(Function function); /** * Converts the ADD_MONTHS function into SQL. * * @param function the function to convert. * @return a string representation of the SQL. */ protected abstract String getSqlForAddMonths(Function function); /** * Converts the RANDOM function into SQL. This returns a random number between 0 and 1. * * @return a string representation of the SQL. */ protected String getSqlForRandom() { return "RAND()"; } /** * Converts the RANDOM_STRING function into SQL. * * @param function the function representing the desired length of the * generated string. * @return a string representation of the SQL. * @see org.alfasoftware.morf.sql.element.Function#randomString(AliasedField) */ protected abstract String getSqlForRandomString(Function function); /** * Converts the <code>LOWER</code> function into SQL. * * @param function the function to convert. * @return a string representation of the SQL. */ protected String getSqlForLower(Function function) { return "LOWER(" + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Converts the <code>UPPER</code> function into SQL. * * @param function the function to convert. * @return a string representation of the SQL. */ protected String getSqlForUpper(Function function) { return "UPPER(" + getSqlFrom(function.getArguments().get(0)) + ")"; } /** * Convert a {@link FieldFromSelect} into sql select. * * @param field the field to generate a sql for * @return a string representation of the field from select */ protected String getSqlFrom(FieldFromSelect field) { return getSqlFrom(field.getSelectStatement()); } /** * Convert a {@link FieldFromSelect} into sql select. * * @param field the field to generate a sql for * @return a string representation of the field from select */ protected String getSqlFrom(FieldFromSelectFirst field) { return getSqlFrom(field.getSelectFirstStatement()); } /** * Converts a {@link MathsField} into SQL. * * @param field the field to convert. 
* @return a string representation of the field. */ protected String getSqlFrom(MathsField field) { return String.format("%s %s %s", getSqlFrom(field.getLeftField()), field.getOperator(), getSqlFrom(field.getRightField())); } /** * Converts a {@link BracketedExpression} into SQL. * * @param expression the bracket expression to convert. * @return a string representation of the expression. */ protected String getSqlFrom(BracketedExpression expression) { return String.format("(%s)", getSqlFrom(expression.getInnerExpression())); } /** * Convert a {@link String} containing a value into a SQL string literal. * * @param literalValue value of string literal. * @return quoted string literal. */ protected String getSqlFrom(String literalValue) { return makeStringLiteral(literalValue); } /** * Convert a {@link LocalDate} to a SQL string literal. * * @param literalValue value of date literal. * @return SQL date literal. */ protected String getSqlFrom(LocalDate literalValue) { return String.format("DATE '%s'", literalValue.toString("yyyy-MM-dd")); } /** * Convert a boolean to a SQL string literal. * * @param literalValue value of boolean literal. * @return SQL boolean literal. */ protected String getSqlFrom(Boolean literalValue) { return literalValue ? "1" : "0"; } /** * Convert the an Object criterion value (i.e. right hand side) to valid SQL * based on its type. 
* * @param value the object to convert to a string * @return a string representation of the object */ protected String getSqlForCriterionValue(Object value) { if (value instanceof String) { return getSqlFrom((String) value); } if (value instanceof Boolean) { return getSqlFrom((Boolean) value); } if (value instanceof LocalDate) { return getSqlFrom((LocalDate) value); } if (value instanceof Criterion) { return getSqlFrom((Criterion) value); } if (value instanceof AliasedField) { return getSqlFrom((AliasedField) value); } return value.toString(); } /** * Convert a criterion into a string expression of the form * "[operand] [operator] [operand]". * * @param criterion the criterion to convert * @param operator the operator to use in the expression * @return a string representation of the criterion */ protected String getOperatorLine(Criterion criterion, String operator) { return getSqlFrom(criterion.getField()) + " " + operator + " " + getSqlForCriterionValue(criterion.getValue()); } /** * Converts a structured {@link SelectStatement} to the equivalent SQL text. * * @param statement the statement to convert * @return a string containing the SQL to run against the database */ public String convertStatementToSQL(SelectStatement statement) { if (statement == null) { throw new IllegalArgumentException(CANNOT_CONVERT_NULL_STATEMENT_TO_SQL); } return getSqlFrom(statement); } /** * Converts a structured {@link SelectFirstStatement} to the equivalent SQL * text. 
* * @param statement the statement to convert * @return a string containing the SQL to run against the database */ public String convertStatementToSQL(SelectFirstStatement statement) { if (statement == null) { throw new IllegalArgumentException(CANNOT_CONVERT_NULL_STATEMENT_TO_SQL); } if (statement.getOrderBys().isEmpty()) { throw new IllegalArgumentException("Invalid select first statement - missing order by clause"); } return getSqlFrom(statement); } /** * Converts a structured {@code SELECT} statement to a hash representation. * * @param statement the statement to convert * @return A hash representation of {@code statement}. */ public String convertStatementToHash(SelectStatement statement) { return md5HashHexEncoded(convertStatementToSQL(statement)); } /** * Converts a structured {@code SELECT} statement to a hash representation. * * @param statement the statement to convert * @return A hash representation of {@code statement}. */ public String convertStatementToHash(SelectFirstStatement statement) { return md5HashHexEncoded(convertStatementToSQL(statement)); } /** * @param toHash the String to convert * @return the md5 hash of the string. */ @SuppressWarnings("deprecation") private String md5HashHexEncoded(String toHash) { try { return CharSource.wrap(toHash).asByteSource(StandardCharsets.UTF_8).hash(Hashing.md5()).toString(); } catch (IOException e) { throw new RuntimeException("error when hashing string [" + toHash + "]", e); } } /** * Creates an SQL statement to insert values with positional parameterised * fields based on the insert statement specified. 
 *
 * @param statement the insert statement to build an SQL query for
 * @param metadata the metadata for the database
 * @return a string containing a parameterised insert query for the specified
 *         table
 */
public String buildParameterisedInsert(InsertStatement statement, Schema metadata) {
  String destinationTableName = statement.getTable().getName();

  if (StringUtils.isBlank(destinationTableName)) {
    throw new IllegalArgumentException("Cannot create parameterised SQL for a blank table");
  }

  if (metadata == null) {
    throw new IllegalArgumentException("Cannot specify null for the source metadata");
  }

  if (!metadata.tableExists(destinationTableName)) {
    throw new IllegalArgumentException("Cannot create parameterised SQL for table [" + destinationTableName + "] without metadata");
  }

  Table destinationTable = metadata.getTable(destinationTableName);

  // sqlBuilder accumulates the column list while values accumulates the
  // matching VALUES(...) clause; the two are appended in lockstep below.
  StringBuilder sqlBuilder = new StringBuilder();
  StringBuilder values = new StringBuilder(") VALUES (");

  // -- Work out the literal values...
  //
  // Keys are upper-cased aliases so the lookup against column names below is
  // case-insensitive. Explicit fields take their literal value; field defaults
  // are added afterwards (and can overwrite an alias with the same key).
  Map<String, String> literalValueMap = new HashMap<>();
  for (AliasedField f : statement.getFields()) {
    literalValueMap.put(f.getAlias().toUpperCase(), literalValue(f));
  }
  for (Entry<String, AliasedField> value : statement.getFieldDefaults().entrySet()) {
    literalValueMap.put(value.getKey().toUpperCase(), literalValue(value.getValue()));
  }

  // -- Add the preamble...
  //
  sqlBuilder.append(getSqlForInsertInto(statement));
  sqlBuilder.append(schemaNamePrefix(statement.getTable()));
  sqlBuilder.append(destinationTableName);
  sqlBuilder.append(" (");

  boolean first = true;
  for (Column currentColumn : destinationTable.columns()) {
    if (!first) {
      sqlBuilder.append(", ");
      values.append(", ");
    }
    first = false;

    sqlBuilder.append(currentColumn.getName());

    // Columns without a literal become positional SQL parameters.
    String literalValue = literalValueMap.get(currentColumn.getName().toUpperCase());
    if (literalValue == null) {
      values.append(getSqlFrom(new SqlParameter(currentColumn)));
    } else {
      values.append(literalValue);
    }
  }

  values.append(")");
  sqlBuilder.append(values);

  return sqlBuilder.toString();
}


/**
 * Creates an SQL statement to insert specific values into the columns
 * specified.
 *
 * @param statement The insert statement to build an SQL query for.
 * @param metadata the database schema. If null, the SQL statement will be
 *          treated "as is". If not null, the schema will be used to decorate
 *          the statement further with the default values from any columns not
 *          specified.
 * @param idTable the ID table. Only required if the table has a
 *          non-autonumbered id column and the schema has been supplied.
 * @return a string containing a specific value insert query for the specified
 *         table and column values.
 */
protected List<String> buildSpecificValueInsert(InsertStatement statement, Schema metadata, Table idTable) {
  // May also accumulate autonumber-maintenance statements ahead of the insert.
  List<String> result = new LinkedList<>();

  String destinationTableName = statement.getTable().getName();

  if (StringUtils.isBlank(destinationTableName)) {
    throw new IllegalArgumentException("Cannot create specified value insert SQL for a blank table");
  }

  // Column list and VALUES(...) clause are built in lockstep.
  StringBuilder sqlBuilder = new StringBuilder();
  StringBuilder values = new StringBuilder("VALUES (");

  // -- Add the preamble...
  //
  sqlBuilder.append(getSqlForInsertInto(statement));
  sqlBuilder.append(schemaNamePrefix(statement.getTable()));
  sqlBuilder.append(destinationTableName);
  sqlBuilder.append(" (");

  // Tracks (upper-cased) columns already emitted so schema defaults below skip them.
  Set<String> columnNamesAdded = new HashSet<>();

  boolean firstField = true;
  for (AliasedField fieldWithValue : statement.getValues()) {
    if (!firstField) {
      sqlBuilder.append(", ");
      values.append(", ");
    }

    if (StringUtils.isBlank(fieldWithValue.getAlias())) {
      throw new IllegalArgumentException("Field value in insert statement does not have an alias");
    }

    sqlBuilder.append(fieldWithValue.getAlias());
    values.append(getSqlFrom(fieldWithValue));

    columnNamesAdded.add(fieldWithValue.getAlias().toUpperCase());

    firstField = false;
  }

  // If we have a schema, then we can add defaults for missing column values
  if (metadata != null) {
    for (Column currentColumn : metadata.getTable(destinationTableName).columns()) {

      // Default date columns to null and skip columns we've already added.
      if (columnNamesAdded.contains(currentColumn.getName().toUpperCase())) {
        continue;
      }

      // Allow identity columns to be defaulted by the database - nothing to
      // do
      if (currentColumn.isAutoNumbered()) {
        continue;
      }

      // Non-autonumbered identity columns should be populated using the id
      // table
      if (currentColumn.getName().equalsIgnoreCase("id")) {
        sqlBuilder.append(", ");
        values.append(", ");

        // The autonumber update statements must run before the insert itself,
        // hence they are added to result ahead of the final SQL.
        result.addAll(buildSimpleAutonumberUpdate(statement.getTable(), "id", idTable, ID_INCREMENTOR_TABLE_COLUMN_NAME,
          ID_INCREMENTOR_TABLE_COLUMN_VALUE));

        String fieldValue = autoNumberId(statement, idTable);
        if (StringUtils.isNotEmpty(fieldValue)) {
          sqlBuilder.append("id");
          values.append(fieldValue);
        }
        continue;
      }

      // If there is a default for the field, use it
      if (statement.getFieldDefaults().containsKey(currentColumn.getName())) {
        AliasedField fieldWithValue = statement.getFieldDefaults().get(currentColumn.getName());
        sqlBuilder.append(", ");
        values.append(", ");

        sqlBuilder.append(fieldWithValue.getAlias());
        values.append(literalValue(fieldWithValue));
        continue;
      }
    }
  }

  sqlBuilder.append(") ");
  values.append(")");

  sqlBuilder.append(values);

  result.add(sqlBuilder.toString());

  return result;
}


/**
 * Builds SQL to get the autonumber value.
 *
 * @param statement the insert statement to get for.
 * @param idTable the ID Table.
 * @return SQL fetching the AutoNumber value.
 */
private String autoNumberId(InsertStatement statement, Table idTable) {
  AliasedField idValue = nextIdValue(statement.getTable(), null, idTable, ID_INCREMENTOR_TABLE_COLUMN_NAME, ID_INCREMENTOR_TABLE_COLUMN_VALUE);
  return getSqlFrom(idValue);
}


/**
 * Infers the value of the field in a format suitable for direct substitution
 * into an sql statement.
 * <p>
 * If the supplied field is not a {@link FieldLiteral}, {@code null} will be
 * returned. If it is, the type will be taken directly from the
 * {@linkplain FieldLiteral} itself and single quotes (') added to the value
 * as appropriate.
 * </p>
 * <p>
 * This method also escapes the characters in the value to be suitable to pass
 * to an SQL query.
 * </p>
 *
 * @param field The field to generate the SQL literal for.
 * @return The literal value, or {@code null} when the field is not a literal.
 */
private String literalValue(AliasedField field) {
  if (field instanceof FieldLiteral && !(field instanceof NullFieldLiteral)) {
    return getSqlFrom((FieldLiteral) field);
  }

  // A NullFieldLiteral becomes the SQL NULL keyword rather than a quoted value.
  if (field instanceof NullFieldLiteral) {
    return "null";
  }

  return null;
}


/**
 * Creates an SQL statement to delete rows from a table based on the
 * {@linkplain DeleteStatement} specified.
* * @param statement the delete statement to build an SQL query for * @return a string containing a parameterised delete query for the specified * table */ protected String getSqlFrom(DeleteStatement statement) { String destinationTableName = statement.getTable().getName(); if (StringUtils.isBlank(destinationTableName)) { throw new IllegalArgumentException("Cannot create SQL for a blank table"); } StringBuilder sqlBuilder = new StringBuilder(); // Add the preamble sqlBuilder.append("DELETE "); // For appropriate dialects, append the delete limit here if (statement.getLimit().isPresent() && getDeleteLimitPreFromClause(statement.getLimit().get()).isPresent()) { sqlBuilder.append(getDeleteLimitPreFromClause(statement.getLimit().get()).get() + " "); } sqlBuilder.append("FROM "); // Now add the from clause sqlBuilder.append(schemaNamePrefix(statement.getTable())); sqlBuilder.append(destinationTableName); // Add a table alias if necessary if (!statement.getTable().getAlias().equals("")) { sqlBuilder.append(String.format(" %s", statement.getTable().getAlias())); } // Prepare to append the where clause or, for appropriate dialects, the delete limit if (statement.getWhereCriterion() != null || statement.getLimit().isPresent() && getDeleteLimitWhereClause(statement.getLimit().get()).isPresent()) { sqlBuilder.append(" WHERE "); } // Now put the where clause in if (statement.getWhereCriterion() != null) { sqlBuilder.append(getSqlFrom(statement.getWhereCriterion())); } // Append the delete limit, for appropriate dialects if (statement.getLimit().isPresent() && getDeleteLimitWhereClause(statement.getLimit().get()).isPresent()) { if (statement.getWhereCriterion() != null) { sqlBuilder.append(" AND "); } sqlBuilder.append(getDeleteLimitWhereClause(statement.getLimit().get()).get()); } // For appropriate dialects, append the delete limit suffix if (statement.getLimit().isPresent() && getDeleteLimitSuffix(statement.getLimit().get()).isPresent()) { sqlBuilder.append(" " + 
getDeleteLimitSuffix(statement.getLimit().get()).get()); } return sqlBuilder.toString(); } /** * Returns the SQL that specifies the deletion limit ahead of the FROM clause, if any, for the dialect. * * @param limit The delete limit. * @return The SQL fragment. */ protected Optional<String> getDeleteLimitPreFromClause(@SuppressWarnings("unused") int limit) { return Optional.empty(); }; /** * Returns the SQL that specifies the deletion limit in the WHERE clause, if any, for the dialect. * * @param limit The delete limit. * @return The SQL fragment. */ protected Optional<String> getDeleteLimitWhereClause(@SuppressWarnings("unused") int limit) { return Optional.empty(); }; /** * Returns the SQL that specifies the deletion limit as a suffix, if any, for the dialect. * * @param limit The delete limit. * @return The SQL fragment. */ protected Optional<String> getDeleteLimitSuffix(@SuppressWarnings("unused") int limit) { return Optional.empty(); }; /** * Creates an SQL statement to update values with positional parameterised * fields based on the update statement specified. 
   *
   * @param statement the insert statement to build an SQL query for
   * @return a string containing a parameterised insert query for the specified
   *         table
   */
  protected String getSqlFrom(UpdateStatement statement) {
    String destinationTableName = statement.getTable().getName();

    if (StringUtils.isBlank(destinationTableName)) {
      throw new IllegalArgumentException("Cannot create SQL for a blank table");
    }

    StringBuilder sqlBuilder = new StringBuilder();

    // Add the preamble
    sqlBuilder.append("UPDATE ");
    // Dialect hook for anything that must appear between UPDATE and the table name.
    sqlBuilder.append(updateStatementPreTableDirectives(statement));

    // Now add the from clause
    sqlBuilder.append(schemaNamePrefix(statement.getTable()));
    sqlBuilder.append(destinationTableName);

    // Add a table alias if necessary
    if (!statement.getTable().getAlias().equals("")) {
      sqlBuilder.append(String.format(" %s", statement.getTable().getAlias()));
    }

    // Put in the standard fields
    sqlBuilder.append(getUpdateStatementSetFieldSql(statement.getFields()));

    // Now put the where clause in
    if (statement.getWhereCriterion() != null) {
      sqlBuilder.append(" WHERE ");
      sqlBuilder.append(getSqlFrom(statement.getWhereCriterion()));
    }

    return sqlBuilder.toString();
  }


  /**
   * Creates an SQL statement to merge values with into a table.
   *
   * @param statement the insert statement to build an SQL query for.
   * @return a string containing a parameterised insert query for the specified
   *         table.
   */
  protected String getSqlFrom(MergeStatement statement) {
    if (StringUtils.isBlank(statement.getTable().getName())) {
      throw new IllegalArgumentException("Cannot create SQL for a blank table");
    }

    checkSelectStatementHasNoHints(statement.getSelectStatement(), "MERGE may not be used with SELECT statement hints");

    final StringBuilder sqlBuilder = new StringBuilder();

    // MERGE INTO schema.Table
    sqlBuilder.append("MERGE INTO ")
              .append(schemaNamePrefix(statement.getTable()))
              .append(statement.getTable().getName());

    // USING (SELECT ...) xmergesource
    sqlBuilder.append(" USING (")
              .append(getSqlFrom(statement.getSelectStatement()))
              .append(") ")
              .append(MERGE_SOURCE_ALIAS);

    // ON (Table.id = xmergesource.id)
    sqlBuilder.append(" ON (")
              .append(matchConditionSqlForMergeFields(statement, MERGE_SOURCE_ALIAS, statement.getTable().getName()))
              .append(")");

    // WHEN MATCHED THEN UPDATE ...
    // Only emitted when there is at least one non-key field to update.
    if (getNonKeyFieldsFromMergeStatement(statement).iterator().hasNext()) {
      Iterable<AliasedField> updateExpressions = getMergeStatementUpdateExpressions(statement);
      String updateExpressionsSql = getMergeStatementAssignmentsSql(updateExpressions);
      sqlBuilder.append(" WHEN MATCHED THEN UPDATE SET ")
                .append(updateExpressionsSql);
    }

    // WHEN NOT MATCHED THEN INSERT ...
    // Insert columns and values are both driven by the source select's fields,
    // which are assumed to be aliased to match the target table.
    String insertFieldsSql = Joiner.on(", ").join(FluentIterable.from(statement.getSelectStatement().getFields()).transform(AliasedField::getImpliedName));
    String insertValuesSql = Joiner.on(", ").join(FluentIterable.from(statement.getSelectStatement().getFields()).transform(field -> MERGE_SOURCE_ALIAS + "." + field.getImpliedName()));
    sqlBuilder.append(" WHEN NOT MATCHED THEN INSERT (")
              .append(insertFieldsSql)
              .append(") VALUES (")
              .append(insertValuesSql)
              .append(")");

    return sqlBuilder.toString();
  }


  /**
   * Convert a {@link MergeStatement.InputField} into SQL.
   *
   * @param field the field to generate SQL for
   * @return a string representation of the field
   */
  protected String getSqlFrom(MergeStatement.InputField field) {
    return MERGE_SOURCE_ALIAS + "." + field.getName();
  }


  /**
   * Throws {@link IllegalArgumentException} if the select statement has hints.
   *
   * @param statement The select statement.
   * @param errorMessage The message for the exception.
   */
  protected void checkSelectStatementHasNoHints(SelectStatement statement, String errorMessage) {
    if (!statement.getHints().isEmpty()) {
      throw new IllegalArgumentException(errorMessage);
    }
  }


  /**
   * Returns the SET clause for an SQL UPDATE statement based on the
   * {@link List} of {@link AliasedField}s provided.
   *
   * @param fields The {@link List} of {@link AliasedField}s to create the SET
   *          statement from
   * @return The SET clause as a string
   */
  protected String getUpdateStatementSetFieldSql(List<AliasedField> fields) {
    return " SET " + getUpdateStatementAssignmentsSql(fields);
  }


  /**
   * Returns the assignments for the SET clause of an SQL UPDATE statement
   * based on the {@link List} of {@link AliasedField}s provided.
   *
   * @param fields The {@link List} of {@link AliasedField}s to create the assignments from
   * @return the assignments for the SET clause as a string
   */
  protected String getUpdateStatementAssignmentsSql(Iterable<AliasedField> fields) {
    // Each assignment is "alias = <sql for field>", comma-separated.
    Iterable<String> setStatements = Iterables.transform(fields, field -> field.getAlias() + " = " + getSqlFrom(field));
    return Joiner.on(", ").join(setStatements);
  }


  /**
   * Creates a new {@link InsertStatement} where the source table has been
   * expanded out into a {@link SelectStatement}.
   * <p>
   * The expansion will match fields in the destination to fields in the source
   * table using their names. If a field with the matching name cannot be found
   * then the literal value will be firstly sourced from the
   * <i>fieldDefaults</i> map. If it cannot be found in that map, then the
   * default for the field type will be used.
   * </p>
   *
   * @param insertStatement the source statement to expand
   * @param metadata the table metadata from the database
   * @return a new instance of {@link InsertStatement} with an expanded from
   *         table definition
   */
  protected InsertStatement expandInsertStatement(InsertStatement insertStatement, Schema metadata) {
    // If we're neither specified the source table nor the select statement then
    // throw an exception
    if (insertStatement.getFromTable() == null && insertStatement.getSelectStatement() == null) {
      throw new IllegalArgumentException("Cannot expand insert statement as it has no from table specified");
    }

    // If we've already got a select statement then just return a copy of the
    // source insert statement
    if (insertStatement.getSelectStatement() != null) {
      return copyInsertStatement(insertStatement);
    }

    Map<String, AliasedField> fieldDefaults = insertStatement.getFieldDefaults();

    // Expand the from table
    String sourceTableName = insertStatement.getFromTable().getName();
    String destinationTableName = insertStatement.getTable().getName();

    // Perform a couple of checks
    if (!metadata.tableExists(sourceTableName)) {
      throw new IllegalArgumentException("Source table [" + sourceTableName + "] is not available in the database metadata");
    }

    if (!metadata.tableExists(destinationTableName)) {
      throw new IllegalArgumentException("Destination table [" + destinationTableName + "] is not available in the database metadata");
    }

    // Convert the source table field list to a map for convenience
    Map<String, Column> sourceColumns = new HashMap<>();
    for (Column currentColumn : metadata.getTable(sourceTableName).columns()) {
      // Convert everything to the same case to avoid match failure based on
      // case.
      sourceColumns.put(currentColumn.getName().toUpperCase(), currentColumn);
    }

    // Build up the select statement from field list
    SelectStatementBuilder selectStatementBuilder = SelectStatement.select();
    List<AliasedField> resultFields = new ArrayList<>();

    for (Column currentColumn : metadata.getTable(destinationTableName).columns()) {
      String currentColumnName = currentColumn.getName();

      // Add the destination column
      resultFields.add(new FieldReference(currentColumnName));

      // If there is a default for this column in the defaults list then use it
      if (fieldDefaults.containsKey(currentColumnName)) {
        selectStatementBuilder = selectStatementBuilder.fields(fieldDefaults.get(currentColumnName));
        continue;
      }

      // If there is a column in the source table with the same name then link
      // them
      // and move on to the next column
      if (sourceColumns.containsKey(currentColumnName.toUpperCase())) {
        selectStatementBuilder = selectStatementBuilder.fields(new FieldReference(currentColumnName));
        continue;
      }
      // NOTE(review): a destination column with neither a default nor a source
      // match is added to resultFields but contributes no select field here -
      // presumably handled downstream; confirm against callers.
    }
    // Set the source table
    SelectStatement selectStatement = selectStatementBuilder
        .from(insertStatement.getFromTable())
        .build();

    return InsertStatement.insert()
        .into(insertStatement.getTable())
        .fields(resultFields)
        .from(selectStatement)
        .build();
  }


  /**
   * Copies an insert statement to a duplicate instance.
   *
   * @param statement the {@linkplain InsertStatement} to copy
   * @return a new instance of the {@linkplain InsertStatement}
   */
  protected InsertStatement copyInsertStatement(InsertStatement statement) {
    return statement.shallowCopy().build();
  }


  /**
   * @return The {@link DatabaseType}
   */
  public abstract DatabaseType getDatabaseType();


  /**
   * Whether this table has any BLOB columns.
   *
   * @param table The table.
   * @return true if the table has one or more BLOB columns.
   */
  protected boolean tableHasBlobColumns(Table table) {
    for (Column column : table.columns()) {
      if (column.getType() == DataType.BLOB) {
        return true;
      }
    }
    return false;
  }


  /**
   * Builds a simple repair script for AutoNumbers that deletes and inserts a
   * value.
   *
   * @param dataTable the table to update for.
   * @param generatedFieldName Name of the field which has a generated value to
   *          build an update statement for.
   * @param autoNumberTable the table to insert the autonumber to.
   * @param nameColumn the name of the name column.
   * @param valueColumn the name of the value column.
   * @return SQL allowing the repair of the AutoNumber table.
   */
  private List<String> buildSimpleAutonumberUpdate(TableReference dataTable, String generatedFieldName, Table autoNumberTable, String nameColumn, String valueColumn) {
    String autoNumberName = getAutoNumberName(dataTable.getName());

    // The literal name "autonumber" is excluded from repair - returns no SQL.
    if (autoNumberName.equals("autonumber")) {
      return new ArrayList<>();
    }

    // Delete-then-insert: the new value row is seeded from the current maximum
    // in the data table (see getExistingMaxAutoNumberValue).
    List<String> sql = new ArrayList<>();
    sql.add(String.format("DELETE FROM %s where %s = '%s'", schemaNamePrefix(autoNumberTable) + autoNumberTable.getName(), nameColumn, autoNumberName));
    sql.add(String.format("INSERT INTO %s (%s, %s) VALUES('%s', (%s))", schemaNamePrefix(autoNumberTable) + autoNumberTable.getName(), nameColumn, valueColumn,
      autoNumberName, getExistingMaxAutoNumberValue(dataTable, generatedFieldName)));

    return sql;
  }


  /**
   * Builds SQL to get the maximum value of the specified column on the
   * specified {@code dataTable}.
   *
   * @param dataTable the table to query over.
   * @param fieldName Name of the field to query over for the max value.
   * @return SQL getting the maximum value from the {@code dataTable}.
   */
  protected String getExistingMaxAutoNumberValue(TableReference dataTable, String fieldName) {
    // COALESCE(MAX(field) + 1, 1): next value after the current max, or 1 for
    // an empty table.
    return getSqlFrom(new SelectStatement(Function.coalesce(
      new MathsField(Function.max(new FieldReference(fieldName)), MathsOperator.PLUS, new FieldLiteral(1)), new FieldLiteral(1))
      .as("CurrentValue")).from(dataTable));
  }


  /**
   * Creates a field reference to provide id column values.
   *
   * @param sourceTable the source table.
   * @param sourceReference a reference lookup to add the ID to.
   * @param autoNumberTable the name of the table to query over.
   * @param nameColumn the name of the column holding the Autonumber name.
   * @param valueColumn the name of the column holding the Autonumber value.
   * @return a field reference.
   */
  public AliasedField nextIdValue(TableReference sourceTable, TableReference sourceReference, Table autoNumberTable, String nameColumn, String valueColumn) {
    String autoNumberName = getAutoNumberName(sourceTable.getName());

    if (sourceReference == null) {
      // No source row reference: the id is simply the stored autonumber value
      // (defaulting to 1 if absent).
      return new FieldFromSelect(new SelectStatement(Function.coalesce(new FieldReference(valueColumn), new FieldLiteral(1))).from(
        new TableReference(autoNumberTable.getName(), autoNumberTable.isTemporary())).where(
        new Criterion(Operator.EQ, new FieldReference(nameColumn), autoNumberName)));
    } else {
      // With a source row reference: stored autonumber value (defaulting to 0)
      // plus the source row's id.
      return new MathsField(new FieldFromSelect(new SelectStatement(Function.coalesce(new FieldReference(valueColumn), new FieldLiteral(0)))
        .from(new TableReference(autoNumberTable.getName(), autoNumberTable.isTemporary())).where(
          new Criterion(Operator.EQ, new FieldReference(nameColumn), autoNumberName))), MathsOperator.PLUS, new FieldReference(
        sourceReference, "id"));
    }
  }


  /**
   * Gets the autonumber name for the {@code destinationReference}.
   *
   * @param destinationReference the table name to get the autonumber name for.
   * @return the autonumber name.
*/ protected String getAutoNumberName(String destinationReference) { String autoNumberName = destinationReference; if (autoNumberName.contains("_")) { autoNumberName = autoNumberName.substring(0, autoNumberName.lastIndexOf('_')); } return autoNumberName; } /** * @param identifier Unique identifier for trace file name, can be null. * @return Sql required to turn on tracing, or null if tracing is not * supported. */ public List<String> buildSQLToStartTracing(@SuppressWarnings("unused") String identifier) { return null; } /** * @return Sql required to turn on tracing, or null if tracing is not * supported. */ public List<String> buildSQLToStopTracing() { return null; } /** * Creates the SQL representation of a column data type. * * @param column The column to get the SQL representation for. * @param includeNullability Indicates whether or not the produced SQL should * include nullability of the column. * @param includeDefaultValue Indicates whether or not the produced SQL should * include the default value of the column. * @param includeColumnType ndicates whether or not the produced SQL should * include the type of the column. * @return The SQL representation for the column type. */ protected String sqlRepresentationOfColumnType(Column column, boolean includeNullability, boolean includeDefaultValue, boolean includeColumnType) { String sql = ""; StringBuilder suffix = new StringBuilder(""); if (includeDefaultValue) { suffix = new StringBuilder(StringUtils.isNotEmpty(column.getDefaultValue()) ? " DEFAULT " + sqlForDefaultClauseLiteral(column) : ""); } if (includeNullability) { suffix.append(column.isNullable() ? " NULL" : " NOT NULL"); } if (includeColumnType) { sql = getColumnRepresentation(column.getType(), column.getWidth(), column.getScale()) + suffix; } else { sql = suffix.toString(); } return sql; } /** * Creates the representation of the default clause literal value. * @param column The column whose default will be converted. 
* * @return An SQL fragment representing the literal in a DEFAULT clause in an SQL statement */ protected String sqlForDefaultClauseLiteral(Column column) { return getSqlFrom(new FieldLiteral(column.getDefaultValue(), column.getType())); } /** * Creates the SQL representation of a column data type. * * @param column The column to map. * @param includeNullability Indicates whether or not the produced SQL should * include nullability of the column. * @return The SQL representation for the column type. * @see #sqlRepresentationOfColumnType(Column, boolean, boolean, boolean) */ protected String sqlRepresentationOfColumnType(Column column, boolean includeNullability) { return sqlRepresentationOfColumnType(column, includeNullability, true, true); } /** * Creates the SQL representation of a column data type. * * @param column The column to map. * @return The SQL representation for the column type. * @see #sqlRepresentationOfColumnType(Column, boolean, boolean, boolean) */ protected String sqlRepresentationOfColumnType(Column column) { StringBuilder defaultSqlRepresentation = new StringBuilder(sqlRepresentationOfColumnType(column, false, true, true)); // Many RDBMS implementations get funny about specifying nullability at all // on autonumbered columns, and it's irrelevant in any case, so we just // avoid it. if (!column.isAutoNumbered()) { defaultSqlRepresentation.append(column.isNullable() ? "" : " NOT NULL"); } return defaultSqlRepresentation.toString(); } /** * Scans the specified {@link Table} for any autonumbered columns and returns * that {@link Column} if it is found, or null otherwise. * * @param table The table to check. * @return The autonumber column, or null if none exists. */ protected Column getAutoIncrementColumnForTable(Table table) { for (Column column : table.columns()) { if (column.isAutoNumbered()) { return column; } } return null; } /** * Generate the SQL to add a column to a table. 
   *
   * @param table The table to add the column to (The column will already have
   *          been added to this Table view)
   * @param column The column to add to the specified table.
   * @return The SQL statements to add a column to a table.
   */
  public abstract Collection<String> alterTableAddColumnStatements(Table table, Column column);


  /**
   * Generate the SQL to run analysis on a table.
   *
   * @param table The table to run the analysis on.
   * @return The SQL statements to analyse the table.
   */
  public Collection<String> getSqlForAnalyseTable(@SuppressWarnings("unused") Table table) {
    // Default: no analysis statements; dialects override where supported.
    return SqlDialect.NO_STATEMENTS;
  }


  /**
   * Generate the SQL to change an existing column on a table.
   *
   * @param table The table to change the column definition on. (The column will
   *          already have been altered in this Table view)
   * @param oldColumn The old column definition.
   * @param newColumn The new column definition.
   * @return The SQL statements to modify the specified column.
   */
  public abstract Collection<String> alterTableChangeColumnStatements(Table table, Column oldColumn, Column newColumn);


  /**
   * Generate the SQL to drop a column from a table.
   *
   * @param table The table to drop the column from. (The column will already
   *          have been dropped from this Table view)
   * @param column The column to drop from the specified table.
   * @return The SQL to drop the specified column.
   */
  public abstract Collection<String> alterTableDropColumnStatements(Table table, Column column);


  /**
   * Generate the SQL to drop an index from a table.
   *
   * @param table The table to drop the index from.
   * @param indexToBeRemoved The index to be dropped.
   * @return The SQL to drop the specified index.
   */
  public Collection<String> indexDropStatements(@SuppressWarnings("unused") Table table, Index indexToBeRemoved) {
    return ImmutableList.of("DROP INDEX " + indexToBeRemoved.getName());
  }


  /**
   * Generates the SQL to create a table and insert the data specified in the {@link SelectStatement}.
   *
   * @param table The table to create.
   * @param selectStatement The {@link SelectStatement}
   * @return A collection of SQL statements
   */
  public Collection<String> addTableFromStatements(Table table, SelectStatement selectStatement) {
    // Deploy the table first, then populate it via INSERT ... SELECT.
    return ImmutableList.<String>builder()
        .addAll(
          tableDeploymentStatements(table)
        )
        .addAll(convertStatementToSQL(
          SqlUtils.insert().into(SqlUtils.tableRef(table.getName())).from(selectStatement))
        )
        .build();
  }


  /**
   * Generates the SQL to add an index to an existing table.
   *
   * @param table The existing table.
   * @param index The new index being added.
   * @return A collection of SQL statements.
   */
  public Collection<String> addIndexStatements(Table table, Index index) {
    return indexDeploymentStatements(table, index);
  }


  /**
   * Generate the SQL to deploy an index on a table.
   *
   * @param table The table to deploy the index on.
   * @param index The index to deploy on the table.
   * @return The SQL to deploy the index on the table.
   */
  protected Collection<String> indexDeploymentStatements(Table table, Index index) {
    StringBuilder statement = new StringBuilder();

    // CREATE [UNIQUE] INDEX schema.name ON schema.table (col1, col2, ...)
    statement.append("CREATE ");
    if (index.isUnique()) {
      statement.append("UNIQUE ");
    }
    statement.append("INDEX ")
             .append(schemaNamePrefix(table))
             .append(index.getName())
             .append(" ON ")
             .append(schemaNamePrefix(table))
             .append(table.getName())
             .append(" (")
             .append(Joiner.on(", ").join(index.columnNames()))
             .append(')');

    return ImmutableList.of(statement.toString());
  }


  /**
   * Decorate the table name in an appropriate manner for temporary table in the
   * relevant database.
   *
   * @param undecoratedName core name.
   * @return decorated version.
   */
  public String decorateTemporaryTableName(String undecoratedName) {
    // Default: no decoration; dialects override where required.
    return undecoratedName;
  }


  /**
   * Sets up parameters on a {@link NamedParameterPreparedStatement} with a set of values.
   *
   * @param statement The {@link PreparedStatement} to set up
   * @param parameters The parameters.
   * @param values The values.
   * @throws RuntimeException if a data type is not supported or if a
   *           supplied string value cannot be converted to the column data type.
   */
  public void prepareStatementParameters(NamedParameterPreparedStatement statement, Iterable<SqlParameter> parameters, DataValueLookup values) {
    parameters.forEach(parameter -> {
      try {
        prepareStatementParameters(statement, values, parameter);
      } catch (Exception e) {
        // Wrap with the column name and offending value for diagnosis.
        throw new RuntimeException(String.format("Error setting parameter value, column [%s], value [%s] on prepared statement",
          parameter.getMetadata().getName(), values.getObject(parameter.getMetadata())), e);
      }
    });
  }


  /**
   * Sets up a parameter on {@link NamedParameterPreparedStatement} with a value.
   * @param statement The {@link PreparedStatement} to set up
   * @param values The values.
   * @param parameter The parameters.
   * @throws RuntimeException if a data type is not supported or if a
   *           supplied string value cannot be converted to the column data type.
   * @throws SQLException for JDBC errors.
   */
  public void prepareStatementParameters(NamedParameterPreparedStatement statement, DataValueLookup values, SqlParameter parameter) throws SQLException {
    // Dispatch on the declared column type; nulls are bound via setObject(null)
    // except for BLOBs, which bind an empty byte array instead.
    switch (parameter.getMetadata().getType()) {
      case BIG_INTEGER:
        Long longVal = values.getLong(parameter.getImpliedName());
        if (longVal == null) {
          statement.setObject(parameter, null);
        } else {
          statement.setLong(parameter, longVal);
        }
        break;
      case BLOB:
        byte[] blobVal = values.getByteArray(parameter.getImpliedName());
        if (blobVal == null) {
          statement.setBlob(parameter, new byte[] {});
        } else {
          statement.setBlob(parameter, blobVal);
        }
        break;
      case BOOLEAN:
        prepareBooleanParameter(statement, values.getBoolean(parameter.getImpliedName()), parameter);
        break;
      case DATE:
        Date dateVal = values.getDate(parameter.getImpliedName());
        if (dateVal == null) {
          statement.setObject(parameter, null);
        } else {
          statement.setDate(parameter, new java.sql.Date(dateVal.getTime()));
        }
        break;
      case DECIMAL:
        statement.setBigDecimal(parameter, values.getBigDecimal(parameter.getImpliedName()));
        break;
      case INTEGER:
        prepareIntegerParameter(statement, values.getInteger(parameter.getImpliedName()), parameter);
        break;
      case CLOB:
      case STRING:
        String stringVal = values.getString(parameter.getImpliedName());
        if (stringVal == null || stringVal.equals("")) {
          // since web-9161 for *ALL* databases
          // - we are using EmptyStringHQLAssistant
          // - and store empty strings as null
          statement.setString(parameter, null);
        } else {
          statement.setString(parameter, stringVal);
        }
        break;
      default:
        throw new RuntimeException(String.format("Unexpected DataType [%s]", parameter.getMetadata().getType()));
    }
  }


  /**
   * Overridable behaviour for mapping an integer parameter to a prepared statement.
   *
   * @param statement The statement.
   * @param integerVal The integer value.
   * @param parameter The parameter to map to.
   * @throws SQLException If an exception occurs setting the parameter.
   */
  protected void prepareIntegerParameter(NamedParameterPreparedStatement statement, Integer integerVal, SqlParameter parameter) throws SQLException {
    if (integerVal == null) {
      statement.setObject(parameter, null);
    } else {
      statement.setInt(parameter, integerVal);
    }
  }


  /**
   * Overridable behaviour for mapping a boolean parameter to a prepared statement.
   *
   * @param statement The statement.
   * @param boolVal The boolean value.
   * @param parameter The parameter to map to.
   * @throws SQLException If an exception occurs setting the parameter.
   */
  protected void prepareBooleanParameter(NamedParameterPreparedStatement statement, Boolean boolVal, SqlParameter parameter) throws SQLException {
    if (boolVal == null) {
      statement.setObject(parameter, null);
    } else {
      statement.setBoolean(parameter, boolVal);
    }
  }


  /**
   * Formats the SQL statement provided.
   *
   * @param sqlStatement The statement to format
   * @return the formatted SQL statement
   */
  public String formatSqlStatement(String sqlStatement) {
    // Default formatting simply terminates the statement with a semicolon.
    return sqlStatement + ";";
  }


  /**
   * Convert a string to an SQL comment
   *
   * @param string The comment string
   * @return An SQL comment containing the comment string
   */
  public String convertCommentToSQL(String string) {
    return "-- " + string;
  }


  /**
   * @param sql The SQL to test
   * @return true if the sql provided is a comment
   */
  public boolean sqlIsComment(String sql) {
    return sql.startsWith("--") && !sql.contains("\n"); // multi-line statements may have comments as the top line
  }


  /**
   * Given an ordered list of columns and a {@link ResultSet}, creates a
   * {@link Record} from the current row.
   *
   * @param resultSet The {@link ResultSet}. Must have been advanced (using
   *          {@link ResultSet#next()}) to the appropriate row.
   * @param columns The columns, ordered according to their appearance in the
   *          {@link ResultSet}. Use {@link ResultSetMetadataSorter} to pre-sort
   *          your columns according to the {@link ResultSetMetaData} if you
   *          can't be sure that the SQL will return the columns in the precise
   *          order that you are expecting.
   * @return A {@link Record} representation of the current {@link ResultSet}
   *         row.
   */
  public Record resultSetToRecord(ResultSet resultSet, Iterable<Column> columns) {
    // Provide initial sizing hint to the array. This potentially means double-traversal
    // of the columns if the column list is not a simple list, but it's almost certainly
    // worth it to minimise the array size and prevent resizing.
    RecordBuilder recordBuilder = DataSetUtils.record()
        .withInitialColumnCount(Iterables.size(columns));

    // JDBC result set columns are 1-based; values are read positionally in
    // column-list order. Primitive getters require a wasNull() check to
    // distinguish SQL NULL from 0/false.
    int idx = 1;
    for (Column column : columns) {
      try {
        switch (column.getType()) {
          case BIG_INTEGER:
            long longVal = resultSet.getLong(idx);
            if (resultSet.wasNull()) {
              recordBuilder.setObject(column.getName(), null);
            } else {
              recordBuilder.setLong(column.getName(), longVal);
            }
            break;
          case BOOLEAN:
            boolean boolVal = resultSet.getBoolean(idx);
            if (resultSet.wasNull()) {
              recordBuilder.setObject(column.getName(), null);
            } else {
              recordBuilder.setBoolean(column.getName(), boolVal);
            }
            break;
          case INTEGER:
            int intVal = resultSet.getInt(idx);
            if (resultSet.wasNull()) {
              recordBuilder.setObject(column.getName(), null);
            } else {
              recordBuilder.setInteger(column.getName(), intVal);
            }
            break;
          case DATE:
            Date date = resultSet.getDate(idx);
            if (date == null) {
              recordBuilder.setObject(column.getName(), null);
            } else {
              recordBuilder.setDate(column.getName(), date);
            }
            break;
          case DECIMAL:
            recordBuilder.setBigDecimal(column.getName(), resultSet.getBigDecimal(idx));
            break;
          case BLOB:
            recordBuilder.setByteArray(column.getName(), resultSet.getBytes(idx));
            break;
          case CLOB:
          case STRING:
            recordBuilder.setString(column.getName(), resultSet.getString(idx));
            break;
          default:
            recordBuilder.setObject(column.getName(), resultSet.getObject(idx));
            break;
        }
        idx++;
      } catch (SQLException e) {
        throw new RuntimeSqlException("Error retrieving value from result set with name [" + column.getName() + "]", e);
      }
    }
    return recordBuilder;
  }


  /**
   * Returns the non key fields from a merge statement.
* * @param statement a merge statement * @return the non key fields */ protected Iterable<AliasedField> getNonKeyFieldsFromMergeStatement(MergeStatement statement) { Set<String> tableUniqueKey = statement.getTableUniqueKey().stream() .map(AliasedField::getImpliedName) .collect(Collectors.toSet()); return Iterables.filter( statement.getSelectStatement().getFields(), input -> !tableUniqueKey.contains(input.getImpliedName()) ); } /** * Creates matching conditions SQL for a list of fields used in the ON section * of a Merge Statement. For example: * "table1.fieldA = table2.fieldA AND table1.fieldB = table2.fieldB". * * @param statement the merge statement. * @param selectAlias the alias of the select statement of a merge statement. * @param targetTableName the name of the target table into which to merge. * @return The corresponding SQL */ protected String matchConditionSqlForMergeFields(MergeStatement statement, String selectAlias, String targetTableName) { Iterable<String> expressions = Iterables.transform(statement.getTableUniqueKey(), field -> String.format("%s.%s = %s.%s", targetTableName, field.getImpliedName(), selectAlias, field.getImpliedName())); return Joiner.on(" AND ").join(expressions); } /** * Extracts updating expressions from the given merge statement and returns them as aliased fields, * similarly to how update expressions are provided to the update statement. Since updating expressions * are optional in merge statements, uses default expressions for any missing destination fields. 
* * @param statement a merge statement * @return the updating expressions aliased as destination fields */ protected Iterable<AliasedField> getMergeStatementUpdateExpressions(MergeStatement statement) { final Map<String, AliasedField> onUpdateExpressions = Maps.uniqueIndex(statement.getIfUpdating(), AliasedField::getImpliedName); final Iterable<AliasedField> nonKeyFieldsFromMergeStatement = getNonKeyFieldsFromMergeStatement(statement); Set<String> keyFields = FluentIterable.from(statement.getTableUniqueKey()) .transform(AliasedField::getImpliedName) .toSet(); List<String> listOfKeyFieldsWithUpdateExpression = FluentIterable.from(onUpdateExpressions.keySet()) .filter(a -> keyFields.contains(a)) .toList(); if (!listOfKeyFieldsWithUpdateExpression.isEmpty()) { throw new IllegalArgumentException("MergeStatement tries to update a key field via the update expressions " + listOfKeyFieldsWithUpdateExpression + " in " + statement); } // Note that we use the source select statement's fields here as we assume that they are // appropriately aliased to match the target table as part of the API contract return Iterables.transform(nonKeyFieldsFromMergeStatement, field -> onUpdateExpressions.getOrDefault(field.getImpliedName(), new MergeStatement.InputField(field.getImpliedName()).as(field.getImpliedName()))); } /** * Returns the assignments for updating part of an SQL MERGE statement * based on the given {@link AliasedField}s. 
* * @param fields The {@link AliasedField}s to create the assignments from * @return the assignments for the updating part as a string */ protected String getMergeStatementAssignmentsSql(Iterable<AliasedField> fields) { return getUpdateStatementAssignmentsSql(fields); } /** * Construct the old table for a change column * @param table The table to change * @param oldColumn The old column * @param newColumn The new column * @return The 'old' table * */ protected Table oldTableForChangeColumn(Table table, Column oldColumn, Column newColumn) { return new ChangeColumn(table.getName(), oldColumn, newColumn).reverse(SchemaUtils.schema(table)).getTable(table.getName()); } /** * Drops and recreates the triggers and supporting items for the target table. * * @param table the table for which to rebuild triggers * @return a collection of sql statements to execute */ public Collection<String> rebuildTriggers(@SuppressWarnings("unused") Table table) { return SqlDialect.NO_STATEMENTS; } /** * Indicates whether the dialect uses NVARCHAR or VARCHAR to store string values. * * @return true if NVARCHAR is used, false is VARCHAR is used. */ public boolean usesNVARCHARforStrings() { return false; } /** * Indicates whether the dialect supports window functions. * * @return true if the dialect supports window functions (e.g. PARTITION BY). * **/ public boolean supportsWindowFunctions() { return false; } /** * Convert a {@link WindowFunction} into standards compliant SQL. 
* @param windowFunctionField The field to convert * @return The resulting SQL **/ protected String getSqlFrom(WindowFunction windowFunctionField) { StringBuilder statement = new StringBuilder().append(getSqlFrom(windowFunctionField.getFunction())); statement.append(" OVER ("); if (windowFunctionField.getPartitionBys().size() > 0) { statement.append("PARTITION BY "); boolean firstField = true; for (AliasedField field : windowFunctionField.getPartitionBys()) { if (!firstField) { statement.append(", "); } statement.append(getSqlFrom(field)); firstField = false; } } if (windowFunctionField.getOrderBys().size() > 0) { statement.append(" ORDER BY "); boolean firstField = true; for (AliasedField field : windowFunctionField.getOrderBys()) { if (!firstField) { statement.append(", "); } statement.append(getSqlForOrderByField(field)); firstField = false; } } statement.append(")"); return statement.toString(); } /** * Returns the INSERT INTO statement. * * @param insertStatement he {@linkplain InsertStatement} object which can be used by the overriding methods to customize the INSERT statement. * @return the INSERT INTO statement. */ protected String getSqlForInsertInto(@SuppressWarnings("unused") InsertStatement insertStatement) { return "INSERT INTO "; } /** * Class representing the structor of an ID Table. * * @author Copyright (c) Alfa Financial Software 2011 */ public static final class IdTable implements Table { /** * The name of the ID Table. */ private final String tableName; /** * True if this idTable should be create as a temporary table specific to * dialect */ private final boolean isTemporary; /** * For testing only - the tableName might not be appropriate for your * dialect! The table will be a temporary table, specific to the dialect. * * @param tableName table name for the id table. * @return {@link IdTable}. 
*/ public static IdTable withDeterministicName(String tableName) { return new IdTable(tableName, true); } /** * Use this to create a temporary {@link IdTable} which is guaranteed to have a legal * name for the dialect. * * @param dialect {@link SqlDialect} that knows what temp table names are * allowed. * @param prefix prefix for the unique name generated. * @return {@link IdTable} */ public static IdTable withPrefix(SqlDialect dialect, String prefix) { return withPrefix(dialect, prefix, true); } /** * Use this to create a temporary or non-temporary {@link IdTable} which is * guaranteed to have a legal name for the dialect. The non-temporary idTables * are necessary to enable access from many sessions/connections in case of some * dialects. * * @param dialect {@link SqlDialect} that knows what temp table names are * allowed. * @param prefix prefix for the unique name generated. * @param isTemporary if set to true the table will be created as a temporary * table specific for the dialect. * @return {@link IdTable} */ public static IdTable withPrefix(SqlDialect dialect, String prefix, boolean isTemporary) { return new IdTable(dialect.decorateTemporaryTableName(prefix + RandomStringUtils.randomAlphabetic(5)), isTemporary); } /** * Constructor used by tests for generating a predictable table name. * * @param tableName table name for the temporary table. * @param isTemporary if set to true, the table will be a temporary table specific to dialect. 
*/ private IdTable(String tableName, boolean isTemporary) { this.tableName = tableName; this.isTemporary = isTemporary; } /** * @see org.alfasoftware.morf.metadata.Table#indexes() */ @Override public List<Index> indexes() { return new ArrayList<>(); } /** * @see org.alfasoftware.morf.metadata.Table#getName() */ @Override public String getName() { return tableName; } /** * @see org.alfasoftware.morf.metadata.Table#columns() */ @Override public List<Column> columns() { List<Column> columns = new ArrayList<>(); columns.add(SchemaUtils.column(ID_INCREMENTOR_TABLE_COLUMN_NAME, DataType.STRING, 132).primaryKey()); columns.add(SchemaUtils.column(ID_INCREMENTOR_TABLE_COLUMN_VALUE, DataType.BIG_INTEGER)); return columns; } /** * {@inheritDoc} * * @see org.alfasoftware.morf.metadata.Table#isTemporary() */ @Override public boolean isTemporary() { return isTemporary; } } }
apache-2.0
popwich/test_selendroid
selendroid-standalone/src/main/java/io/selendroid/android/Abi.java
696
/* * Copyright 2012-2013 eBay Software Foundation and selendroid committers. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package io.selendroid.android; public enum Abi { ARM, X86 }
apache-2.0
ipan97/Apotik-Enggal-Damang
src/main/java/com/github/ipan97/enggal/damang/service/ProductService.java
771
/*
 * Copyright (c) Ipan Taupik Rahman
 */
package com.github.ipan97.enggal.damang.service;

import com.github.ipan97.enggal.damang.model.Product;
import com.github.ipan97.enggal.damang.repository.ProductRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

/**
 * Transactional service for {@link Product} entities. All CRUD behavior comes
 * from the generic {@code AbstractService} base class; this subclass only
 * supplies the concrete {@link ProductRepository} to delegate to.
 *
 * Created by ipan on 03/06/17.
 */
@Service
@Transactional
public class ProductService extends AbstractService<Product, Long> {

    // NOTE(review): field injection kept as-is — switching to constructor
    // injection or renaming the field could affect container wiring.
    @Autowired
    private ProductRepository productRepository;

    /**
     * Supplies the repository used by the {@code AbstractService} template methods.
     *
     * @return the JPA repository backing this service
     */
    @Override
    protected JpaRepository<Product, Long> getRepository() {
        return productRepository;
    }
}
apache-2.0
NationalSecurityAgency/ghidra
Ghidra/Test/IntegrationTest/src/test.slow/java/ghidra/framework/plugintool/TestingPlugin.java
785
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.framework.plugintool; /** * Marker interface to signal that the implementing class is a test plugin and should * not be considered as 'real'. */ public interface TestingPlugin { }
apache-2.0
NationalSecurityAgency/ghidra
Ghidra/Features/FileFormats/src/main/java/ghidra/file/formats/android/dex/format/EncodedCatchHandler.java
3576
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.file.formats.android.dex.format;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import ghidra.app.util.bin.BinaryReader;
import ghidra.app.util.bin.StructConverter;
import ghidra.app.util.bin.format.dwarf4.LEB128;
import ghidra.program.model.data.*;
import ghidra.util.exception.DuplicateNameException;

/**
 * Parses one DEX {@code encoded_catch_handler} item: a signed LEB128 handler
 * count, {@code abs(size)} typed catch entries, and — when {@code size} is
 * non-positive — an unsigned LEB128 catch-all bytecode address.
 */
public class EncodedCatchHandler implements StructConverter {

	private int size;
	private int sizeLength;// in bytes
	private List<EncodedTypeAddressPair> handlers = new ArrayList<>();
	private int catchAllAddress;
	private int catchAllAddressLength;

	/**
	 * Reads the encoded catch handler from the reader's current position.
	 *
	 * @param reader source positioned at the start of the item
	 * @throws IOException if reading from the underlying provider fails
	 */
	public EncodedCatchHandler(BinaryReader reader) throws IOException {
		LEB128 leb128 = LEB128.readSignedValue(reader);
		size = leb128.asInt32();
		sizeLength = leb128.getLength();

		// A negative size still encodes abs(size) typed handlers.
		for (int i = 0; i < Math.abs(size); ++i) {
			handlers.add(new EncodedTypeAddressPair(reader));
		}
		if (size <= 0) {// This element is only present if size is non-positive.
			leb128 = LEB128.readUnsignedValue(reader);
			catchAllAddress = leb128.asUInt32();
			catchAllAddressLength = leb128.getLength();
		}
	}

	/**
	 * <pre>
	 * Number of catch types in this list. If non-positive, then this is the
	 * negative of the number of catch types, and the catches are followed by a catch-all handler.
	 * For example: A size of 0 means that there is a catch-all but no explicitly typed catches.
	 * A size of 2 means that there are two explicitly typed catches and no catch-all.
	 * And a size of -1 means that there is one typed catch along with a catch-all.
	 * </pre>
	 */
	public int getSize() {
		return size;
	}

	/**
	 * Stream of abs(size) encoded items, one for each caught type, in the order that the types should be tested.
	 */
	public List<EncodedTypeAddressPair> getPairs() {
		return handlers;
	}

	/**
	 * Bytecode address of the catch-all handler. This element is only present if size is non-positive.
	 */
	public int getCatchAllAddress() {
		return catchAllAddress;
	}

	@Override
	public DataType toDataType() throws DuplicateNameException, IOException {
		StringBuilder builder = new StringBuilder();
		builder.append(
			"encoded_catch_handler_" + sizeLength + "_" + catchAllAddressLength + "_" + handlers.size());
		Structure structure = new StructureDataType(builder.toString(), 0);
		structure.add(new ArrayDataType(BYTE, sizeLength, BYTE.getLength()), "size", null);
		int index = 0;
		for (EncodedTypeAddressPair pair : handlers) {
			DataType dataType = pair.toDataType();
			structure.add(dataType, "handler_" + index, null);
			builder.append(pair.getDataTypeIdString());
			// FIX: index was never incremented, so every pair was added under
			// the duplicate field name "handler_0".
			++index;
		}
		if (size <= 0) {// This element is only present if size is non-positive.
			structure.add(new ArrayDataType(BYTE, catchAllAddressLength, BYTE.getLength()),
				"catch_all_addr", null);
		}
		structure.setCategoryPath(new CategoryPath("/dex/encoded_catch_handler"));
		try {
			structure.setName(builder.toString());
		}
		catch (Exception e) {
			// ignore - keep the initially assigned name if the composite name is rejected
		}
		return structure;
	}
}
apache-2.0
xschildw/Synapse-Repository-Services
lib/jdomodels/src/main/java/org/sagebionetworks/repo/model/dbo/migration/QueryStreamIterable.java
2278
package org.sagebionetworks.repo.model.dbo.migration;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;

/**
 * Provides generic support for iterating over query results one page at a time
 * by appending a {@code LIMIT ... OFFSET ...} clause to the caller's SQL. At
 * most one page of results is held in memory at any time.
 * <p>
 * NOTE(review): OFFSET-based paging assumes the underlying result set does not
 * change between page fetches — confirm this holds for migration use.
 *
 * @param <T> row type produced by the {@link RowMapper}
 */
public class QueryStreamIterable<T> implements Iterable<T>, Iterator<T> {

	public static final String KEY_OFFSET = "KEY_OFFSET";
	public static final String KEY_LIMIT = "KEY_LIMIT";
	public static final String PAGINATION = " LIMIT :" + KEY_LIMIT + " OFFSET :" + KEY_OFFSET;

	NamedParameterJdbcTemplate namedTemplate;
	RowMapper<T> rowMapper;
	String sql;
	Map<String, Object> parameters;
	long limit;
	long offset;
	Iterator<T> currentPage;

	/**
	 * @param namedTemplate template used to execute the paged queries
	 * @param rowMapper maps each result row to an instance of T
	 * @param sql base query; the pagination clause is appended to it
	 * @param parameters named parameters for the base query (copied defensively)
	 * @param limit the page size. This stream will never keep more than one
	 *        page of data in memory at a time.
	 */
	public QueryStreamIterable(NamedParameterJdbcTemplate namedTemplate, RowMapper<T> rowMapper,
			String sql, Map<String, Object> parameters, long limit) {
		super();
		this.namedTemplate = namedTemplate;
		this.rowMapper = rowMapper;
		StringBuilder sqlBuilder = new StringBuilder(sql);
		sqlBuilder.append(PAGINATION);
		this.sql = sqlBuilder.toString();
		this.limit = limit;
		this.offset = 0L;
		this.currentPage = null;
		// FIX: the original assigned the caller's map first and then replaced
		// it with a copy; only the defensive copy is needed. Copying also
		// prevents mutating the caller's map with the pagination keys below.
		this.parameters = new HashMap<>(parameters);
		this.parameters.put(KEY_LIMIT, this.limit);
		this.parameters.put(KEY_OFFSET, this.offset);
	}

	@Override
	public Iterator<T> iterator() {
		return this;
	}

	@Override
	public boolean hasNext() {
		// Still rows left on the page already in memory?
		if (currentPage != null && currentPage.hasNext()) {
			return true;
		}
		// Fetch the next page at the current offset.
		this.parameters.put(KEY_OFFSET, this.offset);
		currentPage = namedTemplate.query(sql, parameters, rowMapper).iterator();
		// Bump pagination for the next page.
		this.offset = this.offset + this.limit;
		// Empty page means the stream is exhausted.
		return currentPage.hasNext();
	}

	@Override
	public T next() {
		// FIX: honor the Iterator contract — the original threw a
		// NullPointerException if next() was called before hasNext(), and
		// advanced past the end silently depending on state.
		if (!hasNext()) {
			throw new java.util.NoSuchElementException("Query stream is exhausted");
		}
		return currentPage.next();
	}
}
apache-2.0
eschwert/ontop
obdalib-core/src/test/java/it/unibz/krdb/obda/utils/VersionInfoTest.java
368
package it.unibz.krdb.obda.utils;

import org.junit.Test;

import static org.junit.Assert.assertNotNull;

/**
 * Smoke test for {@link VersionInfo}: resolving the version string must not
 * return {@code null}.
 *
 * @author xiao
 */
public class VersionInfoTest {

    /**
     * Verifies that a version string can be resolved from the build metadata.
     * (FIX: dropped the unused {@code throws Exception} clause — nothing in the
     * body throws a checked exception.)
     */
    @Test
    public void testGetVersion() {
        String version = VersionInfo.getVersionInfo().getVersion();
        // Printed deliberately so the resolved version shows up in build logs.
        System.out.println(version);
        assertNotNull(version);
    }
}
apache-2.0
chronakis/dbcp-conn-log
src/main/java/net/chronakis/tomcat/DBCPConnLogger.java
3516
package net.chronakis.tomcat;

import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;

/**
 * AspectJ aspect that logs every Tomcat DBCP2 connection borrow and return,
 * each with a compact one-line caller trace, to help track down connection
 * leaks. All knobs are read once from environment variables at class load.
 */
@Aspect
public class DBCPConnLogger {

	/**
	 * Turn this off! (DBCPLOG_OFF=true disables all output.)
	 */
	public static final boolean OFF = Boolean.parseBoolean(System.getenv().getOrDefault("DBCPLOG_OFF", "false"));

	/**
	 * The first trace elements are:
	 * 1. The getStackTrace() call itself
	 * 2. The Aspect method
	 * 3. The actual method we are wrapping.
	 * Better skip those
	 */
	private static final int SKIP_FIRST = Integer.parseInt(System.getenv().getOrDefault("DBCPLOG_SKIP_FIRST", "3"));

	/**
	 * How many callers to trace back.
	 * This limits the stack traces to a more readable format
	 * and also prevents all the container methods to be printed.
	 * We will rarely need more than 5 steps to identify the culprit,
	 * but if you need, change it.
	 *
	 * More elegant method would be to pass a system property
	 * with a list of packages you want to be included in the trace
	 */
	// FIX: removed a stray duplicate semicolon at the end of this declaration.
	private static final int MAX_TRACE = Integer.parseInt(System.getenv().getOrDefault("DBCPLOG_MAX_TRACE", "5"));

	/**
	 * Do not use package names, we rarely use the same names
	 */
	public static final boolean NO_PACKAGE_NAMES = Boolean.parseBoolean(System.getenv().getOrDefault("DBCPLOG_NO_PACKAGE_NAMES", "true"));

	/**
	 * Exclude calls within the package from the trace
	 */
	private static final boolean EXC_DBCP_PACKAGE = Boolean.parseBoolean(System.getenv().getOrDefault("DBCPLOG_EXC_DBCP_PACKAGE", "true"));

	/**
	 * The apache tomcat dbcp package
	 */
	private static final String DBCP_PACKAGE = "org.apache.tomcat.dbcp.dbcp2";

	/**
	 * Wrap around the getConnection and print the trace in a compact format
	 */
	@Around("execution(* org.apache.tomcat.dbcp.dbcp2.PoolingDataSource.getConnection(..))")
	public Object logGetConnection(ProceedingJoinPoint invocation) throws Throwable {
		Object con = invocation.proceed();
		if (!OFF) {
			// The connection's identity hash lets borrow/return lines be paired up.
			System.out.println("--- getConnection(" + Integer.toHexString(con.hashCode()) + "): "
					+ oneLineTrace(Thread.currentThread().getStackTrace()));
		}
		return con;
	}

	/**
	 * Wrap around the connection close
	 */
	@Around("execution(* org.apache.tomcat.dbcp.dbcp2.PoolingDataSource.PoolGuardConnectionWrapper.close(..))")
	public Object logCloseConnection(ProceedingJoinPoint invocation) throws Throwable {
		Object con = invocation.getTarget();
		if (!OFF) {
			System.out.println("--- retConnection(" + Integer.toHexString(con.hashCode()) + "): "
					+ oneLineTrace(Thread.currentThread().getStackTrace()));
		}
		return invocation.proceed();
	}

	/**
	 * Brief, one line stack traces. Feel free to change it to anyway you like.
	 * Renders at most MAX_TRACE frames, starting after SKIP_FIRST, joined by " > ".
	 */
	public static String oneLineTrace(StackTraceElement[] trace) {
		StringBuilder sb = new StringBuilder();
		boolean first = true;
		// FIX: the upper bound was "i < max - 1", which printed only
		// MAX_TRACE - 1 frames, contradicting the documented knob.
		for (int i = SKIP_FIRST, max = MAX_TRACE + SKIP_FIRST; i < trace.length && i < max; i++) {
			StackTraceElement elm = trace[i];
			String className = elm.getClassName();
			// NOTE: skipped DBCP frames still count against MAX_TRACE —
			// presumably acceptable, confirm if deeper traces are needed.
			if (EXC_DBCP_PACKAGE && className.startsWith(DBCP_PACKAGE))
				continue;
			if (NO_PACKAGE_NAMES)
				className = className.substring(className.lastIndexOf(".") + 1, className.length());

			if (first)
				first = false;
			else
				sb.append(" > ");

			sb.append(className)
				.append(".")
				.append(elm.getMethodName())
				.append("(")
				.append(elm.getLineNumber())
				.append(")");
		}
		return sb.toString();
	}
}
apache-2.0
yaobanglin/wpan
app/src/main/java/com/xinyu/mwp/entity/CurrentPositionListReturnEntity.java
4329
package com.xinyu.mwp.entity;

/**
 * Response bean describing one currently open trading position.
 * Field meanings are inferred from the sample payload documented below;
 * units and timestamp scales (seconds vs. millis) are not verifiable here —
 * TODO confirm against the server API documentation.
 *
 * Created by Administrator on 2017/2/28.
 */
public class CurrentPositionListReturnEntity extends BaseEntity {
    /**
     * Sample payload (from the original author):
     * positionId : 1000001
     * id : 1000
     * code : sz10001
     * typeCode : st1001
     * name : 白银
     * buySell : -1
     * amount : 12
     * openPrice : 12.1
     * positionTime : 1483137783
     * openCost : 120
     * openCharge : 12.1
     * closeTime : 1483137783
     * closePrice : 119
     * grossProfit : 1.2
     * limit : 1
     * stop : 1.1
     * closeType : 1
     * isDeferred : 1
     * deferred : 12.1
     */

    private long positionId;   // unique id of the open position
    private int id;
    private String code;       // instrument code, e.g. "sz10001"
    private String typeCode;   // instrument type code, e.g. "st1001"
    private String name;       // instrument display name
    private int buySell;       // direction flag; sample shows -1 — presumably sell; confirm
    private double amount;
    private double openPrice;
    private int positionTime;  // looks like a Unix timestamp in seconds — confirm
    private double openCost;
    private double openCharge;
    private int closeTime;     // looks like a Unix timestamp in seconds — confirm
    private double closePrice;
    private double grossProfit;
    private double limit;      // take-profit level, presumably; confirm
    private double stop;       // stop-loss level, presumably; confirm
    private int closeType;
    private boolean isDeferred; // whether the position is deferred/rolled over
    private double deferred;    // deferral charge/amount — distinct from isDeferred
    private double interval;
    private long endTime;

    public double getInterval() {
        return interval;
    }

    public void setInterval(double interval) {
        this.interval = interval;
    }

    public long getEndTime() {
        return endTime;
    }

    public void setEndTime(long endTime) {
        this.endTime = endTime;
    }

    public long getPositionId() {
        return positionId;
    }

    public void setPositionId(long positionId) {
        this.positionId = positionId;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getTypeCode() {
        return typeCode;
    }

    public void setTypeCode(String typeCode) {
        this.typeCode = typeCode;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getBuySell() {
        return buySell;
    }

    public void setBuySell(int buySell) {
        this.buySell = buySell;
    }

    public double getAmount() {
        return amount;
    }

    public void setAmount(double amount) {
        this.amount = amount;
    }

    public double getOpenPrice() {
        return openPrice;
    }

    public void setOpenPrice(double openPrice) {
        this.openPrice = openPrice;
    }

    public int getPositionTime() {
        return positionTime;
    }

    public void setPositionTime(int positionTime) {
        this.positionTime = positionTime;
    }

    public double getOpenCost() {
        return openCost;
    }

    public void setOpenCost(double openCost) {
        this.openCost = openCost;
    }

    public double getOpenCharge() {
        return openCharge;
    }

    public void setOpenCharge(double openCharge) {
        this.openCharge = openCharge;
    }

    public int getCloseTime() {
        return closeTime;
    }

    public void setCloseTime(int closeTime) {
        this.closeTime = closeTime;
    }

    public double getClosePrice() {
        return closePrice;
    }

    public void setClosePrice(double closePrice) {
        this.closePrice = closePrice;
    }

    public double getGrossProfit() {
        return grossProfit;
    }

    public void setGrossProfit(double grossProfit) {
        this.grossProfit = grossProfit;
    }

    public double getLimit() {
        return limit;
    }

    public void setLimit(double limit) {
        this.limit = limit;
    }

    public double getStop() {
        return stop;
    }

    public void setStop(double stop) {
        this.stop = stop;
    }

    public int getCloseType() {
        return closeType;
    }

    public void setCloseType(int closeType) {
        this.closeType = closeType;
    }

    // NOTE(review): setDeferred is overloaded (boolean vs. double) for two
    // different fields (isDeferred / deferred); JSON binders may resolve the
    // wrong overload — verify against the deserialization framework in use.
    public boolean isDeferred() {
        return isDeferred;
    }

    public void setDeferred(boolean deferred) {
        isDeferred = deferred;
    }

    public double getDeferred() {
        return deferred;
    }

    public void setDeferred(double deferred) {
        this.deferred = deferred;
    }
}
apache-2.0
fillumina/PerformanceTools
performance-tools/src/test/java/com/fillumina/performance/producer/progression/AutoProgressionPerformanceInstrumenterConsumerDirectTest.java
972
package com.fillumina.performance.producer.progression; import com.fillumina.performance.PerformanceTimerFactory; import com.fillumina.performance.producer.PerformanceConsumerTestHelper; /** * * @author Francesco Illuminati */ public class AutoProgressionPerformanceInstrumenterConsumerDirectTest extends PerformanceConsumerTestHelper { @Override public void executePerformanceProducerWithConsumers( final ConsumerExecutionChecker... consumers) { AutoProgressionPerformanceInstrumenter.builder() .setMaxStandardDeviation(1) .build() .instrument(PerformanceTimerFactory .createSingleThreaded() .addTest("example", new Runnable() { @Override public void run() { // do nothing } })) .addPerformanceConsumer(consumers) .execute(); } }
apache-2.0
thachi/gae4s
core/src/test/scala/com/xhachi/gae4s/datastore/JavaEnum.java
94
package com.xhachi.gae4s.datastore;

/**
 * Plain Java enum fixture. It lives under the Scala test sources
 * (src/test/scala), so it presumably exercises Java-enum interoperability in
 * the Scala datastore tests — TODO confirm against the referencing specs.
 * Do not rename or reorder the constants: tests may rely on names/ordinals.
 */
public enum JavaEnum {
    JAVA_ENUM1,
    JAVA_ENUM2
}
apache-2.0
qiu-yongheng/Ship
app/src/main/java/com/kc/shiptransport/mvp/plan/PlanAdapter.java
3788
package com.kc.shiptransport.mvp.plan;

import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;

import com.kc.shiptransport.R;
import com.kc.shiptransport.db.WeekTask;
import com.kc.shiptransport.interfaze.OnRecyclerviewItemClickListener;
import com.kc.shiptransport.util.SettingUtil;

import org.litepal.crud.DataSupport;

import java.util.List;

/**
 * RecyclerView adapter for the weekly shipping plan grid. The first
 * {@link #DATE_HEADER_COUNT} cells render the dates of the week; all remaining
 * cells render the week task (ship name and sand supply count) stored for that
 * grid position, if any.
 *
 * @author qiuyongheng
 * @time 2017/5/17 15:13
 */
public class PlanAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {

    /** Number of leading cells used as the date header row (one per weekday).
     *  FIX: replaces the magic number 7 used in onBindViewHolder. */
    private static final int DATE_HEADER_COUNT = 7;

    private final Context context;
    private List<String> dates;
    // NOTE(review): weekLists is stored but never read in this class — confirm
    // whether it is still needed.
    private final List<WeekTask> weekLists;
    private OnRecyclerviewItemClickListener listener;

    public PlanAdapter(Context context, List<String> dates, List<WeekTask> weekLists) {
        this.context = context;
        this.dates = dates;
        this.weekLists = weekLists;
    }

    @Override
    public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        return new NormalHolder(LayoutInflater.from(context).inflate(R.layout.item_plan, parent, false));
    }

    @Override
    public void onBindViewHolder(final RecyclerView.ViewHolder holder, int position) {
        // Header cells show the date; task cells show the stored week task.
        if (position < DATE_HEADER_COUNT) {
            ((NormalHolder) holder).mLlDate.setVisibility(View.VISIBLE);
            ((NormalHolder) holder).mLlTask.setVisibility(View.INVISIBLE);
            ((NormalHolder) holder).mTvDate.setText(dates.get(position));
        } else {
            ((NormalHolder) holder).mLlDate.setVisibility(View.INVISIBLE);

            // NOTE(review): this runs a LitePal database query on every bind,
            // which can cause jank while scrolling — consider preloading the
            // tasks into memory.
            List<WeekTask> weekTasks = DataSupport.where("position = ?", position + "").find(WeekTask.class);
            if (weekTasks != null && !weekTasks.isEmpty()) {
                WeekTask weekTask = weekTasks.get(0);
                ((NormalHolder) holder).mTvShip.setText(weekTask.getShipName());
                ((NormalHolder) holder).mTvQuantum.setText(String.valueOf(weekTask.getSandSupplyCount()));
                ((NormalHolder) holder).mLlTask.setVisibility(View.VISIBLE);
            } else {
                ((NormalHolder) holder).mLlTask.setVisibility(View.INVISIBLE);
            }
        }

        // Forward clicks with the holder's current (post-recycling) position.
        if (listener != null) {
            holder.itemView.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    int pos = holder.getLayoutPosition();
                    listener.onItemClick(holder.itemView, pos);
                }
            });
        }
    }

    /** Holder caching the date and task sub-views of one grid cell. */
    class NormalHolder extends RecyclerView.ViewHolder {
        private final TextView mTvShip;
        private final TextView mTvQuantum;
        private final LinearLayout mLlTask;
        private final LinearLayout mLlDate;
        private final TextView mTvDate;

        public NormalHolder(View itemView) {
            super(itemView);
            mLlTask = (LinearLayout) itemView.findViewById(R.id.ll_task);
            mTvShip = (TextView) itemView.findViewById(R.id.tv_ship);
            mTvQuantum = (TextView) itemView.findViewById(R.id.tv_quantum);
            mLlDate = (LinearLayout) itemView.findViewById(R.id.ll_date);
            mTvDate = (TextView) itemView.findViewById(R.id.tv_date);
        }
    }

    @Override
    public int getItemCount() {
        return SettingUtil.Recycler_item_num;
    }

    public void setOnItemClickListener(OnRecyclerviewItemClickListener listener) {
        this.listener = listener;
    }

    // NOTE(review): does not call notifyDataSetChanged(); callers must refresh
    // the adapter themselves — confirm this is intentional.
    public void setDates(List<String> dates) {
        this.dates = dates;
    }
}
apache-2.0
alibaba/fastjson
src/test/java/com/alibaba/fastjson/validate/JSONValidateTest_0.java
1349
package com.alibaba.fastjson.validate; import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONValidator; import com.alibaba.json.test.benchmark.decode.EishayDecodeBytes; import junit.framework.TestCase; import java.io.ByteArrayInputStream; public class JSONValidateTest_0 extends TestCase { public void test_validate_benchmark() throws Exception { String json = JSON.toJSONString(EishayDecodeBytes.instance.getContent()); for (int n = 0; n < 10; ++n) { long start = System.currentTimeMillis(); for (int i = 0; i < 1000 * 1000 * 1; ++i) { JSONValidator validator = JSONValidator.from(json); validator.validate(); // 518 } System.out.println("millis : " + (System.currentTimeMillis() - start)); } } public void test_validate_utf8_benchmark() throws Exception { byte[] json = JSON.toJSONBytes(EishayDecodeBytes.instance.getContent()); for (int n = 0; n < 5; ++n) { long start = System.currentTimeMillis(); for (int i = 0; i < 1000 * 1000 * 1; ++i) { JSONValidator validator = JSONValidator.fromUtf8(json); validator.validate(); } System.out.println("millis : " + (System.currentTimeMillis() - start)); } } }
apache-2.0
JingchengDu/hbase
hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
5838
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.util.StringUtils;

/**
 * Emits sorted Puts.
 * Reads in all Puts from passed Iterator, sorts them, then emits
 * Puts in sorted order.  If lots of columns per row, it will use lots of
 * memory sorting.
 * @see HFileOutputFormat2
 * @see CellSortReducer
 */
@InterfaceAudience.Public
public class PutSortReducer extends
    Reducer<ImmutableBytesWritable, Put, ImmutableBytesWritable, KeyValue> {
  // the cell creator, builds tagged KeyValues for direct HFile output
  private CellCreator kvCreator;

  @Override
  protected void
      setup(Reducer<ImmutableBytesWritable, Put, ImmutableBytesWritable, KeyValue>.Context context)
          throws IOException, InterruptedException {
    // Initialize the CellCreator from the job configuration once per task.
    Configuration conf = context.getConfiguration();
    this.kvCreator = new CellCreator(conf);
  }

  @Override
  protected void reduce(
      ImmutableBytesWritable row,
      java.lang.Iterable<Put> puts,
      Reducer<ImmutableBytesWritable, Put,
              ImmutableBytesWritable, KeyValue>.Context context)
      throws java.io.IOException, InterruptedException {
    // although reduce() is called per-row, handle pathological case:
    // a single row whose Puts exceed the RAM threshold is sorted and
    // emitted in multiple batches (see the forced flush below).
    long threshold = context.getConfiguration().getLong(
        "putsortreducer.row.threshold", 1L * (1<<30));
    Iterator<Put> iter = puts.iterator();
    while (iter.hasNext()) {
      // Sorted buffer for the current batch; the comparator defines HFile order.
      TreeSet<KeyValue> map = new TreeSet<>(CellComparator.COMPARATOR);
      long curSize = 0;
      // stop at the end or the RAM threshold
      // Reusable per-Put tag list (TTL / ACL / visibility tags).
      List<Tag> tags = new ArrayList<>();
      while (iter.hasNext() && curSize < threshold) {
        // clear the tags
        tags.clear();
        Put p = iter.next();
        long t = p.getTTL();
        if (t != Long.MAX_VALUE) {
          // add TTL tag if found (Long.MAX_VALUE means "no TTL set")
          tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(t)));
        }
        byte[] acl = p.getACL();
        if (acl != null) {
          // add ACL tag if found
          tags.add(new ArrayBackedTag(TagType.ACL_TAG_TYPE, acl));
        }
        try {
          CellVisibility cellVisibility = p.getCellVisibility();
          if (cellVisibility != null) {
            // add the visibility labels if any
            tags.addAll(kvCreator.getVisibilityExpressionResolver()
                .createVisibilityExpTags(cellVisibility.getExpression()));
          }
        } catch (DeserializationException e) {
          // We just throw exception here. Should we allow other mutations to proceed by
          // just ignoring the bad one?
          throw new IOException("Invalid visibility expression found in mutation " + p, e);
        }
        for (List<Cell> cells: p.getFamilyCellMap().values()) {
          for (Cell cell: cells) {
            // Creating the KV which needs to be directly written to HFiles. Using the Facade
            // KVCreator for creation of kvs.
            KeyValue kv = null;
            // Merge any tags already present on the cell into the per-Put list.
            TagUtil.carryForwardTags(tags, cell);
            if (!tags.isEmpty()) {
              kv = (KeyValue) kvCreator.create(cell.getRowArray(),
                  cell.getRowOffset(), cell.getRowLength(),
                  cell.getFamilyArray(), cell.getFamilyOffset(),
                  cell.getFamilyLength(), cell.getQualifierArray(),
                  cell.getQualifierOffset(), cell.getQualifierLength(),
                  cell.getTimestamp(), cell.getValueArray(),
                  cell.getValueOffset(), cell.getValueLength(), tags);
            } else {
              // No tags: cheap conversion without rebuilding the cell.
              kv = KeyValueUtil.ensureKeyValue(cell);
            }
            if (map.add(kv)) {// don't count duplicated kv into size
              curSize += kv.heapSize();
            }
          }
        }
      }
      context.setStatus("Read " + map.size() + " entries of " + map.getClass()
          + "(" + StringUtils.humanReadableInt(curSize) + ")");
      int index = 0;
      for (KeyValue kv : map) {
        context.write(row, kv);
        // Periodic status update so the framework sees task progress.
        if (++index % 100 == 0)
          context.setStatus("Wrote " + index);
      }

      // if we have more entries to process
      if (iter.hasNext()) {
        // force flush because we cannot guarantee intra-row sorted order
        // (a null/null record is the downstream writer's flush signal)
        context.write(null, null);
      }
    }
  }
}
apache-2.0
glahiru/airavata
modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentInputResource.java
5632
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.airavata.persistance.registry.jpa.resources;

import org.apache.airavata.persistance.registry.jpa.Resource;
import org.apache.airavata.persistance.registry.jpa.ResourceType;
import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
import org.apache.airavata.persistance.registry.jpa.model.Experiment;
import org.apache.airavata.persistance.registry.jpa.model.Experiment_Input;
import org.apache.airavata.persistance.registry.jpa.model.Experiment_Input_PK;
import org.apache.airavata.registry.cpi.RegistryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.persistence.EntityManager;
import java.util.List;

/**
 * Registry resource wrapping one Experiment_Input JPA row — a single named
 * input (key/value pair plus type and metadata) of an experiment. This is a
 * leaf resource: it owns no children, so the generic create/remove/get
 * operations are unsupported and only {@link #save()} does real work.
 */
public class ExperimentInputResource extends AbstractResource {
    private static final Logger logger = LoggerFactory.getLogger(ExperimentInputResource.class);
    // Parent experiment; supplies the experiment id half of the composite key.
    private ExperimentResource experimentResource;
    // Input name — the other half of the Experiment_Input composite key.
    private String experimentKey;
    private String value;
    private String inputType;
    private String metadata;

    public String getExperimentKey() {
        return experimentKey;
    }

    public void setExperimentKey(String experimentKey) {
        this.experimentKey = experimentKey;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public ExperimentResource getExperimentResource() {
        return experimentResource;
    }

    public void setExperimentResource(ExperimentResource experimentResource) {
        this.experimentResource = experimentResource;
    }

    public String getInputType() {
        return inputType;
    }

    public void setInputType(String inputType) {
        this.inputType = inputType;
    }

    public String getMetadata() {
        return metadata;
    }

    public void setMetadata(String metadata) {
        this.metadata = metadata;
    }

    /** Unsupported: experiment inputs have no child resources. */
    public Resource create(ResourceType type) throws RegistryException {
        logger.error("Unsupported resource type for experiment input data resource.", new UnsupportedOperationException());
        throw new UnsupportedOperationException();
    }

    /** Unsupported: experiment inputs have no child resources. */
    public void remove(ResourceType type, Object name) throws RegistryException{
        logger.error("Unsupported resource type for experiment input data resource.", new UnsupportedOperationException());
        throw new UnsupportedOperationException();
    }

    /** Unsupported: experiment inputs have no child resources. */
    public Resource get(ResourceType type, Object name) throws RegistryException {
        logger.error("Unsupported resource type for experiment input data resource.", new UnsupportedOperationException());
        throw new UnsupportedOperationException();
    }

    /** Unsupported: experiment inputs have no child resources. */
    public List<Resource> get(ResourceType type) throws RegistryException {
        logger.error("Unsupported resource type for experiment input data resource.", new UnsupportedOperationException());
        throw new UnsupportedOperationException();
    }

    /**
     * Upserts this input row. First probes for an existing row by composite
     * key (experiment id + input key) in a throwaway EntityManager, then in a
     * fresh EntityManager either merges the updated existing entity or
     * persists a new one, inside a single transaction.
     *
     * @throws RegistryException wrapping any persistence failure (the
     *         transaction is rolled back in the finally block if still active)
     */
    public void save() throws RegistryException{
        EntityManager em = null;
        try {
            // Probe for an existing row outside the write transaction.
            em = ResourceUtils.getEntityManager();
            Experiment_Input existingInput = em.find(Experiment_Input.class, new Experiment_Input_PK(experimentResource.getExpID(), experimentKey));
            em.close();

            em = ResourceUtils.getEntityManager();
            em.getTransaction().begin();
            Experiment_Input exInput = new Experiment_Input();
            exInput.setEx_key(experimentKey);
            Experiment experiment = em.find(Experiment.class, experimentResource.getExpID());
            exInput.setExperiment(experiment);
            exInput.setExperiment_id(experiment.getExpId());
            // Value column is stored as char[] in the entity, hence the conversion.
            if (value != null){
                exInput.setValue(value.toCharArray());
            }
            exInput.setInputType(inputType);
            exInput.setMetadata(metadata);
            if (existingInput != null) {
                // Update path: copy current state onto the detached entity and merge.
                existingInput.setEx_key(experimentKey);
                existingInput.setExperiment(experiment);
                existingInput.setExperiment_id(experiment.getExpId());
                if (value != null){
                    existingInput.setValue(value.toCharArray());
                }
                existingInput.setInputType(inputType);
                existingInput.setMetadata(metadata);
                exInput = em.merge(existingInput);
            } else {
                // Insert path: persist the freshly built entity.
                em.persist(exInput);
            }
            em.getTransaction().commit();
            em.close();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new RegistryException(e);
        } finally {
            // Defensive cleanup: roll back and close if commit never happened.
            if (em != null && em.isOpen()) {
                if (em.getTransaction().isActive()){
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
    }
}
apache-2.0
Distrotech/gerrit
gerrit-gwtui/src/main/java/com/google/gerrit/client/download/DownloadPanel.java
2681
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.client.download;

import com.google.gerrit.client.Gerrit;
import com.google.gerrit.client.config.DownloadInfo.DownloadCommandInfo;
import com.google.gerrit.client.config.DownloadInfo.DownloadSchemeInfo;
import com.google.gerrit.reviewdb.client.AccountGeneralPreferences;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.InlineLabel;
import com.google.gwtexpui.clippy.client.CopyableLabel;

import java.util.Set;

/**
 * Base widget showing the download commands for a project: a header with
 * scheme (URL) links, command links for the selected scheme, and a
 * copy-to-clipboard label holding the selected command. Subclasses supply
 * the concrete commands via {@link #getCommands(DownloadSchemeInfo)}.
 */
public abstract class DownloadPanel extends FlowPanel {
  protected final String project;

  private final DownloadCommandPanel commands = new DownloadCommandPanel();
  private final DownloadUrlPanel urls = new DownloadUrlPanel();
  private final CopyableLabel copyLabel = new CopyableLabel("");

  /**
   * @param project name of the project being downloaded
   * @param allowAnonymous whether anonymous download schemes may be offered
   */
  public DownloadPanel(String project, boolean allowAnonymous) {
    this.project = project;

    copyLabel.setStyleName(Gerrit.RESOURCES.css().downloadLinkCopyLabel());
    urls.add(DownloadUrlLink.createDownloadUrlLinks(allowAnonymous, this));
    setupWidgets();
  }

  // Lays out header (commands + gap + scheme links) and the copy label, and
  // pre-selects the scheme from the user's preferences (or defaults when
  // not signed in). Does nothing if no schemes are available.
  private void setupWidgets() {
    if (!urls.isEmpty()) {
      final AccountGeneralPreferences pref;
      if (Gerrit.isSignedIn()) {
        pref = Gerrit.getUserAccount().getGeneralPreferences();
      } else {
        pref = new AccountGeneralPreferences();
        pref.resetToDefaults();
      }
      urls.select(pref.getDownloadUrl());

      FlowPanel p = new FlowPanel();
      p.setStyleName(Gerrit.RESOURCES.css().downloadLinkHeader());
      p.add(commands);
      final InlineLabel glue = new InlineLabel();
      glue.setStyleName(Gerrit.RESOURCES.css().downloadLinkHeaderGap());
      p.add(glue);
      p.add(urls);

      add(p);
      add(copyLabel);
    }
  }

  // Rebuilds the command links for the newly selected scheme and selects
  // the default command (which updates the copy label).
  void populateDownloadCommandLinks(DownloadSchemeInfo schemeInfo) {
    commands.clear();
    for (DownloadCommandInfo cmd : getCommands(schemeInfo)) {
      commands.add(new DownloadCommandLink(copyLabel, cmd));
    }
    commands.select();
  }

  /**
   * @param schemeInfo the currently selected download scheme
   * @return the download commands to offer for that scheme
   */
  protected abstract Set<DownloadCommandInfo> getCommands(
      DownloadSchemeInfo schemeInfo);
}
apache-2.0
CanangTechnologies/grapevine
src/main/java/net/canang/grapevine/client/GrapevineServiceAsync.java
558
package net.canang.grapevine.client;

import com.google.gwt.user.client.rpc.AsyncCallback;
import net.canang.grapevine.client.model.ScoopModel;

import java.util.List;

/**
 * Asynchronous counterpart of the GrapevineService GWT-RPC interface.
 * Each method mirrors the synchronous service call, delivering the result
 * (or failure) through the supplied {@link AsyncCallback}.
 *
 * @author rafizan.baharum
 * @since 10/25/13
 */
public interface GrapevineServiceAsync {

    /** Persists the given scoop; callback fires when the save completes. */
    void save(ScoopModel model, AsyncCallback<Void> callback);

    /** Deletes the given scoop; callback fires when the delete completes. */
    void delete(ScoopModel model, AsyncCallback<Void> callback);

    /** Fetches all scoops. */
    void findScoops(AsyncCallback<List<ScoopModel>> callback);

    /** Fetches scoops for the given coordinates (presumably nearby ones — confirm server-side semantics). */
    void findScoops(Float latitude, Float longitude, AsyncCallback<List<ScoopModel>> callback);
}
apache-2.0
K-scope/K-scope
src/jp/riken/kscope/language/KeywordArgument.java
1990
/*
 * K-scope
 * Copyright 2012-2013 RIKEN, Japan
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jp.riken.kscope.language;

/**
 * Class representing a keyword argument (a named actual argument in a
 * procedure call). Extends {@link Expression} with the argument's keyword.
 * @author RIKEN
 *
 */
public class KeywordArgument extends Expression {
    /** Serial version UID */
    private static final long serialVersionUID = 7613378389238504275L;
    // Keyword (argument name); empty string when not set.
    private String keyword = "";

    /**
     * Default constructor.
     */
    public KeywordArgument() {
        super();
    }

    /**
     * Gets the keyword.
     * @return the keyword
     */
    public String getKeyword() {
        return keyword;
    }

    /**
     * Sets the keyword.
     * @param key the keyword
     */
    public void setKeyword(String key) {
        this.keyword = key;
    }

    /**
     * Constructor copying the state of an existing expression and attaching
     * the given keyword to it.
     * @param key the keyword
     * @param expr the expression to copy from
     */
    public KeywordArgument(String key, Expression expr) {
        this.keyword = key;
        this.setLine(expr.getLine());
        this.setVariableType(expr.getType());
        // Note: the collections are shared-by-addAll, not deep-copied.
        this.getVariables().addAll(expr.getVariables());
        this.getFuncCalls().addAll(expr.getFuncCalls());
        // Copy the operator-count statistics gathered on the source expression.
        this.setAddCount(expr.getAddCount());
        this.setSubCount(expr.getSubCount());
        this.setMulCount(expr.getMulCount());
        this.setDivCount(expr.getDivCount());
        this.setPowCount(expr.getPowCount());
    }
}
apache-2.0
RogerParkinson/madura-vaadin-support
madura-vaadin-tableeditor/src/main/java/nz/co/senanque/vaadin/tableeditor/EditorWindow.java
1110
/*******************************************************************************
 * Copyright (c)2014 Prometheus Consulting
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package nz.co.senanque.vaadin.tableeditor;

import java.util.List;

import nz.co.senanque.vaadin.MaduraSessionManager;

import com.vaadin.ui.Component;

/**
 * A Vaadin component used by the table editor to edit one row object of
 * type {@code T}. Implementations are initialized once with the editable
 * fields, then loaded with each object to edit.
 *
 * @param <T> the row/bean type being edited
 */
public interface EditorWindow<T> extends Component {

	/**
	 * Binds the given object into the editor.
	 *
	 * @param object the row object to edit
	 * @param newRow true when the object is a newly created row rather than an existing one
	 */
	public void loadObject(T object, boolean newRow);

	/**
	 * One-time setup of the editor.
	 *
	 * @param fields names of the properties to expose as editable fields
	 * @param maduraSessionManager session manager providing binding/validation support
	 */
	public void initialize(List<String> fields, MaduraSessionManager maduraSessionManager);

}
apache-2.0
googlemaps/android-maps-utils
library/src/main/java/com/google/maps/android/data/kml/KmlStyle.java
15412
/*
 * Copyright 2020 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.maps.android.data.kml;

import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.MarkerOptions;
import com.google.android.gms.maps.model.PolygonOptions;
import com.google.android.gms.maps.model.PolylineOptions;
import com.google.maps.android.data.Style;

import android.graphics.Color;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Random;

import androidx.annotation.VisibleForTesting;

/**
 * Represents the defined styles in the KML document.
 * Accumulates per-style settings (icon, colors, color modes, balloon text,
 * widths) as the KML is parsed, and produces Marker/Polyline/Polygon options
 * reflecting those settings. Which settings were explicitly set is tracked
 * in {@code mStylesSet} so callers can distinguish defaults from KML values.
 */
public class KmlStyle extends Style {

    private final static int HSV_VALUES = 3;
    private final static int HUE_VALUE = 0;
    private final static int INITIAL_SCALE = 1;

    // BalloonStyle content, keyed by option name (currently only "text").
    private final HashMap<String, String> mBalloonOptions;
    // Names of the style properties explicitly set from the KML.
    private final HashSet<String> mStylesSet;
    private boolean mFill = true;
    private boolean mOutline = true;
    private String mIconUrl;
    private double mScale;
    private String mStyleId;
    private boolean mIconRandomColorMode;
    private boolean mLineRandomColorMode;
    private boolean mPolyRandomColorMode;

    @VisibleForTesting
    float mMarkerColor;

    /**
     * Creates a new KmlStyle object
     */
    /* package */ KmlStyle() {
        super();
        mStyleId = null;
        mBalloonOptions = new HashMap<String, String>();
        mStylesSet = new HashSet<String>();
        mScale = INITIAL_SCALE;
        mMarkerColor = 0;
        mIconRandomColorMode = false;
        mLineRandomColorMode = false;
        mPolyRandomColorMode = false;
    }

    /**
     * Sets text found for an info window
     *
     * @param text Text for an info window
     */
    /* package */ void setInfoWindowText(String text) {
        mBalloonOptions.put("text", text);
    }

    /**
     * Gets the id for the style
     *
     * @return Style Id, null otherwise
     */
    /* package */ String getStyleId() {
        return mStyleId;
    }

    /**
     * Sets id for a style
     *
     * @param styleId Id for the style
     */
    /* package */ void setStyleId(String styleId) {
        mStyleId = styleId;
    }

    /**
     * Checks if a given style (for a marker, linestring or polygon) has been set
     *
     * @param style style to check if set
     * @return True if style was set, false otherwise
     */
    public boolean isStyleSet(String style) {
        return mStylesSet.contains(style);
    }

    /**
     * Gets whether the Polygon fill is set
     *
     * @return True if there is a fill for the polygon, false otherwise
     */
    public boolean hasFill() {
        return mFill;
    }

    /**
     * Sets whether the Polygon has a fill
     *
     * @param fill True if the polygon fill is set, false otherwise
     */
    public void setFill(boolean fill) {
        mFill = fill;
    }

    /**
     * Gets the scale for a marker icon
     *
     * @return scale value
     */
    public double getIconScale() {
        return mScale;
    }

    /**
     * Sets the scale for a marker icon
     *
     * @param scale scale value
     */
    /* package */ void setIconScale(double scale) {
        mScale = scale;
        mStylesSet.add("iconScale");
    }

    /**
     * Gets whether the Polygon outline is set
     *
     * @return True if the polygon outline is set, false otherwise
     */
    public boolean hasOutline() {
        return mOutline;
    }

    /**
     * Gets whether a BalloonStyle has been set
     *
     * @return True if a BalloonStyle has been set, false otherwise
     */
    public boolean hasBalloonStyle() {
        return mBalloonOptions.size() > 0;
    }

    /**
     * Sets whether the Polygon has an outline
     *
     * @param outline True if the polygon outline is set, false otherwise
     */
    /* package */ void setOutline(boolean outline) {
        mOutline = outline;
        mStylesSet.add("outline");
    }

    /**
     * Gets the url for the marker icon
     *
     * @return Url for the marker icon, null otherwise
     */
    public String getIconUrl() {
        return mIconUrl;
    }

    /**
     * Sets the url for the marker icon
     *
     * @param iconUrl Url for the marker icon
     */
    /* package */ void setIconUrl(String iconUrl) {
        mIconUrl = iconUrl;
        mStylesSet.add("iconUrl");
    }

    /**
     * Sets the fill color for a KML Polygon using a String
     *
     * @param color Fill color for a KML Polygon as a String
     */
    /* package */ void setFillColor(String color) {
        // Add # to allow for mOutline color to be parsed correctly
        int polygonColorNum = (Color.parseColor("#" + convertColor(color)));
        setPolygonFillColor(polygonColorNum);
        mStylesSet.add("fillColor");
    }

    /**
     * Sets the color for a marker
     *
     * @param color Color for a marker
     */
    /* package */ void setMarkerColor(String color) {
        int integerColor = Color.parseColor("#" + convertColor(color));
        // Markers only support hue, so only the hue component is retained.
        mMarkerColor = getHueValue(integerColor);
        mMarkerOptions.icon(BitmapDescriptorFactory.defaultMarker(mMarkerColor));
        mStylesSet.add("markerColor");
    }

    /**
     * Gets the hue value from a color
     *
     * @param integerColor Integer representation of a color
     * @return Hue value from a color
     */
    private static float getHueValue(int integerColor) {
        float[] hsvValues = new float[HSV_VALUES];
        Color.colorToHSV(integerColor, hsvValues);
        return hsvValues[HUE_VALUE];
    }

    /**
     * Converts a color format of the form AABBGGRR to AARRGGBB. Any leading or trailing spaces
     * in the provided string will be trimmed prior to conversion.
     *
     * @param color Color of the form AABBGGRR
     * @return Color of the form AARRGGBB
     */
    private static String convertColor(String color) {
        // Tolerate KML with leading or trailing whitespace in colors
        color = color.trim();
        String newColor;
        if (color.length() > 6) {
            // 8-digit AABBGGRR: keep alpha, swap the B and R byte pairs.
            newColor = color.substring(0, 2) + color.substring(6, 8)
                    + color.substring(4, 6) + color.substring(2, 4);
        } else {
            // 6-digit BBGGRR: swap the B and R byte pairs (no alpha).
            newColor = color.substring(4, 6) + color.substring(2, 4)
                    + color.substring(0, 2);
        }
        return newColor;
    }

    /**
     * Sets the rotation / heading for a marker
     *
     * @param heading Decimal representation of a rotation value
     */
    /* package */ void setHeading(float heading) {
        setMarkerRotation(heading);
        mStylesSet.add("heading");
    }

    /**
     * Sets the hotspot / anchor point of a marker
     *
     * @param x      x point of a marker position
     * @param y      y point of a marker position
     * @param xUnits units in which the x value is specified
     * @param yUnits units in which the y value is specified
     */
    /* package */ void setHotSpot(float x, float y, String xUnits, String yUnits) {
        setMarkerHotSpot(x, y, xUnits, yUnits);
        mStylesSet.add("hotSpot");
    }

    /**
     * Sets the color mode for a marker. A "random" color mode sets the color mode to true,
     * a "normal" colormode sets the color mode to false.
     *
     * @param colorMode A "random" or "normal" color mode
     */
    /* package */ void setIconColorMode(String colorMode) {
        mIconRandomColorMode = colorMode.equals("random");
        mStylesSet.add("iconColorMode");
    }

    /**
     * Checks whether the color mode for a marker is true / random
     *
     * @return True if the color mode is true, false otherwise
     */
    /* package */ boolean isIconRandomColorMode() {
        return mIconRandomColorMode;
    }

    /**
     * Sets the color mode for a polyline. A "random" color mode sets the color mode to true,
     * a "normal" colormode sets the color mode to false.
     *
     * @param colorMode A "random" or "normal" color mode
     */
    /* package */ void setLineColorMode(String colorMode) {
        mLineRandomColorMode = colorMode.equals("random");
        mStylesSet.add("lineColorMode");
    }

    /**
     * Checks whether the color mode for a polyline is true / random
     *
     * @return True if the color mode is true, false otherwise
     */
    public boolean isLineRandomColorMode() {
        return mLineRandomColorMode;
    }

    /**
     * Sets the color mode for a polygon. A "random" color mode sets the color mode to true,
     * a "normal" colormode sets the color mode to false.
     *
     * @param colorMode A "random" or "normal" color mode
     */
    /* package */ void setPolyColorMode(String colorMode) {
        mPolyRandomColorMode = colorMode.equals("random");
        mStylesSet.add("polyColorMode");
    }

    /**
     * Checks whether the color mode for a polygon is true / random
     *
     * @return True if the color mode is true, false otherwise
     */
    /* package */
    public boolean isPolyRandomColorMode() {
        return mPolyRandomColorMode;
    }

    /**
     * Sets the outline color for a Polyline and a Polygon using a String
     *
     * @param color Outline color for a Polyline and a Polygon represented as a String
     */
    /* package */ void setOutlineColor(String color) {
        // Add # to allow for mOutline color to be parsed correctly
        mPolylineOptions.color(Color.parseColor("#" + convertColor(color)));
        mPolygonOptions.strokeColor(Color.parseColor("#" + convertColor(color)));
        mStylesSet.add("outlineColor");
    }

    /**
     * Sets the line width for a Polyline and a Polygon
     *
     * @param width Line width for a Polyline and a Polygon
     */
    /* package */ void setWidth(Float width) {
        setLineStringWidth(width);
        setPolygonStrokeWidth(width);
        mStylesSet.add("width");
    }

    /**
     * Gets the balloon options
     *
     * @return Balloon Options
     */
    public HashMap<String, String> getBalloonOptions() {
        return mBalloonOptions;
    }

    /**
     * Creates a new marker option from given properties of an existing marker option
     *
     * NOTE(review): despite the "creates new" contract, in random color mode this
     * method sets the randomized icon on originalMarkerOption (the shared
     * mMarkerOptions) before copying it — a side effect on the input; confirm
     * whether that mutation is intended.
     *
     * @param originalMarkerOption An existing MarkerOption instance
     * @param iconRandomColorMode  True if marker color mode is random, false otherwise
     * @param markerColor          Color of the marker
     * @return A new MarkerOption
     */
    private static MarkerOptions createMarkerOptions(MarkerOptions originalMarkerOption,
                                                     boolean iconRandomColorMode, float markerColor) {
        MarkerOptions newMarkerOption = new MarkerOptions();
        newMarkerOption.rotation(originalMarkerOption.getRotation());
        newMarkerOption.anchor(originalMarkerOption.getAnchorU(), originalMarkerOption.getAnchorV());
        if (iconRandomColorMode) {
            float hue = getHueValue(computeRandomColor((int) markerColor));
            originalMarkerOption.icon(BitmapDescriptorFactory.defaultMarker(hue));
        }
        newMarkerOption.icon(originalMarkerOption.getIcon());
        return newMarkerOption;
    }

    /**
     * Creates a new PolylineOption from given properties of an existing PolylineOption
     *
     * @param originalPolylineOption An existing PolylineOption instance
     * @return A new PolylineOption
     */
    private static PolylineOptions createPolylineOptions(PolylineOptions originalPolylineOption) {
        PolylineOptions polylineOptions = new PolylineOptions();
        polylineOptions.color(originalPolylineOption.getColor());
        polylineOptions.width(originalPolylineOption.getWidth());
        polylineOptions.clickable(originalPolylineOption.isClickable());
        return polylineOptions;
    }

    /**
     * Creates a new PolygonOption from given properties of an existing PolygonOption
     *
     * @param originalPolygonOption An existing PolygonOption instance
     * @param isFill                Whether the fill for a polygon is set
     * @param isOutline             Whether the outline for a polygon is set
     * @return A new PolygonOption
     */
    private static PolygonOptions createPolygonOptions(PolygonOptions originalPolygonOption,
                                                       boolean isFill, boolean isOutline) {
        // Stroke width defaults to 0 unless the outline is enabled.
        float originalWidth = 0.0f;
        PolygonOptions polygonOptions = new PolygonOptions();
        if (isFill) {
            polygonOptions.fillColor(originalPolygonOption.getFillColor());
        }
        if (isOutline) {
            polygonOptions.strokeColor(originalPolygonOption.getStrokeColor());
            originalWidth = originalPolygonOption.getStrokeWidth();
        }
        polygonOptions.strokeWidth(originalWidth);
        polygonOptions.clickable(originalPolygonOption.isClickable());
        return polygonOptions;
    }

    /**
     * Gets a MarkerOption
     *
     * @return A new MarkerOption
     */
    public MarkerOptions getMarkerOptions() {
        return createMarkerOptions(mMarkerOptions, isIconRandomColorMode(), mMarkerColor);
    }

    /**
     * Gets a PolylineOption
     *
     * @return new PolylineOptions
     */
    public PolylineOptions getPolylineOptions() {
        return createPolylineOptions(mPolylineOptions);
    }

    /**
     * Gets a PolygonOption
     *
     * @return new PolygonOptions
     */
    public PolygonOptions getPolygonOptions() {
        return createPolygonOptions(mPolygonOptions, mFill, mOutline);
    }

    /**
     * Computes a random color given an integer. Algorithm to compute the random color can be
     * found in https://developers.google.com/kml/documentation/kmlreference#colormode
     *
     * @param color Color represented as an integer
     * @return Integer representing a random color
     */
    public static int computeRandomColor(int color) {
        Random random = new Random();
        int red = Color.red(color);
        int green = Color.green(color);
        int blue = Color.blue(color);
        //Random number can only be computed in range [0, n)
        if (red != 0) {
            red = random.nextInt(red);
        }
        if (blue != 0) {
            blue = random.nextInt(blue);
        }
        if (green != 0) {
            green = random.nextInt(green);
        }
        return Color.rgb(red, green, blue);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("Style").append("{");
        sb.append("\n balloon options=").append(mBalloonOptions);
        sb.append(",\n fill=").append(mFill);
        sb.append(",\n outline=").append(mOutline);
        sb.append(",\n icon url=").append(mIconUrl);
        sb.append(",\n scale=").append(mScale);
        sb.append(",\n style id=").append(mStyleId);
        sb.append("\n}\n");
        return sb.toString();
    }
}
apache-2.0
massdosage/mass-scrobblage
src/test/java/za/co/massdosage/scrobble/FileScrobblerTest.java
4649
/**
 * Copyright (C) 2015-2020 Mass Dosage
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package za.co.massdosage.scrobble;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;

import java.io.File;
import java.util.List;

import org.junit.Rule;
import org.junit.Test;

import de.umass.lastfm.scrobble.ScrobbleData;
import fm.last.commons.test.file.ClassDataFolder;
import fm.last.commons.test.file.DataFolder;

/**
 * Tests for {@link FileScrobbler}: extraction of {@link ScrobbleData} from
 * MP3 fixtures under this class's data folder, plus scrobble-data
 * construction edge cases (track numbers with slashes, invalid values).
 */
public class FileScrobblerTest {

  // Resolves test fixture files relative to this test class.
  @Rule
  public DataFolder dataFolder = new ClassDataFolder();

  // NOTE(review): this test has no assertions — it only exercises
  // scrobbleFolder with invalid credentials and passes as long as no
  // exception escapes; confirm that is the intended contract.
  @Test
  public void authenticationFailure() throws Exception {
    FileScrobbler scrobbler = new FileScrobbler("invalidKey", "invalidSecret", "scrobtestuser", "invalidHash");
    scrobbler.scrobbleFolder(dataFolder.getFile("nohidden"));
  }

  // All three audio files in the folder should yield scrobbles.
  @Test
  public void extractScrobbleDataFromFolder() throws Exception {
    File audioFolder = dataFolder.getFile("nohidden");
    FileScrobbler scrobbler = new FileScrobbler("key", "secret", "scrobtestuser", "hash");
    List<ScrobbleData> scrobbleData = scrobbler.extractScrobbles(audioFolder);
    assertThat(scrobbleData.size(), is(3));
  }

  // Hidden files must be skipped; only the one visible track is extracted.
  @Test
  public void extractScrobbleDataFromFolderWithHiddenFile() throws Exception {
    File audioFolder = dataFolder.getFile("hidden");
    FileScrobbler scrobbler = new FileScrobbler("key", "secret", "scrobtestuser", "hash");
    List<ScrobbleData> extracted = scrobbler.extractScrobbles(audioFolder);
    assertThat(extracted.size(), is(1));
    ScrobbleData scrobbleData = extracted.get(0);
    assertThat(scrobbleData.getArtist(), is("DJ Mass Dosage"));
    assertThat(scrobbleData.getTrack(), is("How DJ can you Dosed Mix"));
  }

  // Full-tag extraction from a single file: artist, title, album data,
  // track number and duration all come from the MP3 tags.
  @Test
  public void extractScrobbleDataFromFile() throws Exception {
    File mp3File = dataFolder.getFile("nohidden/test2.mp3");
    FileScrobbler scrobbler = new FileScrobbler("key", "secret", "scrobtestuser", "hash");
    ScrobbleData scrobbleData = scrobbler.extractScrobble(mp3File);
    assertThat(scrobbleData.getArtist(), is("ArtistName"));
    assertThat(scrobbleData.getTrack(), is("TrackTitle"));
    assertThat(scrobbleData.getAlbumArtist(), is("AlbumArtistName"));
    assertThat(scrobbleData.getAlbum(), is("AlbumName"));
    assertThat(scrobbleData.getTrackNumber(), is(1));
    assertThat(scrobbleData.getDuration(), is(5));
  }

  // An unparseable track number falls back to -1; missing album fields are null.
  @Test
  public void extractScrobbleInvalidTrackNumber() throws Exception {
    File mp3File = dataFolder.getFile("nohidden/test-invalid-track-number.mp3");
    FileScrobbler scrobbler = new FileScrobbler("key", "secret", "scrobtestuser", "hash");
    ScrobbleData scrobbleData = scrobbler.extractScrobble(mp3File);
    assertThat(scrobbleData.getArtist(), is("DJ Mass Dosage"));
    assertThat(scrobbleData.getTrack(), is("How DJ can you Dosed Mix"));
    assertThat(scrobbleData.getAlbumArtist(), is(nullValue()));
    assertThat(scrobbleData.getAlbum(), is(nullValue()));
    assertThat(scrobbleData.getTrackNumber(), is(-1));
    assertThat(scrobbleData.getDuration(), is(5));
  }

  @Test
  public void createScrobbleDataTypical() {
    FileScrobbler scrobbler = new FileScrobbler("key", "secret", "scrobtestuser", "hash");
    ScrobbleData scrobbleData = scrobbler
        .createScrobbleData("artistName", "trackName", "albumArtistName", "albumName", "13", 33);
    assertThat(scrobbleData.getArtist(), is("artistName"));
    assertThat(scrobbleData.getTrack(), is("trackName"));
    assertThat(scrobbleData.getAlbumArtist(), is("albumArtistName"));
    assertThat(scrobbleData.getAlbum(), is("albumName"));
    assertThat(scrobbleData.getTrackNumber(), is(13));
    assertThat(scrobbleData.getDuration(), is(33));
  }

  // See https://github.com/massdosage/mass-scrobblage/issues/1
  // Track numbers of the form "n/total" should parse to n.
  @Test
  public void createScrobbleDataTrackNumberWithSlash() {
    FileScrobbler scrobbler = new FileScrobbler("key", "secret", "scrobtestuser", "hash");
    ScrobbleData scrobbleData = scrobbler
        .createScrobbleData("artistName", "trackName", "albumArtistName", "albumName", "1/10", 33);
    assertThat(scrobbleData.getTrackNumber(), is(1));
  }
}
apache-2.0
stori-es/stori_es
dashboard/src/main/java/org/consumersunion/stories/dashboard/client/application/ui/block/BlockBuilder.java
9974
package org.consumersunion.stories.dashboard.client.application.ui.block;

import org.consumersunion.stories.common.shared.model.document.Block;
import org.consumersunion.stories.common.shared.model.document.BlockType;
import org.consumersunion.stories.dashboard.client.application.questionnaire.ui.BlockBuilderPanel;
import org.consumersunion.stories.dashboard.client.resource.Resources;

import com.google.common.collect.Lists;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.dom.client.DivElement;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.HeadingElement;
import com.google.gwt.dom.client.SpanElement;
import com.google.gwt.dom.client.Style;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.query.client.Function;
import com.google.gwt.query.client.GQuery;
import com.google.gwt.query.client.Predicate;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.ui.HTMLPanel;
import com.google.gwt.user.client.ui.IsWidget;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.Widget;

import gwtquery.plugins.draggable.client.events.BeforeDragStartEvent;
import gwtquery.plugins.draggable.client.events.BeforeDragStartEvent.BeforeDragStartEventHandler;
import gwtquery.plugins.draggable.client.events.DragStopEvent;
import gwtquery.plugins.draggable.client.events.DragStopEvent.DragStopEventHandler;
import gwtquery.plugins.draggable.client.gwt.DraggableWidget;

import static com.google.gwt.query.client.GQuery.$;

/**
 * Draggable questionnaire-builder widget wrapping a single {@link Block}.
 * <p>
 * Each builder has two alternating views hosted in {@code switcher}: a preview view
 * and an edit view ({@link HasValidation}). Toolbar icons (duplicate, remove, move)
 * delegate user actions to the injected {@link Handler}. Subclasses supply the
 * concrete block value via {@link #getValue()}.
 */
public abstract class BlockBuilder extends DraggableWidget<Widget> {
    interface Binder extends UiBinder<Widget, BlockBuilder> {
    }

    /** Directions a block can be moved within its parent panel. */
    public enum BlockMoveAction {
        TOP, UP, DOWN, BOTTOM
    }

    /** Callback contract implemented by the panel owning this builder. */
    public interface Handler {
        void setPanel(BlockBuilderPanel blockBuilderPanel);

        void onBeforeEdit(BlockBuilder blockBuilder);

        void onBlockEdited(BlockBuilder blockBuilder);

        void onBlockChanged(BlockBuilder blockBuilder);

        void onBlockRemoved(BlockBuilder blockBuilder);

        void onBlockDuplicated(BlockBuilder blockBuilder);

        void onBlockMove(BlockBuilder blockBuilder, BlockMoveAction action);

        void onBlockAdded(BlockBuilder blockBuilder);
    }

    /**
     * Keeps the dragged element visually stable: widens it to its parent's width and
     * pins it absolutely while dragging, then restores normal flow on drop and
     * notifies the handler that ordering may have changed.
     */
    private class DraggablePositionHandler implements BeforeDragStartEventHandler, DragStopEventHandler {
        public void onBeforeDragStart(BeforeDragStartEvent event) {
            Element draggable = event.getDraggable();
            // Freeze the current rendered width so the element does not collapse
            // once it is taken out of normal flow by position:absolute.
            $(draggable).width($(draggable).parent().width());
            $(draggable).css("position", "absolute");
        }

        public void onDragStop(DragStopEvent event) {
            // Undo every inline style applied for the drag operation.
            $(event.getDraggable())
                    .css("position", "relative")
                    .css("top", null)
                    .css("left", null)
                    .css("width", null)
                    .css("z-index", "auto");
            handler.onBlockChanged(BlockBuilder.this);
        }
    }

    // View-mode discriminators used by switchTo(); boxed Integers compared via equals().
    protected static final Integer PREVIEW = 0;
    protected static final Integer EDITION = 1;

    @UiField
    HTMLPanel block;
    @UiField
    SimplePanel switcher;
    @UiField
    DivElement toolbar;
    @UiField
    SpanElement duplicate;
    @UiField
    SpanElement remove;
    @UiField
    HeadingElement questionType;
    // NOTE(review): Resources injected through UiBinder's ui:with mechanism — TODO confirm.
    @UiField
    Resources resources;
    @UiField
    SpanElement edit;
    @UiField
    Element moveToTop;
    @UiField
    Element moveUp;
    @UiField
    Element moveDown;
    @UiField
    Element moveToBottom;

    protected Handler handler;

    private final HasValidation editView;
    private final DraggablePositionHandler HANDLER = new DraggablePositionHandler();

    private IsWidget previewView;
    private boolean showDuplicate;

    /**
     * @param uiBinder    binder producing this widget's DOM
     * @param previewView read-only rendering of the block
     * @param editView    editable rendering of the block
     * @param editMode    whether to open directly in edit mode (ignored when read-only)
     * @param readOnly    disables dragging and editing entirely
     */
    protected BlockBuilder(
            Binder uiBinder,
            IsWidget previewView,
            HasValidation editView,
            boolean editMode,
            boolean readOnly) {
        this.previewView = previewView;
        this.editView = editView;

        initWidget(uiBinder.createAndBindUi(this));
        setDisabledDrag(readOnly);
        setup();

        if (!readOnly) {
            switchTo(editMode ? EDITION : PREVIEW);
            block.setStyleName(resources.builderStyleCss().blockWrapper());
        } else {
            switchTo(PREVIEW);
            block.setStyleName(resources.builderStyleCss().blockWrapperReadOnly());
        }

        // Delays the init after the parent class is fully created:
        // getValue() is abstract, so it must not run until the subclass constructor finished.
        Scheduler.get().scheduleDeferred(new Scheduler.ScheduledCommand() {
            @Override
            public void execute() {
                questionType.setInnerText(getValue().getBlockType().label());
            }
        });

        bind();
    }

    /** Switches this builder into edit mode. */
    public void edit() {
        switchTo(EDITION);
    }

    /** Enables only the downward move icons (block is currently first). */
    public void allowDownOnly() {
        $(Lists.newArrayList(moveToTop, moveUp)).addClass(resources.builderStyleCss().disabled());
        $(Lists.newArrayList(moveToBottom, moveDown)).removeClass(resources.builderStyleCss().disabled());
    }

    /** Enables only the upward move icons (block is currently last). */
    public void allowUpOnly() {
        $(Lists.newArrayList(moveToBottom, moveDown)).addClass(resources.builderStyleCss().disabled());
        $(Lists.newArrayList(moveToTop, moveUp)).removeClass(resources.builderStyleCss().disabled());
    }

    /** Enables all four move icons. */
    public void allowAll() {
        $(Lists.newArrayList(moveToTop, moveUp, moveToBottom, moveDown))
                .removeClass(resources.builderStyleCss().disabled());
    }

    /** Disables all four move icons. */
    public void disableMove() {
        $(Lists.newArrayList(moveToTop, moveUp, moveToBottom, moveDown))
                .addClass(resources.builderStyleCss().disabled());
    }

    /** @return the document block this builder edits; supplied by the subclass. */
    public abstract Block getValue();

    public void setHandler(Handler handler) {
        this.handler = handler;
    }

    /** @return true when the edit view is currently displayed. */
    public Boolean isOnEditMode() {
        return switcher.getWidget() == editView;
    }

    /** Returns the builder to preview mode. */
    public void resetBlockBuilder() {
        switchTo(PREVIEW);
    }

    /**
     * Swaps the displayed view. EDITION enables dragging, notifies the handler
     * before editing, and conditionally shows the duplicate icon; PREVIEW disables
     * dragging and hides edit-only chrome. The CSS class toggle and toolbar width
     * recalculation are deferred so DOM layout has settled first.
     */
    protected void switchTo(final Integer mode) {
        if (EDITION.equals(mode)) {
            switcher.setWidget(editView);
            setDisabledDrag(false);
            if (handler != null) {
                handler.onBeforeEdit(this);
            }
            $(questionType).css("display", "inline-block");
            showOrHideElement(duplicate, !editView.isNew() && showDuplicate);
        } else if (PREVIEW.equals(mode)) {
            switcher.setWidget(previewView);
            setDisabledDrag(true);
            showOrHideElement(duplicate, false);
            $(questionType).css("display", "none");
        }
        Scheduler.get().scheduleDeferred(new Scheduler.ScheduledCommand() {
            @Override
            public void execute() {
                $(block).toggleClass(resources.builderStyleCss().blockWrapperEdit(), EDITION.equals(mode));
                // Reserve horizontal space for the visible toolbar icons.
                $(questionType).parent().width("calc(100% - " + getToolbarWidth() + "px)");
            }
        });
    }

    protected void setShowDuplicate(boolean showDuplicate) {
        this.showDuplicate = showDuplicate;
    }

    protected void showRemove(boolean showRemove) {
        showOrHideElement(remove, showRemove);
    }

    protected void setPreviewView(IsWidget previewView) {
        this.previewView = previewView;
    }

    /**
     * Width in pixels consumed by the visible toolbar icons: 28px per visible child,
     * excluding one (presumably the heading itself — TODO confirm).
     */
    private int getToolbarWidth() {
        return ($(toolbar).children().filter(new Predicate() {
            @Override
            public boolean f(Element e, int index) {
                return !e.getStyle().getDisplay().equalsIgnoreCase(Style.Display.NONE.getCssName());
            }
        }).length() - 1) * 28;
    }

    private void showOrHideElement(Element element, boolean show) {
        if (show) {
            $(element).show();
        } else {
            $(element).hide();
        }
    }

    /** Wires DOM events: click-to-edit on the block, toolbar icons, and move actions. */
    private void bind() {
        block.sinkEvents(Event.ONCLICK);
        block.addHandler(new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
                // Only enter edit mode when not already editing/read-only and the
                // click did not land on an interactive child element.
                if (canEdit($(block)) && isValidClickTarget($(event.getNativeEvent().getEventTarget()))) {
                    switchTo(EDITION);
                }
            }
        }, ClickEvent.getType());

        $(duplicate).click(new Function() {
            @Override
            public void f() {
                handler.onBlockDuplicated(BlockBuilder.this);
            }
        });
        $(remove).click(new Function() {
            @Override
            public void f() {
                handler.onBlockRemoved(BlockBuilder.this);
            }
        });

        bindMove(moveToTop, BlockMoveAction.TOP);
        bindMove(moveUp, BlockMoveAction.UP);
        bindMove(moveDown, BlockMoveAction.DOWN);
        bindMove(moveToBottom, BlockMoveAction.BOTTOM);
    }

    private boolean canEdit(GQuery block) {
        return !block.hasClass(resources.builderStyleCss().blockWrapperEdit())
                && !block.hasClass(resources.builderStyleCss().blockWrapperReadOnly());
    }

    // NOTE(review): the last selector looks truncated — "[class*=\"icon-star\"" is
    // missing its closing "]"; verify against gquery's selector parser before changing.
    private boolean isValidClickTarget(GQuery gQuery) {
        return !gQuery.is("input", "textarea", "button", "select", "div[role=button]", "[class*=\"icon-star\"");
    }

    /** Routes a click on a move icon to the handler unless the icon is disabled. */
    private void bindMove(Element element, final BlockMoveAction action) {
        $(element).click(new Function() {
            @Override
            public void f() {
                if (!$(getElement()).hasClass(resources.builderStyleCss().disabled())) {
                    handler.onBlockMove(BlockBuilder.this, action);
                }
            }
        });
    }

    /** Configures drag behavior: opacity, z-index, position handlers, drag handle. */
    private void setup() {
        setDraggingOpacity(new Float(0.8));
        setDraggingZIndex(1000);
        addBeforeDragHandler(HANDLER);
        addDragStopHandler(HANDLER);
        setHandle("." + resources.builderStyleCss().blockHeader());
    }
}
apache-2.0
BCGDV-ASIA/android-widget-connectpattern
connectpattern/src/test/java/com/bcgdv/asia/lib/connectpattern/ExampleUnitTest.java
326
package com.bcgdv.asia.lib.connectpattern; import org.junit.Test; import static org.junit.Assert.*; /** * To work on unit tests, switch the Test Artifact in the Build Variants view. */ public class ExampleUnitTest { @Test public void addition_isCorrect() throws Exception { assertEquals(4, 2 + 2); } }
apache-2.0
pabloalba/universidadvirtual
UniversidadVirtual/src/main/java/net/kaleidos/universidadvirtual/SplashActivity.java
3805
package net.kaleidos.universidadvirtual;

import android.content.Intent;

import org.andengine.engine.camera.Camera;
import org.andengine.engine.handler.IUpdateHandler;
import org.andengine.engine.options.EngineOptions;
import org.andengine.engine.options.ScreenOrientation;
import org.andengine.engine.options.resolutionpolicy.RatioResolutionPolicy;
import org.andengine.entity.scene.Scene;
import org.andengine.entity.sprite.Sprite;
import org.andengine.entity.util.FPSLogger;
import org.andengine.opengl.texture.bitmap.BitmapTexture;
import org.andengine.opengl.texture.region.ITextureRegion;
import org.andengine.opengl.texture.region.TextureRegionFactory;
import org.andengine.ui.activity.SimpleBaseGameActivity;
import org.andengine.util.adt.io.in.IInputStreamOpener;

import java.io.IOException;
import java.io.InputStream;

/**
 * Splash screen: shows a fullscreen background sprite for ~3 seconds, then
 * navigates to {@link MainMenuActivity} and finishes itself.
 */
public class SplashActivity extends SimpleBaseGameActivity {
    // ===========================================================
    // Constants
    // ===========================================================
    private static final int CAMERA_WIDTH = 800;
    private static final int CAMERA_HEIGHT = 480;
    /** Seconds the splash stays on screen before navigating away. */
    private static final float SPLASH_DURATION_SECONDS = 3f;

    // ===========================================================
    // Fields
    // ===========================================================
    protected Camera mCamera;
    protected Scene mMainScene;
    private ITextureRegion mBackgroundTextureRegion;
    // Accumulated scene time in seconds, driven by onUpdate().
    private float totalTime = 0;
    // Guards against launching MainMenuActivity more than once: onUpdate can
    // keep ticking for a few frames after startActivity()/finish() is called.
    private boolean navigated = false;

    // ===========================================================
    // Methods for/from SuperClass/Interfaces
    // ===========================================================
    @Override
    public EngineOptions onCreateEngineOptions() {
        this.mCamera = new Camera(0, 0, CAMERA_WIDTH, CAMERA_HEIGHT);
        return new EngineOptions(true, ScreenOrientation.LANDSCAPE_FIXED,
                new RatioResolutionPolicy(CAMERA_WIDTH, CAMERA_HEIGHT), this.mCamera);
    }

    @Override
    public void onCreateResources() {
        final BitmapTexture backgroundTexture;
        try {
            backgroundTexture = new BitmapTexture(this.getTextureManager(), new IInputStreamOpener() {
                @Override
                public InputStream open() throws IOException {
                    return getResources().openRawResource(R.drawable.universidadvirtual);
                }
            });
        } catch (IOException e) {
            // Previously the exception was only printed and a null texture was
            // dereferenced right below, crashing with an uninformative NPE.
            // Fail fast with the real cause instead.
            throw new RuntimeException("Unable to load splash background texture", e);
        }

        this.mBackgroundTextureRegion = TextureRegionFactory.extractFromTexture(backgroundTexture);
        backgroundTexture.load();
    }

    @Override
    public Scene onCreateScene() {
        this.mEngine.registerUpdateHandler(new FPSLogger());
        this.mMainScene = new Scene();

        this.mMainScene.registerUpdateHandler(new IUpdateHandler() {
            public void reset() {
            }

            public void onUpdate(float pSecondsElapsed) {
                totalTime += pSecondsElapsed;
                if (totalTime > SPLASH_DURATION_SECONDS && !navigated) {
                    navigated = true;
                    SplashActivity.this.startActivity(new Intent(SplashActivity.this, MainMenuActivity.class));
                    SplashActivity.this.finish();
                }
            }
        });

        /* No background color needed as we have a fullscreen background sprite. */
        this.mMainScene.setBackgroundEnabled(false);
        this.mMainScene.attachChild(
                new Sprite(0, 0, this.mBackgroundTextureRegion, this.getVertexBufferObjectManager()));
        return this.mMainScene;
    }
}
apache-2.0
dmssargent/reimagined-enigma-bot
hardware/src/test/java/org/ftccommunity/hardware/ExampleUnitTest.java
318
package org.ftccommunity.hardware; import org.junit.Test; import static org.junit.Assert.*; /** * To work on unit tests, switch the Test Artifact in the Build Variants view. */ public class ExampleUnitTest { @Test public void addition_isCorrect() throws Exception { assertEquals(4, 2 + 2); } }
apache-2.0
toby1984/boolean-algebra
src/main/java/de/codesourcery/booleanalgebra/ParseContext.java
1391
package de.codesourcery.booleanalgebra; import java.util.Set; import de.codesourcery.booleanalgebra.ast.ASTNode; import de.codesourcery.booleanalgebra.ast.Identifier; import de.codesourcery.booleanalgebra.lexer.Lexer; public class ParseContext extends Lexer implements IParseContext { private final IExpressionContext ctx; public ParseContext(IScanner scanner,IExpressionContext ctx) { super(scanner); if ( ctx == null ) { throw new IllegalArgumentException("ctx must not be null"); } this.ctx = ctx; } @Override public ASTNode lookup(Identifier identifier) { return ctx.lookup( identifier ); } @Override public ASTNode tryLookup(Identifier identifier) { return ctx.tryLookup(identifier); } @Override public void set(Identifier name, ASTNode value) { ctx.set( name , value ); } @Override public Set<Identifier> getAllIdentifiers() { return ctx.getAllIdentifiers(); } @Override public void retainOnly(Set<Identifier> ids) { ctx.retainOnly( ids ); } @Override public void remove(Identifier identifier) { ctx.remove( identifier ); } @Override public IExpressionContext createCopy() { return ctx.createCopy(); } @Override public void clear() { ctx.clear(); } @Override public Identifier createIdentifier(ASTNode value) { return ctx.createIdentifier( value ); } }
apache-2.0
sxxlearn2rock/DesignPatterns_JavaMyLife
src/cn/sxx/dp/strategy/MemberStrategy.java
104
package cn.sxx.dp.strategy; public interface MemberStrategy { double calcPrice(double originPrice); }
apache-2.0
MissionCriticalCloud/cosmic-plugin-hypervisor-kvm
src/main/java/com/cloud/hypervisor/kvm/resource/wrapper/LibvirtRequestWrapper.java
1981
package com.cloud.hypervisor.kvm.resource.wrapper; import java.util.Hashtable; import java.util.Set; import com.cloud.agent.api.Answer; import com.cloud.agent.api.Command; import com.cloud.hypervisor.kvm.resource.LibvirtComputingResource; import com.cloud.resource.CommandWrapper; import com.cloud.resource.RequestWrapper; import com.cloud.resource.ServerResource; import org.reflections.Reflections; public class LibvirtRequestWrapper extends RequestWrapper { private static LibvirtRequestWrapper instance; static { instance = new LibvirtRequestWrapper(); } Reflections baseWrappers = new Reflections("com.cloud.hypervisor.kvm.resource.wrapper"); @SuppressWarnings("rawtypes") Set<Class<? extends CommandWrapper>> baseSet = baseWrappers.getSubTypesOf(CommandWrapper.class); private LibvirtRequestWrapper() { init(); } @SuppressWarnings("rawtypes") private void init() { // LibvirtComputingResource commands final Hashtable<Class<? extends Command>, CommandWrapper> libvirtCommands = processAnnotations(baseSet); resources.put(LibvirtComputingResource.class, libvirtCommands); } public static LibvirtRequestWrapper getInstance() { return instance; } @SuppressWarnings({ "rawtypes" }) @Override public Answer execute(final Command command, final ServerResource serverResource) { final Class<? extends ServerResource> resourceClass = serverResource.getClass(); final Hashtable<Class<? extends Command>, CommandWrapper> resourceCommands = retrieveResource(command, resourceClass); CommandWrapper<Command, Answer, ServerResource> commandWrapper = retrieveCommands(command.getClass(), resourceCommands); while (commandWrapper == null) { // Could not find the command in the given resource, will traverse the family tree. commandWrapper = retryWhenAllFails(command, resourceClass, resourceCommands); } return commandWrapper.execute(command, serverResource); } }
apache-2.0
phax/ph-commons
ph-commons/src/main/java/com/helger/commons/url/URLParameterDecoder.java
1510
/* * Copyright (C) 2014-2022 Philip Helger (www.helger.com) * philip[at]helger[dot]com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.helger.commons.url; import java.nio.charset.Charset; import javax.annotation.Nonnull; import javax.annotation.Nullable; import com.helger.commons.ValueEnforcer; import com.helger.commons.codec.IDecoder; /** * Decoder for URL parameters * * @author Philip Helger */ public class URLParameterDecoder implements IDecoder <String, String> { private final Charset m_aCharset; public URLParameterDecoder (@Nonnull final Charset aCharset) { m_aCharset = ValueEnforcer.notNull (aCharset, "Charset"); } /** * @return The charset passed in the constructor. Never <code>null</code>. * @since 9.4.1 */ @Nonnull public final Charset getCharset () { return m_aCharset; } @Nullable public String getDecoded (@Nullable final String sInput) { return URLHelper.urlDecodeOrNull (sInput, m_aCharset); } }
apache-2.0
berinle/jawr-core
src/main/java/net/jawr/web/servlet/JawrServlet.java
2979
/**
 * Copyright 2007-2012 Jordi Hernández Sellés, Ibrahim Chaehoi
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of the License at
 *
 * 	http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package net.jawr.web.servlet;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import net.jawr.web.JawrConstant;

import org.apache.log4j.Logger;

/**
 *
 * Main Jawr servlet. Maps logical URLs to script bundles, which are generated on the fly (may
 * be cached), and served as a single file.
 *
 * @author Jordi Hernández Sellés
 * @author Ibrahim Chaehoi
 */
public class JawrServlet extends HttpServlet {

	/** The serial version UID */
	private static final long serialVersionUID = -4551240917172286444L;

	/** The logger */
	private static final Logger LOGGER = Logger.getLogger(JawrServlet.class);

	/** The request handler to which all GET requests are delegated. */
	protected JawrRequestHandler requestHandler;

	/**
	 * Creates the request handler, choosing the image handler when the servlet's
	 * "type" init-parameter equals the image type, else the default handler.
	 *
	 * @throws ServletException if handler creation fails
	 */
	@Override
	public void init() throws ServletException {
		try {
			String type = getServletConfig().getInitParameter(JawrConstant.TYPE_INIT_PARAMETER);
			if (JawrConstant.IMG_TYPE.equals(type)) {
				requestHandler = new JawrImageRequestHandler(getServletContext(), getServletConfig());
			} else {
				requestHandler = new JawrRequestHandler(getServletContext(), getServletConfig());
			}
		} catch (ServletException e) {
			logInitFailure(e);
			throw e;
		} catch (RuntimeException e) {
			logInitFailure(e);
			// Wrap unchecked failures so the container sees a ServletException.
			throw new ServletException(e);
		}
	}

	/**
	 * Logs a fatal initialization failure with its cause.
	 * (Extracted: both catch blocks previously duplicated this logging verbatim.)
	 *
	 * @param e the initialization failure
	 */
	private void logInitFailure(Throwable e) {
		LOGGER.fatal("Jawr servlet with name " + getServletConfig().getServletName()
				+ " failed to initialize properly. ");
		LOGGER.fatal("Cause:");
		// Pass the throwable itself so the full stack trace is logged,
		// not just the (possibly null) message.
		LOGGER.fatal(e.getMessage(), e);
	}

	/**
	 * Delegates all GET requests to the request handler.
	 */
	@Override
	protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		requestHandler.doGet(req, resp);
	}

	/**
	 * Releases the request handler's resources on servlet shutdown.
	 */
	@Override
	public void destroy() {
		super.destroy();
		requestHandler.destroy();
	}
}
apache-2.0
PinaeOS/simba
src/test/java/org/pinae/simba/context/AopTest.java
704
package org.pinae.simba.context; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import org.junit.Test; import org.pinae.simba.TestConstant; import org.pinae.simba.aop.pointcut.resource.ITarget; public class AopTest { private ResourceContext bean = null; @Test public void testNameMatcherPointcutAdvisor() { try { bean = new FileSystemResourceContext(TestConstant.TEST_AOP_XML); ITarget target = (ITarget)bean.getBean("NameMatcherPointcutAdvisorTest"); assertEquals(target.sayHello("Hui"), "Hello Hui"); assertEquals(target.sayHello(21), "21 years old"); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } }
apache-2.0
mariusj/org.openntf.domino
domino/externals/guava/src/main/java/com/google/common/collect/AbstractSortedKeySortedSetMultimap.java
1689
/* * Copyright (C) 2012 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import com.google.common.annotations.GwtCompatible; import java.util.Collection; import java.util.SortedMap; import java.util.SortedSet; /** * Basic implementation of a {@link SortedSetMultimap} with a sorted key set. * * This superclass allows {@code TreeMultimap} to override methods to return * navigable set and map types in non-GWT only, while GWT code will inherit the * SortedMap/SortedSet overrides. * * @author Louis Wasserman */ @GwtCompatible abstract class AbstractSortedKeySortedSetMultimap<K, V> extends AbstractSortedSetMultimap<K, V> { AbstractSortedKeySortedSetMultimap(SortedMap<K, Collection<V>> map) { super(map); } @Override public SortedMap<K, Collection<V>> asMap() { return (SortedMap<K, Collection<V>>) super.asMap(); } @Override SortedMap<K, Collection<V>> backingMap() { return (SortedMap<K, Collection<V>>) super.backingMap(); } @Override public SortedSet<K> keySet() { return (SortedSet<K>) super.keySet(); } }
apache-2.0
growbit/turbogwt-http
src/main/java/org/turbogwt/net/http/client/RequestFilter.java
1059
/* * Copyright 2014 Grow Bit * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.turbogwt.net.http.client; /** * An extension interface implemented by request filters. * Request filters are intended to manipulate the request before it is sent to the server. * * @author Danilo Reinert */ public interface RequestFilter { /** * Filter method called before a request has been dispatched to a client transport layer. * * @param request The request to be dispatched. */ void filter(Request request); }
apache-2.0
jwcarman/akka-introduction
java/src/main/java/com/carmanconsulting/akka/voting/msg/VoteCount.java
1167
package com.carmanconsulting.akka.voting.msg; public class VoteCount { //---------------------------------------------------------------------------------------------------------------------- // Fields //---------------------------------------------------------------------------------------------------------------------- private final String candidate; private final int count; //---------------------------------------------------------------------------------------------------------------------- // Constructors //---------------------------------------------------------------------------------------------------------------------- public VoteCount(String candidate, int count) { this.candidate = candidate; this.count = count; } //---------------------------------------------------------------------------------------------------------------------- // Getter/Setter Methods //---------------------------------------------------------------------------------------------------------------------- public String getCandidate() { return candidate; } public int getCount() { return count; } }
apache-2.0
mikeb01/Aeron
aeron-cluster/src/main/java/io/aeron/cluster/LogReplication.java
6131
/*
 * Copyright 2014-2022 Real Logic Limited.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.aeron.cluster;

import io.aeron.archive.client.AeronArchive;
import io.aeron.archive.codecs.RecordingSignal;
import io.aeron.archive.status.RecordingPos;
import io.aeron.cluster.client.ClusterException;
import io.aeron.exceptions.AeronException;
import org.agrona.concurrent.status.CountersReader;

import static io.aeron.archive.client.AeronArchive.NULL_POSITION;
import static org.agrona.concurrent.status.CountersReader.NULL_COUNTER_ID;

/**
 * Tracks a single archive-to-archive log replication up to a target stop position.
 * <p>
 * The replication is started in the constructor via {@link AeronArchive#replicate};
 * progress is observed through recording signals ({@link #onSignal}) and the
 * destination recording-position counter, and is polled via {@link #isDone}.
 * A {@link ClusterException} is raised if no progress is made within the
 * configured timeout.
 */
final class LogReplication
{
    // Correlation id of the replication session started in the constructor.
    private final long replicationId;
    // Position at which replication is expected to stop.
    private final long stopPosition;
    private final long progressCheckTimeoutNs;
    private final long progressCheckIntervalNs;

    // Counter id of the destination recording position; NULL_COUNTER_ID until
    // an EXTEND signal reveals it.
    private int recordingPositionCounterId = NULL_COUNTER_ID;
    // Destination recording id, learned from the first matching signal.
    private long recordingId;
    // Last observed replicated position; NULL_POSITION until a signal arrives.
    private long position = NULL_POSITION;
    // Deadline by which some progress must have been observed.
    private long progressDeadlineNs;
    // Next time at which the recording-position counter should be sampled.
    private long progressCheckDeadlineNs;

    private final AeronArchive archive;
    // Retained for diagnostics only (see toString()).
    private RecordingSignal lastRecordingSignal = RecordingSignal.NULL_VAL;
    // Set when a STOP signal is received or close() stops the replication.
    private boolean isStopped = false;

    LogReplication(
        final AeronArchive archive,
        final long srcRecordingId,
        final long dstRecordingId,
        final long stopPosition,
        final String srcArchiveChannel,
        final String replicationChannel,
        final long progressCheckTimeoutNs,
        final long progressCheckIntervalNs,
        final long nowNs)
    {
        this.archive = archive;
        this.stopPosition = stopPosition;
        this.progressCheckTimeoutNs = progressCheckTimeoutNs;
        this.progressCheckIntervalNs = progressCheckIntervalNs;
        this.progressDeadlineNs = nowNs + progressCheckTimeoutNs;
        this.progressCheckDeadlineNs = nowNs + progressCheckIntervalNs;

        replicationId = archive.replicate(
            srcRecordingId,
            dstRecordingId,
            stopPosition,
            archive.context().controlRequestStreamId(),
            srcArchiveChannel,
            null,
            replicationChannel);
    }

    /**
     * Polls completion: done once the stop position is reached AND the replication
     * has stopped. Samples the recording-position counter at the configured
     * interval, extending the progress deadline whenever the position advances.
     *
     * @param nowNs current time in nanoseconds
     * @return true when replication has completed
     * @throws ClusterException if the position overshoots the stop position, or if
     * no progress (or no stop) is seen before the progress deadline
     */
    boolean isDone(final long nowNs)
    {
        if (position == stopPosition && isStopped)
        {
            return true;
        }

        if (position > stopPosition)
        {
            throw new ClusterException("log replication has progressed past stopPosition: " + this);
        }

        if (nowNs >= progressCheckDeadlineNs)
        {
            progressCheckDeadlineNs = nowNs + progressCheckIntervalNs;

            if (NULL_COUNTER_ID != recordingPositionCounterId)
            {
                final CountersReader counters = archive.context().aeron().countersReader();
                final long recordingPosition = counters.getCounterValue(recordingPositionCounterId);

                // Only count it as progress while the counter is still active for
                // our recording and the position actually moved forward.
                if (RecordingPos.isActive(counters, recordingPositionCounterId, recordingId) &&
                    recordingPosition > position)
                {
                    position = recordingPosition;
                    progressDeadlineNs = nowNs + progressCheckTimeoutNs;
                }
            }
        }

        if (nowNs >= progressDeadlineNs)
        {
            // Distinguish "stalled mid-replication" (warning) from
            // "reached stop position but never received STOP" (error).
            if (position < stopPosition)
            {
                throw new ClusterException(
                    "log replication has not progressed: " + this, AeronException.Category.WARN);
            }
            else
            {
                throw new ClusterException("log replication failed to stop: " + this);
            }
        }

        return false;
    }

    /** @return last observed replicated position (NULL_POSITION before any signal). */
    long position()
    {
        return position;
    }

    /** @return destination recording id learned from signals. */
    long recordingId()
    {
        return recordingId;
    }

    /**
     * Attempts to stop the replication if it has not already stopped.
     * Failures are rethrown as a WARN-category {@link ClusterException}.
     */
    void close()
    {
        if (!isStopped)
        {
            try
            {
                isStopped = true;
                archive.tryStopReplication(replicationId);
            }
            catch (final Exception ex)
            {
                throw new ClusterException("failed to stop log replication", ex, AeronException.Category.WARN);
            }
        }
    }

    /**
     * Handles archive recording signals, ignoring those not matching our
     * replication's correlation id. EXTEND resolves the recording-position
     * counter; DELETE is fatal; STOP marks completion. The recording id,
     * position and last signal are recorded for every matching signal.
     */
    void onSignal(final long correlationId, final long recordingId, final long position, final RecordingSignal signal)
    {
        if (correlationId == replicationId)
        {
            switch (signal)
            {
                case EXTEND:
                    final CountersReader counters = archive.context().aeron().countersReader();
                    recordingPositionCounterId = RecordingPos.findCounterIdByRecording(counters, recordingId);
                    break;

                case DELETE:
                    throw new ClusterException("recording was deleted during replication: " + this);

                case STOP:
                    isStopped = true;
                    break;
            }

            this.recordingId = recordingId;
            this.position = position;
            this.lastRecordingSignal = signal;
        }
    }

    public String toString()
    {
        return "LogReplication{" +
            "replicationId=" + replicationId +
            ", recordingPositionCounterId=" + recordingPositionCounterId +
            ", recordingId=" + recordingId +
            ", position=" + position +
            ", stopPosition=" + stopPosition +
            ", stopped=" + isStopped +
            ", lastRecordingSignal=" + lastRecordingSignal +
            ", progressDeadlineNs=" + progressDeadlineNs +
            ", progressCheckDeadlineNs=" + progressCheckDeadlineNs +
            ", progressCheckTimeoutNs=" + progressCheckTimeoutNs +
            ", progressCheckIntervalNs=" + progressCheckIntervalNs +
            '}';
    }
}
apache-2.0
txazo/txazo
txazo-application/txazo-blog/src/test/java/org/txazo/blog/module/blog/service/BlogServiceTest.java
791
package org.txazo.blog.module.blog.service;

import org.junit.Assert;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.txazo.blog.SpringAbstractTest;
import org.txazo.blog.module.blog.bean.Blog;

/**
 * Integration test for {@link BlogService#addBlog(Blog)} running against the
 * Spring context provided by {@link SpringAbstractTest}.
 *
 * <p>Note: {@code BlogService} lives in this same package, so no import is
 * needed for it (the original redundant import was removed).</p>
 *
 * @author txazo
 * @email txazo1218@163.com
 * @since 10.08.2015
 */
public class BlogServiceTest extends SpringAbstractTest {

    /** Service under test, injected from the Spring test context. */
    @Autowired
    private BlogService blogService;

    @Test
    public void testAddBlog() {
        // Build a minimal blog owned by user 1 in catalog 2 with two tags.
        Blog blog = new Blog();
        blog.setUserId(1);
        blog.setCatalogId(2);
        blog.setTitle("title");
        blog.setTags("1,2");
        blog.setIsPublic(1);
        // addBlog is expected to persist the blog and report success.
        Assert.assertTrue(blogService.addBlog(blog));
    }
}
apache-2.0
dfish3r/vt-crypt
src/main/java/edu/vt/middleware/crypt/x509/types/GeneralName.java
1977
/*
  $Id$

  Copyright (C) 2003-2013 Virginia Tech.
  All rights reserved.

  SEE LICENSE FOR MORE INFORMATION

  Author:  Middleware Services
  Email:   middleware@vt.edu
  Version: $Revision$
  Updated: $Date$
*/
package edu.vt.middleware.crypt.x509.types;

/**
 * Representation of the <code>GeneralName</code> type defined in section
 * 4.2.1.7 of RFC 2459. A general name is an immutable (name, type) pair;
 * both components are required to be non-null.
 *
 * @author Middleware Services
 * @version $Revision$
 */
public class GeneralName
{

  /** Hash code scale factor. */
  private static final int HASH_FACTOR = 31;

  /** Name. */
  private String name;

  /** Type of general name. */
  private GeneralNameType type;


  /**
   * Creates a new instance with the given name and type.
   *
   * @param nameString String representation of name. Must not be null.
   * @param nameType Type of general name. Must not be null.
   *
   * @throws IllegalArgumentException if either argument is null.
   */
  public GeneralName(final String nameString, final GeneralNameType nameType)
  {
    if (nameString == null) {
      throw new IllegalArgumentException("Name cannot be null.");
    }
    // Fail fast here: a null type would otherwise only surface later as an
    // NPE inside hashCode() (type.ordinal()) or equals(), far from the cause.
    if (nameType == null) {
      throw new IllegalArgumentException("Type cannot be null.");
    }
    name = nameString;
    type = nameType;
  }


  /** @return String representation of name. */
  public String getName()
  {
    return name;
  }


  /** @return Type of name. */
  public GeneralNameType getType()
  {
    return type;
  }


  /** @return Value of {@link #getName()}. */
  @Override
  public String toString()
  {
    return name;
  }


  /** {@inheritDoc} */
  @Override
  public boolean equals(final Object obj)
  {
    boolean result;
    if (obj == this) {
      result = true;
    } else if (obj == null || obj.getClass() != getClass()) {
      result = false;
    } else {
      // Both fields are guaranteed non-null by the constructor.
      final GeneralName other = (GeneralName) obj;
      result = other.getName().equals(name) && other.getType().equals(type);
    }
    return result;
  }


  /** {@inheritDoc} */
  @Override
  public int hashCode()
  {
    int hash = getClass().hashCode();
    hash = HASH_FACTOR * hash + type.ordinal();
    hash = HASH_FACTOR * hash + name.hashCode();
    return hash;
  }
}
apache-2.0
harfalm/Sakai-10.1
entitybroker/mocks/src/java/org/sakaiproject/entitybroker/mocks/ActionsExecutionEntityProviderMock.java
4100
/** * $Id: ActionsExecutionEntityProviderMock.java 105077 2012-02-24 22:54:29Z ottenhoff@longsight.com $ * $URL: https://source.sakaiproject.org/svn/entitybroker/tags/sakai-10.1/mocks/src/java/org/sakaiproject/entitybroker/mocks/ActionsExecutionEntityProviderMock.java $ * RESTfulEntityProviderMock.java - entity-broker - Apr 9, 2008 10:31:13 AM - azeckoski ************************************************************************** * Copyright (c) 2008 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.sakaiproject.entitybroker.mocks; import java.io.OutputStream; import java.util.Map; import org.sakaiproject.entitybroker.EntityView; import org.sakaiproject.entitybroker.entityprovider.CoreEntityProvider; import org.sakaiproject.entitybroker.entityprovider.capabilities.ActionsExecutable; import org.sakaiproject.entitybroker.entityprovider.capabilities.ActionsExecutionControllable; import org.sakaiproject.entitybroker.entityprovider.capabilities.CRUDable; import org.sakaiproject.entitybroker.entityprovider.capabilities.CollectionResolvable; import org.sakaiproject.entitybroker.entityprovider.capabilities.Resolvable; import org.sakaiproject.entitybroker.entityprovider.extension.ActionReturn; import org.sakaiproject.entitybroker.entityprovider.extension.CustomAction; import org.sakaiproject.entitybroker.entityprovider.extension.EntityData; import org.sakaiproject.entitybroker.mocks.data.MyEntity; /** * Stub class to make it possible to test the {@link ActionsExecutable} capabilities, will perform like the * actual class so it can be reliably used for testing<br/> * Will perform all {@link CRUDable} operations as well as allowing for internal data output processing<br/> * Returns {@link MyEntity} objects<br/> * Allows for testing {@link Resolvable} and {@link CollectionResolvable} as well, returns 2 {@link MyEntity} objects * if no search restrictions, 1 if "stuff" property is set, none if other properties are set * * @author Aaron Zeckoski (aaron@caret.cam.ac.uk) */ public class ActionsExecutionEntityProviderMock extends CRUDableEntityProviderMock implements CoreEntityProvider, ActionsExecutionControllable { public ActionsExecutionEntityProviderMock(String prefix, String[] ids) { super(prefix, ids); } public CustomAction[] defineActions() { return new CustomAction[] { new CustomAction("double", EntityView.VIEW_SHOW), // return the object with the number doubled new CustomAction("xxx", EntityView.VIEW_EDIT), // change all text fields to 3 x's new 
CustomAction("clear", EntityView.VIEW_LIST) // remove all items }; } public Object executeActions(EntityView entityView, String action, Map<String, Object> actionParams, OutputStream outputStream) { Object result = null; if ("double".equals(action)) { result = myDoubleAction(entityView); } else if ("xxx".equals(action)) { MyEntity me = (MyEntity) getEntity(entityView.getEntityReference()); me.extra = "xxx"; me.setStuff("xxx"); myEntities.put(me.getId(), me); } else if ("clear".equals(action)) { myEntities.clear(); } return result; } private Object myDoubleAction(EntityView view) { MyEntity me = (MyEntity) getEntity(view.getEntityReference()); MyEntity togo = me.copy(); togo.setNumber( togo.getNumber() * 2 ); return new ActionReturn(new EntityData(view.getEntityReference().toString(), togo.getStuff(), togo), (String)null); } }
apache-2.0
pepperonas/AndroidDemos
obd-bt/src/main/java/io/celox/androiddemos/obd_bt/MainActivity.java
945
/* * Copyright (c) 2017 Martin Pfeffer * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.celox.androiddemos.obd_bt; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; public class MainActivity extends AppCompatActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); } }
apache-2.0
ScaCap/spring-auto-restdocs
spring-auto-restdocs-core/src/test/java/capital/scalable/restdocs/payload/JacksonRequestFieldSnippetTest.java
14591
/*- * #%L * Spring Auto REST Docs Core * %% * Copyright (C) 2015 - 2021 Scalable Capital GmbH * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package capital.scalable.restdocs.payload; import static capital.scalable.restdocs.SnippetRegistry.AUTO_REQUEST_FIELDS; import static com.fasterxml.jackson.annotation.JsonProperty.Access.READ_ONLY; import static com.fasterxml.jackson.annotation.JsonProperty.Access.WRITE_ONLY; import static com.fasterxml.jackson.annotation.JsonTypeInfo.As.PROPERTY; import static com.fasterxml.jackson.annotation.JsonTypeInfo.Id.NAME; import static java.util.Collections.singletonList; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import javax.validation.constraints.NotBlank; import javax.validation.constraints.Size; import java.util.List; import capital.scalable.restdocs.constraints.ConstraintReader; import capital.scalable.restdocs.jackson.SardObjectMapper; import capital.scalable.restdocs.javadoc.JavadocReader; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.springframework.restdocs.AbstractSnippetTests; import 
org.springframework.restdocs.snippet.SnippetException; import org.springframework.restdocs.templates.TemplateFormat; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.method.HandlerMethod; public class JacksonRequestFieldSnippetTest extends AbstractSnippetTests { private ObjectMapper mapper; private JavadocReader javadocReader; private ConstraintReader constraintReader; @Rule public ExpectedException thrown = ExpectedException.none(); public JacksonRequestFieldSnippetTest(String name, TemplateFormat templateFormat) { super(name, templateFormat); } @Before public void setup() { mapper = new SardObjectMapper(new ObjectMapper()); mapper.setVisibility(mapper.getSerializationConfig().getDefaultVisibilityChecker() .withFieldVisibility(JsonAutoDetect.Visibility.ANY)); javadocReader = mock(JavadocReader.class); constraintReader = mock(ConstraintReader.class); } @Test public void simpleRequest() throws Exception { HandlerMethod handlerMethod = createHandlerMethod("addItem", Item.class); mockFieldComment(Item.class, "field1", "A string"); mockFieldComment(Item.class, "field2", "An integer"); mockOptionalMessage(Item.class, "field1", "false"); mockConstraintMessage(Item.class, "field2", "A constraint"); new JacksonRequestFieldSnippet().document(operationBuilder .attribute(HandlerMethod.class.getName(), handlerMethod) .attribute(ObjectMapper.class.getName(), mapper) .attribute(JavadocReader.class.getName(), javadocReader) .attribute(ConstraintReader.class.getName(), constraintReader) .build()); assertThat(this.generatedSnippets.snippet(AUTO_REQUEST_FIELDS)).is( tableWithHeader("Path", "Type", "Optional", "Description") .row("field1", "String", "false", "A string.") .row("field2", "Integer", "true", "An integer.\n\nA constraint.")); } @Test public void simpleRequestWithEnum() throws Exception { HandlerMethod handlerMethod = createHandlerMethod("addItemWithWeight", ItemWithWeight.class); mockFieldComment(ItemWithWeight.class, "weight", 
"An enum"); mockConstraintMessage(ItemWithWeight.class, "weight", "Must be one of [LIGHT, HEAVY]"); new JacksonRequestFieldSnippet().document(operationBuilder .attribute(HandlerMethod.class.getName(), handlerMethod) .attribute(ObjectMapper.class.getName(), mapper) .attribute(JavadocReader.class.getName(), javadocReader) .attribute(ConstraintReader.class.getName(), constraintReader) .build()); assertThat(this.generatedSnippets.snippet(AUTO_REQUEST_FIELDS)).is( tableWithHeader("Path", "Type", "Optional", "Description") .row("weight", "String", "true", "An enum.\n\nMust be one of [LIGHT, HEAVY].")); } @Test public void noRequestBody() throws Exception { HandlerMethod handlerMethod = new HandlerMethod(new TestResource(), "addItem2"); new JacksonRequestFieldSnippet().document(operationBuilder .attribute(HandlerMethod.class.getName(), handlerMethod) .attribute(ObjectMapper.class.getName(), mapper) .build()); assertThat(this.generatedSnippets.snippet(AUTO_REQUEST_FIELDS)).isEqualTo("No request body."); } @Test public void noHandlerMethod() throws Exception { new JacksonRequestFieldSnippet().document(operationBuilder .attribute(ObjectMapper.class.getName(), mapper) .build()); assertThat(this.generatedSnippets.snippet(AUTO_REQUEST_FIELDS)).isEqualTo("No request body."); } @Test public void listRequest() throws Exception { HandlerMethod handlerMethod = createHandlerMethod("addItems", List.class); mockFieldComment(Item.class, "field1", "A string"); mockFieldComment(Item.class, "field2", "An integer"); new JacksonRequestFieldSnippet().document(operationBuilder .attribute(HandlerMethod.class.getName(), handlerMethod) .attribute(ObjectMapper.class.getName(), mapper) .attribute(JavadocReader.class.getName(), javadocReader) .attribute(ConstraintReader.class.getName(), mock(ConstraintReader.class)) .build()); assertThat(this.generatedSnippets.snippet(AUTO_REQUEST_FIELDS)).is( tableWithHeader("Path", "Type", "Optional", "Description") .row("[].field1", "String", "true", "A string.") 
.row("[].field2", "Integer", "true", "An integer.")); } @Test public void jsonSubTypesRequest() throws Exception { HandlerMethod handlerMethod = createHandlerMethod("addSubItem", ParentItem.class); mockFieldComment(ParentItem.class, "type", "A type"); mockFieldComment(ParentItem.class, "commonField", "A common field"); mockFieldComment(SubItem1.class, "subItem1Field", "A sub item 1 field"); mockFieldComment(SubItem2.class, "subItem2Field", "A sub item 2 field"); new JacksonRequestFieldSnippet().document(operationBuilder .attribute(HandlerMethod.class.getName(), handlerMethod) .attribute(ObjectMapper.class.getName(), mapper) .attribute(JavadocReader.class.getName(), javadocReader) .attribute(ConstraintReader.class.getName(), constraintReader) .build()); assertThat(this.generatedSnippets.snippet(AUTO_REQUEST_FIELDS)).is( tableWithHeader("Path", "Type", "Optional", "Description") .row("type", "String", "true", "A type.") .row("commonField", "String", "true", "A common field.") .row("subItem1Field", "Boolean", "true", "A sub item 1 field.") .row("subItem2Field", "Integer", "true", "A sub item 2 field.")); } @Test public void hasContentWithRequestBodyAnnotation() throws Exception { HandlerMethod handlerMethod = createHandlerMethod("addItem", Item.class); boolean hasContent = new JacksonRequestFieldSnippet().hasContent(operationBuilder .attribute(HandlerMethod.class.getName(), handlerMethod) .build()); assertThat(hasContent).isTrue(); } @Test public void noContent() throws Exception { HandlerMethod handlerMethod = createHandlerMethod("addItem2"); boolean hasContent = new JacksonRequestFieldSnippet().hasContent(operationBuilder .attribute(HandlerMethod.class.getName(), handlerMethod) .build()); assertThat(hasContent).isFalse(); } @Test public void failOnUndocumentedFields() throws Exception { HandlerMethod handlerMethod = createHandlerMethod("addItem", Item.class); thrown.expect(SnippetException.class); thrown.expectMessage("Following request fields were not documented: 
[field1, field2]"); new JacksonRequestFieldSnippet().failOnUndocumentedFields(true).document(operationBuilder .attribute(HandlerMethod.class.getName(), handlerMethod) .attribute(ObjectMapper.class.getName(), mapper) .attribute(JavadocReader.class.getName(), javadocReader) .attribute(ConstraintReader.class.getName(), constraintReader) .build()); } @Test public void deprecated() throws Exception { HandlerMethod handlerMethod = createHandlerMethod("removeItem", DeprecatedItem.class); mockFieldComment(DeprecatedItem.class, "index", "item's index"); mockDeprecated(DeprecatedItem.class, "index", "use index2"); new JacksonRequestFieldSnippet().document(operationBuilder .attribute(HandlerMethod.class.getName(), handlerMethod) .attribute(ObjectMapper.class.getName(), mapper) .attribute(JavadocReader.class.getName(), javadocReader) .attribute(ConstraintReader.class.getName(), constraintReader) .build()); assertThat(this.generatedSnippets.snippet(AUTO_REQUEST_FIELDS)).is( tableWithHeader("Path", "Type", "Optional", "Description") .row("index", "Integer", "true", "**Deprecated.** Use index2.\n\nItem's index.")); } @Test public void accessors() throws Exception { HandlerMethod handlerMethod = createHandlerMethod("accessors", ReadWriteAccessors.class); new JacksonRequestFieldSnippet().document(operationBuilder .attribute(HandlerMethod.class.getName(), handlerMethod) .attribute(ObjectMapper.class.getName(), mapper) .attribute(JavadocReader.class.getName(), javadocReader) .attribute(ConstraintReader.class.getName(), constraintReader) .build()); assertThat(this.generatedSnippets.snippet(AUTO_REQUEST_FIELDS)).is( tableWithHeader("Path", "Type", "Optional", "Description") .row("writeOnly", "String", "true", "") .row("bothWays", "String", "true", "")); } private void mockConstraintMessage(Class<?> type, String fieldName, String comment) { when(constraintReader.getConstraintMessages(type, fieldName)) .thenReturn(singletonList(comment)); } private void mockOptionalMessage(Class<?> type, 
String fieldName, String comment) { when(constraintReader.getOptionalMessages(type, fieldName)) .thenReturn(singletonList(comment)); } private void mockFieldComment(Class<?> type, String fieldName, String comment) { when(javadocReader.resolveFieldComment(type, fieldName)) .thenReturn(comment); } private void mockDeprecated(Class<?> type, String fieldName, String comment) { when(javadocReader.resolveFieldTag(type, fieldName, "deprecated")) .thenReturn(comment); } private HandlerMethod createHandlerMethod(String name, Class<?>... parameterTypes) throws NoSuchMethodException { return new HandlerMethod(new TestResource(), name, parameterTypes); } private static class TestResource { public void addItem(@RequestBody Item item) { // NOOP } public void addItemWithWeight(@RequestBody ItemWithWeight item) { // NOOP } public void addItems(@RequestBody List<Item> items) { // NOOP } public void addItem2() { // NOOP } public void addSubItem(@RequestBody ParentItem item) { // NOOP } public void removeItem(@RequestBody DeprecatedItem item) { // NOOP } public void accessors(@RequestBody ReadWriteAccessors accessors) { // NOOP } } private static class ProcessingCommand { private String command; } private static class Item { @NotBlank private String field1; @Size(max = 10) private Integer field2; } private enum Weight { LIGHT, HEAVY } private static class ItemWithWeight { private Weight weight; } private static class DeprecatedItem { @Deprecated private int index; } @JsonTypeInfo(use = NAME, include = PROPERTY, property = "type", visible = true) @JsonSubTypes({ @JsonSubTypes.Type(value = SubItem1.class, name = "1"), @JsonSubTypes.Type(value = SubItem2.class, name = "2") }) private static abstract class ParentItem { private String type; private String commonField; } private static class SubItem1 extends ParentItem { private Boolean subItem1Field; } private static class SubItem2 extends ParentItem { private Integer subItem2Field; } private static class ReadWriteAccessors { 
@JsonProperty(access = READ_ONLY) private String readOnly; @JsonProperty(access = WRITE_ONLY) private String writeOnly; private String bothWays; } }
apache-2.0
Servoy/wicket
wicket/src/main/java/org/apache/wicket/util/watch/ModificationWatcher.java
5371
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.util.watch;

import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.wicket.util.listener.ChangeListenerSet;
import org.apache.wicket.util.listener.IChangeListener;
import org.apache.wicket.util.thread.ICode;
import org.apache.wicket.util.thread.Task;
import org.apache.wicket.util.time.Duration;
import org.apache.wicket.util.time.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Monitors one or more <code>IModifiable</code> objects, calling a {@link IChangeListener
 * IChangeListener} when a given object's modification time changes.
 *
 * Detection is poll-based: a background {@link Task} wakes up at the configured
 * frequency and compares each tracked object's current modification time
 * against the last one seen.
 *
 * @author Jonathan Locke
 * @since 1.2.6
 */
public class ModificationWatcher implements IModificationWatcher
{
	/** logger */
	private static final Logger log = LoggerFactory.getLogger(ModificationWatcher.class);

	/** maps <code>IModifiable</code> objects to <code>Entry</code> objects */
	// ConcurrentHashMap so the poller thread and caller threads can access
	// the map without external locking.
	private final Map<IModifiable, Entry> modifiableToEntry = new ConcurrentHashMap<IModifiable, Entry>();

	/** the <code>Task</code> to run */
	private Task task;

	/**
	 * Container class for holding modifiable entries to watch.
	 */
	private static final class Entry
	{
		// The most recent lastModificationTime polled on the object
		Time lastModifiedTime;

		// The set of listeners to call when the modifiable changes
		final ChangeListenerSet listeners = new ChangeListenerSet();

		// The modifiable thing
		IModifiable modifiable;
	}

	/**
	 * Default constructor for two-phase construction.
	 */
	// When using this constructor, start(Duration) must be called separately
	// before any change detection happens.
	public ModificationWatcher()
	{
	}

	/**
	 * Constructor that accepts a <code>Duration</code> argument representing the poll frequency.
	 * 
	 * @param pollFrequency
	 *            how often to check on <code>IModifiable</code>s
	 */
	public ModificationWatcher(final Duration pollFrequency)
	{
		start(pollFrequency);
	}

	/**
	 * @see org.apache.wicket.util.watch.IModificationWatcher#add(org.apache.wicket.util.watch.IModifiable,
	 *      org.apache.wicket.util.listener.IChangeListener)
	 */
	public final boolean add(final IModifiable modifiable, final IChangeListener listener)
	{
		// Look up entry for modifiable
		final Entry entry = modifiableToEntry.get(modifiable);

		// Found it?
		if (entry == null)
		{
			Time lastModifiedTime = modifiable.lastModifiedTime();
			if (lastModifiedTime != null)
			{
				// Construct new entry
				final Entry newEntry = new Entry();

				newEntry.modifiable = modifiable;
				newEntry.lastModifiedTime = lastModifiedTime;
				newEntry.listeners.add(listener);

				// Put in map
				// NOTE(review): plain put() — a concurrent add() for the same
				// modifiable could overwrite a just-created entry and lose its
				// listener; presumably acceptable for this usage. Verify if
				// add() can race.
				modifiableToEntry.put(modifiable, newEntry);
			}
			else
			{
				// The IModifiable is not returning a valid lastModifiedTime
				log.info("Cannot track modifications to resource " + modifiable);
			}

			// NOTE(review): returns true here even when no entry was created
			// (null lastModifiedTime), i.e. the listener is NOT tracked —
			// confirm callers do not rely on this return value.
			return true;
		}
		else
		{
			// Add listener to existing entry
			return entry.listeners.add(listener);
		}
	}

	/**
	 * @see org.apache.wicket.util.watch.IModificationWatcher#remove(org.apache.wicket.util.watch.IModifiable)
	 */
	// Stops watching the given object; returns it if it was tracked, else null.
	public IModifiable remove(final IModifiable modifiable)
	{
		final Entry entry = modifiableToEntry.remove(modifiable);
		if (entry != null)
		{
			return entry.modifiable;
		}
		return null;
	}

	/**
	 * @see org.apache.wicket.util.watch.IModificationWatcher#start(org.apache.wicket.util.time.Duration)
	 */
	public void start(final Duration pollFrequency)
	{
		// Construct task with the given polling frequency
		task = new Task("ModificationWatcher");

		task.run(pollFrequency, new ICode()
		{
			public void run(final Logger log)
			{
				// Iterate over a weakly-consistent view of the entries; safe
				// against concurrent add/remove because the map is a
				// ConcurrentHashMap.
				final Iterator<Entry> itor = modifiableToEntry.values().iterator();
				while (itor.hasNext())
				{
					final Entry entry = itor.next();

					// If the modifiable has been modified after the last known
					// modification time
					final Time modifiableLastModified = entry.modifiable.lastModifiedTime();

					if ((modifiableLastModified != null) &&
						modifiableLastModified.after(entry.lastModifiedTime))
					{
						// Notify all listeners that the modifiable was modified
						entry.listeners.notifyListeners();

						// Update timestamp
						// (done after notification so a listener failure does
						// not silently mark the change as handled)
						entry.lastModifiedTime = modifiableLastModified;
					}
				}
			}
		});
	}

	/**
	 * @see org.apache.wicket.util.watch.IModificationWatcher#destroy()
	 */
	public void destroy()
	{
		if (task != null)
		{
			// task.stop();
			// Interrupt rather than stop: lets the task thread observe the
			// interruption and wind down cooperatively.
			task.interrupt();
		}
	}

	/**
	 * @see org.apache.wicket.util.watch.IModificationWatcher#getEntries()
	 */
	// Live key-set view of the tracked objects (changes to the watcher are
	// reflected in the returned set).
	public final Set<IModifiable> getEntries()
	{
		return modifiableToEntry.keySet();
	}
}
apache-2.0
consulo/consulo-groovy
groovy-psi/src/main/java/org/jetbrains/plugins/groovy/util/ClassInstanceCache.java
835
package org.jetbrains.plugins.groovy.util; import javax.annotation.Nonnull; import java.util.concurrent.ConcurrentHashMap; /** * @author Sergey Evdokimov */ public class ClassInstanceCache { private static final ConcurrentHashMap<String, Object> CACHE = new ConcurrentHashMap<String, Object>(); private ClassInstanceCache() { } @SuppressWarnings("unchecked") public static <T> T getInstance(@Nonnull String className, ClassLoader classLoader) { Object res = CACHE.get(className); if (res == null) { try { res = classLoader.loadClass(className).newInstance(); } catch (Exception e) { throw new RuntimeException(e); } Object oldValue = CACHE.putIfAbsent(className, res); if (oldValue != null) { res = oldValue; } } return (T)res; } }
apache-2.0
saulbein/web3j
core/src/main/java/org/web3j/abi/datatypes/generated/Fixed64x144.java
587
package org.web3j.abi.datatypes.generated;

import java.math.BigInteger;
import org.web3j.abi.datatypes.Fixed;

/**
 * Solidity {@code fixed64x144} wrapper (64 integer bits, 144 fractional bits).
 *
 * <p>Auto generated code.<br>
 * <strong>Do not modify!</strong><br>
 * Please use {@link org.web3j.codegen.AbiTypesGenerator} to update.</p>
 */
public class Fixed64x144 extends Fixed {
    /** Shared zero-valued instance. */
    public static final Fixed64x144 DEFAULT = new Fixed64x144(BigInteger.ZERO);

    public Fixed64x144(BigInteger value) {
        super(64, 144, value);
    }

    // The mBitSize/nBitSize parameters are deliberately ignored: the bit
    // sizes are fixed by this generated type, so 64 and 144 are always passed
    // to the superclass (this mirrors the generator template — TODO confirm
    // against AbiTypesGenerator before changing).
    public Fixed64x144(int mBitSize, int nBitSize, BigInteger m, BigInteger n) {
        super(64, 144, m, n);
    }
}
apache-2.0
devictr/fst-jit
java/src/main/java/generated/FstComputeTest2.java
2489
package generated; public class FstComputeTest2 { public static float compute(int[] token) { int pos=0; float result=0f; if(pos>=token.length) {return -1;} switch(token[pos++]) { case 80: result+=2.0f; if(pos>=token.length) {return -1;} switch(token[pos++]) { case 79: if(pos>=token.length) {return -1;} switch(token[pos++]) { case 80: return (pos!=token.length) ? -1 : result; default: return -1; } default: return -1; } case 84: result+=6.0f; if(pos>=token.length) {return -1;} switch(token[pos++]) { case 79: if(pos>=token.length) {return -1;} switch(token[pos++]) { case 80: return (pos!=token.length) ? -1 : result; default: return -1; } default: return -1; } case 83: result+=3.0f; if(pos>=token.length) {return -1;} switch(token[pos++]) { case 84: result+=2.0f; if(pos>=token.length) {return -1;} switch(token[pos++]) { case 79: if(pos>=token.length) {return -1;} switch(token[pos++]) { case 80: return (pos!=token.length) ? -1 : result; default: return -1; } default: return -1; } case 76: if(pos>=token.length) {return -1;} switch(token[pos++]) { case 79: if(pos>=token.length) {return -1;} switch(token[pos++]) { case 84: result+=1.0f; if(pos>=token.length) {return -1;} switch(token[pos++]) { case 72: return (pos!=token.length) ? -1 : result; default: return -1; } case 80: return (pos!=token.length) ? -1 : result; default: return -1; } default: return -1; } default: return -1; } case 77: if(pos>=token.length) {return -1;} switch(token[pos++]) { case 79: if(pos>=token.length) {return -1;} switch(token[pos++]) { case 84: result+=1.0f; if(pos>=token.length) {return -1;} switch(token[pos++]) { case 72: return (pos!=token.length) ? -1 : result; default: return -1; } case 80: return (pos!=token.length) ? -1 : result; default: return -1; } default: return -1; } default: return -1; } } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-servicequotas/src/main/java/com/amazonaws/services/servicequotas/AbstractAWSServiceQuotas.java
5397
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.servicequotas; import javax.annotation.Generated; import com.amazonaws.services.servicequotas.model.*; import com.amazonaws.*; /** * Abstract implementation of {@code AWSServiceQuotas}. Convenient method forms pass through to the corresponding * overload that takes a request object, which throws an {@code UnsupportedOperationException}. */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class AbstractAWSServiceQuotas implements AWSServiceQuotas { protected AbstractAWSServiceQuotas() { } @Override public AssociateServiceQuotaTemplateResult associateServiceQuotaTemplate(AssociateServiceQuotaTemplateRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DeleteServiceQuotaIncreaseRequestFromTemplateResult deleteServiceQuotaIncreaseRequestFromTemplate( DeleteServiceQuotaIncreaseRequestFromTemplateRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public DisassociateServiceQuotaTemplateResult disassociateServiceQuotaTemplate(DisassociateServiceQuotaTemplateRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetAWSDefaultServiceQuotaResult getAWSDefaultServiceQuota(GetAWSDefaultServiceQuotaRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetAssociationForServiceQuotaTemplateResult 
getAssociationForServiceQuotaTemplate(GetAssociationForServiceQuotaTemplateRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetRequestedServiceQuotaChangeResult getRequestedServiceQuotaChange(GetRequestedServiceQuotaChangeRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetServiceQuotaResult getServiceQuota(GetServiceQuotaRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public GetServiceQuotaIncreaseRequestFromTemplateResult getServiceQuotaIncreaseRequestFromTemplate(GetServiceQuotaIncreaseRequestFromTemplateRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListAWSDefaultServiceQuotasResult listAWSDefaultServiceQuotas(ListAWSDefaultServiceQuotasRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListRequestedServiceQuotaChangeHistoryResult listRequestedServiceQuotaChangeHistory(ListRequestedServiceQuotaChangeHistoryRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListRequestedServiceQuotaChangeHistoryByQuotaResult listRequestedServiceQuotaChangeHistoryByQuota( ListRequestedServiceQuotaChangeHistoryByQuotaRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListServiceQuotaIncreaseRequestsInTemplateResult listServiceQuotaIncreaseRequestsInTemplate(ListServiceQuotaIncreaseRequestsInTemplateRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListServiceQuotasResult listServiceQuotas(ListServiceQuotasRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListServicesResult listServices(ListServicesRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public ListTagsForResourceResult listTagsForResource(ListTagsForResourceRequest request) { throw new java.lang.UnsupportedOperationException(); 
} @Override public PutServiceQuotaIncreaseRequestIntoTemplateResult putServiceQuotaIncreaseRequestIntoTemplate(PutServiceQuotaIncreaseRequestIntoTemplateRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public RequestServiceQuotaIncreaseResult requestServiceQuotaIncrease(RequestServiceQuotaIncreaseRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public TagResourceResult tagResource(TagResourceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public UntagResourceResult untagResource(UntagResourceRequest request) { throw new java.lang.UnsupportedOperationException(); } @Override public void shutdown() { throw new java.lang.UnsupportedOperationException(); } @Override public com.amazonaws.ResponseMetadata getCachedResponseMetadata(com.amazonaws.AmazonWebServiceRequest request) { throw new java.lang.UnsupportedOperationException(); } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-healthlake/src/main/java/com/amazonaws/services/healthlake/model/S3Configuration.java
5933
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.healthlake.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * The configuration of the S3 bucket for either an import or export job. This includes assigning permissions for * access. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/healthlake-2017-07-01/S3Configuration" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class S3Configuration implements Serializable, Cloneable, StructuredPojo { /** * <p> * The S3Uri is the user specified S3 location of the FHIR data to be imported into Amazon HealthLake. * </p> */ private String s3Uri; /** * <p> * The KMS key ID used to access the S3 bucket. * </p> */ private String kmsKeyId; /** * <p> * The S3Uri is the user specified S3 location of the FHIR data to be imported into Amazon HealthLake. * </p> * * @param s3Uri * The S3Uri is the user specified S3 location of the FHIR data to be imported into Amazon HealthLake. */ public void setS3Uri(String s3Uri) { this.s3Uri = s3Uri; } /** * <p> * The S3Uri is the user specified S3 location of the FHIR data to be imported into Amazon HealthLake. * </p> * * @return The S3Uri is the user specified S3 location of the FHIR data to be imported into Amazon HealthLake. 
*/ public String getS3Uri() { return this.s3Uri; } /** * <p> * The S3Uri is the user specified S3 location of the FHIR data to be imported into Amazon HealthLake. * </p> * * @param s3Uri * The S3Uri is the user specified S3 location of the FHIR data to be imported into Amazon HealthLake. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Configuration withS3Uri(String s3Uri) { setS3Uri(s3Uri); return this; } /** * <p> * The KMS key ID used to access the S3 bucket. * </p> * * @param kmsKeyId * The KMS key ID used to access the S3 bucket. */ public void setKmsKeyId(String kmsKeyId) { this.kmsKeyId = kmsKeyId; } /** * <p> * The KMS key ID used to access the S3 bucket. * </p> * * @return The KMS key ID used to access the S3 bucket. */ public String getKmsKeyId() { return this.kmsKeyId; } /** * <p> * The KMS key ID used to access the S3 bucket. * </p> * * @param kmsKeyId * The KMS key ID used to access the S3 bucket. * @return Returns a reference to this object so that method calls can be chained together. */ public S3Configuration withKmsKeyId(String kmsKeyId) { setKmsKeyId(kmsKeyId); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getS3Uri() != null) sb.append("S3Uri: ").append(getS3Uri()).append(","); if (getKmsKeyId() != null) sb.append("KmsKeyId: ").append(getKmsKeyId()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof S3Configuration == false) return false; S3Configuration other = (S3Configuration) obj; if (other.getS3Uri() == null ^ this.getS3Uri() == null) return false; if (other.getS3Uri() != null && other.getS3Uri().equals(this.getS3Uri()) == false) return false; if (other.getKmsKeyId() == null ^ this.getKmsKeyId() == null) return false; if (other.getKmsKeyId() != null && other.getKmsKeyId().equals(this.getKmsKeyId()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getS3Uri() == null) ? 0 : getS3Uri().hashCode()); hashCode = prime * hashCode + ((getKmsKeyId() == null) ? 0 : getKmsKeyId().hashCode()); return hashCode; } @Override public S3Configuration clone() { try { return (S3Configuration) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.healthlake.model.transform.S3ConfigurationMarshaller.getInstance().marshall(this, protocolMarshaller); } }
apache-2.0
robzor92/hops
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/io/hops/util/RMStorageFactory.java
8338
/* * Copyright (C) 2015 hops.io. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.hops.util; import io.hops.DalDriver; import io.hops.DalNdbEventStreaming; import io.hops.DalStorageFactory; import io.hops.StorageConnector; import io.hops.exception.StorageInitializtionException; import io.hops.metadata.common.EntityDataAccess; import io.hops.metadata.common.entity.ArrayVariable; import io.hops.metadata.common.entity.ByteArrayVariable; import io.hops.metadata.common.entity.IntVariable; import io.hops.metadata.common.entity.LongVariable; import io.hops.metadata.common.entity.StringVariable; import io.hops.metadata.common.entity.Variable; import io.hops.metadata.election.dal.LeDescriptorDataAccess; import io.hops.metadata.election.dal.YarnLeDescriptorDataAccess; import io.hops.metadata.election.entity.LeDescriptor; import io.hops.metadata.hdfs.dal.GroupDataAccess; import io.hops.metadata.hdfs.dal.UserDataAccess; import io.hops.metadata.hdfs.dal.UserGroupDataAccess; import io.hops.metadata.hdfs.dal.VariableDataAccess; import io.hops.security.UsersGroups; import io.hops.transaction.EntityManager; import io.hops.transaction.context.ContextInitializer; import io.hops.transaction.context.EntityContext; import io.hops.transaction.context.LeSnapshot; import io.hops.transaction.context.VariableContext; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.fs.CommonConfigurationKeys; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; import java.util.HashMap; import java.util.Map; import java.util.Properties; import org.apache.hadoop.yarn.conf.YarnConfiguration; public class RMStorageFactory { private static final Log LOG = LogFactory.getLog(RMStorageFactory.class); private static boolean isInitialized = false; private static DalStorageFactory dStorageFactory; private static Map<Class, EntityDataAccess> dataAccessAdaptors = new HashMap<Class, EntityDataAccess>(); private static DalNdbEventStreaming dNdbEventStreaming; private static boolean ndbStreaingRunning = false; public static StorageConnector getConnector() { return dStorageFactory.getConnector(); } public static synchronized void kickTheNdbEventStreamingAPI(boolean isLeader, Configuration conf) throws StorageInitializtionException { dNdbEventStreaming = DalDriver.loadHopsNdbEventStreamingLib( YarnAPIStorageFactory.NDB_EVENT_STREAMING_FOR_DISTRIBUTED_SERVICE); String connectionString = dStorageFactory.getConnector().getClusterConnectString() + ":" + conf.getInt(YarnConfiguration.HOPS_NDB_EVENT_STREAMING_DB_PORT, YarnConfiguration.DEFAULT_HOPS_NDB_EVENT_STREAMING_DB_PORT); dNdbEventStreaming.init(connectionString, dStorageFactory.getConnector().getDatabaseName()); dNdbEventStreaming.startHopsNdbEvetAPISession(isLeader); ndbStreaingRunning = true; } public static synchronized void stopTheNdbEventStreamingAPI() { if(ndbStreaingRunning && dNdbEventStreaming!=null){ ndbStreaingRunning = false; dNdbEventStreaming.closeHopsNdbEventAPISession(); } } public static void setConfiguration(Configuration conf) throws StorageInitializtionException, IOException { if (isInitialized) { return; } 
addToClassPath(conf.get(YarnAPIStorageFactory.DFS_STORAGE_DRIVER_JAR_FILE, YarnAPIStorageFactory.DFS_STORAGE_DRIVER_JAR_FILE_DEFAULT)); dStorageFactory = DalDriver.load( conf.get(YarnAPIStorageFactory.DFS_STORAGE_DRIVER_CLASS, YarnAPIStorageFactory.DFS_STORAGE_DRIVER_CLASS_DEFAULT)); dStorageFactory.setConfiguration(getMetadataClusterConfiguration(conf)); initDataAccessWrappers(); EntityManager.addContextInitializer(getContextInitializer()); if(conf.getBoolean(CommonConfigurationKeys.HOPS_GROUPS_ENABLE, CommonConfigurationKeys .HOPS_GROUPS_ENABLE_DEFAULT)) { UsersGroups.init((UserDataAccess) getDataAccess (UserDataAccess.class), (UserGroupDataAccess) getDataAccess (UserGroupDataAccess.class), (GroupDataAccess) getDataAccess (GroupDataAccess.class), conf.getInt(CommonConfigurationKeys .HOPS_GROUPS_UPDATER_ROUND, CommonConfigurationKeys .HOPS_GROUPS_UPDATER_ROUND_DEFAULT), conf.getInt(CommonConfigurationKeys .HOPS_USERS_LRU_THRESHOLD, CommonConfigurationKeys .HOPS_USERS_LRU_THRESHOLD_DEFAULT)); } isInitialized = true; } public static Properties getMetadataClusterConfiguration(Configuration conf) throws IOException { String configFile = conf.get(YarnAPIStorageFactory.DFS_STORAGE_DRIVER_CONFIG_FILE, YarnAPIStorageFactory.DFS_STORAGE_DRIVER_CONFIG_FILE_DEFAULT); Properties clusterConf = new Properties(); InputStream inStream = StorageConnector.class.getClassLoader(). 
getResourceAsStream(configFile); clusterConf.load(inStream); return clusterConf; } //[M]: just for testing purposes private static void addToClassPath(String s) throws StorageInitializtionException { try { File f = new File(s); URL u = f.toURI().toURL(); URLClassLoader urlClassLoader = (URLClassLoader) ClassLoader.getSystemClassLoader(); Class urlClass = URLClassLoader.class; Method method = urlClass.getDeclaredMethod("addURL", new Class[]{URL.class}); method.setAccessible(true); method.invoke(urlClassLoader, new Object[]{u}); } catch (MalformedURLException ex) { throw new StorageInitializtionException(ex); } catch (IllegalAccessException ex) { throw new StorageInitializtionException(ex); } catch (IllegalArgumentException ex) { throw new StorageInitializtionException(ex); } catch (InvocationTargetException ex) { throw new StorageInitializtionException(ex); } catch (NoSuchMethodException ex) { throw new StorageInitializtionException(ex); } catch (SecurityException ex) { throw new StorageInitializtionException(ex); } } private static void initDataAccessWrappers() { dataAccessAdaptors.clear(); } private static ContextInitializer getContextInitializer() { return new ContextInitializer() { @Override public Map<Class, EntityContext> createEntityContexts() { Map<Class, EntityContext> entityContexts = new HashMap<Class, EntityContext>(); VariableContext variableContext = new VariableContext( (VariableDataAccess) getDataAccess(VariableDataAccess.class)); entityContexts.put(IntVariable.class, variableContext); entityContexts.put(LongVariable.class, variableContext); entityContexts.put(ByteArrayVariable.class, variableContext); entityContexts.put(StringVariable.class, variableContext); entityContexts.put(ArrayVariable.class, variableContext); entityContexts.put(Variable.class, variableContext); entityContexts.put(LeDescriptor.YarnLeDescriptor.class, new LeSnapshot.YarnLESnapshot((LeDescriptorDataAccess) getDataAccess(YarnLeDescriptorDataAccess.class))); return entityContexts; 
} @Override public StorageConnector getConnector() { return dStorageFactory.getConnector(); } }; } public static EntityDataAccess getDataAccess(Class type) { if (dataAccessAdaptors.containsKey(type)) { return dataAccessAdaptors.get(type); } return dStorageFactory.getDataAccess(type); } }
apache-2.0
vam-google/google-cloud-java
google-api-grpc/proto-google-cloud-automl-v1beta1/src/main/java/com/google/cloud/automl/v1beta1/Operations.java
5054
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/automl/v1beta1/operations.proto package com.google.cloud.automl.v1beta1; public final class Operations { private Operations() {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_automl_v1beta1_OperationMetadata_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_automl_v1beta1_OperationMetadata_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_automl_v1beta1_CreateModelOperationMetadata_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_automl_v1beta1_CreateModelOperationMetadata_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n,google/cloud/automl/v1beta1/operations" + ".proto\022\033google.cloud.automl.v1beta1\032\034goo" + "gle/api/annotations.proto\032\'google/cloud/" + "automl/v1beta1/model.proto\0322google/cloud" + "/automl/v1beta1/model_evaluation.proto\032\033" + "google/protobuf/empty.proto\032\037google/prot" + "obuf/timestamp.proto\032\027google/rpc/status." + "proto\"\243\002\n\021OperationMetadata\022Y\n\024create_mo" + "del_details\030\n \001(\01329.google.cloud.automl." 
+ "v1beta1.CreateModelOperationMetadataH\000\022\030" + "\n\020progress_percent\030\r \001(\005\022,\n\020partial_fail" + "ures\030\002 \003(\0132\022.google.rpc.Status\022/\n\013create" + "_time\030\003 \001(\0132\032.google.protobuf.Timestamp\022" + "/\n\013update_time\030\004 \001(\0132\032.google.protobuf.T" + "imestampB\t\n\007details\"\036\n\034CreateModelOperat" + "ionMetadataB\204\001\n\037com.google.cloud.automl." + "v1beta1P\001ZAgoogle.golang.org/genproto/go" + "ogleapis/cloud/automl/v1beta1;automl\312\002\033G" + "oogle\\Cloud\\AutoMl\\V1beta1b\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.AnnotationsProto.getDescriptor(), com.google.cloud.automl.v1beta1.ModelOuterClass.getDescriptor(), com.google.cloud.automl.v1beta1.ModelEvaluationOuterClass.getDescriptor(), com.google.protobuf.EmptyProto.getDescriptor(), com.google.protobuf.TimestampProto.getDescriptor(), com.google.rpc.StatusProto.getDescriptor(), }, assigner); internal_static_google_cloud_automl_v1beta1_OperationMetadata_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_cloud_automl_v1beta1_OperationMetadata_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_automl_v1beta1_OperationMetadata_descriptor, new java.lang.String[] { "CreateModelDetails", "ProgressPercent", "PartialFailures", "CreateTime", "UpdateTime", "Details", }); internal_static_google_cloud_automl_v1beta1_CreateModelOperationMetadata_descriptor = getDescriptor().getMessageTypes().get(1); 
internal_static_google_cloud_automl_v1beta1_CreateModelOperationMetadata_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_automl_v1beta1_CreateModelOperationMetadata_descriptor, new java.lang.String[] {}); com.google.api.AnnotationsProto.getDescriptor(); com.google.cloud.automl.v1beta1.ModelOuterClass.getDescriptor(); com.google.cloud.automl.v1beta1.ModelEvaluationOuterClass.getDescriptor(); com.google.protobuf.EmptyProto.getDescriptor(); com.google.protobuf.TimestampProto.getDescriptor(); com.google.rpc.StatusProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
apache-2.0
TheRingbearer/HAWKS
ode/jca-ra/src/main/java/org/apache/ode/ra/OdeManagedConnectionFactory.java
4387
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.ode.ra; import org.apache.ode.ra.transports.OdeTransport; import org.apache.ode.ra.transports.rmi.RMITransport; import java.io.PrintWriter; import java.rmi.RemoteException; import java.util.Set; import javax.resource.ResourceException; import javax.resource.spi.ConnectionManager; import javax.resource.spi.ConnectionRequestInfo; import javax.resource.spi.ManagedConnection; import javax.resource.spi.ManagedConnectionFactory; import javax.security.auth.Subject; /** * JCA {@link ManagedConnectionFactory} implementation. */ public class OdeManagedConnectionFactory implements ManagedConnectionFactory { private static final long serialVersionUID = 1L; private PrintWriter _logWriter; /** Default connection request information. 
*/ private OdeConnectionRequestInfo _defaultCRI = new OdeConnectionRequestInfo( null, ""); public OdeManagedConnectionFactory() { try { setTransport(RMITransport.class.getName()); } catch (ResourceException re) { // ignore (perhaps we should log) } } public void setTransport(String transportClassName) throws ResourceException { try { Class tclass = Class.forName(transportClassName); _defaultCRI.transport = (OdeTransport) tclass.newInstance(); } catch (IllegalAccessException e) { ResourceException re = new ResourceException( "Class-access error for transport class \"" + transportClassName + "\". ", e); throw re; } catch (InstantiationException e) { ResourceException re = new ResourceException( "Error instantiating transport class \"" + transportClassName + "\". ", e); throw re; } catch (ClassNotFoundException e) { ResourceException re = new ResourceException("Transport class \"" + transportClassName + "\" not found in class path. ", e); throw re; } } public void setURL(String url) throws ResourceException { _defaultCRI.url = url; } public void setProperty(String key, String val) throws ResourceException { if (key.equals("URL")) setURL(val); else if (key.equals("Transport")) setTransport(val); else _defaultCRI.properties.setProperty(key, val); } public Object createConnectionFactory() throws ResourceException { return new OdeConnectionFactoryImpl(this, new OdeConnectionManager()); } public Object createConnectionFactory(ConnectionManager connectionManager) throws ResourceException { return new OdeConnectionFactoryImpl(this, connectionManager); } public ManagedConnection createManagedConnection(Subject subject, ConnectionRequestInfo connectionRequestInfo) throws ResourceException { OdeConnectionRequestInfo cri = (OdeConnectionRequestInfo) (connectionRequestInfo != null ? 
connectionRequestInfo : _defaultCRI); if (cri.transport == null) throw new ResourceException("No transport."); try { return new OdeManagedConnectionImpl(cri.transport.createPipe( cri.url, cri.properties), subject, connectionRequestInfo); } catch (RemoteException ex) { ResourceException re = new ResourceException( "Unable to create connection: " + ex.getMessage(), ex); throw re; } } public ManagedConnection matchManagedConnections(Set candidates, Subject subject, ConnectionRequestInfo connectionRequestInfo) throws ResourceException { return null; // To change body of implemented methods use File | // Settings | File Templates. } public PrintWriter getLogWriter() throws ResourceException { return _logWriter; } public void setLogWriter(PrintWriter printWriter) throws ResourceException { _logWriter = printWriter; } }
apache-2.0
emite-mx/ef-sdk-java
ef-sdk-java/src/main/java/mx/emite/sdk/ret10/comp/arrendamientofideicomiso/package-info.java
642
@javax.xml.bind.annotation.XmlSchema(namespace = "http://www.sat.gob.mx/esquemas/retencionpago/1" , xmlns = { @XmlNs(prefix = "arrendamientoenfideicomiso", namespaceURI = "http://www.sat.gob.mx/esquemas/retencionpago/1/arrendamientoenfideicomiso"), } , elementFormDefault = javax.xml.bind.annotation.XmlNsForm.QUALIFIED ) @XmlJavaTypeAdapter(value=StringAdapter.class, type=String.class) package mx.emite.sdk.ret10.comp.arrendamientofideicomiso; import javax.xml.bind.annotation.XmlNs; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import mx.emite.sdk.enums.sat.adaptadores.StringAdapter;
apache-2.0
wittyResry/topcoder
TCO 2016 Round 3A/TopologicalOrdering.java
5125
import java.io.*;
import java.util.*;

/**
 * TopCoder TCO 2016 Round 3A, 1000-point problem "TopologicalOrdering".
 * construct(n) emits a description of a DAG (vertex count followed by edge
 * pairs) that, presumably, has exactly n topological orderings — TODO confirm
 * against the problem statement; the checker below only compares against the
 * expected sample output.
 *
 * NOTE(review): solution state is kept in mutable static fields (div, stack),
 * so this class is not thread-safe and not reentrant.
 */
public class TopologicalOrdering {

    // Current prime factor being processed; read by go().
    static int div;
    // Multipliers (1..3) chosen by go(), recorded in reverse order.
    static List<Integer> stack;

    /**
     * Builds the graph description for the given n.
     *
     * Works by factorizing n; for each prime factor, go(1, 1) searches for a
     * sequence of multipliers i in {1,2,3} with the recurrence
     * cur -> i*cur + prev that reaches exactly div, and the gadget encoded in
     * the loops below is appended for each step. Fails hard (AssertionError)
     * if the search fails or the construction exceeds the problem limits
     * (50 vertices / 100 edges, i.e. 200 edge-endpoints).
     *
     * @return array whose first element is the vertex count, followed by
     *         flattened (to, from) edge pairs
     */
    public int[] construct(int n) {
        // Reset leftover static state from a previous invocation.
        if (stack != null) stack.clear();
        stack = null;
        int v = 0; // vertices allocated so far
        List<Integer> ans = new ArrayList<Integer>();     // flattened edge list
        List<Integer> connect = new ArrayList<Integer>(); // per-vertex fan-in to replicate
        stack = new ArrayList<Integer>();
        for (div = 2; div <= n; ++div) {
            while (n % div == 0) {
                n /= div;
                if (!go(1, 1)) {
                    // No multiplier decomposition found for this factor.
                    throw new AssertionError();
                }
                // Link the new chain head to the previous component, if any.
                if (v > 0) {
                    int conn = connect.get(v - 1);
                    for (int j = 0; j < conn; ++j) {
                        ans.add(v);
                        ans.add(v - 1 - j);
                    }
                }
                connect.add(1);
                ++v;
                // Replay the recorded multipliers (stack is in reverse order).
                for (int i = stack.size() - 1; i >= 0; --i) {
                    int cur = stack.get(i);
                    // cur-1 "parallel" vertices chained off the current tail.
                    for (int j = 0; j < cur - 1; ++j) {
                        int conn = connect.get(v - 1);
                        for (int k = 0; k < conn; ++k) {
                            ans.add(v);
                            ans.add(v - 1 - k);
                        }
                        connect.add(1);
                        ++v;
                    }
                    // Join vertex closing this step's gadget.
                    if (v > cur) {
                        int conn = connect.get(v - cur - 1);
                        for (int j = 0; j < conn; ++j) {
                            ans.add(v);
                            ans.add(v - cur - 1 - j);
                        }
                    }
                    connect.add(2);
                    ++v;
                }
                stack.clear();
            }
        }
        // n == 1 needs a single isolated vertex.
        if (v == 0) {
            v = 1;
        }
        // Enforce the problem's output limits.
        if (v > 50 || ans.size() > 200) {
            throw new AssertionError();
        }
        int res[] = new int[ans.size() + 1];
        res[0] = v;
        for (int i = 1; i < res.length; ++i) {
            res[i] = ans.get(i - 1);
        }
        return res;
    }

    /**
     * Depth-first search for multipliers i in {1,2,3} such that the recurrence
     * cur -> i*cur + prev reaches exactly {@link #div}. On success the chosen
     * multipliers are pushed onto {@link #stack} on the way back up (so the
     * list ends up in reverse application order).
     */
    static boolean go(int cur, int prev) {
        if (cur == div) {
            return true;
        }
        if (cur > div) {
            return false;
        }
        for (int i = 1; i <= 3; ++i) {
            if (go(i * cur + prev, cur)) {
                stack.add(i);
                return true;
            }
        }
        return false;
    }

    // CUT begin
    // ---- Generated local test harness (stripped before submission). ----
    public static void main(String[] args){
        System.err.println("TopologicalOrdering (1000 Points)");
        System.err.println();
        HashSet<Integer> cases = new HashSet<Integer>();
        for (int i = 0; i < args.length; ++i) cases.add(Integer.parseInt(args[i]));
        runTest(cases);
    }

    // Reads sample cases from the .sample file and runs each selected case.
    static void runTest(HashSet<Integer> caseSet) {
        int cases = 0, passed = 0;
        while (true) {
            String label = Reader.nextLine();
            if (label == null || !label.startsWith("--")) break;
            int n = Integer.parseInt(Reader.nextLine());
            Reader.nextLine();
            int[] __answer = new int[Integer.parseInt(Reader.nextLine())];
            for (int i = 0; i < __answer.length; ++i) __answer[i] = Integer.parseInt(Reader.nextLine());
            cases++;
            if (caseSet.size() > 0 && !caseSet.contains(cases - 1)) continue;
            System.err.print(String.format(" Testcase #%d ... ", cases - 1));
            if (doTest(n, __answer)) passed++;
        }
        if (caseSet.size() > 0) cases = caseSet.size();
        System.err.println(String.format("%nPassed : %d/%d cases", passed, cases));
        // TopCoder-style time-decay scoring relative to a fixed epoch.
        int T = (int)(System.currentTimeMillis() / 1000) - 1471012559;
        double PT = T / 60.0, TT = 75.0;
        System.err.println(String.format("Time : %d minutes %d secs%nScore : %.2f points", T / 60, T % 60, 1000 * (0.3 + (0.7 * TT * TT) / (10.0 * PT * PT + TT * TT))));
    }

    // Runs construct(n) and compares the result against the expected output.
    static boolean doTest(int n, int[] __expected) {
        long startTime = System.currentTimeMillis();
        Throwable exception = null;
        TopologicalOrdering instance = new TopologicalOrdering();
        int[] __result = new int[0];
        try {
            __result = instance.construct(n);
        } catch (Throwable e) {
            exception = e;
        }
        double elapsed = (System.currentTimeMillis() - startTime) / 1000.0;
        if (exception != null) {
            System.err.println("RUNTIME ERROR!");
            exception.printStackTrace();
            return false;
        } else if (equals(__result, __expected)) {
            System.err.println("PASSED! " + String.format("(%.2f seconds)", elapsed));
            return true;
        } else {
            System.err.println("FAILED! " + String.format("(%.2f seconds)", elapsed));
            System.err.println(" Expected: " + toString(__expected));
            System.err.println(" Received: " + toString(__result));
            return false;
        }
    }

    // Element-wise int[] comparison (harness helper).
    static boolean equals(int[] a, int[] b) {
        if (a.length != b.length) return false;
        for (int i = 0; i < a.length; ++i) if (a[i] != b[i]) return false;
        return true;
    }

    // Pretty-prints an int[] for failure diagnostics.
    static String toString(int[] arr) {
        StringBuffer sb = new StringBuffer();
        sb.append("[ ");
        for (int i = 0; i < arr.length; ++i) {
            if (i > 0) sb.append(", ");
            sb.append(arr[i]);
        }
        return sb.toString() + " ]";
    }

    // Lazy line reader over the sample-data file; exits the JVM on I/O error.
    static class Reader {
        private static final String dataFileName = "/Users/resry/topcoder/TCO 2016 Round 3A/TopologicalOrdering.sample";
        private static BufferedReader reader;

        public static String nextLine() {
            try {
                if (reader == null) {
                    reader = new BufferedReader(new InputStreamReader(new FileInputStream(dataFileName)));
                }
                return reader.readLine();
            } catch (IOException e) {
                System.err.println("FATAL!! IOException");
                e.printStackTrace();
                System.exit(1);
            }
            return "";
        }
    }
    // CUT end
}
apache-2.0
realityforge/arez
processor/src/test/fixtures/bad_input/com/example/observe/ObserveParametersModel.java
210
package com.example.observe;

import arez.annotations.ArezComponent;
import arez.annotations.Observe;

// Compile-time fixture from the annotation processor's "bad_input" test suite
// (see the fixtures/bad_input path): the @Observe method deliberately declares
// a parameter, which the processor is expected to reject with an error.
// Do NOT "fix" the parameter — it is the point of the fixture.
@ArezComponent
public abstract class ObserveParametersModel
{
  @Observe
  void doStuff( int i )
  {
  }
}
apache-2.0
JeffreyDeYoung/CassandraCurator
src/main/java/com/github/cassandracurator/PlaceHolder.java
162
package com.github.cassandracurator;

/**
 * Intentionally empty marker class whose only purpose is to keep the NetBeans
 * IDE from misbehaving on an otherwise-empty package.
 * TODO: Delete once it is no longer needed.
 *
 * @author jeffrey
 */
public class PlaceHolder {
}
apache-2.0
seeburger-ag/commons-vfs
commons-vfs2/src/main/java/org/apache/commons/vfs2/cache/LRUFilesCache.java
7243
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.vfs2.cache;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import org.apache.commons.collections4.map.AbstractLinkedMap;
import org.apache.commons.collections4.map.LRUMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.vfs2.FileName;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystem;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.VfsLog;
import org.apache.commons.vfs2.util.Messages;

/**
 * This implementation caches every file using {@link LRUMap}, with one LRU map
 * per {@link FileSystem}.
 * <p>
 * The default constructor uses a LRU size of 100 per file system.
 * </p>
 */
public class LRUFilesCache extends AbstractFilesCache {
    /** The default LRU size */
    private static final int DEFAULT_LRU_SIZE = 100;

    /** The logger to use. */
    private static final Log log = LogFactory.getLog(LRUFilesCache.class);

    /** The FileSystem cache: one file map per file system. */
    private final ConcurrentMap<FileSystem, Map<FileName, FileObject>> filesystemCache = new ConcurrentHashMap<>(10);

    /** The size of each per-filesystem LRU map. */
    private final int lruSize;

    // Guards the per-filesystem maps: LRUMap itself is not thread-safe
    // (access order mutates internal links), so even reads take the read lock.
    private final ReadWriteLock rwLock = new ReentrantReadWriteLock();
    private final Lock readLock = rwLock.readLock();
    private final Lock writeLock = rwLock.writeLock();

    /**
     * The per-filesystem file cache. Vetoes eviction of files that are still
     * attached or have open content, and drops the filesystem entry from the
     * outer cache once its map becomes empty.
     */
    private class MyLRUMap extends LRUMap<FileName, FileObject> {
        /**
         * serialVersionUID format is YYYYMMDD for the date of the last binary change.
         */
        private static final long serialVersionUID = 20101208L;

        /** The FileSystem this map belongs to. */
        private final FileSystem filesystem;

        public MyLRUMap(final FileSystem filesystem, final int size) {
            // scanUntilRemovable=true: keep scanning past un-evictable entries.
            super(size, true);
            this.filesystem = filesystem;
        }

        @Override
        protected boolean removeLRU(final AbstractLinkedMap.LinkEntry<FileName, FileObject> linkEntry) {
            synchronized (LRUFilesCache.this) {
                final FileObject file = linkEntry.getValue();

                // do not allow open or attached files to be removed
                if (file.isAttached() || file.isContentOpen()) {
                    return false;
                }

                if (super.removeLRU(linkEntry)) {
                    try {
                        // force detach
                        file.close();
                    } catch (final FileSystemException e) {
                        VfsLog.warn(getLogger(), log, Messages.getString("vfs.impl/LRUFilesCache-remove-ex.warn"), e);
                    }

                    // Null-safe: the filesystem entry may already have been
                    // removed concurrently by clear()/removeFile().
                    final Map<?, ?> files = filesystemCache.get(filesystem);
                    if (files != null && files.isEmpty()) {
                        filesystemCache.remove(filesystem);
                    }
                    return true;
                }
                return false;
            }
        }
    }

    /**
     * Default constructor. Uses a LRU size of 100 per file system.
     */
    public LRUFilesCache() {
        this(DEFAULT_LRU_SIZE);
    }

    /**
     * Set the desired LRU size.
     *
     * @param lruSize the LRU size
     */
    public LRUFilesCache(final int lruSize) {
        this.lruSize = lruSize;
    }

    @Override
    public void putFile(final FileObject file) {
        final Map<FileName, FileObject> files = getOrCreateFilesystemCache(file.getFileSystem());

        writeLock.lock();
        try {
            files.put(file.getName(), file);
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public boolean putFileIfAbsent(final FileObject file) {
        final Map<FileName, FileObject> files = getOrCreateFilesystemCache(file.getFileSystem());

        writeLock.lock();
        try {
            final FileName name = file.getName();

            if (files.containsKey(name)) {
                return false;
            }

            files.put(name, file);
            return true;
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public FileObject getFile(final FileSystem filesystem, final FileName name) {
        final Map<FileName, FileObject> files = getOrCreateFilesystemCache(filesystem);

        // Read lock suffices for callers; LRUMap access-order updates are
        // additionally serialized by the map veto logic above.
        readLock.lock();
        try {
            return files.get(name);
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public void clear(final FileSystem filesystem) {
        final Map<FileName, FileObject> files = getOrCreateFilesystemCache(filesystem);

        writeLock.lock();
        try {
            files.clear();

            filesystemCache.remove(filesystem);
        } finally {
            writeLock.unlock();
        }
    }

    /**
     * Returns the file map for the given filesystem, creating it atomically if
     * absent.
     * <p>
     * FIX: the previous implementation ignored the return value of
     * {@link ConcurrentMap#putIfAbsent}, so two racing threads could each keep
     * using their own private map (one of which was never stored) and silently
     * lose cached entries. We now adopt the winning map.
     * </p>
     *
     * @param filesystem the filesystem whose cache map is wanted
     * @return the shared map registered in {@link #filesystemCache}
     */
    protected Map<FileName, FileObject> getOrCreateFilesystemCache(final FileSystem filesystem) {
        Map<FileName, FileObject> files = filesystemCache.get(filesystem);
        if (files == null) {
            final Map<FileName, FileObject> candidate = new MyLRUMap(filesystem, lruSize);
            final Map<FileName, FileObject> previous = filesystemCache.putIfAbsent(filesystem, candidate);
            // If another thread won the race, use its map, not ours.
            files = previous != null ? previous : candidate;
        }
        return files;
    }

    @Override
    public void close() {
        super.close();
        filesystemCache.clear();
    }

    @Override
    public void removeFile(final FileSystem filesystem, final FileName name) {
        final Map<?, ?> files = getOrCreateFilesystemCache(filesystem);

        writeLock.lock();
        try {
            files.remove(name);

            if (files.isEmpty()) {
                filesystemCache.remove(filesystem);
            }
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public void touchFile(final FileObject file) {
        // this moves the file back on top of the LRU list
        getFile(file.getFileSystem(), file.getName());
    }
}
apache-2.0
Gericop/TalkR
TalkR Java/src/com/takisoft/talkr/data/Category.java
2580
package com.takisoft.talkr.data;

import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.neo4j.graphdb.Node;

/**
 * A wiki category: a title plus, optionally, the categories linked from its
 * page text.
 *
 * NOTE(review): compareTo is null-tolerant (a null other sorts before this)
 * and equals/hashCode are not overridden, so ordering is not consistent with
 * equals — kept as-is to avoid changing caller-visible behavior.
 *
 * @author Gericop
 */
public class Category implements Comparable<Category> {

    /**
     * Matches category links in page text. Compiled once instead of on every
     * call — Pattern instances are immutable and thread-safe, and the pattern
     * source (DetailConstants.CAT_GENERAL_PREFIX) is a constant.
     */
    private static final Pattern CATEGORY_PATTERN = Pattern.compile(
            DetailConstants.CAT_GENERAL_PREFIX,
            Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE | Pattern.MULTILINE);

    private String title;
    // Lazily initialized; stays null when the page links no categories.
    private ArrayList<Category> linkedCategories;

    /** Builds a category from a stored graph node's object-id property. */
    public Category(Node node) {
        title = (String) node.getProperty(DetailConstants.PROP_KEY_OBJECT_ID);
    }

    // Currently unused convenience constructor; kept for symmetry.
    private Category(String title) {
        this(title, false);
    }

    /**
     * @param removeExtra when true, strips the category prefix
     *        (DetailConstants.CAT_GENERAL_PREFIX_SUB) from the raw title
     */
    private Category(String title, boolean removeExtra) {
        if (removeExtra) {
            this.title = title.substring(DetailConstants.CAT_GENERAL_PREFIX_SUB.length()).trim();
        } else {
            this.title = title;
        }
    }

    /**
     * Builds a Category for the given page and attaches every category linked
     * from the page text as a linked category.
     */
    public static Category getCategories(PageData data) {
        Category category = new Category(data.getTitle(), true);
        for (Category linked : extractCategories(data.getText())) {
            category.addLinkedCategory(linked);
        }
        return category;
    }

    /** Extracts all categories referenced in the given wiki text. */
    public static ArrayList<Category> getCategoriesForWord(String text) {
        return extractCategories(text);
    }

    /**
     * Shared scan used by both public factories: finds every category link in
     * the trimmed text and turns the [[...]] payload into a Category.
     */
    private static ArrayList<Category> extractCategories(String text) {
        ArrayList<Category> categories = new ArrayList<>();
        Matcher matcher = CATEGORY_PATTERN.matcher(text.trim());
        while (matcher.find()) {
            String raw = matcher.group();
            categories.add(new Category(raw.substring(raw.indexOf("[[") + 2, raw.indexOf("]]")), true));
        }
        return categories;
    }

    public String getTitle() {
        return title;
    }

    // Currently unused; kept private so it does not widen the API.
    private void setTitle(String title) {
        this.title = title;
    }

    public ArrayList<Category> getLinkedCategories() {
        return linkedCategories;
    }

    private void addLinkedCategory(Category category) {
        if (linkedCategories == null) {
            linkedCategories = new ArrayList<>();
        }
        linkedCategories.add(category);
    }

    /** Case-insensitive ordering by title; a null argument sorts first. */
    @Override
    public int compareTo(Category o) {
        if (o == this) {
            return 0;
        }
        if (o == null) {
            return 1;
        }
        return this.getTitle().compareToIgnoreCase(o.getTitle());
    }
}
apache-2.0
naver/pinpoint
web/src/main/java/com/navercorp/pinpoint/web/websocket/ActiveThreadCountWorker.java
12246
/*
 * Copyright 2018 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.navercorp.pinpoint.web.websocket;

import com.navercorp.pinpoint.rpc.packet.stream.StreamClosePacket;
import com.navercorp.pinpoint.rpc.packet.stream.StreamCode;
import com.navercorp.pinpoint.rpc.packet.stream.StreamResponsePacket;
import com.navercorp.pinpoint.rpc.stream.ClientStreamChannel;
import com.navercorp.pinpoint.rpc.stream.ClientStreamChannelEventHandler;
import com.navercorp.pinpoint.rpc.stream.StreamChannel;
import com.navercorp.pinpoint.rpc.stream.StreamChannelStateCode;
import com.navercorp.pinpoint.rpc.stream.StreamException;
import com.navercorp.pinpoint.thrift.dto.command.TCmdActiveThreadCount;
import com.navercorp.pinpoint.thrift.dto.command.TCommandTransferResponse;
import com.navercorp.pinpoint.thrift.dto.command.TRouteResult;
import com.navercorp.pinpoint.web.service.AgentService;
import com.navercorp.pinpoint.web.vo.AgentActiveThreadCount;
import com.navercorp.pinpoint.web.vo.AgentActiveThreadCountFactory;
import com.navercorp.pinpoint.web.vo.AgentInfo;
import org.apache.thrift.TBase;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Objects;

/**
 * Per-agent worker that keeps a stream channel open to one agent and feeds
 * active-thread-count responses into the shared response aggregator.
 * <p>
 * Lifecycle: {@code connect -> active -> (reactive)* -> stop}. All state
 * transitions are serialized on {@link #lock}; {@code started/active/stopped}
 * are volatile so readers outside the lock see current values.
 *
 * @author Taejin Koo
 */
public class ActiveThreadCountWorker implements PinpointWebSocketHandlerWorker {

    private static final TCmdActiveThreadCount COMMAND_INSTANCE = new TCmdActiveThreadCount();
    private static final ActiveThreadCountErrorType INTERNAL_ERROR = ActiveThreadCountErrorType.PINPOINT_INTERNAL_ERROR;

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    private final Object lock = new Object();
    private final AgentService agentService;
    private final String applicationName;
    private final String agentId;

    private final PinpointWebSocketResponseAggregator responseAggregator;
    private final WorkerActiveManager workerActiveManager;

    private final AgentActiveThreadCountFactory failResponseFactory;
    // Response handed out while no live data is available; updated whenever
    // the channel state or error condition changes.
    private volatile AgentActiveThreadCount defaultFailResponse;

    private final EventHandler eventHandler = new EventHandler();

    private volatile boolean started = false;
    private volatile boolean active = false;
    private volatile boolean stopped = false;

    // Guarded by lock for writes; the open channel to the agent.
    private StreamChannel streamChannel;

    public ActiveThreadCountWorker(AgentService agentService, AgentInfo agentInfo, PinpointWebSocketResponseAggregator webSocketResponseAggregator, WorkerActiveManager workerActiveManager) {
        this(agentService, agentInfo.getApplicationName(), agentInfo.getAgentId(), webSocketResponseAggregator, workerActiveManager);
    }

    public ActiveThreadCountWorker(AgentService agentService, String applicationName, String agentId, PinpointWebSocketResponseAggregator webSocketResponseAggregator, WorkerActiveManager workerActiveManager) {
        this.agentService = Objects.requireNonNull(agentService, "agentService");

        this.applicationName = Objects.requireNonNull(applicationName, "applicationName");
        this.agentId = Objects.requireNonNull(agentId, "agentId");

        this.responseAggregator = Objects.requireNonNull(webSocketResponseAggregator, "responseAggregator");
        this.workerActiveManager = Objects.requireNonNull(workerActiveManager, "workerActiveManager");

        AgentActiveThreadCountFactory failResponseFactory = new AgentActiveThreadCountFactory();
        failResponseFactory.setAgentId(agentId);

        this.failResponseFactory = failResponseFactory;
        this.defaultFailResponse = failResponseFactory.createFail(INTERNAL_ERROR.getMessage());
    }

    /**
     * Opens the stream channel for this worker's agent. Returns the channel on
     * success, or null when the agent does not match this worker, the worker
     * has already started, or the connection attempt fails (in which case a
     * reactive retry may be scheduled and the default fail response updated).
     */
    @Override
    public StreamChannel connect(AgentInfo agentInfo) {
        if (!applicationName.equals(agentInfo.getApplicationName())) {
            return null;
        }
        if (!agentId.equals(agentInfo.getAgentId())) {
            return null;
        }

        synchronized (lock) {
            if (!started) {
                started = true;

                logger.info("ActiveThreadCountWorker start. applicationName:{}, agentId:{}", applicationName, agentId);

                StreamChannel streamChannel = null;
                try {
                    streamChannel = connect0(agentInfo);
                    return streamChannel;
                } catch (StreamException streamException) {
                    // NOTE: connect0 either returns or throws before the local
                    // is assigned, so this close guard is effectively dead;
                    // kept defensively.
                    if (streamChannel != null) {
                        streamChannel.close(streamException.getStreamCode());
                    }
                    StreamCode streamCode = streamException.getStreamCode();
                    if (streamCode == StreamCode.CONNECTION_NOT_FOUND) {
                        // Agent not reachable yet; retry later.
                        workerActiveManager.addReactiveWorker(agentInfo);
                    }
                    setDefaultErrorMessage(streamCode.name());
                } catch (TException exception) {
                    if (streamChannel != null) {
                        streamChannel.close(StreamCode.TYPE_UNKNOWN);
                    }
                    setDefaultErrorMessage(TRouteResult.NOT_SUPPORTED_REQUEST.name());
                }
            }
        }
        return null;
    }

    /**
     * Activates a previously-connected channel, waiting up to waitTimeout ms
     * for it to open. With a null channel, schedules a reactive reconnect.
     */
    @Override
    public void active(StreamChannel streamChannel, long waitTimeout) {
        synchronized (lock) {
            if (started) {
                if (streamChannel != null) {
                    this.active = active0(streamChannel, waitTimeout);
                } else {
                    workerActiveManager.addReactiveWorker(applicationName, agentId);
                }
            }
        }
    }

    /**
     * Re-establishes the channel for a worker that is started but no longer
     * active. Returns whether the worker is active afterwards.
     */
    @Override
    public boolean reactive(AgentInfo agentInfo) {
        synchronized (lock) {
            if (isTurnOn()) {
                if (active) {
                    return true;
                }

                logger.info("ActiveThreadCountWorker reactive. applicationName:{}, agentId:{}", applicationName, agentId);
                active = active0(agentInfo);
                return active;
            }
        }
        return false;
    }

    /** Stops the worker and best-effort closes its channel. Idempotent. */
    @Override
    public void stop() {
        synchronized (lock) {
            if (isTurnOn()) {
                stopped = true;

                logger.info("ActiveThreadCountWorker stop. applicationName:{}, agentId:{}, streamChannel:{}", applicationName, agentId, streamChannel);

                try {
                    closeStreamChannel();
                } catch (Exception ignored) {
                    // best-effort shutdown; failures here are not actionable
                }
                return;
            }
        }
    }

    // Connect-and-activate used by the reactive path; 3s open timeout.
    private boolean active0(AgentInfo agentInfo) {
        synchronized (lock) {
            StreamChannel streamChannel = null;
            try {
                streamChannel = connect0(agentInfo);
                return active0(streamChannel, 3000);
            } catch (StreamException streamException) {
                if (streamChannel != null) {
                    streamChannel.close(streamException.getStreamCode());
                }
                StreamCode streamCode = streamException.getStreamCode();
                if (streamCode == StreamCode.CONNECTION_NOT_FOUND) {
                    workerActiveManager.addReactiveWorker(agentInfo);
                }
                setDefaultErrorMessage(streamCode.name());
            } catch (TException exception) {
                if (streamChannel != null) {
                    streamChannel.close(StreamCode.TYPE_UNKNOWN);
                }
                setDefaultErrorMessage(TRouteResult.NOT_SUPPORTED_REQUEST.name());
            }
            return false;
        }
    }

    // Waits for the channel to open; on success stores it and primes the
    // default fail response to TIMEOUT (shown until real data arrives).
    private boolean active0(StreamChannel streamChannel, long timeout) {
        synchronized (lock) {
            boolean connected = streamChannel.awaitOpen(timeout);
            if (connected) {
                this.streamChannel = streamChannel;
                setDefaultErrorMessage(TRouteResult.TIMEOUT.name());
                return true;
            } else {
                streamChannel.close(StreamCode.CONNECTION_TIMEOUT);
                return false;
            }
        }
    }

    private StreamChannel connect0(AgentInfo agentInfo) throws TException, StreamException {
        return agentService.openStream(agentInfo, COMMAND_INSTANCE, eventHandler);
    }

    /** Whether the worker has been started and not yet stopped. */
    private boolean isTurnOn() {
        return started && !stopped;
    }

    private void closeStreamChannel() {
        if (streamChannel != null) {
            streamChannel.close();
        }
        setDefaultErrorMessage(StreamCode.STATE_CLOSED.name());
    }

    /** Maps the message to a known error type and refreshes the fail response. */
    private void setDefaultErrorMessage(String message) {
        ActiveThreadCountErrorType errorType = ActiveThreadCountErrorType.getType(message);
        defaultFailResponse = failResponseFactory.createFail(errorType.getCode(), errorType.getMessage());
    }

    public String getAgentId() {
        return agentId;
    }

    public AgentActiveThreadCount getDefaultFailResponse() {
        return defaultFailResponse;
    }

    /**
     * Stream-channel callback: forwards agent responses to the aggregator and
     * degrades this worker to inactive when the channel closes or errors.
     */
    private class EventHandler extends ClientStreamChannelEventHandler {

        @Override
        public void handleStreamResponsePacket(ClientStreamChannel streamChannel, StreamResponsePacket packet) {
            if (logger.isDebugEnabled()) {
                logger.debug("handleStreamResponsePacket() streamChannel:{}, packet:{}", streamChannel, packet);
            }

            TBase<?, ?> response = agentService.deserializeResponse(packet.getPayload(), null);
            AgentActiveThreadCount activeThreadCount = getAgentActiveThreadCount(response);
            responseAggregator.response(activeThreadCount);
        }

        @Override
        public void handleStreamClosePacket(ClientStreamChannel streamChannel, StreamClosePacket packet) {
            if (logger.isDebugEnabled()) {
                logger.debug("handleStreamClosePacket() streamChannel:{}, packet:{}", streamChannel, packet);
            }

            setDefaultErrorMessage(StreamCode.STATE_CLOSED.name());
        }

        @Override
        public void stateUpdated(ClientStreamChannel streamChannel, StreamChannelStateCode updatedStateCode) {
            if (logger.isDebugEnabled()) {
                logger.debug("stateUpdated() streamChannel:{}, stateCode:{}", streamChannel, updatedStateCode);
            }

            switch (updatedStateCode) {
                case CLOSED:
                case ILLEGAL_STATE:
                    if (isTurnOn()) {
                        // Channel died while we are supposed to be running:
                        // mark inactive and queue a reconnect.
                        active = false;
                        workerActiveManager.addReactiveWorker(agentId);
                        setDefaultErrorMessage(StreamCode.STATE_CLOSED.name());
                    }
                    break;
            }
        }

        // Unwraps a routed transfer response into an AgentActiveThreadCount,
        // or returns the internal-error fail response for unexpected payloads.
        private AgentActiveThreadCount getAgentActiveThreadCount(TBase<?, ?> routeResponse) {
            if (routeResponse instanceof TCommandTransferResponse) {
                byte[] payload = ((TCommandTransferResponse) routeResponse).getPayload();
                TBase<?, ?> activeThreadCountResponse = agentService.deserializeResponse(payload, null);

                AgentActiveThreadCountFactory factory = new AgentActiveThreadCountFactory();
                factory.setAgentId(agentId);
                return factory.create(activeThreadCountResponse);
            } else {
                logger.warn("getAgentActiveThreadCount failed. applicationName:{}, agentId:{}", applicationName, agentId);

                AgentActiveThreadCountFactory factory = new AgentActiveThreadCountFactory();
                factory.setAgentId(agentId);
                return factory.createFail(INTERNAL_ERROR.getMessage());
            }
        }
    }
}
apache-2.0
spohnan/geowave
analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomFunctionRegistry.java
2016
/**
 * Copyright (c) 2013-2019 Contributors to the Eclipse Foundation
 *
 * <p> See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.spark.sparksql.udf;

import java.io.Serializable;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.locationtech.geowave.analytic.spark.sparksql.GeoWaveSpatialEncoders;
import org.locationtech.geowave.analytic.spark.sparksql.udf.UDFRegistrySPI.UDFNameAndConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Registers GeoWave's geometry UDFs with a Spark session so they are callable
 * from Spark SQL.
 */
public class GeomFunctionRegistry implements Serializable {
  private static final long serialVersionUID = -1729498500215830962L;
  private static final Logger LOGGER = LoggerFactory.getLogger(GeomFunctionRegistry.class);

  private static GeomDistance geomDistanceInstance = new GeomDistance();
  private static GeomFromWKT geomWKTInstance = new GeomFromWKT();

  /**
   * Installs every geometry function on the given session: the two special
   * cases below plus all predicates discovered through the UDF registry SPI.
   */
  public static void registerGeometryFunctions(final SparkSession spark) {

    // GeomDistance does not fit the GeomFunction predicate interface — it
    // yields a Double rather than a Boolean — so it is registered explicitly,
    // as is the WKT parsing function with its geometry return type.
    spark.udf().register("GeomDistance", geomDistanceInstance, DataTypes.DoubleType);
    spark.udf().register("GeomFromWKT", geomWKTInstance, GeoWaveSpatialEncoders.geometryUDT);

    // Every SPI-provided predicate UDF returns Boolean; register each one
    // under its own name.
    for (final UDFNameAndConstructor supported : UDFRegistrySPI.getSupportedUDFs()) {
      final GeomFunction predicate = supported.getPredicateConstructor().get();
      spark.udf().register(predicate.getRegisterName(), predicate, DataTypes.BooleanType);
    }
  }
}
apache-2.0
DieBauer/flink
flink-core/src/main/java/org/apache/flink/core/memory/ByteArrayInputStreamWithPos.java
2757
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.core.memory;

import org.apache.flink.annotation.Internal;
import org.apache.flink.util.Preconditions;

import java.io.IOException;
import java.io.InputStream;

/**
 * Un-synchronized stream similar to Java's ByteArrayInputStream that also exposes the current position.
 */
@Internal
public class ByteArrayInputStreamWithPos extends InputStream {

	// Backing array; never copied, so callers share mutations with the stream.
	protected byte[] buffer;

	// Next read offset into the buffer.
	protected int position;

	// Exclusive end offset of readable data.
	protected int count;

	// Offset restored by reset(); initially the starting offset.
	protected int mark = 0;

	public ByteArrayInputStreamWithPos(byte[] buffer) {
		this(buffer, 0, buffer.length);
	}

	public ByteArrayInputStreamWithPos(byte[] buffer, int offset, int length) {
		this.position = offset;
		this.buffer = buffer;
		this.mark = offset;
		// Clamp so a too-large length cannot read past the array.
		this.count = Math.min(buffer.length, offset + length);
	}

	/**
	 * Reads one byte, returned as an unsigned value in [0, 255], or -1 at end
	 * of stream.
	 */
	@Override
	public int read() {
		return (position < count) ? 0xFF & (buffer[position++]) : -1;
	}

	/**
	 * Reads up to {@code len} bytes into {@code b} at {@code off}.
	 *
	 * @return the number of bytes copied, 0 when {@code len} is 0, or -1 at
	 *         end of stream
	 * @throws NullPointerException if {@code b} is null
	 * @throws IndexOutOfBoundsException if {@code off}/{@code len} do not
	 *         describe a valid range of {@code b}
	 */
	@Override
	public int read(byte[] b, int off, int len) {
		Preconditions.checkNotNull(b);

		if (off < 0 || len < 0 || len > b.length - off) {
			// Include the offending values so failures are diagnosable
			// (previously thrown without any message).
			throw new IndexOutOfBoundsException(
					"offset=" + off + ", length=" + len + ", array length=" + b.length);
		}

		if (position >= count) {
			return -1; // signal EOF
		}

		int available = count - position;

		if (len > available) {
			len = available;
		}

		if (len <= 0) {
			return 0;
		}

		System.arraycopy(buffer, position, b, off, len);
		position += len;
		return len;
	}

	/**
	 * Skips up to {@code toSkip} bytes; negative requests skip nothing.
	 *
	 * @return the number of bytes actually skipped
	 */
	@Override
	public long skip(long toSkip) {
		long remain = count - position;

		if (toSkip < remain) {
			remain = toSkip < 0 ? 0 : toSkip;
		}

		position += remain;
		return remain;
	}

	@Override
	public boolean markSupported() {
		return true;
	}

	/**
	 * Marks the current position. The {@code readAheadLimit} argument is
	 * ignored, matching {@link java.io.ByteArrayInputStream} semantics.
	 */
	@Override
	public void mark(int readAheadLimit) {
		mark = position;
	}

	@Override
	public void reset() {
		position = mark;
	}

	@Override
	public int available() {
		return count - position;
	}

	@Override
	public void close() throws IOException {
	}

	public int getPosition() {
		return position;
	}

	// NOTE(review): pos is not range-checked against [0, count]; an
	// out-of-range value only surfaces later as EOF or an array error —
	// confirm whether callers rely on lenient behavior before tightening.
	public void setPos(int pos) {
		this.position = pos;
	}
}
apache-2.0
martenscs/optaplanner-osgi
org.optaplanner.examples.projectjobscheduling/src/org/optaplanner/examples/projectjobscheduling/domain/solver/PredecessorsDoneDateUpdatingVariableListener.java
2597
package org.optaplanner.examples.projectjobscheduling.domain.solver;

import java.util.ArrayDeque;
import java.util.Queue;

import org.apache.commons.lang.ObjectUtils;
import org.optaplanner.core.impl.domain.variable.listener.VariableListener;
import org.optaplanner.core.impl.score.director.ScoreDirector;
import org.optaplanner.examples.projectjobscheduling.domain.Allocation;

/**
 * Shadow-variable listener that keeps each Allocation's predecessorsDoneDate
 * in sync: whenever an allocation changes, the new done date is propagated
 * breadth-first through its successors until it stops changing.
 */
public class PredecessorsDoneDateUpdatingVariableListener implements VariableListener<Allocation> {

    public void beforeEntityAdded(ScoreDirector scoreDirector, Allocation allocation) {
        // Nothing to do before the entity exists.
    }

    public void afterEntityAdded(ScoreDirector scoreDirector, Allocation allocation) {
        updateAllocation(scoreDirector, allocation);
    }

    public void beforeVariableChanged(ScoreDirector scoreDirector, Allocation allocation) {
        // Nothing to do before the change is applied.
    }

    public void afterVariableChanged(ScoreDirector scoreDirector, Allocation allocation) {
        updateAllocation(scoreDirector, allocation);
    }

    public void beforeEntityRemoved(ScoreDirector scoreDirector, Allocation allocation) {
        // Nothing to do.
    }

    public void afterEntityRemoved(ScoreDirector scoreDirector, Allocation allocation) {
        // Nothing to do.
    }

    /**
     * Propagates the changed allocation's end date through the successor
     * graph. Only allocations whose done date actually changed enqueue their
     * own successors, so propagation terminates once dates stabilize.
     */
    protected void updateAllocation(ScoreDirector scoreDirector, Allocation originalAllocation) {
        Queue<Allocation> pending =
                new ArrayDeque<Allocation>(originalAllocation.getSuccessorAllocationList());
        while (!pending.isEmpty()) {
            Allocation successor = pending.remove();
            if (updatePredecessorsDoneDate(scoreDirector, successor)) {
                pending.addAll(successor.getSuccessorAllocationList());
            }
        }
    }

    /**
     * Recomputes one allocation's predecessorsDoneDate as the maximum end
     * date over all its predecessors.
     *
     * @param scoreDirector
     *            never null
     * @param allocation
     *            never null
     * @return true if the shadow variable changed
     */
    protected boolean updatePredecessorsDoneDate(ScoreDirector scoreDirector, Allocation allocation) {
        // A source allocation has no predecessors, so its done date is 0.
        Integer doneDate = 0;
        for (Allocation predecessor : allocation.getPredecessorAllocationList()) {
            doneDate = Math.max(doneDate, predecessor.getEndDate());
        }
        if (ObjectUtils.equals(doneDate, allocation.getPredecessorsDoneDate())) {
            return false;
        }
        scoreDirector.beforeVariableChanged(allocation, "predecessorsDoneDate");
        allocation.setPredecessorsDoneDate(doneDate);
        scoreDirector.afterVariableChanged(allocation, "predecessorsDoneDate");
        return true;
    }

}
apache-2.0
Skullabs/kikaha
kikaha-maven-plugin/source/kikaha/mojo/generator/Config.java
454
package kikaha.mojo.generator;

import static java.util.Arrays.asList;

import java.io.File;
import java.util.List;

/**
 * Mutable configuration holder for the code-generator Maven mojo, with
 * defaults for the conventional directory layouts.
 */
public class Config {

	// Candidate source roots, tried in order.
	public List<File> sourceDir = asList( file( "source" ), file( "src/main/java" ) );

	// Candidate build output roots.
	public List<File> outputDir = asList( file( "target" ), file( "output" ) );

	// NOTE: initialized as an alias of outputDir — both fields reference the
	// SAME list instance unless a caller reassigns one of them.
	public List<File> classOutputDir = outputDir;

	// Classpath entries; null until populated by the caller — TODO confirm.
	public List<File> classPath;

	// Shorthand factory used by the field initializers above.
	private static File file( String path ) {
		return new File( path );
	}
}
apache-2.0
kaqqao/csvorpal
src/main/java/kaqqao/csvorpal/deserialization/Deserializer.java
419
package kaqqao.csvorpal.deserialization;

/**
 * A deserializer capable of converting a String value into objects of the
 * generic type {@code T}.
 * Created by bojan.tomic on 9/4/16.
 */
public interface Deserializer<T> {

    /**
     * Deserializes a string into an object of type {@code T}.
     *
     * @param value The string value to deserialize
     * @return Object instance deserialized from the given string
     */
    T deserialize(String value);
}
apache-2.0
mchristensson/schematool-viz
src/main/java/org/mac/schematool/viz/model/def/TextElement.java
409
package org.mac.schematool.viz.model.def;

import org.codehaus.jackson.annotate.JsonProperty;
import org.mac.schematool.viz.model.Renderable;
import org.mac.schematool.viz.model.TextRenderable;

/**
 * Renderable schema element wrapping a text graphic; the graphic is bound to
 * the JSON property "Text" during (de)serialization.
 *
 * @author mac
 */
public class TextElement implements Renderable{

	// NOTE(review): write-only — there is no getter; presumably Jackson
	// accesses the field directly via the annotation. Confirm before adding
	// an accessor.
	@JsonProperty(value = "Text")
	private TextRenderable graphic;

	public void setText(TextRenderable text) {
		this.graphic = text;
	}
}
apache-2.0
Veeson/easydict
app/src/main/java/com/veeson/easydict/common/capsulation/DownloadManager.java
2779
package com.veeson.easydict.common.capsulation; import android.content.Context; import android.widget.Toast; import com.veeson.easydict.AppConstants; import com.veeson.easydict.network.Network; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import okhttp3.ResponseBody; import retrofit2.Call; import retrofit2.Callback; import retrofit2.Response; /** * 下载管理类 * Created by Wilson on 2016/6/11. */ public class DownloadManager { /** * 下载每日一句英文发音文件 * * @param date * @return */ public static void downloadDailyVoice(final Context context, final String date) { Network.getDailyVoiceApi().downloadDailyVoice("http://news.iciba.com/admin/tts/" + date + "-day.mp3").enqueue(new Callback<ResponseBody>() { @Override public void onResponse(Call<ResponseBody> call, Response<ResponseBody> response) { Toast.makeText(context, "正在获取发音...", Toast.LENGTH_SHORT).show(); FileOutputStream fos = null; InputStream is = null; try { String file_path = AppConstants.APP_VOICE_DIR; File dir = new File(file_path); if (!dir.exists()) { dir.mkdirs(); } File file = new File(dir, date + ".mp3"); fos = new FileOutputStream(file); is = response.body().byteStream(); byte[] buffer = new byte[1024]; int len1 = 0; while ((len1 = is.read(buffer)) != -1) { fos.write(buffer, 0, len1); } PlayAudioManager.playAudio(context, AppConstants.APP_VOICE_DIR + "/" + date + ".mp3"); } catch (Exception ex) { ex.printStackTrace(); } finally { if (fos != null) { try { fos.close(); } catch (IOException e) { e.printStackTrace(); } } if (is != null) { try { is.close(); } catch (IOException e) { e.printStackTrace(); } } } } @Override public void onFailure(Call<ResponseBody> call, Throwable t) { Toast.makeText(context, "获取发音失败,请检查网络设置", Toast.LENGTH_SHORT).show(); } }); } }
apache-2.0
joewalnes/idea-community
platform/lang-impl/src/com/intellij/refactoring/changeSignature/DefaultValueChooser.java
3141
/*
 * Copyright 2000-2010 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.changeSignature;

import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.MultiLineLabelUI;
import com.intellij.refactoring.util.RadioUpDownListener;
import com.intellij.ui.EditorTextField;

import javax.swing.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

/**
 * Modal dialog that asks the user how to fill the default value for a newly
 * added parameter during a Change Signature refactoring. Offers three
 * mutually-exclusive options: leave the argument blank, "feel lucky"
 * (search the call site for a variable of the matching type), or use an
 * explicit value typed into the editor field.
 *
 * User: anna
 * Date: Sep 13, 2010
 */
public class DefaultValueChooser extends DialogWrapper{
  // NOTE(review): these component fields are never assigned in this class —
  // presumably bound from the associated GUI-designer .form file; confirm.
  private JRadioButton myLeaveBlankRadioButton;
  private JRadioButton myFeelLuckyRadioButton;
  private JLabel myFeelLuckyDescription;
  private JRadioButton myUseValueRadioButton;
  private EditorTextField myValueEditor;
  private JPanel myWholePanel;
  private JLabel myBlankDescription;

  /**
   * @param project      current project, passed to {@link DialogWrapper}
   * @param name         name of the parameter the default value is for (shown in the title)
   * @param defaultValue initial text for the explicit-value editor field
   */
  public DefaultValueChooser(Project project, String name, String defaultValue) {
    super(project);
    // Allow Up/Down arrow keys to move selection between the three radio buttons.
    new RadioUpDownListener(myLeaveBlankRadioButton, myFeelLuckyRadioButton, myUseValueRadioButton);
    final ActionListener actionListener = new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        // The value editor is only usable when the "use value" option is chosen.
        myValueEditor.setEnabled(myUseValueRadioButton.isSelected());
        if (myUseValueRadioButton.isSelected()) {
          myValueEditor.selectAll();
          myValueEditor.requestFocus();
        }
      }
    };
    myLeaveBlankRadioButton.addActionListener(actionListener);
    myFeelLuckyRadioButton.addActionListener(actionListener);
    myUseValueRadioButton.addActionListener(actionListener);
    setTitle("Default value for parameter \"" + name + "\" needed");
    // "Leave blank" is the default choice; the editor starts disabled to match.
    myLeaveBlankRadioButton.setSelected(true);
    myValueEditor.setEnabled(false);
    myFeelLuckyDescription.setText("In method call place variable of the same type would be searched.\n" +
                                   "When exactly one is found - it would be used.\n" +
                                   "Blank place would be used otherwise");
    // MultiLineLabelUI renders the embedded '\n' characters as separate lines.
    myFeelLuckyDescription.setUI(new MultiLineLabelUI());
    myBlankDescription.setUI(new MultiLineLabelUI());
    myValueEditor.setText(defaultValue);
    init();
  }

  /** @return true when the user chose the "feel lucky" option. */
  public boolean feelLucky() {
    return myFeelLuckyRadioButton.isSelected();
  }

  /**
   * @return the chosen default value: empty string for "leave blank",
   *         otherwise the text of the value editor
   */
  public String getDefaultValue() {
    if (myLeaveBlankRadioButton.isSelected()) {
      return "";
    }
    return myValueEditor.getText();
  }

  @Override
  public JComponent getPreferredFocusedComponent() {
    return myLeaveBlankRadioButton;
  }

  @Override
  protected JComponent createCenterPanel() {
    return myWholePanel;
  }
}
apache-2.0
nedap/archie
src/main/java/com/nedap/archie/serializer/adl/constraints/CDateTimeSerializer.java
1162
/*
 * ADL2-core
 * Copyright (c) 2013-2014 Marand d.o.o. (www.marand.com)
 *
 * This file is part of ADL2-core.
 *
 * ADL2-core is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package com.nedap.archie.serializer.adl.constraints;

import com.nedap.archie.aom.primitives.CDateTime;
import com.nedap.archie.serializer.adl.ADLDefinitionSerializer;

/**
 * ADL serializer for {@link CDateTime} (date-time) constraints. Adds no
 * behavior of its own: all serialization logic is inherited from
 * {@link CTemporalSerializer}; this subclass only fixes the generic type.
 *
 * @author Marko Pipan
 */
public class CDateTimeSerializer extends CTemporalSerializer<CDateTime> {
    /**
     * @param serializer the parent definition serializer this constraint
     *                   serializer delegates to
     */
    public CDateTimeSerializer(ADLDefinitionSerializer serializer) {
        super(serializer);
    }
}
apache-2.0
currying/molecule
molecule/src/main/java/com/toparchy/molecule/push/baidu/model/QueryMsgStatusRequest.java
2185
package com.toparchy.molecule.push.baidu.model; import com.toparchy.molecule.push.baidu.constants.BaiduPushConstants; import com.toparchy.molecule.push.baidu.core.annotation.HttpParamKeyName; import com.toparchy.molecule.push.baidu.core.annotation.R; import net.sf.json.JSONArray; public class QueryMsgStatusRequest extends PushRequest{ // 当查询单个消息的状态时,传入消息的String 类型的id; // 当查询批量消息的状态时,需要传入String类型的消息Id的数组。 @HttpParamKeyName(name=BaiduPushConstants.MSG_ID, param=R.REQUIRE) private String msgId = null; // @HttpParamKeyName(name=BaiduPushConstants.STATISTIC_TYPE, param=R.OPTIONAL) // private String statisticType = null; // get public String getMsgIdInString () { return msgId; } public String[] getMsgIdInArray () { JSONArray jsonMsgIds = JSONArray.fromObject(msgId); return (String[])jsonMsgIds.toArray(); } // public String getStatType() { // return statisticType; // } // set public void setMsgId (String msgId) { this.msgId = msgId; } public void setMsgIds (String[] msgIds) { JSONArray tmpMsgIds = new JSONArray(); for (int i = 0; i < msgIds.length; i++){ tmpMsgIds.add(i, msgIds[i]); } this.msgId = tmpMsgIds.toString(); } // public void setStatType(String statisticType) { // this.statisticType = statisticType; // } // add public QueryMsgStatusRequest addMsgId (String msgId) { this.msgId = msgId; return this; } public QueryMsgStatusRequest addMsgIds (String[] msgIds) { JSONArray tmpMsgIds = new JSONArray(); for (int i = 0; i < msgIds.length; i++){ tmpMsgIds.add(i, msgIds[i]); } this.msgId = tmpMsgIds.toString(); return this; } // public QueryMsgStatusRequest addStatType (String statisticType) { // this.statisticType = statisticType; // return this; // } public QueryMsgStatusRequest addDeviceType (Integer deviceType) { this.deviceType = deviceType; return this; } public QueryMsgStatusRequest addExpires(Long requestTimeOut) { this.expires = requestTimeOut; return this; } }
apache-2.0
caskdata/cdap
cdap-app-fabric/src/main/java/co/cask/cdap/internal/app/runtime/DefaultTaskLocalizationContext.java
1777
/* * Copyright © 2015 Cask Data, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package co.cask.cdap.internal.app.runtime; import co.cask.cdap.api.TaskLocalizationContext; import com.google.common.collect.ImmutableMap; import java.io.File; import java.io.FileNotFoundException; import java.io.Serializable; import java.util.Map; /** * An implementation of {@link TaskLocalizationContext} that can be initialized with some localized resources. */ public class DefaultTaskLocalizationContext implements TaskLocalizationContext, Serializable { private final Map<String, File> localizedResources; public DefaultTaskLocalizationContext(Map<String, File> localizedResources) { this.localizedResources = ImmutableMap.copyOf(localizedResources); } @Override public File getLocalFile(String name) throws FileNotFoundException { if (!localizedResources.containsKey(name)) { throw new FileNotFoundException(String.format("The specified file %s was not found. Please make sure it was " + "localized using context.localize().", name)); } return localizedResources.get(name); } @Override public Map<String, File> getAllLocalFiles() { return localizedResources; } }
apache-2.0
zpao/buck
src/com/facebook/buck/android/exopackage/ExopackageInstaller.java
15345
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.android.exopackage;

import com.facebook.buck.android.AdbHelper;
import com.facebook.buck.android.ApkInfo;
import com.facebook.buck.android.agent.util.AgentUtil;
import com.facebook.buck.core.build.execution.context.ExecutionContext;
import com.facebook.buck.core.exceptions.HumanReadableException;
import com.facebook.buck.core.sourcepath.resolver.SourcePathResolverAdapter;
import com.facebook.buck.core.util.log.Logger;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.ConsoleEvent;
import com.facebook.buck.event.PerfEventId;
import com.facebook.buck.event.SimplePerfEvent;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.util.NamedTemporaryFile;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Ordering;
import com.google.common.io.Closer;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import javax.annotation.Nullable;

/** ExopackageInstaller manages the installation of apps with the "exopackage" flag set to true. */
public class ExopackageInstaller {

  private static final Logger LOG = Logger.get(ExopackageInstaller.class);

  // Root directory on the device under which all exopackage artifacts are stored.
  public static final Path EXOPACKAGE_INSTALL_ROOT = Paths.get("/data/local/tmp/exopackage/");

  // Artifact type labels used when installing the corresponding file groups.
  public static final String SECONDARY_DEX_TYPE = "secondary_dex";
  public static final String NATIVE_LIBRARY_TYPE = "native_library";
  public static final String RESOURCES_TYPE = "resources";

  private final ProjectFilesystem projectFilesystem;
  private final BuckEventBus eventBus;
  private final SourcePathResolverAdapter pathResolver;
  private final AndroidDevice device;
  private final String packageName;
  // Per-package directory on the device: EXOPACKAGE_INSTALL_ROOT/<packageName>.
  private final Path dataRoot;

  /**
   * @param pathResolver      resolves build-rule source paths to absolute paths
   * @param context           execution context supplying the event bus
   * @param projectFilesystem filesystem of the build project
   * @param packageName       Android package name; must match AdbHelper's package-name pattern
   * @param device            the device to install onto
   */
  public ExopackageInstaller(
      SourcePathResolverAdapter pathResolver,
      ExecutionContext context,
      ProjectFilesystem projectFilesystem,
      String packageName,
      AndroidDevice device) {
    this.pathResolver = pathResolver;
    this.projectFilesystem = projectFilesystem;
    this.eventBus = context.getBuckEventBus();
    this.device = device;
    this.packageName = packageName;
    this.dataRoot = EXOPACKAGE_INSTALL_ROOT.resolve(packageName);

    Preconditions.checkArgument(AdbHelper.PACKAGE_NAME_PATTERN.matcher(packageName).matches());
  }

  /** @return Returns true. */
  // TODO(cjhopman): This return value is silly. Change it to be void.
  public boolean doInstall(ApkInfo apkInfo, @Nullable String processName) throws Exception {
    if (exopackageEnabled(apkInfo)) {
      device.mkDirP(dataRoot.toString());
      ImmutableSortedSet<Path> presentFiles = device.listDirRecursive(dataRoot);
      // exopackageEnabled() ensures the Optional is present here.
      ExopackageInfo exoInfo = apkInfo.getExopackageInfo().get();
      installMissingExopackageFiles(presentFiles, exoInfo);
      finishExoFileInstallation(presentFiles, exoInfo);
    }
    installApkIfNecessary(apkInfo);
    killApp(apkInfo, processName);
    return true;
  }

  /**
   * Stops the freshly (re)installed app so it restarts with the new code. Kills only the given
   * process when possible, otherwise stops the whole package.
   */
  public void killApp(ApkInfo apkInfo, @Nullable String processName) throws Exception {
    // TODO(dreiss): Make this work on Gingerbread.
    try (SimplePerfEvent.Scope ignored = SimplePerfEvent.scope(eventBus, "kill_app")) {
      // If a specific process name is given and we're not installing a full APK,
      // just kill that process, otherwise kill everything in the package
      if (shouldAppBeInstalled(apkInfo) || processName == null) {
        device.stopPackage(packageName);
      } else {
        device.killProcess(processName);
      }
    }
  }

  /** Installs the APK onto the device, but only when its signature differs from the installed one. */
  public void installApkIfNecessary(ApkInfo apkInfo) throws Exception {
    File apk = pathResolver.getAbsolutePath(apkInfo.getApkPath()).toFile();
    // TODO(dreiss): Support SD installation.
    boolean installViaSd = false;

    if (shouldAppBeInstalled(apkInfo)) {
      try (SimplePerfEvent.Scope ignored = SimplePerfEvent.scope(eventBus, "install_exo_apk")) {
        boolean success = device.installApkOnDevice(apk, installViaSd, false);
        if (!success) {
          throw new RuntimeException("Installing Apk failed.");
        }
      }
    }
  }

  /**
   * Computes the complete wanted set of exopackage files and metadata for the given info, deletes
   * on-device files that are present but no longer wanted, and writes the metadata files.
   *
   * @param presentFiles files currently present under the device data root
   * @param exoInfo      the exopackage contents to finalize
   */
  public void finishExoFileInstallation(
      ImmutableSortedSet<Path> presentFiles, ExopackageInfo exoInfo) throws Exception {
    ImmutableSet.Builder<Path> wantedPaths = ImmutableSet.builder();
    ImmutableMap.Builder<Path, String> metadata = ImmutableMap.builder();

    if (exoInfo.getDexInfo().isPresent()) {
      DexExoHelper dexExoHelper =
          new DexExoHelper(pathResolver, projectFilesystem, exoInfo.getDexInfo().get());
      wantedPaths.addAll(dexExoHelper.getFilesToInstall().keySet());
      metadata.putAll(dexExoHelper.getMetadataToInstall());
    }

    if (exoInfo.getNativeLibsInfo().isPresent()) {
      NativeExoHelper nativeExoHelper =
          new NativeExoHelper(
              () -> {
                try {
                  return device.getDeviceAbis();
                } catch (Exception e) {
                  throw new HumanReadableException("Unable to communicate with device", e);
                }
              },
              pathResolver,
              projectFilesystem,
              exoInfo.getNativeLibsInfo().get());
      wantedPaths.addAll(nativeExoHelper.getFilesToInstall().keySet());
      metadata.putAll(nativeExoHelper.getMetadataToInstall());
    }

    if (exoInfo.getResourcesInfo().isPresent()) {
      ResourcesExoHelper resourcesExoHelper =
          new ResourcesExoHelper(pathResolver, projectFilesystem, exoInfo.getResourcesInfo().get());
      wantedPaths.addAll(resourcesExoHelper.getFilesToInstall().keySet());
      metadata.putAll(resourcesExoHelper.getMetadataToInstall());
    }

    if (exoInfo.getModuleInfo().isPresent()) {
      ModuleExoHelper moduleExoHelper =
          new ModuleExoHelper(pathResolver, projectFilesystem, exoInfo.getModuleInfo().get());
      wantedPaths.addAll(moduleExoHelper.getFilesToInstall().keySet());
      metadata.putAll(moduleExoHelper.getMetadataToInstall());
    }

    deleteUnwantedFiles(presentFiles, wantedPaths.build());
    installMetadata(metadata.build());
  }

  /**
   * Pushes any exopackage artifacts (secondary dex, native libs, resources, modules) that the
   * given info wants but that are not already present on the device.
   */
  public void installMissingExopackageFiles(
      ImmutableSortedSet<Path> presentFiles, ExopackageInfo exoInfo) throws Exception {

    if (exoInfo.getDexInfo().isPresent()) {
      DexExoHelper dexExoHelper =
          new DexExoHelper(pathResolver, projectFilesystem, exoInfo.getDexInfo().get());
      installMissingFiles(presentFiles, dexExoHelper.getFilesToInstall(), SECONDARY_DEX_TYPE);
    }

    if (exoInfo.getNativeLibsInfo().isPresent()) {
      NativeExoHelper nativeExoHelper =
          new NativeExoHelper(
              () -> {
                try {
                  return device.getDeviceAbis();
                } catch (Exception e) {
                  throw new HumanReadableException("Unable to communicate with device", e);
                }
              },
              pathResolver,
              projectFilesystem,
              exoInfo.getNativeLibsInfo().get());
      installMissingFiles(presentFiles, nativeExoHelper.getFilesToInstall(), NATIVE_LIBRARY_TYPE);
    }

    if (exoInfo.getResourcesInfo().isPresent()) {
      ResourcesExoHelper resourcesExoHelper =
          new ResourcesExoHelper(pathResolver, projectFilesystem, exoInfo.getResourcesInfo().get());
      installMissingFiles(presentFiles, resourcesExoHelper.getFilesToInstall(), RESOURCES_TYPE);
    }

    if (exoInfo.getModuleInfo().isPresent()) {
      ModuleExoHelper moduleExoHelper =
          new ModuleExoHelper(pathResolver, projectFilesystem, exoInfo.getModuleInfo().get());
      installMissingFiles(presentFiles, moduleExoHelper.getFilesToInstall(), "modular_dex");
    }
  }

  /**
   * @param apkInfo the apk info to examine for exopackage items
   * @return true if the given apk info contains any items which need to be installed via exopackage
   */
  public static boolean exopackageEnabled(ApkInfo apkInfo) {
    return apkInfo
        .getExopackageInfo()
        .map(
            exoInfo ->
                exoInfo.getDexInfo().isPresent()
                    || exoInfo.getNativeLibsInfo().isPresent()
                    || exoInfo.getResourcesInfo().isPresent()
                    || exoInfo.getModuleInfo().isPresent())
        .orElse(false);
  }

  /** Queries the device for the installed package's info, wrapped in a perf-event scope. */
  private Optional<PackageInfo> getPackageInfo(String packageName) throws Exception {
    try (SimplePerfEvent.Scope ignored =
        SimplePerfEvent.scope(
            eventBus, PerfEventId.of("get_package_info"), "package", packageName)) {
      return device.getPackageInfo(packageName);
    }
  }

  /**
   * Decides whether the APK must be (re)installed: yes when the package is absent on the device
   * or when the installed APK's signature differs from the locally built one.
   */
  private boolean shouldAppBeInstalled(ApkInfo apkInfo) throws Exception {
    Optional<PackageInfo> appPackageInfo = getPackageInfo(packageName);
    if (!appPackageInfo.isPresent()) {
      eventBus.post(ConsoleEvent.info("App not installed. Installing now."));
      return true;
    }

    LOG.debug("App path: %s", appPackageInfo.get().apkPath);
    String installedAppSignature = getInstalledAppSignature(appPackageInfo.get().apkPath);
    String localAppSignature =
        AgentUtil.getJarSignature(pathResolver.getAbsolutePath(apkInfo.getApkPath()).toString());
    LOG.debug("Local app signature: %s", localAppSignature);
    LOG.debug("Remote app signature: %s", installedAppSignature);

    if (!installedAppSignature.equals(localAppSignature)) {
      LOG.debug("App signatures do not match. Must re-install.");
      return true;
    }

    LOG.debug("App signatures match. No need to install.");
    return false;
  }

  /** Reads the signature of an installed APK via the on-device agent's get-signature command. */
  private String getInstalledAppSignature(String packagePath) throws Exception {
    try (SimplePerfEvent.Scope ignored = SimplePerfEvent.scope(eventBus, "get_app_signature")) {
      String output = device.getSignature(packagePath);

      String result = output.trim();
      // A multi-line result indicates the agent produced unexpected output.
      if (result.contains("\n") || result.contains("\r")) {
        throw new IllegalStateException("Unexpected return from get-signature:\n" + output);
      }

      return result;
    }
  }

  /**
   * Installs the subset of {@code wantedFilesToInstall} whose device paths are not already in
   * {@code presentFiles}.
   *
   * @param filesType artifact type label used for the install perf event
   */
  public void installMissingFiles(
      ImmutableSortedSet<Path> presentFiles,
      ImmutableMap<Path, Path> wantedFilesToInstall,
      String filesType)
      throws Exception {
    ImmutableSortedMap<Path, Path> filesToInstall =
        wantedFilesToInstall.entrySet().stream()
            .filter(entry -> !presentFiles.contains(entry.getKey()))
            .collect(
                ImmutableSortedMap.toImmutableSortedMap(
                    Ordering.natural(), Map.Entry::getKey, Map.Entry::getValue));

    installFiles(filesType, filesToInstall);
  }

  /** Deletes present-but-unwanted device files, always sparing any file named "lock". */
  private void deleteUnwantedFiles(
      ImmutableSortedSet<Path> presentFiles, ImmutableSet<Path> wantedFiles) {
    ImmutableSortedSet<Path> filesToDelete =
        presentFiles.stream()
            // NOTE(review): the "lock" file appears to be reserved by the install
            // machinery and must never be deleted — confirm against the device agent.
            .filter(p -> !p.getFileName().toString().equals("lock") && !wantedFiles.contains(p))
            .collect(ImmutableSortedSet.toImmutableSortedSet(Ordering.natural()));
    deleteFiles(filesToDelete);
  }

  /** Removes the given files from the device, batching the rm calls per parent directory. */
  private void deleteFiles(ImmutableSortedSet<Path> filesToDelete) {
    filesToDelete.stream()
        .collect(
            ImmutableListMultimap.toImmutableListMultimap(
                path -> dataRoot.resolve(path).getParent(), path -> path.getFileName().toString()))
        .asMap()
        .forEach(
            (dir, files) -> {
              device.rmFiles(dir.toString(), files);
            });
  }

  /**
   * Pushes the given files to the device: creates target directories, maps device-relative keys
   * to absolute local sources, then hands the batch to the device.
   */
  private void installFiles(String filesType, ImmutableMap<Path, Path> filesToInstall)
      throws Exception {
    try (SimplePerfEvent.Scope ignored =
            SimplePerfEvent.scope(eventBus, "multi_install_" + filesType);
        AutoCloseable ignored1 = device.createForward()) {
      // Make sure all the directories exist.
      filesToInstall.keySet().stream()
          .map(p -> dataRoot.resolve(p).getParent())
          .distinct()
          .forEach(
              p -> {
                try {
                  device.mkDirP(p.toString());
                } catch (Exception e) {
                  throw new RuntimeException(e);
                }
              });
      // Plan the installation.
      Map<Path, Path> installPaths =
          filesToInstall.entrySet().stream()
              .collect(
                  Collectors.toMap(
                      entry -> dataRoot.resolve(entry.getKey()),
                      entry -> projectFilesystem.resolve(entry.getValue())));
      // Install the files.
      device.installFiles(filesType, installPaths);
    }
  }

  /**
   * Writes each metadata string to a local temp file and installs the batch under the "metadata"
   * type. Temp files live until the Closer releases them.
   */
  private void installMetadata(ImmutableMap<Path, String> metadataToInstall) throws Exception {
    try (Closer closer = Closer.create()) {
      Map<Path, Path> filesToInstall = new HashMap<>();
      for (Map.Entry<Path, String> entry : metadataToInstall.entrySet()) {
        NamedTemporaryFile temp = closer.register(new NamedTemporaryFile("metadata", "tmp"));
        com.google.common.io.Files.write(
            entry.getValue().getBytes(Charsets.UTF_8), temp.get().toFile());
        filesToInstall.put(entry.getKey(), temp.get());
      }
      installFiles("metadata", ImmutableMap.copyOf(filesToInstall));
    }
  }

  /**
   * Parses a text file which is supposed to be in the following format: "file_path_without_spaces
   * file_hash ...." i.e. it parses the first two columns of each line and ignores the rest of it.
   *
   * @return A multi map from the file hash to its path, which equals the raw path resolved against
   *     {@code resolvePathAgainst}.
   */
  @VisibleForTesting
  public static ImmutableMultimap<String, Path> parseExopackageInfoMetadata(
      Path metadataTxt, Path resolvePathAgainst, ProjectFilesystem filesystem) throws IOException {
    ImmutableMultimap.Builder<String, Path> builder = ImmutableMultimap.builder();
    for (String line : filesystem.readLines(metadataTxt)) {
      // ignore lines that start with '.'
      if (line.startsWith(".")) {
        continue;
      }
      List<String> parts = Splitter.on(' ').splitToList(line);
      if (parts.size() < 2) {
        throw new RuntimeException("Illegal line in metadata file: " + line);
      }
      builder.put(parts.get(1), resolvePathAgainst.resolve(parts.get(0)));
    }
    return builder.build();
  }
}
apache-2.0
mprobst/closure-compiler
test/com/google/javascript/jscomp/NodeTraversalTest.java
27612
/* * Copyright 2007 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.truth.Truth.assertThat; import static com.google.javascript.jscomp.CompilerTestCase.lines; import static com.google.javascript.rhino.testing.NodeSubject.assertNode; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.javascript.jscomp.CompilerOptions.LanguageMode; import com.google.javascript.jscomp.NodeTraversal.AbstractNodeTypePruningCallback; import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback; import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallbackInterface; import com.google.javascript.jscomp.NodeTraversal.ChangeScopeRootCallback; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import java.util.ArrayList; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import junit.framework.TestCase; /** * Tests for {@link NodeTraversal}. 
*/ public final class NodeTraversalTest extends TestCase { public void testPruningCallbackShouldTraverse1() { PruningCallback include = new PruningCallback(ImmutableSet.of(Token.SCRIPT, Token.VAR), true); Node script = new Node(Token.SCRIPT); assertTrue(include.shouldTraverse(null, script, null)); assertTrue(include.shouldTraverse(null, new Node(Token.VAR), null)); assertFalse(include.shouldTraverse(null, new Node(Token.NAME), null)); assertFalse(include.shouldTraverse(null, new Node(Token.ADD), null)); } public void testPruningCallbackShouldTraverse2() { PruningCallback include = new PruningCallback(ImmutableSet.of(Token.SCRIPT, Token.VAR), false); Node script = new Node(Token.SCRIPT); assertFalse(include.shouldTraverse(null, script, null)); assertFalse(include.shouldTraverse(null, new Node(Token.VAR), null)); assertTrue(include.shouldTraverse(null, new Node(Token.NAME), null)); assertTrue(include.shouldTraverse(null, new Node(Token.ADD), null)); } /** * Concrete implementation of AbstractPrunedCallback to test the * AbstractNodeTypePruningCallback shouldTraverse method. 
*/ static class PruningCallback extends AbstractNodeTypePruningCallback { public PruningCallback(Set<Token> nodeTypes, boolean include) { super(nodeTypes, include); } @Override public void visit(NodeTraversal t, Node n, Node parent) { throw new UnsupportedOperationException(); } } public void testReport() { final List<JSError> errors = new ArrayList<>(); Compiler compiler = new Compiler(new BasicErrorManager() { @Override public void report(CheckLevel level, JSError error) { errors.add(error); } @Override public void println(CheckLevel level, JSError error) { } @Override protected void printSummary() { } }); compiler.initCompilerOptionsIfTesting(); NodeTraversal t = new NodeTraversal(compiler, null, new Es6SyntacticScopeCreator(compiler)); DiagnosticType dt = DiagnosticType.warning("FOO", "{0}, {1} - {2}"); t.report(new Node(Token.EMPTY), dt, "Foo", "Bar", "Hello"); assertThat(errors).hasSize(1); assertEquals("Foo, Bar - Hello", errors.get(0).description); } private static final String TEST_EXCEPTION = "test me"; public void testUnexpectedException() { AbstractPostOrderCallbackInterface cb = (NodeTraversal t, Node n, Node parent) -> { throw new RuntimeException(TEST_EXCEPTION); }; Compiler compiler = new Compiler(); try { String code = "function foo() {}"; Node tree = parse(compiler, code); NodeTraversal.traversePostOrder(compiler, tree, cb); fail("Expected RuntimeException"); } catch (RuntimeException e) { assertThat(e) .hasMessageThat() .startsWith("INTERNAL COMPILER ERROR.\nPlease report this problem.\n\ntest me"); } } public void testGetScopeRoot() { Compiler compiler = new Compiler(); String code = lines( "var a;", "function foo() {", " var b", "}"); Node tree = parse(compiler, code); NodeTraversal.traverse( compiler, tree, new NodeTraversal.ScopedCallback() { @Override public void enterScope(NodeTraversal t) { Node root1 = t.getScopeRoot(); Scope scope2 = t.getScope(); Node root2 = scope2.getRootNode(); assertNode(root2).isEqualTo(root1); } @Override public 
void exitScope(NodeTraversal t) {} @Override public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) { return true; } @Override public void visit(NodeTraversal t, Node n, Node parent) {} }); } public void testGetHoistScopeRoot() { Compiler compiler = new Compiler(); String code = lines( "function foo() {", " if (true) { var XXX; }", "}"); Node tree = parse(compiler, code); NodeTraversal.traverse(compiler, tree, new NodeTraversal.Callback() { @Override public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) { return true; } @Override public void visit(NodeTraversal t, Node n, Node parent) { if (n.isName() && n.getString().equals("XXX")) { Node root = t.getClosestHoistScopeRoot(); assertThat(NodeUtil.isFunctionBlock(root)).isTrue(); t.getScope(); // force scope creation root = t.getClosestHoistScopeRoot(); assertThat(NodeUtil.isFunctionBlock(root)).isTrue(); } } } ); } private static class NameChangingCallback implements NodeTraversal.Callback { @Override public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) { return true; } @Override public void visit(NodeTraversal t, Node n, Node parent) { if (n.isName() && n.getString().equals("change")) { n.setString("xx"); t.reportCodeChange(); } } } public void testReportChange1() { String code = lines( "var change;", "function foo() {", " var b", "}"); assertChangesRecorded(code, new NameChangingCallback()); } public void testReportChange2() { String code = lines( "var a;", "function foo() {", " var change", "}"); assertChangesRecorded(code, new NameChangingCallback()); } public void testReportChange3() { String code = lines( "var a;", "function foo() {", " var b", "}", "var change"); assertChangesRecorded(code, new NameChangingCallback()); } public void testReportChange4() { String code = lines( "function foo() {", " function bar() {", " var change", " }", "}"); assertChangesRecorded(code, new NameChangingCallback()); } private void assertChangesRecorded(String code, 
NodeTraversal.Callback callback) { final String externs = ""; Compiler compiler = new Compiler(); Node tree = parseRoots(compiler, externs, code); ChangeVerifier changeVerifier = new ChangeVerifier(compiler).snapshot(tree); NodeTraversal.traverseRoots( compiler, callback, tree.getFirstChild(), tree.getSecondChild()); changeVerifier.checkRecordedChanges(tree); } public void testGetLineNoAndGetCharno() { Compiler compiler = new Compiler(); String code = "" + "var a; \n" + "function foo() {\n" + " var b;\n" + " if (a) { var c;}\n" + "}"; Node tree = parse(compiler, code); final StringBuilder builder = new StringBuilder(); NodeTraversal.traverse(compiler, tree, new NodeTraversal.ScopedCallback() { @Override public void enterScope(NodeTraversal t) { } @Override public void exitScope(NodeTraversal t) { } @Override public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) { return true; } @Override public void visit(NodeTraversal t, Node n, Node parent) { builder.append("visit "); builder.append(t.getCurrentNode().toString(false, true, true)); builder.append(" @"); builder.append(t.getLineNumber()); builder.append(":"); builder.append(t.getCharno()); builder.append("\n"); } } ); // Note the char numbers are 0-indexed but the line numbers are 1-indexed. 
String expectedResult = lines( "visit NAME a [source_file: [testcode]] @1:4", "visit VAR [source_file: [testcode]] @1:0", "visit NAME foo [source_file: [testcode]] @2:9", "visit PARAM_LIST [source_file: [testcode]] @2:12", "visit NAME b [source_file: [testcode]] @3:6", "visit VAR [source_file: [testcode]] @3:2", "visit NAME a [source_file: [testcode]] @4:6", "visit NAME c [source_file: [testcode]] @4:15", "visit VAR [source_file: [testcode]] @4:11", "visit BLOCK [source_file: [testcode]] @4:9", "visit IF [source_file: [testcode]] @4:2", "visit BLOCK [source_file: [testcode]] @2:15", "visit FUNCTION foo [source_file: [testcode]] @2:0", "visit SCRIPT [source_file: [testcode]]" + " [input_id: InputId: [testcode]]" + " [feature_set: []] @1:0\n"); assertEquals(expectedResult, builder.toString()); } public void testGetCurrentNode() { Compiler compiler = new Compiler(); ScopeCreator creator = SyntacticScopeCreator.makeUntyped(compiler); ExpectNodeOnEnterScope callback = new ExpectNodeOnEnterScope(); NodeTraversal t = new NodeTraversal(compiler, callback, creator); String code = lines( "var a;", "function foo() {", " var b;", "}"); Node tree = parse(compiler, code); Scope topScope = (Scope) creator.createScope(tree, null); // Calling #traverseWithScope uses the given scope but starts traversal at // the given node. callback.expect(tree.getFirstChild(), tree); t.traverseWithScope(tree.getFirstChild(), topScope); callback.assertEntered(); // Calling #traverse creates a new scope with the given node as the root. callback.expect(tree.getFirstChild(), tree.getFirstChild()); t.traverse(tree.getFirstChild()); callback.assertEntered(); // Calling #traverseAtScope starts traversal from the scope's root. 
Node fn = tree.getSecondChild(); Scope fnScope = (Scope) creator.createScope(fn, topScope); callback.expect(fn, fn); t.traverseAtScope(fnScope); callback.assertEntered(); } public void testTraverseAtScopeWithBlockScope() { Compiler compiler = new Compiler(); CompilerOptions options = new CompilerOptions(); options.setLanguageIn(LanguageMode.ECMASCRIPT_NEXT); compiler.initOptions(options); Es6SyntacticScopeCreator creator = new Es6SyntacticScopeCreator(compiler); ExpectNodeOnEnterScope callback = new ExpectNodeOnEnterScope(); NodeTraversal t = new NodeTraversal(compiler, callback, creator); String code = lines( "function foo() {", " if (bar) {", " let x;", " }", "}"); Node tree = parse(compiler, code); Scope topScope = creator.createScope(tree, null); Node innerBlock = tree // script .getFirstChild() // function .getLastChild() // function body .getFirstChild() // if .getLastChild(); // block Scope blockScope = creator.createScope(innerBlock, topScope); callback.expect(innerBlock, innerBlock); t.traverseAtScope(blockScope); callback.assertEntered(); } public void testTraverseAtScopeWithForScope() { Compiler compiler = new Compiler(); CompilerOptions options = new CompilerOptions(); options.setLanguageIn(LanguageMode.ECMASCRIPT_2015); compiler.initOptions(options); Es6SyntacticScopeCreator creator = new Es6SyntacticScopeCreator(compiler); ExpectNodeOnEnterScope callback = new ExpectNodeOnEnterScope(); NodeTraversal t = new NodeTraversal(compiler, callback, creator); String code = lines( "function foo() {", " var b = [0];", " for (let a of b) {", " let x;", " }", "}"); Node tree = parse(compiler, code); Scope topScope = creator.createScope(tree, null); Node forNode = tree // script .getFirstChild() // function .getLastChild() // function body .getSecondChild(); // for (first child is var b) Node innerBlock = forNode.getLastChild(); Scope forScope = creator.createScope(forNode, topScope); creator.createScope(innerBlock, forScope); callback.expect(forNode, forNode); 
t.traverseAtScope(forScope); callback.assertEntered(); } public void testTraverseAtScopeWithSwitchScope() { Compiler compiler = new Compiler(); CompilerOptions options = new CompilerOptions(); options.setLanguageIn(LanguageMode.ECMASCRIPT_2015); compiler.initOptions(options); Es6SyntacticScopeCreator creator = new Es6SyntacticScopeCreator(compiler); ExpectNodeOnEnterScope callback = new ExpectNodeOnEnterScope(); NodeTraversal t = new NodeTraversal(compiler, callback, creator); String code = lines( "function foo() {", " var b = [0];", " switch(b) {", " case 1:", " return b;", " case 2:", " }", "}"); Node tree = parse(compiler, code); Scope topScope = creator.createScope(tree, null); Node innerBlock = tree // script .getFirstChild() // function .getLastChild() // function body .getSecondChild(); // switch (first child is var b) Scope blockScope = creator.createScope(innerBlock, topScope); callback.expect(innerBlock, innerBlock); t.traverseAtScope(blockScope); callback.assertEntered(); } public void testTraverseAtScopeWithModuleScope() { Compiler compiler = new Compiler(); CompilerOptions options = new CompilerOptions(); options.setLanguageIn(LanguageMode.ECMASCRIPT_NEXT); compiler.initOptions(options); Es6SyntacticScopeCreator creator = new Es6SyntacticScopeCreator(compiler); ExpectNodeOnEnterScope callback = new ExpectNodeOnEnterScope(); NodeTraversal t = new NodeTraversal(compiler, callback, creator); String code = lines( "goog.module('example.module');", "", "var x;"); Node tree = parse(compiler, code); Scope globalScope = creator.createScope(tree, null); Node moduleBody = tree.getFirstChild(); Scope moduleScope = creator.createScope(moduleBody, globalScope); callback.expect(moduleBody, moduleBody); t.traverseAtScope(moduleScope); callback.assertEntered(); } public void testGetVarAccessible() { Compiler compiler = new Compiler(); CompilerOptions options = new CompilerOptions(); options.setLanguageIn(LanguageMode.ECMASCRIPT_2015); compiler.initOptions(options); 
Es6SyntacticScopeCreator creator = new Es6SyntacticScopeCreator(compiler); AccessibleCallback callback = new AccessibleCallback(); NodeTraversal t = new NodeTraversal(compiler, callback, creator); // variables are hoisted to their enclosing scope String code = lines( "var varDefinedInScript;", "var foo = function(param) {", " var varDefinedInFoo;", " var baz = function() {", " var varDefinedInBaz;", " }", "}", "var bar = function() {", " var varDefinedInBar;", "}"); // the function scope should have access to all variables defined before and in the function // scope Node tree = parse(compiler, code); Node fooNode = tree // script .getSecondChild() // var foo declaration (first child is var varDefinedInScript) .getFirstFirstChild(); // child of the var foo declaration is the foo function Scope topScope = creator.createScope(tree, null); Scope fooScope = creator.createScope(fooNode, topScope); callback.expect(4); t.traverseAtScope(fooScope); callback.assertAccessible(fooScope); // the function block scope should have access to all variables defined in the global, function, // and function block scopes Node fooBlockNode = fooNode.getLastChild(); Scope fooBlockScope = creator.createScope(fooBlockNode, fooScope); callback.expect(6); t.traverseAtScope(fooBlockScope); callback.assertAccessible(fooBlockScope); // let and const variables are block scoped code = lines( "var foo = function() {", " var varDefinedInFoo;", " var baz = function() {", " var varDefinedInBaz;", " let varDefinedInFoo;", // shadows parent scope " }", " let bar = 1;", "}"); // the baz block scope has access to variables in its scope and parent scopes tree = parse(compiler, code); fooNode = tree // script .getFirstChild()// var foo declaration (first child is var varDefinedInScript) .getFirstFirstChild(); // child of the var foo declaration is the foo function fooBlockNode = fooNode.getLastChild(); // first child is param list of foo Node bazNode = fooBlockNode.getSecondChild().getFirstFirstChild(); 
Node bazBlockNode = bazNode.getLastChild(); topScope = creator.createScope(tree, null); fooScope = creator.createScope(fooNode, topScope); fooBlockScope = creator.createScope(fooBlockNode, fooScope); Scope bazScope = creator.createScope(bazNode, fooBlockScope); Scope bazBlockScope = creator.createScope(bazBlockNode, bazScope); // bar, baz, foo, varDefinedInFoo(in baz function), varDefinedInBaz callback.expect(5); t.traverseAtScope(bazBlockScope); callback.assertAccessible(bazBlockScope); } public void testTraverseEs6ScopeRoots_isLimitedToScope() { Compiler compiler = new Compiler(); StringAccumulator callback = new StringAccumulator(); String code = lines( "function foo() {", " 'string in foo';", " function baz() {", " 'string nested in baz';", " }", "}", "function bar() {", " 'string in bar';", "}"); Node tree = parse(compiler, code); Node fooFunction = tree.getFirstChild(); // Traverse without entering nested scopes. NodeTraversal.traverseScopeRoots( compiler, null, ImmutableList.of(fooFunction), callback, false); assertThat(callback.strings).containsExactly("string in foo"); callback.strings.clear(); // Traverse *with* entering nested scopes, now also sees "string nested in baz". NodeTraversal.traverseScopeRoots( compiler, null, ImmutableList.of(fooFunction), callback, true); assertThat(callback.strings).containsExactly("string in foo", "string nested in baz"); } public void testTraverseEs6ScopeRoots_parentScopesWork() { Compiler compiler = new Compiler(); LexicallyScopedVarsAccumulator callback = new LexicallyScopedVarsAccumulator(); String code = lines( "var varDefinedInScript;", "var foo = function() {", " var varDefinedInFoo;", " var baz = function() {", " var varDefinedInBaz;", " }", "}", "var bar = function() {", " var varDefinedInBar;", "}"); Node tree = parse(compiler, code); Node fooFunction = tree.getSecondChild().getFirstFirstChild(); // Traverse without entering nested scopes. 
NodeTraversal.traverseScopeRoots( compiler, null, ImmutableList.of(fooFunction), callback, false); assertThat(callback.varNames) .containsExactly("varDefinedInScript", "foo", "bar", "varDefinedInFoo", "baz"); callback.varNames.clear(); // Traverse *with* entering nested scopes, now also sees "varDefinedInBaz". NodeTraversal.traverseScopeRoots( compiler, null, ImmutableList.of(fooFunction), callback, true); assertThat(callback.varNames) .containsExactly( "varDefinedInScript", "foo", "bar", "varDefinedInFoo", "baz", "varDefinedInBaz"); } public void testTraverseEs6ScopeRoots_callsEnterFunction() { Compiler compiler = new Compiler(); EnterFunctionAccumulator callback = new EnterFunctionAccumulator(); String code = lines( "function foo() {}", "function bar() {}", "function baz() {}"); Node tree = parse(compiler, code); Node fooFunction = tree.getFirstChild(); Node barFunction = fooFunction.getNext(); Node bazFunction = barFunction.getNext(); NodeTraversal.traverseScopeRoots( compiler, null, ImmutableList.of(fooFunction, barFunction, bazFunction), callback, callback, // FunctionCallback false); assertThat(callback.enteredFunctions).containsExactly(fooFunction, barFunction, bazFunction); } public void testTraverseEs6ScopeRoots_callsEnterScope() { Compiler compiler = new Compiler(); List<Node> scopesEntered = new ArrayList<>(); NodeTraversal.Callback callback = new NodeTraversal.ScopedCallback() { @Override public void visit(NodeTraversal t, Node n, Node parent) {} @Override public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) { return true; } @Override public void enterScope(NodeTraversal t) { scopesEntered.add(t.getScopeRoot()); } @Override public void exitScope(NodeTraversal t) {} }; String code = "function foo() { {} }"; Node tree = parse(compiler, code); Node fooFunction = tree.getFirstChild(); NodeTraversal.traverseScopeRoots( compiler, null, ImmutableList.of(fooFunction), callback, true); assertThat(scopesEntered).hasSize(3); // Function, function's 
body, and the block inside it. } public void testNodeTraversalInterruptable() { Compiler compiler = new Compiler(); String code = "var a; \n"; Node tree = parse(compiler, code); final AtomicInteger counter = new AtomicInteger(0); AbstractPostOrderCallbackInterface countingCallback = (NodeTraversal t, Node n, Node parent) -> { counter.incrementAndGet(); }; NodeTraversal.traversePostOrder(compiler, tree, countingCallback); assertThat(counter.get()).isEqualTo(3); counter.set(0); Thread.currentThread().interrupt(); try { NodeTraversal.traversePostOrder(compiler, tree, countingCallback); fail("Expected a RuntimeException;"); } catch (RuntimeException e) { assertThat(e).hasCauseThat().hasCauseThat().isInstanceOf(InterruptedException.class); } } private static final class EnterFunctionAccumulator extends AbstractPostOrderCallback implements ChangeScopeRootCallback { List<Node> enteredFunctions = new ArrayList<>(); @Override public void visit(NodeTraversal t, Node n, Node parent) {} @Override public void enterChangeScopeRoot(AbstractCompiler compiler, Node root) { enteredFunctions.add(root); } } // Helper class used to collect all the vars from current scope and its parent scopes private static final class LexicallyScopedVarsAccumulator extends AbstractPostOrderCallback { final Set<String> varNames = new LinkedHashSet<>(); @Override public void visit(NodeTraversal t, Node n, Node parent) { Scope firstScope = t.getScope(); if (firstScope == null) { return; } for (Scope scope = firstScope; scope != null; scope = scope.getParent()) { for (Var var : scope.getVarIterable()) { varNames.add(var.getName()); } } } } private static final class StringAccumulator extends AbstractPostOrderCallback { final List<String> strings = new ArrayList<>(); @Override public void visit(NodeTraversal t, Node n, Node parent) { if (n.isString()) { strings.add(n.getString()); } } } // Helper class used to test getCurrentNode private static class ExpectNodeOnEnterScope extends 
NodeTraversal.AbstractPreOrderCallback implements NodeTraversal.ScopedCallback { private Node node; private Node scopeRoot; private boolean entered = false; private void expect(Node node, Node scopeRoot) { this.node = node; this.scopeRoot = scopeRoot; entered = false; } private void assertEntered() { assertTrue(entered); } @Override public void enterScope(NodeTraversal t) { assertNode(t.getCurrentNode()).isEqualTo(node); assertNode(t.getScopeRoot()).isEqualTo(scopeRoot); if (t.getScopeCreator().hasBlockScope() && (node.isForIn() || node.isForOf())) { node = node.getLastChild(); scopeRoot = scopeRoot.getLastChild(); } entered = true; } @Override public void exitScope(NodeTraversal t) {} @Override public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) { return true; } } // Helper class used to test accessible variables private static class AccessibleCallback extends NodeTraversal.AbstractPreOrderCallback implements NodeTraversal.ScopedCallback { private int numAccessible; private void expect(int accessible) { this.numAccessible = accessible; } private void assertAccessible(Scope s) { assertThat(s.getAllAccessibleVariables()).hasSize(numAccessible); } @Override public void enterScope(NodeTraversal t) { } @Override public void exitScope(NodeTraversal t) { } @Override public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) { return true; } } private static Node parse(Compiler compiler, String js) { Node n = compiler.parseTestCode(js); assertThat(compiler.getErrors()).isEmpty(); IR.root(n); return n; } private static Node parseRoots(Compiler compiler, String externs, String js) { Node extern = parse(compiler, externs).detach(); Node main = parse(compiler, js).detach(); return IR.root(IR.root(extern), IR.root(main)); } }
apache-2.0
vladmihalcea/high-performance-java-persistence
core/src/test/java/com/vladmihalcea/book/hpjp/hibernate/association/ElementCollectionArrayTest.java
2176
package com.vladmihalcea.book.hpjp.hibernate.association; import com.vladmihalcea.book.hpjp.util.AbstractMySQLIntegrationTest; import org.junit.Test; import javax.persistence.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * @author Vlad Mihalcea */ public class ElementCollectionArrayTest extends AbstractMySQLIntegrationTest { @Override protected Class<?>[] entities() { return new Class<?>[] { Post.class }; } @Test public void testLifecycle() { doInJPA(entityManager -> { Post post = new Post() .setId(1L) .setTitle("High-Performance Java Persistence"); post.setComments(new String[] { "My first review", "My second review", "My third review", }); entityManager.persist(post); }); doInJPA(entityManager -> { Post post = entityManager.find(Post.class, 1L); LOGGER.info("Remove tail"); post.setComments(Arrays.copyOf(post.getComments(), 2)); }); doInJPA(entityManager -> { Post post = entityManager.find(Post.class, 1L); LOGGER.info("Remove head"); post.setComments(Arrays.copyOfRange(post.getComments(), 1, 2)); }); } @Entity(name = "Post") @Table(name = "post") public static class Post { @Id private Long id; private String title; @ElementCollection @OrderColumn(name = "position") private String[] comments; public Long getId() { return id; } public Post setId(Long id) { this.id = id; return this; } public String getTitle() { return title; } public Post setTitle(String title) { this.title = title; return this; } public String[] getComments() { return comments; } public void setComments(String[] comments) { this.comments = comments; } } }
apache-2.0
tabish121/OpenWire
openwire-core/src/main/java/io/openwire/commands/CommandVisitor.java
3528
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.openwire.commands; public interface CommandVisitor { Response processAddConnection(ConnectionInfo info) throws Exception; Response processAddSession(SessionInfo info) throws Exception; Response processAddProducer(ProducerInfo info) throws Exception; Response processAddConsumer(ConsumerInfo info) throws Exception; Response processRemoveConnection(ConnectionId id, long lastDeliveredSequenceId) throws Exception; Response processRemoveSession(SessionId id, long lastDeliveredSequenceId) throws Exception; Response processRemoveProducer(ProducerId id) throws Exception; Response processRemoveConsumer(ConsumerId id, long lastDeliveredSequenceId) throws Exception; Response processAddDestination(DestinationInfo info) throws Exception; Response processRemoveDestination(DestinationInfo info) throws Exception; Response processRemoveSubscription(RemoveSubscriptionInfo info) throws Exception; Response processMessage(Message send) throws Exception; Response processMessageAck(MessageAck ack) throws Exception; Response processMessagePull(MessagePull pull) throws Exception; Response processBeginTransaction(TransactionInfo info) throws Exception; Response processPrepareTransaction(TransactionInfo info) 
throws Exception; Response processCommitTransactionOnePhase(TransactionInfo info) throws Exception; Response processCommitTransactionTwoPhase(TransactionInfo info) throws Exception; Response processRollbackTransaction(TransactionInfo info) throws Exception; Response processWireFormat(WireFormatInfo info) throws Exception; Response processKeepAlive(KeepAliveInfo info) throws Exception; Response processShutdown(ShutdownInfo info) throws Exception; Response processFlush(FlushCommand command) throws Exception; Response processBrokerInfo(BrokerInfo info) throws Exception; Response processRecoverTransactions(TransactionInfo info) throws Exception; Response processForgetTransaction(TransactionInfo info) throws Exception; Response processEndTransaction(TransactionInfo info) throws Exception; Response processMessageDispatchNotification(MessageDispatchNotification notification) throws Exception; Response processProducerAck(ProducerAck ack) throws Exception; Response processMessageDispatch(MessageDispatch dispatch) throws Exception; Response processControlCommand(ControlCommand command) throws Exception; Response processConnectionError(ConnectionError error) throws Exception; Response processConnectionControl(ConnectionControl control) throws Exception; Response processConsumerControl(ConsumerControl control) throws Exception; }
apache-2.0
ernestp/consulo
platform/platform-impl/src/com/intellij/ide/actions/BaseNavigateToSourceAction.java
3073
/* * Copyright 2000-2010 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide.actions; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.project.DumbAware; import com.intellij.pom.Navigatable; import com.intellij.pom.NavigatableWithText; import com.intellij.util.OpenSourceUtil; import org.jetbrains.annotations.Nullable; public abstract class BaseNavigateToSourceAction extends AnAction implements DumbAware { private final boolean myFocusEditor; protected BaseNavigateToSourceAction(boolean focusEditor) { myFocusEditor = focusEditor; } public void actionPerformed(AnActionEvent e) { DataContext dataContext = e.getDataContext(); OpenSourceUtil.navigate(myFocusEditor, getNavigatables(dataContext)); } public void update(AnActionEvent event) { DataContext dataContext = event.getDataContext(); final Navigatable target = getTarget(dataContext); boolean enabled = target != null; if (ActionPlaces.isPopupPlace(event.getPlace())) { event.getPresentation().setVisible(enabled); } else { event.getPresentation().setEnabled(enabled); } if (target != null && target instanceof NavigatableWithText) { //as myFocusEditor is always ignored - Main Menu|View always contains 2 actions with the same name and actually same behaviour if (!myFocusEditor) { event.getPresentation().setVisible(false); return; } final String navigateActionText = ((NavigatableWithText)target).getNavigateActionText(myFocusEditor); if (navigateActionText != null) { 
event.getPresentation().setText(navigateActionText); } else { event.getPresentation().setText(getTemplatePresentation().getText()); } } else { event.getPresentation().setText(getTemplatePresentation().getText()); } } @Nullable private Navigatable getTarget(final DataContext dataContext) { if (!myFocusEditor && PlatformDataKeys.EDITOR.getData(dataContext) != null) { // makes no sense in editor and conflicts with another action there (ctrl+enter) return null; } Navigatable[] navigatables = getNavigatables(dataContext); if (navigatables != null) { for (Navigatable navigatable : navigatables) { if (navigatable.canNavigate()) return navigatable; } } return null; } @Nullable protected Navigatable[] getNavigatables(final DataContext dataContext) { return PlatformDataKeys.NAVIGATABLE_ARRAY.getData(dataContext); } }
apache-2.0
Activiti/Activiti
activiti-core/activiti-api-impl/activiti-api-task-runtime-impl/src/main/java/org/activiti/runtime/api/event/impl/TaskCandidateGroupRemovedImpl.java
1454
/* * Copyright 2010-2020 Alfresco Software, Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.activiti.runtime.api.event.impl; import org.activiti.api.runtime.event.impl.RuntimeEventImpl; import org.activiti.api.task.model.TaskCandidateGroup; import org.activiti.api.task.model.events.TaskCandidateGroupEvent; import org.activiti.api.task.runtime.events.TaskCandidateGroupRemovedEvent; public class TaskCandidateGroupRemovedImpl extends RuntimeEventImpl<TaskCandidateGroup, TaskCandidateGroupEvent.TaskCandidateGroupEvents> implements TaskCandidateGroupRemovedEvent { public TaskCandidateGroupRemovedImpl() { } public TaskCandidateGroupRemovedImpl(TaskCandidateGroup entity) { super(entity); } @Override public TaskCandidateGroupEvent.TaskCandidateGroupEvents getEventType() { return TaskCandidateGroupEvent.TaskCandidateGroupEvents.TASK_CANDIDATE_GROUP_REMOVED; } }
apache-2.0
nwnpallewela/developer-studio
datamapper-tool/org.wso2.developerstudio.visualdatamapper.diagram/src/org/wso2/developerstudio/datamapper/diagram/custom/util/AddNewRecordListDialog.java
5425
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.developerstudio.datamapper.diagram.custom.util; import org.eclipse.jface.dialogs.Dialog; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Combo; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Text; public class AddNewRecordListDialog extends Dialog { private Text textRootName; private Combo schemaTypeCombo; private Text textDoc; private Composite compositeRecordList; private String name; private String schemaType; private String doc; private String[] DATA_TYPES = { "ARRAY","STRING", "INT","BOOLEAN","BYTES","DOUBLE","ENUM","FIXED","FLOAT","INT","LONG","MAP","NULL","RECORD","UNION"}; private static final String DIALOG_TITLE = "Add new Record List"; private static final String LABEL_NAME = "Name :"; private static final String LABEL_SCHEMATYPE = "Schema Data Type :"; private static final String LABEL_DOC = "Doc :"; private static final String NEW_ROOT_RECORD_ID = "NewList"; /** * Create the dialog. 
* * @param parentShell */ public AddNewRecordListDialog(Shell parentShell, Class<?>[] type) { super(parentShell); setShellStyle(SWT.CLOSE | SWT.TITLE | SWT.BORDER | SWT.OK | SWT.APPLICATION_MODAL); } /** * Create contents of the dialog. * * @param parent */ @Override protected Control createDialogArea(Composite parent) { Composite container = (Composite) super.createDialogArea(parent); getShell().setText(DIALOG_TITLE); compositeRecordList = new Composite(container, SWT.NONE); GridData gd_composite_2 = new GridData(SWT.LEFT, SWT.CENTER, false, false, 1, 1); gd_composite_2.widthHint = 575; compositeRecordList.setLayoutData(gd_composite_2); compositeRecordList.setLayout(new GridLayout(5, false)); Label lblRootNameLabel = new Label(compositeRecordList, SWT.NONE); lblRootNameLabel.setText(LABEL_NAME); new Label(compositeRecordList, SWT.NONE); new Label(compositeRecordList, SWT.NONE); new Label(compositeRecordList, SWT.NONE); textRootName = new Text(compositeRecordList, SWT.BORDER); textRootName.setText(NEW_ROOT_RECORD_ID); textRootName.addModifyListener(new ModifyListener() { public void modifyText(ModifyEvent arg0) { } }); textRootName.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1)); Label lblSchemaTypeLabel = new Label(compositeRecordList, SWT.NONE); lblSchemaTypeLabel.setText(LABEL_SCHEMATYPE); new Label(compositeRecordList, SWT.NONE); new Label(compositeRecordList, SWT.NONE); new Label(compositeRecordList, SWT.NONE); schemaTypeCombo = new Combo(compositeRecordList, SWT.DROP_DOWN | SWT.READ_ONLY); schemaTypeCombo.addModifyListener(new ModifyListener() { public void modifyText(ModifyEvent arg0) { } }); schemaTypeCombo.setItems(DATA_TYPES); schemaTypeCombo.select(0); schemaTypeCombo.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1)); Label lbldocLabel = new Label(compositeRecordList, SWT.NONE); lbldocLabel.setText(LABEL_DOC); new Label(compositeRecordList, SWT.NONE); new Label(compositeRecordList, SWT.NONE); new 
Label(compositeRecordList, SWT.NONE); textDoc = new Text(compositeRecordList, SWT.BORDER); textDoc.setText(""); textDoc.addModifyListener(new ModifyListener() { public void modifyText(ModifyEvent arg0) { } }); textDoc.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1)); return container; } /** * Create contents of the button bar. * * @param parent */ @Override protected void createButtonsForButtonBar(Composite parent) { createButton(parent, IDialogConstants.OK_ID, IDialogConstants.OK_LABEL, true); createButton(parent, IDialogConstants.CANCEL_ID, IDialogConstants.CANCEL_LABEL, false); } /** * Return the initial size of the dialog. */ @Override protected Point getInitialSize() { return new Point(620, 210); } @Override protected void okPressed() { setName(textRootName.getText()); setSchemaType(schemaTypeCombo.getText()); setDoc(textDoc.getText()); super.okPressed(); } public void setName(String name){ this.name= name; } public void setSchemaType(String schemaType){ this.schemaType= schemaType; } public void setDoc(String doc){ this.doc= doc; } public String getName(){ return name; } public String getSchemaType(){ return schemaType; } public String getDoc(){ return doc; } }
apache-2.0
syntelos/shapefile-java
src/com/esri/core/geometry/MultiPathImpl.java
75345
/* Copyright 1995-2013 Esri Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. For additional information, contact: Environmental Systems Research Institute, Inc. Attn: Contracts Dept 380 New York Street Redlands, California, USA 92373 email: contracts@esri.com */ package com.esri.core.geometry; final class MultiPathImpl extends MultiVertexGeometryImpl { protected boolean m_bPolygon; protected Point m_moveToPoint; protected double m_cachedLength2D; protected double m_cachedArea2D; protected AttributeStreamOfDbl m_cachedRingAreas2D; protected boolean m_bPathStarted; // Contains starting points of the parts. The size is getPartCount() + 1. // First element is 0, last element is equal to the getPointCount(). protected AttributeStreamOfInt32 m_paths; // same size as m_parts. Holds flags for each part (whether the part is // closed, etc. See PathFlags) protected AttributeStreamOfInt8 m_pathFlags; // The segment flags. Size is getPointCount(). This is not a vertex // attribute, because we may want to use indexed access later (via an index // buffer). // Can be NULL if the MultiPathImpl contains straight lines only. protected AttributeStreamOfInt8 m_segmentFlags; // An index into the m_segmentParams stream. Size is getPointCount(). Can be // NULL if the MultiPathImpl contains straight lines only. 
    protected AttributeStreamOfInt32 m_segmentParamIndex;
    // Packed curve parameters (control points, etc.) for non-linear segments.
    protected AttributeStreamOfDbl m_segmentParams;
    // Next free offset inside m_segmentParams; > 0 means curve data exists.
    protected int m_curveParamwritePoint;
    private int m_currentPathIndex;
    // Number of doubles of parameter data per segment type, indexed by the
    // segment flag value: None, Line, Bezier, XXX, Arc, XXX.
    static int[] _segmentParamSizes = { 0, 0, 6, 0, 8, 0 };

    // Returns true when any non-linear (curve) segment data has been written.
    public boolean hasNonLinearSegments() {
        return m_curveParamwritePoint > 0;
    }

    // / Cpp ///
    // Reviewed vs. Native Jan 11, 2011
    /**
     * Creates an empty multipath with the default 2D vertex description.
     *
     * @param bPolygon
     *            true for polygon (ring) semantics, false for polyline.
     */
    public MultiPathImpl(boolean bPolygon) {
        m_bPolygon = bPolygon;
        m_bPathStarted = false;
        m_curveParamwritePoint = 0;
        m_cachedLength2D = 0;
        m_cachedArea2D = 0;
        m_pointCount = 0;
        m_description = VertexDescriptionDesignerImpl.getDefaultDescriptor2D();
        m_cachedRingAreas2D = null;
        m_currentPathIndex = 0;
    }

    // Reviewed vs. Native Jan 11, 2011
    /**
     * Creates an empty multipath with an explicit vertex description.
     *
     * @throws IllegalArgumentException
     *             when description is null.
     */
    public MultiPathImpl(boolean bPolygon, VertexDescription description) {
        if (description == null)
            throw new IllegalArgumentException();
        m_bPolygon = bPolygon;
        m_bPathStarted = false;
        m_curveParamwritePoint = 0;
        m_cachedLength2D = 0;
        m_cachedArea2D = 0;
        m_pointCount = 0;
        m_description = description;
        m_cachedRingAreas2D = null;
        m_currentPathIndex = 0;
    }

    // Reviewed vs. Native Jan 11, 2011
    // Lazily creates m_moveToPoint (or refreshes its vertex description) so
    // that startPath() can buffer the pending start vertex.
    protected void _initPathStartPoint() {
        _touch();
        if (m_moveToPoint == null)
            m_moveToPoint = new Point(m_description);
        else
            m_moveToPoint.assignVertexDescription(m_description);
    }

    // Reviewed vs. Native Jan 11, 2011
    /**
     * Starts a new Path at the Point.
     */
    public void startPath(double x, double y) {
        Point2D endPoint = new Point2D();
        endPoint.x = x;
        endPoint.y = y;
        startPath(endPoint);
    }

    // Reviewed vs. Native Jan 11, 2011
    public void startPath(Point2D point) {
        _initPathStartPoint();
        m_moveToPoint.setXY(point);
        m_bPathStarted = true;
    }

    // Reviewed vs. Native Jan 11, 2011
    public void startPath(Point3D point) {
        _initPathStartPoint();
        m_moveToPoint.setXYZ(point);
        assignVertexDescription(m_moveToPoint.getDescription());
        m_bPathStarted = true;
    }

    // Reviewed vs. Native Jan 11, 2011
    public void startPath(Point point) {
        if (point.isEmpty())
            throw new IllegalArgumentException();// throw new
                                                 // IllegalArgumentException();
        _initPathStartPoint();
        point.copyTo(m_moveToPoint);
        // TODO check MultiPathImpl.cpp comment
        // "//the description will be merged later"
        // assignVertexDescription(m_moveToPoint.getDescription());
        m_bPathStarted = true;
    }

    // Reviewed vs. Native Jan 11, 2011
    // Called for each new segment being added. Grows the vertex streams by
    // resizeBy points and, if a path has just been started, also materializes
    // the buffered start vertex and extends m_paths/m_pathFlags for the new
    // part.
    protected void _beforeNewSegment(int resizeBy) {
        // Called for each new segment being added.
        if (m_bPathStarted) {
            _initPathStartPoint();// make sure the m_movetoPoint exists and has
                                  // right vertex description
            // The new path is started. Need to grow m_parts and m_pathFlags.
            if (m_paths == null) {
                m_paths = (AttributeStreamOfInt32) AttributeStreamBase
                        .createIndexStream(2);
                m_paths.write(0, 0);
                m_pathFlags = (AttributeStreamOfInt8) AttributeStreamBase
                        .createByteStream(2, (byte) 0);
            } else {
                // _ASSERT(m_parts.size() >= 2);
                m_paths.resize(m_paths.size() + 1, 0);
                m_pathFlags.resize(m_pathFlags.size() + 1, 0);
            }

            if (m_bPolygon) {
                // Mark the path as closed
                m_pathFlags.write(m_pathFlags.size() - 2,
                        (byte) PathFlags.enumClosed);
            }

            resizeBy++; // +1 for the StartPath point.
        }

        int oldcount = m_pointCount;
        // The NotifyModified will update the m_pointCount with this value.
        m_paths.write(m_paths.size() - 1, m_pointCount + resizeBy);
        _resizeImpl(oldcount + resizeBy);
        m_pathFlags.write(m_paths.size() - 1, (byte) 0);
        if (m_bPathStarted) {
            // finally set the start point to the geometry
            setPointByVal(oldcount, m_moveToPoint);// setPoint(oldcount,
                                                   // m_moveToPoint);
            m_bPathStarted = false;
        }
    }

    // Reviewed vs. Native Jan 11, 2011
    // Hook invoked after each lineTo(); intentionally empty.
    protected void _finishLineTo() {
    }

    // Reviewed vs. Native Jan 11, 2011
    /**
     * adds a Line Segment from the last Point to the given endPoint.
     */
    public void lineTo(double x, double y) {
        _beforeNewSegment(1);
        setXY(m_pointCount - 1, x, y);
        _finishLineTo();
        // Point2D endPoint = new Point2D();
        // endPoint.x = x; endPoint.y = y;
        // lineTo(endPoint);
    }

    // Reviewed vs. Native Jan 11, 2011
    public void lineTo(Point2D endPoint) {
        _beforeNewSegment(1);
        setXY(m_pointCount - 1, endPoint);
        _finishLineTo();
    }

    // Reviewed vs. Native Jan 11, 2011
    public void lineTo(Point3D endPoint) {
        _beforeNewSegment(1);
        setXYZ(m_pointCount - 1, endPoint);
        _finishLineTo();
    }

    // Reviewed vs. Native Jan 11, 2011
    public void lineTo(Point endPoint) {
        _beforeNewSegment(1);
        setPointByVal(m_pointCount - 1, endPoint);
        _finishLineTo();
    }

    // Reviewed vs. Native Jan 11, 2011
    // Lazily allocates the per-vertex segment-flag/param-index streams and
    // grows m_segmentParams by sz doubles.
    protected void _initSegmentData(int sz) {
        if (m_segmentParamIndex == null) {
            m_segmentFlags = (AttributeStreamOfInt8) AttributeStreamBase
                    .createByteStream(m_pointCount,
                            (byte) SegmentFlags.enumLineSeg);
            m_segmentParamIndex = (AttributeStreamOfInt32) AttributeStreamBase
                    .createIndexStream(m_pointCount, -1);
        }

        int size = m_curveParamwritePoint + sz;
        if (m_segmentParams == null) {
            m_segmentParams = (AttributeStreamOfDbl) AttributeStreamBase
                    .createAttributeStreamWithPersistence(
                            VertexDescription.Persistence.enumDouble, size);
        } else {
            m_segmentParams.resize(size, 0);
        }
    }

    // Reviewed vs. Native Jan 11, 2011
    // Marks the just-added segment as a Bezier segment.
    protected void _finishBezierTo() {
        // _ASSERT(m_segmentFlags != null);
        // _ASSERT(m_segmentParamIndex != null);
        m_segmentFlags.write(m_pointCount - 2,
                (byte) SegmentFlags.enumBezierSeg);
    }

    // Reviewed vs. Native Jan 11, 2011
    /**
     * adds a Cubic Bezier Segment to the current Path. The Bezier Segment
     * connects the current last Point and the given endPoint.
     */
    public void bezierTo(Point2D controlPoint1, Point2D controlPoint2,
            Point2D endPoint) {
        _beforeNewSegment(1);
        setXY(m_pointCount - 1, endPoint);
        double z;
        _initSegmentData(6);
        m_pathFlags.setBits(m_pathFlags.size() - 1,
                (byte) PathFlags.enumHasNonlinearSegments);
        m_segmentParamIndex.write(m_pointCount - 2, m_curveParamwritePoint);
        m_curveParamwritePoint += 6;
        // NOTE(review): curveIndex is taken AFTER m_curveParamwritePoint was
        // advanced, yet m_segmentParamIndex above records the pre-advance
        // offset. closePathWithBezier() takes the index before advancing, so
        // this looks like an off-by-6 write — TODO confirm against the native
        // implementation.
        int curveIndex = m_curveParamwritePoint;
        m_segmentParams.write(curveIndex, controlPoint1.x);
        m_segmentParams.write(curveIndex + 1, controlPoint1.y);
        z = 0;// TODO: calculate me.
        m_segmentParams.write(curveIndex + 2, z);
        m_segmentParams.write(curveIndex + 3, controlPoint2.x);
        m_segmentParams.write(curveIndex + 4, controlPoint2.y);
        z = 0;// TODO: calculate me.
        m_segmentParams.write(curveIndex + 5, z);
        _finishBezierTo();
    }

    // Reviewed vs. Native Jan 11, 2011
    // Clears the closed flag of the given path. Not valid for polygons.
    public void openPath(int pathIndex) {
        _touch();
        // do not call this method on a polygon
        if (m_bPolygon)
            throw new GeometryException("internal error");

        int pathCount = getPathCount();
        if (pathIndex > getPathCount())
            throw new IllegalArgumentException();

        if (m_pathFlags == null)
            throw new GeometryException("internal error");

        m_pathFlags.clearBits(pathIndex, (byte) PathFlags.enumClosed);
    }

    // Reviewed vs. Native Jan 11, 2011
    // Major Changes on 16th of January
    // Opens a closed path by inserting a duplicate of its start vertex at the
    // path end, then clearing the closed flag. No-op when the path is already
    // open. Not valid for polygons.
    public void openPathAndDuplicateStartVertex(int pathIndex) {
        _touch();
        // do not call this method on a polygon
        if (m_bPolygon)
            throw new GeometryException("internal error");

        int pathCount = getPathCount();
        if (pathIndex > pathCount)
            throw new GeometryException("internal error");

        if (!isClosedPath(pathIndex))
            return;// do not open if open

        if (m_pathFlags == null)// if (!m_pathFlags)
            throw new GeometryException("nternal_error");// (sic) message typo
                                                         // preserved

        int oldPointCount = m_pointCount;
        int pathIndexStart = getPathStart(pathIndex);
        int pathIndexEnd = getPathEnd(pathIndex);
        // resize does not write into m_paths anymore!
        _resizeImpl(m_pointCount + 1);
        _verifyAllStreams();

        // Duplicate the start vertex of the path at the path end in every
        // attribute stream.
        for (int iattr = 0, nattr = m_description.getAttributeCount(); iattr < nattr; iattr++) {
            if (m_vertexAttributes[iattr] != null)// if
                                                  // (m_vertexAttributes[iattr])
            {
                int semantics = m_description._getSemanticsImpl(iattr);
                int comp = VertexDescription.getComponentCount(semantics);
                m_vertexAttributes[iattr].insertRange(comp * pathIndexEnd,
                        m_vertexAttributes[iattr], comp * pathIndexStart, comp,
                        true, 1, comp * oldPointCount);
            }
        }

        // Shift the start offsets of all subsequent paths by one.
        for (int ipath = pathCount; ipath > pathIndex; ipath--) {
            int iend = m_paths.read(ipath);
            m_paths.write(ipath, iend + 1);
        }

        m_pathFlags.clearBits(pathIndex, (byte) PathFlags.enumClosed);
    }

    // Reviewed vs. Native Jan 11, 2011
    // Major Changes on 16th of January
    // Opens every closed path, duplicating each closed path's start vertex at
    // its end. Performs the vertex shifting in place, back to front. Not valid
    // for polygons.
    public void openAllPathsAndDuplicateStartVertex() {
        _touch();
        // do not call this method on a polygon
        if (m_bPolygon)
            throw new GeometryException("internal error");

        if (m_pathFlags == null)// if (!m_pathFlags)
            throw new GeometryException("nternal_error");// (sic) message typo
                                                         // preserved

        _verifyAllStreams();

        int closedPathCount = 0;
        int pathCount = getPathCount();
        for (int i = 0; i < pathCount; i++) {
            if (m_pathFlags.read(i) == (byte) PathFlags.enumClosed) {
                closedPathCount++;
            }
        }

        for (int iattr = 0, nattr = m_description.getAttributeCount(); iattr < nattr; iattr++) {
            if (m_vertexAttributes[iattr] != null) {
                int semantics = m_description._getSemanticsImpl(iattr);// int
                                                                      // semantics
                                                                      // =
                                                                      // m_description._getSemanticsImpl(iattr);
                int comp = VertexDescription.getComponentCount(semantics);
                int newSize = comp * (m_pointCount + closedPathCount);
                m_vertexAttributes[iattr].resize(newSize);
                // Walk vertices back to front, shifting each by the number of
                // duplicates still to be inserted behind it, and inserting the
                // duplicated start vertex at each closed path's end.
                int offset = closedPathCount;
                int ipath = pathCount;
                for (int i = m_pointCount - 1; i >= 0; i--) {
                    if (i + 1 == m_paths.read(ipath)) {
                        // Crossed a path boundary (vertex i is the last vertex
                        // of path ipath-1).
                        ipath--;
                        if (m_pathFlags.read(ipath) == (byte) PathFlags.enumClosed) {
                            int istart = m_paths.read(ipath);
                            for (int c = 0; c < comp; c++) {
                                double v = m_vertexAttributes[iattr]
                                        .readAsDbl(comp * istart + c);
                                m_vertexAttributes[iattr].writeAsDbl(comp
                                        * (offset + i) + c, v);
                            }

                            if (--offset == 0)
                                break;
                        }
                    }

                    for (int c = 0; c < comp; c++) {
                        double v = m_vertexAttributes[iattr].readAsDbl(comp * i
                                + c);
                        m_vertexAttributes[iattr].writeAsDbl(comp
                                * (offset + i) + c, v);
                    }
                }
            }
        }

        // Fix up the path start offsets and clear the closed flags.
        int offset = closedPathCount;
        for (int ipath = pathCount; ipath > 0; ipath--) {
            int iend = m_paths.read(ipath);
            m_paths.write(ipath, iend + offset);
            if (m_pathFlags.read(ipath - 1) == (byte) PathFlags.enumClosed) {
                m_pathFlags.clearBits(ipath - 1, (byte) PathFlags.enumClosed);
                if (--offset == 0) {
                    break;
                }
            }
        }

        m_pointCount += closedPathCount;
    }

    // Marks the given path closed and resets its last segment to a plain line
    // segment (dropping any curve data reference for the closing segment).
    void closePathWithLine(int path_index) {
        // touch_();
        throwIfEmpty();
        byte pf = m_pathFlags.read(path_index);
        m_pathFlags.write(path_index, (byte) (pf | PathFlags.enumClosed));
        if (m_segmentFlags != null) {
            int vindex = getPathEnd(path_index) - 1;
            m_segmentFlags.write(vindex, (byte) SegmentFlags.enumLineSeg);
            m_segmentParamIndex.write(vindex, -1);
        }
    }

    // Closes the last path with an implicit line segment.
    void closePathWithLine() {
        throwIfEmpty();
        m_bPathStarted = false;
        closePathWithLine(getPathCount() - 1);
    }

    // Reviewed vs. Native Jan 11, 2011
    /**
     * Closes all open curves by adding an implicit line segment from the end
     * point to the start point.
     */
    public void closeAllPaths() {
        _touch();
        if (m_bPolygon || isEmptyImpl())
            return;

        m_bPathStarted = false;
        for (int ipath = 0, npart = m_paths.size() - 1; ipath < npart; ipath++) {
            if (isClosedPath(ipath))
                continue;
            byte pf = m_pathFlags.read(ipath);
            m_pathFlags.write(ipath, (byte) (pf | PathFlags.enumClosed));
            // if (m_segmentFlags)
            // {
            // m_segmentFlags.write(m_pointCount - 1,
            // (byte)SegmentFlags.LineSeg));
            // m_segmentParamIndex.write(m_pointCount - 1, -1);
            // }
        }
    }

    // Reviewed vs. Native Jan 11, 2011
    /**
     * Returns the size of the segment data for the given segment type.
     *
     * @param flag
     *            is one of the segment flags from the SegmentFlags enum.
     * @return the size of the segment params as the number of doubles.
     */
    public static int getSegmentDataSize(byte flag) {
        return _segmentParamSizes[flag];
    }
    // Reviewed vs.
    // Reviewed vs. Native Jan 11, 2011
    /**
     * Closes last path of the MultiPathImpl with the Bezier Segment.
     *
     * The start point of the Bezier is the last point of the path and the last
     * point of the bezier is the first point of the path.
     */
    public void closePathWithBezier(Point2D controlPoint1, Point2D controlPoint2) {
        _touch();
        if (isEmptyImpl())
            throw new GeometryException(
                    "Invalid call. This operation cannot be performed on an empty geometry.");

        m_bPathStarted = false;
        int pathIndex = m_paths.size() - 2;
        byte pf = m_pathFlags.read(pathIndex);
        m_pathFlags
                .write(pathIndex,
                        (byte) (pf | PathFlags.enumClosed | PathFlags.enumHasNonlinearSegments));

        _initSegmentData(6);
        // NOTE(review): this masks the INDEX with enumSegmentMask instead of
        // masking the flag value read from the stream — looks like it should
        // be (byte)(m_segmentFlags.read(m_pointCount - 1) &
        // SegmentFlags.enumSegmentMask). TODO confirm.
        byte oldType = m_segmentFlags
                .read((byte) ((m_pointCount - 1) & SegmentFlags.enumSegmentMask));
        m_segmentFlags.write(m_pointCount - 1,
                (byte) (SegmentFlags.enumBezierSeg));
        int curveIndex = m_curveParamwritePoint;
        if (getSegmentDataSize(oldType) < getSegmentDataSize((byte) SegmentFlags.enumBezierSeg)) {
            // Not enough existing storage for this segment: reserve 6 doubles.
            m_segmentParamIndex.write(m_pointCount - 1, m_curveParamwritePoint);
            m_curveParamwritePoint += 6;
        } else {
            // there was a closing bezier curve or an arc here. We can reuse the
            // storage.
            curveIndex = m_segmentParamIndex.read(m_pointCount - 1);
        }

        double z;
        m_segmentParams.write(curveIndex, controlPoint1.x);
        m_segmentParams.write(curveIndex + 1, controlPoint1.y);
        z = 0;// TODO: calculate me.
        m_segmentParams.write(curveIndex + 2, z);
        m_segmentParams.write(curveIndex + 3, controlPoint2.x);
        m_segmentParams.write(curveIndex + 4, controlPoint2.y);
        z = 0;// TODO: calculate me.
        m_segmentParams.write(curveIndex + 5, z);
    }

    // Reviewed vs. Native Jan 11, 2011
    /**
     * Returns True if the given path is closed (represents a Ring).
     */
    public boolean isClosedPath(int ipath) {
        // Should we make a function called _UpdateClosedPathFlags and call it
        // here?
        return ((byte) (m_pathFlags.read(ipath) & PathFlags.enumClosed)) != 0;
    }

    // Returns true when the path is flagged closed OR its first and last
    // vertices coincide in the XY plane.
    public boolean isClosedPathInXYPlane(int path_index) {
        if (isClosedPath(path_index))
            return true;

        int istart = getPathStart(path_index);
        int iend = getPathEnd(path_index) - 1;
        if (istart > iend)
            return false;

        Point2D ptS = getXY(istart);
        Point2D ptE = getXY(iend);
        return ptS.isEqual(ptE);
    }

    // Reviewed vs. Native Jan 11, 2011
    /**
     * Returns True if the given path might have non-linear segments.
     */
    public boolean hasNonLinearSegments(int ipath) {
        // Should we make a function called _UpdateHasNonLinearSegmentsFlags and
        // call it here?
        return (m_pathFlags.read(ipath) & PathFlags.enumHasNonlinearSegments) != 0;
    }

    // Reviewed vs. Native Jan 11, 2011
    // Appends a line segment (optionally starting a new path). Only straight
    // line segments are supported here; curves throw.
    public void addSegment(Segment segment, boolean bStartNewPath) {
        mergeVertexDescription(segment.getDescription());
        if (segment.getType() == Type.Line) {
            Point point = new Point();
            if (bStartNewPath || isEmpty()) {
                segment.queryStart(point);
                startPath(point);
            }

            segment.queryEnd(point);
            lineTo(point);
        } else {
            throw new GeometryException("internal error");
        }
    }

    // Reviewed vs. Native Jan 11, 2011
    /**
     * adds a rectangular closed Path to the MultiPathImpl.
     *
     * @param envSrc
     *            is the source rectangle.
     * @param bReverse
     *            Creates reversed path.
     */
    public void addEnvelope(Envelope2D envSrc, boolean bReverse) {
        boolean bWasEmpty = m_pointCount == 0;
        startPath(envSrc.xmin, envSrc.ymin);
        if (bReverse) {
            lineTo(envSrc.xmax, envSrc.ymin);
            lineTo(envSrc.xmax, envSrc.ymax);
            lineTo(envSrc.xmin, envSrc.ymax);
        } else {
            lineTo(envSrc.xmin, envSrc.ymax);
            lineTo(envSrc.xmax, envSrc.ymax);
            lineTo(envSrc.xmax, envSrc.ymin);
        }

        closePathWithLine();
        m_bPathStarted = false;
        if (bWasEmpty && !bReverse) {
            // now we know the polypath is an envelope
            _setDirtyFlag(DirtyFlags.DirtyIsEnvelope, false);
        }
    }

    // Reviewed vs. Native Jan 11, 2011
    /**
     * adds a rectangular closed Path to the MultiPathImpl.
     *
     * @param envSrc
     *            is the source rectangle.
     * @param bReverse
     *            Creates reversed path.
     */
    public void addEnvelope(Envelope envSrc, boolean bReverse) {
        if (envSrc.isEmpty())
            return;

        boolean bWasEmpty = m_pointCount == 0;
        Point pt = new Point(m_description);// getDescription());
        for (int i = 0, n = 4; i < n; i++) {
            int j = bReverse ? n - i - 1 : i;
            envSrc.queryCornerByVal(j, pt);
            if (i == 0)
                startPath(pt);
            else
                lineTo(pt);
        }

        closePathWithLine();
        m_bPathStarted = false;
        if (bWasEmpty && !bReverse)
            // now we know the polypath is an envelope
            _setDirtyFlag(DirtyFlags.DirtyIsEnvelope, false);
    }

    // Reviewed vs. Native Jan 11, 2011
    // Appends all paths of src (optionally reversing each path's direction).
    public void add(MultiPathImpl src, boolean bReversePaths) {
        for (int i = 0; i < src.getPathCount(); i++)
            addPath(src, i, !bReversePaths);
    }

    public void addPath(MultiPathImpl src, int srcPathIndex, boolean bForward) {
        insertPath(-1, src, srcPathIndex, bForward);
    }

    // Reviewed vs. Native Jan 11, 2011 Significant changes to last for loop
    public void addPath(Point2D[] _points, int count, boolean bForward) {
        insertPath(-1, _points, 0, count, bForward);
    }

    // Copies src_segment_count consecutive segments (starting at
    // src_segment_from) of path src_path_index in src onto the end of this
    // multipath, either continuing the last path or starting a new one.
    // Curve segments are not yet supported (throws).
    public void addSegmentsFromPath(MultiPathImpl src, int src_path_index,
            int src_segment_from, int src_segment_count,
            boolean b_start_new_path) {
        if (!b_start_new_path && getPathCount() == 0)
            b_start_new_path = true;

        if (src_path_index < 0)
            src_path_index = src.getPathCount() - 1;

        if (src_path_index >= src.getPathCount()
                || src_segment_from < 0
                || src_segment_count < 0
                || src_segment_count > src.getSegmentCount(src_path_index))
            throw new GeometryException("index out of bounds");

        if (src_segment_count == 0)
            return;

        // The closing segment of a closed path is implicit (end -> start); it
        // contributes no extra destination vertex.
        boolean bIncludesClosingSegment = src.isClosedPath(src_path_index)
                && src_segment_from + src_segment_count == src
                        .getSegmentCount(src_path_index);

        if (bIncludesClosingSegment && src_segment_count == 1)
            return;// cannot add a closing segment alone.

        m_bPathStarted = false;

        mergeVertexDescription(src.getDescription());

        int src_point_count = src_segment_count;
        int srcFromPoint = src.getPathStart(src_path_index) + src_segment_from
                + 1;
        if (b_start_new_path)// adding a new path.
        {
            src_point_count++;// add start point.
            srcFromPoint--;
        }

        if (bIncludesClosingSegment) {
            src_point_count--;
        }

        int oldPointCount = m_pointCount;
        _resizeImpl(m_pointCount + src_point_count);
        _verifyAllStreams();

        if (b_start_new_path) {
            if (src_point_count == 0)
                return;// happens when adding a single closing segment to the
                       // new path

            m_paths.add(m_pointCount);
            byte flags = src.m_pathFlags.read(src_path_index);
            flags &= ~(byte) PathFlags.enumCalcMask;// remove calculated flags

            if (m_bPolygon)
                flags |= (byte) PathFlags.enumClosed;

            m_pathFlags.write(m_pathFlags.size() - 1, flags);
            m_pathFlags.add((byte) 0);
        } else {
            m_paths.write(m_pathFlags.size() - 1, m_pointCount);
        }

        // Index_type absoluteIndex = pathStart + before_point_index;

        for (int iattr = 0, nattr = m_description.getAttributeCount(); iattr < nattr; iattr++) {
            int semantics = m_description.getSemantics(iattr);
            int comp = VertexDescription.getComponentCount(semantics);
            int isrcAttr = src.m_description.getAttributeIndex(semantics);
            if (isrcAttr < 0 || src.m_vertexAttributes[isrcAttr] == null) {
                // The source does not have the attribute. insert default value
                double v = VertexDescription.getDefaultValue(semantics);
                m_vertexAttributes[iattr].insertRange(comp * oldPointCount, v,
                        src_point_count * comp, comp * oldPointCount);
                continue;
            }

            // add vertices to the given stream
            boolean b_forward = true;
            m_vertexAttributes[iattr].insertRange(comp * oldPointCount,
                    src.m_vertexAttributes[isrcAttr], comp * srcFromPoint,
                    src_point_count * comp, b_forward, comp, comp
                            * oldPointCount);
        }

        if (hasNonLinearSegments()) {
            // TODO: implement me. For example as a while loop over all curves.
            // Replace, calling ReplaceSegment
            // (A commented-out C++ port of the segment-flag/param-index
            // bookkeeping originally followed here; see the native Esri
            // implementation for the reference logic.)
            throw new GeometryException("internal error");
        }

        if (src.hasNonLinearSegments(src_path_index)) {
            // TODO: implement me. For example as a while loop over all curves.
            // Replace, calling ReplaceSegment
            throw new GeometryException("internal error");
        }

        notifyModified(DirtyFlags.DirtyCoordinates);
    }

    // Reviewed vs. Native Jan 11, 2011
    // Reverses the vertex order of every path.
    public void reverseAllPaths() {
        for (int i = 0, n = getPathCount(); i < n; i++) {
            reversePath(i);
        }
    }

    // Reviewed vs. Native Jan 11, 2011
    // Reverses the vertex order of the given path in place. For closed paths
    // the first vertex is kept fixed (offset == 1) so the ring start point is
    // preserved.
    public void reversePath(int pathIndex) {
        _verifyAllStreams();
        int pathCount = getPathCount();
        if (pathIndex >= pathCount)
            throw new IllegalArgumentException();

        int reversedPathStart = getPathStart(pathIndex);
        int reversedPathSize = getPathSize(pathIndex);
        int offset = isClosedPath(pathIndex) ? 1 : 0;
        // TODO: a bug for the non linear segments here.
        // There could be an issue here if someone explicity closes the path
        // with the same start/end point.
        for (int iattr = 0, nattr = m_description.getAttributeCount(); iattr < nattr; iattr++) {
            if (m_vertexAttributes[iattr] != null) {
                int semantics = m_description._getSemanticsImpl(iattr);
                int comp = VertexDescription.getComponentCount(semantics);
                m_vertexAttributes[iattr].reverseRange(comp
                        * (reversedPathStart + offset), comp
                        * (reversedPathSize - offset), comp);
            }
        }

        notifyModified(DirtyFlags.DirtyCoordinates);
    }

    // Reviewed vs. Native Jan 11, 2011
    // TODO: Nonlinearsegments
    // Removes the given path (pathIndex < 0 removes the last path), erasing
    // its vertices from every attribute stream and compacting m_paths /
    // m_pathFlags.
    public void removePath(int pathIndex) {
        _verifyAllStreams();
        int pathCount = getPathCount();
        if (pathIndex < 0)
            pathIndex = pathCount - 1;

        if (pathIndex >= pathCount)
            throw new IllegalArgumentException();

        // NOTE(review): bDirtyRingAreas2D is computed but never used below.
        boolean bDirtyRingAreas2D = _hasDirtyFlag(DirtyFlags.DirtyRingAreas2D);

        int removedPathStart = getPathStart(pathIndex);
        int removedPathSize = getPathSize(pathIndex);

        // Remove the attribute values for the path
        for (int iattr = 0, nattr = m_description.getAttributeCount(); iattr < nattr; iattr++) {
            if (m_vertexAttributes[iattr] != null) {
                int semantics = m_description._getSemanticsImpl(iattr);
                int comp = VertexDescription.getComponentCount(semantics);
                m_vertexAttributes[iattr].eraseRange(comp * removedPathStart,
                        comp * removedPathSize, comp * m_pointCount);
            }
        }

        // Change the start of each path after the removed path
        for (int i = pathIndex + 1; i <= pathCount; i++) {
            int istart = m_paths.read(i);
            m_paths.write(i - 1, istart - removedPathSize);
        }

        // NOTE(review): this guard looks inverted — it dereferences
        // m_pathFlags only when m_pathFlags == null, which would NPE, and
        // skips the flag shift in the normal case. Presumably it should read
        // "m_pathFlags != null". TODO confirm against the native
        // implementation.
        if (m_pathFlags == null) {
            for (int i = pathIndex + 1; i <= pathCount; i++) {
                byte flags = m_pathFlags.read(i);
                m_pathFlags.write(i - 1, flags);
            }
        }

        m_paths.resize(pathCount);
        m_pathFlags.resize(pathCount);
        m_pointCount -= removedPathSize;
        m_reservedPointCount -= removedPathSize;
        notifyModified(DirtyFlags.DirtyCoordinates);
    }

    // TODO: Nonlinearsegments
    // Inserts a copy of path srcPathIndex of src before pathIndex (continues
    // past the visible chunk boundary).
    public void insertPath(int pathIndex, MultiPathImpl src, int srcPathIndex,
            boolean bForward) {
        if (src == this)
            throw new IllegalArgumentException();

        if (srcPathIndex >= src.getPathCount())
            throw new
IllegalArgumentException(); int oldPathCount = getPathCount(); if (pathIndex > oldPathCount) throw new IllegalArgumentException(); if (pathIndex < 0) pathIndex = oldPathCount; if (srcPathIndex < 0) srcPathIndex = src.getPathCount() - 1; m_bPathStarted = false; mergeVertexDescription(src.m_description);// merge attributes from the // source src._verifyAllStreams();// the source need to be correct. int srcPathIndexStart = src.getPathStart(srcPathIndex); int srcPathSize = src.getPathSize(srcPathIndex); int oldPointCount = m_pointCount; int offset = src.isClosedPath(srcPathIndex) && !bForward ? 1 : 0; _resizeImpl(m_pointCount + srcPathSize); _verifyAllStreams(); int pathIndexStart = pathIndex < oldPathCount ? getPathStart(pathIndex) : oldPointCount; // Copy all attribute values. for (int iattr = 0, nattr = m_description.getAttributeCount(); iattr < nattr; iattr++) { int semantics = m_description._getSemanticsImpl(iattr); int isrcAttr = src.m_description.getAttributeIndex(semantics); int comp = VertexDescription.getComponentCount(semantics); if (isrcAttr >= 0 && src.m_vertexAttributes[isrcAttr] != null) { if (offset != 0) m_vertexAttributes[iattr].insertRange( pathIndexStart * comp, src.m_vertexAttributes[isrcAttr], comp * srcPathIndexStart, comp, true, comp, comp * oldPointCount); m_vertexAttributes[iattr].insertRange((pathIndexStart + offset) * comp, src.m_vertexAttributes[isrcAttr], comp * (srcPathIndexStart + offset), comp * (srcPathSize - offset), bForward, comp, comp * (oldPointCount + offset)); } else { // Need to make room for the attributes, so we copy default // values in double v = VertexDescription.getDefaultValue(semantics); m_vertexAttributes[iattr].insertRange(pathIndexStart * comp, v, comp * srcPathSize, comp * oldPointCount); } } int newPointCount = oldPointCount + srcPathSize; m_paths.add(newPointCount); for (int ipath = oldPathCount; ipath >= pathIndex + 1; ipath--) { int iend = m_paths.read(ipath - 1); m_paths.write(ipath, iend + srcPathSize); } // 
========================== todo: NonLinearSegments ================= if (src.hasNonLinearSegments(srcPathIndex)) { } m_pathFlags.add((byte) 0); // _ASSERT(m_pathFlags.size() == m_paths.size()); for (int ipath = oldPathCount - 1; ipath >= pathIndex + 1; ipath--) { byte flags = m_pathFlags.read(ipath); flags &= ~(byte) PathFlags.enumCalcMask;// remove calculated flags m_pathFlags.write(ipath + 1, flags); } AttributeStreamOfInt8 srcPathFlags = src.getPathFlagsStreamRef(); byte flags = srcPathFlags.read(srcPathIndex); flags &= ~(byte) PathFlags.enumCalcMask;// remove calculated flags if (m_bPolygon) flags |= (byte) PathFlags.enumClosed; m_pathFlags.write(pathIndex, flags); } public void insertPath(int pathIndex, Point2D[] points, int pointsOffset, int count, boolean bForward) { int oldPathCount = getPathCount(); if (pathIndex > oldPathCount) throw new IllegalArgumentException(); if (pathIndex < 0) pathIndex = oldPathCount; m_bPathStarted = false; int oldPointCount = m_pointCount; // Copy all attribute values. if (points != null) { _resizeImpl(m_pointCount + count); _verifyAllStreams(); int pathStart = pathIndex < oldPathCount ? getPathStart(pathIndex) : oldPointCount; for (int iattr = 0, nattr = m_description.getAttributeCount(); iattr < nattr; iattr++) { int semantics = m_description._getSemanticsImpl(iattr); if (semantics == VertexDescription.Semantics.POSITION) { // copy range to make place for new vertices m_vertexAttributes[iattr].writeRange( 2 * (pathStart + count), 2 * (oldPointCount - pathIndex), m_vertexAttributes[iattr], 2 * pathStart, true, 2); AttributeStreamOfDbl position = (AttributeStreamOfDbl) (AttributeStreamBase) getAttributeStreamRef(semantics); int j = pathStart; for (int i = 0; i < count; i++, j++) { int index = (bForward ? 
pointsOffset + i : pointsOffset + count - i - 1); position.write(2 * j, points[index].x); position.write(2 * j + 1, points[index].y); } } else { // Need to make room for the attributes, so we copy default // values in int comp = VertexDescription.getComponentCount(semantics); double v = VertexDescription.getDefaultValue(semantics); m_vertexAttributes[iattr].insertRange(pathStart * comp, v, comp * count, comp * oldPointCount); } } } else { _verifyAllStreams(); } m_paths.add(m_pointCount); for (int ipath = oldPathCount; ipath >= pathIndex + 1; ipath--) { int iend = m_paths.read(ipath - 1); m_paths.write(ipath, iend + count); } m_pathFlags.add((byte) 0); // _ASSERT(m_pathFlags.size() == m_paths.size()); for (int ipath = oldPathCount - 1; ipath >= pathIndex + 1; ipath--) { byte flags = m_pathFlags.read(ipath); flags &= ~(byte) PathFlags.enumCalcMask;// remove calculated flags m_pathFlags.write(ipath + 1, flags); } if (m_bPolygon) m_pathFlags.write(pathIndex, (byte) PathFlags.enumClosed); } public void insertPoints(int pathIndex, int beforePointIndex, MultiPathImpl src, int srcPathIndex, int srcPointIndexFrom, int srcPointCount, boolean bForward) { if (pathIndex < 0) pathIndex = getPathCount(); if (srcPathIndex < 0) srcPathIndex = src.getPathCount() - 1; if (pathIndex > getPathCount() || beforePointIndex > getPathSize(pathIndex) || srcPathIndex >= src.getPathCount() || srcPointCount > src.getPathSize(srcPathIndex)) throw new GeometryException("index out of bounds"); if (srcPointCount == 0) return; mergeVertexDescription(src.m_description); if (pathIndex == getPathCount())// adding a new path. 
{ m_paths.add(m_pointCount); byte flags = src.m_pathFlags.read(srcPathIndex); flags &= ~(byte) PathFlags.enumCalcMask;// remove calculated flags if (!m_bPolygon) m_pathFlags.add(flags); else m_pathFlags.add((byte) (flags | PathFlags.enumClosed)); } if (beforePointIndex < 0) beforePointIndex = getPathSize(pathIndex); int oldPointCount = m_pointCount; _resizeImpl(m_pointCount + srcPointCount); _verifyAllStreams(); src._verifyAllStreams(); int pathStart = getPathStart(pathIndex); int absoluteIndex = pathStart + beforePointIndex; if (srcPointCount < 0) srcPointCount = src.getPathSize(srcPathIndex); int srcPathStart = src.getPathStart(srcPathIndex); int srcAbsoluteIndex = srcPathStart + srcPointCount; for (int iattr = 0, nattr = m_description.getAttributeCount(); iattr < nattr; iattr++) { int semantics = m_description._getSemanticsImpl(iattr); int comp = VertexDescription.getComponentCount(semantics); int isrcAttr = src.m_description.getAttributeIndex(semantics); if (isrcAttr < 0 || src.m_vertexAttributes[isrcAttr] == null) // The // source // does // not // have // the // attribute. { double v = VertexDescription.getDefaultValue(semantics); m_vertexAttributes[iattr].insertRange(comp * absoluteIndex, v, srcAbsoluteIndex * comp, comp * oldPointCount); continue; } // add vertices to the given stream m_vertexAttributes[iattr].insertRange(comp * (pathStart + beforePointIndex), src.m_vertexAttributes[isrcAttr], comp * (srcPathStart + srcPointIndexFrom), srcPointCount * comp, bForward, comp, comp * oldPointCount); } if (hasNonLinearSegments()) {// TODO: probably a bug here when a new // path is added. 
m_segmentFlags.writeRange((getPathStart(pathIndex) + beforePointIndex + srcPointCount), (oldPointCount - getPathStart(pathIndex) - beforePointIndex), m_segmentFlags, (getPathStart(pathIndex) + beforePointIndex), true, 1); m_segmentParamIndex.writeRange((getPathStart(pathIndex) + beforePointIndex + srcPointCount), (oldPointCount - getPathStart(pathIndex) - beforePointIndex), m_segmentParamIndex, (getPathStart(pathIndex) + beforePointIndex), true, 1); for (int i = getPathStart(pathIndex) + beforePointIndex, n = getPathStart(pathIndex) + beforePointIndex + srcPointCount; i < n; i++) { m_segmentFlags.write(i, (byte) SegmentFlags.enumLineSeg); m_segmentParamIndex.write(i, -1); } } if (src.hasNonLinearSegments(srcPathIndex)) { // TODO: implement me. For example as a while loop over all curves. // Replace, calling ReplaceSegment throw new GeometryException("internal error"); } for (int ipath = pathIndex + 1, npaths = getPathCount(); ipath <= npaths; ipath++) { int num = m_paths.read(ipath); m_paths.write(ipath, num + srcPointCount); } } public void insertPoints(int pathIndex, int beforePointIndex, Point2D[] src, int srcPointIndexFrom, int srcPointCount, boolean bForward) { if (pathIndex < 0) pathIndex = getPathCount(); if (pathIndex > getPathCount() || beforePointIndex > getPathSize(pathIndex) || srcPointIndexFrom < 0 || srcPointCount > src.length) throw new GeometryException("index out of bounds"); if (srcPointCount == 0) return; if (pathIndex == getPathCount())// adding a new path. 
{ m_paths.add(m_pointCount); if (!m_bPolygon) m_pathFlags.add((byte) 0); else m_pathFlags.add((byte) PathFlags.enumClosed); } if (beforePointIndex < 0) beforePointIndex = getPathSize(pathIndex); _verifyAllStreams(); int oldPointCount = m_pointCount; _resizeImpl(m_pointCount + srcPointCount); _verifyAllStreams(); for (int iattr = 0, nattr = m_description.getAttributeCount(); iattr < nattr; iattr++) { int semantics = m_description._getSemanticsImpl(iattr); int comp = VertexDescription.getComponentCount(semantics); // copy range to make place for new vertices m_vertexAttributes[iattr] .writeRange( comp * (getPathStart(pathIndex) + beforePointIndex + srcPointCount), (oldPointCount - getPathStart(pathIndex) - beforePointIndex) * comp, m_vertexAttributes[iattr], comp * (getPathStart(pathIndex) + beforePointIndex), true, comp); if (iattr == 0) { // add vertices to the given stream ((AttributeStreamOfDbl) (AttributeStreamBase) m_vertexAttributes[iattr]) .writeRange(comp * (getPathStart(pathIndex) + beforePointIndex), srcPointCount, src, srcPointIndexFrom, bForward); } else { double v = VertexDescription.getDefaultValue(semantics); m_vertexAttributes[iattr].setRange(v, (getPathStart(pathIndex) + beforePointIndex) * comp, srcPointCount * comp); } } if (hasNonLinearSegments()) { m_segmentFlags.writeRange((getPathStart(pathIndex) + beforePointIndex + srcPointCount), (oldPointCount - getPathStart(pathIndex) - beforePointIndex), m_segmentFlags, (getPathStart(pathIndex) + beforePointIndex), true, 1); m_segmentParamIndex.writeRange((getPathStart(pathIndex) + beforePointIndex + srcPointCount), (oldPointCount - getPathStart(pathIndex) - beforePointIndex), m_segmentParamIndex, (getPathStart(pathIndex) + beforePointIndex), true, 1); m_segmentFlags.setRange((byte) SegmentFlags.enumLineSeg, getPathStart(pathIndex) + beforePointIndex, srcPointCount); m_segmentParamIndex.setRange(-1, getPathStart(pathIndex) + beforePointIndex, srcPointCount); } for (int ipath = pathIndex + 1, npaths = 
getPathCount(); ipath <= npaths; ipath++) { m_paths.write(ipath, m_paths.read(ipath) + srcPointCount); } } public void insertPoint(int pathIndex, int beforePointIndex, Point2D pt) { int pathCount = getPathCount(); if (pathIndex < 0) pathIndex = getPathCount(); if (pathIndex >= pathCount || beforePointIndex > getPathSize(pathIndex)) throw new GeometryException("index out of bounds"); if (pathIndex == getPathCount())// adding a new path. { m_paths.add(m_pointCount); if (!m_bPolygon) m_pathFlags.add((byte) 0); else m_pathFlags.add((byte) PathFlags.enumClosed); } if (beforePointIndex < 0) beforePointIndex = getPathSize(pathIndex); int oldPointCount = m_pointCount; _resizeImpl(m_pointCount + 1); _verifyAllStreams(); int pathStart = getPathStart(pathIndex); ((AttributeStreamOfDbl) (AttributeStreamBase) m_vertexAttributes[0]) .insert(2 * (pathStart + beforePointIndex), pt, 2 * oldPointCount); for (int iattr = 1, nattr = m_description.getAttributeCount(); iattr < nattr; iattr++) { int semantics = m_description._getSemanticsImpl(iattr); int comp = VertexDescription.getComponentCount(semantics); // Need to make room for the attribute, so we copy a default value // in double v = VertexDescription.getDefaultValue(semantics); m_vertexAttributes[iattr].insertRange(comp * (pathStart + beforePointIndex), v, comp, comp * oldPointCount); } for (int ipath = pathIndex + 1, npaths = pathCount; ipath <= npaths; ipath++) { m_paths.write(ipath, m_paths.read(ipath) + 1); } } public void insertPoint(int pathIndex, int beforePointIndex, Point pt) { int pathCount = getPathCount(); if (pathIndex < 0) pathIndex = getPathCount(); if (pathIndex >= pathCount || beforePointIndex > getPathSize(pathIndex)) throw new GeometryException("index out of bounds"); if (pathIndex == getPathCount())// adding a new path. 
{ m_paths.add(m_pointCount); if (!m_bPolygon) m_pathFlags.add((byte) 0); else m_pathFlags.add((byte) PathFlags.enumClosed); } if (beforePointIndex < 0) beforePointIndex = getPathSize(pathIndex); mergeVertexDescription(pt.getDescription()); int oldPointCount = m_pointCount; _resizeImpl(m_pointCount + 1); _verifyAllStreams(); int pathStart = getPathStart(pathIndex); for (int iattr = 0, nattr = m_description.getAttributeCount(); iattr < nattr; iattr++) { int semantics = m_description._getSemanticsImpl(iattr); int comp = VertexDescription.getComponentCount(semantics); if (pt.hasAttribute(semantics)) { m_vertexAttributes[iattr].insertAttributes(comp * (pathStart + beforePointIndex), pt, semantics, comp * oldPointCount); } else { // Need to make room for the attribute, so we copy a default // value in double v = VertexDescription.getDefaultValue(semantics); m_vertexAttributes[iattr].insertRange(comp * (pathStart + beforePointIndex), v, comp, comp * oldPointCount); } } for (int ipath = pathIndex + 1, npaths = pathCount; ipath <= npaths; ipath++) { m_paths.write(ipath, m_paths.read(ipath) + 1); } notifyModified(DirtyFlags.DirtyCoordinates); } public void removePoint(int pathIndex, int pointIndex) { int pathCount = getPathCount(); if (pathIndex < 0) pathIndex = pathCount - 1; if (pathIndex >= pathCount || pointIndex >= getPathSize(pathIndex)) throw new GeometryException("index out of bounds"); _verifyAllStreams(); int pathStart = getPathStart(pathIndex); if (pointIndex < 0) pointIndex = getPathSize(pathIndex) - 1; int absoluteIndex = pathStart + pointIndex; // Remove the attribute values for the path for (int iattr = 0, nattr = m_description.getAttributeCount(); iattr < nattr; iattr++) { if (m_vertexAttributes[iattr] != null) { int semantics = m_description._getSemanticsImpl(iattr); int comp = VertexDescription.getComponentCount(semantics); m_vertexAttributes[iattr].eraseRange(comp * absoluteIndex, comp, comp * m_pointCount); } } for (int ipath = pathCount; ipath >= 
pathIndex + 1; ipath--) { int iend = m_paths.read(ipath); m_paths.write(ipath, iend - 1); } m_pointCount--; m_reservedPointCount--; notifyModified(DirtyFlags.DirtyCoordinates); } public double calculatePathLength2D(int pathIndex) /* const */ { SegmentIteratorImpl segIter = querySegmentIteratorAtVertex(getPathStart(pathIndex)); MathUtils.KahanSummator len = new MathUtils.KahanSummator(0); while (segIter.hasNextSegment()) { len.add(segIter.nextSegment().calculateLength2D()); } return len.getResult(); } double calculateSubLength2D(int from_path_index, int from_point_index, int to_path_index, int to_point_index) { int absolute_from_index = getPathStart(from_path_index) + from_point_index; int absolute_to_index = getPathStart(to_path_index) + to_point_index; if (absolute_to_index < absolute_from_index || absolute_from_index < 0 || absolute_to_index > getPointCount() - 1) throw new IllegalArgumentException(); SegmentIteratorImpl seg_iter = querySegmentIterator(); double sub_length = 0.0; seg_iter.resetToVertex(absolute_from_index); do { while (seg_iter.hasNextSegment()) { Segment segment = seg_iter.nextSegment(); if (seg_iter.getStartPointIndex() == absolute_to_index) break; double segment_length = segment.calculateLength2D(); sub_length += segment_length; } if (seg_iter.getStartPointIndex() == absolute_to_index) break; } while (seg_iter.nextPath()); return sub_length; } double calculateSubLength2D(int path_index, int from_point_index, int to_point_index) { int absolute_from_index = getPathStart(path_index) + from_point_index; int absolute_to_index = getPathStart(path_index) + to_point_index; if (absolute_from_index < 0 || absolute_to_index > getPointCount() - 1) throw new IllegalArgumentException(); SegmentIteratorImpl seg_iter = querySegmentIterator(); if (absolute_from_index > absolute_to_index) { if (!isClosedPath(path_index)) throw new IllegalArgumentException( "cannot iterate across an open path"); seg_iter.setCirculator(true); } double prev_length = 0.0; double 
sub_length = 0.0; seg_iter.resetToVertex(absolute_from_index); do { assert (seg_iter.hasNextSegment()); sub_length += prev_length; Segment segment = seg_iter.nextSegment(); prev_length = segment.calculateLength2D(); } while (seg_iter.getStartPointIndex() != absolute_to_index); return sub_length; } Geometry getBoundary() { return Boundary.calculate(this, null); } // TODO: Add code fore interpolation type (none and angular) void interpolateAttributes(int from_path_index, int from_point_index, int to_path_index, int to_point_index) { for (int ipath = from_path_index; ipath < to_path_index - 1; ipath++) { if (isClosedPath(ipath)) throw new IllegalArgumentException( "cannot interpolate across closed paths"); } int nattr = m_description.getAttributeCount(); if (nattr == 1) return; // only has position double sub_length = calculateSubLength2D(from_path_index, from_point_index, to_path_index, to_point_index); if (sub_length == 0.0) return; for (int iattr = 1; iattr < nattr; iattr++) { int semantics = m_description.getSemantics(iattr); int interpolation = VertexDescription.getInterpolation(semantics); if (interpolation == VertexDescription.Interpolation.ANGULAR) continue; int components = VertexDescription.getComponentCount(semantics); for (int ordinate = 0; ordinate < components; ordinate++) interpolateAttributes_(semantics, from_path_index, from_point_index, to_path_index, to_point_index, sub_length, ordinate); } } // TODO: Add code for interpolation type (none and angular) void interpolateAttributesForSemantics(int semantics, int from_path_index, int from_point_index, int to_path_index, int to_point_index) { if (semantics == VertexDescription.Semantics.POSITION) return; if (!hasAttribute(semantics)) throw new IllegalArgumentException( "does not have the given attribute"); int interpolation = VertexDescription.getInterpolation(semantics); if (interpolation == VertexDescription.Interpolation.ANGULAR) throw new IllegalArgumentException( "not implemented for the given 
semantics"); for (int ipath = from_path_index; ipath < to_path_index - 1; ipath++) { if (isClosedPath(ipath)) throw new IllegalArgumentException( "cannot interpolate across closed paths"); } double sub_length = calculateSubLength2D(from_path_index, from_point_index, to_path_index, to_point_index); if (sub_length == 0.0) return; int components = VertexDescription.getComponentCount(semantics); for (int ordinate = 0; ordinate < components; ordinate++) interpolateAttributes_(semantics, from_path_index, from_point_index, to_path_index, to_point_index, sub_length, ordinate); } void interpolateAttributes(int path_index, int from_point_index, int to_point_index) { int nattr = m_description.getAttributeCount(); if (nattr == 1) return; // only has position double sub_length = calculateSubLength2D(path_index, from_point_index, to_point_index); if (sub_length == 0.0) return; for (int iattr = 1; iattr < nattr; iattr++) { int semantics = m_description.getSemantics(iattr); int interpolation = VertexDescription.getInterpolation(semantics); if (interpolation == VertexDescription.Interpolation.ANGULAR) continue; int components = VertexDescription.getComponentCount(semantics); for (int ordinate = 0; ordinate < components; ordinate++) interpolateAttributes_(semantics, path_index, from_point_index, to_point_index, sub_length, ordinate); } } void interpolateAttributesForSemantics(int semantics, int path_index, int from_point_index, int to_point_index) { if (semantics == VertexDescription.Semantics.POSITION) return; if (!hasAttribute(semantics)) throw new IllegalArgumentException( "does not have the given attribute"); int interpolation = VertexDescription.getInterpolation(semantics); if (interpolation == VertexDescription.Interpolation.ANGULAR) throw new IllegalArgumentException( "not implemented for the given semantics"); double sub_length = calculateSubLength2D(path_index, from_point_index, to_point_index); if (sub_length == 0.0) return; int components = 
VertexDescription.getComponentCount(semantics); for (int ordinate = 0; ordinate < components; ordinate++) interpolateAttributes_(semantics, path_index, from_point_index, to_point_index, sub_length, ordinate); } // TODO: Add code fore interpolation type (none and angular) void interpolateAttributes_(int semantics, int from_path_index, int from_point_index, int to_path_index, int to_point_index, double sub_length, int ordinate) { SegmentIteratorImpl seg_iter = querySegmentIterator(); int absolute_from_index = getPathStart(from_path_index) + from_point_index; int absolute_to_index = getPathStart(to_path_index) + to_point_index; double from_attribute = getAttributeAsDbl(semantics, absolute_from_index, ordinate); double to_attribute = getAttributeAsDbl(semantics, absolute_to_index, ordinate); double interpolated_attribute = from_attribute; double cumulative_length = 0.0; seg_iter.resetToVertex(absolute_from_index); do { if (seg_iter.hasNextSegment()) { seg_iter.nextSegment(); if (seg_iter.getStartPointIndex() == absolute_to_index) return; setAttribute(semantics, seg_iter.getStartPointIndex(), ordinate, interpolated_attribute); seg_iter.previousSegment(); do { Segment segment = seg_iter.nextSegment(); if (seg_iter.getEndPointIndex() == absolute_to_index) return; double segment_length = segment.calculateLength2D(); cumulative_length += segment_length; double t = cumulative_length / sub_length; interpolated_attribute = (1.0 - t) * from_attribute + t * to_attribute; if (!seg_iter.isClosingSegment()) setAttribute(semantics, seg_iter.getEndPointIndex(), ordinate, interpolated_attribute); } while (seg_iter.hasNextSegment()); } } while (seg_iter.nextPath()); } void interpolateAttributes_(int semantics, int path_index, int from_point_index, int to_point_index, double sub_length, int ordinate) { assert (m_bPolygon); SegmentIteratorImpl seg_iter = querySegmentIterator(); int absolute_from_index = getPathStart(path_index) + from_point_index; int absolute_to_index = 
getPathStart(path_index) + to_point_index; if (absolute_to_index == absolute_from_index) return; double from_attribute = getAttributeAsDbl(semantics, absolute_from_index, ordinate); double to_attribute = getAttributeAsDbl(semantics, absolute_to_index, ordinate); double cumulative_length = 0.0; seg_iter.resetToVertex(absolute_from_index); seg_iter.setCirculator(true); double prev_interpolated_attribute = from_attribute; do { Segment segment = seg_iter.nextSegment(); setAttribute(semantics, seg_iter.getStartPointIndex(), ordinate, prev_interpolated_attribute); double segment_length = segment.calculateLength2D(); cumulative_length += segment_length; double t = cumulative_length / sub_length; prev_interpolated_attribute = (1.0 - t) * from_attribute + t * to_attribute; } while (seg_iter.getEndPointIndex() != absolute_to_index); } @Override public void setEmpty() { m_curveParamwritePoint = 0; m_bPathStarted = false; m_paths = null; m_pathFlags = null; m_segmentParamIndex = null; m_segmentFlags = null; m_segmentParams = null; _setEmptyImpl(); } @Override public void applyTransformation(Transformation2D transform) { applyTransformation(transform, -1); } public void applyTransformation(Transformation2D transform, int pathIndex) { if (isEmpty()) return; if (transform.isIdentity()) return; _verifyAllStreams(); AttributeStreamOfDbl points = (AttributeStreamOfDbl) m_vertexAttributes[0]; Point2D ptStart = new Point2D(); Point2D ptControl = new Point2D(); boolean bHasNonLinear; int fistIdx; int lastIdx; if (pathIndex < 0) { bHasNonLinear = hasNonLinearSegments(); fistIdx = 0; lastIdx = m_pointCount; } else { bHasNonLinear = hasNonLinearSegments(pathIndex); fistIdx = getPathStart(pathIndex); lastIdx = getPathEnd(pathIndex); } for (int ipoint = fistIdx; ipoint < lastIdx; ipoint++) { ptStart.x = points.read(ipoint * 2); ptStart.y = points.read(ipoint * 2 + 1); if (bHasNonLinear) { int segIndex = m_segmentParamIndex.read(ipoint); if (segIndex >= 0) { int segmentType = (int) 
m_segmentFlags.read(ipoint); int type = segmentType & SegmentFlags.enumSegmentMask; switch (type) { case SegmentFlags.enumBezierSeg: { ptControl.x = m_segmentParams.read(segIndex); ptControl.y = m_segmentParams.read(segIndex + 1); transform.transform(ptControl, ptControl); m_segmentParams.write(segIndex, ptControl.x); m_segmentParams.write(segIndex + 1, ptControl.y); ptControl.x = m_segmentParams.read(segIndex + 3); ptControl.y = m_segmentParams.read(segIndex + 4); transform.transform(ptControl, ptControl); m_segmentParams.write(segIndex + 3, ptControl.x); m_segmentParams.write(segIndex + 4, ptControl.y); } break; case SegmentFlags.enumArcSeg: throw new GeometryException("internal error"); } } } transform.transform(ptStart, ptStart); points.write(ipoint * 2, ptStart.x); points.write(ipoint * 2 + 1, ptStart.y); } notifyModified(DirtyFlags.DirtyCoordinates); // REFACTOR: reset the exact envelope only and transform the loose // envelope } @Override public void applyTransformation(Transformation3D transform) { if (isEmpty()) return; addAttribute(VertexDescription.Semantics.Z); _verifyAllStreams(); AttributeStreamOfDbl points = (AttributeStreamOfDbl) m_vertexAttributes[0]; AttributeStreamOfDbl zs = (AttributeStreamOfDbl) m_vertexAttributes[1]; Point3D ptStart = new Point3D(); Point3D ptControl = new Point3D(); boolean bHasNonLinear = hasNonLinearSegments(); for (int ipoint = 0; ipoint < m_pointCount; ipoint++) { ptStart.x = points.read(ipoint * 2); ptStart.y = points.read(ipoint * 2 + 1); ptStart.z = zs.read(ipoint); if (bHasNonLinear) { int segIndex = m_segmentParamIndex.read(ipoint); if (segIndex >= 0) { int segmentType = (int) m_segmentFlags.read(ipoint); int type = segmentType & (int) SegmentFlags.enumSegmentMask; switch (type) { case SegmentFlags.enumBezierSeg: { ptControl.x = m_segmentParams.read(segIndex); ptControl.y = m_segmentParams.read(segIndex + 1); ptControl.z = m_segmentParams.read(segIndex + 2); ptControl = transform.transform(ptControl); 
m_segmentParams.write(segIndex, ptControl.x); m_segmentParams.write(segIndex + 1, ptControl.y); m_segmentParams.write(segIndex + 1, ptControl.z); ptControl.x = m_segmentParams.read(segIndex + 3); ptControl.y = m_segmentParams.read(segIndex + 4); ptControl.z = m_segmentParams.read(segIndex + 5); ptControl = transform.transform(ptControl); m_segmentParams.write(segIndex + 3, ptControl.x); m_segmentParams.write(segIndex + 4, ptControl.y); m_segmentParams.write(segIndex + 5, ptControl.z); } break; case SegmentFlags.enumArcSeg: throw new GeometryException("internal error"); } } } ptStart = transform.transform(ptStart); points.write(ipoint * 2, ptStart.x); points.write(ipoint * 2 + 1, ptStart.y); zs.write(ipoint, ptStart.z); } // REFACTOR: reset the exact envelope only and transform the loose // envelope notifyModified(DirtyFlags.DirtyCoordinates); } @Override protected void _verifyStreamsImpl() { if (m_paths == null) { m_paths = (AttributeStreamOfInt32) AttributeStreamBase .createIndexStream(1, 0); m_pathFlags = (AttributeStreamOfInt8) AttributeStreamBase .createByteStream(1, (byte) 0); } if (m_segmentFlags != null) { m_segmentFlags.resize(m_reservedPointCount, (byte) SegmentFlags.enumLineSeg); m_segmentParamIndex.resize(m_reservedPointCount, -1); } } @Override void _copyToImpl(MultiVertexGeometryImpl dst) { MultiPathImpl dstPoly = (MultiPathImpl) dst; dstPoly.m_bPathStarted = false; dstPoly.m_curveParamwritePoint = m_curveParamwritePoint; if (m_paths != null) dstPoly.m_paths = new AttributeStreamOfInt32(m_paths); else dstPoly.m_paths = null; if (m_pathFlags != null) dstPoly.m_pathFlags = new AttributeStreamOfInt8(m_pathFlags); else dstPoly.m_pathFlags = null; if (m_segmentParamIndex != null) dstPoly.m_segmentParamIndex = new AttributeStreamOfInt32( m_segmentParamIndex); else dstPoly.m_segmentParamIndex = null; if (m_segmentFlags != null) dstPoly.m_segmentFlags = new AttributeStreamOfInt8(m_segmentFlags); else dstPoly.m_segmentFlags = null; if (m_segmentParams != null) 
dstPoly.m_segmentParams = new AttributeStreamOfDbl(m_segmentParams); else dstPoly.m_segmentParams = null; dstPoly.m_cachedLength2D = m_cachedLength2D; dstPoly.m_cachedArea2D = m_cachedArea2D; if (!_hasDirtyFlag(DirtyFlags.DirtyRingAreas2D)) { dstPoly.m_cachedRingAreas2D = (AttributeStreamOfDbl) m_cachedRingAreas2D; } else dstPoly.m_cachedRingAreas2D = null; } @Override public double calculateLength2D() { if (!_hasDirtyFlag(DirtyFlags.DirtyLength2D)) { return m_cachedLength2D; } SegmentIteratorImpl segIter = querySegmentIterator(); MathUtils.KahanSummator len = new MathUtils.KahanSummator(0); while (segIter.nextPath()) { while (segIter.hasNextSegment()) { len.add(segIter.nextSegment().calculateLength2D()); } } m_cachedLength2D = len.getResult(); _setDirtyFlag(DirtyFlags.DirtyLength2D, false); return len.getResult(); } @Override public boolean equals(Object other) { if (other == this) return true; if (!(other instanceof MultiPathImpl)) return false; if (!super.equals(other)) return false; MultiPathImpl otherMultiPath = (MultiPathImpl) other; int pathCount = getPathCount(); int pathCountOther = otherMultiPath.getPathCount(); if (pathCount != pathCountOther) return false; if (m_paths != null && !m_paths.equals(otherMultiPath.m_paths, 0, pathCount + 1)) return false; if (m_pathFlags != null && !m_pathFlags .equals(otherMultiPath.m_pathFlags, 0, pathCount)) return false; return super.equals(other); } /** * Returns a SegmentIterator that set to a specific vertex of the * MultiPathImpl. The call to NextSegment will return the segment that * starts at the vertex. Call to PreviousSegment will return the segment * that starts at the previous vertex. 
*/ public SegmentIteratorImpl querySegmentIteratorAtVertex(int startVertexIndex) { if (startVertexIndex < 0 || startVertexIndex >= getPointCount()) throw new IndexOutOfBoundsException(); SegmentIteratorImpl iter = new SegmentIteratorImpl(this, startVertexIndex); return iter; } // void QuerySegmentIterator(int fromVertex, SegmentIterator iterator); public SegmentIteratorImpl querySegmentIterator() { return new SegmentIteratorImpl(this); } @Override public void _updateXYImpl(boolean bExact) { super._updateXYImpl(bExact); boolean bHasCurves = hasNonLinearSegments(); if (bHasCurves) { SegmentIteratorImpl segIter = querySegmentIterator(); while (segIter.nextPath()) { while (segIter.hasNextSegment()) { Segment curve = segIter.nextCurve(); if (curve != null) { Envelope2D env2D = new Envelope2D(); curve.queryEnvelope2D(env2D); m_envelope.merge(env2D); } else break; } } } } @Override void calculateEnvelope2D(Envelope2D env, boolean bExact) { super.calculateEnvelope2D(env, bExact); boolean bHasCurves = hasNonLinearSegments(); if (bHasCurves) { SegmentIteratorImpl segIter = querySegmentIterator(); while (segIter.nextPath()) { while (segIter.hasNextSegment()) { Segment curve = segIter.nextCurve(); if (curve != null) { Envelope2D env2D = new Envelope2D(); curve.queryEnvelope2D(env2D); env.merge(env2D); } else break; } } } } @Override public void _notifyModifiedAllImpl() { if (m_paths == null || m_paths.size() == 0)// if (m_paths == null || // !m_paths.size()) m_pointCount = 0; else m_pointCount = m_paths.read(m_paths.size() - 1); } @Override public double calculateArea2D() { if (!m_bPolygon) return 0.0; _updateRingAreas2D(); return m_cachedArea2D; } /** * Returns True if the ring is an exterior ring. Valid only for simple * polygons. 
*/ public boolean isExteriorRing(int ringIndex) { if (!m_bPolygon) return false; if (!_hasDirtyFlag(DirtyFlags.DirtyOGCFlags)) return (m_pathFlags.read(ringIndex) & (byte) PathFlags.enumOGCStartPolygon) != 0; _updateRingAreas2D(); return m_cachedRingAreas2D.read(ringIndex) > 0; // Should we make a function called _UpdateHasNonLinearSegmentsFlags and // call it here? } public double calculateRingArea2D(int pathIndex) { if (!m_bPolygon) return 0.0; _updateRingAreas2D(); return m_cachedRingAreas2D.read(pathIndex); } public void _updateRingAreas2D() { if (_hasDirtyFlag(DirtyFlags.DirtyRingAreas2D)) { int pathCount = getPathCount(); if (m_cachedRingAreas2D == null) m_cachedRingAreas2D = new AttributeStreamOfDbl(pathCount); else if (m_cachedRingAreas2D.size() != pathCount) m_cachedRingAreas2D.resize(pathCount); MathUtils.KahanSummator totalArea = new MathUtils.KahanSummator(0); MathUtils.KahanSummator pathArea = new MathUtils.KahanSummator(0); Point2D pt = new Point2D(); int ipath = 0; SegmentIteratorImpl segIter = querySegmentIterator(); while (segIter.nextPath()) { pathArea.reset(); getXY(getPathStart(segIter.getPathIndex()), pt);// get the area // calculation // origin to be // the origin of // the ring. 
while (segIter.hasNextSegment()) { pathArea.add(segIter.nextSegment()._calculateArea2DHelper( pt.x, pt.y)); } totalArea.add(pathArea.getResult()); int i = ipath++; m_cachedRingAreas2D.write(i, pathArea.getResult()); } m_cachedArea2D = totalArea.getResult(); _setDirtyFlag(DirtyFlags.DirtyRingAreas2D, false); } } int getOGCPolygonCount() { if (!m_bPolygon) return 0; _updateOGCFlags(); int polygonCount = 0; int partCount = getPathCount(); for (int ipart = 0; ipart < partCount; ipart++) { if (((int) m_pathFlags.read(ipart) & (int) PathFlags.enumOGCStartPolygon) != 0) polygonCount++; } return polygonCount; } protected void _updateOGCFlags() { if (_hasDirtyFlag(DirtyFlags.DirtyOGCFlags)) { _updateRingAreas2D(); int pathCount = getPathCount(); if (m_pathFlags == null || m_pathFlags.size() < pathCount) m_pathFlags = (AttributeStreamOfInt8) AttributeStreamBase .createByteStream(pathCount + 1); int firstSign = 1; for (int ipath = 0; ipath < pathCount; ipath++) { double area = m_cachedRingAreas2D.read(ipath); if (ipath == 0) firstSign = area > 0 ? 1 : -1; if (area * firstSign > 0.0) m_pathFlags.setBits(ipath, (byte) PathFlags.enumOGCStartPolygon); else m_pathFlags.clearBits(ipath, (byte) PathFlags.enumOGCStartPolygon); } _setDirtyFlag(DirtyFlags.DirtyOGCFlags, false); } } public int getPathIndexFromPointIndex(int pointIndex) { int positionHint = m_currentPathIndex;// in case of multithreading // thiswould simply produce an // invalid value int pathCount = getPathCount(); // Try using the hint position first to get the path index. 
if (positionHint >= 0 && positionHint < pathCount) { if (pointIndex < getPathEnd(positionHint)) { if (pointIndex >= getPathStart(positionHint)) return positionHint; positionHint--; } else { positionHint++; } if (positionHint >= 0 && positionHint < pathCount) { if (pointIndex >= getPathStart(positionHint) && pointIndex < getPathEnd(positionHint)) { m_currentPathIndex = positionHint; return positionHint; } } } if (pathCount < 5) {// TODO: time the performance to choose when to use // linear search. for (int i = 0; i < pathCount; i++) { if (pointIndex < getPathEnd(i)) { m_currentPathIndex = i; return i; } } throw new GeometryException("corrupted geometry"); } // Do binary search: int minPathIndex = 0; int maxPathIndex = pathCount - 1; while (maxPathIndex > minPathIndex) { int mid = minPathIndex + ((maxPathIndex - minPathIndex) >> 1); int pathStart = getPathStart(mid); if (pointIndex < pathStart) maxPathIndex = mid - 1; else { int pathEnd = getPathEnd(mid); if (pointIndex >= pathEnd) minPathIndex = mid + 1; else { m_currentPathIndex = mid; return mid; } } } m_currentPathIndex = minPathIndex; return minPathIndex; } int getHighestPointIndex(int path_index) { assert (path_index >= 0 && path_index < getPathCount()); AttributeStreamOfDbl position = (AttributeStreamOfDbl) (getAttributeStreamRef(VertexDescription.Semantics.POSITION)); AttributeStreamOfInt32 paths = (AttributeStreamOfInt32) (getPathStreamRef()); int path_end = getPathEnd(path_index); int path_start = getPathStart(path_index); int max_index = -1; Point2D max_point = new Point2D(), pt = new Point2D(); max_point.y = NumberUtils.negativeInf(); max_point.x = NumberUtils.negativeInf(); for (int i = path_start + 0; i < path_end; i++) { position.read(2 * i, pt); if (max_point.compare(pt) == -1) { max_index = i; max_point.setCoords(pt); } } return max_index; } /** * Returns total segment count in the MultiPathImpl. 
*/ public int getSegmentCount() { int segCount = getPointCount(); if (!m_bPolygon) { segCount -= getPathCount(); for (int i = 0, n = getPathCount(); i < n; i++) if (isClosedPath(i)) segCount++; } return segCount; } public int getSegmentCount(int path_index) { int segCount = getPathSize(path_index); if (!isClosedPath(path_index)) segCount--; return segCount; } // HEADER defintions @Override public Geometry createInstance() { return new MultiPathImpl(m_bPolygon, getDescription()); } @Override public int getDimension() { return m_bPolygon ? 2 : 1; } @Override public Geometry.Type getType() { return m_bPolygon ? Type.Polygon : Type.Polyline; } /** * Returns True if the class is envelope. THis is not an exact method. Only * addEnvelope makes this true. */ public boolean isEnvelope() { return !_hasDirtyFlag(DirtyFlags.DirtyIsEnvelope); } /** * Returns a reference to the AttributeStream of MultiPathImpl parts * (Paths). * * For the non empty MultiPathImpl, that stream contains start points of the * MultiPathImpl curves. In addition, the last element is the total point * count. The number of vertices in a given part is parts[i + 1] - parts[i]. */ public AttributeStreamOfInt32 getPathStreamRef() { throwIfEmpty(); return m_paths; } /** * sets a reference to an AttributeStream of MultiPathImpl paths (Paths). */ public void setPathStreamRef(AttributeStreamOfInt32 paths) { m_paths = paths; notifyModified(DirtyFlags.DirtyAll); } /** * Returns a reference to the AttributeStream of Segment flags (SegmentFlags * flags). Can be NULL when no non-linear segments are present. * * Segment flags indicate what kind of segment originates (starts) on the * given point. The last vertices of open Path parts has enumNone flag. */ public AttributeStreamOfInt8 getSegmentFlagsStreamRef() { throwIfEmpty(); return m_segmentFlags; } /** * Returns a reference to the AttributeStream of Path flags (PathFlags * flags). 
* * Each start point of a path has a flag set to indicate if the Path is open * or closed. */ public AttributeStreamOfInt8 getPathFlagsStreamRef() { throwIfEmpty(); return m_pathFlags; } /** * sets a reference to an AttributeStream of Path flags (PathFlags flags). */ public void setPathFlagsStreamRef(AttributeStreamOfInt8 pathFlags) { m_pathFlags = pathFlags; notifyModified(DirtyFlags.DirtyAll); } public AttributeStreamOfInt32 getSegmentIndexStreamRef() { throwIfEmpty(); return m_segmentParamIndex; } public AttributeStreamOfDbl getSegmentDataStreamRef() { throwIfEmpty(); return m_segmentParams; } public int getPathCount() { return (m_paths != null) ? m_paths.size() - 1 : 0; } public int getPathEnd(int partIndex) { return m_paths.read(partIndex + 1); } public int getPathSize(int partIndex) { return m_paths.read(partIndex + 1) - m_paths.read(partIndex); } public int getPathStart(int partIndex) { return m_paths.read(partIndex); } @Override public Object _getImpl() { return this; } public void setDirtyOGCFlags(boolean bYesNo) { _setDirtyFlag(DirtyFlags.DirtyOGCFlags, bYesNo); } public boolean hasDirtyOGCStartFlags() { return _hasDirtyFlag(DirtyFlags.DirtyOGCFlags); } public void setDirtyRingAreas2D(boolean bYesNo) { _setDirtyFlag(DirtyFlags.DirtyRingAreas2D, bYesNo); } public boolean hasDirtyRingAreas2D() { return _hasDirtyFlag(DirtyFlags.DirtyRingAreas2D); } public void setRingAreasStreamRef(AttributeStreamOfDbl ringAreas) { m_cachedRingAreas2D = ringAreas; _setDirtyFlag(DirtyFlags.DirtyRingAreas2D, false); } // HEADER defintions // // TODO check this against current implementation in native // public void notifyModified(int flags) // { // if(flags == DirtyFlags.DirtyAll) // { // m_reservedPointCount = -1; // _notifyModifiedAllImpl(); // } // m_flagsMask |= flags; // _clearAccelerators(); // // // // ROHIT's implementation // // if (m_paths == null || 0 == m_paths.size()) // // m_pointCount = 0; // // else // // m_pointCount = m_paths.read(m_paths.size() - 1); // // 
// // super.notifyModified(flags); // } @Override public boolean _buildRasterizedGeometryAccelerator(double toleranceXY, GeometryAccelerationDegree accelDegree) { if (m_accelerators == null)// (!m_accelerators) { m_accelerators = new GeometryAccelerators(); } int rasterSize = RasterizedGeometry2D .rasterSizeFromAccelerationDegree(accelDegree); RasterizedGeometry2D rgeom = m_accelerators.getRasterizedGeometry(); if (rgeom != null) { if (rgeom.getToleranceXY() < toleranceXY || rasterSize > rgeom.getRasterSize()) { m_accelerators._setRasterizedGeometry(null); } else return true; } rgeom = RasterizedGeometry2D.create(this, toleranceXY, rasterSize); m_accelerators._setRasterizedGeometry(rgeom); return true; } @Override public int hashCode() { int hashCode = super.hashCode(); if (!isEmptyImpl()) { int pathCount = getPathCount(); if (m_paths != null) m_paths.calculateHashImpl(hashCode, 0, pathCount + 1); if (m_pathFlags != null) m_pathFlags.calculateHashImpl(hashCode, 0, pathCount); } return hashCode; } public byte getSegmentFlags(int ivertex) { if (m_segmentFlags != null) return m_segmentFlags.read(ivertex); else return (byte) SegmentFlags.enumLineSeg; } public void getSegment(int startVertexIndex, SegmentBuffer segBuffer, boolean bStripAttributes) { int ipath = getPathIndexFromPointIndex(startVertexIndex); if (startVertexIndex == getPathEnd(ipath) - 1 && !isClosedPath(ipath)) throw new GeometryException("index out of bounds"); _verifyAllStreams(); AttributeStreamOfInt8 segFlagStream = getSegmentFlagsStreamRef(); int segFlag = SegmentFlags.enumLineSeg; if (segFlagStream != null) segFlag = segFlagStream.read(startVertexIndex) & SegmentFlags.enumSegmentMask; switch (segFlag) { case SegmentFlags.enumLineSeg: segBuffer.createLine(); break; case SegmentFlags.enumBezierSeg: throw new GeometryException("internal error"); case SegmentFlags.enumArcSeg: throw new GeometryException("internal error"); default: throw new GeometryException("internal error"); } Segment currentSegment = 
segBuffer.get(); if (!bStripAttributes) currentSegment.assignVertexDescription(m_description); else currentSegment .assignVertexDescription(VertexDescriptionDesignerImpl .getDefaultDescriptor2D()); int endVertexIndex; if (startVertexIndex == getPathEnd(ipath) - 1 && isClosedPath(ipath)) { endVertexIndex = getPathStart(ipath); } else endVertexIndex = startVertexIndex + 1; Point2D pt = new Point2D(); getXY(startVertexIndex, pt); currentSegment.setStartXY(pt); getXY(endVertexIndex, pt); currentSegment.setEndXY(pt); if (!bStripAttributes) { for (int i = 1, nattr = m_description.getAttributeCount(); i < nattr; i++) { int semantics = m_description._getSemanticsImpl(i); int ncomp = VertexDescription.getComponentCount(semantics); for (int ord = 0; ord < ncomp; ord++) { double vs = getAttributeAsDbl(semantics, startVertexIndex, ord); currentSegment.setStartAttribute(semantics, ord, vs); double ve = getAttributeAsDbl(semantics, endVertexIndex, ord); currentSegment.setEndAttribute(semantics, ord, ve); } } } } void queryPathEnvelope2D(int path_index, Envelope2D envelope) { if (path_index >= getPathCount()) throw new IllegalArgumentException(); if (isEmpty()) { envelope.setEmpty(); return; } if (hasNonLinearSegments(path_index)) { throw new GeometryException("not implemented"); } else { AttributeStreamOfDbl stream = (AttributeStreamOfDbl) getAttributeStreamRef(VertexDescription.Semantics.POSITION); Point2D pt = new Point2D(); Envelope2D env = new Envelope2D(); env.setEmpty(); for (int i = getPathStart(path_index), iend = getPathEnd(path_index); i < iend; i++) { stream.read(2 * i, pt); env.merge(pt); } envelope.setCoords(env); } } @Override public boolean _buildQuadTreeAccelerator(GeometryAccelerationDegree d) { if (m_accelerators == null)// (!m_accelerators) { m_accelerators = new GeometryAccelerators(); } if (d == GeometryAccelerationDegree.enumMild || getPointCount() < 16) return false; QuadTreeImpl quad_tree_impl = InternalUtils.buildQuadTree(this); 
m_accelerators._setQuadTree(quad_tree_impl); return true; } }
apache-2.0
saego/RepositBasic
Chess/src/main/java/King.java
882
import static java.lang.Math.abs; /** Created by ${Ruslan} on 22.12.16. */ public class King extends Figure{ King(String colour, Cell cell) { super(colour, cell); } //way of figure @Override public Cell[] way(Cell newPosition) throws ImpossibleToMoveException { boolean invalid = true; if ((abs(this.position.getPositionH() - newPosition.getPositionH()) <= 1) && (abs(this.position.getPositionV() - newPosition.getPositionV()) <= 1)){ invalid = false; } if (invalid){ throw new ImpossibleToMoveException("King can't move this way !!!"); } else { int pointQuantity = 1; Cell []wayPoints = new Cell[pointQuantity]; wayPoints[0] = new Cell(newPosition.getPositionH(), newPosition.getPositionV()); return wayPoints; } } }
apache-2.0
Ccook/Stonewall
src/test/java/edu/american/student/stonewall/display/css/property/BorderPropertyTest.java
603
package edu.american.student.stonewall.display.css.property;

import static org.junit.Assert.assertTrue;

import org.junit.Test;

import edu.american.student.stonewall.display.css.util.CSSColor;

/**
 * Checks that every border setting applied through BorderProperty's fluent
 * setters shows up as its CSS property name in the generated output.
 */
public class BorderPropertyTest {
    @Test
    public void test() {
        BorderProperty borderProperty = new BorderProperty();
        borderProperty
                .setBorderBottomColor(CSSColor.AliceBlue)
                .setBorderCollapsed()
                .setBorderLeftColor(CSSColor.Green);

        // Each configured setting must surface under its CSS property name.
        String[] expectedNames = {"border-left-color", "border-collapse", "border-bottom-color"};
        for (String expectedName : expectedNames) {
            assertTrue(borderProperty.makeProperties().contains(expectedName));
        }
    }
}
apache-2.0
glorycloud/GloryMail
CloudyMail/lib_src/org/apache/commons/io/input/TailerListener.java
2175
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.io.input;

/**
 * Listener for events from a {@link Tailer}.
 * <p>
 * All callbacks except {@link #init(Tailer)} are invoked from the tailer's
 * own thread; implementations that touch shared state must synchronize
 * accordingly.
 *
 * @version $Id: TailerListener.java 1002921 2010-09-30 01:33:38Z sebb $
 * @since Commons IO 2.0
 */
public interface TailerListener {

    /**
     * The tailer will call this method during construction,
     * giving the listener a method of stopping the tailer
     * (e.g. by keeping the reference and later calling its stop method).
     *
     * @param tailer the tailer.
     */
    public void init(Tailer tailer);

    /**
     * This method is called if the tailed file is not found.
     * <p>
     * <b>Note:</b> this is called from the tailer thread.
     */
    public void fileNotFound();

    /**
     * Called if a file rotation is detected.
     *
     * This method is called before the file is reopened, and fileNotFound may
     * be called if the new file has not yet been created.
     * <p>
     * <b>Note:</b> this is called from the tailer thread.
     */
    public void fileRotated();

    /**
     * Handles a line from a Tailer.
     * <p>
     * <b>Note:</b> this is called from the tailer thread.
     * @param line the line.
     */
    public void handle(String line);

    /**
     * Handles an Exception raised while tailing.
     * <p>
     * <b>Note:</b> this is called from the tailer thread.
     * @param ex the exception.
     */
    public void handle(Exception ex);
}
apache-2.0
consulo/consulo
modules/base/lang-injecting-impl/src/main/java/org/intellij/plugins/intelliLang/inject/InjectLanguageAction.java
9621
/*
 * Copyright 2006 Sascha Weinreuter
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.intellij.plugins.intelliLang.inject;

import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.hint.QuestionAction;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.lang.Language;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.PopupChooserBuilder;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiModificationTrackerImpl;
import com.intellij.psi.injection.Injectable;
import com.intellij.psi.injection.ReferenceInjector;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.ui.ColoredListCellRenderer;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.components.JBList;
import com.intellij.util.FileContentUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import consulo.psi.injection.LanguageInjectionSupport;
import consulo.util.dataholder.Key;
import org.intellij.plugins.intelliLang.Configuration;
import org.intellij.plugins.intelliLang.references.InjectedReferencesContributor;
import org.jetbrains.annotations.NonNls;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Intention action ("Inject Language/Reference") that lets the user pick a
 * language or reference injector for the string literal (or other injection
 * host) under the caret, then registers a temporary injection for it.
 */
public class InjectLanguageAction implements IntentionAction {
  @NonNls
  private static final String INJECT_LANGUAGE_FAMILY = "Inject Language/Reference";

  // PropertiesComponent key remembering the last language the user injected,
  // used to preselect it in the chooser popup next time.
  public static final String LAST_INJECTED_LANGUAGE = "LAST_INJECTED_LANGUAGE";

  // Injection supports may attach a "fix" processor to the host under this key;
  // invokeImpl() offers it to the user as an annotation-insertion quick fix.
  public static final Key<Processor<PsiLanguageInjectionHost>> FIX_KEY = Key.create("inject fix key");

  /**
   * Returns every choosable injectable: all registered injectable languages
   * plus all ReferenceInjector extensions, sorted.
   */
  public static List<Injectable> getAllInjectables() {
    Language[] languages = InjectedLanguage.getAvailableLanguages();
    List<Injectable> list = new ArrayList<Injectable>();
    for (Language language : languages) {
      list.add(Injectable.fromLanguage(language));
    }
    list.addAll(ReferenceInjector.EXTENSION_POINT_NAME.getExtensionList());
    Collections.sort(list);
    return list;
  }

  @Nonnull
  public String getText() {
    return INJECT_LANGUAGE_FAMILY;
  }

  @Nonnull
  public String getFamilyName() {
    return INJECT_LANGUAGE_FAMILY;
  }

  /**
   * Available when the caret is on a valid injection host that either has no
   * injected PSI yet, or — if nothing is injected — is not already covered by
   * an injected reference at the caret offset.
   */
  public boolean isAvailable(@Nonnull Project project, Editor editor, PsiFile file) {
    final PsiLanguageInjectionHost host = findInjectionHost(editor, file);
    if (host == null) return false;
    final List<Pair<PsiElement, TextRange>> injectedPsi = InjectedLanguageManager.getInstance(project).getInjectedPsiFiles(host);
    if (injectedPsi == null || injectedPsi.isEmpty()) {
      return !InjectedReferencesContributor.isInjected(file.findReferenceAt(editor.getCaretModel().getOffset()));
    }
    return true;
  }

  /**
   * Finds the valid PsiLanguageInjectionHost around the caret, or null.
   * Returns null inside an EditorWindow (i.e. when already editing an
   * injected fragment) and for hosts that report isValidHost() == false.
   */
  @Nullable
  protected static PsiLanguageInjectionHost findInjectionHost(Editor editor, PsiFile file) {
    if (editor instanceof EditorWindow) return null;
    final int offset = editor.getCaretModel().getOffset();
    final PsiLanguageInjectionHost host = PsiTreeUtil.getParentOfType(file.findElementAt(offset), PsiLanguageInjectionHost.class, false);
    if (host == null) return null;
    return host.isValidHost() ? host : null;
  }

  /**
   * Shows the language chooser; when the user picks one, applies the
   * injection inside a read action (skipped if the project was disposed
   * in the meantime).
   */
  public void invoke(@Nonnull final Project project, final Editor editor, final PsiFile file) throws IncorrectOperationException {
    doChooseLanguageToInject(editor, new Processor<Injectable>() {
      public boolean process(final Injectable injectable) {
        ApplicationManager.getApplication().runReadAction(new Runnable() {
          public void run() {
            if (!project.isDisposed()) {
              invokeImpl(project, editor, file, injectable);
            }
          }
        });
        return false;
      }
    });
  }

  /**
   * Applies the chosen injectable to the host under the caret.
   * Order of attempts:
   * 1) re-enable a previously configured injection (defaultFunctionalityWorked);
   * 2) any applicable registered LanguageInjectionSupport;
   * 3) the temporary-places support, optionally offering the FIX_KEY
   *    processor as a "insert annotation?" question hint.
   * The finally block reparses files (language injection) or bumps the PSI
   * modification counter and restarts highlighting (reference injection).
   */
  public static void invokeImpl(Project project, Editor editor, final PsiFile file, Injectable injectable) {
    final PsiLanguageInjectionHost host = findInjectionHost(editor, file);
    if (host == null) return;
    if (defaultFunctionalityWorked(host, injectable.getId())) return;

    try {
      // Clear any stale quick-fix processor before supports get a chance to set one.
      host.putUserData(FIX_KEY, null);
      Language language = injectable.toLanguage();
      for (LanguageInjectionSupport support : InjectorUtils.getActiveInjectionSupports()) {
        if (support.isApplicableTo(host) && support.addInjectionInPlace(language, host)) {
          return;
        }
      }
      if (TemporaryPlacesRegistry.getInstance(project).getLanguageInjectionSupport().addInjectionInPlace(language, host)) {
        final Processor<PsiLanguageInjectionHost> data = host.getUserData(FIX_KEY);
        String text = StringUtil.escapeXml(language.getDisplayName()) + " was temporarily injected.";
        if (data != null) {
          if (!ApplicationManager.getApplication().isUnitTestMode()) {
            // Smart pointer keeps the host reachable even if the PSI tree changes
            // before the user answers the hint.
            final SmartPsiElementPointer<PsiLanguageInjectionHost> pointer =
              SmartPointerManager.getInstance(project).createSmartPsiElementPointer(host);
            final TextRange range = host.getTextRange();
            HintManager.getInstance().showQuestionHint(editor, text + "<br>Do you want to insert annotation? " + KeymapUtil
              .getFirstKeyboardShortcutText(ActionManager.getInstance().getAction(IdeActions.ACTION_SHOW_INTENTION_ACTIONS)),
                                                       range.getStartOffset(), range.getEndOffset(), new QuestionAction() {
                @Override
                public boolean execute() {
                  return data.process(pointer.getElement());
                }
              });
          }
        }
        else {
          HintManager.getInstance().showInformationHint(editor, text);
        }
      }
    }
    finally {
      if (injectable.getLanguage() != null) {    // no need for reference injection
        FileContentUtil.reparseFiles(project, Collections.<VirtualFile>emptyList(), true);
      }
      else {
        ((PsiModificationTrackerImpl)PsiManager.getInstance(project).getModificationTracker()).incCounter();
        DaemonCodeAnalyzer.getInstance(project).restart();
      }
    }
  }

  // Re-enables an already-known injection for this host in the project
  // configuration; true means nothing more needs to be done.
  private static boolean defaultFunctionalityWorked(final PsiLanguageInjectionHost host, String id) {
    return Configuration.getProjectInstance(host.getProject()).setHostInjectionEnabled(host, Collections.singleton(id), true);
  }

  /**
   * Shows the filterable chooser popup of all injectables, preselecting the
   * last-used one. The onChosen processor runs when the user commits a choice;
   * the chosen id is persisted under LAST_INJECTED_LANGUAGE.
   */
  private static boolean doChooseLanguageToInject(Editor editor, final Processor<Injectable> onChosen) {
    final List<Injectable> injectables = getAllInjectables();

    final JList<Injectable> list = new JBList<>(injectables);
    list.setCellRenderer(new ColoredListCellRenderer<Injectable>() {
      @Override
      protected void customizeCellRenderer(@Nonnull JList<? extends Injectable> list, Injectable value, int index, boolean selected, boolean hasFocus) {
        setIcon(value.getIcon());
        append(value.getDisplayName());
        String description = value.getAdditionalDescription();
        if (description != null) {
          append(description, SimpleTextAttributes.GRAYED_ATTRIBUTES);
        }
      }
    });
    JBPopup popup = new PopupChooserBuilder(list).setItemChoosenCallback(new Runnable() {
      public void run() {
        Injectable value = (Injectable)list.getSelectedValue();
        if (value != null) {
          onChosen.process(value);
          PropertiesComponent.getInstance().setValue(LAST_INJECTED_LANGUAGE, value.getId());
        }
      }
    }).setFilteringEnabled(language -> ((Injectable)language).getDisplayName()).createPopup();
    final String lastInjected = PropertiesComponent.getInstance().getValue(LAST_INJECTED_LANGUAGE);
    if (lastInjected != null) {
      Injectable injectable = ContainerUtil.find(injectables, new Condition<Injectable>() {
        @Override
        public boolean value(Injectable injectable) {
          return lastInjected.equals(injectable.getId());
        }
      });
      list.setSelectedValue(injectable, true);
    }
    popup.showInBestPositionFor(editor);
    return true;
  }

  public boolean startInWriteAction() {
    // The action itself only shows UI; the actual write happens elsewhere.
    return false;
  }

  public static boolean doEditConfigurable(final Project project, final Configurable configurable) {
    return true; //ShowSettingsUtil.getInstance().editConfigurable(project, configurable);
  }
}
apache-2.0
dagnir/aws-sdk-java
aws-java-sdk-cloudfront/src/main/java/com/amazonaws/services/cloudfront/model/OriginCustomHeader.java
7758
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.cloudfront.model;

import java.io.Serializable;

import javax.annotation.Generated;

/**
 * A single custom header — a <code>HeaderName</code>/<code>HeaderValue</code> pair — that CloudFront forwards to your
 * origin for this distribution. See <a
 * href="http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/forward-custom-headers.html">Forwarding
 * Custom Headers to Your Origin (Web Distributions Only)</a> in the <i>Amazon CloudFront Developer Guide</i>.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cloudfront-2017-03-25/OriginCustomHeader" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class OriginCustomHeader implements Serializable, Cloneable {

    /** Name of the header CloudFront forwards to the origin. */
    private String headerName;

    /** Value sent under {@code headerName}. */
    private String headerValue;

    /** Null-tolerant equality helper used by {@link #equals(Object)}. */
    private static boolean sameValue(Object left, Object right) {
        return left == null ? right == null : left.equals(right);
    }

    /**
     * Sets the name of the header that CloudFront forwards to the origin.
     *
     * @param headerName the header name
     */
    public void setHeaderName(String headerName) {
        this.headerName = headerName;
    }

    /**
     * Returns the name of the header that CloudFront forwards to the origin.
     *
     * @return the header name
     */
    public String getHeaderName() {
        return this.headerName;
    }

    /**
     * Fluent variant of {@link #setHeaderName(String)}.
     *
     * @param headerName the header name
     * @return this object, for call chaining
     */
    public OriginCustomHeader withHeaderName(String headerName) {
        setHeaderName(headerName);
        return this;
    }

    /**
     * Sets the value for the header named in the <code>HeaderName</code> field.
     *
     * @param headerValue the header value
     */
    public void setHeaderValue(String headerValue) {
        this.headerValue = headerValue;
    }

    /**
     * Returns the value for the header named in the <code>HeaderName</code> field.
     *
     * @return the header value
     */
    public String getHeaderValue() {
        return this.headerValue;
    }

    /**
     * Fluent variant of {@link #setHeaderValue(String)}.
     *
     * @param headerValue the header value
     * @return this object, for call chaining
     */
    public OriginCustomHeader withHeaderValue(String headerValue) {
        setHeaderValue(headerValue);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     * Only non-null fields are included.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("{");
        if (getHeaderName() != null) {
            text.append("HeaderName: ").append(getHeaderName()).append(",");
        }
        if (getHeaderValue() != null) {
            text.append("HeaderValue: ").append(getHeaderValue());
        }
        return text.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof OriginCustomHeader)) {
            return false;
        }
        OriginCustomHeader that = (OriginCustomHeader) obj;
        return sameValue(getHeaderName(), that.getHeaderName())
                && sameValue(getHeaderValue(), that.getHeaderValue());
    }

    @Override
    public int hashCode() {
        // 31-based accumulation, identical values to the generated SDK code.
        int hash = 1;
        hash = 31 * hash + (getHeaderName() == null ? 0 : getHeaderName().hashCode());
        hash = 31 * hash + (getHeaderValue() == null ? 0 : getHeaderValue().hashCode());
        return hash;
    }

    @Override
    public OriginCustomHeader clone() {
        try {
            return (OriginCustomHeader) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
apache-2.0
cpxiao/AndroidUtils
library/src/main/java/com/cpxiao/androidutils/library/utils/LogUtils.java
4989
package com.cpxiao.androidutils.library.utils; import android.util.Log; import com.cpxiao.androidutils.library.constant.Config; import java.util.Hashtable; /** * LogUtils * * @author cpxiao on 2016/6/12 */ public class LogUtils { private static final boolean DEBUG = Config.DEBUG; private static final String TAG = "CPXIAO"; /** * 设置log等级 */ private static final int LOG_LEVEL = Log.DEBUG; private static Hashtable<String, LogUtils> sLoggerTable = new Hashtable<>(); private String mClassName; private static LogUtils cLog; private static final String CPXIAO = "@cpxiao@ "; /** * cannot be instantiated */ private LogUtils() { throw new UnsupportedOperationException("cannot be instantiated"); } private LogUtils(String name) { mClassName = name; } /** * @param className String * @return LogUtils */ private static LogUtils getLogger(String className) { LogUtils classLogger = sLoggerTable.get(className); if (classLogger == null) { classLogger = new LogUtils(className); sLoggerTable.put(className, classLogger); } return classLogger; } /** * Purpose:Mark user * * @return LogUtils */ public static LogUtils cLog() { if (cLog == null) { cLog = new LogUtils(CPXIAO); } return cLog; } /** * Get The Current Function Name * * @return String */ private String getFunctionName() { StackTraceElement[] sts = Thread.currentThread().getStackTrace(); if (sts == null) { return null; } for (StackTraceElement st : sts) { if (st.isNativeMethod()) { continue; } if (st.getClassName().equals(Thread.class.getName())) { continue; } if (st.getClassName().equals(this.getClass().getName())) { continue; } return mClassName + "[ " + Thread.currentThread().getName() + ": " + st.getFileName() + ":" + st.getLineNumber() + " " + st.getMethodName() + " ]"; } return null; } /** * The Log * Level:i * * @param str Object */ public void i(Object str) { if (DEBUG) { if (LOG_LEVEL <= Log.INFO) { String name = getFunctionName(); String msg = getMsg(name, str); Log.i(TAG, msg); } } } /** * The Log * Level:d * * @param 
str Object */ public void d(Object str) { if (DEBUG) { if (LOG_LEVEL <= Log.DEBUG) { String name = getFunctionName(); String msg = getMsg(name, str); Log.d(TAG, msg); } } } /** * The Log * Level:V * * @param str Object */ public void v(Object str) { if (DEBUG) { if (LOG_LEVEL <= Log.VERBOSE) { String name = getFunctionName(); String msg = getMsg(name, str); Log.v(TAG, msg); } } } /** * The Log * Level:w * * @param str Object */ public void w(Object str) { if (DEBUG) { if (LOG_LEVEL <= Log.WARN) { String name = getFunctionName(); String msg = getMsg(name, str); Log.w(TAG, msg); } } } /** * The Log * Level:e * * @param str Object */ public void e(Object str) { if (DEBUG) { if (LOG_LEVEL <= Log.ERROR) { String name = getFunctionName(); String msg = getMsg(name, str); Log.e(TAG, msg); } } } /** * The Log * Level:e * * @param e Exception */ public void e(Exception e) { if (DEBUG) { if (LOG_LEVEL <= Log.ERROR) { Log.e(TAG, "error", e); } } } /** * The Log * Level:e * * @param log log * @param tr Throwable */ public void e(String log, Throwable tr) { if (DEBUG) { String line = getFunctionName(); Log.e(TAG, "{Thread:" + Thread.currentThread().getName() + "}" + "[" + mClassName + line + ":] " + log + "\n", tr); } } /** * getMsg * * @param name name * @param str str * @return String */ private String getMsg(String name, Object str) { if (str == null) { return name; } String msg; if (name != null) { msg = name + " - " + str.toString(); } else { msg = str.toString(); } return msg; } }
apache-2.0
SAP/hana-native-adapters
WebserviceAdapter/src/sap/ciep/sdi/webserviceadapter/WebserviceRequestHandler.java
2374
package sap.ciep.sdi.webserviceadapter; import java.io.IOException; import java.net.URISyntaxException; import java.util.HashMap; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.impl.DefaultConnectionReuseStrategy; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.log4j.LogManager; import org.apache.log4j.Logger; import com.sap.hana.dp.adapter.sdk.AdapterException; import com.sap.hana.dp.adapter.sdk.Metadata; @SuppressWarnings("restriction") public abstract class WebserviceRequestHandler { protected static Logger logger = LogManager.getLogger("WebserviceRequestHandler"); public final static int MAX_CONNECTIONS_PER_ROUTE=5; public final static int SOCKET_TIMEOUT=60000; public final static int CONNECT_TIMEOUT=3000; public final static int CONN_REQUEST_TIMEOUT=3000; /** * This function performs the request and returns the response * @param func metadata, as sent by the adapter * @param connProps * @return * @throws AdapterException * @throws ClientProtocolException * @throws IOException */ public abstract CloseableHttpResponse exec(Metadata func, HashMap<String, String> connProps) throws AdapterException, ClientProtocolException, IOException, URISyntaxException; private static CloseableHttpClient cli; /** * Returns the default HTTP client with * @return */ public static CloseableHttpClient getDefaultHttpClient(){ if (cli==null) cli=HttpClientBuilder.create() .useSystemProperties() .setConnectionReuseStrategy(new DefaultConnectionReuseStrategy()) .setMaxConnPerRoute(MAX_CONNECTIONS_PER_ROUTE) .setDefaultRequestConfig(getDefaultRequestConfig()) .build(); return cli; } /** * Returns a url as a String * @param func * @return * @throws AdapterException */ public abstract String getURL(Metadata func) throws AdapterException; private static RequestConfig 
getDefaultRequestConfig(){ return RequestConfig.custom() .setConnectionRequestTimeout(CONN_REQUEST_TIMEOUT) .setConnectTimeout(CONNECT_TIMEOUT) .setSocketTimeout(SOCKET_TIMEOUT) .build(); } }
apache-2.0
ledotcom/le-dubbo
dubbo-config/dubbo-config-api/src/main/java/com/alibaba/dubbo/config/AbstractConfig.java
23048
/* * Copyright 1999-2011 Alibaba Group. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.dubbo.config; import java.io.Serializable; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import com.alibaba.dubbo.common.Constants; import com.alibaba.dubbo.common.URL; import com.alibaba.dubbo.common.extension.ExtensionLoader; import com.alibaba.dubbo.common.logger.Logger; import com.alibaba.dubbo.common.logger.LoggerFactory; import com.alibaba.dubbo.common.utils.CollectionUtils; import com.alibaba.dubbo.common.utils.ConfigUtils; import com.alibaba.dubbo.common.utils.ReflectUtils; import com.alibaba.dubbo.common.utils.StringUtils; import com.alibaba.dubbo.config.support.Parameter; /** * 配置解析的工具方法、公共方法 * * @author william.liangf * @export */ public abstract class AbstractConfig implements Serializable { private static final long serialVersionUID = 4267533505537413570L; protected static final Logger logger = LoggerFactory.getLogger(AbstractConfig.class); private static final int MAX_LENGTH = 100; private static final int MAX_PATH_LENGTH = 200; private static final Pattern PATTERN_NAME = Pattern.compile("[\\-._0-9a-zA-Z]+"); private static final Pattern PATTERN_MULTI_NAME = Pattern.compile("[,\\-._0-9a-zA-Z]+"); private static final Pattern PATTERN_METHOD_NAME = Pattern.compile("[a-zA-Z][0-9a-zA-Z]*"); private static final Pattern 
PATTERN_PATH = Pattern.compile("[/\\-$._0-9a-zA-Z]+"); private static final Pattern PATTERN_NAME_HAS_SYMBOL = Pattern.compile("[@:*,/\\-._0-9a-zA-Z]+"); private static final Pattern PATTERN_KEY = Pattern.compile("[*,\\-._0-9a-zA-Z]+"); protected String id; @Parameter(excluded = true) public String getId() { return id; } public void setId(String id) { this.id = id; } private static final Map<String, String> legacyProperties = new HashMap<String, String>(); static { legacyProperties.put("dubbo.protocol.name", "dubbo.service.protocol"); legacyProperties.put("dubbo.protocol.host", "dubbo.service.server.host"); legacyProperties.put("dubbo.protocol.port", "dubbo.service.server.port"); legacyProperties.put("dubbo.protocol.threads", "dubbo.service.max.thread.pool.size"); legacyProperties.put("dubbo.consumer.timeout", "dubbo.service.invoke.timeout"); legacyProperties.put("dubbo.consumer.retries", "dubbo.service.max.retry.providers"); legacyProperties.put("dubbo.consumer.check", "dubbo.service.allow.no.provider"); legacyProperties.put("dubbo.service.url", "dubbo.service.address"); } private static String convertLegacyValue(String key, String value) { if (value != null && value.length() > 0) { if ("dubbo.service.max.retry.providers".equals(key)) { return String.valueOf(Integer.parseInt(value) - 1); } else if ("dubbo.service.allow.no.provider".equals(key)) { return String.valueOf(! Boolean.parseBoolean(value)); } } return value; } protected void appendAnnotation(Class<?> annotationClass, Object annotation) { Method[] methods = annotationClass.getMethods(); for (Method method : methods) { if (method.getDeclaringClass() != Object.class && method.getReturnType() != void.class && method.getParameterTypes().length == 0 && Modifier.isPublic(method.getModifiers()) && ! 
Modifier.isStatic(method.getModifiers())) { try { String property = method.getName(); if ("interfaceClass".equals(property) || "interfaceName".equals(property)) { property = "interface"; } String setter = "set" + property.substring(0, 1).toUpperCase() + property.substring(1); Object value = method.invoke(annotation, new Object[0]); if (value != null && ! value.equals(method.getDefaultValue())) { Class<?> parameterType = ReflectUtils.getBoxedClass(method.getReturnType()); if ("filter".equals(property) || "listener".equals(property)) { parameterType = String.class; value = StringUtils.join((String[]) value, ","); } else if ("parameters".equals(property)) { parameterType = Map.class; value = CollectionUtils.toStringMap((String[]) value); } try { Method setterMethod = getClass().getMethod(setter, new Class<?>[] { parameterType }); setterMethod.invoke(this, new Object[] { value }); } catch (NoSuchMethodException e) { // ignore } } } catch (Throwable e) { logger.error(e.getMessage(), e); } } } } protected static void appendProperties(AbstractConfig config) { if (config == null) { return; } String prefix = "dubbo." + getTagName(config.getClass()) + "."; Method[] methods = config.getClass().getMethods(); for (Method method : methods) { try { String name = method.getName(); if (name.length() > 3 && name.startsWith("set") && Modifier.isPublic(method.getModifiers()) && method.getParameterTypes().length == 1 && isPrimitive(method.getParameterTypes()[0])) { String property = StringUtils.camelToSplitName(name.substring(3, 4).toLowerCase() + name.substring(4), "-"); String value = null; if (config.getId() != null && config.getId().length() > 0) { String pn = prefix + config.getId() + "." + property; value = System.getProperty(pn); if(! StringUtils.isBlank(value)) { logger.info("Use System Property " + pn + " to config dubbo"); } } if (value == null || value.length() == 0) { String pn = prefix + property; value = System.getProperty(pn); if(! 
StringUtils.isBlank(value)) { logger.info("Use System Property " + pn + " to config dubbo"); } } if (value == null || value.length() == 0) { Method getter; try { getter = config.getClass().getMethod("get" + name.substring(3), new Class<?>[0]); } catch (NoSuchMethodException e) { try { getter = config.getClass().getMethod("is" + name.substring(3), new Class<?>[0]); } catch (NoSuchMethodException e2) { getter = null; } } if (getter != null) { if (getter.invoke(config, new Object[0]) == null) { if (config.getId() != null && config.getId().length() > 0) { value = ConfigUtils.getProperty(prefix + config.getId() + "." + property); } if (value == null || value.length() == 0) { value = ConfigUtils.getProperty(prefix + property); } if (value == null || value.length() == 0) { String legacyKey = legacyProperties.get(prefix + property); if (legacyKey != null && legacyKey.length() > 0) { value = convertLegacyValue(legacyKey, ConfigUtils.getProperty(legacyKey)); } } } } } if (value != null && value.length() > 0) { method.invoke(config, new Object[] {convertPrimitive(method.getParameterTypes()[0], value)}); } } } catch (Exception e) { logger.error(e.getMessage(), e); } } } private static String getTagName(Class<?> cls) { String tag = cls.getSimpleName(); for (String suffix : SUFFIXS) { if (tag.endsWith(suffix)) { tag = tag.substring(0, tag.length() - suffix.length()); break; } } tag = tag.toLowerCase(); return tag; } protected static void appendParameters(Map<String, String> parameters, Object config) { appendParameters(parameters, config, null); } @SuppressWarnings("unchecked") protected static void appendParameters(Map<String, String> parameters, Object config, String prefix) { if (config == null) { return; } Method[] methods = config.getClass().getMethods(); for (Method method : methods) { try { String name = method.getName(); if ((name.startsWith("get") || name.startsWith("is")) && ! 
"getClass".equals(name) && Modifier.isPublic(method.getModifiers()) && method.getParameterTypes().length == 0 && isPrimitive(method.getReturnType())) { Parameter parameter = method.getAnnotation(Parameter.class); if (method.getReturnType() == Object.class || parameter != null && parameter.excluded()) { continue; } int i = name.startsWith("get") ? 3 : 2; String prop = StringUtils.camelToSplitName(name.substring(i, i + 1).toLowerCase() + name.substring(i + 1), "."); String key; if (parameter != null && parameter.key() != null && parameter.key().length() > 0) { key = parameter.key(); } else { key = prop; } Object value = method.invoke(config, new Object[0]); String str = String.valueOf(value).trim(); if (value != null && str.length() > 0) { if (parameter != null && parameter.escaped()) { str = URL.encode(str); } if (parameter != null && parameter.append()) { String pre = (String)parameters.get(Constants.DEFAULT_KEY + "." + key); if (pre != null && pre.length() > 0) { str = pre + "," + str; } pre = (String)parameters.get(key); if (pre != null && pre.length() > 0) { str = pre + "," + str; } } if (prefix != null && prefix.length() > 0) { key = prefix + "." + key; } parameters.put(key, str); } else if (parameter != null && parameter.required()) { throw new IllegalStateException(config.getClass().getSimpleName() + "." + key + " == null"); } } else if ("getParameters".equals(name) && Modifier.isPublic(method.getModifiers()) && method.getParameterTypes().length == 0 && method.getReturnType() == Map.class) { Map<String, String> map = (Map<String, String>) method.invoke(config, new Object[0]); if (map != null && map.size() > 0) { String pre = (prefix != null && prefix.length() > 0 ? prefix + "." 
: ""); for (Map.Entry<String, String> entry : map.entrySet()) { parameters.put(pre + entry.getKey().replace('-', '.'), entry.getValue()); } } } } catch (Exception e) { throw new IllegalStateException(e.getMessage(), e); } } } protected static void appendAttributes(Map<Object, Object> parameters, Object config) { appendAttributes(parameters, config, null); } protected static void appendAttributes(Map<Object, Object> parameters, Object config, String prefix) { if (config == null) { return; } Method[] methods = config.getClass().getMethods(); for (Method method : methods) { try { String name = method.getName(); if ((name.startsWith("get") || name.startsWith("is")) && ! "getClass".equals(name) && Modifier.isPublic(method.getModifiers()) && method.getParameterTypes().length == 0 && isPrimitive(method.getReturnType())) { Parameter parameter = method.getAnnotation(Parameter.class); if (parameter == null || !parameter.attribute()) continue; String key; if (parameter != null && parameter.key() != null && parameter.key().length() > 0) { key = parameter.key(); } else { int i = name.startsWith("get") ? 3 : 2; key = name.substring(i, i + 1).toLowerCase() + name.substring(i + 1); } Object value = method.invoke(config, new Object[0]); if (value != null) { if (prefix != null && prefix.length() > 0) { key = prefix + "." + key; } parameters.put(key, value); } } } catch (Exception e) { throw new IllegalStateException(e.getMessage(), e); } } } private static boolean isPrimitive(Class<?> type) { return type.isPrimitive() || type == String.class || type == Character.class || type == Boolean.class || type == Byte.class || type == Short.class || type == Integer.class || type == Long.class || type == Float.class || type == Double.class || type == Object.class; } private static Object convertPrimitive(Class<?> type, String value) { if (type == char.class || type == Character.class) { return value.length() > 0 ? 
value.charAt(0) : '\0'; } else if (type == boolean.class || type == Boolean.class) { return Boolean.valueOf(value); } else if (type == byte.class || type == Byte.class) { return Byte.valueOf(value); } else if (type == short.class || type == Short.class) { return Short.valueOf(value); } else if (type == int.class || type == Integer.class) { return Integer.valueOf(value); } else if (type == long.class || type == Long.class) { return Long.valueOf(value); } else if (type == float.class || type == Float.class) { return Float.valueOf(value); } else if (type == double.class || type == Double.class) { return Double.valueOf(value); } return value; } protected static void checkExtension(Class<?> type, String property, String value) { checkName(property, value); if (value != null && value.length() > 0 && ! ExtensionLoader.getExtensionLoader(type).hasExtension(value)) { throw new IllegalStateException("No such extension " + value + " for " + property + "/" + type.getName()); } } protected static void checkMultiExtension(Class<?> type, String property, String value) { checkMultiName(property, value); if (value != null && value.length() > 0) { String[] values = value.split("\\s*[,]+\\s*"); for (String v : values) { if (v.startsWith(Constants.REMOVE_VALUE_PREFIX)) { v = v.substring(1); } if (Constants.DEFAULT_KEY.equals(v)) { continue; } if (! 
ExtensionLoader.getExtensionLoader(type).hasExtension(v)) { throw new IllegalStateException("No such extension " + v + " for " + property + "/" + type.getName()); } } } } protected static void checkLength(String property, String value) { checkProperty(property, value, MAX_LENGTH, null); } protected static void checkPathLength(String property, String value) { checkProperty(property, value, MAX_PATH_LENGTH, null); } protected static void checkName(String property, String value) { checkProperty(property, value, MAX_LENGTH, PATTERN_NAME); } protected static void checkNameHasSymbol(String property, String value) { checkProperty(property, value, MAX_LENGTH, PATTERN_NAME_HAS_SYMBOL); } protected static void checkKey(String property, String value) { checkProperty(property, value, MAX_LENGTH, PATTERN_KEY); } protected static void checkMultiName(String property, String value) { checkProperty(property, value, MAX_LENGTH, PATTERN_MULTI_NAME); } protected static void checkPathName(String property, String value) { checkProperty(property, value, MAX_PATH_LENGTH, PATTERN_PATH); } protected static void checkMethodName(String property, String value) { checkProperty(property, value, MAX_LENGTH, PATTERN_METHOD_NAME); } protected static void checkParameterName(Map<String, String> parameters) { if (parameters == null || parameters.size() == 0) { return; } for (Map.Entry<String, String> entry : parameters.entrySet()) { //change by tony.chenl parameter value maybe has colon.for example napoli address checkNameHasSymbol(entry.getKey(), entry.getValue()); } } protected static void checkProperty(String property, String value, int maxlength, Pattern pattern) { if (value == null || value.length() == 0) { return; } if(value.length() > maxlength){ throw new IllegalStateException("Invalid " + property + "=\"" + value + "\" is longer than " + maxlength); } if (pattern != null) { Matcher matcher = pattern.matcher(value); if(! 
matcher.matches()) { throw new IllegalStateException("Invalid " + property + "=\"" + value + "\" contain illegal charactor, only digit, letter, '-', '_' and '.' is legal."); } } } static { Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { public void run() { if (logger.isInfoEnabled()) { logger.info("Run shutdown hook now."); } ProtocolConfig.destroyAll(); } }, "DubboShutdownHook")); } private static final String[] SUFFIXS = new String[] {"Config", "Bean"}; @Override public String toString() { try { StringBuilder buf = new StringBuilder(); buf.append("<dubbo:"); buf.append(getTagName(getClass())); Method[] methods = getClass().getMethods(); for (Method method : methods) { try { String name = method.getName(); if ((name.startsWith("get") || name.startsWith("is")) && ! "getClass".equals(name) && ! "get".equals(name) && ! "is".equals(name) && Modifier.isPublic(method.getModifiers()) && method.getParameterTypes().length == 0 && isPrimitive(method.getReturnType())) { int i = name.startsWith("get") ? 3 : 2; String key = name.substring(i, i + 1).toLowerCase() + name.substring(i + 1); Object value = method.invoke(this, new Object[0]); if (value != null) { buf.append(" "); buf.append(key); buf.append("=\""); buf.append(value); buf.append("\""); } } } catch (Exception e) { logger.warn(e.getMessage(), e); } } buf.append(" />"); return buf.toString(); } catch (Throwable t) { // 防御性容错 logger.warn(t.getMessage(), t); return super.toString(); } } }
apache-2.0
googleapis/google-api-java-client-services
clients/google-api-services-playcustomapp/v1/1.27.0/com/google/api/services/playcustomapp/Playcustomapp.java
15972
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.playcustomapp; /** * Service definition for Playcustomapp (v1). * * <p> * An API to publish custom Android apps. * </p> * * <p> * For more information about this service, see the * <a href="https://developers.google.com/android/work/play/custom-app-api" target="_blank">API Documentation</a> * </p> * * <p> * This service uses {@link PlaycustomappRequestInitializer} to initialize global parameters via its * {@link Builder}. * </p> * * @since 1.3 * @author Google, Inc. */ @SuppressWarnings("javadoc") public class Playcustomapp extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient { // Note: Leave this static initializer at the top of the file. static { com.google.api.client.util.Preconditions.checkState( com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 && com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15, "You are currently running with version %s of google-api-client. " + "You need at least version 1.15 of google-api-client to run version " + "1.27.0 of the Google Play Custom App Publishing API library.", com.google.api.client.googleapis.GoogleUtils.VERSION); } /** * The default encoded root URL of the service. This is determined when the library is generated * and normally should not be changed. 
* * @since 1.7 */ public static final String DEFAULT_ROOT_URL = "https://www.googleapis.com/"; /** * The default encoded service path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_SERVICE_PATH = "playcustomapp/v1/accounts/"; /** * The default encoded batch path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.23 */ public static final String DEFAULT_BATCH_PATH = "batch/playcustomapp/v1"; /** * The default encoded base URL of the service. This is determined when the library is generated * and normally should not be changed. */ public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH; /** * Constructor. * * <p> * Use {@link Builder} if you need to specify any of the optional parameters. * </p> * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Playcustomapp(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { this(new Builder(transport, 
jsonFactory, httpRequestInitializer)); } /** * @param builder builder */ Playcustomapp(Builder builder) { super(builder); } @Override protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException { super.initialize(httpClientRequest); } /** * An accessor for creating requests from the Accounts collection. * * <p>The typical use is:</p> * <pre> * {@code Playcustomapp playcustomapp = new Playcustomapp(...);} * {@code Playcustomapp.Accounts.List request = playcustomapp.accounts().list(parameters ...)} * </pre> * * @return the resource collection */ public Accounts accounts() { return new Accounts(); } /** * The "accounts" collection of methods. */ public class Accounts { /** * An accessor for creating requests from the CustomApps collection. * * <p>The typical use is:</p> * <pre> * {@code Playcustomapp playcustomapp = new Playcustomapp(...);} * {@code Playcustomapp.CustomApps.List request = playcustomapp.customApps().list(parameters ...)} * </pre> * * @return the resource collection */ public CustomApps customApps() { return new CustomApps(); } /** * The "customApps" collection of methods. */ public class CustomApps { /** * Create and publish a new custom app. * * Create a request for the method "customApps.create". * * This request holds the parameters needed by the playcustomapp server. After setting any optional * parameters, call the {@link Create#execute()} method to invoke the remote operation. * * @param account Developer account ID. * @param content the {@link com.google.api.services.playcustomapp.model.CustomApp} * @return the request */ public Create create(java.lang.Long account, com.google.api.services.playcustomapp.model.CustomApp content) throws java.io.IOException { Create result = new Create(account, content); initialize(result); return result; } /** * Create and publish a new custom app. * * Create a request for the method "customApps.create". 
* * This request holds the parameters needed by the the playcustomapp server. After setting any * optional parameters, call the {@link Create#execute()} method to invoke the remote operation. * * <p> * This method should be used for uploading media content. * </p> * * @param account Developer account ID. * @param content the {@link com.google.api.services.playcustomapp.model.CustomApp} media metadata or {@code null} if none * @param mediaContent The media HTTP content or {@code null} if none. * @return the request * @throws java.io.IOException if the initialization of the request fails */ public Create create(java.lang.Long account, com.google.api.services.playcustomapp.model.CustomApp content, com.google.api.client.http.AbstractInputStreamContent mediaContent) throws java.io.IOException { Create result = new Create(account, content, mediaContent); initialize(result); return result; } public class Create extends PlaycustomappRequest<com.google.api.services.playcustomapp.model.CustomApp> { private static final String REST_PATH = "{account}/customApps"; /** * Create and publish a new custom app. * * Create a request for the method "customApps.create". * * This request holds the parameters needed by the the playcustomapp server. After setting any * optional parameters, call the {@link Create#execute()} method to invoke the remote operation. * <p> {@link * Create#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * @param account Developer account ID. 
* @param content the {@link com.google.api.services.playcustomapp.model.CustomApp} * @since 1.13 */ protected Create(java.lang.Long account, com.google.api.services.playcustomapp.model.CustomApp content) { super(Playcustomapp.this, "POST", REST_PATH, content, com.google.api.services.playcustomapp.model.CustomApp.class); this.account = com.google.api.client.util.Preconditions.checkNotNull(account, "Required parameter account must be specified."); } /** * Create and publish a new custom app. * * Create a request for the method "customApps.create". * * This request holds the parameters needed by the the playcustomapp server. After setting any * optional parameters, call the {@link Create#execute()} method to invoke the remote operation. * <p> {@link * Create#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * <p> * This constructor should be used for uploading media content. * </p> * * @param account Developer account ID. * @param content the {@link com.google.api.services.playcustomapp.model.CustomApp} media metadata or {@code null} if none * @param mediaContent The media HTTP content or {@code null} if none. 
* @since 1.13 */ protected Create(java.lang.Long account, com.google.api.services.playcustomapp.model.CustomApp content, com.google.api.client.http.AbstractInputStreamContent mediaContent) { super(Playcustomapp.this, "POST", "/upload/" + getServicePath() + REST_PATH, content, com.google.api.services.playcustomapp.model.CustomApp.class); this.account = com.google.api.client.util.Preconditions.checkNotNull(account, "Required parameter account must be specified."); initializeMediaUpload(mediaContent); } @Override public Create setAlt(java.lang.String alt) { return (Create) super.setAlt(alt); } @Override public Create setFields(java.lang.String fields) { return (Create) super.setFields(fields); } @Override public Create setKey(java.lang.String key) { return (Create) super.setKey(key); } @Override public Create setOauthToken(java.lang.String oauthToken) { return (Create) super.setOauthToken(oauthToken); } @Override public Create setPrettyPrint(java.lang.Boolean prettyPrint) { return (Create) super.setPrettyPrint(prettyPrint); } @Override public Create setQuotaUser(java.lang.String quotaUser) { return (Create) super.setQuotaUser(quotaUser); } @Override public Create setUserIp(java.lang.String userIp) { return (Create) super.setUserIp(userIp); } /** Developer account ID. */ @com.google.api.client.util.Key private java.lang.Long account; /** Developer account ID. */ public java.lang.Long getAccount() { return account; } /** Developer account ID. */ public Create setAccount(java.lang.Long account) { this.account = account; return this; } @Override public Create set(String parameterName, Object value) { return (Create) super.set(parameterName, value); } } } } /** * Builder for {@link Playcustomapp}. * * <p> * Implementation is not thread-safe. * </p> * * @since 1.3.0 */ public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder { /** * Returns an instance of a new builder. 
* * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { super( transport, jsonFactory, DEFAULT_ROOT_URL, DEFAULT_SERVICE_PATH, httpRequestInitializer, false); setBatchPath(DEFAULT_BATCH_PATH); } /** Builds a new instance of {@link Playcustomapp}. 
*/ @Override public Playcustomapp build() { return new Playcustomapp(this); } @Override public Builder setRootUrl(String rootUrl) { return (Builder) super.setRootUrl(rootUrl); } @Override public Builder setServicePath(String servicePath) { return (Builder) super.setServicePath(servicePath); } @Override public Builder setBatchPath(String batchPath) { return (Builder) super.setBatchPath(batchPath); } @Override public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { return (Builder) super.setHttpRequestInitializer(httpRequestInitializer); } @Override public Builder setApplicationName(String applicationName) { return (Builder) super.setApplicationName(applicationName); } @Override public Builder setSuppressPatternChecks(boolean suppressPatternChecks) { return (Builder) super.setSuppressPatternChecks(suppressPatternChecks); } @Override public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) { return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks); } @Override public Builder setSuppressAllChecks(boolean suppressAllChecks) { return (Builder) super.setSuppressAllChecks(suppressAllChecks); } /** * Set the {@link PlaycustomappRequestInitializer}. * * @since 1.12 */ public Builder setPlaycustomappRequestInitializer( PlaycustomappRequestInitializer playcustomappRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(playcustomappRequestInitializer); } @Override public Builder setGoogleClientRequestInitializer( com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer); } } }
apache-2.0
emil-wcislo/sbql4j8
sbql4j8/src/test/openjdk/tools/javac/processing/errors/EnsureAnnotationTypeMismatchException/Processor.java
3588
/* * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ import java.io.IOException; import java.io.Writer; import java.lang.annotation.*; import java.util.Set; import javax.annotation.processing.*; import javax.lang.model.element.*; import javax.lang.model.util.ElementFilter; import javax.tools.*; import sbql4j8.com.sun.tools.javac.util.Assert; public class Processor extends JavacTestingAbstractProcessor { private boolean seenGenerated; @Override public boolean process(Set<? 
extends TypeElement> annotations, RoundEnvironment roundEnv) { for (Element e : roundEnv.getElementsAnnotatedWith(Gen.class)) { Gen gen = e.getAnnotation(Gen.class); try { JavaFileObject source = processingEnv.getFiler().createSourceFile(gen.fileName()); try (Writer out = source.openWriter()) { out.write(gen.content()); } } catch (IOException ex) { throw new IllegalStateException(ex); } } TypeElement generated = processingEnv.getElementUtils().getTypeElement("Generated"); if (generated != null) { Check check = ElementFilter.methodsIn(generated.getEnclosedElements()).get(0).getAnnotation(Check.class); checkCorrectException(check::classValue, "java.lang.Class<java.lang.String>"); checkCorrectException(check::intConstValue, "boolean"); checkCorrectException(check::enumValue, "java.lang.String"); checkCorrectException(check::incorrectAnnotationValue, "java.lang.Deprecated"); checkCorrectException(check::incorrectArrayValue, "<any>"); checkCorrectException(check::incorrectClassValue, "<any>"); seenGenerated = true; } if (roundEnv.processingOver() && !seenGenerated) { Assert.error("Did not see the generated class!"); } return true; } private static void checkCorrectException(Runnable runCheck, String expectedType) { try { runCheck.run(); Assert.check(false); //Should not reach here } catch (AnnotationTypeMismatchException ex) { Assert.check(expectedType.equals(ex.foundType()), ex.foundType()); } } } @interface Gen { String fileName(); String content(); } @interface Check { Class<? extends Number> classValue(); int intConstValue(); E enumValue(); int incorrectAnnotationValue(); int incorrectArrayValue(); Class<?> incorrectClassValue(); } enum E { A; }
apache-2.0
gidons/chorussync
chorussync-lib/src/main/java/org/searchordsmen/chorussync/lib/VoicePart.java
413
package org.searchordsmen.chorussync.lib; import lombok.AllArgsConstructor; import lombok.Getter; @AllArgsConstructor @Getter public enum VoicePart { ALL("All", "all"), TENOR("Tenor", "tenor"), LEAD("Lead", "lead"), BARI("Bari", "bari"), BASS("Bass", "bass"), NONE("None", "none"); private final String displayName; private final String trackFilePart; }
apache-2.0