gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.interpreter; import java.net.URL; import java.util.List; import java.util.Properties; import org.apache.zeppelin.scheduler.Scheduler; /** * Add to the classpath interpreters. * */ public class ClassloaderInterpreter extends Interpreter implements WrappedInterpreter { private ClassLoader cl; private Interpreter intp; public ClassloaderInterpreter(Interpreter intp, ClassLoader cl) { super(new Properties()); this.cl = cl; this.intp = intp; } @Override public Interpreter getInnerInterpreter() { return intp; } public ClassLoader getClassloader() { return cl; } @Override public InterpreterResult interpret(String st, InterpreterContext context) { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { return intp.interpret(st, context); } catch (InterpreterException e) { throw e; } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public void open() { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { 
intp.open(); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public void close() { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { intp.close(); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public void cancel(InterpreterContext context) { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { intp.cancel(context); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public FormType getFormType() { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { return intp.getFormType(); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public int getProgress(InterpreterContext context) { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { return intp.getProgress(context); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public Scheduler getScheduler() { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { return intp.getScheduler(); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = 
Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public List<String> completion(String buf, int cursor) { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { return intp.completion(buf, cursor); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public String getClassName() { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { return intp.getClassName(); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public void setInterpreterGroup(InterpreterGroup interpreterGroup) { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { intp.setInterpreterGroup(interpreterGroup); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public InterpreterGroup getInterpreterGroup() { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { return intp.getInterpreterGroup(); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public void setClassloaderUrls(URL [] urls) { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { intp.setClassloaderUrls(urls); } catch (Exception e) { throw new InterpreterException(e); } finally { cl 
= Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public URL [] getClassloaderUrls() { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { return intp.getClassloaderUrls(); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public void setProperty(Properties property) { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { intp.setProperty(property); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public Properties getProperty() { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { return intp.getProperty(); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } @Override public String getProperty(String key) { ClassLoader oldcl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(cl); try { return intp.getProperty(key); } catch (Exception e) { throw new InterpreterException(e); } finally { cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(oldcl); } } }
/*
 * Copyright (c) 2010-2017 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.evolveum.midpoint.web.page.admin.workflow.dto;

import com.evolveum.midpoint.gui.api.util.WebComponentUtil;
import com.evolveum.midpoint.gui.api.util.WebModelServiceUtils;
import com.evolveum.midpoint.model.api.ModelAuthorizationAction;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.query.ObjectPaging;
import com.evolveum.midpoint.prism.query.ObjectQuery;
import com.evolveum.midpoint.prism.query.builder.QueryBuilder;
import com.evolveum.midpoint.prism.query.builder.S_FilterEntryOrEmpty;
import com.evolveum.midpoint.schema.GetOperationOptions;
import com.evolveum.midpoint.schema.SelectorOptions;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.security.api.MidPointPrincipal;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.util.exception.*;
import com.evolveum.midpoint.util.logging.LoggingUtils;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.web.component.data.BaseSortableDataProvider;
import com.evolveum.midpoint.web.page.admin.workflow.WorkItemsPageType;
import com.evolveum.midpoint.web.page.error.PageError;
import com.evolveum.midpoint.web.security.SecurityUtils;
import com.evolveum.midpoint.wf.util.QueryUtils;
import com.evolveum.midpoint.xml.ns._public.common.common_3.OtherPrivilegesLimitationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.WfContextType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.WorkItemType;
import org.apache.wicket.Component;
import org.apache.wicket.RestartResponseException;
import org.apache.wicket.model.IModel;

import java.util.Collection;
import java.util.Iterator;
import java.util.List;

import static com.evolveum.midpoint.gui.api.util.WebComponentUtil.isAuthorized;
import static com.evolveum.midpoint.gui.api.util.WebComponentUtil.safeLongToInteger;
import static com.evolveum.midpoint.prism.PrismConstants.T_PARENT;
import static com.evolveum.midpoint.prism.query.OrderDirection.DESCENDING;
import static com.evolveum.midpoint.xml.ns._public.common.common_3.WorkItemType.F_ASSIGNEE_REF;
import static com.evolveum.midpoint.xml.ns._public.common.common_3.WorkItemType.F_CREATE_TIMESTAMP;

/**
 * Sortable data provider that lists workflow work items ({@link WorkItemType})
 * for the work-item pages. The set of items returned depends on
 * {@link WorkItemsPageType}: all items, claimable items, items allocated to the
 * current user, or items viewed on behalf of a donor via power of attorney.
 *
 * @author lazyman
 */
public class WorkItemDtoProvider extends BaseSortableDataProvider<WorkItemDto> {

    private static final transient Trace LOGGER = TraceManager.getTrace(WorkItemDtoProvider.class);

    // Operation-result keys are prefixed with the fully qualified class name,
    // following the midPoint convention for OperationResult naming.
    private static final String DOT_CLASS = WorkItemDtoProvider.class.getName() + ".";
    private static final String OPERATION_LIST_ITEMS = DOT_CLASS + "listItems";
    private static final String OPERATION_COUNT_ITEMS = DOT_CLASS + "countItems";

    // Which work-item view this provider backs (ALL, CLAIMABLE, ATTORNEY, ...).
    private WorkItemsPageType workItemsPageType;
    // Donor user whose privileges are assumed when the page type is ATTORNEY.
    private IModel<PrismObject<UserType>> donorModel;

    public WorkItemDtoProvider(Component component, WorkItemsPageType workItemsPageType,
            IModel<PrismObject<UserType>> donorModel) {
        super(component);
        this.workItemsPageType = workItemsPageType;
        this.donorModel = donorModel;
    }

    /**
     * Returns the OID of the logged-in principal, or the literal string
     * "Unknown" when no principal is available (so queries simply match
     * nothing rather than failing).
     */
    private String currentUserOid() {
        MidPointPrincipal principal = SecurityUtils.getPrincipalUser();
        if (principal == null) {
            return "Unknown";
        }
        return principal.getOid();
    }

    /**
     * Wraps the superclass iteration so that, for the ATTORNEY view, the
     * donor's power of attorney is assumed before querying and dropped again
     * afterwards — even if iteration fails.
     */
    @Override
    public Iterator<? extends WorkItemDto> iterator(long first, long count) {
        assumePowerOfAttorneyIfRequested();
        try {
            return super.iterator(first, count);
        } finally {
            dropPowerOfAttorneyIfRequested();
        }
    }

    /** Same assume/drop bracketing as {@link #iterator(long, long)}, for counting. */
    @Override
    public long size() {
        assumePowerOfAttorneyIfRequested();
        try {
            return super.size();
        } finally {
            dropPowerOfAttorneyIfRequested();
        }
    }

    // Switches the security context to act on behalf of the donor, but only
    // for the ATTORNEY page type; a no-op otherwise.
    private void assumePowerOfAttorneyIfRequested() {
        if (workItemsPageType == WorkItemsPageType.ATTORNEY) {
            WebModelServiceUtils.assumePowerOfAttorney(donorModel.getObject(), getModelInteractionService(),
                    getTaskManager(), null);
        }
    }

    // Reverts the security context switched by assumePowerOfAttorneyIfRequested().
    private void dropPowerOfAttorneyIfRequested() {
        if (workItemsPageType == WorkItemsPageType.ATTORNEY) {
            WebModelServiceUtils.dropPowerOfAttorney(getModelInteractionService(), getTaskManager(), null);
        }
    }

    /**
     * Fetches one page of work items and converts them to {@link WorkItemDto}s.
     * A failure to convert a single item is logged and recorded in the
     * operation result but does not abort the listing of the remaining items.
     */
    @Override
    public Iterator<? extends WorkItemDto> internalIterator(long first, long count) {
        getAvailableData().clear();
        Task task = getTaskManager().createTaskInstance();
        OperationResult result = new OperationResult(OPERATION_LIST_ITEMS);
        try {
            ObjectQuery query = createQuery(first, count, result);
            // Resolve the references the DTO needs up front (assignee, and the
            // requester/target of the parent workflow context).
            Collection<SelectorOptions<GetOperationOptions>> options = GetOperationOptions.resolveItemsNamed(
                    new ItemPath(F_ASSIGNEE_REF),
                    new ItemPath(T_PARENT, WfContextType.F_OBJECT_REF),
                    new ItemPath(T_PARENT, WfContextType.F_TARGET_REF));
            List<WorkItemType> items = getModel().searchContainers(WorkItemType.class, query, options, task, result);
            for (WorkItemType item : items) {
                try {
                    getAvailableData().add(new WorkItemDto(item));
                } catch (Exception e) {
                    LoggingUtils.logUnexpectedException(LOGGER, "Unhandled exception when listing work item {}", e, item);
                    result.recordFatalError("Couldn't list work item.", e);
                }
            }
        } catch (CommonException | RuntimeException ex) {
            LoggingUtils.logUnexpectedException(LOGGER, "Unhandled exception when listing work items", ex);
            result.recordFatalError("Couldn't list work items.", ex);
        }
        if (result.isUnknown()) {
            result.computeStatus();
        }
        if (!WebComponentUtil.isSuccessOrHandledError(result)) {
            handleError(result);
        }
        return getAvailableData().iterator();
    }

    // Shows the failed result on the current page and redirects to the error page.
    private void handleError(OperationResult result) {
        getPage().showResult(result);
        throw new RestartResponseException(PageError.class);
    }

    /**
     * Builds the page-scoped query: the base filter from
     * {@link #createQuery(OperationResult)} plus paging sorted by creation
     * timestamp, newest first.
     */
    private ObjectQuery createQuery(long first, long count, OperationResult result) throws SchemaException {
        ObjectQuery query = createQuery(result);
        query.setPaging(ObjectPaging.createPaging(safeLongToInteger(first), safeLongToInteger(count),
                F_CREATE_TIMESTAMP, DESCENDING));
        return query;
    }

    /**
     * Builds the base filter according to the page type and authorization:
     * an empty (match-all) query for ALL when the user may read all work items,
     * a group-membership filter for CLAIMABLE, and otherwise a filter for items
     * assigned to the current user.
     */
    private ObjectQuery createQuery(OperationResult result) throws SchemaException {
        boolean authorizedToSeeAll = isAuthorized(ModelAuthorizationAction.READ_ALL_WORK_ITEMS.getUrl());
        S_FilterEntryOrEmpty q = QueryBuilder.queryFor(WorkItemType.class, getPrismContext());
        if (WorkItemsPageType.ALL.equals(workItemsPageType) && authorizedToSeeAll) {
            return q.build();
        } else if (WorkItemsPageType.CLAIMABLE.equals(workItemsPageType)) {
            return QueryUtils.filterForGroups(q, currentUserOid(), getRepositoryService(), getRelationRegistry(),
                    result).build();
        } else {
            // not authorized to see all => sees only allocated to him (not quite what is expected, but sufficient for the time being)
            return QueryUtils.filterForAssignees(q, SecurityUtils.getPrincipalUser(),
                    OtherPrivilegesLimitationType.F_APPROVAL_WORK_ITEMS, getRelationRegistry()).build();
        }
    }

    /**
     * Counts the work items matching the unpaged base query. Any failure is
     * rethrown as a {@link SystemException}, preserving the cause.
     */
    @Override
    protected int internalSize() {
        int count;
        Task task = getTaskManager().createTaskInstance();
        OperationResult result = new OperationResult(OPERATION_COUNT_ITEMS);
        try {
            ObjectQuery query = createQuery(result);
            count = getModel().countContainers(WorkItemType.class, query, null, task, result);
        } catch (SchemaException | SecurityViolationException | ObjectNotFoundException | ExpressionEvaluationException
                | RuntimeException | CommunicationException | ConfigurationException e) {
            throw new SystemException("Couldn't count work items: " + e.getMessage(), e);
        }
        if (result.isUnknown()) {
            result.computeStatus();
        }
        if (!WebComponentUtil.isSuccessOrHandledError(result)) {
            handleError(result);
        }
        return count;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.replication.regionserver;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.UUID;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.replication.BaseReplicationEndpoint;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.ReplicationTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALProvider;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Testcase for HBASE-20624.
 *
 * The scenario: a replication endpoint whose peer UUID is initially null
 * (simulating a peer cluster that is not reachable yet). The test verifies
 * that replication sources are still created for every region server, and
 * that once the UUID becomes available replication proceeds and the edit is
 * delivered to the local WAL-file "peer".
 */
@Category({ ReplicationTests.class, MediumTests.class })
public class TestRaceWhenCreatingReplicationSource {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestRaceWhenCreatingReplicationSource.class);

  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();

  private static String PEER_ID = "1";

  private static TableName TABLE_NAME = TableName.valueOf("race");

  private static byte[] CF = Bytes.toBytes("CF");

  private static byte[] CQ = Bytes.toBytes("CQ");

  private static FileSystem FS;

  // WAL file that acts as the "remote cluster": the endpoint appends
  // replicated entries here so the test can read them back.
  private static Path LOG_PATH;

  private static WALProvider.Writer WRITER;

  // While true, the endpoint reports a null peer UUID — this is what
  // provokes the race from HBASE-20624. volatile: flipped by the test
  // thread, read by replication threads.
  private static volatile boolean NULL_UUID = true;

  /**
   * Replication endpoint that "replicates" by appending the entries to a
   * local WAL file. Its peer UUID is null until the test flips NULL_UUID.
   */
  public static final class LocalReplicationEndpoint extends BaseReplicationEndpoint {

    private static final UUID PEER_UUID = UTIL.getRandomUUID();

    @Override
    public UUID getPeerUUID() {
      if (NULL_UUID) {
        return null;
      } else {
        return PEER_UUID;
      }
    }

    @Override
    public boolean replicate(ReplicateContext replicateContext) {
      // Serialize access to the shared writer: multiple sources may
      // replicate concurrently.
      synchronized (WRITER) {
        try {
          for (Entry entry : replicateContext.getEntries()) {
            WRITER.append(entry);
          }
          WRITER.sync(false);
        } catch (IOException e) {
          throw new UncheckedIOException(e);
        }
      }
      return true;
    }

    @Override
    public void start() {
      startAsync();
    }

    @Override
    public void stop() {
      stopAsync();
    }

    @Override
    protected void doStart() {
      notifyStarted();
    }

    @Override
    protected void doStop() {
      notifyStopped();
    }

    @Override
    public boolean canReplicateToSameCluster() {
      return true;
    }
  }

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    UTIL.getConfiguration().set(WALFactory.WAL_PROVIDER, "multiwal");
    // make sure that we will create a new group for the table
    UTIL.getConfiguration().setInt("hbase.wal.regiongrouping.numgroups", 8);
    UTIL.startMiniCluster(3);
    Path dir = UTIL.getDataTestDirOnTestFS();
    FS = UTIL.getTestFileSystem();
    LOG_PATH = new Path(dir, "replicated");
    WRITER = WALFactory.createWALWriter(FS, LOG_PATH, UTIL.getConfiguration());
    // Register the peer before any table exists, with the endpoint still
    // reporting a null UUID.
    UTIL.getAdmin().addReplicationPeer(PEER_ID,
      ReplicationPeerConfig.newBuilder().setClusterKey("127.0.0.1:2181:/hbase")
        .setReplicationEndpointImpl(LocalReplicationEndpoint.class.getName()).build(),
      true);
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    UTIL.shutdownMiniCluster();
  }

  @Test
  public void testRace() throws Exception {
    // Phase 1: despite the null peer UUID, every region server must end up
    // with a fully initialized replication source (the HBASE-20624 fix).
    UTIL.waitFor(30000, new ExplainingPredicate<Exception>() {

      @Override
      public boolean evaluate() throws Exception {
        for (RegionServerThread t : UTIL.getMiniHBaseCluster().getRegionServerThreads()) {
          ReplicationSource source =
            (ReplicationSource) ((Replication) t.getRegionServer().getReplicationSourceService())
              .getReplicationManager().getSource(PEER_ID);
          if (source == null || source.getReplicationEndpoint() == null) {
            return false;
          }
        }
        return true;
      }

      @Override
      public String explainFailure() throws Exception {
        return "Replication source has not been initialized yet";
      }
    });
    // Phase 2: create a replicated table and write one row.
    UTIL.getAdmin().createTable(
      TableDescriptorBuilder.newBuilder(TABLE_NAME).setColumnFamily(ColumnFamilyDescriptorBuilder
        .newBuilder(CF).setScope(HConstants.REPLICATION_SCOPE_GLOBAL).build()).build());
    UTIL.waitTableAvailable(TABLE_NAME);
    try (Table table = UTIL.getConnection().getTable(TABLE_NAME)) {
      table.put(new Put(Bytes.toBytes(1)).addColumn(CF, CQ, Bytes.toBytes(1)));
    }
    // Phase 3: make the peer UUID available and wait for the entry to show
    // up in the local "replicated" WAL file.
    NULL_UUID = false;
    UTIL.waitFor(30000, new ExplainingPredicate<Exception>() {

      @Override
      public boolean evaluate() throws Exception {
        // The writer may still be mid-sync; treat an IOException as
        // "not there yet" rather than a failure.
        try (WAL.Reader reader = WALFactory.createReader(FS, LOG_PATH, UTIL.getConfiguration())) {
          return reader.next() != null;
        } catch (IOException e) {
          return false;
        }
      }

      @Override
      public String explainFailure() throws Exception {
        return "Replication has not catched up";
      }
    });
    // Phase 4: verify the replicated cell is exactly the one we wrote.
    try (WAL.Reader reader = WALFactory.createReader(FS, LOG_PATH, UTIL.getConfiguration())) {
      Cell cell = reader.next().getEdit().getCells().get(0);
      assertEquals(1, Bytes.toInt(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
      assertArrayEquals(CF, CellUtil.cloneFamily(cell));
      assertArrayEquals(CQ, CellUtil.cloneQualifier(cell));
      assertEquals(1,
        Bytes.toInt(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
    }
  }
}
/* Copyright (c) 2008, Jared Crapo All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of jactiveresource.org nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.jactiveresource; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.http.HttpEntity; import org.apache.http.HttpException; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.entity.StringEntity; /** * <h3>Overview</h3> * * A resource connection defines and implements a channel by which to exchange data * with a resource. It provides HTTP transport and that's it. You give it a URL, * tell it get, put, post, or delete, and it gives you back the response. This * class uses <a href="http://hc.apache.org/httpcomponents-client-ga/">Apache HTTP Client</a> * to do all of the real work. * * <h3>Usage</h3> * You create a resource connection by indicating the base URL from which one or more * resources may be interacted with. * * Say there is a person resource available at * <code>http://localhost:3000/people.xml</code>, and a <code>Person</code> * class which models the data elements provided by that resource. * <code> * <pre> * ResourceConnection c = new ResourceConnection("http://localhost:3000"); * ResourceFactory f = new ResourceFactory<Person>(c, Person.class); * </pre> * </code> * * The resource connection provides http transport, and the resource factory takes the * serialized data stream sent over that transport and turns it into Person objects. * * <h3>HttpClient Factories</h3> * * A resource connection uses an HttpClient object to do all of the dirty work. 
You may * find that you want to control the creation and parameters associated with these HttpClient * objects used by the resource connection. For example, you might have a cookie based * authentication scheme, and you need to get your cookies on the HttpClient objects * used by the resource connection. By default, a resource connection uses * {@link DefaultHttpClientFactory} to create HttpClient objects for it's use. You can * create your own factory, as long as it implements the {@link AbstractHttpClientFactory} * interface, giving you full control over TCP timeouts, cookies, authentication, * and concurrency of the HttpClient objects used by the resource connection. * * <h3>Authentication</h3> * * If your service requires HTTP based authentication, you can use the * {@link #setUsername(String)} and {@link #setPassword(String)} methods. <code> * <pre> * ResourceConnection c = new ResourceConnection("http://localhost:3000"); * c.setUsername("Ace"); * c.setPassword("newenglandclamchowder"); * </pre> * </code> * These credentials will be passed through to the HttpClientFactory object, * which is responsible for using these credentials on the HTTP request. The * default HttpClientFactory object will use these credentials as basic * authentication. Authentication credentials embedded in the URL will be ignored. 
* * @version $LastChangedRevision$ <br> * $LastChangedDate$ * @author $LastChangedBy$ */ public class ResourceConnection { private URL site; private AbstractHttpClientFactory clientFactory; private static final String CONTENT_TYPE = "Content-type"; private Log log = LogFactory.getLog(ResourceConnection.class); /** * Connect a resource located at a site represented in a URL * * @param site */ public ResourceConnection(URL site) { this.site = site; this.clientFactory = new DefaultHttpClientFactory(); } /** * Connect a resource located at a site represented in a string * * @param site * @throws MalformedURLException */ public ResourceConnection(String site) throws MalformedURLException { this.site = new URL(site); this.clientFactory = new DefaultHttpClientFactory(); } /** * Connect a resource located at a site represented in a URL * * @param site */ public ResourceConnection(URL site, AbstractHttpClientFactory clientFactory) { this.site = site; this.clientFactory = clientFactory; } /** * Connect a resource located at a site represented in a string, using a * specific HttpClientFactory * * @param site * @param factory * @throws MalformedURLException */ public ResourceConnection(String site, AbstractHttpClientFactory factory) throws MalformedURLException { this.site = new URL(site); this.clientFactory = factory; } /** * @return the URL object for the site this connection points to */ public URL getSite() { return this.site; } /** * set the factory used to create HttpClient objects. If you don't * set your own factory, {@link DefaultHttpClientFactory} will be used. 
* * @param factory */ public void setHttpClientFactory(AbstractHttpClientFactory factory) { this.clientFactory = factory; } /** * * @return the factory used to create HttpClient objects */ public AbstractHttpClientFactory getHttpClientFactory() { return this.clientFactory; } /** * @return the username used for authentication */ public String getUsername() { return this.clientFactory.getUsername(); } /** * @param username * the username to use for authentication */ public void setUsername(String username) { this.clientFactory.setUsername(username); } /** * @return the password used for authentication */ public String getPassword() { return this.clientFactory.getPassword(); } /** * @param password * the password to use for authentication */ public void setPassword(String password) { this.clientFactory.setPassword(password); } /** * Close this resource connection */ public void close() { clientFactory.shutter(); } /** * append url to the site this Connection was created with, issue a HTTP GET * request, and return the body of the HTTP response * * @param url * generates a URL when toString() is called * @return a string containing the body of the response * @throws HttpException * @throws IOException * @throws InterruptedException * @throws URISyntaxException */ public String get(Object url) throws HttpException, IOException, InterruptedException, URISyntaxException { StringBuffer sb = new StringBuffer(); BufferedReader reader = getStream(url); if (reader != null) { try { int c; while ((c = reader.read()) != -1) sb.append((char) c); } finally { reader.close(); } } return sb.toString(); } /** * append url to the site this Connection was created with, issue a HTTP GET * request, and return a buffered input stream of the body of the HTTP * response. You have to call reader.close() when you are done with it in * order to clean up resources cleanly. 
* * if there is no response body, return null * * @param url * @return a buffered stream of the response * @throws HttpException * @throws IOException * @throws InterruptedException * @throws URISyntaxException */ public BufferedReader getStream(Object url) throws HttpException, IOException, InterruptedException, URISyntaxException { HttpClient client = clientFactory.getHttpClient(this.getSite()); String uri = this.getSite().toString() + url.toString(); HttpGet request = new HttpGet(uri); HttpEntity entity = null; log.trace("HttpGet uri=" + uri); HttpResponse response = client.execute(request); checkHttpStatus(response); entity = response.getEntity(); BufferedReader reader = null; if (entity != null) { reader = new BufferedReader(new InputStreamReader( entity.getContent())); } return reader; } /** * send an http put request to the server. This is a bit unique because * there is no response returned from the server. * * @param url * @param body * @param contentType * @throws URISyntaxException * @throws HttpException * @throws IOException * @throws InterruptedException */ public HttpResponse put(Object url, String body, String contentType) throws URISyntaxException, HttpException, IOException, InterruptedException { HttpClient client = clientFactory.getHttpClient(this.getSite()); String uri = this.getSite().toString() + url.toString(); HttpPut request = new HttpPut(uri); log.trace("HttpPut uri=" + uri); request.setHeader(CONTENT_TYPE, contentType); StringEntity entity = new StringEntity(body); request.setEntity(entity); HttpResponse response = client.execute(request); return response; } /** * post body to url using the supplied content type * * @param url * @param body * @throws ClientProtocolException * @throws IOException * @throws ClientError * @throws ServerError */ public HttpResponse post(Object url, String body, String contentType) throws ClientProtocolException, IOException, ClientError, ServerError { HttpClient client = 
clientFactory.getHttpClient(this.getSite()); String uri = this.getSite().toString() + url.toString(); HttpPost request = new HttpPost(uri); log.trace("HttpGet uri=" + uri); request.setHeader(CONTENT_TYPE, contentType); StringEntity entity = new StringEntity(body); request.setEntity(entity); HttpResponse response = client.execute(request); return response; } /** * delete a resource on the server * * @param url * @throws ClientError * @throws ServerError * @throws ClientProtocolException * @throws IOException */ public void delete(Object url) throws ClientError, ServerError, ClientProtocolException, IOException { HttpClient client = clientFactory.getHttpClient(this.getSite()); String uri = this.getSite().toString() + url.toString(); HttpDelete request = new HttpDelete(uri); log.trace("HttpDelete uri=" + uri); HttpResponse response = client.execute(request); checkHttpStatus(response); } /** * check the status in the HTTP response and throw an appropriate exception * * @param response * @throws ClientError * @throws ServerError */ public final void checkHttpStatus(HttpResponse response) throws ClientError, ServerError { int status = response.getStatusLine().getStatusCode(); if (status == 400) throw new BadRequest(); else if (status == 401) throw new UnauthorizedAccess(); else if (status == 403) throw new ForbiddenAccess(); else if (status == 404) throw new ResourceNotFound(); else if (status == 405) throw new MethodNotAllowed(); else if (status == 409) throw new ResourceConflict(); else if (status == 422) throw new ResourceInvalid(); else if (status >= 401 && status <= 499) throw new ClientError(); else if (status >= 500 && status <= 599) throw new ServerError(); } public String toString() { return site.toString(); } }
/*
 * Copyright 2020 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.ancompiler;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

import com.github.javaparser.StaticJavaParser;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.Modifier;
import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.body.FieldDeclaration;
import com.github.javaparser.ast.body.MethodDeclaration;
import com.github.javaparser.ast.body.Parameter;
import com.github.javaparser.ast.body.VariableDeclarator;
import com.github.javaparser.ast.stmt.BlockStmt;
import com.github.javaparser.ast.stmt.Statement;
import com.github.javaparser.ast.type.VoidType;
import com.github.javaparser.printer.PrettyPrinter;
import org.drools.core.InitialFact;
import org.drools.core.base.ClassObjectType;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.reteoo.Rete;
import org.drools.core.util.index.AlphaRangeIndex;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.github.javaparser.StaticJavaParser.parse;
import static com.github.javaparser.StaticJavaParser.parseType;

/**
 * Generates the Java source of a compiled alpha network ("ANC") for a single
 * {@link ObjectTypeNode}: the emitted class extends {@code CompiledNetwork}
 * and is assembled as a string via a series of visiting handlers.
 */
public class ObjectTypeNodeCompiler {

    private static final String NEWLINE = "\n";

    // Package (and its slash-separated binary form) into which every
    // generated network class is emitted.
    public static final String PACKAGE_NAME = "org.drools.ancompiler";
    private static final String BINARY_PACKAGE_NAME = PACKAGE_NAME.replace('.', '/');

    /**
     * This field hold the fully qualified class name that the {@link ObjectTypeNode} is representing.
     */
    private String className;

    /**
     * This field will hold the "simple" name of the generated class
     */
    private String generatedClassSimpleName;

    /**
     * OTN we are creating a compiled network for
     */
    private ObjectTypeNode objectTypeNode;

    // Accumulates the full source text of the generated class.
    private StringBuilder builder = new StringBuilder();

    private static final Logger logger = LoggerFactory.getLogger(ObjectTypeNodeCompiler.class);

    // TODO DT-ANC avoid using a boolean
    // When true, node references are generated inline (with partitioned init
    // classes) instead of via setNetworkNodeReference.
    private boolean shouldInline;

    /* In case additional fields are needed, will be initialised in order in initAdditionalFields */
    private List<FieldDeclaration> additionalFields = new ArrayList<>();

    private ANCConfiguration ancConfiguration;

    /**
     * Convenience constructor: default configuration, no inlining.
     */
    public ObjectTypeNodeCompiler(ObjectTypeNode objectTypeNode) {
        this(new ANCConfiguration(), objectTypeNode, false);
    }

    /**
     * Derives the target class name from the OTN's class object type; the
     * generated simple name combines the underscored type name, the node id
     * and the (sign-stripped) node hash to keep it unique.
     */
    public ObjectTypeNodeCompiler(ANCConfiguration ancConfiguration, ObjectTypeNode objectTypeNode, boolean shouldInline) {
        this.ancConfiguration = ancConfiguration;
        this.shouldInline = shouldInline;
        this.objectTypeNode = objectTypeNode;
        ClassObjectType classObjectType = (ClassObjectType) objectTypeNode.getObjectType();
        // Inner-class '$' is turned into '.' so the name is usable in source.
        this.className = classObjectType.getClassName().replace("$", ".");

        final String classObjectTypeName = classObjectType.getClassName().replace('.', '_');
        final String otnHash = String.valueOf(objectTypeNode.hashCode()).replace("-", "");
        generatedClassSimpleName = String.format("Compiled%sNetwork%d%s"
                , classObjectTypeName
                , objectTypeNode.getId()
                , otnHash);
    }

    /**
     * Registers an extra field to be declared on the generated class and
     * initialised (in registration order) by the generated init(...) method.
     */
    public void addAdditionalFields(FieldDeclaration additionalFieldDeclarations) {
        this.additionalFields.add(additionalFieldDeclarations);
    }

    /**
     * Builds the complete source of the generated network class by walking the
     * OTN subnetwork with a sequence of handlers (declarations, constructor,
     * node wiring, assert/modify methods, delegate methods), then returns the
     * source plus the metadata needed to compile and install it.
     */
    public CompiledNetworkSources generateSource() {
        createClassDeclaration();

        ObjectTypeNodeParser parser = new ObjectTypeNodeParser(objectTypeNode);

        // debug rete
        logger.debug("Compiling Alpha Network: ");
        DebugHandler debugHandler = new DebugHandler();
        parser.accept(debugHandler);

        // After the first parsing we decide whether to traverse hashedAlphaNodes or not
        if(parser.getIndexableConstraints().size() > 1) {
            logger.warn("Alpha Network Compiler with multiple Indexable Constraints is not supported, reverting to non hashed-ANC. This might be slower ");
            parser.setTraverseHashedAlphaNodes(false);
        }

        createAdditionalFields(builder);

        // create declarations
        DeclarationsHandler declarations = new DeclarationsHandler(builder, ancConfiguration.getDisableContextEntry());
        parser.accept(declarations);

        // we need the hashed declarations when creating the constructor
        Collection<HashedAlphasDeclaration> hashedAlphaDeclarations = declarations.getHashedAlphaDeclarations();

        Map<String, AlphaRangeIndex> rangeIndexDeclarationMap = declarations.getRangeIndexDeclarationMap();

        createConstructor(hashedAlphaDeclarations, rangeIndexDeclarationMap);

        // create set node method
        NodeCollectorHandler nodeCollectors = new NodeCollectorHandler();
        parser.accept(nodeCollectors);

        // initClasses stays null in the non-inlined case; the inlined path
        // partitions node initialisation into separate companion classes.
        final Collection<CompilationUnit> initClasses;
        builder.append(String.format("protected boolean isInlined() { return %s; }", shouldInline));
        if(shouldInline) {
            addEmptySetNetworkReference(builder);
            InlineFieldReferenceInitHandler inlineFieldReferenceInitHandler = new InlineFieldReferenceInitHandler(nodeCollectors.getNodes(), additionalFields);
            inlineFieldReferenceInitHandler.emitCode(builder);
            initClasses = inlineFieldReferenceInitHandler.getPartitionedNodeInitialisationClasses();
        } else {
            SetNodeReferenceHandler partitionedSwitch = new SetNodeReferenceHandler(nodeCollectors.getNodes());
            partitionedSwitch.emitCode(builder);
            initClasses = null;
        }

        // create assert method
        AssertHandler assertHandler = new AssertHandler(className, !hashedAlphaDeclarations.isEmpty());
        parser.accept(assertHandler);
        builder.append(assertHandler.emitCode());

        // modifyObject support is emitted only when enabled; an (empty)
        // ModifyHandler still contributes its skeleton code below.
        ModifyHandler modifyHandler = new ModifyHandler(className, !hashedAlphaDeclarations.isEmpty());

        if (ancConfiguration.isEnableModifyObject()) {
            parser.accept(modifyHandler);
        }
        builder.append(modifyHandler.emitCode());

        DelegateMethodsHandler delegateMethodsHandler = new DelegateMethodsHandler(builder);
        parser.accept(delegateMethodsHandler);

        // end of class
        builder.append("}").append(NEWLINE);

        String sourceCode = builder.toString();

        if(ancConfiguration.isPrettyPrint()) {
            sourceCode = new PrettyPrinter().print(parse(sourceCode));
        }

        if (logger.isDebugEnabled()) {
            logger.debug(String.format("Generated Compiled Alpha Network %s", sourceCode));
        }
        return new CompiledNetworkSources(
                sourceCode,
                parser.getIndexableConstraint(),
                getName(),
                getSourceName(),
                objectTypeNode,
                rangeIndexDeclarationMap,
                initClasses);
    }

    // Emits a no-op setNetworkNodeReference override for the inlined variant,
    // where node references are initialised by the companion init classes.
    private void addEmptySetNetworkReference(StringBuilder builder) {
        builder.append(" @Override\n" +
                " protected void setNetworkNodeReference(org.drools.core.common.NetworkNode networkNode) {\n" +
                " \n" +
                " }");
    }

    // TODO DT-ANC move this outside?
    // Declares the registered additional fields on the generated class and
    // emits a varargs init(Object... args) method assigning args[i] (cast to
    // the field's declared type) to the i-th field, in registration order.
    private void createAdditionalFields(StringBuilder builder) {
        for(FieldDeclaration fd : additionalFields) {
            builder.append(fd.toString());
        }

        MethodDeclaration initMethod = new MethodDeclaration();
        initMethod.setModifiers(NodeList.nodeList(Modifier.publicModifier()));
        initMethod.setType(new VoidType());
        initMethod.setName("init");
        Parameter args = new Parameter(parseType("Object"), "args");
        args.setVarArgs(true);
        initMethod.setParameters(NodeList.nodeList(args));

        BlockStmt initMethodStatements = new BlockStmt();

        for (int i = 0, additionalFieldsSize = additionalFields.size(); i < additionalFieldsSize; i++) {
            FieldDeclaration fd = additionalFields.get(i);
            VariableDeclarator fieldType = fd.getVariables().iterator().next();
            String fieldInitFromVarargs = String.format("%s = (%s)%s;", fieldType.getName(), fieldType.getType(), String.format("args[%d]", i));
            Statement initStatement = StaticJavaParser.parseStatement(fieldInitFromVarargs);
            initMethodStatements.addStatement(initStatement);
        }
        initMethod.setBody(initMethodStatements);
        builder.append(initMethod);
    }

    /**
     * This method will output the package statement, followed by the opening of the class declaration
     */
    private void createClassDeclaration() {
        builder.append("package ").append(PACKAGE_NAME).append(";").append(NEWLINE);
        builder.append("public class ").append(generatedClassSimpleName).append(" extends ").
                append(CompiledNetwork.class.getName()).append("{ ").append(NEWLINE);
        builder.append(String.format("private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(%s.class);%n", generatedClassSimpleName));
        builder.append("org.drools.core.spi.InternalReadAccessor readAccessor;\n");
    }

    /**
     * Creates the constructor for the generated class. If the hashedAlphaDeclarations is empty, it will just
     * output a empty default constructor; if it is not, the constructor will contain code to fill the hash
     * alpha maps with the values and node ids.
     *
     * @param hashedAlphaDeclarations declarations used for creating statements to populate the hashed alpha
     *                                maps for the generate class
     */
    private void createConstructor(Collection<HashedAlphasDeclaration> hashedAlphaDeclarations, Map<String, AlphaRangeIndex> rangeIndexDeclarationMap) {
        builder.append("public ").append(generatedClassSimpleName).append("(org.drools.core.spi.InternalReadAccessor readAccessor, java.util.Map<String, " + AlphaRangeIndex.class.getCanonicalName() + "> rangeIndexDeclarationMap) {").append(NEWLINE);

        builder.append("this.readAccessor = readAccessor;\n");

        // for each hashed alpha, we need to fill in the map member variable with the hashed values to node Ids
        for (HashedAlphasDeclaration declaration : hashedAlphaDeclarations) {
            String mapVariableName = declaration.getVariableName();

            for (Object hashedValue : declaration.getHashedValues()) {
                Object value = hashedValue;

                if (value == null) {
                    // generate the map.put(hashedValue, nodeId) call
                    String nodeId = declaration.getNodeId(hashedValue);
                    builder.append(mapVariableName).append(".put(null,").append(nodeId).append(");");
                    builder.append(NEWLINE);
                } else {
                    // need to quote value if it is a string; BigDecimal/BigInteger
                    // are reconstructed from their string form in the generated code
                    if (value.getClass().equals(String.class)) {
                        value = "\"" + value + "\"";
                    } else if (value instanceof BigDecimal) {
                        value = "new java.math.BigDecimal(\"" + value + "\")";
                    } else if (value instanceof BigInteger) {
                        value = "new java.math.BigInteger(\"" + value + "\")";
                    }

                    String nodeId = declaration.getNodeId(hashedValue);

                    // generate the map.put(hashedValue, nodeId) call
                    builder.append(mapVariableName).append(".put(").append(value).append(", ").append(nodeId).append(");");
                    builder.append(NEWLINE);
                }
            }
        }

        // Range Index: each declared range-index field is pulled out of the
        // constructor's rangeIndexDeclarationMap argument by variable name.
        for (String variableName : rangeIndexDeclarationMap.keySet()) {
            builder.append("this." + variableName + " = rangeIndexDeclarationMap.get(\"" + variableName + "\");");
            builder.append(NEWLINE);
        }

        builder.append("}").append(NEWLINE);
    }

    /**
     * Returns the fully qualified name of the generated subclass of {@link CompiledNetwork}
     *
     * @return name of generated class
     */
    private String getName() {
        return getPackageName() + "." + generatedClassSimpleName;
    }

    /**
     * Returns the fully qualified binary name of the generated subclass of {@link CompiledNetwork}
     *
     * @return binary name of generated class
     */
    private String getBinaryName() {
        return BINARY_PACKAGE_NAME + "/" + generatedClassSimpleName + ".class";
    }

    /**
     * Returns the fully qualified source name of the generated subclass of {@link CompiledNetwork}
     *
     * @return binary name of generated class
     */
    private String getSourceName() {
        return BINARY_PACKAGE_NAME + "/" + generatedClassSimpleName + ".java";
    }

    private String getPackageName() {
        return PACKAGE_NAME;
    }

    /**
     * Generates sources for every eligible OTN in the given Rete network.
     */
    public static List<CompiledNetworkSources> compiledNetworkSources(Rete rete) {
        return objectTypeNodeCompiler(rete)
                .stream()
                .map(ObjectTypeNodeCompiler::generateSource)
                .collect(Collectors.toList());
    }

    /**
     * Creates one compiler (default configuration) per eligible OTN.
     */
    public static List<ObjectTypeNodeCompiler> objectTypeNodeCompiler(Rete rete) {
        return objectTypeNodes(rete)
                .stream()
                .map(ObjectTypeNodeCompiler::new)
                .collect(Collectors.toList());
    }

    /**
     * Collects every OTN in the network that should get a compiled alpha network.
     */
    public static List<ObjectTypeNode> objectTypeNodes(Rete rete) {
        return rete.getEntryPointNodes().values().stream()
                .flatMap(ep -> ep.getObjectTypeNodes().values().stream())
                .filter(ObjectTypeNodeCompiler::shouldCreateCompiledAlphaNetwork)
                .collect(Collectors.toList());
    }

    // InitialFact nodes are internal bookkeeping and an already-compiled sink
    // must not be compiled again.
    private static boolean shouldCreateCompiledAlphaNetwork(ObjectTypeNode f) {
        return !InitialFact.class.isAssignableFrom(f.getObjectType().getClassType())
                && !(f.getObjectSinkPropagator() instanceof CompiledNetwork); // DROOLS-6336 Avoid generating an ANC from an ANC, it won't work anyway
    }

    /**
     * Same as {@link #compiledNetworkSources(Rete)} but keyed by generated class name.
     */
    public static Map<String, CompiledNetworkSources> compiledNetworkSourceMap(Rete rete) {
        List<CompiledNetworkSources> compiledNetworkSources = ObjectTypeNodeCompiler.compiledNetworkSources(rete);
        return compiledNetworkSources
                .stream()
                .collect(Collectors.toMap(CompiledNetworkSources::getName, Function.identity()));
    }

    /**
     * Maps each eligible OTN to the fully qualified name its generated class would have.
     */
    public static Map<ObjectTypeNode, String> otnWithClassName(Rete rete) {
        List<ObjectTypeNodeCompiler> compiledNetworkSources = ObjectTypeNodeCompiler.objectTypeNodeCompiler(rete);
        return compiledNetworkSources
                .stream()
                .collect(Collectors.toMap(k -> k.objectTypeNode, ObjectTypeNodeCompiler::getName));
    }
}
package me.prettyprint.cassandra.connection.client;

import java.net.SocketException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;

import me.prettyprint.cassandra.service.CassandraHost;
import me.prettyprint.cassandra.service.SystemProperties;
import me.prettyprint.hector.api.exceptions.HInvalidRequestException;
import me.prettyprint.hector.api.exceptions.HectorTransportException;

import org.apache.cassandra.thrift.Cassandra;
import org.apache.cassandra.thrift.InvalidRequestException;
import org.apache.commons.lang.StringUtils;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSSLTransportFactory;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TSSLTransportFactory.TSSLTransportParameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/*
 * It expects few system properties to be set up if it uses SSL:
 * <ul>
 * <li><code>ssl.truststore</code> File path for trust store
 * <li><code>ssl.truststore.password</code> Password for trust store
 * <li><code>ssl.protocol</code> SSL protocol, default SSL
 * <li><code>ssl.store.type</code> Store type, default JKS
 * <li><code>ssl.cipher.suites</code> Cipher suites
 * </ul>
 * <p>
 */
public class HThriftClient implements HClient {

  // Wall-clock time this client instance was constructed.
  private long createdTime = System.currentTimeMillis();

  private static Logger log = LoggerFactory.getLogger(HThriftClient.class);

  private static final String NAME_FORMAT = "CassandraClient<%s-%d>";

  // Monotonic counter used to give each client a unique serial for toString().
  private static final AtomicLong serial = new AtomicLong(0);

  final CassandraHost cassandraHost;

  private final long mySerial;
  protected final int timeout;
  // Keyspace last set via set_keyspace on the underlying thrift client.
  protected String keyspaceName;
  // Timestamp of the last startToUse() call; basis for getSinceLastUsed().
  private long useageStartTime;

  protected TTransport transport;
  protected Cassandra.Client cassandraClient;
  // Non-null when this client should connect over SSL.
  private TSSLTransportParameters params;
  // Credentials this client has already authenticated with (see isAlreadyAuthenticated).
  private final Map<String, String> credentials = new HashMap<String, String>();

  /**
   * Constructor
   * @param cassandraHost
   */
  public HThriftClient(CassandraHost cassandraHost) {
    this.cassandraHost = cassandraHost;
    this.timeout = getTimeout(cassandraHost);
    mySerial = serial.incrementAndGet();
  }

  /**
   * Constructor
   * @param cassandraHost
   * @param params SSL transport parameters; enables SSL sockets in {@link #open()}
   */
  public HThriftClient(CassandraHost cassandraHost, TSSLTransportParameters params) {
    this.cassandraHost = cassandraHost;
    this.timeout = getTimeout(cassandraHost);
    this.params = params;
    mySerial = serial.incrementAndGet();
  }

  /**
   * {@inheritDoc}
   */
  public Cassandra.Client getCassandra() {
    if ( !isOpen() ) {
      throw new IllegalStateException("getCassandra called on client that was not open. You should not have gotten here.");
    }
    // Lazily created; reused for the life of the open transport.
    if ( cassandraClient == null ) {
      cassandraClient = new Cassandra.Client(new TBinaryProtocol(transport));
    }
    return cassandraClient;
  }

  /**
   * {@inheritDoc}
   */
  public Cassandra.Client getCassandra(String keyspaceNameArg) {
    getCassandra();
    // Only issue set_keyspace when the requested keyspace actually differs.
    if ( keyspaceNameArg != null && !StringUtils.equals(keyspaceName, keyspaceNameArg)) {
      if ( log.isDebugEnabled() )
        log.debug("keyspace reseting from {} to {}", keyspaceName, keyspaceNameArg);
      try {
        cassandraClient.set_keyspace(keyspaceNameArg);
      } catch (InvalidRequestException ire) {
        throw new HInvalidRequestException(ire);
      } catch (TException e) {
        throw new HectorTransportException(e);
      }
      keyspaceName = keyspaceNameArg;
    }
    return cassandraClient;
  }

  /**
   * {@inheritDoc}
   */
  public HThriftClient close() {
    if ( log.isDebugEnabled() ) {
      log.debug("Closing client {}", this);
    }
    if ( isOpen() ) {
      try {
        transport.flush();
      } catch (Exception e) {
        log.error("Could not flush transport (to be expected if the pool is shutting down) in close for client: " + toString(), e);
      } finally {
        try {
          transport.close();
        } catch (Exception e) {
          log.error("Error on transport close for client: " +toString(), e);
        }
      }
    }
    return this;
  }

  /**
   * {@inheritDoc}
   */
  public HThriftClient open() {
    if ( isOpen() ) {
      throw new IllegalStateException("Open called on already open connection. You should not have gotten here.");
    }
    if ( log.isDebugEnabled() ) {
      log.debug("Creating a new thrift connection to {}", cassandraHost);
    }

    TSocket socket;
    try {
      // SSL sockets come back from the factory already connected.
      socket = params == null ?
          new TSocket(cassandraHost.getHost(), cassandraHost.getPort(), timeout) :
          TSSLTransportFactory.getClientSocket(cassandraHost.getHost(), cassandraHost.getPort(), timeout, params);
    } catch (TTransportException e) {
      throw new HectorTransportException("Could not get client socket: ", e);
    }
    if ( cassandraHost.getUseSocketKeepalive() ) {
      try {
        socket.getSocket().setKeepAlive(true);
      } catch (SocketException se) {
        throw new HectorTransportException("Could not set SO_KEEPALIVE on socket: ", se);
      }
    }
    transport = maybeWrapWithTFramedTransport(socket);
    // If using SSL, the socket will already be connected, and TFramedTransport and
    // TSocket just wind up calling socket.isConnected(), so check this before calling
    // open() to avoid a "Socket already connected" error.
    if (!transport.isOpen()) {
      try {
        transport.open();
      } catch (TTransportException e) {
        // Thrift exceptions aren't very good in reporting, so we have to catch the exception here and
        // add details to it.
        log.debug("Unable to open transport to " + cassandraHost.getName());
        //clientMonitor.incCounter(Counter.CONNECT_ERROR);
        throw new HectorTransportException("Unable to open transport to " + cassandraHost.getName() +" , " +
            e.getLocalizedMessage(), e);
      }
    }
    return this;
  }

  // Wraps the socket in a framed transport when the host is configured for it.
  protected TTransport maybeWrapWithTFramedTransport(TTransport transport) {
    if (cassandraHost.getUseThriftFramedTransport()) {
      return new TFramedTransport(transport, cassandraHost.getMaxFrameSize());
    } else {
      return transport;
    }
  }

  /**
   * {@inheritDoc}
   */
  public boolean isOpen() {
    boolean open = false;
    if (transport != null) {
      open = transport.isOpen();
    }
    if ( log.isTraceEnabled() ) {
      log.trace("Transport open status {} for client {}", open, this);
    }
    return open;
  }

  /**
   * If CassandraHost was not null we use {@link CassandraHost#getCassandraThriftSocketTimeout()}
   * if it was greater than zero. Otherwise look for an environment
   * variable name CASSANDRA_THRIFT_SOCKET_TIMEOUT value.
   * If doesn't exist, returns 0.
   * @param cassandraHost
   */
  private int getTimeout(CassandraHost cassandraHost) {
    int timeoutVar = 0;
    if ( cassandraHost != null && cassandraHost.getCassandraThriftSocketTimeout() > 0 ) {
      timeoutVar = cassandraHost.getCassandraThriftSocketTimeout();
    } else {
      String timeoutStr = System.getProperty(
          SystemProperties.CASSANDRA_THRIFT_SOCKET_TIMEOUT.toString());
      if (timeoutStr != null && timeoutStr.length() > 0) {
        try {
          timeoutVar = Integer.parseInt(timeoutStr);
        } catch (NumberFormatException e) {
          log.error("Invalid value for CASSANDRA_THRIFT_SOCKET_TIMEOUT", e);
        }
      }
    }
    return timeoutVar;
  }

  /**
   * {@inheritDoc}
   */
  public void startToUse() {
    useageStartTime = System.currentTimeMillis();
  }

  /**
   * {@inheritDoc}
   */
  public long getSinceLastUsed() {
    return System.currentTimeMillis() - useageStartTime;
  }

  @Override
  public String toString() {
    return String.format(NAME_FORMAT, cassandraHost.getUrl(), mySerial);
  }

  /**
   * Compares the toString of these clients.
   * <p>
   * Fixed to satisfy the {@link Object#equals(Object)} contract: previously this
   * threw NPE for a null argument and returned true for any object whose
   * toString() happened to match.
   */
  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof HThriftClient)) {
      return false;
    }
    return this.toString().equals(obj.toString());
  }

  /**
   * Consistent with {@link #equals(Object)}, which compares toString() values.
   */
  @Override
  public int hashCode() {
    return toString().hashCode();
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public CassandraHost getCassandraHost() {
    return cassandraHost;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public boolean isAlreadyAuthenticated(Map<String, String> credentials) {
    return credentials != null && this.credentials.equals(credentials);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public void clearAuthentication() {
    credentials.clear();
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public void setAuthenticated(Map<String, String> credentials) {
    clearAuthentication();
    this.credentials.putAll(credentials);
  }

  /**
   * {@inheritDoc}
   */
  public long getCreatedTime() {
    return createdTime;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.validation;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.wicket.markup.html.form.ValidationErrorFeedback;
import org.apache.wicket.util.lang.Args;
import org.apache.wicket.util.lang.Classes;
import org.apache.wicket.util.string.Strings;

/**
 * A versatile implementation of {@link IValidationError} that supports message resolution from
 * {@link IErrorMessageSource}, default message (if none of the keys matched), and variable
 * substitution.
 *
 * The final error message is constructed via the following process:
 * <ol>
 * <li>Try all keys added by calls to {@link #addKey(String)} via the provided
 * <code>IErrorMessageSource</code>.</li>
 * <li>If none of the keys yielded a message, use the message set by {@link #setMessage(String)}, if
 * any.</li>
 * <li>Perform variable substitution on the message, if any.</li>
 * </ol>
 *
 * @author Igor Vaynberg (ivaynberg)
 * @since 1.2.6
 */
public final class ValidationError implements IValidationError {
    private static final long serialVersionUID = 1L;

    /** message keys, tried in order against the <code>IErrorMessageSource</code>; lazily created */
    private List<String> keys;

    /** variables available for substitution into the resolved message; lazily created */
    private Map<String, Object> vars;

    /** fallback message used when none of the keys resolve */
    private String message;

    /**
     * Constructs an empty error
     */
    public ValidationError() {
    }

    /**
     * Constructs a validation error with the validator's standard key. Equivalent to calling
     * {@link #addKey(IValidator)}
     *
     * @param validator
     *            validator
     */
    public ValidationError(IValidator<?> validator) {
        addKey(validator);
    }

    /**
     * Constructs a validation error with a variation of validator's standard key. Equivalent to
     * calling {@link #addKey(IValidator, String)}
     *
     * @param validator
     *            validator
     * @param variation
     *            key variation
     */
    public ValidationError(IValidator<?> validator, String variation) {
        addKey(validator, variation);
    }

    /**
     * Constructs a validation error with the specified message. Equivalent to calling
     * {@link #setMessage(String)}
     *
     * @param message
     *            message
     */
    public ValidationError(String message) {
        setMessage(message);
    }

    /**
     * Adds a key to the list of keys that will be tried against <code>IErrorMessageSource</code> to
     * locate the error message string.
     *
     * @param key
     *            a message key to be added
     * @return this <code>ValidationError</code> for chaining purposes
     */
    public ValidationError addKey(String key) {
        Args.notEmpty(key, "key");

        List<String> target = keys;
        if (target == null) {
            target = new ArrayList<>(1);
            keys = target;
        }
        target.add(key);
        return this;
    }

    /**
     * Shortcut for adding a standard message key which is the simple name of the validator' class
     *
     * @param validator
     *            validator
     * @return {@code this}
     */
    public ValidationError addKey(IValidator<?> validator) {
        Args.notNull(validator, "validator");
        return addKey(Classes.simpleName(validator.getClass()));
    }

    /**
     * Shortcut for adding a standard message key variation which is the simple name of the
     * validator class followed by a dot and the {@literal variation}
     * <p>
     * If the variation is empty only the validator's simple class name is used
     * </p>
     *
     * @param validator
     *            validator
     * @param variation
     *            key variation
     * @return {@code this}
     */
    public ValidationError addKey(IValidator<?> validator, String variation) {
        Args.notNull(validator, "validator");

        String resolvedKey = Classes.simpleName(validator.getClass());
        if (!Strings.isEmpty(variation)) {
            resolvedKey = resolvedKey + "." + variation.trim();
        }
        return addKey(resolvedKey);
    }

    /**
     * Sets a key and value in the variables map for use in substitution.
     *
     * @param name
     *            a variable name
     * @param value
     *            a variable value
     * @return this <code>ValidationError</code> for chaining purposes
     */
    public ValidationError setVariable(String name, Object value) {
        Args.notEmpty(name, "name");
        getVariables().put(name, value);
        return this;
    }

    /**
     * Retrieves the variables map for this error. The caller is free to modify the contents.
     *
     * @return a <code>Map</code> of variables for this error
     */
    public final Map<String, Object> getVariables() {
        Map<String, Object> result = vars;
        if (result == null) {
            result = new HashMap<>(2);
            vars = result;
        }
        return result;
    }

    /**
     * Sets the variables map for this error.
     *
     * @param vars
     *            a variables map
     * @return this <code>ValidationError</code> for chaining purposes
     */
    public final ValidationError setVariables(Map<String, Object> vars) {
        Args.notNull(vars, "vars");
        this.vars = vars;
        return this;
    }

    /**
     * @see IValidationError#getErrorMessage(IErrorMessageSource)
     */
    @Override
    public final Serializable getErrorMessage(IErrorMessageSource messageSource) {
        String resolved = resolveFromKeys(messageSource);

        // ... if no keys matched, fall back to the default message (may be null)
        if (resolved == null) {
            resolved = message;
        }

        return new ValidationErrorFeedback(this, resolved);
    }

    // Tries each registered key in order; returns the first resolved message or null.
    private String resolveFromKeys(IErrorMessageSource messageSource) {
        if (keys == null) {
            return null;
        }
        for (String key : keys) {
            String candidate = messageSource.getMessage(key, vars);
            if (candidate != null) {
                return candidate;
            }
        }
        return null;
    }

    /**
     * Gets the default message that will be used when no message could be located via message keys.
     *
     * @return message the default message used when all keys yield no message
     */
    public final String getMessage() {
        return message;
    }

    /**
     * Sets message that will be used when no message could be located via message keys.
     * <p>
     * Note: No variable substitution is performed on the given message!
     *
     * @param message
     *            a default message to be used when all keys yield no message
     *
     * @return this <code>ValidationError</code> for chaining purposes
     */
    public final ValidationError setMessage(String message) {
        Args.notNull(message, "message");
        this.message = message;
        return this;
    }

    /**
     * Gets error keys
     *
     * @return keys
     */
    public List<String> getKeys() {
        if (keys == null) {
            keys = new ArrayList<>();
        }
        return keys;
    }

    /**
     * Sets error keys
     *
     * @param keys
     */
    public void setKeys(List<String> keys) {
        this.keys = keys;
    }

    /**
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append('[').append(Classes.simpleName(getClass()));
        sb.append(" message=[").append(message);
        sb.append("], keys=[");
        sb.append(keys == null ? "null" : String.join(", ", keys));
        sb.append("], variables=[");
        if (vars == null) {
            sb.append("null");
        }
        else {
            String separator = "";
            for (Entry<String, Object> entry : vars.entrySet()) {
                sb.append(separator)
                    .append('[')
                    .append(entry.getKey())
                    .append('=')
                    .append(entry.getValue())
                    .append(']');
                separator = ",";
            }
        }
        sb.append(']');
        sb.append(']');
        return sb.toString();
    }
}
/* * Copyright 2013 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bitcoinj.protocols.channels; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Multimap; import com.google.common.collect.MultimapBuilder; import org.bitcoinj.core.*; import org.bitcoinj.crypto.TransactionSignature; import org.bitcoinj.protocols.channels.IPaymentChannelClient.ClientChannelProperties; import org.bitcoinj.script.Script; import org.bitcoinj.script.ScriptBuilder; import org.bitcoinj.wallet.AllowUnconfirmedCoinSelector; import org.bitcoinj.wallet.SendRequest; import org.bitcoinj.wallet.Wallet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.spongycastle.crypto.params.KeyParameter; import javax.annotation.Nullable; import java.math.BigInteger; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; /** * Version 2 of the payment channel state machine - uses CLTV opcode transactions * instead of multisig transactions. */ public class PaymentChannelV2ClientState extends PaymentChannelClientState { private static final Logger log = LoggerFactory.getLogger(PaymentChannelV1ClientState.class); // How much value (in satoshis) is locked up into the channel. 
private final Coin totalValue; // When the channel will automatically settle in favor of the client, if the server halts before protocol termination // specified in terms of block timestamps (so it can off real time by a few hours). private final long expiryTime; // The refund is a time locked transaction that spends all the money of the channel back to the client. // Unlike in V1 this refund isn't signed by the server - we only have to sign it ourselves. @VisibleForTesting Transaction refundTx; private Coin refundFees; // The multi-sig contract locks the value of the channel up such that the agreement of both parties is required // to spend it. private Transaction contract; PaymentChannelV2ClientState(StoredClientChannel storedClientChannel, Wallet wallet) throws VerificationException { super(storedClientChannel, wallet); // The PaymentChannelClientConnection handles storedClientChannel.active and ensures we aren't resuming channels this.contract = checkNotNull(storedClientChannel.contract); this.expiryTime = storedClientChannel.expiryTime; this.totalValue = contract.getOutput(0).getValue(); this.valueToMe = checkNotNull(storedClientChannel.valueToMe); this.refundTx = checkNotNull(storedClientChannel.refund); this.refundFees = checkNotNull(storedClientChannel.refundFees); stateMachine.transition(State.READY); initWalletListeners(); } public PaymentChannelV2ClientState(Wallet wallet, ECKey myKey, ECKey serverMultisigKey, Coin value, long expiryTimeInSeconds) throws VerificationException { super(wallet, myKey, serverMultisigKey, value, expiryTimeInSeconds); checkArgument(value.signum() > 0); initWalletListeners(); this.valueToMe = this.totalValue = checkNotNull(value); this.expiryTime = expiryTimeInSeconds; stateMachine.transition(State.NEW); } @Override protected Multimap<State, State> getStateTransitions() { Multimap<State, State> result = MultimapBuilder.enumKeys(State.class).arrayListValues().build(); result.put(State.UNINITIALISED, State.NEW); 
result.put(State.UNINITIALISED, State.READY); result.put(State.NEW, State.SAVE_STATE_IN_WALLET); result.put(State.SAVE_STATE_IN_WALLET, State.PROVIDE_MULTISIG_CONTRACT_TO_SERVER); result.put(State.PROVIDE_MULTISIG_CONTRACT_TO_SERVER, State.READY); result.put(State.READY, State.EXPIRED); result.put(State.READY, State.CLOSED); return result; } @Override public int getMajorVersion() { return 2; } @Override public synchronized void initiate(@Nullable KeyParameter userKey, ClientChannelProperties clientChannelProperties) throws ValueOutOfRangeException, InsufficientMoneyException { final NetworkParameters params = wallet.getParams(); Transaction template = new Transaction(params); // There is also probably a change output, but we don't bother shuffling them as it's obvious from the // format which one is the change. If we start obfuscating the change output better in future this may // be worth revisiting. Script redeemScript = ScriptBuilder.createCLTVPaymentChannelOutput(BigInteger.valueOf(expiryTime), myKey, serverKey); TransactionOutput transactionOutput = template.addOutput(totalValue, ScriptBuilder.createP2SHOutputScript(redeemScript)); if (transactionOutput.isDust()) throw new ValueOutOfRangeException("totalValue too small to use"); SendRequest req = SendRequest.forTx(template); req.coinSelector = AllowUnconfirmedCoinSelector.get(); req.shuffleOutputs = false; // TODO: Fix things so shuffling is usable. req = clientChannelProperties.modifyContractSendRequest(req); if (userKey != null) req.aesKey = userKey; wallet.completeTx(req); Coin multisigFee = req.tx.getFee(); contract = req.tx; // Build a refund transaction that protects us in the case of a bad server that's just trying to cause havoc // by locking up peoples money (perhaps as a precursor to a ransom attempt). We time lock it because the // CheckLockTimeVerify opcode requires a lock time to be specified and the input to have a non-final sequence // number (so that the lock time is not disabled). 
refundTx = new Transaction(params); // by using this sequence value, we avoid extra full replace-by-fee and relative lock time processing. refundTx.addInput(contract.getOutput(0)).setSequenceNumber(TransactionInput.NO_SEQUENCE - 1L); refundTx.setLockTime(expiryTime); if (Context.get().isEnsureMinRequiredFee()) { // Must pay min fee. final Coin valueAfterFee = totalValue.subtract(Transaction.REFERENCE_DEFAULT_MIN_TX_FEE); if (Transaction.MIN_NONDUST_OUTPUT.compareTo(valueAfterFee) > 0) throw new ValueOutOfRangeException("totalValue too small to use"); refundTx.addOutput(valueAfterFee, myKey.toAddress(params)); refundFees = multisigFee.add(Transaction.REFERENCE_DEFAULT_MIN_TX_FEE); } else { refundTx.addOutput(totalValue, myKey.toAddress(params)); refundFees = multisigFee; } TransactionSignature refundSignature = refundTx.calculateSignature(0, myKey.maybeDecrypt(userKey), getSignedScript(), Transaction.SigHash.ALL, false); refundTx.getInput(0).setScriptSig(ScriptBuilder.createCLTVPaymentChannelP2SHRefund(refundSignature, redeemScript)); refundTx.getConfidence().setSource(TransactionConfidence.Source.SELF); log.info("initiated channel with contract {}", contract.getHashAsString()); stateMachine.transition(State.SAVE_STATE_IN_WALLET); // Client should now call getIncompleteRefundTransaction() and send it to the server. 
} @Override protected synchronized Coin getValueToMe() { return valueToMe; } protected long getExpiryTime() { return expiryTime; } @Override public synchronized Transaction getContract() { checkState(contract != null); if (stateMachine.getState() == State.PROVIDE_MULTISIG_CONTRACT_TO_SERVER) { stateMachine.transition(State.READY); } return contract; } @Override protected synchronized Transaction getContractInternal() { return contract; } protected synchronized Script getContractScript() { return contract.getOutput(0).getScriptPubKey(); } @Override protected Script getSignedScript() { return ScriptBuilder.createCLTVPaymentChannelOutput(BigInteger.valueOf(expiryTime), myKey, serverKey); } @Override public synchronized Coin getRefundTxFees() { checkState(getState().compareTo(State.NEW) > 0); return refundFees; } @VisibleForTesting Transaction getRefundTransaction() { return refundTx; } @Override @VisibleForTesting synchronized void doStoreChannelInWallet(Sha256Hash id) { StoredPaymentChannelClientStates channels = (StoredPaymentChannelClientStates) wallet.getExtensions().get(StoredPaymentChannelClientStates.EXTENSION_ID); checkNotNull(channels, "You have not added the StoredPaymentChannelClientStates extension to the wallet."); checkState(channels.getChannel(id, contract.getHash()) == null); storedChannel = new StoredClientChannel(getMajorVersion(), id, contract, refundTx, myKey, serverKey, valueToMe, refundFees, expiryTime, true); channels.putChannel(storedChannel); } @Override public Coin getTotalValue() { return totalValue; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.planner.functions.sql;

import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.planner.calcite.FlinkTypeFactory;
import org.apache.flink.util.Preconditions;

import org.apache.flink.shaded.guava30.com.google.common.collect.ImmutableList;

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlCallBinding;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlOperandCountRange;
import org.apache.calcite.sql.SqlOperatorBinding;
import org.apache.calcite.sql.SqlTableFunction;
import org.apache.calcite.sql.SqlUtil;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.type.SqlOperandCountRanges;
import org.apache.calcite.sql.type.SqlOperandMetadata;
import org.apache.calcite.sql.type.SqlReturnTypeInference;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.type.SqlTypeUtil;
import org.apache.calcite.sql.validate.SqlNameMatcher;
import org.apache.calcite.sql.validate.SqlValidator;

import java.util.Collections;
import java.util.List;
import java.util.Optional;

import static org.apache.calcite.util.Static.RESOURCE;

/**
 * Base class for a table-valued function that computes windows. Examples include {@code TUMBLE},
 * {@code HOP}, {@code CUMULATE} and {@code SESSION}.
 *
 * <p>Note: we copied the implementation from Calcite's {@link
 * org.apache.calcite.sql.SqlWindowTableFunction}, but additionally return a {@code window_time}
 * time attribute column which keeps the same type as the original time attribute.
 */
public class SqlWindowTableFunction extends SqlFunction implements SqlTableFunction {

    /** The data source which the table function computes with. */
    protected static final String PARAM_DATA = "DATA";

    /** The time attribute column. Also known as the event time. */
    protected static final String PARAM_TIMECOL = "TIMECOL";

    /** The window duration INTERVAL. */
    protected static final String PARAM_SIZE = "SIZE";

    /** The optional align offset for each window. */
    protected static final String PARAM_OFFSET = "OFFSET";

    /** The session key(s), only used for SESSION window. */
    protected static final String PARAM_KEY = "KEY";

    /** The slide interval, only used for HOP window. */
    protected static final String PARAM_SLIDE = "SLIDE";

    /** The increment step interval, only used for CUMULATE window. */
    protected static final String PARAM_STEP = "STEP";

    /**
     * Type-inference strategy whereby the row type of a table function call is a ROW, which is
     * combined from the row type of operand #0 (which is a TABLE) and two additional fields. The
     * fields are as follows:
     *
     * <ol>
     *   <li>{@code window_start}: TIMESTAMP type to indicate a window's start
     *   <li>{@code window_end}: TIMESTAMP type to indicate a window's end
     *   <li>{@code window_time}: TIMESTAMP type with time attribute metadata to indicate a window's
     *       time attribute
     * </ol>
     */
    public static final SqlReturnTypeInference ARG0_TABLE_FUNCTION_WINDOWING =
            SqlWindowTableFunction::inferRowType;

    /** Creates a window table function with a given name. */
    public SqlWindowTableFunction(String name, SqlOperandMetadata operandMetadata) {
        super(
                name,
                SqlKind.OTHER_FUNCTION,
                ReturnTypes.CURSOR,
                null,
                operandMetadata,
                SqlFunctionCategory.SYSTEM);
    }

    @Override
    public SqlOperandMetadata getOperandTypeChecker() {
        // Safe: the constructor only ever receives a SqlOperandMetadata checker.
        return (SqlOperandMetadata) super.getOperandTypeChecker();
    }

    @Override
    public SqlReturnTypeInference getRowTypeInference() {
        return ARG0_TABLE_FUNCTION_WINDOWING;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Overrides because the first parameter of table-value function windowing is an explicit
     * TABLE parameter, which is not scalar.
     */
    @Override
    public boolean argumentMustBeScalar(int ordinal) {
        return ordinal != 0;
    }

    /** Helper for {@link #ARG0_TABLE_FUNCTION_WINDOWING}. */
    private static RelDataType inferRowType(SqlOperatorBinding opBinding) {
        final RelDataTypeFactory typeFactory = opBinding.getTypeFactory();
        final RelDataType inputRowType = opBinding.getOperandType(0);
        final RelDataType descriptorType = opBinding.getOperandType(1);
        final RelDataTypeField timeField = descriptorType.getFieldList().get(0);
        final RelDataType timeAttributeType;
        if (timeField.getType().getSqlTypeName() == SqlTypeName.NULL) {
            // the type is not inferred yet, we should infer the type here,
            // see org.apache.flink.table.planner.functions.sql.SqlDescriptorOperator.deriveType
            RelDataTypeField field = inputRowType.getField(timeField.getName(), false, false);
            if (field == null) {
                throw new IllegalArgumentException(
                        String.format(
                                "Can't find the time attribute field '%s' in the input schema %s.",
                                timeField.getName(), inputRowType.getFullTypeString()));
            }
            timeAttributeType = field.getType();
        } else {
            // the type has been inferred, use it directly
            timeAttributeType = timeField.getType();
        }
        return inferRowType(typeFactory, inputRowType, timeAttributeType);
    }

    /**
     * Appends the three window columns ({@code window_start}, {@code window_end},
     * {@code window_time}) to the input row type. {@code window_time} is forced NOT NULL but keeps
     * the time attribute's type.
     */
    public static RelDataType inferRowType(
            RelDataTypeFactory typeFactory, RelDataType inputRowType, RelDataType timeAttributeType) {
        return typeFactory
                .builder()
                .kind(inputRowType.getStructKind())
                .addAll(inputRowType.getFieldList())
                .add("window_start", SqlTypeName.TIMESTAMP, 3)
                .add("window_end", SqlTypeName.TIMESTAMP, 3)
                .add("window_time", typeFactory.createTypeWithNullability(timeAttributeType, false))
                .build();
    }

    /** Partial implementation of operand type checker. */
    protected abstract static class AbstractOperandMetadata implements SqlOperandMetadata {
        // Declared parameter names, in positional order.
        final List<String> paramNames;
        // Number of leading parameters that must be supplied; the rest are optional.
        final int mandatoryParamCount;

        AbstractOperandMetadata(List<String> paramNames, int mandatoryParamCount) {
            this.paramNames = ImmutableList.copyOf(paramNames);
            this.mandatoryParamCount = mandatoryParamCount;
            Preconditions.checkArgument(
                    mandatoryParamCount >= 0 && mandatoryParamCount <= paramNames.size());
        }

        @Override
        public SqlOperandCountRange getOperandCountRange() {
            return SqlOperandCountRanges.between(mandatoryParamCount, paramNames.size());
        }

        @Override
        public List<RelDataType> paramTypes(RelDataTypeFactory typeFactory) {
            // All parameters are declared as ANY; real checking happens in checkOperandTypes.
            return Collections.nCopies(
                    paramNames.size(), typeFactory.createSqlType(SqlTypeName.ANY));
        }

        @Override
        public List<String> paramNames() {
            return paramNames;
        }

        @Override
        public Consistency getConsistency() {
            return Consistency.NONE;
        }

        @Override
        public boolean isOptional(int i) {
            // NOTE(review): exclusive lower bound (i > min) mirrors the Calcite implementation
            // this class was copied from — confirm the intended off-by-one semantics.
            return i > getOperandCountRange().getMin() && i <= getOperandCountRange().getMax();
        }

        // Uniform "fail or return false" helper so callers can honor throwOnFailure.
        boolean throwValidationSignatureErrorOrReturnFalse(
                SqlCallBinding callBinding, boolean throwOnFailure) {
            if (throwOnFailure) {
                throw callBinding.newValidationSignatureError();
            } else {
                return false;
            }
        }

        // If a deferred validation exception is present, either throw it or report failure.
        @SuppressWarnings("OptionalUsedAsFieldOrParameterType")
        boolean throwExceptionOrReturnFalse(Optional<RuntimeException> e, boolean throwOnFailure) {
            if (e.isPresent()) {
                if (throwOnFailure) {
                    throw e.get();
                } else {
                    return false;
                }
            } else {
                return true;
            }
        }

        /**
         * Checks whether the heading operands are in the form {@code (ROW, DESCRIPTOR, DESCRIPTOR
         * ..., other params)}, returning whether successful, and throwing if any columns are not
         * found.
         *
         * @param callBinding The call binding
         * @param descriptorCount The number of descriptors following the first operand (e.g. the
         *     table)
         * @return true if validation passes; throws if any columns are not found
         */
        boolean checkTableAndDescriptorOperands(SqlCallBinding callBinding, int descriptorCount) {
            final SqlNode operand0 = callBinding.operand(0);
            final SqlValidator validator = callBinding.getValidator();
            final RelDataType type = validator.getValidatedNodeType(operand0);
            if (type.getSqlTypeName() != SqlTypeName.ROW) {
                return false;
            }
            for (int i = 1; i < descriptorCount + 1; i++) {
                final SqlNode operand = callBinding.operand(i);
                if (operand.getKind() != SqlKind.DESCRIPTOR) {
                    return false;
                }
                validateColumnNames(
                        validator, type.getFieldNames(), ((SqlCall) operand).getOperandList());
            }
            return true;
        }

        /**
         * Checks whether the type that the operand of time col descriptor refers to is valid.
         *
         * @param callBinding The call binding
         * @param pos The position of the descriptor at the operands of the call
         * @return {@code Optional.empty()} if validation passes, otherwise an exception describing
         *     the failure (a {@link ValidationException} for a non-time-attribute column, or an
         *     {@link IllegalArgumentException} for an unknown column)
         */
        Optional<RuntimeException> checkTimeColumnDescriptorOperand(
                SqlCallBinding callBinding, int pos) {
            SqlValidator validator = callBinding.getValidator();
            SqlNode operand0 = callBinding.operand(0);
            RelDataType type = validator.getValidatedNodeType(operand0);
            List<SqlNode> operands = ((SqlCall) callBinding.operand(pos)).getOperandList();
            SqlIdentifier identifier = (SqlIdentifier) operands.get(0);
            String columnName = identifier.getSimple();
            SqlNameMatcher matcher = validator.getCatalogReader().nameMatcher();
            for (RelDataTypeField field : type.getFieldList()) {
                if (matcher.matches(field.getName(), columnName)) {
                    if (FlinkTypeFactory.isTimeIndicatorType(field.getType())) {
                        return Optional.empty();
                    } else {
                        ValidationException exception =
                                new ValidationException(
                                        String.format(
                                                "The window function %s requires the timecol is a time attribute type, but is %s.",
                                                callBinding
                                                        .getOperator()
                                                        .getAllowedSignatures(),
                                                field.getType()));
                        return Optional.of(exception);
                    }
                }
            }
            IllegalArgumentException error =
                    new IllegalArgumentException(
                            String.format(
                                    "Can't find the time attribute field '%s' in the input schema %s.",
                                    columnName, type.getFullTypeString()));
            return Optional.of(error);
        }

        /**
         * Checks whether the operands starting from position {@code startPos} are all of type
         * {@code INTERVAL}, returning whether successful.
         *
         * @param callBinding The call binding
         * @param startPos The start position to validate (starting index is 0)
         * @return true if validation passes
         */
        boolean checkIntervalOperands(SqlCallBinding callBinding, int startPos) {
            final SqlValidator validator = callBinding.getValidator();
            for (int i = startPos; i < callBinding.getOperandCount(); i++) {
                final RelDataType type = validator.getValidatedNodeType(callBinding.operand(i));
                if (!SqlTypeUtil.isInterval(type)) {
                    return false;
                }
            }
            return true;
        }

        // Throws a context exception at the offending identifier if any name is not a column of
        // the input table.
        void validateColumnNames(
                SqlValidator validator, List<String> fieldNames, List<SqlNode> columnNames) {
            final SqlNameMatcher matcher = validator.getCatalogReader().nameMatcher();
            for (SqlNode columnName : columnNames) {
                final String name = ((SqlIdentifier) columnName).getSimple();
                if (matcher.indexOf(fieldNames, name) < 0) {
                    throw SqlUtil.newContextException(
                            columnName.getParserPosition(), RESOURCE.unknownIdentifier(name));
                }
            }
        }
    }
}
/*
 * Copyright (C) 2008 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.collect;

import static com.google.common.base.Preconditions.checkNotNull;

import com.google.common.annotations.GwtCompatible;
import com.google.common.primitives.Ints;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import javax.annotation.Nullable;

/**
 * An immutable hash-based multiset. Does not permit null elements.
 *
 * <p>Its iterator orders elements according to the first appearance of the
 * element among the items passed to the factory method or builder. When the
 * multiset contains multiple instances of an element, those instances are
 * consecutive in the iteration order.
 *
 * <p>See the Guava User Guide article on <a href=
 * "http://code.google.com/p/guava-libraries/wiki/ImmutableCollectionsExplained">
 * immutable collections</a>.
 *
 * @author Jared Levy
 * @author Louis Wasserman
 * @since 2.0 (imported from Google Collections Library)
 */
@GwtCompatible(serializable = true)
@SuppressWarnings("serial") // we're overriding default serialization
// TODO(user): write an efficient asList() implementation
public abstract class ImmutableMultiset<E> extends ImmutableCollection<E>
    implements Multiset<E> {

  /**
   * Returns the empty immutable multiset.
   */
  @SuppressWarnings("unchecked") // all supported methods are covariant
  public static <E> ImmutableMultiset<E> of() {
    return (ImmutableMultiset<E>) EmptyImmutableMultiset.INSTANCE;
  }

  /**
   * Returns an immutable multiset containing a single element.
   *
   * @throws NullPointerException if {@code element} is null
   * @since 6.0 (source-compatible since 2.0)
   */
  @SuppressWarnings("unchecked") // generic array created but never written
  public static <E> ImmutableMultiset<E> of(E element) {
    return copyOfInternal(element);
  }

  /**
   * Returns an immutable multiset containing the given elements, in order.
   *
   * @throws NullPointerException if any element is null
   * @since 6.0 (source-compatible since 2.0)
   */
  @SuppressWarnings("unchecked") // generic varargs array never written
  public static <E> ImmutableMultiset<E> of(E e1, E e2) {
    return copyOfInternal(e1, e2);
  }

  /**
   * Returns an immutable multiset containing the given elements, in order.
   *
   * @throws NullPointerException if any element is null
   * @since 6.0 (source-compatible since 2.0)
   */
  @SuppressWarnings("unchecked") // generic varargs array never written
  public static <E> ImmutableMultiset<E> of(E e1, E e2, E e3) {
    return copyOfInternal(e1, e2, e3);
  }

  /**
   * Returns an immutable multiset containing the given elements, in order.
   *
   * @throws NullPointerException if any element is null
   * @since 6.0 (source-compatible since 2.0)
   */
  @SuppressWarnings("unchecked") // generic varargs array never written
  public static <E> ImmutableMultiset<E> of(E e1, E e2, E e3, E e4) {
    return copyOfInternal(e1, e2, e3, e4);
  }

  /**
   * Returns an immutable multiset containing the given elements, in order.
   *
   * @throws NullPointerException if any element is null
   * @since 6.0 (source-compatible since 2.0)
   */
  @SuppressWarnings("unchecked") // generic varargs array never written
  public static <E> ImmutableMultiset<E> of(E e1, E e2, E e3, E e4, E e5) {
    return copyOfInternal(e1, e2, e3, e4, e5);
  }

  /**
   * Returns an immutable multiset containing the given elements, in order.
   *
   * @throws NullPointerException if any element is null
   * @since 6.0 (source-compatible since 2.0)
   */
  @SuppressWarnings("unchecked") // generic varargs array never written
  public static <E> ImmutableMultiset<E> of(
      E e1, E e2, E e3, E e4, E e5, E e6, E... others) {
    // Gather the six fixed arguments plus the varargs into one list, then defer to copyOf.
    int size = others.length + 6;
    List<E> all = new ArrayList<E>(size);
    Collections.addAll(all, e1, e2, e3, e4, e5, e6);
    Collections.addAll(all, others);
    return copyOf(all);
  }

  /**
   * Returns an immutable multiset containing the given elements.
   *
   * <p>The multiset is ordered by the first occurrence of each element. For
   * example, {@code ImmutableMultiset.copyOf([2, 3, 1, 3])} yields a multiset
   * with elements in the order {@code 2, 3, 3, 1}.
   *
   * @throws NullPointerException if any of {@code elements} is null
   * @since 6.0
   */
  public static <E> ImmutableMultiset<E> copyOf(E[] elements) {
    return copyOf(Arrays.asList(elements));
  }

  /**
   * Returns an immutable multiset containing the given elements.
   *
   * <p>The multiset is ordered by the first occurrence of each element. For
   * example, {@code ImmutableMultiset.copyOf(Arrays.asList(2, 3, 1, 3))} yields
   * a multiset with elements in the order {@code 2, 3, 3, 1}.
   *
   * <p>Despite the method name, this method attempts to avoid actually copying
   * the data when it is safe to do so. The exact circumstances under which a
   * copy will or will not be performed are undocumented and subject to change.
   *
   * <p><b>Note:</b> Despite what the method name suggests, if {@code elements}
   * is an {@code ImmutableMultiset}, no copy will actually be performed, and
   * the given multiset itself will be returned.
   *
   * @throws NullPointerException if any of {@code elements} is null
   */
  public static <E> ImmutableMultiset<E> copyOf(
      Iterable<? extends E> elements) {
    if (elements instanceof ImmutableMultiset) {
      @SuppressWarnings("unchecked") // all supported methods are covariant
      ImmutableMultiset<E> result = (ImmutableMultiset<E>) elements;
      if (!result.isPartialView()) {
        // Already an immutable multiset backed by its own data; reuse it as-is.
        return result;
      }
    }
    // Collect counts first (reusing an existing Multiset view when possible), then copy once.
    Multiset<? extends E> multiset = (elements instanceof Multiset)
        ? Multisets.cast(elements)
        : LinkedHashMultiset.create(elements);
    return copyOfInternal(multiset);
  }

  // Varargs adapter for the fixed-arity of(...) factories above.
  private static <E> ImmutableMultiset<E> copyOfInternal(E... elements) {
    return copyOf(Arrays.asList(elements));
  }

  // Copies by way of the multiset's entry set so each distinct element is handled once.
  private static <E> ImmutableMultiset<E> copyOfInternal(
      Multiset<? extends E> multiset) {
    return copyFromEntries(multiset.entrySet());
  }

  /**
   * Builds an immutable multiset from (element, count) entries. Entries with a
   * non-positive count are skipped; the total size is accumulated in a long and
   * saturated to int to guard against overflow.
   */
  static <E> ImmutableMultiset<E> copyFromEntries(
      Collection<? extends Entry<? extends E>> entries) {
    long size = 0;
    ImmutableMap.Builder<E, Integer> builder = ImmutableMap.builder();
    for (Entry<? extends E> entry : entries) {
      int count = entry.getCount();
      if (count > 0) {
        // Since ImmutableMap.Builder throws an NPE if an element is null, no
        // other null checks are needed.
        builder.put(entry.getElement(), count);
        size += count;
      }
    }

    if (size == 0) {
      return of();
    }
    return new RegularImmutableMultiset<E>(
        builder.build(), Ints.saturatedCast(size));
  }

  /**
   * Returns an immutable multiset containing the given elements.
   *
   * <p>The multiset is ordered by the first occurrence of each element. For
   * example,
   * {@code ImmutableMultiset.copyOf(Arrays.asList(2, 3, 1, 3).iterator())}
   * yields a multiset with elements in the order {@code 2, 3, 3, 1}.
   *
   * @throws NullPointerException if any of {@code elements} is null
   */
  public static <E> ImmutableMultiset<E> copyOf(
      Iterator<? extends E> elements) {
    Multiset<E> multiset = LinkedHashMultiset.create();
    Iterators.addAll(multiset, elements);
    return copyOfInternal(multiset);
  }

  ImmutableMultiset() {}

  /**
   * Iterates over the entry set, emitting each element {@code count} times so
   * that equal elements appear consecutively.
   */
  @Override public UnmodifiableIterator<E> iterator() {
    final Iterator<Entry<E>> entryIterator = entrySet().iterator();
    return new UnmodifiableIterator<E>() {
      int remaining;   // occurrences of `element` still to emit
      E element;       // element currently being emitted

      @Override
      public boolean hasNext() {
        return (remaining > 0) || entryIterator.hasNext();
      }

      @Override
      public E next() {
        if (remaining <= 0) {
          // Advance to the next distinct element; propagates NoSuchElementException at the end.
          Entry<E> entry = entryIterator.next();
          element = entry.getElement();
          remaining = entry.getCount();
        }
        remaining--;
        return element;
      }
    };
  }

  @Override public boolean contains(@Nullable Object object) {
    return count(object) > 0;
  }

  @Override public boolean containsAll(Collection<?> targets) {
    return elementSet().containsAll(targets);
  }

  /**
   * Guaranteed to throw an exception and leave the collection unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @Deprecated
  @Override
  public final int add(E element, int occurrences) {
    throw new UnsupportedOperationException();
  }

  /**
   * Guaranteed to throw an exception and leave the collection unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @Deprecated
  @Override
  public final int remove(Object element, int occurrences) {
    throw new UnsupportedOperationException();
  }

  /**
   * Guaranteed to throw an exception and leave the collection unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @Deprecated
  @Override
  public final int setCount(E element, int count) {
    throw new UnsupportedOperationException();
  }

  /**
   * Guaranteed to throw an exception and leave the collection unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @Deprecated
  @Override
  public final boolean setCount(E element, int oldCount, int newCount) {
    throw new UnsupportedOperationException();
  }

  // Implements the Multiset.equals contract: same size and same count for every element.
  @Override public boolean equals(@Nullable Object object) {
    if (object == this) {
      return true;
    }
    if (object instanceof Multiset) {
      Multiset<?> that = (Multiset<?>) object;
      if (this.size() != that.size()) {
        return false;
      }
      for (Entry<?> entry : that.entrySet()) {
        if (count(entry.getElement()) != entry.getCount()) {
          return false;
        }
      }
      return true;
    }
    return false;
  }

  // Per the Multiset contract, the hash code is that of the entry set.
  @Override public int hashCode() {
    return Sets.hashCodeImpl(entrySet());
  }

  @Override public String toString() {
    return entrySet().toString();
  }

  // Lazily-initialized view; the benign race (two threads may each build one) is acceptable
  // because createEntrySet() is deterministic. Transient: serialization goes via SerializedForm.
  private transient ImmutableSet<Entry<E>> entrySet;

  @Override
  public ImmutableSet<Entry<E>> entrySet() {
    ImmutableSet<Entry<E>> es = entrySet;
    return (es == null) ? (entrySet = createEntrySet()) : es;
  }

  // Subclasses supply the concrete entry-set view.
  abstract ImmutableSet<Entry<E>> createEntrySet();

  /** Skeletal entry-set view delegating element counts to the enclosing multiset. */
  abstract class EntrySet extends ImmutableSet<Entry<E>> {
    @Override boolean isPartialView() {
      return ImmutableMultiset.this.isPartialView();
    }

    @Override public boolean contains(Object o) {
      if (o instanceof Entry) {
        Entry<?> entry = (Entry<?>) o;
        if (entry.getCount() <= 0) {
          return false;
        }
        int count = count(entry.getElement());
        return count == entry.getCount();
      }
      return false;
    }

    /*
     * TODO(hhchan): Revert once we have a separate, manual emulation of this
     * class.
     */
    @Override public Object[] toArray() {
      Object[] newArray = new Object[size()];
      return toArray(newArray);
    }

    /*
     * TODO(hhchan): Revert once we have a separate, manual emulation of this
     * class.
     */
    @Override public <T> T[] toArray(T[] other) {
      int size = size();
      if (other.length < size) {
        other = ObjectArrays.newArray(other, size);
      } else if (other.length > size) {
        // Per the Collection.toArray(T[]) contract, null-terminate a larger array.
        other[size] = null;
      }

      // Writes will produce ArrayStoreException when the toArray() doc requires
      Object[] otherAsObjectArray = other;
      int index = 0;
      for (Entry<?> element : this) {
        otherAsObjectArray[index++] = element;
      }
      return other;
    }

    @Override public int hashCode() {
      return ImmutableMultiset.this.hashCode();
    }

    // We can't label this with @Override, because it doesn't override anything
    // in the GWT emulated version.
    // TODO(cpovirk): try making all copies of this method @GwtIncompatible instead
    Object writeReplace() {
      return new EntrySetSerializedForm<E>(ImmutableMultiset.this);
    }

    private static final long serialVersionUID = 0;
  }

  // Serialized proxy for the entry-set view: round-trips through the owning multiset.
  static class EntrySetSerializedForm<E> implements Serializable {
    final ImmutableMultiset<E> multiset;

    EntrySetSerializedForm(ImmutableMultiset<E> multiset) {
      this.multiset = multiset;
    }

    Object readResolve() {
      return multiset.entrySet();
    }
  }

  // Serialized proxy for the multiset itself: stores parallel element/count arrays and
  // rebuilds via copyOf on deserialization, preserving first-occurrence order.
  private static class SerializedForm implements Serializable {
    final Object[] elements;
    final int[] counts;

    SerializedForm(Multiset<?> multiset) {
      int distinct = multiset.entrySet().size();
      elements = new Object[distinct];
      counts = new int[distinct];
      int i = 0;
      for (Entry<?> entry : multiset.entrySet()) {
        elements[i] = entry.getElement();
        counts[i] = entry.getCount();
        i++;
      }
    }

    Object readResolve() {
      LinkedHashMultiset<Object> multiset =
          LinkedHashMultiset.create(elements.length);
      for (int i = 0; i < elements.length; i++) {
        multiset.add(elements[i], counts[i]);
      }
      return ImmutableMultiset.copyOf(multiset);
    }

    private static final long serialVersionUID = 0;
  }

  // We can't label this with @Override, because it doesn't override anything
  // in the GWT emulated version.
  Object writeReplace() {
    return new SerializedForm(this);
  }

  /**
   * Returns a new builder. The generated builder is equivalent to the builder
   * created by the {@link Builder} constructor.
   */
  public static <E> Builder<E> builder() {
    return new Builder<E>();
  }

  /**
   * A builder for creating immutable multiset instances, especially {@code
   * public static final} multisets ("constant multisets"). Example:
   * <pre> {@code
   *
   *   public static final ImmutableMultiset<Bean> BEANS =
   *       new ImmutableMultiset.Builder<Bean>()
   *           .addCopies(Bean.COCOA, 4)
   *           .addCopies(Bean.GARDEN, 6)
   *           .addCopies(Bean.RED, 8)
   *           .addCopies(Bean.BLACK_EYED, 10)
   *           .build();}</pre>
   *
   * Builder instances can be reused; it is safe to call {@link #build} multiple
   * times to build multiple multisets in series.
   *
   * @since 2.0 (imported from Google Collections Library)
   */
  public static class Builder<E> extends ImmutableCollection.Builder<E> {
    // Mutable accumulator; build() snapshots it via copyOf.
    final Multiset<E> contents;

    /**
     * Creates a new builder. The returned builder is equivalent to the builder
     * generated by {@link ImmutableMultiset#builder}.
     */
    public Builder() {
      this(LinkedHashMultiset.<E>create());
    }

    Builder(Multiset<E> contents) {
      this.contents = contents;
    }

    /**
     * Adds {@code element} to the {@code ImmutableMultiset}.
     *
     * @param element the element to add
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code element} is null
     */
    @Override public Builder<E> add(E element) {
      contents.add(checkNotNull(element));
      return this;
    }

    /**
     * Adds a number of occurrences of an element to this {@code
     * ImmutableMultiset}.
     *
     * @param element the element to add
     * @param occurrences the number of occurrences of the element to add. May
     *     be zero, in which case no change will be made.
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code element} is null
     * @throws IllegalArgumentException if {@code occurrences} is negative, or
     *     if this operation would result in more than {@link Integer#MAX_VALUE}
     *     occurrences of the element
     */
    public Builder<E> addCopies(E element, int occurrences) {
      contents.add(checkNotNull(element), occurrences);
      return this;
    }

    /**
     * Adds or removes the necessary occurrences of an element such that the
     * element attains the desired count.
     *
     * @param element the element to add or remove occurrences of
     * @param count the desired count of the element in this multiset
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code element} is null
     * @throws IllegalArgumentException if {@code count} is negative
     */
    public Builder<E> setCount(E element, int count) {
      contents.setCount(checkNotNull(element), count);
      return this;
    }

    /**
     * Adds each element of {@code elements} to the {@code ImmutableMultiset}.
     *
     * @param elements the elements to add
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code elements} is null or contains a
     *     null element
     */
    @Override public Builder<E> add(E... elements) {
      super.add(elements);
      return this;
    }

    /**
     * Adds each element of {@code elements} to the {@code ImmutableMultiset}.
     *
     * @param elements the {@code Iterable} to add to the {@code
     *     ImmutableMultiset}
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code elements} is null or contains a
     *     null element
     */
    @Override public Builder<E> addAll(Iterable<? extends E> elements) {
      if (elements instanceof Multiset) {
        // Bulk path: add per-entry so each distinct element is handled once.
        Multiset<? extends E> multiset = Multisets.cast(elements);
        for (Entry<? extends E> entry : multiset.entrySet()) {
          addCopies(entry.getElement(), entry.getCount());
        }
      } else {
        super.addAll(elements);
      }
      return this;
    }

    /**
     * Adds each element of {@code elements} to the {@code ImmutableMultiset}.
     *
     * @param elements the elements to add to the {@code ImmutableMultiset}
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code elements} is null or contains a
     *     null element
     */
    @Override public Builder<E> addAll(Iterator<? extends E> elements) {
      super.addAll(elements);
      return this;
    }

    /**
     * Returns a newly-created {@code ImmutableMultiset} based on the contents
     * of the {@code Builder}.
     */
    @Override public ImmutableMultiset<E> build() {
      return copyOf(contents);
    }
  }
}
/*
 * Druid - a distributed column store.
 * Copyright 2012 - 2015 Metamarkets Group Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.druid.query.timeseries;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import io.druid.query.Druids;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerTestHelper;
import io.druid.query.QueryToolChest;
import io.druid.query.Result;
import io.druid.query.TableDataSource;
import io.druid.query.UnionDataSource;
import io.druid.query.UnionQueryRunner;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.segment.TestHelper;
import org.joda.time.DateTime;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Tests timeseries queries run against a union data source: both end-to-end
 * through the parameterized union query runners, and the merge behavior of
 * {@link UnionQueryRunner} with hand-crafted per-table result sequences.
 */
@RunWith(Parameterized.class)
public class TimeSeriesUnionQueryRunnerTest
{
  private final QueryRunner runner;

  public TimeSeriesUnionQueryRunnerTest(QueryRunner runner)
  {
    this.runner = runner;
  }

  /** Builds one runner per segment type over the standard union data source. */
  @Parameterized.Parameters
  public static Collection<?> constructorFeeder() throws IOException
  {
    return QueryRunnerTestHelper.makeUnionQueryRunners(
        new TimeseriesQueryRunnerFactory(
            new TimeseriesQueryQueryToolChest(
                QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()
            ),
            new TimeseriesQueryEngine(),
            QueryRunnerTestHelper.NOOP_QUERYWATCHER
        ),
        QueryRunnerTestHelper.unionDataSource
    );
  }

  /**
   * A union over N copies of the same table should produce N-times the
   * single-table row and sum aggregates (uniques are not additive).
   */
  @Test
  public void testUnionTimeseries()
  {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                  .dataSource(QueryRunnerTestHelper.unionDataSource)
                                  .granularity(QueryRunnerTestHelper.dayGran)
                                  .intervals(QueryRunnerTestHelper.firstToThird)
                                  .aggregators(
                                      Arrays.<AggregatorFactory>asList(
                                          QueryRunnerTestHelper.rowsCount,
                                          new LongSumAggregatorFactory(
                                              "idx",
                                              "index"
                                          ),
                                          QueryRunnerTestHelper.qualityUniques
                                      )
                                  )
                                  .build();

    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<TimeseriesResultValue>(
            new DateTime("2011-04-01"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>of("rows", 52L, "idx", 26476L, "uniques", QueryRunnerTestHelper.UNIQUES_9)
            )
        ),
        new Result<TimeseriesResultValue>(
            new DateTime("2011-04-02"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>of("rows", 52L, "idx", 23308L, "uniques", QueryRunnerTestHelper.UNIQUES_9)
            )
        )
    );

    HashMap<String, Object> context = new HashMap<String, Object>();
    Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(
        runner.run(query, context),
        Lists.<Result<TimeseriesResultValue>>newArrayList()
    );
    TestHelper.assertExpectedResults(expectedResults, results);
  }

  /**
   * Verifies that the merged union result is the day-by-day combination of
   * the two per-table sequences: days present in only one table pass through
   * unchanged, days present in both have their aggregates summed.
   */
  @Test
  public void testUnionResultMerging()
  {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                  .dataSource(
                                      new UnionDataSource(
                                          Lists.newArrayList(
                                              new TableDataSource("ds1"),
                                              new TableDataSource("ds2")
                                          )
                                      )
                                  )
                                  .granularity(QueryRunnerTestHelper.dayGran)
                                  .intervals(QueryRunnerTestHelper.firstToThird)
                                  .aggregators(
                                      Arrays.<AggregatorFactory>asList(
                                          QueryRunnerTestHelper.rowsCount,
                                          new LongSumAggregatorFactory(
                                              "idx",
                                              "index"
                                          )
                                      )
                                  )
                                  .build();

    QueryToolChest toolChest = new TimeseriesQueryQueryToolChest(
        QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()
    );
    // Raw Iterable cast is needed to satisfy UnionQueryRunner's constructor
    // signature without fighting generics in test code.
    QueryRunner mergingrunner = toolChest.mergeResults(
        new UnionQueryRunner<Result<TimeseriesResultValue>>(
            (Iterable) Arrays.asList(
                new QueryRunner<Result<TimeseriesResultValue>>()
                {
                  @Override
                  public Sequence<Result<TimeseriesResultValue>> run(
                      Query<Result<TimeseriesResultValue>> query,
                      Map<String, Object> context
                  )
                  {
                    return Sequences.simple(
                        Lists.newArrayList(
                            new Result<>(
                                new DateTime("2011-04-02"),
                                new TimeseriesResultValue(
                                    ImmutableMap.<String, Object>of(
                                        "rows", 1L,
                                        "idx", 2L
                                    )
                                )
                            ),
                            new Result<>(
                                new DateTime("2011-04-03"),
                                new TimeseriesResultValue(
                                    ImmutableMap.<String, Object>of(
                                        "rows", 3L,
                                        "idx", 4L
                                    )
                                )
                            )
                        )
                    );
                  }
                },
                new QueryRunner<Result<TimeseriesResultValue>>()
                {
                  @Override
                  public Sequence<Result<TimeseriesResultValue>> run(
                      Query<Result<TimeseriesResultValue>> query,
                      Map<String, Object> context
                  )
                  {
                    return Sequences.simple(
                        Lists.newArrayList(
                            new Result<>(
                                new DateTime("2011-04-01"),
                                new TimeseriesResultValue(
                                    ImmutableMap.<String, Object>of(
                                        "rows", 5L,
                                        "idx", 6L
                                    )
                                )
                            ),
                            new Result<>(
                                new DateTime("2011-04-02"),
                                new TimeseriesResultValue(
                                    ImmutableMap.<String, Object>of(
                                        "rows", 7L,
                                        "idx", 8L
                                    )
                                )
                            ),
                            new Result<>(
                                new DateTime("2011-04-04"),
                                new TimeseriesResultValue(
                                    ImmutableMap.<String, Object>of(
                                        "rows", 9L,
                                        "idx", 10L
                                    )
                                )
                            )
                        )
                    );
                  }
                }
            ),
            toolChest
        )
    );

    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<>(
            new DateTime("2011-04-01"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>of("rows", 5L, "idx", 6L)
            )
        ),
        new Result<>(
            new DateTime("2011-04-02"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>of("rows", 8L, "idx", 10L)
            )
        ),
        new Result<>(
            new DateTime("2011-04-03"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>of("rows", 3L, "idx", 4L)
            )
        ),
        new Result<>(
            new DateTime("2011-04-04"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>of("rows", 9L, "idx", 10L)
            )
        )
    );

    Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(
        mergingrunner.run(query, Maps.<String, Object>newHashMap()),
        Lists.<Result<TimeseriesResultValue>>newArrayList()
    );
    TestHelper.assertExpectedResults(expectedResults, results);
  }
}
/*
 * Copyright (C) 2015 Giuseppe Cardone <ippatsuman@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.gcardone.junidecode;

/**
 * Character map for Unicode characters with codepoint U+20xx (general
 * punctuation, super/subscripts, currency signs). Index into {@link #map}
 * with the low byte of the codepoint; unmapped characters yield "[?]".
 *
 * @author Giuseppe Cardone
 * @version 0.1
 */
class X20 {

    // 256 ASCII transliterations, 8 per source row; the comment at the end
    // of each row is the index (low byte of the codepoint) of its first entry.
    public static final String[] map = new String[]{
        " ", " ", " ", " ", " ", " ", " ", " ",             // 0x00: spaces
        " ", " ", " ", " ", "", "", "", "",                 // 0x08
        "-", "-", "-", "-", "--", "--", "||", "_",          // 0x10: dashes
        "'", "'", ",", "'", "\"", "\"", ",,", "\"",         // 0x18: quotes
        "+", "++", "*", "*>", ".", "..", "...", ".",        // 0x20
        "\n", "\n\n", "", "", "", "", "", " ",              // 0x28: line/para separators
        "%0", "%00", "'", "''", "'''", "`", "``", "```",    // 0x30
        "^", "<", ">", "*", "!!", "!?", "-", "_",           // 0x38
        "-", "^", "***", "--", "/", "-[", "]-", "[?]",      // 0x40
        "?!", "!?", "7", "PP", "(]", "[)", "[?]", "[?]",    // 0x48
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0x50
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0x58
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0x60
        "[?]", "[?]", "", "", "", "", "", "",               // 0x68
        "0", "", "", "", "4", "5", "6", "7",                // 0x70: superscripts
        "8", "9", "+", "-", "=", "(", ")", "n",             // 0x78
        "0", "1", "2", "3", "4", "5", "6", "7",             // 0x80: subscripts
        "8", "9", "+", "-", "=", "(", ")", "[?]",           // 0x88
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0x90
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0x98
        "ECU", "CL", "Cr", "FF", "L", "mil", "N", "Pts",    // 0xa0: currency signs
        "Rs", "W", "NS", "D", "EUR", "K", "T", "Dr",        // 0xa8
        "Pf", "P", "G", "A", "C/", "[?]", "[?]", "[?]",     // 0xb0
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0xb8
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0xc0
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0xc8
        "", "", "", "", "", "", "", "",                     // 0xd0: combining marks
        "", "", "", "", "", "", "", "",                     // 0xd8
        "", "", "", "", "[?]", "[?]", "[?]", "[?]",         // 0xe0
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0xe8
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0xf0
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]"  // 0xf8
    };
}
/* Copyright (c) 2013 OpenPlans. All rights reserved.
 * This code is licensed under the BSD New License, available at the root
 * application directory.
 */
package org.geogit.storage.mongo;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

import org.geogit.api.ObjectId;
import org.geogit.api.RevCommit;
import org.geogit.api.RevFeature;
import org.geogit.api.RevFeatureType;
import org.geogit.api.RevObject;
import org.geogit.api.RevTag;
import org.geogit.api.RevTree;
import org.geogit.repository.RepositoryConnectionException;
import org.geogit.storage.BulkOpListener;
import org.geogit.storage.ConfigDatabase;
import org.geogit.storage.ObjectDatabase;
import org.geogit.storage.ObjectInserter;
import org.geogit.storage.ObjectSerializingFactory;
import org.geogit.storage.ObjectWriter;
import org.geogit.storage.datastream.DataStreamSerializationFactory;

import com.google.common.base.Functions;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.inject.Inject;
import com.mongodb.BasicDBObject;
import com.mongodb.BasicDBObjectBuilder;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;
import com.mongodb.WriteResult;

/**
 * An Object database that uses a MongoDB server for persistence.
 *
 * Objects are stored one document per object: the hex object id under key
 * "oid" and the serialized bytes under key "serialized_object".
 *
 * @see http://mongodb.com/
 */
public class MongoObjectDatabase implements ObjectDatabase {

    private final MongoConnectionManager manager;

    protected final ConfigDatabase config;

    // Null while the database is closed; open()/close() toggle it.
    private MongoClient client = null;

    protected DB db = null;

    protected DBCollection collection = null;

    protected ObjectSerializingFactory serializers = new DataStreamSerializationFactory();

    private String collectionName;

    @Inject
    public MongoObjectDatabase(ConfigDatabase config, MongoConnectionManager manager) {
        this(config, manager, "objects");
    }

    MongoObjectDatabase(ConfigDatabase config, MongoConnectionManager manager,
            String collectionName) {
        this.config = config;
        this.manager = manager;
        this.collectionName = collectionName;
    }

    /** Deserializes an object from its stored byte representation. */
    private RevObject fromBytes(ObjectId id, byte[] buffer) {
        ByteArrayInputStream byteStream = new ByteArrayInputStream(buffer);
        RevObject result = serializers.createObjectReader().read(id, byteStream);
        return result;
    }

    /** Serializes an object to the byte representation stored in Mongo. */
    private byte[] toBytes(RevObject object) {
        ObjectWriter<RevObject> writer = serializers.createObjectWriter(object.getType());
        ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
        try {
            writer.write(object, byteStream);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return byteStream.toByteArray();
    }

    protected String getCollectionName() {
        return collectionName;
    }

    /**
     * Acquires a client connection from the manager and binds the working
     * collection. No-op if already open.
     */
    @Override
    public synchronized void open() {
        if (client != null) {
            return;
        }
        String hostname = config.get("mongo.host").get();
        int port = config.get("mongo.port", Integer.class).get();
        client = manager.acquire(new MongoAddress(hostname, port));
        db = client.getDB("geogit");
        collection = db.getCollection(getCollectionName());
        // Lookups are always by object id; make sure it's indexed.
        collection.ensureIndex("oid");
    }

    @Override
    public synchronized boolean isOpen() {
        return client != null;
    }

    @Override
    public void configure() throws RepositoryConnectionException {
        RepositoryConnectionException.StorageType.OBJECT.configure(config, "mongodb", "0.1");
    }

    @Override
    public void checkConfig() throws RepositoryConnectionException {
        RepositoryConnectionException.StorageType.OBJECT.verify(config, "mongodb", "0.1");
    }

    /** Releases the client back to the manager and clears all handles. */
    @Override
    public synchronized void close() {
        if (client != null) {
            manager.release(client);
        }
        client = null;
        db = null;
        collection = null;
    }

    @Override
    public boolean exists(ObjectId id) {
        DBObject query = new BasicDBObject();
        query.put("oid", id.toString());
        return collection.find(query).hasNext();
    }

    /**
     * Looks up all object ids starting with the given hex prefix.
     *
     * @throws IllegalArgumentException if the prefix contains non-hex characters
     */
    @Override
    public List<ObjectId> lookUp(final String partialId) {
        if (partialId.matches("[a-fA-F0-9]+")) {
            DBObject regex = new BasicDBObject();
            regex.put("$regex", "^" + partialId);
            DBObject query = new BasicDBObject();
            query.put("oid", regex);
            DBCursor cursor = collection.find(query);
            List<ObjectId> ids = new ArrayList<ObjectId>();
            while (cursor.hasNext()) {
                DBObject elem = cursor.next();
                String oid = (String) elem.get("oid");
                ids.add(ObjectId.valueOf(oid));
            }
            return ids;
        } else {
            throw new IllegalArgumentException(
                    "Prefix query must be done with hexadecimal values only");
        }
    }

    /**
     * @throws NoSuchElementException if no object with the given id exists
     */
    @Override
    public RevObject get(ObjectId id) {
        RevObject result = getIfPresent(id);
        if (result != null) {
            return result;
        } else {
            throw new NoSuchElementException("No object with id: " + id);
        }
    }

    @Override
    public <T extends RevObject> T get(ObjectId id, Class<T> clazz) {
        return clazz.cast(get(id));
    }

    /** Returns the object with the given id, or null if not found. */
    @Override
    public RevObject getIfPresent(ObjectId id) {
        DBObject query = new BasicDBObject();
        query.put("oid", id.toString());
        DBCursor results = collection.find(query);
        if (results.hasNext()) {
            DBObject result = results.next();
            return fromBytes(id, (byte[]) result.get("serialized_object"));
        } else {
            return null;
        }
    }

    @Override
    public <T extends RevObject> T getIfPresent(ObjectId id, Class<T> clazz) {
        return clazz.cast(getIfPresent(id));
    }

    @Override
    public RevTree getTree(ObjectId id) {
        return get(id, RevTree.class);
    }

    @Override
    public RevFeature getFeature(ObjectId id) {
        return get(id, RevFeature.class);
    }

    @Override
    public RevFeatureType getFeatureType(ObjectId id) {
        return get(id, RevFeatureType.class);
    }

    @Override
    public RevCommit getCommit(ObjectId id) {
        return get(id, RevCommit.class);
    }

    @Override
    public RevTag getTag(ObjectId id) {
        return get(id, RevTag.class);
    }

    /** Removes one batch of ids with a single $in query; returns the count removed. */
    private long deleteChunk(List<ObjectId> ids) {
        List<String> idStrings = Lists.transform(ids, Functions.toStringFunction());
        DBObject query = BasicDBObjectBuilder.start().push("oid").add("$in", idStrings).pop().get();
        WriteResult result = collection.remove(query);
        return result.getN();
    }

    @Override
    public boolean delete(ObjectId id) {
        DBObject query = new BasicDBObject();
        query.put("oid", id.toString());
        return collection.remove(query).getLastError().ok();
    }

    @Override
    public long deleteAll(Iterator<ObjectId> ids) {
        return deleteAll(ids, BulkOpListener.NOOP_LISTENER);
    }

    @Override
    public long deleteAll(Iterator<ObjectId> ids, BulkOpListener listener) {
        // Batch deletes to bound the size of each $in query.
        Iterator<List<ObjectId>> chunks = Iterators.partition(ids, 500);
        long count = 0;
        while (chunks.hasNext()) {
            count += deleteChunk(chunks.next());
        }
        return count;
    }

    /** Upserts the object; returns true when the write was acknowledged ok. */
    @Override
    public boolean put(final RevObject object) {
        DBObject query = new BasicDBObject();
        query.put("oid", object.getId().toString());
        DBObject record = new BasicDBObject();
        record.put("oid", object.getId().toString());
        record.put("serialized_object", toBytes(object));
        return collection.update(query, record, true, false).getLastError().ok();
    }

    @Override
    public void putAll(final Iterator<? extends RevObject> objects) {
        putAll(objects, BulkOpListener.NOOP_LISTENER);
    }

    @Override
    public void putAll(Iterator<? extends RevObject> objects, BulkOpListener listener) {
        while (objects.hasNext()) {
            RevObject object = objects.next();
            boolean put = put(object);
            if (put) {
                listener.inserted(object.getId(), null);
            } else {
                listener.found(object.getId(), null);
            }
        }
    }

    @Override
    public ObjectInserter newObjectInserter() {
        return new ObjectInserter(this);
    }

    @Override
    public Iterator<RevObject> getAll(Iterable<ObjectId> ids) {
        return getAll(ids, BulkOpListener.NOOP_LISTENER);
    }

    /**
     * Lazily fetches the given objects one id at a time; ids that are not
     * found are reported to the listener and skipped.
     */
    @Override
    public Iterator<RevObject> getAll(final Iterable<ObjectId> ids, final BulkOpListener listener) {
        return new AbstractIterator<RevObject>() {
            final Iterator<ObjectId> queryIds = ids.iterator();

            @Override
            protected RevObject computeNext() {
                RevObject obj = null;
                while (obj == null) {
                    if (!queryIds.hasNext()) {
                        return endOfData();
                    }
                    ObjectId id = queryIds.next();
                    obj = getIfPresent(id);
                    if (obj == null) {
                        listener.notFound(id);
                    } else {
                        listener.found(obj.getId(), null);
                    }
                }
                // The loop only exits with a non-null obj; the former
                // "obj == null ? endOfData() : obj" ternary was dead code.
                return obj;
            }
        };
    }

    public DBCollection getCollection(String name) {
        return db.getCollection(name);
    }
}
/* * Copyright 2016 Bjoern Bilger * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jrestless.aws.gateway.handler; import static java.util.Collections.singletonList; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Type; import java.net.URI; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Consumer; import javax.ws.rs.core.GenericType; import javax.ws.rs.core.Response.Status; import org.glassfish.jersey.internal.inject.InjectionManager; import org.glassfish.jersey.internal.util.collection.Ref; import org.glassfish.jersey.server.ContainerRequest; import org.glassfish.jersey.server.spi.RequestScopedInitializer; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import com.amazonaws.services.lambda.runtime.Context; import com.google.common.collect.ImmutableList; import 
com.google.common.collect.ImmutableMap; import com.jrestless.aws.AbstractLambdaContextReferencingBinder; import com.jrestless.aws.gateway.io.DefaultGatewayRequest; import com.jrestless.aws.gateway.io.GatewayBinaryResponseFilter; import com.jrestless.aws.gateway.io.GatewayRequest; import com.jrestless.aws.gateway.io.GatewayResponse; import com.jrestless.aws.gateway.util.GatewayRequestBuilder; import com.jrestless.core.container.JRestlessHandlerContainer; import com.jrestless.core.container.handler.SimpleRequestHandler.SimpleResponseWriter; import com.jrestless.core.container.io.JRestlessContainerRequest; import com.jrestless.core.container.io.RequestAndBaseUri; public class GatewayRequestHandlerTest { private static final Type GATEWAY_REQUEST_TYPE = (new GenericType<Ref<GatewayRequest>>() { }).getType(); private static final String TEST_AWS_DOMAIN = "0123456789.execute-api.eu-central-1.amazonaws.com"; private static final String TEST_AWS_DOMAIN_WITH_SCHEME = "https://" + TEST_AWS_DOMAIN; private static final String TEST_CUSTOM_DOMAIN = "api.example.com"; private static final String TEST_CUSTOM_DOMAIN_WITH_SCHEME = "https://" + TEST_CUSTOM_DOMAIN; private JRestlessHandlerContainer<JRestlessContainerRequest> container; private GatewayRequestHandlerImpl gatewayHandler; @SuppressWarnings("unchecked") @BeforeEach public void setup() { container = mock(JRestlessHandlerContainer.class); gatewayHandler = new GatewayRequestHandlerImpl(); gatewayHandler.init(container); gatewayHandler.start(); } @AfterEach public void tearDown() { gatewayHandler.stop(); } @SuppressWarnings("unchecked") @Test public void delegateRequest_ValidRequestAndReferencesGiven_ShouldSetReferencesOnRequestInitialization() { Context context = mock(Context.class); DefaultGatewayRequest request = new DefaultGatewayRequest(); request.setPath("/"); request.setHttpMethod("GET"); RequestScopedInitializer requestScopedInitializer = getSetRequestScopedInitializer(context, request); Ref<GatewayRequest> 
gatewayRequestRef = mock(Ref.class); Ref<Context> contextRef = mock(Ref.class); InjectionManager injectionManager = mock(InjectionManager.class); when(injectionManager.getInstance(GATEWAY_REQUEST_TYPE)).thenReturn(gatewayRequestRef); when(injectionManager.getInstance(AbstractLambdaContextReferencingBinder.LAMBDA_CONTEXT_TYPE)).thenReturn(contextRef); requestScopedInitializer.initialize(injectionManager); verify(gatewayRequestRef).set(request); verify(contextRef).set(context); } @Test public void delegateRequest_ValidRequestAndNoReferencesGiven_ShouldNotFailOnRequestInitialization() { Context context = mock(Context.class); DefaultGatewayRequest request = new DefaultGatewayRequest(); request.setPath("/"); request.setHttpMethod("GET"); RequestScopedInitializer requestScopedInitializer = getSetRequestScopedInitializer(context, request); InjectionManager injectionManager = mock(InjectionManager.class); requestScopedInitializer.initialize(injectionManager); } @SuppressWarnings("unchecked") private RequestScopedInitializer getSetRequestScopedInitializer(Context context, GatewayRequest request) { GatewayRequestAndLambdaContext reqAndContext = new GatewayRequestAndLambdaContext(request, context); ArgumentCaptor<Consumer<ContainerRequest>> containerEnhancerCaptor = ArgumentCaptor.forClass(Consumer.class); gatewayHandler.delegateRequest(reqAndContext); verify(container).handleRequest(any(), any(), any(), containerEnhancerCaptor.capture()); ContainerRequest containerRequest = mock(ContainerRequest.class); containerEnhancerCaptor.getValue().accept(containerRequest); ArgumentCaptor<RequestScopedInitializer> requestScopedInitializerCaptor = ArgumentCaptor.forClass(RequestScopedInitializer.class); verify(containerRequest).setRequestScopedInitializer(requestScopedInitializerCaptor.capture()); return requestScopedInitializerCaptor.getValue(); } @Test public void createContainerRequest_NoPathGiven_ShouldThrowNpe() { GatewayRequestAndLambdaContext request = createMinimalRequest(); 
((DefaultGatewayRequest) request.getGatewayRequest()).setPath(null); assertThrows(NullPointerException.class, () -> gatewayHandler.createContainerRequest(request)); } @Test public void createContainerRequest_NoHttpMethodGiven_ShouldThrowNpe() { GatewayRequestAndLambdaContext request = createMinimalRequest(); ((DefaultGatewayRequest) request.getGatewayRequest()).setHttpMethod(null); assertThrows(NullPointerException.class, () -> gatewayHandler.createContainerRequest(request)); } @Test public void createContainerRequest_NoBodyGiven_ShouldUseEmptyBaos() { GatewayRequestAndLambdaContext request = createMinimalRequest(); JRestlessContainerRequest containerRequest = gatewayHandler.createContainerRequest(request); InputStream is = containerRequest.getEntityStream(); assertEquals(ByteArrayInputStream.class, is.getClass()); assertEquals("", toString((ByteArrayInputStream) is)); } @Test public void createContainerRequest_BodyGiven_ShouldUseBody() { GatewayRequestAndLambdaContext request = createMinimalRequest(); ((DefaultGatewayRequest) request.getGatewayRequest()).setBody("abc"); JRestlessContainerRequest containerRequest = gatewayHandler.createContainerRequest(request); InputStream is = containerRequest.getEntityStream(); assertEquals(ByteArrayInputStream.class, is.getClass()); assertEquals("abc", toString((ByteArrayInputStream) is)); } @Test public void createContainerRequest_HttpMethodGiven_ShouldUseHttpMethod() { GatewayRequestAndLambdaContext request = createMinimalRequest(); ((DefaultGatewayRequest) request.getGatewayRequest()).setHttpMethod("POST"); JRestlessContainerRequest containerRequest = gatewayHandler.createContainerRequest(request); assertEquals("POST", containerRequest.getHttpMethod()); } @Test public void createContainerRequest_PathWithNoQueryParamsGiven_ShouldUsePathAsRequestUri() { GatewayRequestAndLambdaContext request = createMinimalRequest(); ((DefaultGatewayRequest) request.getGatewayRequest()).setPath("/abc"); JRestlessContainerRequest 
containerRequest = gatewayHandler.createContainerRequest(request); assertEquals(URI.create("/abc"), containerRequest.getRequestUri()); } @Test public void createContainerRequest_PathWithOneQueryParamsGiven_ShouldUseQueryParamsInRequestUri() { GatewayRequestAndLambdaContext request = createMinimalRequest(); ((DefaultGatewayRequest) request.getGatewayRequest()).setPath("/abc"); ((DefaultGatewayRequest) request.getGatewayRequest()).setQueryStringParameters(ImmutableMap.of("a_k", "a_v")); JRestlessContainerRequest containerRequest = gatewayHandler.createContainerRequest(request); assertEquals(URI.create("/abc?a_k=a_v"), containerRequest.getRequestUri()); } @Test public void createContainerRequest_PathWithMultipleQueryParamsGiven_ShouldUseQueryParamsInRequestUri() { GatewayRequestAndLambdaContext request = createMinimalRequest(); ((DefaultGatewayRequest) request.getGatewayRequest()).setPath("/abc"); ((DefaultGatewayRequest) request.getGatewayRequest()).setQueryStringParameters(ImmutableMap.of("a_k", "a_v", "b_k", "b_v")); JRestlessContainerRequest containerRequest = gatewayHandler.createContainerRequest(request); assertEquals(URI.create("/abc?a_k=a_v&b_k=b_v"), containerRequest.getRequestUri()); } @Test public void createContainerRequest_HeadersGiven_ShouldUseHeaders() { GatewayRequestAndLambdaContext request = createMinimalRequest(); ((DefaultGatewayRequest) request.getGatewayRequest()).setHeaders(ImmutableMap.of("a_k", "a_v", "b_k", "b_v")); JRestlessContainerRequest containerRequest = gatewayHandler.createContainerRequest(request); assertEquals(ImmutableMap.of("a_k", singletonList("a_v"), "b_k", singletonList("b_v")), containerRequest.getHeaders()); } @Test public void createContainerRequest_NullHeaderKeyGiven_ShouldFilterHeader() { GatewayRequestAndLambdaContext request = createMinimalRequest(); Map<String, String> headers = new HashMap<>(); headers.put(null, "a_v"); headers.put("b_k", "b_v"); ((DefaultGatewayRequest) 
request.getGatewayRequest()).setHeaders(headers);
		JRestlessContainerRequest containerRequest = gatewayHandler.createContainerRequest(request);
		assertEquals(ImmutableMap.of("b_k", singletonList("b_v")), containerRequest.getHeaders());
	}

	@Test
	public void createContainerRequest_NullHeaderValueGiven_ShouldFilterHeader() {
		GatewayRequestAndLambdaContext request = createMinimalRequest();
		Map<String, String> headers = new HashMap<>();
		// a_k carries a null value and must be dropped by the header mapping
		headers.put("a_k", null);
		headers.put("b_k", "b_v");
		((DefaultGatewayRequest) request.getGatewayRequest()).setHeaders(headers);
		JRestlessContainerRequest containerRequest = gatewayHandler.createContainerRequest(request);
		assertEquals(ImmutableMap.of("b_k", singletonList("b_v")), containerRequest.getHeaders());
	}

	@Test
	public void createContainerRequest_CommaSeparatedHeaderValueGiven_ShouldNotSpreadHeader() {
		GatewayRequestAndLambdaContext request = createMinimalRequest();
		Map<String, String> headers = new HashMap<>();
		headers.put("a_k", "a_v0,a_v1");
		((DefaultGatewayRequest) request.getGatewayRequest()).setHeaders(headers);
		JRestlessContainerRequest containerRequest = gatewayHandler.createContainerRequest(request);
		// the comma-separated value must stay one single header value, not be split in two
		assertEquals(ImmutableMap.of("a_k", singletonList("a_v0,a_v1")), containerRequest.getHeaders());
	}

	@Test
	public void testResponseWriterFiltersInternalBinaryHeader() throws IOException {
		Map<String, List<String>> headers = new HashMap<>();
		headers.put("a_k", Collections.singletonList("a_v"));
		headers.put(GatewayBinaryResponseFilter.HEADER_BINARY_RESPONSE, Collections.singletonList("true"));
		headers.put("b_k", Collections.singletonList("b_v"));
		SimpleResponseWriter<GatewayResponse> responseWriter = gatewayHandler.createResponseWriter(null);
		responseWriter.writeResponse(Status.OK, headers, new ByteArrayOutputStream());
		// the framework-internal binary marker header must not leak into the gateway response
		assertEquals(ImmutableMap.of("a_k", "a_v", "b_k", "b_v"), responseWriter.getResponse().getHeaders());
	}

	@Test
	public void testResponseWriterSetsBase64EncodedFlagIfExactlyOneBinaryHeaderSetToTrue() throws IOException {
		Map<String, List<String>> headers = new HashMap<>();
		headers.put(GatewayBinaryResponseFilter.HEADER_BINARY_RESPONSE, Collections.singletonList("true"));
		SimpleResponseWriter<GatewayResponse> responseWriter = gatewayHandler.createResponseWriter(null);
		responseWriter.writeResponse(Status.OK, headers, new ByteArrayOutputStream());
		// NOTE(review): accessor name "isIsBase64Encoded" looks generated from a field named
		// "isBase64Encoded" (API Gateway response contract) - odd but intentional, do not rename here
		assertTrue(responseWriter.getResponse().isIsBase64Encoded());
	}

	@Test
	public void testResponseWriterDoesntSetBase64EncodedFlagIfMultipleBinaryHeadersSet() throws IOException {
		Map<String, List<String>> headers = new HashMap<>();
		// two values for the marker header is ambiguous, so the flag must stay false
		headers.put(GatewayBinaryResponseFilter.HEADER_BINARY_RESPONSE, ImmutableList.of("true", "true"));
		SimpleResponseWriter<GatewayResponse> responseWriter = gatewayHandler.createResponseWriter(null);
		responseWriter.writeResponse(Status.OK, headers, new ByteArrayOutputStream());
		assertFalse(responseWriter.getResponse().isIsBase64Encoded());
	}

	@Test
	public void testResponseWriterDoesntSetBase64EncodedFlagIfNoBinaryHeadersSet() throws IOException {
		Map<String, List<String>> headers = new HashMap<>();
		SimpleResponseWriter<GatewayResponse> responseWriter = gatewayHandler.createResponseWriter(null);
		responseWriter.writeResponse(Status.OK, headers, new ByteArrayOutputStream());
		assertFalse(responseWriter.getResponse().isIsBase64Encoded());
	}

	@Test
	public void testResponseWriterDoesntSetBase64EncodedFlagIfBinaryHeaderSetToFalse() throws IOException {
		Map<String, List<String>> headers = new HashMap<>();
		headers.put(GatewayBinaryResponseFilter.HEADER_BINARY_RESPONSE, Collections.singletonList("false"));
		SimpleResponseWriter<GatewayResponse> responseWriter = gatewayHandler.createResponseWriter(null);
		responseWriter.writeResponse(Status.OK, headers, new ByteArrayOutputStream());
		assertFalse(responseWriter.getResponse().isIsBase64Encoded());
	}

	// ---------------------------------------------------------------------------
	// getRequestAndBaseUri: base-URI / request-URI derivation from domain, base
	// path, stage, resource template and path parameters
	// ---------------------------------------------------------------------------

	@Test
	public void getRequestAndBaseUri_CustomDomainWithBasePathGiven_ShouldAddBasePathAndStageToUri() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.basePath("base/stage")
				.resource("/users")
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/stage/"), uris.getBaseUri());
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/stage/users"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_CustomDomainWithBasePathAndStageGiven_ShouldAddBasePathToUri() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.basePath("base")
				.resource("/users")
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/"), uris.getBaseUri());
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/users"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_CustomDomainWithStageGiven_ShouldAddStageToUri() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.basePath("stage")
				.resource("/users")
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/stage/"), uris.getBaseUri());
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/stage/users"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_CustomDomainGiven_ShouldNotAddContextStageToUri() {
		// for custom domains the request-context stage is not part of the public URI
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.requestContextStage("dev")
				.resource("/users")
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/"), uris.getBaseUri());
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/users"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_AwsDomainWithStageGiven_ShouldAddStageToUri() {
		// AWS-hosted domains expose the stage as the first path segment
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_AWS_DOMAIN)
				.requestContextStage("dev")
				.resource("/users")
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_AWS_DOMAIN_WITH_SCHEME + "/dev/"), uris.getBaseUri());
		assertEquals(URI.create(TEST_AWS_DOMAIN_WITH_SCHEME + "/dev/users"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_AwsDomainWithoutStageGiven_ShouldSetBaseUriToDomainOnly() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_AWS_DOMAIN)
				.resource("/users")
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_AWS_DOMAIN_WITH_SCHEME + "/"), uris.getBaseUri());
		assertEquals(URI.create(TEST_AWS_DOMAIN_WITH_SCHEME + "/users"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_AwsDomainWithoutRequestContextGiven_ShouldSetBaseUriToDomainOnly() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_AWS_DOMAIN)
				.resource("/users")
				.buildWrapped();
		// force a null request context to exercise the defensive branch
		when(requestAndLambdaContext.getGatewayRequest().getRequestContext()).thenReturn(null);
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_AWS_DOMAIN_WITH_SCHEME + "/"), uris.getBaseUri());
		assertEquals(URI.create(TEST_AWS_DOMAIN_WITH_SCHEME + "/users"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_NoDomainGiven_ShouldNotAddDomainSchemeOrPort() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.resource("/users")
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		// without a domain the URIs are relative
		assertEquals(URI.create("/"), uris.getBaseUri());
		assertEquals(URI.create("/users"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_PathParamGiven_ShouldResolveBasePath() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.basePath("base")
				.resource("/users/{uid}")
				.pathParams(ImmutableMap.of("uid", "1"))
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/"), uris.getBaseUri());
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/users/1"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_ProxyParamGiven_ShouldResolveBasePath() {
		// greedy {proxy+} parameter may expand to several path segments
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.basePath("base")
				.resource("/{proxy+}")
				.pathParams(ImmutableMap.of("proxy", "users/1/contacts"))
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/"), uris.getBaseUri());
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/users/1/contacts"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_PathAndProxyParamGiven_ShouldResolveBasePath() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.basePath("base")
				.resource("/users/{uid}/contacts/{proxy+}")
				.pathParams(ImmutableMap.of("uid", "1", "proxy", "2"))
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/"), uris.getBaseUri());
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/users/1/contacts/2"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_ResourceWithRegexGiven_ShouldEscapeRegex() {
		GatewayRequestAndLambdaContext validRequestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.basePath("base")
				.resource("/a.c")
				.buildWrapped();
		RequestAndBaseUri validUris = gatewayHandler.getRequestAndBaseUri(validRequestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/"), validUris.getBaseUri());
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/a.c"), validUris.getRequestUri());
		GatewayRequestAndLambdaContext invalidRequestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.basePath("base")
				.resource("/a.c")
				.path("/base/abc")
				.buildWrapped();
		// we cannot match the basepath since "." has been escaped correctly
		RequestAndBaseUri invalidUris = gatewayHandler.getRequestAndBaseUri(invalidRequestAndLambdaContext);
		assertEquals(URI.create("/"), invalidUris.getBaseUri());
		assertEquals(URI.create("/base/abc"), invalidUris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_UnescapedQueryParametersGiven_ShouldEscapeQueryParameters() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.basePath("base")
				.resource("/{proxy+}")
				.pathParams(Collections.singletonMap("proxy", "users"))
				.queryParams(Collections.singletonMap("q", "foo bar"))
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/"), uris.getBaseUri());
		// space in the query value is form-encoded as "+"
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/users?q=foo+bar"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_TrailingSlashesGiven_ShouldMatchBasePath() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.basePath("base")
				.resource("/ab")
				.path("/base/ab////")
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/"), uris.getBaseUri());
		// trailing slashes are preserved on the request URI
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/ab////"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_PathContainsButNotMatchesResourceGiven_ShouldFallbackToBaseUri() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.basePath("base")
				.resource("/ab")
				.path("/ab/a")
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create("/"), uris.getBaseUri());
		assertEquals(URI.create("/ab/a"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_RootResourceAndBasePathGiven_ShouldDetectBaseUri() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.basePath("base")
				.resource("/")
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/"), uris.getBaseUri());
		// note: request URI has no trailing slash here
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_RootResourceAndBasePathWithTrailingSlashesGiven_ShouldDetectBaseUriAndKeepSlashesOnRequestUri() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.basePath("base")
				.resource("/")
				.path("/base//")
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base/"), uris.getBaseUri());
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/base//"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_RootResourceWithTrailingSlashesGiven_ShouldKeepSlashesOnRequestUri() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.resource("/")
				.path("///")
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/"), uris.getBaseUri());
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "///"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_InvalidResourceTemplateGiven_ShouldFallbackToRootUri() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.resource("/users/{uid}")
				.pathParams(Collections.singletonMap("uid", "1"))
				.buildWrapped();
		// unbalanced brace makes the resource template unparsable
		when(requestAndLambdaContext.getGatewayRequest().getResource()).thenReturn("/users/{uid");
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create("/"), uris.getBaseUri());
		assertEquals(URI.create("/users/1"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_TemplateResourceWithMissingPathParamGiven_ShouldFallbackToRootUri() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.resource("/users/{uid}")
				.pathParams(Collections.singletonMap("uid", "1"))
				.buildWrapped();
		when(requestAndLambdaContext.getGatewayRequest().getPathParameters()).thenReturn(Collections.emptyMap());
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create("/"), uris.getBaseUri());
		assertEquals(URI.create("/users/1"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_TemplateResourceWithNullPathParamsGiven_ShouldFallbackToRootUri() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.resource("/users/{uid}")
				.pathParams(Collections.singletonMap("uid", "1"))
				.buildWrapped();
		when(requestAndLambdaContext.getGatewayRequest().getPathParameters()).thenReturn(null);
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create("/"), uris.getBaseUri());
		assertEquals(URI.create("/users/1"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_NonTemplateResourceWithNullPathParamsGiven_ShouldNotFallbackToRootUri() {
		// a literal resource needs no path parameters, so null params are harmless
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.resource("/users/1")
				.buildWrapped();
		when(requestAndLambdaContext.getGatewayRequest().getPathParameters()).thenReturn(null);
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/"), uris.getBaseUri());
		assertEquals(URI.create(TEST_CUSTOM_DOMAIN_WITH_SCHEME + "/users/1"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_BlankResourceGiven_ShouldFallbackToRootUri() {
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain(TEST_CUSTOM_DOMAIN)
				.resource("/users/1")
				.buildWrapped();
		when(requestAndLambdaContext.getGatewayRequest().getResource()).thenReturn(null);
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create("/"), uris.getBaseUri());
		assertEquals(URI.create("/users/1"), uris.getRequestUri());
	}

	@Test
	public void getRequestAndBaseUri_FailsToConstructBaseUri_ShouldFallbackToRootUri() {
		// "{a}" is not a resolvable host name, so base-URI construction fails
		GatewayRequestAndLambdaContext requestAndLambdaContext = new GatewayRequestBuilder()
				.domain("{a}")
				.resource("/users/1")
				.buildWrapped();
		RequestAndBaseUri uris = gatewayHandler.getRequestAndBaseUri(requestAndLambdaContext);
		assertEquals(URI.create("/"), uris.getBaseUri());
		assertEquals(URI.create("/users/1"), uris.getRequestUri());
	}

	/**
	 * Builds the smallest valid request wrapper: a GET on "/" with no Lambda context.
	 */
	private GatewayRequestAndLambdaContext createMinimalRequest() {
		DefaultGatewayRequest request = new DefaultGatewayRequest();
		request.setPath("/");
		request.setHttpMethod("GET");
		return new GatewayRequestAndLambdaContext(request, null);
	}

	/**
	 * Drains the stream into a String, mapping each byte to a char
	 * (effectively ISO-8859-1 decoding).
	 * NOTE(review): the return value of read() is ignored; this relies on
	 * ByteArrayInputStream always filling the buffer in one call, which it does.
	 */
	public static String toString(ByteArrayInputStream bais) {
		int size = bais.available();
		char[] chars = new char[size];
		byte[] bytes = new byte[size];
		bais.read(bytes, 0, size);
		for (int i = 0; i < size;)
			chars[i] = (char) (bytes[i++] & 0xff);
		return new String(chars);
	}

	// minimal concrete subclass so the abstract handler can be instantiated in tests
	private static class GatewayRequestHandlerImpl extends GatewayRequestHandler {
	}
}
/**********************************************************************************
 * $URL$
 * $Id$
 ***********************************************************************************
 *
 * Copyright (c) 2003, 2004, 2005, 2006, 2008 The Sakai Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.opensource.org/licenses/ECL-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 **********************************************************************************/

package org.sakaiproject.mailarchive.impl;

import java.sql.Connection;
import java.sql.ResultSet;
import java.util.List;

import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.db.api.SqlReader;
import org.sakaiproject.db.api.SqlService;
import org.sakaiproject.javax.Filter;
import org.sakaiproject.javax.PagingPosition;
import org.sakaiproject.message.api.Message;
import org.sakaiproject.message.api.MessageChannel;
import org.sakaiproject.message.api.MessageChannelEdit;
import org.sakaiproject.message.api.MessageEdit;
import org.sakaiproject.time.api.Time;
import org.sakaiproject.util.BaseDbDoubleStorage;
import org.sakaiproject.util.DoubleStorageUser;
import org.apache.commons.lang.StringUtils;
import org.sakaiproject.util.Xml;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

/**
 * <p>
 * DbMailArchiveService fills out the BaseMailArchiveService with a database implementation.
 * </p>
 * <p>
 * The sql scripts in src/sql/chef_mailarchive.sql must be run on the database.
 * </p>
 */
public class DbMailArchiveService extends BaseMailArchiveService
{
	/** Our logger. */
	private static Log M_log = LogFactory.getLog(DbMailArchiveService.class);

	/** The name of the db table holding mail archive channels. */
	protected String m_cTableName = "MAILARCHIVE_CHANNEL";

	/** The name of the db table holding mail archive messages. */
	protected String m_rTableName = "MAILARCHIVE_MESSAGE";

	/** If true, we do our locks in the remote database, otherwise we do them here. */
	protected boolean m_locksInDb = true;

	/** Message-table columns passed to the double storage layer (order matters). */
	protected static final String[] FIELDS = { "MESSAGE_DATE", "OWNER", "DRAFT", "PUBVIEW", "SUBJECT", "BODY"};

	/** Subset of columns searched by free-text queries. */
	protected static final String[] SEARCH_FIELDS = { "OWNER", "SUBJECT", "BODY" };

	/**********************************************************************************************************************************************************************************************************************************************************
	 * Constructors, Dependencies and their setter methods
	 *********************************************************************************************************************************************************************************************************************************************************/

	/** Dependency: SqlService */
	protected SqlService m_sqlService = null;

	/**
	 * Dependency: SqlService.
	 *
	 * @param service
	 *        The SqlService.
	 */
	public void setSqlService(SqlService service)
	{
		m_sqlService = service;
	}

	/**
	 * Configuration: set the table name for the container (channel) records.
	 *
	 * @param name
	 *        The table name for the container.
	 */
	public void setContainerTableName(String name)
	{
		m_cTableName = name;
	}

	/**
	 * Configuration: set the table name for the resource (message) records.
	 *
	 * @param name
	 *        The table name for the resource.
	 */
	public void setResourceTableName(String name)
	{
		m_rTableName = name;
	}

	/**
	 * Configuration: set the locks-in-db flag.
	 *
	 * @param value
	 *        "true" to do locking in the database, anything else for in-memory locks.
	 */
	public void setLocksInDb(String value)
	{
		m_locksInDb = Boolean.valueOf(value).booleanValue();
	}

	/** Set if we are to run the to-draft/owner conversion. */
	protected boolean m_convertToDraft = false;

	/**
	 * Configuration: run the to-draft/owner conversion
	 *
	 * @param value
	 *        The conversion desired value.
	 */
	public void setConvertDraft(String value)
	{
		m_convertToDraft = Boolean.valueOf(value).booleanValue();
	}

	/** Configuration: to run the ddl on init or not. */
	protected boolean m_autoDdl = false;

	/**
	 * Configuration: to run the ddl on init or not.
	 *
	 * @param value
	 *        the auto ddl value.
	 */
	public void setAutoDdl(String value)
	{
		m_autoDdl = Boolean.valueOf(value).booleanValue();
	}

	/**********************************************************************************************************************************************************************************************************************************************************
	 * Init and Destroy
	 *********************************************************************************************************************************************************************************************************************************************************/

	/**
	 * Final initialization, once all dependencies are set.
	 * Optionally creates the schema, then delegates to the base service and
	 * runs the one-shot draft/owner conversion if configured.
	 */
	public void init()
	{
		try
		{
			// if we are auto-creating our schema, check and create
			if (m_autoDdl)
			{
				m_sqlService.ddl(this.getClass().getClassLoader(), "sakai_mailarchive");
				m_sqlService.ddl(this.getClass().getClassLoader(), "sakai_mailarchive_2_6_0");
			}

			super.init();

			M_log.info("init(): tables: " + m_cTableName + " " + m_rTableName + " locks-in-db: " + m_locksInDb);

			// convert?
			if (m_convertToDraft)
			{
				// clear the flag first so the conversion can only run once
				m_convertToDraft = false;
				convertToDraft();
			}
		}
		catch (Throwable t)
		{
			M_log.warn("init(): ", t);
		}
	}

	/**********************************************************************************************************************************************************************************************************************************************************
	 * BaseMessage extensions
	 *********************************************************************************************************************************************************************************************************************************************************/

	/**
	 * Construct a Storage object.
	 *
	 * @return The new storage object.
	 */
	protected Storage newStorage()
	{
		return new DbStorage(this);

	} // newStorage

	/**********************************************************************************************************************************************************************************************************************************************************
	 * Storage implementation
	 *********************************************************************************************************************************************************************************************************************************************************/

	protected class DbStorage extends BaseDbDoubleStorage implements Storage
	{
		/**
		 * Construct.
		 *
		 * @param user
		 *        The StorageUser class to call back for creation of Resource and Edit objects.
		 */
		public DbStorage(DoubleStorageUser user)
		{
			super(m_cTableName, "CHANNEL_ID", m_rTableName, "MESSAGE_ID", "CHANNEL_ID", "MESSAGE_DATE", "OWNER", "DRAFT", "PUBVIEW", FIELDS,
					SEARCH_FIELDS, m_locksInDb, "channel", "message", user, m_sqlService);
			m_locksAreInTable = false;

		} // DbStorage

		/* matchXml - Optionaly do a pre-de-serialize match
		 *
		 * A call back to match before the XML is parsed and turned into a
		 * Resource.  If we can decide here - it is more efficient than
		 * sending the XML through SAX.
		 *
		 * Returns 0 when undecidable (fall back to full parse), 1 on match,
		 * -1 on definite non-match.
		 */
		@Override
		public int matchXml(String xml, String search)
		{
			// only attempt the shortcut on documents with the exact expected prolog
			if (!xml.startsWith("<?xml version=\"1.0\" encoding=\"UTF-8\"?>")) return 0;

			/*
			 * <?xml version="1.0" encoding="UTF-8"?> <message
			 * body="Qm9keSAyMDA4MDEyNzIwMTM0MTkzMw=="
			 * body-html="Qm9keSAyMDA4MDEyNzIwMTM0MTkzMw=="> <header
			 * access="channel" date="20080127201341934" from="admin"
			 * id="d978685c-8730-4975-b3ea-55fdf03e0e5a"
			 * mail-date="20080127201341933" mail-from="from 20080127201341933"
			 * subject="Subject 20080127201341933"/><properties/></message>
			 */
			String body = getXmlAttr(xml, "body");
			String from = getXmlAttr(xml, "from");
			String subject = getXmlAttr(xml, "subject");
			if (body == null || from == null || subject == null) return 0;

			try
			{
				// the body attribute is stored Base64-encoded
				byte[] decoded = Base64.decodeBase64(body);
				// UTF-8 by default
				body = org.apache.commons.codec.binary.StringUtils.newStringUtf8(decoded);
			}
			catch (Exception e)
			{
				M_log.warn("Exception decoding message body: " + e);
				return 0;
			}

			if (StringUtils.containsIgnoreCase(subject, search) || StringUtils.containsIgnoreCase(from, search)
					|| StringUtils.containsIgnoreCase(body, search))
			{
				return 1;
			}
			return -1;
		}

		/**
		 * Crude attribute extraction: returns the text between tagName="..." quotes,
		 * or null when not found.
		 * NOTE(review): this is a plain string scan, not an XML parse - it will match
		 * the first occurrence anywhere in the document (see matchXml's prolog guard).
		 */
		String getXmlAttr(String xml, String tagName)
		{
			String lookfor = tagName+"=\"";
			int ipos = xml.indexOf(lookfor);
			if ( ipos < 1 ) return null;
			ipos = ipos + lookfor.length();
			int jpos = xml.indexOf("\"",ipos);
			if ( jpos < 1 || ipos > jpos ) return null;
			return xml.substring(ipos,jpos);
		}

		/** Channels - thin delegations to BaseDbDoubleStorage container operations. * */

		public boolean checkChannel(String ref)
		{
			return super.getContainer(ref) != null;
		}

		public MessageChannel getChannel(String ref)
		{
			return (MessageChannel) super.getContainer(ref);
		}

		public List getChannels()
		{
			return super.getAllContainers();
		}

		public MessageChannelEdit putChannel(String ref)
		{
			return (MessageChannelEdit) super.putContainer(ref);
		}

		public MessageChannelEdit editChannel(String ref)
		{
			return (MessageChannelEdit) super.editContainer(ref);
		}

		public void commitChannel(MessageChannelEdit edit)
		{
			super.commitContainer(edit);
		}

		public void cancelChannel(MessageChannelEdit edit)
		{
			super.cancelContainer(edit);
		}

		public void removeChannel(MessageChannelEdit edit)
		{
			super.removeContainer(edit);
		}

		public List getChannelIdsMatching(String root)
		{
			return super.getContainerIdsMatching(root);
		}

		/** messages - thin delegations to BaseDbDoubleStorage resource operations. * */

		public boolean checkMessage(MessageChannel channel, String id)
		{
			return super.checkResource(channel, id);
		}

		public Message getMessage(MessageChannel channel, String id)
		{
			return (Message) super.getResource(channel, id);
		}

		public List getMessages(MessageChannel channel)
		{
			return super.getAllResources(channel);
		}

		public List getMessages(MessageChannel channel,String search, boolean asc, PagingPosition pager)
		{
			return super.getAllResources(channel, null, search, asc, pager);
		}

		public int getCount(MessageChannel channel)
		{
			return super.getCount(channel);
		}

		public int getCount(MessageChannel channel, Filter filter)
		{
			return super.getCount(channel, filter);
		}

		public MessageEdit putMessage(MessageChannel channel, String id)
		{
			return (MessageEdit) super.putResource(channel, id, null);
		}

		public MessageEdit editMessage(MessageChannel channel, String id)
		{
			return (MessageEdit) super.editResource(channel, id);
		}

		public void commitMessage(MessageChannel channel, MessageEdit edit)
		{
			super.commitResource(channel, edit);
		}

		public void cancelMessage(MessageChannel channel, MessageEdit edit)
		{
			super.cancelResource(channel, edit);
		}

		public void removeMessage(MessageChannel channel, MessageEdit edit)
		{
			super.removeResource(channel, edit);
		}

		public List getMessages(MessageChannel channel, Time afterDate, int limitedToLatest, String draftsForId, boolean pubViewOnly)
		{
			return super.getResources(channel, afterDate, limitedToLatest, draftsForId, pubViewOnly);
		}

		public List getMessages(MessageChannel channel, Filter filter,boolean asc, PagingPosition pager)
		{
			return super.getAllResources(channel,filter, null, asc, pager);
		}

	} // DbStorage

	/**
	 * fill in the draft and owner db fields
	 *
	 * One-shot migration: reads every message row's XML, extracts the owner and
	 * draft flag from it, and writes them back into the OWNER / DRAFT columns.
	 * NOTE(review): if commit/dbRead throws, the borrowed connection is never
	 * returned and auto-commit is not restored - candidate for a finally block.
	 */
	protected void convertToDraft()
	{
		M_log.info("convertToDraft");

		try
		{
			// get a connection
			final Connection connection = m_sqlService.borrowConnection();
			boolean wasCommit = connection.getAutoCommit();
			connection.setAutoCommit(false);

			// read all message records that need conversion
			String sql = "select CHANNEL_ID, MESSAGE_ID, XML from " + m_rTableName /* + " where OWNER is null" */;
			m_sqlService.dbRead(connection, sql, null, new SqlReader()
			{
				private int count = 0;

				public Object readSqlResultRecord(ResultSet result)
				{
					try
					{
						// create the Resource from the db xml
						String channelId = result.getString(1);
						String messageId = result.getString(2);
						String xml = result.getString(3);

						// read the xml
						Document doc = Xml.readDocumentFromString(xml);

						// verify the root element
						Element root = doc.getDocumentElement();
						if (!root.getTagName().equals("message"))
						{
							M_log.warn("convertToDraft(): XML root element not message: " + root.getTagName());
							return null;
						}
						Message m = new BaseMessageEdit(null, root);

						// pick up the fields
						String owner = m.getHeader().getFrom().getId();
						boolean draft = m.getHeader().getDraft();

						// update
						String update = "update " + m_rTableName + " set OWNER = ?, DRAFT = ? where CHANNEL_ID = ? and MESSAGE_ID = ?";
						Object fields[] = new Object[4];
						fields[0] = owner;
						fields[1] = (draft ? "1" : "0");
						fields[2] = channelId;
						fields[3] = messageId;
						boolean ok = m_sqlService.dbWrite(connection, update, fields);
						if (!ok)
							M_log.info("convertToDraft: channel: " + channelId + " message: " + messageId + " owner: " + owner + " draft: "
									+ draft + " ok: " + ok);

						count++;
						if (count % 100 == 0)
						{
							M_log.info("convertToDraft: " + count);
						}
						return null;
					}
					catch (Exception ignore)
					{
						// per-row failures are skipped so the migration can continue
						return null;
					}
				}
			});

			connection.commit();
			connection.setAutoCommit(wasCommit);
			m_sqlService.returnConnection(connection);
		}
		catch (Exception t)
		{
			M_log.warn("convertToDraft: failed: " + t);
		}
		M_log.info("convertToDraft: done");
	}

} // DbMailArchiveService
// Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).

package com.twitter.intellij.pants.service;

import com.intellij.execution.ExecutionException;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.process.ProcessAdapter;
import com.intellij.execution.process.ProcessOutput;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.externalSystem.model.ExternalSystemException;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.util.Consumer;
import com.intellij.util.PathUtil;
import com.intellij.util.containers.ContainerUtil;
import com.twitter.intellij.pants.PantsBundle;
import com.twitter.intellij.pants.PantsExecutionException;
import com.twitter.intellij.pants.metrics.PantsMetrics;
import com.twitter.intellij.pants.model.IJRC;
import com.twitter.intellij.pants.model.PantsCompileOptions;
import com.twitter.intellij.pants.model.PantsExecutionOptions;
import com.twitter.intellij.pants.settings.PantsExecutionSettings;
import com.twitter.intellij.pants.util.PantsUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * Drives a Pants invocation (either the {@code export} goal or a project-generation script)
 * to produce the JSON project structure the importer consumes. Tracks spawned processes so
 * an import can be cancelled.
 */
public class PantsCompileOptionsExecutor {
  protected static final Logger LOG = Logger.getInstance(PantsCompileOptionsExecutor.class);

  /** Upper bound on the length of a generated project name. */
  public static final int PROJECT_NAME_LIMIT = 200;

  // Processes currently running on behalf of this executor; used by cancelAllProcesses().
  private final List<Process> myProcesses = ContainerUtil.createConcurrentList();

  private final PantsCompileOptions myOptions;
  private final File myBuildRoot;
  private final boolean myResolveSourcesAndDocsForJars;

  /**
   * Creates an executor rooted at the Pants build root containing {@code projectRootPath}.
   *
   * @param projectRootPath  path to the project (BUILD file, directory, or executable script)
   * @param executionOptions import settings; must not be null
   * @throws ExternalSystemException if options are missing or no build root can be found
   */
  @NotNull
  public static PantsCompileOptionsExecutor create(
    @NotNull String projectRootPath,
    @Nullable PantsExecutionSettings executionOptions
  ) throws ExternalSystemException {
    if (executionOptions == null) {
      throw new ExternalSystemException("No execution options for " + projectRootPath);
    }
    PantsCompileOptions options = new MyPantsCompileOptions(projectRootPath, executionOptions);
    Optional<File> buildRoot = PantsUtil.findBuildRoot(new File(options.getExternalProjectPath()));
    if (!buildRoot.isPresent() || !buildRoot.get().exists()) {
      throw new ExternalSystemException(PantsBundle.message("pants.error.no.pants.executable.by.path", options.getExternalProjectPath()));
    }
    return new PantsCompileOptionsExecutor(
      buildRoot.get(),
      options,
      executionOptions.isLibsWithSourcesAndDocs()
    );
  }

  /** Test-only stub rooted at "/" with default settings. */
  @NotNull
  @TestOnly
  public static PantsCompileOptionsExecutor createMock() {
    return new PantsCompileOptionsExecutor(
      new File("/"),
      new MyPantsCompileOptions("", PantsExecutionSettings.createDefault()),
      true
    ) {
    };
  }

  private PantsCompileOptionsExecutor(
    @NotNull File buildRoot,
    @NotNull PantsCompileOptions compilerOptions,
    boolean resolveSourcesAndDocsForJars
  ) {
    myBuildRoot = buildRoot;
    myOptions = compilerOptions;
    myResolveSourcesAndDocsForJars = resolveSourcesAndDocsForJars;
  }

  /** Project path relative to the build root. Assumes the project lives under the build root. */
  public String getProjectRelativePath() {
    return PantsUtil.getRelativeProjectPath(getBuildRoot(), getProjectPath()).get();
  }

  @NotNull
  public File getBuildRoot() {
    return myBuildRoot;
  }

  public String getProjectPath() {
    return myOptions.getExternalProjectPath();
  }

  /** Directory containing the project: the path itself if a directory, otherwise its parent. */
  @NotNull
  public String getProjectDir() {
    final File projectFile = new File(getProjectPath());
    final File projectDir = projectFile.isDirectory() ? projectFile : FileUtil.getParentFile(projectFile);
    return projectDir != null ? projectDir.getAbsolutePath() : projectFile.getAbsolutePath();
  }

  /**
   * Derives a display name from the selected target specs, prefixed with the build root name,
   * capped at {@link #PROJECT_NAME_LIMIT} characters.
   */
  @NotNull
  @Nls
  public String getDefaultProjectName() {
    final String buildRootName = getBuildRoot().getName();
    List<String> buildRootPrefixedSpecs = myOptions.getSelectedTargetSpecs().stream()
      .map(s -> buildRootName + File.separator + s)
      .collect(Collectors.toList());
    // Use replace() (literal) instead of replaceAll() (regex): on Windows File.separator
    // is "\", which is an invalid regular expression and would throw at runtime.
    String candidateName = String.join("__", buildRootPrefixedSpecs).replace(File.separator, ".");
    return candidateName.substring(0, Math.min(PROJECT_NAME_LIMIT, candidateName.length()));
  }

  /** Name for the root module: script file name (sans extension) or the relative project path. */
  @NotNull
  @Nls
  public String getRootModuleName() {
    if (PantsUtil.isExecutable(myOptions.getExternalProjectPath())) {
      //noinspection ConstantConditions
      return PantsUtil.fileNameWithoutExtension(VfsUtil.extractFileName(myOptions.getExternalProjectPath()));
    }
    return getProjectRelativePath();
  }

  @NotNull
  public PantsCompileOptions getOptions() {
    return myOptions;
  }

  /**
   * Loads the project structure JSON, either by running the project path as a script
   * (when executable) or by invoking the Pants export goal on the selected targets.
   */
  @NotNull
  public String loadProjectStructure(
    @NotNull Consumer<String> statusConsumer,
    @Nullable ProcessAdapter processAdapter
  ) throws IOException, ExecutionException {
    if (PantsUtil.isExecutable(getProjectPath())) {
      return loadProjectStructureFromScript(getProjectPath(), statusConsumer, processAdapter);
    }
    else {
      return loadProjectStructureFromTargets(statusConsumer, processAdapter);
    }
  }

  /** Runs a user-provided generation script and returns its stdout as the project structure. */
  @NotNull
  private static String loadProjectStructureFromScript(
    @NotNull String scriptPath,
    @NotNull Consumer<String> statusConsumer,
    @Nullable ProcessAdapter processAdapter
  ) throws IOException, ExecutionException {
    final GeneralCommandLine commandLine = PantsUtil.defaultCommandLine(scriptPath);
    commandLine.setExePath(scriptPath);
    statusConsumer.consume("Executing " + PathUtil.getFileName(scriptPath));
    final ProcessOutput processOutput = PantsUtil.getCmdOutput(commandLine, processAdapter);
    if (processOutput.checkSuccess(LOG)) {
      return processOutput.getStdout();
    }
    else {
      throw new PantsExecutionException("Failed to update the project!", scriptPath, processOutput);
    }
  }

  /** Runs `pants export` (or `export-dep-as-jar`) and returns the generated output file. */
  @NotNull
  private String loadProjectStructureFromTargets(
    @NotNull Consumer<String> statusConsumer,
    @Nullable ProcessAdapter processAdapter
  ) throws IOException, ExecutionException {
    final File outputFile = FileUtil.createTempFile("pants_depmap_run", ".out");
    final GeneralCommandLine command = getPantsExportCommand(outputFile, statusConsumer);
    statusConsumer.consume("Resolving dependencies...");
    PantsMetrics.markExportStart();
    final ProcessOutput processOutput = getProcessOutput(command);
    PantsMetrics.markExportEnd();
    if (processOutput.getStdout().contains("no such option")) {
      throw new ExternalSystemException("Pants doesn't have necessary APIs. Please upgrade your pants!");
    }
    if (processOutput.checkSuccess(LOG)) {
      return FileUtil.loadFile(outputFile);
    }
    else {
      throw new PantsExecutionException("Failed to update the project!", command.getCommandLineString("pants"), processOutput);
    }
  }

  /**
   * Spawns the command, registering the process for cancellation while it runs.
   * The process is always deregistered, even if reading output throws (the original
   * skipped the remove() on exception, leaking dead entries into myProcesses).
   */
  private ProcessOutput getProcessOutput(
    @NotNull GeneralCommandLine command
  ) throws ExecutionException {
    final Process process = command.createProcess();
    myProcesses.add(process);
    try {
      return PantsUtil.getCmdOutput(process, command.getCommandLineString(), null);
    }
    finally {
      myProcesses.remove(process);
    }
  }

  /** Builds the `pants export` command line, writing the target specs to a temp file. */
  @NotNull
  private GeneralCommandLine getPantsExportCommand(final File outputFile, @NotNull Consumer<String> statusConsumer) throws IOException {
    final GeneralCommandLine commandLine = PantsUtil.defaultCommandLine(getProjectPath());

    // Grab the import stage pants rc file for IntelliJ.
    Optional<String> rcArg = IJRC.getImportPantsRc(commandLine.getWorkDirectory().getPath());
    rcArg.ifPresent(commandLine::addParameter);

    final File targetSpecsFile = FileUtil.createTempFile("pants_target_specs", ".in");
    try (FileWriter targetSpecsFileWriter = new FileWriter(targetSpecsFile)) {
      for (String targetSpec : getTargetSpecs()) {
        targetSpecsFileWriter.write(targetSpec);
        targetSpecsFileWriter.write('\n');
      }
    }

    // The spec-file flag was renamed in Pants 1.25.0.
    if (PantsUtil.isCompatiblePantsVersion(getProjectPath(), "1.25.0")) {
      commandLine.addParameter("--spec-file=" + targetSpecsFile.getPath());
    }
    else {
      commandLine.addParameter("--target-spec-file=" + targetSpecsFile.getPath());
    }
    commandLine.addParameter("--no-quiet");
    if (PantsUtil.isCompatiblePantsVersion(getProjectPath(), "1.24.0")) {
      commandLine.addParameter("--export-available-target-types");
    }
    if (getOptions().isImportSourceDepsAsJars()) {
      commandLine.addParameter("export-dep-as-jar");
      commandLine.addParameter("--sources");
    }
    else {
      commandLine.addParameter("export");
    }
    commandLine.addParameter("--output-file=" + outputFile.getPath());
    commandLine.addParameter("--formatted"); // json outputs in a compact format
    if (myResolveSourcesAndDocsForJars) {
      commandLine.addParameter("--export-libraries-sources");
      commandLine.addParameter("--export-libraries-javadocs");
    }
    return commandLine;
  }

  @NotNull
  private List<String> getTargetSpecs() {
    // If project is opened via pants cli, the targets are in specs.
    return Collections.unmodifiableList(getOptions().getSelectedTargetSpecs());
  }

  /**
   * @return if successfully canceled all running processes. false if failed and there were no processes to cancel.
   */
  public boolean cancelAllProcesses() {
    if (myProcesses.isEmpty()) {
      return false;
    }
    // Destroy each tracked process exactly once (the original destroyed every
    // process twice: once in an explicit loop and again via forEach).
    myProcesses.forEach(Process::destroy);
    return true;
  }

  /** Resolves a path relative to the build root to an absolute path. */
  public String getAbsolutePathFromWorkingDir(@NotNull String relativePath) {
    return new File(getBuildRoot(), relativePath).getPath();
  }

  /** Compile options backed by the user's execution settings, with symlinks resolved. */
  private static class MyPantsCompileOptions implements PantsCompileOptions {
    private final String myExternalProjectPath;
    private final PantsExecutionOptions myExecutionOptions;

    private MyPantsCompileOptions(@NotNull String externalProjectPath, @NotNull PantsExecutionOptions executionOptions) {
      myExternalProjectPath = PantsUtil.resolveSymlinks(externalProjectPath);
      myExecutionOptions = executionOptions;
    }

    @NotNull
    @Override
    public String getExternalProjectPath() {
      return myExternalProjectPath;
    }

    @NotNull
    public List<String> getSelectedTargetSpecs() {
      return myExecutionOptions.getSelectedTargetSpecs();
    }

    public boolean isEnableIncrementalImport() {
      return myExecutionOptions.isEnableIncrementalImport();
    }

    @Override
    public boolean isImportSourceDepsAsJars() {
      return myExecutionOptions.isImportSourceDepsAsJars();
    }
  }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 */

package com.microsoft.azure.management.network;

import com.microsoft.azure.management.apigeneration.Fluent;
import com.microsoft.azure.management.network.implementation.NetworkInterfaceInner;
import com.microsoft.azure.management.resources.fluentcore.arm.models.GroupableResource;
import com.microsoft.azure.management.resources.fluentcore.arm.models.Resource;
import com.microsoft.azure.management.resources.fluentcore.model.Appliable;
import com.microsoft.azure.management.resources.fluentcore.model.Creatable;
import com.microsoft.azure.management.resources.fluentcore.model.Refreshable;
import com.microsoft.azure.management.resources.fluentcore.model.Updatable;
import com.microsoft.azure.management.resources.fluentcore.model.Wrapper;

import java.util.List;
import java.util.Map;

/**
 * Network interface.
 */
@Fluent()
public interface NetworkInterface extends
        GroupableResource,
        Refreshable<NetworkInterface>,
        Wrapper<NetworkInterfaceInner>,
        Updatable<NetworkInterface.Update> {
    // Getters

    /**
     * @return <tt>true</tt> if IP forwarding is enabled in this network interface
     */
    boolean isIpForwardingEnabled();

    /**
     * @return the MAC Address of the network interface
     */
    String macAddress();

    /**
     * @return the Internal DNS name assigned to this network interface
     */
    String internalDnsNameLabel();

    /**
     * Gets the fully qualified domain name of this network interface.
     * <p>
     * A network interface receives FQDN as a part of assigning it to a virtual machine.
     *
     * @return the fully qualified domain name
     */
    String internalFqdn();

    /**
     * @return the internal domain name suffix
     */
    String internalDomainNameSuffix();

    /**
     * @return IP addresses of this network interface's DNS servers
     */
    List<String> dnsServers();

    /**
     * @return applied DNS servers
     */
    List<String> appliedDnsServers();

    /**
     * Gets the private IP address allocated to this network interface's primary IP configuration.
     * <p>
     * The private IP will be within the virtual network subnet of this network interface.
     *
     * @return the private IP address
     */
    String primaryPrivateIp();

    /**
     * @return the private IP allocation method (Dynamic, Static) of this network interface's
     * primary IP configuration.
     */
    IPAllocationMethod primaryPrivateIpAllocationMethod();

    /**
     * @return the IP configurations of this network interface, indexed by their names
     */
    Map<String, NicIpConfiguration> ipConfigurations();

    /**
     * @return the primary IP configuration of this network interface
     */
    NicIpConfiguration primaryIpConfiguration();

    /**
     * @return the network security group resource id or null if there is no network security group
     * associated with this network interface.
     */
    String networkSecurityGroupId();

    /**
     * Gets the network security group associated with this network interface.
     * <p>
     * This method makes a rest API call to fetch the Network Security Group resource.
     *
     * @return the network security group associated with this network interface.
     */
    NetworkSecurityGroup getNetworkSecurityGroup();

    /**
     * @return the resource ID of the associated virtual machine, or null if none.
     */
    String virtualMachineId();

    // Setters (fluent)

    /**
     * The entirety of the network interface definition.
     */
    interface Definition extends
        DefinitionStages.Blank,
        DefinitionStages.WithGroup,
        DefinitionStages.WithPrimaryNetwork,
        DefinitionStages.WithPrimaryNetworkSubnet,
        DefinitionStages.WithPrimaryPrivateIp,
        DefinitionStages.WithCreate {
    }

    /**
     * Grouping of network interface definition stages.
     */
    interface DefinitionStages {
        /**
         * The stage of the network interface definition allowing to associate it with a load balancer.
         */
        interface WithLoadBalancer {
            /**
             * Associates the network interface's primary IP configuration with a backend of an existing load balancer.
             * @param loadBalancer an existing load balancer
             * @param backendName the name of an existing backend on that load balancer
             * @return the next stage of the definition
             */
            WithCreate withExistingLoadBalancerBackend(LoadBalancer loadBalancer, String backendName);

            /**
             * Associates the network interface's primary IP configuration with an inbound NAT rule of an existing load balancer.
             * @param loadBalancer an existing load balancer
             * @param inboundNatRuleName the name of an existing inbound NAT rule on the selected load balancer
             * @return the next stage of the definition
             */
            WithCreate withExistingLoadBalancerInboundNatRule(LoadBalancer loadBalancer, String inboundNatRuleName);
        }

        /**
         * The first stage of the network interface definition.
         */
        interface Blank
            extends GroupableResource.DefinitionWithRegion<WithGroup> {
        }

        /**
         * The stage of the network interface definition allowing to specify the resource group.
         */
        interface WithGroup
            extends GroupableResource.DefinitionStages.WithGroup<WithPrimaryNetwork> {
        }

        /**
         * The stage of the network interface definition allowing to specify the virtual network for
         * primary IP configuration.
         */
        interface WithPrimaryNetwork {
            /**
             * Create a new virtual network to associate with the network interface's primary IP configuration,
             * based on the provided definition.
             *
             * @param creatable a creatable definition for a new virtual network
             * @return the next stage of the network interface definition
             */
            WithPrimaryPrivateIp withNewPrimaryNetwork(Creatable<Network> creatable);

            /**
             * Creates a new virtual network to associate with the network interface's primary IP configuration.
             * <p>
             * The virtual network will be created in the same resource group and region as of network interface,
             * it will be created with the specified address space and a default subnet covering the entirety of
             * the network IP address space.
             *
             * @param name the name of the new virtual network
             * @param addressSpace the address space for the virtual network
             * @return the next stage of the network interface definition
             */
            WithPrimaryPrivateIp withNewPrimaryNetwork(String name, String addressSpace);

            /**
             * Creates a new virtual network to associate with the network interface's primary IP configuration.
             * <p>
             * The virtual network will be created in the same resource group and region as of network interface,
             * it will be created with the specified address space and a default subnet covering the entirety of
             * the network IP address space.
             *
             * @param addressSpace the address space for the virtual network
             * @return the next stage of the network interface definition
             */
            WithPrimaryPrivateIp withNewPrimaryNetwork(String addressSpace);

            /**
             * Associate an existing virtual network with the network interface's primary IP configuration.
             *
             * @param network an existing virtual network
             * @return the next stage of the network interface definition
             */
            WithPrimaryNetworkSubnet withExistingPrimaryNetwork(Network network);
        }

        /**
         * The stage of the network interface definition allowing to specify subnet.
         */
        interface WithPrimaryNetworkSubnet {
            /**
             * Associate a subnet with the network interface's primary IP configuration.
             *
             * @param name the subnet name
             * @return the next stage of the network interface definition
             */
            WithPrimaryPrivateIp withSubnet(String name);
        }

        /**
         * The stage of the network interface definition allowing to specify private IP address within
         * a virtual network subnet.
         */
        interface WithPrimaryPrivateIp {
            /**
             * Enables dynamic private IP address allocation within the specified existing virtual network
             * subnet for the network interface's primary IP configuration.
             *
             * @return the next stage of network interface definition
             */
            WithCreate withPrimaryPrivateIpAddressDynamic();

            /**
             * Assigns the specified static private IP address within the specified existing virtual network
             * subnet to the network interface's primary IP configuration.
             *
             * @param staticPrivateIpAddress the static IP address within the specified subnet to assign to
             *                               the network interface
             * @return the next stage of network interface definition
             */
            WithCreate withPrimaryPrivateIpAddressStatic(String staticPrivateIpAddress);
        }

        /**
         * The stage of the network interface definition allowing to associate a public IP address with its primary
         * IP configuration.
         */
        interface WithPrimaryPublicIpAddress {
            /**
             * Create a new public IP address to associate with network interface's primary IP configuration, based on
             * the provided definition.
             *
             * @param creatable a creatable definition for a new public IP
             * @return the next stage of the network interface definition
             */
            WithCreate withNewPrimaryPublicIpAddress(Creatable<PublicIpAddress> creatable);

            /**
             * Creates a new public IP address in the same region and group as the resource and associate it
             * with the network interface's primary IP configuration.
             * <p>
             * The internal name and DNS label for the public IP address will be derived from the network interface name.
             *
             * @return the next stage of the network interface definition
             */
            WithCreate withNewPrimaryPublicIpAddress();

            /**
             * Creates a new public IP address in the same region and group as the resource, with the specified DNS label
             * and associate it with the network interface's primary IP configuration.
             * <p>
             * The internal name for the public IP address will be derived from the DNS label.
             *
             * @param leafDnsLabel the leaf domain label
             * @return the next stage of the network interface definition
             */
            WithCreate withNewPrimaryPublicIpAddress(String leafDnsLabel);

            /**
             * Associates an existing public IP address with the network interface's primary IP configuration.
             *
             * @param publicIpAddress an existing public IP address
             * @return the next stage of the network interface definition
             */
            WithCreate withExistingPrimaryPublicIpAddress(PublicIpAddress publicIpAddress);
        }

        /**
         * The stage of the network interface definition allowing to associate a network security group.
         */
        interface WithNetworkSecurityGroup {
            /**
             * Create a new network security group to associate with network interface, based on the provided definition.
             *
             * @param creatable a creatable definition for a new network security group
             * @return the next stage of the network interface definition
             */
            WithCreate withNewNetworkSecurityGroup(Creatable<NetworkSecurityGroup> creatable);

            /**
             * Associates an existing network security group with the network interface.
             *
             * @param networkSecurityGroup an existing network security group
             * @return the next stage of the network interface definition
             */
            WithCreate withExistingNetworkSecurityGroup(NetworkSecurityGroup networkSecurityGroup);
        }

        /**
         * The stage of the network interface definition allowing to associate secondary IP configurations.
         */
        interface WithSecondaryIpConfiguration {
            /**
             * Starts definition of a secondary IP configuration.
             *
             * @param name name for the IP configuration
             * @return the first stage of a secondary IP configuration definition
             */
            NicIpConfiguration.DefinitionStages.Blank<WithCreate> defineSecondaryIpConfiguration(String name);
        }

        /**
         * The stage of the network interface definition which contains all the minimum required inputs for
         * the resource to be created (via {@link WithCreate#create()}), but also allows
         * for any other optional settings to be specified.
         */
        interface WithCreate extends
            Creatable<NetworkInterface>,
            Resource.DefinitionWithTags<WithCreate>,
            WithPrimaryPublicIpAddress,
            WithNetworkSecurityGroup,
            WithSecondaryIpConfiguration,
            WithLoadBalancer {
            /**
             * Enable IP forwarding in the network interface.
             *
             * @return the next stage of the network interface definition
             */
            WithCreate withIpForwarding();

            /**
             * Specifies the IP address of the custom DNS server to associate with the network interface.
             * <p>
             * Note this method's effect is additive, i.e. each time it is used, the new dns server is
             * added to the network interface.
             *
             * @param ipAddress the IP address of the DNS server
             * @return the next stage of the network interface definition
             */
            WithCreate withDnsServer(String ipAddress);

            /**
             * Specifies the internal DNS name label for the network interface.
             *
             * @param dnsNameLabel the internal DNS name label
             * @return the next stage of the network interface definition
             */
            WithCreate withInternalDnsNameLabel(String dnsNameLabel);
        }
    }

    /**
     * Grouping of network interface update stages.
     */
    interface UpdateStages {
        /**
         * The stage of the network interface update allowing to specify subnet.
         */
        interface WithPrimaryNetworkSubnet {
            /**
             * Associate a subnet with the network interface.
             *
             * @param name the subnet name
             * @return the next stage of the network interface update
             */
            Update withSubnet(String name);
        }

        /**
         * The stage of the network interface update allowing to specify private IP address within
         * a virtual network subnet.
         */
        interface WithPrimaryPrivateIp {
            /**
             * Enables dynamic private IP address allocation within the specified existing virtual network
             * subnet for the network interface's primary IP configuration.
             *
             * @return the next stage of network interface update
             */
            Update withPrimaryPrivateIpAddressDynamic();

            /**
             * Assigns the specified static private IP address within the specified existing virtual network
             * subnet to the network interface's primary IP configuration.
             *
             * @param staticPrivateIpAddress the static IP address within the specified subnet to assign to
             *                               the primary IP configuration
             * @return the next stage of network interface update
             */
            Update withPrimaryPrivateIpAddressStatic(String staticPrivateIpAddress);
        }

        /**
         * The stage of the network interface update allowing to associate a public IP address with its primary
         * IP configuration.
         */
        interface WithPrimaryPublicIpAddress {
            /**
             * Create a new public IP address to associate the network interface's primary IP configuration,
             * based on the provided definition.
             * <p>
             * If there is a public IP associated with the primary IP configuration then that will be removed in
             * favour of this.
             *
             * @param creatable a creatable definition for a new public IP
             * @return the next stage of the network interface update
             */
            Update withNewPrimaryPublicIpAddress(Creatable<PublicIpAddress> creatable);

            /**
             * Creates a new public IP address in the same region and group as the resource and associate it
             * with the network interface's primary IP configuration.
             * <p>
             * The internal name and DNS label for the public IP address will be derived from the network interface name;
             * if there is an existing public IP association then that will be removed in favour of this.
             *
             * @return the next stage of the network interface update
             */
            Update withNewPrimaryPublicIpAddress();

            /**
             * Creates a new public IP address in the same region and group as the resource, with the specified DNS label
             * and associate it with the network interface's primary IP configuration.
             * <p>
             * The internal name for the public IP address will be derived from the DNS label; if there is an existing
             * public IP association then that will be removed in favour of this.
             *
             * @param leafDnsLabel the leaf domain label
             * @return the next stage of the network interface update
             */
            Update withNewPrimaryPublicIpAddress(String leafDnsLabel);

            /**
             * Removes any public IP address associated with the network interface's primary IP configuration.
             *
             * @return the next stage of the network interface update
             */
            Update withoutPrimaryPublicIpAddress();

            /**
             * Associates an existing public IP address with the network interface's primary IP configuration.
             * If there is an existing public IP association then that will be removed in favour of this.
             *
             * @param publicIpAddress an existing public IP address
             * @return the next stage of the network interface update
             */
            Update withExistingPrimaryPublicIpAddress(PublicIpAddress publicIpAddress);
        }

        /**
         * The stage of the network interface update allowing to associate a network security group.
         */
        interface WithNetworkSecurityGroup {
            /**
             * Create a new network security group to associate with network interface, based on the provided definition.
             *
             * @param creatable a creatable definition for a new network security group
             * @return the next stage of the network interface update
             */
            Update withNewNetworkSecurityGroup(Creatable<NetworkSecurityGroup> creatable);

            /**
             * Associates an existing network security group with the network interface.
             *
             * @param networkSecurityGroup an existing network security group
             * @return the next stage of the network interface update
             */
            Update withExistingNetworkSecurityGroup(NetworkSecurityGroup networkSecurityGroup);

            /**
             * Removes any network security group associated with the network interface.
             *
             * @return the next stage of the network interface update
             */
            Update withoutNetworkSecurityGroup();
        }

        /**
         * The stage of the network interface update allowing to enable or disable IP forwarding.
         */
        interface WithIpForwarding {
            /**
             * Enable IP forwarding in the network interface.
             *
             * @return the next stage of the network interface update
             */
            Update withIpForwarding();

            /**
             * Disable IP forwarding in the network interface.
             *
             * @return the next stage of the network interface update
             */
            Update withoutIpForwarding();
        }

        /**
         * The stage of the network interface update allowing to specify DNS servers.
         */
        interface WithDnsServer {
            /**
             * Specifies the IP address of the custom DNS server to associate with the network interface.
             * <p>
             * Note this method's effect is additive, i.e. each time it is used, the new dns server is
             * added to the network interface.
             *
             * @param ipAddress the IP address of the DNS server
             * @return the next stage of the network interface update
             */
            Update withDnsServer(String ipAddress);

            /**
             * Removes a DNS server associated with the network interface.
             *
             * @param ipAddress the IP address of the DNS server
             * @return the next stage of the network interface update
             */
            Update withoutDnsServer(String ipAddress);

            /**
             * Specifies to use the default Azure DNS server for the network interface.
             * <p>
             * Using the Azure DNS server will remove any custom DNS server associated with this network interface.
             *
             * @return the next stage of the network interface update
             */
            Update withAzureDnsServer();
        }

        /**
         * The stage of the network interface update allowing to configure IP configuration.
         */
        interface WithIpConfiguration {
            /**
             * Starts definition of a secondary IP configuration.
             *
             * @param name name for the IP configuration
             * @return the first stage of a secondary IP configuration definition
             */
            NicIpConfiguration.UpdateDefinitionStages.Blank<NetworkInterface.Update> defineSecondaryIpConfiguration(String name);

            /**
             * Starts update of an IP configuration.
             *
             * @param name name of the IP configuration
             * @return the first stage of an IP configuration update
             */
            NicIpConfiguration.Update updateIpConfiguration(String name);
        }

        /**
         * The stage of the network interface update allowing to associate it with a load balancer.
         */
        interface WithLoadBalancer {
            /**
             * Associates the network interface's primary IP configuration with a backend of an existing load balancer.
             * @param loadBalancer an existing load balancer
             * @param backendName the name of an existing backend on that load balancer
             * @return the next stage of the update
             */
            Update withExistingLoadBalancerBackend(LoadBalancer loadBalancer, String backendName);

            /**
             * Associates the network interface's primary IP configuration with an inbound NAT rule of an existing load balancer.
             * @param loadBalancer an existing load balancer
             * @param inboundNatRuleName the name of an existing inbound NAT rule on the selected load balancer
             * @return the next stage of the update
             */
            Update withExistingLoadBalancerInboundNatRule(LoadBalancer loadBalancer, String inboundNatRuleName);

            /**
             * Removes all the existing associations with any load balancer backends.
             * @return the next stage of the update
             */
            Update withoutLoadBalancerBackends();

            /**
             * Removes all the existing associations with any load balancer inbound NAT rules.
             * @return the next stage of the update
             */
            Update withoutLoadBalancerInboundNatRules();
        }
    }

    /**
     * The template for an update operation, containing all the settings that
     * can be modified.
     * <p>
     * Call {@link Update#apply()} to apply the changes to the resource in Azure.
     */
    interface Update extends
        Appliable<NetworkInterface>,
        Resource.UpdateWithTags<Update>,
        UpdateStages.WithPrimaryNetworkSubnet,
        UpdateStages.WithPrimaryPrivateIp,
        UpdateStages.WithPrimaryPublicIpAddress,
        UpdateStages.WithNetworkSecurityGroup,
        UpdateStages.WithIpForwarding,
        UpdateStages.WithDnsServer,
        UpdateStages.WithIpConfiguration,
        UpdateStages.WithLoadBalancer {
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.schemas; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.UUID; import java.util.stream.Collectors; import org.apache.beam.model.pipeline.v1.SchemaApi; import org.apache.beam.model.pipeline.v1.SchemaApi.ArrayTypeValue; import org.apache.beam.model.pipeline.v1.SchemaApi.AtomicTypeValue; import org.apache.beam.model.pipeline.v1.SchemaApi.FieldValue; import org.apache.beam.model.pipeline.v1.SchemaApi.IterableTypeValue; import org.apache.beam.model.pipeline.v1.SchemaApi.MapTypeEntry; import org.apache.beam.model.pipeline.v1.SchemaApi.MapTypeValue; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.annotations.Experimental.Kind; import org.apache.beam.sdk.schemas.Schema.Field; import org.apache.beam.sdk.schemas.Schema.FieldType; import org.apache.beam.sdk.schemas.Schema.LogicalType; import org.apache.beam.sdk.schemas.Schema.TypeName; import org.apache.beam.sdk.util.SerializableUtils; import org.apache.beam.sdk.values.Row; import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables; 
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Maps;

/**
 * Utility methods for translating schemas.
 *
 * <p>Converts between the Java SDK {@link Schema} model and the portable {@link SchemaApi} protos,
 * in both directions, covering schemas, field types, row/field values, and schema options.
 */
@Experimental(Kind.SCHEMAS)
public class SchemaTranslation {

  // URNs identifying logical types in the portable (cross-SDK) representation.
  private static final String URN_BEAM_LOGICAL_DATETIME = "beam:logical_type:datetime:v1";
  private static final String URN_BEAM_LOGICAL_DECIMAL = "beam:logical_type:decimal:v1";
  // Catch-all URN for logical types defined only in the Java SDK; the payload (when
  // serializeLogicalType is set) is the Java-serialized LogicalType instance.
  private static final String URN_BEAM_LOGICAL_JAVASDK = "beam:logical_type:javasdk:v1";

  /**
   * Converts a Java {@link Schema} to its portable proto representation.
   *
   * @param schema the schema to convert
   * @param serializeLogicalType whether to embed a Java-serialized payload for logical types so
   *     the Java SDK can round-trip them
   */
  public static SchemaApi.Schema schemaToProto(Schema schema, boolean serializeLogicalType) {
    // An absent UUID is encoded as the empty string (see schemaFromProto, which mirrors this).
    String uuid = schema.getUUID() != null ? schema.getUUID().toString() : "";
    SchemaApi.Schema.Builder builder = SchemaApi.Schema.newBuilder().setId(uuid);
    for (Field field : schema.getFields()) {
      SchemaApi.Field protoField =
          fieldToProto(
              field,
              schema.indexOf(field.getName()),
              schema.getEncodingPositions().get(field.getName()),
              serializeLogicalType);
      builder.addFields(protoField);
    }
    builder.addAllOptions(optionsToProto(schema.getOptions()));
    return builder.build();
  }

  /** Converts one {@link Field}, carrying its schema index and encoding position, to proto form. */
  private static SchemaApi.Field fieldToProto(
      Field field, int fieldId, int position, boolean serializeLogicalType) {
    return SchemaApi.Field.newBuilder()
        .setName(field.getName())
        .setDescription(field.getDescription())
        .setType(fieldTypeToProto(field.getType(), serializeLogicalType))
        .setId(fieldId)
        .setEncodingPosition(position)
        .addAllOptions(optionsToProto(field.getOptions()))
        .build();
  }

  /** Converts a {@link FieldType} to proto form, recursing into composite and logical types. */
  private static SchemaApi.FieldType fieldTypeToProto(
      FieldType fieldType, boolean serializeLogicalType) {
    SchemaApi.FieldType.Builder builder = SchemaApi.FieldType.newBuilder();
    switch (fieldType.getTypeName()) {
      case ROW:
        builder.setRowType(
            SchemaApi.RowType.newBuilder()
                .setSchema(schemaToProto(fieldType.getRowSchema(), serializeLogicalType)));
        break;
      case ARRAY:
        builder.setArrayType(
            SchemaApi.ArrayType.newBuilder()
                .setElementType(
                    fieldTypeToProto(fieldType.getCollectionElementType(), serializeLogicalType)));
        break;
      case ITERABLE:
        builder.setIterableType(
            SchemaApi.IterableType.newBuilder()
                .setElementType(
                    fieldTypeToProto(fieldType.getCollectionElementType(), serializeLogicalType)));
        break;
      case MAP:
        builder.setMapType(
            SchemaApi.MapType.newBuilder()
                .setKeyType(fieldTypeToProto(fieldType.getMapKeyType(), serializeLogicalType))
                .setValueType(fieldTypeToProto(fieldType.getMapValueType(), serializeLogicalType))
                .build());
        break;
      case LOGICAL_TYPE:
        LogicalType logicalType = fieldType.getLogicalType();
        SchemaApi.LogicalType.Builder logicalTypeBuilder =
            SchemaApi.LogicalType.newBuilder()
                .setArgumentType(
                    fieldTypeToProto(logicalType.getArgumentType(), serializeLogicalType))
                .setArgument(
                    fieldValueToProto(logicalType.getArgumentType(), logicalType.getArgument()))
                .setRepresentation(
                    fieldTypeToProto(logicalType.getBaseType(), serializeLogicalType))
                // TODO(BEAM-7855): "javasdk" types should only be a last resort. Types defined in
                // Beam should have their own URN, and there should be a mechanism for users to
                // register their own types by URN.
                .setUrn(URN_BEAM_LOGICAL_JAVASDK);
        if (serializeLogicalType) {
          // Embed the Java-serialized LogicalType so fieldTypeFromProtoWithoutNullable can
          // reconstruct it from the payload.
          logicalTypeBuilder =
              logicalTypeBuilder.setPayload(
                  ByteString.copyFrom(SerializableUtils.serializeToByteArray(logicalType)));
        }
        builder.setLogicalType(logicalTypeBuilder.build());
        break;
        // Special-case for DATETIME and DECIMAL which are logical types in portable representation,
        // but not yet in Java. (BEAM-7554)
      case DATETIME:
        builder.setLogicalType(
            SchemaApi.LogicalType.newBuilder()
                .setUrn(URN_BEAM_LOGICAL_DATETIME)
                .setRepresentation(fieldTypeToProto(FieldType.INT64, serializeLogicalType))
                .build());
        break;
      case DECIMAL:
        builder.setLogicalType(
            SchemaApi.LogicalType.newBuilder()
                .setUrn(URN_BEAM_LOGICAL_DECIMAL)
                .setRepresentation(fieldTypeToProto(FieldType.BYTES, serializeLogicalType))
                .build());
        break;
      case BYTE:
        builder.setAtomicType(SchemaApi.AtomicType.BYTE);
        break;
      case INT16:
        builder.setAtomicType(SchemaApi.AtomicType.INT16);
        break;
      case INT32:
        builder.setAtomicType(SchemaApi.AtomicType.INT32);
        break;
      case INT64:
        builder.setAtomicType(SchemaApi.AtomicType.INT64);
        break;
      case FLOAT:
        builder.setAtomicType(SchemaApi.AtomicType.FLOAT);
        break;
      case DOUBLE:
        builder.setAtomicType(SchemaApi.AtomicType.DOUBLE);
        break;
      case STRING:
        builder.setAtomicType(SchemaApi.AtomicType.STRING);
        break;
      case BOOLEAN:
        builder.setAtomicType(SchemaApi.AtomicType.BOOLEAN);
        break;
      case BYTES:
        builder.setAtomicType(SchemaApi.AtomicType.BYTES);
        break;
    }
    // Nullability is carried on the FieldType proto itself, independent of the type_info branch.
    builder.setNullable(fieldType.getNullable());
    return builder.build();
  }

  /** Reconstructs a Java {@link Schema} from its portable proto representation. */
  public static Schema schemaFromProto(SchemaApi.Schema protoSchema) {
    Schema.Builder builder = Schema.builder();
    Map<String, Integer> encodingLocationMap = Maps.newHashMap();
    for (SchemaApi.Field protoField : protoSchema.getFieldsList()) {
      Field field = fieldFromProto(protoField);
      builder.addField(field);
      encodingLocationMap.put(protoField.getName(), protoField.getEncodingPosition());
    }
    builder.setOptions(optionsFromProto(protoSchema.getOptionsList()));
    Schema schema = builder.build();
    // Encoding positions and UUID are set on the built Schema, mirroring schemaToProto.
    schema.setEncodingPositions(encodingLocationMap);
    if (!protoSchema.getId().isEmpty()) {
      schema.setUUID(UUID.fromString(protoSchema.getId()));
    }
    return schema;
  }

  /** Reconstructs one {@link Field} (type, options, description) from its proto form. */
  private static Field fieldFromProto(SchemaApi.Field protoField) {
    return Field.of(protoField.getName(), fieldTypeFromProto(protoField.getType()))
        .withOptions(optionsFromProto(protoField.getOptionsList()))
        .withDescription(protoField.getDescription());
  }

  /** Reconstructs a {@link FieldType} from proto, applying the nullable flag afterwards. */
  private static FieldType fieldTypeFromProto(SchemaApi.FieldType protoFieldType) {
    FieldType fieldType = fieldTypeFromProtoWithoutNullable(protoFieldType);
    if (protoFieldType.getNullable()) {
      fieldType = fieldType.withNullable(true);
    }
    return fieldType;
  }

  /** Maps the proto type_info oneof back to a (non-nullable) Java {@link FieldType}. */
  private static FieldType fieldTypeFromProtoWithoutNullable(SchemaApi.FieldType protoFieldType) {
    switch (protoFieldType.getTypeInfoCase()) {
      case ATOMIC_TYPE:
        switch (protoFieldType.getAtomicType()) {
          case BYTE:
            return FieldType.of(TypeName.BYTE);
          case INT16:
            return FieldType.of(TypeName.INT16);
          case INT32:
            return FieldType.of(TypeName.INT32);
          case INT64:
            return FieldType.of(TypeName.INT64);
          case FLOAT:
            return FieldType.of(TypeName.FLOAT);
          case DOUBLE:
            return FieldType.of(TypeName.DOUBLE);
          case STRING:
            return FieldType.of(TypeName.STRING);
          case BOOLEAN:
            return FieldType.of(TypeName.BOOLEAN);
          case BYTES:
            return FieldType.of(TypeName.BYTES);
          case UNSPECIFIED:
            throw new IllegalArgumentException("Encountered UNSPECIFIED AtomicType");
          default:
            throw new IllegalArgumentException(
                "Encountered unknown AtomicType: " + protoFieldType.getAtomicType());
        }
      case ROW_TYPE:
        return FieldType.row(schemaFromProto(protoFieldType.getRowType().getSchema()));
      case ARRAY_TYPE:
        return FieldType.array(fieldTypeFromProto(protoFieldType.getArrayType().getElementType()));
      case ITERABLE_TYPE:
        return FieldType.iterable(
            fieldTypeFromProto(protoFieldType.getIterableType().getElementType()));
      case MAP_TYPE:
        return FieldType.map(
            fieldTypeFromProto(protoFieldType.getMapType().getKeyType()),
            fieldTypeFromProto(protoFieldType.getMapType().getValueType()));
      case LOGICAL_TYPE:
        // Special-case for DATETIME and DECIMAL which are logical types in portable representation,
        // but not yet in Java. (BEAM-7554)
        String urn = protoFieldType.getLogicalType().getUrn();
        if (urn.equals(URN_BEAM_LOGICAL_DATETIME)) {
          return FieldType.DATETIME;
        } else if (urn.equals(URN_BEAM_LOGICAL_DECIMAL)) {
          return FieldType.DECIMAL;
        } else if (urn.equals(URN_BEAM_LOGICAL_JAVASDK)) {
          // Java-SDK-only logical types are recovered by deserializing the embedded payload
          // written by fieldTypeToProto when serializeLogicalType was set.
          return FieldType.logicalType(
              (LogicalType)
                  SerializableUtils.deserializeFromByteArray(
                      protoFieldType.getLogicalType().getPayload().toByteArray(), "logicalType"));
        } else {
          throw new IllegalArgumentException("Encountered unsupported logical type URN: " + urn);
        }
      default:
        throw new IllegalArgumentException(
            "Unexpected type_info: " + protoFieldType.getTypeInfoCase());
    }
  }

  /** Converts a {@link Row} to proto form, encoding each value by its schema field type. */
  public static SchemaApi.Row rowToProto(Row row) {
    SchemaApi.Row.Builder builder = SchemaApi.Row.newBuilder();
    for (int i = 0; i < row.getFieldCount(); ++i) {
      builder.addValues(fieldValueToProto(row.getSchema().getField(i).getType(), row.getValue(i)));
    }
    return builder.build();
  }

  /**
   * Reconstructs a {@link Row} from proto; {@code fieldType} must be a ROW type whose schema
   * describes the encoded values positionally.
   */
  public static Object rowFromProto(SchemaApi.Row row, FieldType fieldType) {
    Row.Builder builder = Row.withSchema(fieldType.getRowSchema());
    for (int i = 0; i < row.getValuesCount(); ++i) {
      builder.addValue(
          fieldValueFromProto(fieldType.getRowSchema().getField(i).getType(), row.getValues(i)));
    }
    return builder.build();
  }

  /** Encodes a single field value, dispatching on the type to the matching proto oneof branch. */
  static SchemaApi.FieldValue fieldValueToProto(FieldType fieldType, Object value) {
    FieldValue.Builder builder = FieldValue.newBuilder();
    switch (fieldType.getTypeName()) {
      case ARRAY:
        return builder
            .setArrayValue(
                arrayValueToProto(fieldType.getCollectionElementType(), (Iterable) value))
            .build();
      case ITERABLE:
        return builder
            .setIterableValue(
                iterableValueToProto(fieldType.getCollectionElementType(), (Iterable) value))
            .build();
      case MAP:
        return builder
            .setMapValue(
                mapToProto(fieldType.getMapKeyType(), fieldType.getMapValueType(), (Map) value))
            .build();
      case ROW:
        return builder.setRowValue(rowToProto((Row) value)).build();
      case LOGICAL_TYPE:
        // LOGICAL_TYPE deliberately falls through: its value is encoded like an atomic value.
      default:
        return builder.setAtomicValue(primitiveRowFieldToProto(fieldType, value)).build();
    }
  }

  /** Decodes a single field value; inverse of {@link #fieldValueToProto}. */
  static Object fieldValueFromProto(FieldType fieldType, SchemaApi.FieldValue value) {
    switch (fieldType.getTypeName()) {
      case ARRAY:
        return arrayValueFromProto(fieldType.getCollectionElementType(), value.getArrayValue());
      case ITERABLE:
        return iterableValueFromProto(
            fieldType.getCollectionElementType(), value.getIterableValue());
      case MAP:
        return mapFromProto(
            fieldType.getMapKeyType(), fieldType.getMapValueType(), value.getMapValue());
      case ROW:
        return rowFromProto(value.getRowValue(), fieldType);
      case LOGICAL_TYPE:
        // LOGICAL_TYPE deliberately falls through: its value is decoded like an atomic value.
      default:
        return primitiveFromProto(fieldType, value.getAtomicValue());
    }
  }

  /** Encodes every element of an array value recursively. */
  private static SchemaApi.ArrayTypeValue arrayValueToProto(
      FieldType elementType, Iterable values) {
    return ArrayTypeValue.newBuilder()
        .addAllElement(Iterables.transform(values, e -> fieldValueToProto(elementType, e)))
        .build();
  }

  /** Decodes an array value into a {@link List}. */
  private static Iterable arrayValueFromProto(
      FieldType elementType, SchemaApi.ArrayTypeValue values) {
    return values.getElementList().stream()
        .map(e -> fieldValueFromProto(elementType, e))
        .collect(Collectors.toList());
  }

  /** Encodes every element of an iterable value recursively. */
  private static SchemaApi.IterableTypeValue iterableValueToProto(
      FieldType elementType, Iterable values) {
    return IterableTypeValue.newBuilder()
        .addAllElement(Iterables.transform(values, e -> fieldValueToProto(elementType, e)))
        .build();
  }

  /** Decodes an iterable value into a {@link List}. */
  private static Object iterableValueFromProto(FieldType elementType, IterableTypeValue values) {
    return values.getElementList().stream()
        .map(e -> fieldValueFromProto(elementType, e))
        .collect(Collectors.toList());
  }

  /** Encodes a map as a repeated list of key/value entry messages. */
  private static SchemaApi.MapTypeValue mapToProto(
      FieldType keyType, FieldType valueType, Map<Object, Object> map) {
    MapTypeValue.Builder builder = MapTypeValue.newBuilder();
    for (Map.Entry entry : map.entrySet()) {
      MapTypeEntry mapProtoEntry =
          MapTypeEntry.newBuilder()
              .setKey(fieldValueToProto(keyType, entry.getKey()))
              .setValue(fieldValueToProto(valueType, entry.getValue()))
              .build();
      builder.addEntries(mapProtoEntry);
    }
    return builder.build();
  }

  /** Decodes a map value back into a {@link Map}; inverse of {@link #mapToProto}. */
  private static Object mapFromProto(
      FieldType mapKeyType, FieldType mapValueType, MapTypeValue mapValue) {
    return mapValue.getEntriesList().stream()
        .collect(
            Collectors.toMap(
                entry -> fieldValueFromProto(mapKeyType, entry.getKey()),
                entry -> fieldValueFromProto(mapValueType, entry.getValue())));
  }

  /** Encodes an atomic (primitive/string/bytes) value into the matching proto field. */
  private static AtomicTypeValue primitiveRowFieldToProto(FieldType fieldType, Object value) {
    switch (fieldType.getTypeName()) {
      case BYTE:
        return AtomicTypeValue.newBuilder().setByte((byte) value).build();
      case INT16:
        return AtomicTypeValue.newBuilder().setInt16((short) value).build();
      case INT32:
        return AtomicTypeValue.newBuilder().setInt32((int) value).build();
      case INT64:
        return AtomicTypeValue.newBuilder().setInt64((long) value).build();
      case FLOAT:
        return AtomicTypeValue.newBuilder().setFloat((float) value).build();
      case DOUBLE:
        return AtomicTypeValue.newBuilder().setDouble((double) value).build();
      case STRING:
        return AtomicTypeValue.newBuilder().setString((String) value).build();
      case BOOLEAN:
        return AtomicTypeValue.newBuilder().setBoolean((boolean) value).build();
      case BYTES:
        return AtomicTypeValue.newBuilder().setBytes(ByteString.copyFrom((byte[]) value)).build();
      default:
        throw new RuntimeException("FieldType unexpected " + fieldType.getTypeName());
    }
  }

  /** Decodes an atomic value; inverse of {@link #primitiveRowFieldToProto}. */
  private static Object primitiveFromProto(FieldType fieldType, AtomicTypeValue value) {
    switch (fieldType.getTypeName()) {
      case BYTE:
        // Proto stores BYTE/INT16 widened to int32; narrow back to the Java type.
        return (byte) value.getByte();
      case INT16:
        return (short) value.getInt16();
      case INT32:
        return value.getInt32();
      case INT64:
        return value.getInt64();
      case FLOAT:
        return value.getFloat();
      case DOUBLE:
        return value.getDouble();
      case STRING:
        return value.getString();
      case BOOLEAN:
        return value.getBoolean();
      case BYTES:
        return value.getBytes().toByteArray();
      default:
        throw new RuntimeException("FieldType unexpected " + fieldType.getTypeName());
    }
  }

  /** Encodes each schema/field option as a named, typed value. */
  private static List<SchemaApi.Option> optionsToProto(Schema.Options options) {
    List<SchemaApi.Option> protoOptions = new ArrayList<>();
    for (String name : options.getOptionNames()) {
      protoOptions.add(
          SchemaApi.Option.newBuilder()
              .setName(name)
              // Option types are never serialized with a Java payload (hence 'false').
              .setType(fieldTypeToProto(Objects.requireNonNull(options.getType(name)), false))
              .setValue(
                  fieldValueToProto(
                      Objects.requireNonNull(options.getType(name)), options.getValue(name)))
              .build());
    }
    return protoOptions;
  }

  /** Rebuilds {@link Schema.Options} from their proto form; inverse of {@link #optionsToProto}. */
  private static Schema.Options optionsFromProto(List<SchemaApi.Option> protoOptions) {
    Schema.Options.Builder optionBuilder = Schema.Options.builder();
    for (SchemaApi.Option protoOption : protoOptions) {
      FieldType fieldType = fieldTypeFromProto(protoOption.getType());
      optionBuilder.setOption(
          protoOption.getName(), fieldType, fieldValueFromProto(fieldType, protoOption.getValue()));
    }
    return optionBuilder.build();
  }
}
/*
 * Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.wso2.carbon.dataservices.core.engine;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.dataservices.common.DBConstants;
import org.wso2.carbon.dataservices.common.DBConstants.DBSFields;
import org.wso2.carbon.dataservices.common.DBConstants.ResultTypes;
import org.wso2.carbon.dataservices.common.DBConstants.ServiceStatusValues;
import org.wso2.carbon.dataservices.core.DBUtils;
import org.wso2.carbon.dataservices.core.DataServiceFault;
import org.wso2.carbon.dataservices.core.DataServiceUser;
import org.wso2.carbon.dataservices.core.auth.AuthorizationProvider;
import org.wso2.carbon.dataservices.core.description.config.Config;
import org.wso2.carbon.dataservices.core.description.event.EventTrigger;
import org.wso2.carbon.dataservices.core.description.operation.Operation;
import org.wso2.carbon.dataservices.core.description.operation.OperationFactory;
import org.wso2.carbon.dataservices.core.description.query.Query;
import org.wso2.carbon.dataservices.core.description.resource.Resource;
import org.wso2.carbon.dataservices.core.description.resource.Resource.ResourceID;
import org.wso2.carbon.dataservices.core.description.resource.ResourceFactory;
import org.wso2.carbon.dataservices.core.description.xa.DSSXATransactionManager;
import org.wso2.carbon.dataservices.core.internal.DataServicesDSComponent;
import org.wso2.carbon.event.core.EventBroker;
import org.wso2.carbon.event.core.exception.EventBrokerException;
import org.wso2.carbon.event.core.subscription.Subscription;
import org.wso2.securevault.SecretResolver;

import javax.transaction.TransactionManager;
import javax.xml.stream.XMLStreamWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

/**
 * This class is the logical representation of a data service, and is the
 * location of all the queries, operations and configurations associated with a specific DS.
 */
public class DataService {

    private static final Log log = LogFactory.getLog(DataService.class);

    /**
     * Name of the data service
     */
    private String name;

    /**
     * Service namespace
     */
    private String serviceNamespace;

    /**
     * All the requests that are handled by the dataservice, i.e. operations, resources
     */
    private Map<String, CallableRequest> callableRequests;

    /**
     * Operations which belongs to the dataservice
     */
    private Map<String, Operation> operations;

    /**
     * Resources which belongs to the dataservice, this maps a resource id to an resource,
     * where a resource id contains a path and a HTTP method
     */
    private Map<ResourceID, Resource> resourceMap;

    /**
     * Data source configurations that are contained in the dataservice
     */
    private Map<String, Config> configs;

    /**
     * Queries defined in this dataservice
     */
    private Map<String, Query> queries;

    /**
     * Event triggers used in this dataservice, i.e. input/output event-triggers
     */
    private Map<String, EventTrigger> eventTriggers;

    /**
     * Description of the dataservice
     */
    private String description;

    /**
     * password manager configuration of the data service
     */
    private SecretResolver secretResolver;

    /**
     * The default namespace to be used, when the user doesn't explicitly mention the namespace
     * to be used in the result of the dataservice. The default namepace is given the
     * "baseURI" attribute, in the dataservice document element.
     */
    private String defaultNamespace;

    /**
     * The physical file path of the dataservice, if known
     */
    private String dsLocation;

    /**
     * Relative file path of the dataservice, if known
     */
    private String dsRelativeLocation;

    /**
     * The service status of the dataservices, can be either "active" or "inactive",
     * mainly used in WIP services.
     */
    private String serviceStatus;

    /**
     * States if batch requests are enabled, if so, the batch operations are also
     * created in the WSDL
     */
    private boolean batchRequestsEnabled;

    /**
     * States if boxcarring is enabled, if so, boxcarring related operations are
     * also created
     * This is also used to identify requestBox requests
     */
    private boolean boxcarringEnabled;

    /**
     * The current user who is sending requests
     */
    private static ThreadLocal<DataServiceUser> currentUser = new ThreadLocal<DataServiceUser>();

    /**
     * the JNDI name of the app server transaction manager
     */
    private String containerUserTxName;

    /**
     * the DSS XA transaction manager
     */
    private DSSXATransactionManager txManager;

    /**
     * flag to check if streaming is disabled
     */
    private boolean disableStreaming;

    /**
     * flag to check if boxcarring legacy mode is disabled
     */
    private boolean disableLegacyBoxcarringMode;

    /**
     * The tenant to which this service belongs to.
     */
    private int tenantId;

    /**
     * Authorization Provider which is used to retrieve user name and roles in configuration time and runtime(to be
     * used in role based filtering)
     */
    private AuthorizationProvider authorizationProvider;

    /**
     * Creates a data service with its basic metadata, initializes the XA transaction
     * manager and resolves the owning tenant id.
     *
     * @throws DataServiceFault if the container transaction manager cannot be looked up
     */
    public DataService(String name, String description, String defaultNamespace,
            String dsLocation, String serviceStatus, boolean batchRequestsEnabled,
            boolean boxcarringEnabled, String containerUserTxName) throws DataServiceFault {
        this.name = name;
        this.callableRequests = new HashMap<String, CallableRequest>();
        this.operations = new HashMap<String, Operation>();
        // LinkedHashMap: resource ordering (as declared) is preserved for iteration
        this.resourceMap = new LinkedHashMap<ResourceID, Resource>();
        this.configs = new HashMap<String, Config>();
        this.eventTriggers = new HashMap<String, EventTrigger>();
        this.queries = new HashMap<String, Query>();
        this.description = description;
        this.defaultNamespace = defaultNamespace;
        this.dsLocation = dsLocation;
        this.setRelativeDsLocation(this.dsLocation);
        this.serviceStatus = serviceStatus;
        this.batchRequestsEnabled = batchRequestsEnabled;
        this.boxcarringEnabled = boxcarringEnabled;
        this.containerUserTxName = containerUserTxName;
        /* initialize transaction manager */
        initXA();
        /* set tenant id */
        this.tenantId = DBUtils.getCurrentTenantId();
    }

    /**
     * Looks up the container transaction manager (by the configured JNDI name) and
     * wraps it in the DSS XA transaction manager.
     */
    private void initXA() throws DataServiceFault {
        // NOTE: this local intentionally shadows the 'txManager' field; the field is
        // assigned on the next line from the wrapped container manager.
        TransactionManager txManager = DBUtils.getContainerTransactionManager(
                this.getContainerUserTransactionName());
        this.txManager = new DSSXATransactionManager(txManager);
    }

    /**
     * Registers the boxcarring support artifacts: two no-op queries (an empty query and
     * an end-boxcar query with an xs:any result) plus the begin/end/abort boxcar operations.
     */
    private void initBoxcarring() throws DataServiceFault {
        /* add empty query, begin_boxcar, abort_boxcar */
        this.addQuery(new Query(this, DBConstants.EMPTY_QUERY_ID,
                new ArrayList<QueryParam>(), null, null, null, null, null,
                this.getDefaultNamespace()) {
            public Object runPreQuery(InternalParamCollection params, int queryLevel) {
                return null;
            }

            @Override
            public void runPostQuery(Object result, XMLStreamWriter xmlWriter,
                    InternalParamCollection params, int queryLevel) throws DataServiceFault {
            }
        });
        /* empty query for end_boxcar */
        Result endBoxcarResult = new Result("dummy", "dummy",
                DBConstants.WSO2_DS_NAMESPACE, null, ResultTypes.XML);
        endBoxcarResult.setXsAny(true);
        endBoxcarResult.setDefaultElementGroup(new OutputElementGroup(null, null, null, null));
        this.addQuery(new Query(this, DBConstants.EMPTY_END_BOXCAR_QUERY_ID,
                new ArrayList<QueryParam>(), endBoxcarResult, null, null, null, null,
                this.getDefaultNamespace()) {
            public Object runPreQuery(InternalParamCollection params, int queryLevel) {
                return null;
            }

            @Override
            public void runPostQuery(Object result, XMLStreamWriter xmlWriter,
                    InternalParamCollection params, int queryLevel) throws DataServiceFault {
            }
        });
        /* operations */
        this.addOperation(OperationFactory.createBeginBoxcarOperation(this));
        this.addOperation(OperationFactory.createEndBoxcarOperation(this));
        this.addOperation(OperationFactory.createAbortBoxcarOperation(this));
    }

    /**
     * Helper method to initialise request box.
     *
     * @throws DataServiceFault
     */
    public void initRequestBox() throws DataServiceFault {
        initRequestBoxForOperation();
        initRequestBoxForResource();
    }

    /**
     * Helper method to initialise request box operation, (if there are no operations already, then it will return
     * without doing anything)
     *
     * @throws DataServiceFault
     */
    private void initRequestBoxForOperation() throws DataServiceFault {
        if (this.getOperationNames().isEmpty()) {
            if (log.isDebugEnabled()) {
                log.debug("There are no Operations available in data service, So request box won't be generated for Operations");
            }
            return;
        }
        addEmptyQueryForRequestBox();
        /* operation */
        this.addOperation(OperationFactory.createRequestBoxOperation(this));
    }

    /**
     * Helper method to initialise request box resource, (if there are no resources already, then it will return
     * without doing anything)
     *
     * @throws DataServiceFault
     */
    private void initRequestBoxForResource() throws DataServiceFault {
        if (this.getResourceIds().isEmpty()) {
            if (log.isDebugEnabled()) {
                log.debug("There are no Resources available in data service, So request box won't be generated for Resources");
            }
            return;
        }
        addEmptyQueryForRequestBox();
        /* resource */
        this.addResource(ResourceFactory.createRequestBoxResource(this));
    }

    /**
     * Helper method to add empty query to be used for request box operation and resource.
     * Idempotent: returns early when the query was already registered (e.g. by boxcarring
     * init or by the other request-box initializer).
     *
     * @throws DataServiceFault
     */
    private void addEmptyQueryForRequestBox() throws DataServiceFault {
        if (this.getQuery(DBConstants.EMPTY_END_BOXCAR_QUERY_ID) != null) {
            if (log.isDebugEnabled()) {
                log.debug("Empty query already exist, returning without trying to add it again");
            }
            return;
        }
        if (log.isDebugEnabled()) {
            log.debug("Empty query does not exist, so adding it for used in request box operation/resource");
        }
        /* empty query for requestBox */
        Result endRequestBoxResult = new Result("dummy", "dummy",
                DBConstants.WSO2_DS_NAMESPACE, null, ResultTypes.XML);
        endRequestBoxResult.setXsAny(true);
        endRequestBoxResult.setDefaultElementGroup(new OutputElementGroup(null, null, null, null));
        this.addQuery(new Query(this, DBConstants.EMPTY_END_BOXCAR_QUERY_ID,
                new ArrayList<QueryParam>(), endRequestBoxResult, null, null, null, null,
                this.getDefaultNamespace()) {
            public Object runPreQuery(InternalParamCollection params, int queryLevel) {
                return null;
            }

            @Override
            public void runPostQuery(Object result, XMLStreamWriter xmlWriter,
                    InternalParamCollection params, int queryLevel) throws DataServiceFault {
            }
        });
    }

    /**
     * Initializes the data service object: registers boxcarring/request-box artifacts
     * (depending on the legacy-mode flag) and initializes all call queries and results.
     */
    public void init() throws DataServiceFault {
        /* add operations related to boxcarring and request Box */
        if (this.isBoxcarringEnabled() && this.disableLegacyBoxcarringMode) {
            // Legacy boxcarring disabled: only the request-box artifacts are created.
            initRequestBox();
        } else if (this.isBoxcarringEnabled()) {
            initBoxcarring();
            initRequestBox();
        }
        /* init callable requests */
        for (CallableRequest callableRequest : this.getCallableRequests().values()) {
            callableRequest.getCallQuery().init();
        }
        /* init queries */
        for (Query query : this.getQueries().values()) {
            if (query.hasResult()) {
                query.getResult().getDefaultElementGroup().init();
            }
        }
    }

    public int getTenantId() {
        return tenantId;
    }

    public boolean isDisableStreaming() {
        return disableStreaming;
    }

    public void setDisableStreaming(boolean disableStreaming) {
        this.disableStreaming = disableStreaming;
    }

    public boolean isDisableLegacyBoxcarringMode() {
        return this.disableLegacyBoxcarringMode;
    }

    public void setDisableLegacyBoxcarringMode(boolean disableLegacyBoxcarringMode) {
        this.disableLegacyBoxcarringMode = disableLegacyBoxcarringMode;
    }

    public DSSXATransactionManager getDSSTxManager() {
        return txManager;
    }

    public String getContainerUserTransactionName() {
        return containerUserTxName;
    }

    /**
     * Cleanup operations done when undeploying the data service:
     * removes event-broker subscriptions owned by this service and closes all configs.
     */
    public void cleanup() throws DataServiceFault {
        if (log.isDebugEnabled()) {
            log.debug("Data Service '" + this.getName() + "' cleanup start..");
        }
        /* remove event subscriptions */
        EventBroker eventBroker = DataServicesDSComponent.getEventBroker();
        if (eventBroker != null) {
            this.clearDataServicesEventSubscriptions(eventBroker);
        }
        /* cleanup configs */
        for (Config config : this.getConfigs().values()) {
            config.close();
        }
        if (log.isDebugEnabled()) {
            log.debug("Data Service '" + this.getName() + "' cleanup end.");
        }
    }

    /**
     * Unsubscribes every broker subscription whose DATA_SERVICE_NAME property matches
     * this service's name.
     */
    private void clearDataServicesEventSubscriptions(
            EventBroker eventBroker) throws DataServiceFault {
        try {
            String dsName;
            for (Subscription subs : eventBroker.getAllSubscriptions(null)) {
                dsName = subs.getProperties().get(DBConstants.DATA_SERVICE_NAME);
                if (dsName != null && this.getName().equals(dsName)) {
                    eventBroker.unsubscribe(subs.getId());
                }
            }
        } catch (EventBrokerException e) {
            throw new DataServiceFault(e);
        }
    }

    public String getServiceNamespace() {
        return serviceNamespace;
    }

    public void setServiceNamespace(String serviceNamespace) {
        this.serviceNamespace = serviceNamespace;
    }

    public Map<String, EventTrigger> getEventTriggers() {
        return eventTriggers;
    }

    public EventTrigger getEventTrigger(String triggerId) {
        return this.getEventTriggers().get(triggerId);
    }

    public void addEventTrigger(EventTrigger eventTrigger) {
        this.getEventTriggers().put(eventTrigger.getTriggerId(), eventTrigger);
    }

    public boolean isBatchRequestsEnabled() {
        return batchRequestsEnabled;
    }

    public boolean isBoxcarringEnabled() {
        return boxcarringEnabled;
    }

    /** Returns the user bound to the current request thread, if any. */
    public static DataServiceUser getCurrentUser() {
        return currentUser.get();
    }

    /** Binds the given user to the current request thread. */
    public static void setCurrentUser(DataServiceUser user) {
        currentUser.set(user);
    }

    public String getDsLocation() {
        return dsLocation;
    }

    public String getDefaultNamespace() {
        return defaultNamespace;
    }

    public Map<String, CallableRequest> getCallableRequests() {
        return callableRequests;
    }

    public CallableRequest getCallableRequest(String requestName) {
        return this.getCallableRequests().get(requestName);
    }

    public String getDescription() {
        return description;
    }

    public String getName() {
        return name;
    }

    private Map<String, Operation> getOperations() {
        return operations;
    }

    public Set<String> getOperationNames() {
        return this.getOperations().keySet();
    }

    private Map<ResourceID, Resource> getResourceMap() {
        return resourceMap;
    }

    public Set<ResourceID> getResourceIds() {
        return this.getResourceMap().keySet();
    }

    public Map<String, Config> getConfigs() {
        return configs;
    }

    public Config getConfig(String configId) {
        return this.getConfigs().get(configId);
    }

    public void addConfig(Config config) {
        this.getConfigs().put(config.getConfigId(), config);
    }

    public Operation getOperation(String opName) {
        return this.getOperations().get(opName);
    }

    public Resource getResource(ResourceID resourceId) {
        return this.getResourceMap().get(resourceId);
    }

    private void addCallableRequest(CallableRequest callableRequest) {
        this.getCallableRequests().put(callableRequest.getRequestName(), callableRequest);
    }

    /** Registers an operation and also exposes it as a callable request. */
    public void addOperation(Operation operation) {
        this.getOperations().put(operation.getName(), operation);
        this.addCallableRequest(operation);
    }

    /** Registers a resource and also exposes it as a callable request. */
    public void addResource(Resource resource) {
        this.getResourceMap().put(resource.getResourceId(), resource);
        this.addCallableRequest(resource);
    }

    public Map<String, Query> getQueries() {
        return queries;
    }

    public Query getQuery(String queryId) {
        return this.getQueries().get(queryId);
    }

    public void addQuery(Query query) {
        this.getQueries().put(query.getQueryId(), query);
    }

    public SecretResolver getSecretResolver() {
        return secretResolver;
    }

    public void setSecretResolver(SecretResolver secretResolver) {
        this.secretResolver = secretResolver;
    }

    public String getRelativeDsLocation() {
        return this.dsRelativeLocation;
    }

    /**
     * Derives the relative location by keeping the path segment after the last
     * "dataservices" directory component of the physical location.
     * NOTE(review): a path that itself contains the literal "dataservices" elsewhere
     * would also be split here — presumably locations always live under a
     * "dataservices" deployment directory; verify against the deployer.
     */
    private void setRelativeDsLocation(String location) {
        if (location != null && !"".equals(location)) {
            String[] dsPathContents = location.trim().split("dataservices");
            this.dsRelativeLocation = dsPathContents[dsPathContents.length - 1];
        }
    }

    /**
     * Getter method for Authorization Provider.
     *
     * @return authorizationProvider instance.
     */
    public AuthorizationProvider getAuthorizationProvider() {
        return authorizationProvider;
    }

    /**
     * Setter method for Authorization Provider.
     *
     * @param authorizationProvider instance.
     */
    public void setAuthorizationProvider(AuthorizationProvider authorizationProvider) {
        this.authorizationProvider = authorizationProvider;
    }

    /**
     * Instructs the data service to run the request with the given name
     * with the given parameters.
     *
     * @param xmlWriter   XMLStreamWriter used to write the result
     * @param requestName The service request name
     * @param params      The parameters to be used for the service call
     * @throws DataServiceFault Thrown if a problem occurs in service dispatching
     */
    public void invoke(XMLStreamWriter xmlWriter, String requestName,
            Map<String, ParamValue> params) throws DataServiceFault {
        try {
            this.getCallableRequest(requestName).execute(xmlWriter, this.extractParams(params));
        } catch (DataServiceFault e) {
            // Enrich the fault with dispatch context before propagating it unchanged.
            this.fillInDataServiceFault(e, requestName, params);
            throw e;
        } catch (Exception e) {
            DataServiceFault dsf = new DataServiceFault(e);
            this.fillInDataServiceFault(dsf, requestName, params);
            throw dsf;
        }
    }

    /** Attaches the source service, request name and parameters to a fault for diagnostics. */
    private void fillInDataServiceFault(DataServiceFault dsf, String requestName,
            Map<String, ParamValue> params) {
        dsf.setSourceDataService(this);
        dsf.setCurrentRequestName(requestName);
        dsf.setCurrentParams(params);
    }

    /**
     * Convert the parameters passed in to a collection of ExternalParam objects.
     * An ExternalParam is a value that is passed into "call queries".
     */
    private ExternalParamCollection extractParams(Map<String, ParamValue> params) {
        ExternalParamCollection epc = new ExternalParamCollection();
        for (Entry<String, ParamValue> entry : params.entrySet()) {
            /* 'toLowerCase' - workaround for different character case issues in column names.
             * This is because, some DBMSs like H2, the results they give, the column names
             * will not match the column names they actually return. For example,
             * ....
             * <query id="select_query_count">
             *    <sql>SELECT COUNT(*) as orderDetailsCount FROM OrderDetails</sql>
             *    <result element="Orders" rowName="OrderDetails">
             *       <element name="orderDetailsCount" column="orderDetailsCount" xsdType="integer" />
             *    </result>
             * </query>
             * ....
             * The above query, the column that should be returned should be "orderDetailsCount",
             * to be matched by the result's column entry, mentioning, that it's expecting a
             * column value "orderDetailsCount". But H2 doesn't return this name.
             * So to overcome this, all the parameter names (the result itself is a parameter
             * for output elements(static elements, call queries)), are lower cased before passed in.
             */
            epc.addParam(new ExternalParam(entry.getKey().toLowerCase(),
                    entry.getValue(), DBSFields.QUERY_PARAM));
        }
        return epc;
    }

    public String getResultWrapperForRequest(String requestName) {
        return this.getCallableRequest(requestName).getCallQuery().getResultWrapper();
    }

    /**
     * Returns the namespace for the given request name.
     */
    public String getNamespaceForRequest(String requestName) {
        CallQuery callQuery = this.getCallableRequest(requestName).getCallQuery();
        return callQuery.getNamespace();
    }

    public boolean hasResultForRequest(String requestName) {
        return this.getCallableRequest(requestName).getCallQuery().isHasResult();
    }

    public boolean isReturningRequestStatus(String requestName) {
        return this.getCallableRequest(requestName).isReturnRequestStatus();
    }

    public String getServiceStatus() {
        return serviceStatus;
    }

    public void setServiceStatus(String serviceStatus) {
        this.serviceStatus = serviceStatus;
    }

    public boolean isServiceInactive() {
        return this.getServiceStatus() != null
                && this.getServiceStatus().equals(ServiceStatusValues.INACTIVE);
    }

    @Override
    public String toString() {
        StringBuffer buff = new StringBuffer();
        buff.append("Name: " + this.getName() + "\n");
        buff.append("Location: " + this.getRelativeDsLocation() + "\n");
        buff.append("Description: " + (this.getDescription() != null ?
                this.getDescription() : "N/A") + "\n");
        buff.append("Default Namespace: " + this.getDefaultNamespace() + "\n");
        return buff.toString();
    }

    public boolean isInDTX() {
        return this.getDSSTxManager().isInDTX();
    }

}
package com.nguyenmanhtuan.benhandientu; import java.io.IOException; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Locale; import org.apache.http.NameValuePair; import org.apache.http.message.BasicNameValuePair; import org.json.JSONException; import org.json.JSONObject; import com.nguyenmanhtuan.utils.DatabaseHandler; import com.nguyenmanhtuan.utils.JSONParser; import android.app.Activity; import android.app.ProgressDialog; import android.content.Context; import android.content.Intent; import android.content.res.Configuration; import android.content.res.Resources; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.os.AsyncTask; import android.os.Bundle; import android.util.DisplayMetrics; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.CompoundButton.OnCheckedChangeListener; import android.widget.Toast; public class SetExamActivity extends Activity implements OnClickListener, OnCheckedChangeListener { private Locale myLocale; CheckBox cbLiver, cbBlood, cbUrine; Button btCheck, btSetExam; String email = null; // String liver = null; // String blood = null; // String urine = null; String liver,blood,urine; String networkwarning; String examwarning; String examcreate; String examconfirm; String examliver; String examblood; String examurine; String check; String load; String contact; JSONParser jParser = new JSONParser(); private static String url_create_exam = "http://192.168.56.1/emr_connect/create_exam.php"; private static final String KEY_SUCCESS = "success"; @Override protected void onCreate(Bundle savedInstanceState) { // TODO Auto-generated method stub super.onCreate(savedInstanceState); 
setContentView(R.layout.activity_setexam); cbLiver = (CheckBox) findViewById(R.id.cbLiver); cbBlood = (CheckBox) findViewById(R.id.cbBlood); cbUrine = (CheckBox) findViewById(R.id.cbUrine); btCheck = (Button) findViewById(R.id.btCheck); btSetExam = (Button) findViewById(R.id.btSetExam); liver = "no"; blood = "no"; urine = "no"; networkwarning = getResources().getString(R.string.networkwarning); check = getResources().getString(R.string.check); load = getResources().getString(R.string.load); contact = getResources().getString(R.string.contact); examcreate = getResources().getString(R.string.examcreate); examconfirm = getResources().getString(R.string.examconfirm); examwarning = getResources().getString(R.string.examwarning); examliver = getResources().getString(R.string.examliver); examblood = getResources().getString(R.string.examblood); examurine = getResources().getString(R.string.examurine); DatabaseHandler db = new DatabaseHandler(getApplicationContext()); HashMap<String, String> user = new HashMap<String, String>(); user = db.getUserDetails(); email = user.get("email"); cbLiver.setOnCheckedChangeListener(this); cbBlood.setOnCheckedChangeListener(this); cbUrine.setOnCheckedChangeListener(this); btCheck.setOnClickListener(this); btSetExam.setOnClickListener(this); } @Override public void onClick(View v) { switch (v.getId()) { case R.id.btCheck: if (liver.equals("no") && blood.equals("no") && urine.equals("no")) { Toast.makeText(SetExamActivity.this, examwarning, Toast.LENGTH_LONG) .show(); } else if (liver.equals("yes") && blood.equals("yes") && urine.equals("yes")) { Toast.makeText(SetExamActivity.this, examconfirm +"\n" + examliver + "\n" +examblood + "\n" +examurine, Toast.LENGTH_LONG).show(); } else if (liver.equals("yes") && blood.equals("yes")) { Toast.makeText(SetExamActivity.this, examconfirm +"\n" + examliver + "\n" +examblood, Toast.LENGTH_LONG).show(); } else if (blood.equals("yes") && urine.equals("yes")) { Toast.makeText(SetExamActivity.this, 
examconfirm + "\n" +examblood + "\n" +examurine, Toast.LENGTH_LONG).show(); } else if (liver.equals("yes") && urine.equals("yes")) { Toast.makeText(SetExamActivity.this, examconfirm +"\n" + examliver + "\n" +examurine, Toast.LENGTH_LONG).show(); } else if (liver.equals("yes")) { Toast.makeText(SetExamActivity.this, examconfirm +"\n" + examliver , Toast.LENGTH_LONG).show(); } else if (blood.equals("yes")) { Toast.makeText(SetExamActivity.this, examconfirm +"\n" +examblood , Toast.LENGTH_LONG).show(); } else if (urine.equals("yes")) { Toast.makeText(SetExamActivity.this, examconfirm +"\n" +examurine, Toast.LENGTH_LONG).show(); } break; case R.id.btSetExam: if (liver.equals("no") && blood.equals("no") && urine.equals("no")) { Toast.makeText(SetExamActivity.this, examwarning, Toast.LENGTH_LONG) .show(); }else{ new NetCheck().execute(); } break; } } @Override public void onCheckedChanged(CompoundButton arg0, boolean isChecked) { switch (arg0.getId()) { case R.id.cbLiver: if (isChecked) { liver = "yes"; } else { liver = "no"; } break; case R.id.cbBlood: if (isChecked) { blood = "yes"; } else { blood = "no"; } break; case R.id.cbUrine: if (isChecked) { urine = "yes"; } else { urine = "no"; } break; } } private class NetCheck extends AsyncTask<String, String, Boolean> { private ProgressDialog nDialog; @Override protected void onPreExecute() { super.onPreExecute(); nDialog = new ProgressDialog(SetExamActivity.this); nDialog.setTitle(check); nDialog.setMessage(load+"..."); nDialog.setIndeterminate(false); nDialog.setCancelable(true); nDialog.show(); } /** * Gets current device state and checks for working internet connection * by trying Google. **/ @Override protected Boolean doInBackground(String... 
args) { ConnectivityManager cm = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE); NetworkInfo netInfo = cm.getActiveNetworkInfo(); if (netInfo != null && netInfo.isConnected()) { try { URL url = new URL("http://www.google.com"); HttpURLConnection urlc = (HttpURLConnection) url.openConnection(); urlc.setConnectTimeout(3000); urlc.connect(); if (urlc.getResponseCode() == 200) { return true; } } catch (MalformedURLException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } return false; } @Override protected void onPostExecute(Boolean th) { if (th == true) { nDialog.dismiss(); new ProcessSetExam().execute(); } else { nDialog.dismiss(); Toast.makeText(getApplicationContext(), networkwarning, Toast.LENGTH_SHORT).show(); } } } class ProcessSetExam extends AsyncTask<String, String, String> { private ProgressDialog pDialog; /** * Before starting background thread Show Progress Dialog */ @Override protected void onPreExecute() { super.onPreExecute(); pDialog = new ProgressDialog(SetExamActivity.this); pDialog.setMessage(examcreate +"..."); pDialog.setIndeterminate(false); pDialog.setCancelable(true); pDialog.show(); } /** * Creating product */ protected String doInBackground(String... 
args) { // Building Parameters List<NameValuePair> params = new ArrayList<NameValuePair>(); params.add(new BasicNameValuePair("email", email)); params.add(new BasicNameValuePair("liver", liver)); params.add(new BasicNameValuePair("blood", blood)); params.add(new BasicNameValuePair("urine", urine)); // getting JSON Object // Note that create exam url accepts POST method JSONObject json = jParser.makeHttpRequest(url_create_exam, "POST", params); // check log cat fro response Log.d("Create Response", json.toString()); // check for success tag try { int success = json.getInt(KEY_SUCCESS); if (success == 1) { // successfully created exam Intent i = new Intent(getApplicationContext(), AllRecordsActivity.class); startActivity(i); // closing this screen finish(); } else { // failed to create exam } } catch (JSONException e) { e.printStackTrace(); } return null; } /** * After completing background task Dismiss the progress dialog **/ protected void onPostExecute(String file_url) { // dismiss the dialog once done pDialog.dismiss(); } } public void setLocale(String lang) { myLocale = new Locale(lang); Resources res = getResources(); DisplayMetrics dm = res.getDisplayMetrics(); Configuration conf = res.getConfiguration(); conf.locale = myLocale; res.updateConfiguration(conf, dm); Intent refresh = new Intent(this, SetExamActivity.class); startActivity(refresh); } }
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.android; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.analysis.TransitiveInfoProvider; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable; import com.google.devtools.build.lib.util.Preconditions; import com.google.devtools.build.lib.vfs.PathFragment; import java.util.Objects; import javax.annotation.Nullable; /** * A provider that supplies ResourceContainers from its transitive closure. */ @Immutable public final class AndroidResourcesProvider implements TransitiveInfoProvider { private final Label label; private final NestedSet<ResourceContainer> transitiveAndroidResources; private final NestedSet<ResourceContainer> directAndroidResources; public AndroidResourcesProvider( Label label, NestedSet<ResourceContainer> transitiveAndroidResources, NestedSet<ResourceContainer> directAndroidResources) { this.label = label; this.directAndroidResources = directAndroidResources; this.transitiveAndroidResources = transitiveAndroidResources; } /** * Returns the label that is associated with this piece of information. 
*/ public Label getLabel() { return label; } /** * Returns the transitive ResourceContainers for the label. */ public NestedSet<ResourceContainer> getTransitiveAndroidResources() { return transitiveAndroidResources; } /** * Returns the immediate ResourceContainers for the label. */ public NestedSet<ResourceContainer> getDirectAndroidResources() { return directAndroidResources; } /** * The type of resource in question: either asset or a resource. */ public enum ResourceType { ASSETS("assets"), RESOURCES("resources"); private final String attribute; private ResourceType(String attribute) { this.attribute = attribute; } public String getAttribute() { return attribute; } } /** * The resources contributed by a single target. */ @Immutable public static final class ResourceContainer { private final Label label; private final String javaPackage; private final String renameManifestPackage; private final boolean constantsInlined; private final Artifact apk; private final Artifact manifest; private final ImmutableList<Artifact> assets; private final ImmutableList<Artifact> resources; private final ImmutableList<PathFragment> assetsRoots; private final ImmutableList<PathFragment> resourcesRoots; private final boolean manifestExported; private final Artifact javaSourceJar; private final Artifact rTxt; private Artifact symbolsTxt; public ResourceContainer(Label label, String javaPackage, @Nullable String renameManifestPackage, boolean constantsInlined, Artifact apk, Artifact manifest, Artifact javaSourceJar, ImmutableList<Artifact> assets, ImmutableList<Artifact> resources, ImmutableList<PathFragment> assetsRoots, ImmutableList<PathFragment> resourcesRoots, boolean manifestExported, Artifact rTxt, Artifact symbolsTxt) { this.javaSourceJar = javaSourceJar; this.manifestExported = manifestExported; this.label = Preconditions.checkNotNull(label); this.javaPackage = Preconditions.checkNotNull(javaPackage); this.renameManifestPackage = renameManifestPackage; this.constantsInlined = 
constantsInlined; this.apk = Preconditions.checkNotNull(apk); this.manifest = Preconditions.checkNotNull(manifest); this.assets = Preconditions.checkNotNull(assets); this.resources = Preconditions.checkNotNull(resources); this.assetsRoots = Preconditions.checkNotNull(assetsRoots); this.resourcesRoots = Preconditions.checkNotNull(resourcesRoots); this.rTxt = rTxt; this.symbolsTxt = symbolsTxt; } public Label getLabel() { return label; } public String getJavaPackage() { return javaPackage; } public String getRenameManifestPackage() { return renameManifestPackage; } public boolean getConstantsInlined() { return constantsInlined; } public Artifact getApk() { return apk; } public Artifact getJavaSourceJar() { return javaSourceJar; } public Artifact getManifest() { return manifest; } public boolean isManifestExported() { return manifestExported; } public ImmutableList<Artifact> getArtifacts(ResourceType resourceType) { return resourceType == ResourceType.ASSETS ? assets : resources; } public Iterable<Artifact> getArtifacts() { return Iterables.concat(assets, resources); } public Artifact getRTxt() { return rTxt; } public Artifact getSymbolsTxt() { return symbolsTxt; } public ImmutableList<PathFragment> getRoots(ResourceType resourceType) { return resourceType == ResourceType.ASSETS ? 
assetsRoots : resourcesRoots; } @Override public int hashCode() { return Objects.hash(label, rTxt, symbolsTxt); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof ResourceContainer)) { return false; } ResourceContainer other = (ResourceContainer) obj; return Objects.equals(label, other.label) && Objects.equals(rTxt, other.rTxt) && Objects.equals(symbolsTxt, other.symbolsTxt); } @Override public String toString() { return String.format( "ResourceContainer [label=%s, javaPackage=%s, renameManifestPackage=%s," + " constantsInlined=%s, apk=%s, manifest=%s, assets=%s, resources=%s, assetsRoots=%s," + " resourcesRoots=%s, manifestExported=%s, javaSourceJar=%s, rTxt=%s, symbolsTxt=%s]", label, javaPackage, renameManifestPackage, constantsInlined, apk, manifest, assets, resources, assetsRoots, resourcesRoots, manifestExported, javaSourceJar, rTxt, symbolsTxt); } } }
/* * Copyright (c) 2015 Spotify AB. * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.spotify.heroic.suggest.memory; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.spotify.heroic.common.Grouped; import com.spotify.heroic.common.Groups; import com.spotify.heroic.common.OptionalLimit; import com.spotify.heroic.common.Series; import com.spotify.heroic.filter.Filter; import com.spotify.heroic.suggest.KeySuggest; import com.spotify.heroic.suggest.SuggestBackend; import com.spotify.heroic.suggest.TagKeyCount; import com.spotify.heroic.suggest.TagSuggest; import com.spotify.heroic.suggest.TagValueSuggest; import com.spotify.heroic.suggest.TagValuesSuggest; import com.spotify.heroic.suggest.WriteSuggest; import eu.toolchain.async.AsyncFramework; import eu.toolchain.async.AsyncFuture; import lombok.Data; import lombok.ToString; import javax.inject.Inject; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; 
import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; @MemoryScope @ToString(of = {}) public class MemoryBackend implements SuggestBackend, Grouped { private static final float SCORE = 1.0f; private final SortedMap<String, Set<String>> keys = new TreeMap<>(); private final SortedMap<String, Set<TagId>> tagKeys = new TreeMap<>(); private final SortedMap<String, Set<TagId>> tagValues = new TreeMap<>(); private final HashMap<String, KeyDocument> keyIndex = new HashMap<>(); private final HashMap<TagId, TagDocument> tagIndex = new HashMap<>(); private final SortedSet<Series> series = new TreeSet<>(); private final ReadWriteLock lock = new ReentrantReadWriteLock(); private final Groups groups; private final AsyncFramework async; @Inject public MemoryBackend(final Groups groups, final AsyncFramework async) { this.groups = groups; this.async = async; } @Override public AsyncFuture<Void> configure() { return async.resolved(); } @Override public AsyncFuture<TagValuesSuggest> tagValuesSuggest(TagValuesSuggest.Request request) { final Map<String, Set<String>> counts = new HashMap<>(); final OptionalLimit groupLimit = request.getGroupLimit(); try (final Stream<Series> series = lookupSeries(request.getFilter())) { series.forEach(s -> { for (final Map.Entry<String, String> e : s.getTags().entrySet()) { Set<String> c = counts.get(e.getKey()); if (c == null) { c = new HashSet<>(); counts.put(e.getKey(), c); } if (groupLimit.isGreaterOrEqual(c.size())) { continue; } c.add(e.getValue()); } }); } final List<TagValuesSuggest.Suggestion> suggestions = ImmutableList.copyOf(request .getLimit() .limitStream(counts.entrySet().stream()) .map(e -> new TagValuesSuggest.Suggestion(e.getKey(), ImmutableSortedSet.copyOf(e.getValue()), false)) .iterator()); return 
async.resolved(TagValuesSuggest.of(suggestions, false)); } @Override public AsyncFuture<TagKeyCount> tagKeyCount(final TagKeyCount.Request request) { final Map<String, Set<String>> counts = new HashMap<>(); try (final Stream<Series> series = lookupSeries(request.getFilter())) { series.forEach(s -> { for (final Map.Entry<String, String> e : s.getTags().entrySet()) { Set<String> c = counts.get(e.getKey()); if (c == null) { c = new HashSet<>(); counts.put(e.getKey(), c); } c.add(e.getValue()); } }); } final List<TagKeyCount.Suggestion> suggestions = ImmutableList.copyOf(request .getLimit() .limitStream(counts.entrySet().stream()) .map(e -> new TagKeyCount.Suggestion(e.getKey(), (long) e.getValue().size(), Optional.empty())) .iterator()); return async.resolved(TagKeyCount.of(suggestions, false)); } @Override public AsyncFuture<TagSuggest> tagSuggest(final TagSuggest.Request request) { final Optional<Set<String>> keys = request.getKey().map(MemoryBackend::analyze); final Optional<Set<String>> values = request.getValue().map(MemoryBackend::analyze); try (final Stream<TagDocument> docs = lookupTags(request.getFilter())) { final Set<TagId> ids = docs.map(TagDocument::getId).collect(Collectors.toSet()); keys.ifPresent(parts -> parts.forEach( k -> ids.retainAll(tagKeys.getOrDefault(k, ImmutableSet.of())))); values.ifPresent(parts -> parts.forEach( k -> ids.retainAll(tagValues.getOrDefault(k, ImmutableSet.of())))); final List<TagSuggest.Suggestion> suggestions = ImmutableList.copyOf( ImmutableSortedSet.copyOf(request .getLimit() .limitStream(ids.stream()) .map(tagIndex::get) .filter(v -> v != null) .map(d -> new TagSuggest.Suggestion(SCORE, d.id.key, d.id.value)) .iterator())); return async.resolved(TagSuggest.of(suggestions)); } } @Override public AsyncFuture<KeySuggest> keySuggest(final KeySuggest.Request request) { final Optional<Set<String>> analyzedKeys = request.getKey().map(MemoryBackend::analyze); final Set<String> ids; try (final Stream<KeyDocument> docs = 
lookupKeys(request.getFilter())) { ids = docs.map(KeyDocument::getId).collect(Collectors.toSet()); analyzedKeys.ifPresent(parts -> parts.forEach( k -> ids.retainAll(keys.getOrDefault(k, ImmutableSet.of())))); } final List<KeySuggest.Suggestion> suggestions = ImmutableList.copyOf(request .getLimit() .limitStream(ids.stream()) .map(d -> new KeySuggest.Suggestion(SCORE, d)) .iterator()); return async.resolved(KeySuggest.of(suggestions)); } @Override public AsyncFuture<TagValueSuggest> tagValueSuggest(final TagValueSuggest.Request request) { try (final Stream<TagDocument> docs = lookupTags(request.getFilter())) { final Stream<TagId> ids = docs.map(TagDocument::getId); final List<String> values = request .getLimit() .limitStream( request.getKey().map(k -> ids.filter(id -> id.key.equals(k))).orElse(ids)) .map(id -> id.value) .collect(Collectors.toList()); return async.resolved(TagValueSuggest.of(values, false)); } } @Override public AsyncFuture<WriteSuggest> write(final WriteSuggest.Request request) { final Series s = request.getSeries(); final Lock l = lock.writeLock(); l.lock(); try { series.add(s); keyIndex.put(s.getKey(), new KeyDocument(s.getKey(), s)); for (final String t : analyze(s.getKey())) { putEntry(keys, t, s.getKey()); } for (final Map.Entry<String, String> tag : s.getTags().entrySet()) { final TagId id = new TagId(tag.getKey(), tag.getValue()); tagIndex.put(id, new TagDocument(id, s)); for (final String t : analyze(tag.getKey())) { putEntry(tagKeys, t, id); } for (final String t : analyze(tag.getValue())) { putEntry(tagValues, t, id); } } return async.resolved(WriteSuggest.of()); } finally { l.unlock(); } } private <K, V> void putEntry( final SortedMap<K, Set<V>> index, final K key, final V value ) { Set<V> store = index.get(key); if (store == null) { store = new HashSet<>(); index.put(key, store); } store.add(value); } @Override public boolean isReady() { return true; } @Override public Groups groups() { return groups; } private static final Pattern p = 
Pattern.compile("([^a-zA-Z0-9]+|(?<=[a-z0-9])(?=[A-Z]))"); static Set<String> analyze(final String input) { if (input.isEmpty()) { return ImmutableSet.of(); } final String[] parts = p.split(input); final Set<String> output = new HashSet<>(); for (final String p : parts) { final String l = p.toLowerCase(); if (l.length() == 0) { continue; } output.add(l); output.addAll(prefix(l)); } return output; } private static Collection<String> prefix(final String input) { final Set<String> prefixes = new HashSet<>(36); for (int i = 1; i < Math.min(input.length(), 20); i++) { prefixes.add(input.substring(0, i)); } return prefixes; } private Stream<KeyDocument> lookupKeys(final Filter filter) { final Lock l = lock.readLock(); l.lock(); return keyIndex.values().stream().filter(e -> filter.apply(e.series)).onClose(l::unlock); } private Stream<TagDocument> lookupTags(final Filter filter) { final Lock l = lock.readLock(); l.lock(); return tagIndex.values().stream().filter(e -> filter.apply(e.series)).onClose(l::unlock); } private Stream<Series> lookupSeries(final Filter filter) { final Lock l = lock.readLock(); l.lock(); return series.stream().filter(filter::apply).onClose(l::unlock); } @Data static class TagId { private final String key; private final String value; } @Data static class KeyDocument { private final String id; private final Series series; } @Data static class TagDocument { private final TagId id; private final Series series; } }
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.analysis.starlark; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.analysis.config.ExecutionTransitionFactory; import com.google.devtools.build.lib.analysis.config.HostTransition; import com.google.devtools.build.lib.analysis.config.StarlarkDefinedConfigTransition; import com.google.devtools.build.lib.analysis.config.TransitionFactories; import com.google.devtools.build.lib.analysis.config.transitions.SplitTransition; import com.google.devtools.build.lib.analysis.config.transitions.TransitionFactory; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.Attribute.AllowedValueSet; import com.google.devtools.build.lib.packages.Attribute.ImmutableAttributeFactory; import com.google.devtools.build.lib.packages.Attribute.StarlarkComputedDefaultTemplate; import com.google.devtools.build.lib.packages.AttributeTransitionData; import com.google.devtools.build.lib.packages.AttributeValueSource; import com.google.devtools.build.lib.packages.BazelModuleContext; import com.google.devtools.build.lib.packages.BazelStarlarkContext; import com.google.devtools.build.lib.packages.BuildType; import 
com.google.devtools.build.lib.packages.Provider; import com.google.devtools.build.lib.packages.StarlarkAspect; import com.google.devtools.build.lib.packages.StarlarkCallbackHelper; import com.google.devtools.build.lib.packages.StarlarkProviderIdentifier; import com.google.devtools.build.lib.packages.Type; import com.google.devtools.build.lib.packages.Type.ConversionException; import com.google.devtools.build.lib.packages.Type.LabelClass; import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec; import com.google.devtools.build.lib.starlarkbuildapi.StarlarkAttrModuleApi; import com.google.devtools.build.lib.util.FileType; import com.google.devtools.build.lib.util.FileTypeSet; import java.util.List; import java.util.Map; import javax.annotation.Nullable; import net.starlark.java.eval.Dict; import net.starlark.java.eval.EvalException; import net.starlark.java.eval.Module; import net.starlark.java.eval.Printer; import net.starlark.java.eval.Sequence; import net.starlark.java.eval.Starlark; import net.starlark.java.eval.StarlarkFunction; import net.starlark.java.eval.StarlarkInt; import net.starlark.java.eval.StarlarkThread; /** * A helper class to provide Attr module in Starlark. * * <p>It exposes functions (for example, 'attr.string', 'attr.label_list', etc.) to Starlark users. * The functions are executed through reflection. As everywhere in Starlark, arguments are * type-checked with the signature and cannot be null. */ public final class StarlarkAttrModule implements StarlarkAttrModuleApi { // Arguments // TODO(adonovan): opt: this class does a lot of redundant hashtable lookups. 
  // Returns true iff `key` is present in `arguments` AND bound to a value other than
  // Starlark's None (i.e. the caller explicitly supplied a real value for the kwarg).
  private static boolean containsNonNoneKey(Map<String, Object> arguments, String key) {
    return arguments.containsKey(key) && arguments.get(key) != Starlark.NONE;
  }

  /**
   * Applies an {@code allow_files}/{@code allow_single_file} argument to {@code builder}.
   *
   * <p>{@code fileTypesObj} may be {@code True} (any file type is allowed), {@code False} (no
   * files allowed), or a list of file-extension strings. Anything else is rejected with an error
   * message that names {@code attr}.
   */
  private static void setAllowedFileTypes(
      String attr, Object fileTypesObj, Attribute.Builder<?> builder) throws EvalException {
    if (fileTypesObj == Boolean.TRUE) {
      builder.allowedFileTypes(FileTypeSet.ANY_FILE);
    } else if (fileTypesObj == Boolean.FALSE) {
      builder.allowedFileTypes(FileTypeSet.NO_FILE);
    } else if (fileTypesObj instanceof Sequence) {
      ImmutableList<String> arg =
          ImmutableList.copyOf(Sequence.cast(fileTypesObj, String.class, "allow_files argument"));
      builder.allowedFileTypes(FileType.of(arg));
    } else {
      throw Starlark.errorf("%s should be a boolean or a string list", attr);
    }
  }

  /** Convenience overload of {@link #createAttributeFactory} with an empty attribute name. */
  private static ImmutableAttributeFactory createAttributeFactory(
      Type<?> type, String doc, Map<String, Object> arguments, StarlarkThread thread)
      throws EvalException {
    // We use an empty name now so that we can set it later.
    // This trick makes sense only in the context of Starlark (builtin rules should not use it).
    return createAttributeFactory(type, doc, arguments, thread, "");
  }

  /** Builds an attribute from the Starlark kwargs and freezes it into a reusable factory. */
  private static ImmutableAttributeFactory createAttributeFactory(
      Type<?> type, String doc, Map<String, Object> arguments, StarlarkThread thread, String name)
      throws EvalException {
    return createAttribute(type, doc, arguments, thread, name).buildPartial();
  }

  /**
   * Translates the keyword arguments of a Starlark {@code attr.*(...)} call into an
   * {@link Attribute.Builder}.
   *
   * <p>Handles, in order: the default value (literal, computed-default function, or late-bound
   * default), property flags (mandatory/non-empty/executable), allowed file types, allowed rule
   * classes, allowed values, required providers, the configuration transition, and aspects.
   * The ordering of these steps mirrors validation dependencies (e.g. {@code executable=True}
   * requires {@code cfg}), so it should not be reordered casually.
   *
   * @param type the Starlark attribute's type
   * @param doc the attribute's documentation string, possibly null
   * @param arguments the raw kwargs of the attr.* call; None values mean "not supplied"
   * @param thread the Starlark thread, used for semantics and label conversion context
   * @param name the attribute name (may be empty; see createAttributeFactory)
   * @throws EvalException if any argument combination is invalid
   */
  @SuppressWarnings("unchecked")
  private static Attribute.Builder<?> createAttribute(
      Type<?> type, String doc, Map<String, Object> arguments, StarlarkThread thread, String name)
      throws EvalException {
    Attribute.Builder<?> builder = Attribute.attr(name, type).setDoc(doc);

    Object defaultValue = arguments.get(DEFAULT_ARG);
    if (!Starlark.isNullOrNone(defaultValue)) {
      if (defaultValue instanceof StarlarkFunction) {
        // Computed attribute. Non label type attributes already caused a type check error.
        StarlarkCallbackHelper callback =
            new StarlarkCallbackHelper(
                (StarlarkFunction) defaultValue,
                thread.getSemantics(),
                BazelStarlarkContext.from(thread));
        // StarlarkComputedDefaultTemplate needs to know the names of all attributes that it depends
        // on. However, this method does not know anything about other attributes.
        // We solve this problem by asking the StarlarkCallbackHelper for the parameter names used
        // in the function definition, which must be the names of attributes used by the callback.
        builder.value(
            new StarlarkComputedDefaultTemplate(type, callback.getParameterNames(), callback));
      } else if (defaultValue instanceof StarlarkLateBoundDefault) {
        builder.value((StarlarkLateBoundDefault) defaultValue); // unchecked cast
      } else {
        // Plain literal default: convert with the enclosing module's label context so that
        // relative label strings resolve against the .bzl file that defined the attribute.
        BazelStarlarkContext bazelStarlarkContext = BazelStarlarkContext.from(thread);
        builder.defaultValue(
            defaultValue,
            new BuildType.LabelConversionContext(
                BazelModuleContext.of(Module.ofInnermostEnclosingStarlarkFunction(thread)).label(),
                bazelStarlarkContext.getRepoMapping(),
                bazelStarlarkContext.getConvertedLabelsInPackage()),
            DEFAULT_ARG);
      }
    }

    // Raw property flags (internal use); applied before the boolean kwargs below.
    Object flagsArg = arguments.get(FLAGS_ARG);
    if (flagsArg != null) {
      for (String flag : Sequence.noneableCast(flagsArg, String.class, FLAGS_ARG)) {
        builder.setPropertyFlag(flag);
      }
    }

    if (containsNonNoneKey(arguments, MANDATORY_ARG) && (Boolean) arguments.get(MANDATORY_ARG)) {
      builder.setPropertyFlag("MANDATORY");
    }

    // allow_empty=False maps to the NON_EMPTY flag (note the inversion).
    if (containsNonNoneKey(arguments, ALLOW_EMPTY_ARG)
        && !(Boolean) arguments.get(ALLOW_EMPTY_ARG)) {
      builder.setPropertyFlag("NON_EMPTY");
    }

    if (containsNonNoneKey(arguments, EXECUTABLE_ARG) && (Boolean) arguments.get(EXECUTABLE_ARG)) {
      builder.setPropertyFlag("EXECUTABLE");
      // executable=True is only meaningful with an explicit cfg; reject it otherwise.
      if (!containsNonNoneKey(arguments, CONFIGURATION_ARG)) {
        throw Starlark.errorf(
            "cfg parameter is mandatory when executable=True is provided. Please see "
                + "https://www.bazel.build/versions/master/docs/skylark/rules.html#configurations "
                + "for more details.");
      }
    }

    // allow_files and allow_single_file are mutually exclusive spellings of the same concept.
    if (containsNonNoneKey(arguments, ALLOW_FILES_ARG)
        && containsNonNoneKey(arguments, ALLOW_SINGLE_FILE_ARG)) {
      throw Starlark.errorf("Cannot specify both allow_files and allow_single_file");
    }

    if (containsNonNoneKey(arguments, ALLOW_FILES_ARG)) {
      Object fileTypesObj = arguments.get(ALLOW_FILES_ARG);
      setAllowedFileTypes(ALLOW_FILES_ARG, fileTypesObj, builder);
    } else if (containsNonNoneKey(arguments, ALLOW_SINGLE_FILE_ARG)) {
      Object fileTypesObj = arguments.get(ALLOW_SINGLE_FILE_ARG);
      setAllowedFileTypes(ALLOW_SINGLE_FILE_ARG, fileTypesObj, builder);
      builder.setPropertyFlag("SINGLE_ARTIFACT");
    } else if (type.getLabelClass() == LabelClass.DEPENDENCY) {
      // Dependency attributes default to accepting no files unless files were explicitly allowed.
      builder.allowedFileTypes(FileTypeSet.NO_FILE);
    }

    Object ruleClassesObj = arguments.get(ALLOW_RULES_ARG);
    if (ruleClassesObj != null && ruleClassesObj != Starlark.NONE) {
      builder.allowedRuleClasses(
          Sequence.cast(
              ruleClassesObj, String.class, "allowed rule classes for attribute definition"));
    }

    Object valuesArg = arguments.get(VALUES_ARG);
    if (valuesArg != null) {
      List<Object> values = Sequence.noneableCast(valuesArg, Object.class, VALUES_ARG);
      if (!values.isEmpty()) {
        builder.allowedValues(new AllowedValueSet(values));
      }
    }

    if (containsNonNoneKey(arguments, PROVIDERS_ARG)) {
      Object obj = arguments.get(PROVIDERS_ARG);
      ImmutableList<ImmutableSet<StarlarkProviderIdentifier>> providersList =
          buildProviderPredicate(Sequence.cast(obj, Object.class, PROVIDERS_ARG), PROVIDERS_ARG);
      // If there is at least one empty set, there is no restriction.
      if (providersList.stream().noneMatch(ImmutableSet::isEmpty)) {
        builder.mandatoryProvidersList(providersList);
      }
    }

    if (containsNonNoneKey(arguments, CONFIGURATION_ARG)) {
      Object trans = arguments.get(CONFIGURATION_ARG);
      boolean isSplit =
          trans instanceof SplitTransition
              || trans instanceof TransitionFactory
              || trans instanceof StarlarkDefinedConfigTransition;
      // Late-bound defaults are resolved in a single configuration, so splits are incompatible.
      if (isSplit && defaultValue instanceof StarlarkLateBoundDefault) {
        throw Starlark.errorf(
            "late-bound attributes must not have a split configuration transition");
      }
      // Dispatch on the cfg value: well-known strings first, then concrete transition types,
      // most specific type first (ExecutionTransitionFactory before the generic
      // TransitionFactory case), then Starlark-defined transitions.
      if (trans.equals("host")) {
        builder.cfg(HostTransition.createFactory());
      } else if (trans.equals("exec")) {
        builder.cfg(ExecutionTransitionFactory.create());
      } else if (trans instanceof ExecutionTransitionFactory) {
        builder.cfg((ExecutionTransitionFactory) trans);
      } else if (trans instanceof SplitTransition) {
        builder.cfg(TransitionFactories.of((SplitTransition) trans));
      } else if (trans instanceof TransitionFactory) {
        builder.cfg((TransitionFactory<AttributeTransitionData>) trans); // unchecked cast
      } else if (trans instanceof StarlarkDefinedConfigTransition) {
        StarlarkDefinedConfigTransition starlarkDefinedTransition =
            (StarlarkDefinedConfigTransition) trans;
        if (starlarkDefinedTransition.isForAnalysisTesting()) {
          builder.hasAnalysisTestTransition();
        } else {
          builder.hasStarlarkDefinedTransition();
        }
        builder.cfg(new StarlarkAttributeTransitionProvider(starlarkDefinedTransition));
      } else if (!trans.equals("target")) {
        // We don't actively advertise the hard-coded but exposed transitions like
        // android_split_transition because users of those transitions should already know about
        // them.
        throw Starlark.errorf(
            "cfg must be either 'host', 'target', 'exec' or a starlark defined transition defined"
                + " by the exec() or transition() functions.");
      }
    }

    if (containsNonNoneKey(arguments, ASPECTS_ARG)) {
      Object obj = arguments.get(ASPECTS_ARG);
      for (StarlarkAspect aspect : Sequence.cast(obj, StarlarkAspect.class, "aspects")) {
        aspect.attachToAttribute(builder);
      }
    }

    return builder;
  }

  /**
   * Builds a list of sets of accepted providers from Starlark list {@code obj}. The list can either
   * be a list of providers (in that case the result is a list with one set) or a list of lists of
   * providers (then the result is the list of sets).
   *
   * @param argumentName used in error messages.
   */
  static ImmutableList<ImmutableSet<StarlarkProviderIdentifier>> buildProviderPredicate(
      Sequence<?> obj, String argumentName) throws EvalException {
    if (obj.isEmpty()) {
      return ImmutableList.of();
    }
    // Decide between the flat form (list of providers) and the nested form (list of lists).
    boolean isListOfProviders = true;
    for (Object o : obj) {
      if (!isProvider(o)) {
        isListOfProviders = false;
        break;
      }
    }
    if (isListOfProviders) {
      return ImmutableList.of(getStarlarkProviderIdentifiers(obj));
    } else {
      return getProvidersList(obj, argumentName);
    }
  }

  /**
   * Returns true if {@code o} is a Starlark provider (either a declared provider or a legacy
   * provider name.
   */
  static boolean isProvider(Object o) {
    return o instanceof String || o instanceof Provider;
  }

  /**
   * Converts Starlark identifiers of providers (either a string or a provider value) to their
   * internal representations.
   */
  static ImmutableSet<StarlarkProviderIdentifier> getStarlarkProviderIdentifiers(Sequence<?> list)
      throws EvalException {
    ImmutableList.Builder<StarlarkProviderIdentifier> result = ImmutableList.builder();
    for (Object obj : list) {
      if (obj instanceof String) {
        // Legacy (string-keyed) provider.
        result.add(StarlarkProviderIdentifier.forLegacy((String) obj));
      } else if (obj instanceof Provider) {
        Provider constructor = (Provider) obj;
        // Only exported (top-level, named) providers have a stable key to reference.
        if (!constructor.isExported()) {
          throw Starlark.errorf(
              "Providers should be top-level values in extension files that define them.");
        }
        result.add(StarlarkProviderIdentifier.forKey(constructor.getKey()));
      }
    }
    return ImmutableSet.copyOf(result.build());
  }

  /**
   * Converts a Starlark list-of-lists of providers into the internal list-of-sets form.
   * Rejects, with {@code errorMsg}, any element that is not itself a list of providers.
   */
  private static ImmutableList<ImmutableSet<StarlarkProviderIdentifier>> getProvidersList(
      Sequence<?> starlarkList, String argumentName) throws EvalException {
    ImmutableList.Builder<ImmutableSet<StarlarkProviderIdentifier>> providersList =
        ImmutableList.builder();
    String errorMsg =
        "Illegal argument: element in '%s' is of unexpected type. "
            + "Either all elements should be providers, "
            + "or all elements should be lists of providers, but got %s.";
    for (Object o : starlarkList) {
      if (!(o instanceof Sequence)) {
        throw Starlark.errorf(errorMsg, argumentName, "an element of type " + Starlark.type(o));
      }
      for (Object value : (Sequence) o) {
        if (!isProvider(value)) {
          throw Starlark.errorf(
              errorMsg, argumentName, "list with an element of type " + Starlark.type(value));
        }
      }
      providersList.add(getStarlarkProviderIdentifiers((Sequence<?>) o));
    }
    return providersList.build();
  }

  /**
   * Builds a {@link Descriptor} for one attr.* call.
   *
   * <p>NOTE(review): the Starlark-level {@code doc} argument is not forwarded here —
   * {@code createAttributeFactory} is invoked with a null doc. Confirm whether dropping the
   * documentation string is intended for these attribute kinds.
   */
  private static Descriptor createAttrDescriptor(
      String name, Map<String, Object> kwargs, Type<?> type, StarlarkThread thread)
      throws EvalException {
    try {
      return new Descriptor(name, createAttributeFactory(type, null, kwargs, thread));
    } catch (ConversionException e) {
      throw new EvalException(e.getMessage());
    }
  }

  // Attribute types that must never be configurable, each mapped to the human-readable reason
  // shown to users (and passed to Attribute.Builder#nonconfigurable).
  private static final Map<Type<?>, String> whyNotConfigurable =
      ImmutableMap.<Type<?>, String>builder()
          .put(BuildType.LICENSE,
              "loading phase license checking logic assumes non-configurable values")
          .put(BuildType.OUTPUT, "output paths are part of the static graph structure")
          .build();

  /**
   * If the given attribute type is non-configurable, returns the reason why. Otherwise, returns
   * {@code null}.
   */
  @Nullable
  public static String maybeGetNonConfigurableReason(Type<?> type) {
    return whyNotConfigurable.get(type);
  }

  /**
   * Like {@link #createAttrDescriptor} but for attribute types that are inherently
   * non-configurable; the type MUST have an entry in {@link #whyNotConfigurable}.
   */
  private static Descriptor createNonconfigurableAttrDescriptor(
      String name, Map<String, Object> kwargs, Type<?> type, StarlarkThread thread)
      throws EvalException {
    String whyNotConfigurableReason =
        Preconditions.checkNotNull(maybeGetNonConfigurableReason(type), type);
    try {
      // We use an empty name now so that we can set it later.
      // This trick makes sense only in the context of Starlark (builtin rules should not use it).
      return new Descriptor(
          name,
          createAttribute(type, null, kwargs, thread, "")
              .nonconfigurable(whyNotConfigurableReason)
              .buildPartial());
    } catch (ConversionException e) {
      throw new EvalException(e.getMessage());
    }
  }

  @Override
  public void repr(Printer printer) {
    printer.append("<attr>");
  }

  /** Implements attr.int(...). */
  @Override
  public Descriptor intAttribute(
      StarlarkInt defaultValue, String doc, Boolean mandatory, Sequence<?> values,
      StarlarkThread thread)
      throws EvalException {
    // TODO(bazel-team): Replace literal strings with constants.
    BazelStarlarkContext.from(thread).checkLoadingOrWorkspacePhase("attr.int");
    return createAttrDescriptor(
        "int",
        optionMap(DEFAULT_ARG, defaultValue, MANDATORY_ARG, mandatory, VALUES_ARG, values),
        Type.INTEGER,
        thread);
  }

  /** Implements attr.string(...). */
  @Override
  public Descriptor stringAttribute(
      String defaultValue, String doc, Boolean mandatory, Sequence<?> values, StarlarkThread thread)
      throws EvalException {
    BazelStarlarkContext.from(thread).checkLoadingOrWorkspacePhase("attr.string");
    return createAttrDescriptor(
        "string",
        optionMap(DEFAULT_ARG, defaultValue, MANDATORY_ARG, mandatory, VALUES_ARG, values),
        Type.STRING,
        thread);
  }

  /** Implements attr.label(...). */
  @Override
  public Descriptor labelAttribute(
      Object defaultValue, // Label | String | LateBoundDefaultApi | StarlarkFunction
      String doc,
      Boolean executable,
      Object allowFiles,
      Object allowSingleFile,
      Boolean mandatory,
      Sequence<?> providers,
      Object allowRules,
      Object cfg,
      Sequence<?> aspects,
      StarlarkThread thread)
      throws EvalException {
    BazelStarlarkContext.from(thread).checkLoadingOrWorkspacePhase("attr.label");
    ImmutableAttributeFactory attribute =
        createAttributeFactory(
            BuildType.LABEL,
            doc,
            optionMap(
                DEFAULT_ARG, defaultValue,
                EXECUTABLE_ARG, executable,
                ALLOW_FILES_ARG, allowFiles,
                ALLOW_SINGLE_FILE_ARG, allowSingleFile,
                MANDATORY_ARG, mandatory,
                PROVIDERS_ARG, providers,
                ALLOW_RULES_ARG, allowRules,
                CONFIGURATION_ARG, cfg,
                ASPECTS_ARG, aspects),
            thread,
            "label");
    return new Descriptor("label", attribute);
  }

  /** Implements attr.string_list(...). */
  @Override
  public Descriptor stringListAttribute(
      Boolean mandatory, Boolean allowEmpty, Sequence<?> defaultValue, String doc,
      StarlarkThread thread)
      throws EvalException {
    BazelStarlarkContext.from(thread).checkLoadingOrWorkspacePhase("attr.string_list");
    return createAttrDescriptor(
        "string_list",
        optionMap(
            DEFAULT_ARG, defaultValue,
            MANDATORY_ARG, mandatory,
            ALLOW_EMPTY_ARG, allowEmpty),
        Type.STRING_LIST,
        thread);
  }

  /** Implements attr.int_list(...). */
  @Override
  public Descriptor intListAttribute(
      Boolean mandatory, Boolean allowEmpty, Sequence<?> defaultValue, String doc,
      StarlarkThread thread)
      throws EvalException {
    BazelStarlarkContext.from(thread).checkLoadingOrWorkspacePhase("attr.int_list");
    return createAttrDescriptor(
        "int_list",
        optionMap(
            DEFAULT_ARG, defaultValue,
            MANDATORY_ARG, mandatory,
            ALLOW_EMPTY_ARG, allowEmpty),
        Type.INTEGER_LIST,
        thread);
  }

  /** Implements attr.label_list(...). */
  @Override
  public Descriptor labelListAttribute(
      Boolean allowEmpty,
      Object defaultValue, // Sequence | StarlarkFunction
      String doc,
      Object allowFiles,
      Object allowRules,
      Sequence<?> providers,
      Sequence<?> flags,
      Boolean mandatory,
      Object cfg,
      Sequence<?> aspects,
      StarlarkThread thread)
      throws EvalException {
    BazelStarlarkContext.from(thread).checkLoadingOrWorkspacePhase("attr.label_list");
    Map<String, Object> kwargs =
        optionMap(
            DEFAULT_ARG, defaultValue,
            ALLOW_FILES_ARG, allowFiles,
            ALLOW_RULES_ARG, allowRules,
            PROVIDERS_ARG, providers,
            FLAGS_ARG, flags,
            MANDATORY_ARG, mandatory,
            ALLOW_EMPTY_ARG, allowEmpty,
            CONFIGURATION_ARG, cfg,
            ASPECTS_ARG, aspects);
    ImmutableAttributeFactory attribute =
        createAttributeFactory(BuildType.LABEL_LIST, doc, kwargs, thread, "label_list");
    return new Descriptor("label_list", attribute);
  }

  /** Implements attr.label_keyed_string_dict(...). */
  @Override
  public Descriptor labelKeyedStringDictAttribute(
      Boolean allowEmpty,
      Object defaultValue, // Dict | StarlarkFunction
      String doc,
      Object allowFiles,
      Object allowRules,
      Sequence<?> providers,
      Sequence<?> flags,
      Boolean mandatory,
      Object cfg,
      Sequence<?> aspects,
      StarlarkThread thread)
      throws EvalException {
    BazelStarlarkContext.from(thread).checkLoadingOrWorkspacePhase("attr.label_keyed_string_dict");
    Map<String, Object> kwargs =
        optionMap(
            DEFAULT_ARG, defaultValue,
            ALLOW_FILES_ARG, allowFiles,
            ALLOW_RULES_ARG, allowRules,
            PROVIDERS_ARG, providers,
            FLAGS_ARG, flags,
            MANDATORY_ARG, mandatory,
            ALLOW_EMPTY_ARG, allowEmpty,
            CONFIGURATION_ARG, cfg,
            ASPECTS_ARG, aspects);
    ImmutableAttributeFactory attribute =
        createAttributeFactory(
            BuildType.LABEL_KEYED_STRING_DICT, doc, kwargs, thread, "label_keyed_string_dict");
    return new Descriptor("label_keyed_string_dict", attribute);
  }

  /** Implements attr.bool(...). */
  @Override
  public Descriptor boolAttribute(
      Boolean defaultValue, String doc, Boolean mandatory, StarlarkThread thread)
      throws EvalException {
    BazelStarlarkContext.from(thread).checkLoadingOrWorkspacePhase("attr.bool");
    return createAttrDescriptor(
        "bool",
        optionMap(DEFAULT_ARG, defaultValue, MANDATORY_ARG, mandatory),
        Type.BOOLEAN,
        thread);
  }

  /** Implements attr.output(...); output attributes are never configurable. */
  @Override
  public Descriptor outputAttribute(String doc, Boolean mandatory, StarlarkThread thread)
      throws EvalException {
    BazelStarlarkContext.from(thread).checkLoadingOrWorkspacePhase("attr.output");
    return createNonconfigurableAttrDescriptor(
        "output", optionMap(MANDATORY_ARG, mandatory), BuildType.OUTPUT, thread);
  }

  /** Implements attr.output_list(...). */
  @Override
  public Descriptor outputListAttribute(
      Boolean allowEmpty, String doc, Boolean mandatory, StarlarkThread thread)
      throws EvalException {
    BazelStarlarkContext.from(thread).checkLoadingOrWorkspacePhase("attr.output_list");
    return createAttrDescriptor(
        "output_list",
        optionMap(
            MANDATORY_ARG, mandatory,
            ALLOW_EMPTY_ARG, allowEmpty),
        BuildType.OUTPUT_LIST,
        thread);
  }

  /** Implements attr.string_dict(...). */
  @Override
  public Descriptor stringDictAttribute(
      Boolean allowEmpty, Dict<?, ?> defaultValue, String doc, Boolean mandatory,
      StarlarkThread thread)
      throws EvalException {
    BazelStarlarkContext.from(thread).checkLoadingOrWorkspacePhase("attr.string_dict");
    return createAttrDescriptor(
        "string_dict",
        optionMap(
            DEFAULT_ARG, defaultValue,
            MANDATORY_ARG, mandatory,
            ALLOW_EMPTY_ARG, allowEmpty),
        Type.STRING_DICT,
        thread);
  }

  /** Implements attr.string_list_dict(...). */
  @Override
  public Descriptor stringListDictAttribute(
      Boolean allowEmpty, Dict<?, ?> defaultValue, String doc, Boolean mandatory,
      StarlarkThread thread)
      throws EvalException {
    BazelStarlarkContext.from(thread).checkLoadingOrWorkspacePhase("attr.string_list_dict");
    return createAttrDescriptor(
        "string_list_dict",
        optionMap(
            DEFAULT_ARG, defaultValue,
            MANDATORY_ARG, mandatory,
            ALLOW_EMPTY_ARG, allowEmpty),
        Type.STRING_LIST_DICT,
        thread);
  }

  /** Implements attr.license(...); license attributes are never configurable. */
  @Override
  public Descriptor licenseAttribute(
      Object defaultValue, String doc, Boolean mandatory, StarlarkThread thread)
      throws EvalException {
    BazelStarlarkContext.from(thread).checkLoadingOrWorkspacePhase("attr.license");
    return createNonconfigurableAttrDescriptor(
        "license",
        optionMap(DEFAULT_ARG, defaultValue, MANDATORY_ARG, mandatory),
        BuildType.LICENSE,
        thread);
  }

  /** A descriptor of an attribute defined in Starlark. */
  @AutoCodec
  public static final class Descriptor implements StarlarkAttrModuleApi.Descriptor {
    // Frozen, partially-built attribute; the final name is supplied at rule-definition time.
    private final ImmutableAttributeFactory attributeFactory;
    // The attr.* kind (e.g. "label_list"), used only for repr().
    private final String name;

    @AutoCodec.VisibleForSerialization
    Descriptor(String name, ImmutableAttributeFactory attributeFactory) {
      this.attributeFactory = Preconditions.checkNotNull(attributeFactory);
      this.name = name;
    }

    /** Returns whether a default value was set on this attribute. */
    public boolean hasDefault() {
      return attributeFactory.isValueSet();
    }

    public AttributeValueSource getValueSource() {
      return attributeFactory.getValueSource();
    }

    /** Materializes the attribute under its final {@code name}. */
    public Attribute build(String name) {
      return attributeFactory.build(name);
    }

    @Override
    public void repr(Printer printer) {
      printer.append("<attr." + name + ">");
    }
  }

  // Returns an immutable map from a list of alternating name/value pairs,
  // skipping values that are null or None. Keys must be unique.
  private static Map<String, Object> optionMap(Object... pairs) {
    Preconditions.checkArgument(pairs.length % 2 == 0);
    ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>();
    for (int i = 0; i < pairs.length; i += 2) {
      String key = (String) Preconditions.checkNotNull(pairs[i]);
      Object value = pairs[i + 1];
      if (value != null && value != Starlark.NONE) {
        b.put(key, value);
      }
    }
    return b.build();
  }
}
package model;

import javafx.collections.FXCollections;
import javafx.collections.ObservableList;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * Manages the user information. This is a service provider.
 *
 * <p>Fixes over the previous revision: all per-user SQL now uses {@link PreparedStatement}
 * parameters instead of string concatenation (SQL-injection fix), JDBC resources are closed via
 * try-with-resources (leak fix), and the five copy-pasted query loops share one helper. The
 * public interface and the error contracts (null on query failure, silent best-effort for the
 * bulk operations) are unchanged.
 */
public class UserManager {

    // WARNING(security): credentials are hard-coded and committed to source control; they should
    // be moved to external configuration. Kept here only to preserve existing behavior.
    private static final String DB_URL = "jdbc:mysql://db4free.net:3306/bitsplease";
    private static final String DB_USER = "bitsplease";
    private static final String DB_PASSWORD = "bitsplease";

    /** Column list fetched by every user-listing query. */
    private static final String SELECT_ALL_USERS =
            "SELECT username, password, fullname, ban, attempt, type, "
                    + "emailaddress, homeaddress, company, jobtitle, department FROM USER";

    /** Row predicate over a positioned {@link ResultSet}; may throw {@link SQLException}. */
    private interface RowFilter {
        boolean accept(ResultSet rs) throws SQLException;
    }

    /**
     * Opens a connection to the application database.
     * The explicit driver load is kept for pre-JDBC4 drivers; it is harmless otherwise.
     */
    private static Connection openConnection() throws Exception {
        Class.forName("com.mysql.jdbc.Driver");
        return DriverManager.getConnection(DB_URL, DB_USER, DB_PASSWORD);
    }

    /** Builds a {@link User} from the current row of {@code rs}. */
    private static User readUser(ResultSet rs) throws SQLException {
        return new User(rs.getString("username"), rs.getString("fullname"),
                rs.getInt("ban"), rs.getString("type"),
                rs.getString("emailaddress"), rs.getString("homeaddress"),
                rs.getString("company"), rs.getString("jobtitle"),
                rs.getString("department"));
    }

    /**
     * Runs the full user query and returns the rows accepted by {@code filter}.
     *
     * @return an observable list of matching users, or null if the query failed
     *         (preserved contract: callers treat null as "query failed")
     */
    private ObservableList<User> fetchUsers(RowFilter filter) {
        try (Connection conn = openConnection();
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(SELECT_ALL_USERS)) {
            List<User> reportList = new ArrayList<>();
            while (rs.next()) {
                if (filter.accept(rs)) {
                    reportList.add(readUser(rs));
                }
            }
            return FXCollections.observableList(reportList);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Accesses the database to get all users that have not been banned.
     * This finds only users with the actual "user" distinction.
     * @return an observable list of all the unbanned users, or null on failure
     */
    public ObservableList<User> getUnbannedUsers() {
        String userType = UserTypeEnum.USER.toString();
        return fetchUsers(rs -> userType.equals(rs.getString("type")) && rs.getInt("ban") == 0);
    }

    /**
     * Accesses the database to get all users that have been banned.
     * This finds only users with the actual "user" distinction.
     * @return an observable list of all the banned users, or null on failure
     */
    public ObservableList<User> getBannedUsers() {
        String userType = UserTypeEnum.USER.toString();
        return fetchUsers(rs -> userType.equals(rs.getString("type")) && rs.getInt("ban") == 1);
    }

    /**
     * Accesses the database to get all users that have NOT been blocked (fewer than 3 failed
     * attempts). This finds users with the "user", "manager", and "worker" distinctions.
     * @return an observable list of all the unblocked users, or null on failure
     */
    public ObservableList<User> getUnblockedUsers() {
        String adminType = UserTypeEnum.ADMIN.toString();
        return fetchUsers(rs -> !adminType.equals(rs.getString("type")) && rs.getInt("attempt") < 3);
    }

    /**
     * Accesses the database to get all users that HAVE been blocked (3 or more failed attempts).
     * This finds users with the "user", "manager", and "worker" distinctions.
     * @return an observable list of all the blocked users, or null on failure
     */
    public ObservableList<User> getBlockedUsers() {
        String adminType = UserTypeEnum.ADMIN.toString();
        return fetchUsers(rs -> !adminType.equals(rs.getString("type")) && rs.getInt("attempt") > 2);
    }

    /**
     * Gets an observable list of all the users in the database to be used by the controller.
     * @param currentUser the current user, who is excluded from the returned list
     * @return an observable list of all other users (used by Admin to delete users),
     *         or null on failure
     */
    public ObservableList<User> getUsers(User currentUser) {
        String curUser = currentUser.get_username();
        return fetchUsers(rs -> !rs.getString("username").equals(curUser));
    }

    /**
     * Applies a single-parameter (username) statement to each selected user and removes each
     * successfully processed user from {@code mainList}.
     *
     * <p>Preserved contract: failures are silently ignored (best-effort); users processed before
     * a failure stay removed from the list, matching the previous behavior.
     */
    private void applyToUsers(String sql, Iterable<User> selected, Collection<User> mainList) {
        try (Connection conn = openConnection();
             PreparedStatement stmt = conn.prepareStatement(sql)) {
            for (User current : selected) {
                stmt.setString(1, current.get_username());
                stmt.executeUpdate();
                mainList.remove(current);
            }
        } catch (Exception ignored) {
            // Intentionally best-effort, as before.
        }
    }

    /**
     * Bans the selected users from submitting water reports and removes them from the list
     * shown in the view.
     * @param selectedDelete a list of users to ban
     * @param mainList a list of all users that could be banned
     */
    public void banUsers(Iterable<User> selectedDelete, Collection<User> mainList) {
        applyToUsers("UPDATE USER SET ban = '1' WHERE username = ?", selectedDelete, mainList);
    }

    /**
     * Blocks users from logging in and using the application. Removes them from the list of
     * users shown in the view.
     * @param selectedDelete a list of users to block
     * @param mainList a list of all users that could be blocked
     */
    public void blockUsers(Iterable<User> selectedDelete, Collection<User> mainList) {
        applyToUsers("UPDATE USER SET attempt = '99' WHERE username = ?", selectedDelete, mainList);
    }

    /**
     * Deletes users from the application database. Removes them from the list of users shown
     * in the view.
     * @param selectedDelete a list of users to delete
     * @param mainList a list of all users that could be deleted
     */
    public void deleteUsers(Iterable<User> selectedDelete, Collection<User> mainList) {
        applyToUsers("DELETE FROM USER WHERE username = ?", selectedDelete, mainList);
    }

    /**
     * Creates the specified user in the database.
     *
     * <p>WARNING(security): the password is stored in plain text, as before; it should be hashed
     * (e.g. bcrypt) — left unchanged here to avoid breaking the existing login path.
     *
     * @param username the user's desired username
     * @param password the user's desired password
     * @param fullName the user's full name
     * @param userType the user's user type
     */
    public void createUser(String username, String password, String fullName,
                           UserTypeEnum userType) {
        String sql = "INSERT INTO `USER` (`username`, `password`, `fullname`, `ban`, "
                + "`attempt`, `type`) VALUES (?, ?, ?, '0', '0', ?)";
        try (Connection conn = openConnection();
             PreparedStatement stmt = conn.prepareStatement(sql)) {
            stmt.setString(1, username);
            stmt.setString(2, password);
            stmt.setString(3, fullName);
            stmt.setString(4, userType.toString());
            stmt.executeUpdate();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Accesses the database to get any users with the inputted username.
     *
     * <p>NOTE: the connection and statement are intentionally left open because the caller
     * consumes the returned {@link ResultSet}, which would be closed along with them. This is a
     * known resource leak inherited from the original design; consider returning a boolean
     * "exists" instead.
     *
     * @param username a string of a username that a user wants to have
     * @return the users with the specified username, or null on failure
     */
    public ResultSet getUsersWithUsername(String username) {
        try {
            Connection conn = openConnection();
            PreparedStatement stmt =
                    conn.prepareStatement("SELECT username FROM USER WHERE username = ?");
            stmt.setString(1, username);
            return stmt.executeQuery();
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cassandra.pig;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
import java.util.Iterator;

import org.apache.cassandra.cli.CliMain;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.thrift.AuthenticationException;
import org.apache.cassandra.thrift.AuthorizationException;
import org.apache.cassandra.thrift.Cassandra;
import org.apache.cassandra.thrift.ColumnOrSuperColumn;
import org.apache.cassandra.thrift.ColumnParent;
import org.apache.cassandra.thrift.ColumnPath;
import org.apache.cassandra.thrift.ConsistencyLevel;
import org.apache.cassandra.thrift.InvalidRequestException;
import org.apache.cassandra.thrift.NotFoundException;
import org.apache.cassandra.thrift.TimedOutException;
import org.apache.cassandra.thrift.UnavailableException;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.Tuple;
import org.apache.thrift.TException;
import org.apache.thrift.transport.TTransportException;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Integration tests for reading and writing classic Thrift column families
 * through Pig's {@code CassandraStorage} and {@code CqlStorage} load/store
 * functions.
 *
 * <p>{@link #setup()} boots an embedded Cassandra node plus a mini Hadoop
 * cluster (via {@code PigTestBase}) and seeds the {@code thriftKs} keyspace
 * with the CLI {@link #statements} below. Each test then runs Pig scripts
 * against that data and verifies the tuples (or the columns written back)
 * with the raw Thrift client.
 */
public class ThriftColumnFamilyTest extends PigTestBase
{
    // CLI statements executed once in setup(). Index 3 (the CopyOfSomeApp
    // definition) is also re-used by the copy tests to recreate a clean
    // target column family, so keep its position stable.
    private static final String[] statements = {
            "create keyspace thriftKs with " +
            "placement_strategy = 'org.apache.cassandra.locator.SimpleStrategy' and" +
            " strategy_options={replication_factor:1};",
            "use thriftKs;",

            "create column family SomeApp " +
            " with comparator = UTF8Type " +
            " and default_validation_class = UTF8Type " +
            " and key_validation_class = UTF8Type " +
            " and column_metadata = [{column_name: name, validation_class: UTF8Type, index_type: KEYS}, " +
            "{column_name: vote_type, validation_class: UTF8Type}, " +
            "{column_name: rating, validation_class: Int32Type}, " +
            "{column_name: score, validation_class: LongType}, " +
            "{column_name: percent, validation_class: FloatType}, " +
            "{column_name: atomic_weight, validation_class: DoubleType}, " +
            "{column_name: created, validation_class: DateType},]; ",

            // statements[3]: schema for the copy-target CF (see createColumnFamily callers)
            "create column family CopyOfSomeApp " +
            "with key_validation_class = UTF8Type " +
            "and default_validation_class = UTF8Type " +
            "and comparator = UTF8Type " +
            "and column_metadata = " +
            "[ " +
            "{column_name: name, validation_class: UTF8Type, index_type: KEYS}, " +
            "{column_name: vote_type, validation_class: UTF8Type}, " +
            "{column_name: rating, validation_class: Int32Type}, " +
            "{column_name: score, validation_class: LongType}, " +
            "{column_name: percent, validation_class: FloatType}, " +
            "{column_name: atomic_weight, validation_class: DoubleType}, " +
            "{column_name: created, validation_class: DateType}, " +
            "];",

            "set SomeApp['foo']['name'] = 'User Foo';",
            "set SomeApp['foo']['vote_type'] = 'like';",
            "set SomeApp['foo']['rating'] = 8;",
            "set SomeApp['foo']['score'] = 125000;",
            "set SomeApp['foo']['percent'] = '85.0';",
            "set SomeApp['foo']['atomic_weight'] = '2.7182818284590451';",
            "set SomeApp['foo']['created'] = 1335890877;",

            "set SomeApp['bar']['name'] = 'User Bar';",
            "set SomeApp['bar']['vote_type'] = 'like';",
            "set SomeApp['bar']['rating'] = 9;",
            "set SomeApp['bar']['score'] = 15000;",
            "set SomeApp['bar']['percent'] = '35.0';",
            "set SomeApp['bar']['atomic_weight'] = '3.1415926535897931';",
            "set SomeApp['bar']['created'] = 1335890877;",

            "set SomeApp['baz']['name'] = 'User Baz';",
            "set SomeApp['baz']['vote_type'] = 'dislike';",
            "set SomeApp['baz']['rating'] = 3;",
            "set SomeApp['baz']['score'] = 512000;",
            "set SomeApp['baz']['percent'] = '95.3';",
            "set SomeApp['baz']['atomic_weight'] = '1.61803399';",
            "set SomeApp['baz']['created'] = 1335890877;",
            "set SomeApp['baz']['extra1'] = 'extra1';",
            "set SomeApp['baz']['extra2'] = 'extra2';",
            "set SomeApp['baz']['extra3'] = 'extra3';",

            "set SomeApp['qux']['name'] = 'User Qux';",
            "set SomeApp['qux']['vote_type'] = 'dislike';",
            "set SomeApp['qux']['rating'] = 2;",
            "set SomeApp['qux']['score'] = 12000;",
            "set SomeApp['qux']['percent'] = '64.7';",
            "set SomeApp['qux']['atomic_weight'] = '0.660161815846869';",
            "set SomeApp['qux']['created'] = 1335890877;",
            "set SomeApp['qux']['extra1'] = 'extra1';",
            "set SomeApp['qux']['extra2'] = 'extra2';",
            "set SomeApp['qux']['extra3'] = 'extra3';",
            "set SomeApp['qux']['extra4'] = 'extra4';",
            "set SomeApp['qux']['extra5'] = 'extra5';",
            "set SomeApp['qux']['extra6'] = 'extra6';",
            "set SomeApp['qux']['extra7'] = 'extra7';",

            // Key-type coverage: UTF8 vs raw-bytes row keys (join test)
            "create column family U8 with " +
            "key_validation_class = UTF8Type and " +
            "comparator = UTF8Type;",
            "create column family Bytes with " +
            "key_validation_class = BytesType and " +
            "comparator = UTF8Type;",
            "set U8['foo']['x'] = ascii('Z');",
            "set Bytes[ascii('foo')]['x'] = ascii('Z');",

            // Counter column family
            "create column family CC with " +
            "key_validation_class = UTF8Type and " +
            "default_validation_class=CounterColumnType " +
            "and comparator=UTF8Type;",
            "incr CC['chuck']['kick'];",
            "incr CC['chuck']['kick'];",
            "incr CC['chuck']['kick'];",
            "incr CC['chuck']['fist'];",

            // Composite column names
            "create column family Compo " +
            "with key_validation_class = UTF8Type " +
            "and default_validation_class = UTF8Type " +
            "and comparator = 'CompositeType(UTF8Type,UTF8Type)';",
            "set Compo['punch']['bruce:lee'] = 'ouch';",
            "set Compo['punch']['bruce:bruce'] = 'hunh?';",
            "set Compo['kick']['bruce:lee'] = 'oww';",
            "set Compo['kick']['bruce:bruce'] = 'watch it, mate';",

            "create column family CompoInt " +
            "with key_validation_class = UTF8Type " +
            "and default_validation_class = UTF8Type " +
            "and comparator = 'CompositeType(LongType,LongType)';",
            "set CompoInt['clock']['1:0'] = 'z';",
            "set CompoInt['clock']['1:30'] = 'zzzz';",
            "set CompoInt['clock']['2:30'] = 'daddy?';",
            "set CompoInt['clock']['6:30'] = 'coffee...';",
            "create column family CompoIntCopy " +
            "with key_validation_class = UTF8Type " +
            "and default_validation_class = UTF8Type " +
            "and comparator = 'CompositeType(LongType,LongType)';",

            // Composite row keys
            "create column family CompoKey " +
            "with key_validation_class = 'CompositeType(UTF8Type,LongType)' " +
            "and default_validation_class = UTF8Type " +
            "and comparator = LongType;",
            "set CompoKey['clock:10']['1'] = 'z';",
            "set CompoKey['clock:20']['1'] = 'zzzz';",
            "set CompoKey['clock:30']['2'] = 'daddy?';",
            "set CompoKey['clock:40']['6'] = 'coffee...';",
            "create column family CompoKeyCopy " +
            "with key_validation_class = 'CompositeType(UTF8Type,LongType)' " +
            "and default_validation_class = UTF8Type " +
            "and comparator = LongType;"
    };

    /** Boots embedded Cassandra, seeds test data via the CLI, then starts the mini Hadoop cluster. */
    @BeforeClass
    public static void setup() throws TTransportException, IOException, InterruptedException,
            ConfigurationException, AuthenticationException, AuthorizationException,
            InvalidRequestException, UnavailableException, TimedOutException, TException,
            NotFoundException, CharacterCodingException, ClassNotFoundException,
            NoSuchFieldException, IllegalAccessException, InstantiationException
    {
        startCassandra();
        setupDataByCli(statements);
        startHadoopCluster();
    }

    /** Reads regular, counter and composite column families through CqlStorage. */
    @Test
    public void testCqlStorage() throws IOException, ClassNotFoundException, TException,
            TimedOutException, NotFoundException, InvalidRequestException, NoSuchFieldException,
            UnavailableException, IllegalAccessException, InstantiationException,
            AuthenticationException, AuthorizationException
    {
        // Regular thrift column family.
        pig.registerQuery("data = load 'cql://thriftKs/SomeApp?" + defaultParameters + "' using CqlStorage();");

        // Expected rows (index 2, the 'created' DateType column, is deliberately not asserted):
        //(bar,3.141592653589793,1335890877,User Bar,35.0,9,15000,like)
        //(baz,1.61803399,1335890877,User Baz,95.3,3,512000,dislike)
        //(foo,2.718281828459045,1335890877,User Foo,85.0,8,125000,like)
        //(qux,0.660161815846869,1335890877,User Qux,64.7,2,12000,dislike)
        //{key: chararray,atomic_weight: double,created: long,name: chararray,percent: float,rating: int,score: long,vote_type: chararray}
        Iterator<Tuple> it = pig.openIterator("data");
        int count = 0;
        while (it.hasNext())
        {
            count++;
            Tuple t = it.next();
            if (count == 1)
            {
                Assert.assertEquals(t.get(0), "bar");
                Assert.assertEquals(t.get(1), 3.141592653589793d);
                Assert.assertEquals(t.get(3), "User Bar");
                Assert.assertEquals(t.get(4), 35.0f);
                Assert.assertEquals(t.get(5), 9);
                Assert.assertEquals(t.get(6), 15000L);
                Assert.assertEquals(t.get(7), "like");
            }
            else if (count == 2)
            {
                Assert.assertEquals(t.get(0), "baz");
                Assert.assertEquals(t.get(1), 1.61803399d);
                Assert.assertEquals(t.get(3), "User Baz");
                Assert.assertEquals(t.get(4), 95.3f);
                Assert.assertEquals(t.get(5), 3);
                Assert.assertEquals(t.get(6), 512000L);
                Assert.assertEquals(t.get(7), "dislike");
            }
            else if (count == 3)
            {
                Assert.assertEquals(t.get(0), "foo");
                Assert.assertEquals(t.get(1), 2.718281828459045d);
                Assert.assertEquals(t.get(3), "User Foo");
                Assert.assertEquals(t.get(4), 85.0f);
                Assert.assertEquals(t.get(5), 8);
                Assert.assertEquals(t.get(6), 125000L);
                Assert.assertEquals(t.get(7), "like");
            }
            else if (count == 4)
            {
                Assert.assertEquals(t.get(0), "qux");
                Assert.assertEquals(t.get(1), 0.660161815846869d);
                Assert.assertEquals(t.get(3), "User Qux");
                Assert.assertEquals(t.get(4), 64.7f);
                Assert.assertEquals(t.get(5), 2);
                Assert.assertEquals(t.get(6), 12000L);
                Assert.assertEquals(t.get(7), "dislike");
            }
        }
        Assert.assertEquals(count, 4);

        // Counter column family.
        pig.registerQuery("cc_data = load 'cql://thriftKs/CC?" + defaultParameters + "' using CqlStorage();");

        //(chuck,fist,1)
        //(chuck,kick,3)
        // {key: chararray,column1: chararray,value: long}
        it = pig.openIterator("cc_data");
        count = 0;
        while (it.hasNext())
        {
            count++;
            Tuple t = it.next();
            if (count == 1)
            {
                Assert.assertEquals(t.get(0), "chuck");
                Assert.assertEquals(t.get(1), "fist");
                Assert.assertEquals(t.get(2), 1L);
            }
            else if (count == 2)
            {
                Assert.assertEquals(t.get(0), "chuck");
                Assert.assertEquals(t.get(1), "kick");
                Assert.assertEquals(t.get(2), 3L);
            }
        }
        Assert.assertEquals(count, 2);

        // Composite column family: each composite component becomes its own field.
        pig.registerQuery("compo_data = load 'cql://thriftKs/Compo?" + defaultParameters + "' using CqlStorage();");

        //(kick,bruce,bruce,watch it, mate)
        //(kick,bruce,lee,oww)
        //(punch,bruce,bruce,hunh?)
        //(punch,bruce,lee,ouch)
        //{key: chararray,column1: chararray,column2: chararray,value: chararray}
        it = pig.openIterator("compo_data");
        count = 0;
        while (it.hasNext())
        {
            count++;
            Tuple t = it.next();
            if (count == 1)
            {
                Assert.assertEquals(t.get(0), "kick");
                Assert.assertEquals(t.get(1), "bruce");
                Assert.assertEquals(t.get(2), "bruce");
                Assert.assertEquals(t.get(3), "watch it, mate");
            }
            else if (count == 2)
            {
                Assert.assertEquals(t.get(0), "kick");
                Assert.assertEquals(t.get(1), "bruce");
                Assert.assertEquals(t.get(2), "lee");
                Assert.assertEquals(t.get(3), "oww");
            }
            else if (count == 3)
            {
                Assert.assertEquals(t.get(0), "punch");
                Assert.assertEquals(t.get(1), "bruce");
                Assert.assertEquals(t.get(2), "bruce");
                Assert.assertEquals(t.get(3), "hunh?");
            }
            else if (count == 4)
            {
                Assert.assertEquals(t.get(0), "punch");
                Assert.assertEquals(t.get(1), "bruce");
                Assert.assertEquals(t.get(2), "lee");
                Assert.assertEquals(t.get(3), "ouch");
            }
        }
        Assert.assertEquals(count, 4);
    }

    /** Verifies the tuple layout CassandraStorage produces for a CF with column metadata. */
    @Test
    public void testCassandraStorageSchema() throws IOException, ClassNotFoundException, TException,
            TimedOutException, NotFoundException, InvalidRequestException, NoSuchFieldException,
            UnavailableException, IllegalAccessException, InstantiationException
    {
        //results: (qux,(atomic_weight,0.660161815846869),(created,1335890877),(name,User Qux),(percent,64.7),
        //(rating,2),(score,12000),(vote_type,dislike),{(extra1,extra1),
        //(extra2,extra2),(extra3,extra3),
        //(extra4,extra4),(extra5,extra5),
        //(extra6,extra6),(extra7,extra7)})
        pig.registerQuery("rows = LOAD 'cassandra://thriftKs/SomeApp?" + defaultParameters + "' USING CassandraStorage();");

        //schema: {key: chararray,atomic_weight: (name: chararray,value: double),created: (name: chararray,value: long),
        //name: (name: chararray,value: chararray),percent: (name: chararray,value: float),
        //rating: (name: chararray,value: int),score: (name: chararray,value: long),
        //vote_type: (name: chararray,value: chararray),columns: {(name: chararray,value: chararray)}}
        Iterator<Tuple> it = pig.openIterator("rows");
        if (it.hasNext())
        {
            Tuple t = it.next();
            String rowKey = t.get(0).toString();
            if ("qux".equals(rowKey))
            {
                // Declared columns appear as (name, value) tuples in schema order;
                // index 2 ('created', DateType) is intentionally not asserted.
                Tuple column = (Tuple) t.get(1);
                Assert.assertEquals(column.get(0), "atomic_weight");
                Assert.assertEquals(column.get(1), 0.660161815846869d);
                column = (Tuple) t.get(3);
                Assert.assertEquals(column.get(0), "name");
                Assert.assertEquals(column.get(1), "User Qux");
                column = (Tuple) t.get(4);
                Assert.assertEquals(column.get(0), "percent");
                Assert.assertEquals(column.get(1), 64.7f);
                column = (Tuple) t.get(5);
                Assert.assertEquals(column.get(0), "rating");
                Assert.assertEquals(column.get(1), 2);
                column = (Tuple) t.get(6);
                Assert.assertEquals(column.get(0), "score");
                Assert.assertEquals(column.get(1), 12000L);
                column = (Tuple) t.get(7);
                Assert.assertEquals(column.get(0), "vote_type");
                Assert.assertEquals(column.get(1), "dislike");
                // Undeclared columns land in the trailing bag, sorted by name.
                DataBag columns = (DataBag) t.get(8);
                Iterator<Tuple> iter = columns.iterator();
                int i = 0;
                while (iter.hasNext())
                {
                    i++;
                    column = iter.next();
                    Assert.assertEquals(column.get(0), "extra" + i);
                }
                Assert.assertEquals(7, columns.size());
            }
        }
    }

    /** Copies all rows of SomeApp into CopyOfSomeApp and spot-checks the result. */
    @Test
    public void testCassandraStorageFullCopy() throws IOException, ClassNotFoundException, TException,
            TimedOutException, NotFoundException, InvalidRequestException, NoSuchFieldException,
            UnavailableException, IllegalAccessException, InstantiationException,
            AuthenticationException, AuthorizationException
    {
        createColumnFamily("thriftKs", "CopyOfSomeApp", statements[3]);
        pig.setBatchOn();
        pig.registerQuery("rows = LOAD 'cassandra://thriftKs/SomeApp?" + defaultParameters + "' USING CassandraStorage();");
        //full copy
        pig.registerQuery("STORE rows INTO 'cassandra://thriftKs/CopyOfSomeApp?" + defaultParameters + "' USING CassandraStorage();");
        pig.executeBatch();
        Assert.assertEquals("User Qux", getColumnValue("thriftKs", "CopyOfSomeApp", "name", "qux", "UTF8Type"));
        Assert.assertEquals("dislike", getColumnValue("thriftKs", "CopyOfSomeApp", "vote_type", "qux", "UTF8Type"));
        Assert.assertEquals("64.7", getColumnValue("thriftKs", "CopyOfSomeApp", "percent", "qux", "FloatType"));
    }

    /** Stores a projection of a single column; only that column may appear in the copy. */
    @Test
    public void testCassandraStorageSigleTupleCopy() throws IOException, ClassNotFoundException, TException,
            TimedOutException, NotFoundException, InvalidRequestException, NoSuchFieldException,
            UnavailableException, IllegalAccessException, InstantiationException,
            AuthenticationException, AuthorizationException
    {
        createColumnFamily("thriftKs", "CopyOfSomeApp", statements[3]);
        pig.setBatchOn();
        pig.registerQuery("rows = LOAD 'cassandra://thriftKs/SomeApp?" + defaultParameters + "' USING CassandraStorage();");
        //single tuple
        pig.registerQuery("onecol = FOREACH rows GENERATE key, percent;");
        pig.registerQuery("STORE onecol INTO 'cassandra://thriftKs/CopyOfSomeApp?" + defaultParameters + "' USING CassandraStorage();");
        pig.executeBatch();
        assertColumnAbsent("thriftKs", "CopyOfSomeApp", "name", "qux", "UTF8Type");
        assertColumnAbsent("thriftKs", "CopyOfSomeApp", "vote_type", "qux", "UTF8Type");
        Assert.assertEquals("64.7", getColumnValue("thriftKs", "CopyOfSomeApp", "percent", "qux", "FloatType"));
    }

    /** Stores only the undeclared-columns bag; declared columns must not be copied. */
    @Test
    public void testCassandraStorageBagOnlyCopy() throws IOException, ClassNotFoundException, TException,
            TimedOutException, NotFoundException, InvalidRequestException, NoSuchFieldException,
            UnavailableException, IllegalAccessException, InstantiationException,
            AuthenticationException, AuthorizationException
    {
        createColumnFamily("thriftKs", "CopyOfSomeApp", statements[3]);
        pig.setBatchOn();
        pig.registerQuery("rows = LOAD 'cassandra://thriftKs/SomeApp?" + defaultParameters + "' USING CassandraStorage();");
        //bag only
        pig.registerQuery("other = FOREACH rows GENERATE key, columns;");
        pig.registerQuery("STORE other INTO 'cassandra://thriftKs/CopyOfSomeApp?" + defaultParameters + "' USING CassandraStorage();");
        pig.executeBatch();
        assertColumnAbsent("thriftKs", "CopyOfSomeApp", "name", "qux", "UTF8Type");
        assertColumnAbsent("thriftKs", "CopyOfSomeApp", "vote_type", "qux", "UTF8Type");
        assertColumnAbsent("thriftKs", "CopyOfSomeApp", "percent", "qux", "FloatType");
        Assert.assertEquals("extra1", getColumnValue("thriftKs", "CopyOfSomeApp", "extra1", "qux", "UTF8Type"));
    }

    /** Filters rows in Pig before storing; only matching rows may reach the copy. */
    @Test
    public void testCassandraStorageFilter() throws IOException, ClassNotFoundException, TException,
            TimedOutException, NotFoundException, InvalidRequestException, NoSuchFieldException,
            UnavailableException, IllegalAccessException, InstantiationException,
            AuthenticationException, AuthorizationException
    {
        createColumnFamily("thriftKs", "CopyOfSomeApp", statements[3]);
        pig.setBatchOn();
        pig.registerQuery("rows = LOAD 'cassandra://thriftKs/SomeApp?" + defaultParameters + "' USING CassandraStorage();");

        //filter
        pig.registerQuery("likes = FILTER rows by vote_type.value eq 'like' and rating.value > 5;");
        pig.registerQuery("STORE likes INTO 'cassandra://thriftKs/CopyOfSomeApp?" + defaultParameters + "' USING CassandraStorage();");
        pig.executeBatch();

        // Only 'bar' and 'foo' have vote_type 'like' with rating > 5.
        Assert.assertEquals("like", getColumnValue("thriftKs", "CopyOfSomeApp", "vote_type", "bar", "UTF8Type"));
        Assert.assertEquals("like", getColumnValue("thriftKs", "CopyOfSomeApp", "vote_type", "foo", "UTF8Type"));
        assertColumnAbsent("thriftKs", "CopyOfSomeApp", "vote_type", "qux", "UTF8Type");
        assertColumnAbsent("thriftKs", "CopyOfSomeApp", "vote_type", "baz", "UTF8Type");

        // Second pass: filter on an aggregate over the undeclared-columns bag.
        createColumnFamily("thriftKs", "CopyOfSomeApp", statements[3]);
        pig.setBatchOn();
        pig.registerQuery("rows = LOAD 'cassandra://thriftKs/SomeApp?" + defaultParameters + "' USING CassandraStorage();");
        pig.registerQuery("dislikes_extras = FILTER rows by vote_type.value eq 'dislike' AND COUNT(columns) > 0;");
        pig.registerQuery("STORE dislikes_extras INTO 'cassandra://thriftKs/CopyOfSomeApp?" + defaultParameters + "' USING CassandraStorage();");
        // NOTE(review): 'visible' is registered but never opened or stored — kept for
        // behavioral parity; it exercises alias registration only.
        pig.registerQuery("visible = FILTER rows BY COUNT(columns) == 0;");
        pig.executeBatch();

        // Only 'baz' and 'qux' are dislikes with extra columns.
        Assert.assertEquals("dislike", getColumnValue("thriftKs", "CopyOfSomeApp", "vote_type", "baz", "UTF8Type"));
        Assert.assertEquals("dislike", getColumnValue("thriftKs", "CopyOfSomeApp", "vote_type", "qux", "UTF8Type"));
        assertColumnAbsent("thriftKs", "CopyOfSomeApp", "vote_type", "bar", "UTF8Type");
        assertColumnAbsent("thriftKs", "CopyOfSomeApp", "vote_type", "foo", "UTF8Type");
    }

    /** Joins a BytesType-keyed CF with a UTF8Type-keyed CF, with and without key casts. */
    @Test
    public void testCassandraStorageJoin() throws IOException, ClassNotFoundException, TException,
            TimedOutException, NotFoundException, InvalidRequestException, NoSuchFieldException,
            UnavailableException, IllegalAccessException, InstantiationException,
            AuthenticationException, AuthorizationException
    {
        //test key types with a join
        pig.registerQuery("U8 = load 'cassandra://thriftKs/U8?" + defaultParameters + "' using CassandraStorage();");
        pig.registerQuery("Bytes = load 'cassandra://thriftKs/Bytes?" + defaultParameters + "' using CassandraStorage();");

        //cast key to chararray
        pig.registerQuery("b = foreach Bytes generate (chararray)key, columns;");

        //key in Bytes is a bytearray, U8 chararray
        //(foo,{(x,Z)},foo,{(x,Z)})
        pig.registerQuery("a = join Bytes by key, U8 by key;");
        Iterator<Tuple> it = pig.openIterator("a");
        if (it.hasNext())
        {
            Tuple t = it.next();
            Assert.assertEquals(t.get(0), new DataByteArray("foo".getBytes()));
            DataBag columns = (DataBag) t.get(1);
            Iterator<Tuple> iter = columns.iterator();
            Tuple t1 = iter.next();
            Assert.assertEquals(t1.get(0), "x");
            Assert.assertEquals(t1.get(1), new DataByteArray("Z".getBytes()));
            String column = (String) t.get(2);
            Assert.assertEquals(column, "foo");
            columns = (DataBag) t.get(3);
            iter = columns.iterator();
            Tuple t2 = iter.next();
            Assert.assertEquals(t2.get(0), "x");
            Assert.assertEquals(t2.get(1), new DataByteArray("Z".getBytes()));
        }

        //key should now be cast into a chararray
        //(foo,{(x,Z)},foo,{(x,Z)})
        pig.registerQuery("c = join b by (chararray)key, U8 by (chararray)key;");
        it = pig.openIterator("c");
        if (it.hasNext())
        {
            Tuple t = it.next();
            Assert.assertEquals(t.get(0), "foo");
            DataBag columns = (DataBag) t.get(1);
            Iterator<Tuple> iter = columns.iterator();
            Tuple t1 = iter.next();
            Assert.assertEquals(t1.get(0), "x");
            Assert.assertEquals(t1.get(1), new DataByteArray("Z".getBytes()));
            String column = (String) t.get(2);
            Assert.assertEquals(column, "foo");
            columns = (DataBag) t.get(3);
            iter = columns.iterator();
            Tuple t2 = iter.next();
            Assert.assertEquals(t2.get(0), "x");
            Assert.assertEquals(t2.get(1), new DataByteArray("Z".getBytes()));
        }
    }

    /** Aggregates counter-column values through CassandraStorage. */
    @Test
    public void testCassandraStorageCounterCF() throws IOException, ClassNotFoundException, TException,
            TimedOutException, NotFoundException, InvalidRequestException, NoSuchFieldException,
            UnavailableException, IllegalAccessException, InstantiationException,
            AuthenticationException, AuthorizationException
    {
        pig.registerQuery("rows = LOAD 'cassandra://thriftKs/SomeApp?" + defaultParameters + "' USING CassandraStorage();");

        //Test counter column family support
        pig.registerQuery("CC = load 'cassandra://thriftKs/CC?" + defaultParameters + "' using CassandraStorage();");
        pig.registerQuery("total_hits = foreach CC generate key, SUM(columns.value);");
        //(chuck,4)
        Iterator<Tuple> it = pig.openIterator("total_hits");
        if (it.hasNext())
        {
            Tuple t = it.next();
            Assert.assertEquals(t.get(0), "chuck");
            Assert.assertEquals(t.get(1), 4L);
        }
    }

    /** Reads, filters, computes over and round-trips composite column names. */
    @Test
    public void testCassandraStorageCompositeColumnCF() throws IOException, ClassNotFoundException, TException,
            TimedOutException, NotFoundException, InvalidRequestException, NoSuchFieldException,
            UnavailableException, IllegalAccessException, InstantiationException,
            AuthenticationException, AuthorizationException
    {
        //Test CompositeType
        pig.registerQuery("compo = load 'cassandra://thriftKs/Compo?" + defaultParameters + "' using CassandraStorage();");
        pig.registerQuery("compo = foreach compo generate key as method, flatten(columns);");
        pig.registerQuery("lee = filter compo by columns::name == ('bruce','lee');");

        //(kick,(bruce,lee),oww)
        //(punch,(bruce,lee),ouch)
        Iterator<Tuple> it = pig.openIterator("lee");
        int count = 0;
        while (it.hasNext())
        {
            count++;
            Tuple t = it.next();
            if (count == 1)
                Assert.assertEquals(t.get(0), "kick");
            else
                Assert.assertEquals(t.get(0), "punch");
            Tuple t1 = (Tuple) t.get(1);
            Assert.assertEquals(t1.get(0), "bruce");
            Assert.assertEquals(t1.get(1), "lee");
            if (count == 1)
                Assert.assertEquals(t.get(2), "oww");
            else
                Assert.assertEquals(t.get(2), "ouch");
        }
        Assert.assertEquals(count, 2);

        pig.registerQuery("night = load 'cassandra://thriftKs/CompoInt?" + defaultParameters + "' using CassandraStorage();");
        pig.registerQuery("night = foreach night generate flatten(columns);");
        pig.registerQuery("night = foreach night generate (int)columns::name.$0+(double)columns::name.$1/60 as hour, columns::value as noise;");

        //What happens at the darkest hour?
        pig.registerQuery("darkest = filter night by hour > 2 and hour < 5;");

        //(2.5,daddy?)
        it = pig.openIterator("darkest");
        if (it.hasNext())
        {
            Tuple t = it.next();
            Assert.assertEquals(t.get(0), 2.5d);
            Assert.assertEquals(t.get(1), "daddy?");
        }

        // Round-trip the composite CF through a store and re-load.
        pig.setBatchOn();
        pig.registerQuery("compo_int_rows = LOAD 'cassandra://thriftKs/CompoInt?" + defaultParameters + "' using CassandraStorage();");
        pig.registerQuery("STORE compo_int_rows INTO 'cassandra://thriftKs/CompoIntCopy?" + defaultParameters + "' using CassandraStorage();");
        pig.executeBatch();
        pig.registerQuery("compocopy_int_rows = LOAD 'cassandra://thriftKs/CompoIntCopy?" + defaultParameters + "' using CassandraStorage();");
        //(clock,{((1,0),z),((1,30),zzzz),((2,30),daddy?),((6,30),coffee...)})
        it = pig.openIterator("compocopy_int_rows");
        count = 0;
        if (it.hasNext())
        {
            Tuple t = it.next();
            Assert.assertEquals(t.get(0), "clock");
            DataBag columns = (DataBag) t.get(1);
            Iterator<Tuple> iter = columns.iterator();
            while (iter.hasNext())
            {
                count++;
                Tuple t1 = iter.next();
                Tuple inner = (Tuple) t1.get(0);
                if (count == 1)
                {
                    Assert.assertEquals(inner.get(0), 1L);
                    Assert.assertEquals(inner.get(1), 0L);
                    Assert.assertEquals(t1.get(1), "z");
                }
                else if (count == 2)
                {
                    Assert.assertEquals(inner.get(0), 1L);
                    Assert.assertEquals(inner.get(1), 30L);
                    Assert.assertEquals(t1.get(1), "zzzz");
                }
                else if (count == 3)
                {
                    Assert.assertEquals(inner.get(0), 2L);
                    Assert.assertEquals(inner.get(1), 30L);
                    Assert.assertEquals(t1.get(1), "daddy?");
                }
                else if (count == 4)
                {
                    Assert.assertEquals(inner.get(0), 6L);
                    Assert.assertEquals(inner.get(1), 30L);
                    Assert.assertEquals(t1.get(1), "coffee...");
                }
            }
            Assert.assertEquals(count, 4);
        }
    }

    /** Filters on a composite-key component and round-trips composite row keys. */
    @Test
    public void testCassandraStorageCompositeKeyCF() throws IOException, ClassNotFoundException, TException,
            TimedOutException, NotFoundException, InvalidRequestException, NoSuchFieldException,
            UnavailableException, IllegalAccessException, InstantiationException,
            AuthenticationException, AuthorizationException
    {
        //Test CompositeKey
        pig.registerQuery("compokeys = load 'cassandra://thriftKs/CompoKey?" + defaultParameters + "' using CassandraStorage();");
        pig.registerQuery("compokeys = filter compokeys by key.$1 == 40;");

        //((clock,40),{(6,coffee...)})
        Iterator<Tuple> it = pig.openIterator("compokeys");
        if (it.hasNext())
        {
            Tuple t = it.next();
            Tuple key = (Tuple) t.get(0);
            Assert.assertEquals(key.get(0), "clock");
            Assert.assertEquals(key.get(1), 40L);
            DataBag columns = (DataBag) t.get(1);
            Iterator<Tuple> iter = columns.iterator();
            if (iter.hasNext())
            {
                Tuple t1 = iter.next();
                Assert.assertEquals(t1.get(0), 6L);
                Assert.assertEquals(t1.get(1), "coffee...");
            }
        }

        // Round-trip the composite-keyed CF through a store and re-load.
        pig.setBatchOn();
        pig.registerQuery("compo_key_rows = LOAD 'cassandra://thriftKs/CompoKey?" + defaultParameters + "' using CassandraStorage();");
        pig.registerQuery("STORE compo_key_rows INTO 'cassandra://thriftKs/CompoKeyCopy?" + defaultParameters + "' using CassandraStorage();");
        pig.executeBatch();
        pig.registerQuery("compo_key_copy_rows = LOAD 'cassandra://thriftKs/CompoKeyCopy?" + defaultParameters + "' using CassandraStorage();");

        //((clock,10),{(1,z)})
        //((clock,20),{(1,zzzz)})
        //((clock,30),{(2,daddy?)})
        //((clock,40),{(6,coffee...)})
        it = pig.openIterator("compo_key_copy_rows");
        int count = 0;
        while (it.hasNext())
        {
            Tuple t = it.next();
            count++;
            if (count == 1)
            {
                Tuple key = (Tuple) t.get(0);
                Assert.assertEquals(key.get(0), "clock");
                Assert.assertEquals(key.get(1), 10L);
                DataBag columns = (DataBag) t.get(1);
                Iterator<Tuple> iter = columns.iterator();
                if (iter.hasNext())
                {
                    Tuple t1 = iter.next();
                    Assert.assertEquals(t1.get(0), 1L);
                    Assert.assertEquals(t1.get(1), "z");
                }
            }
            else if (count == 2)
            {
                Tuple key = (Tuple) t.get(0);
                Assert.assertEquals(key.get(0), "clock");
                Assert.assertEquals(key.get(1), 20L);
                DataBag columns = (DataBag) t.get(1);
                Iterator<Tuple> iter = columns.iterator();
                if (iter.hasNext())
                {
                    Tuple t1 = iter.next();
                    Assert.assertEquals(t1.get(0), 1L);
                    Assert.assertEquals(t1.get(1), "zzzz");
                }
            }
            else if (count == 3)
            {
                Tuple key = (Tuple) t.get(0);
                Assert.assertEquals(key.get(0), "clock");
                Assert.assertEquals(key.get(1), 30L);
                DataBag columns = (DataBag) t.get(1);
                Iterator<Tuple> iter = columns.iterator();
                if (iter.hasNext())
                {
                    Tuple t1 = iter.next();
                    Assert.assertEquals(t1.get(0), 2L);
                    Assert.assertEquals(t1.get(1), "daddy?");
                }
            }
            else if (count == 4)
            {
                Tuple key = (Tuple) t.get(0);
                Assert.assertEquals(key.get(0), "clock");
                Assert.assertEquals(key.get(1), 40L);
                DataBag columns = (DataBag) t.get(1);
                Iterator<Tuple> iter = columns.iterator();
                if (iter.hasNext())
                {
                    Tuple t1 = iter.next();
                    Assert.assertEquals(t1.get(0), 6L);
                    Assert.assertEquals(t1.get(1), "coffee...");
                }
            }
        }
        Assert.assertEquals(count, 4);
    }

    /**
     * Reads a single column for {@code key} from {@code ks.cf} via the raw Thrift
     * client and decodes it with the named validator class.
     *
     * @throws NotFoundException if the column does not exist for that row key
     */
    private String getColumnValue(String ks, String cf, String colName, String key, String validator)
            throws AuthenticationException, AuthorizationException, InvalidRequestException,
                   UnavailableException, TimedOutException, TException, NotFoundException, IOException
    {
        Cassandra.Client client = getClient();
        client.set_keyspace(ks);

        ByteBuffer key_user_id = ByteBufferUtil.bytes(key);
        ColumnPath cp = new ColumnPath(cf);
        cp.column = ByteBufferUtil.bytes(colName);

        // read
        ColumnOrSuperColumn got = client.get(key_user_id, cp, ConsistencyLevel.ONE);
        return parseType(validator).getString(got.getColumn().value);
    }

    /**
     * Asserts that {@code colName} was NOT written for {@code key}: the Thrift read
     * must raise {@link NotFoundException}; any returned value fails the test.
     */
    private void assertColumnAbsent(String ks, String cf, String colName, String key, String validator)
            throws AuthenticationException, AuthorizationException, InvalidRequestException,
                   UnavailableException, TimedOutException, TException, IOException
    {
        try
        {
            getColumnValue(ks, cf, colName, key, validator);
            Assert.fail("Expected no value for column '" + colName + "' of row '" + key + "'");
        }
        catch (NotFoundException expected)
        {
            // the column is absent, as required
        }
    }

    /**
     * (Re)creates a column family: best-effort drop of any existing CF with that
     * name, then executes the supplied CLI create statement.
     */
    private void createColumnFamily(String ks, String cf, String statement)
            throws CharacterCodingException, ClassNotFoundException, TException, TimedOutException,
                   NotFoundException, InvalidRequestException, NoSuchFieldException,
                   UnavailableException, IllegalAccessException, InstantiationException
    {
        CliMain.connect("127.0.0.1", 9170);
        try
        {
            CliMain.processStatement("use " + ks + ";");
            CliMain.processStatement("drop column family " + cf + ";");
        }
        catch (Exception ignored)
        {
            // best effort: the column family may not exist yet on the first run
        }
        CliMain.processStatement(statement);
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.datamigration.v1beta1;

/**
 * CloudDatabaseMigrationService request.
 *
 * <p>Base class for every Data Migration API request: it carries the standard Google API
 * system query parameters ({@code $.xgafv}, {@code access_token}, {@code alt}, {@code callback},
 * {@code fields}, {@code key}, {@code oauth_token}, {@code prettyPrint}, {@code quotaUser},
 * {@code uploadType}, {@code upload_protocol}) as {@code @Key}-annotated fields. Generated code —
 * bare {@code @Key} derives the wire parameter name from the field name, so do not rename fields.
 *
 * @since 1.3
 */
@SuppressWarnings("javadoc")
public abstract class CloudDatabaseMigrationServiceRequest<T> extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest<T> {

  /**
   * @param client Google client
   * @param method HTTP Method
   * @param uriTemplate URI template for the path relative to the base URL. If it starts with a "/"
   *        the base path from the base URL will be stripped out. The URI template can also be a
   *        full URL. URI template expansion is done using
   *        {@link com.google.api.client.http.UriTemplate#expand(String, String, Object, boolean)}
   * @param content A POJO that can be serialized into JSON or {@code null} for none
   * @param responseClass response class to parse into
   */
  public CloudDatabaseMigrationServiceRequest(
      CloudDatabaseMigrationService client, String method, String uriTemplate, Object content, Class<T> responseClass) {
    super(
        client,
        method,
        uriTemplate,
        content,
        responseClass);
  }

  /** V1 error format. */
  @com.google.api.client.util.Key("$.xgafv")
  private java.lang.String $Xgafv;

  /**
   * V1 error format.
   */
  public java.lang.String get$Xgafv() {
    return $Xgafv;
  }

  /** V1 error format. */
  public CloudDatabaseMigrationServiceRequest<T> set$Xgafv(java.lang.String $Xgafv) {
    this.$Xgafv = $Xgafv;
    return this;
  }

  /** OAuth access token. */
  @com.google.api.client.util.Key("access_token")
  private java.lang.String accessToken;

  /**
   * OAuth access token.
   */
  public java.lang.String getAccessToken() {
    return accessToken;
  }

  /** OAuth access token. */
  public CloudDatabaseMigrationServiceRequest<T> setAccessToken(java.lang.String accessToken) {
    this.accessToken = accessToken;
    return this;
  }

  /** Data format for response. */
  @com.google.api.client.util.Key
  private java.lang.String alt;

  /**
   * Data format for response. [default: json]
   */
  public java.lang.String getAlt() {
    return alt;
  }

  /** Data format for response. */
  public CloudDatabaseMigrationServiceRequest<T> setAlt(java.lang.String alt) {
    this.alt = alt;
    return this;
  }

  /** JSONP callback parameter. */
  @com.google.api.client.util.Key
  private java.lang.String callback;

  /**
   * JSONP
   */
  public java.lang.String getCallback() {
    return callback;
  }

  /** JSONP */
  public CloudDatabaseMigrationServiceRequest<T> setCallback(java.lang.String callback) {
    this.callback = callback;
    return this;
  }

  /** Selector specifying which fields to include in a partial response. */
  @com.google.api.client.util.Key
  private java.lang.String fields;

  /**
   * Selector specifying which fields to include in a partial response.
   */
  public java.lang.String getFields() {
    return fields;
  }

  /** Selector specifying which fields to include in a partial response. */
  public CloudDatabaseMigrationServiceRequest<T> setFields(java.lang.String fields) {
    this.fields = fields;
    return this;
  }

  /**
   * API key. Your API key identifies your project and provides you with API access, quota, and
   * reports. Required unless you provide an OAuth 2.0 token.
   */
  @com.google.api.client.util.Key
  private java.lang.String key;

  /**
   * API key. Your API key identifies your project and provides you with API access, quota, and
   * reports. Required unless you provide an OAuth 2.0 token.
   */
  public java.lang.String getKey() {
    return key;
  }

  /**
   * API key. Your API key identifies your project and provides you with API access, quota, and
   * reports. Required unless you provide an OAuth 2.0 token.
   */
  public CloudDatabaseMigrationServiceRequest<T> setKey(java.lang.String key) {
    this.key = key;
    return this;
  }

  /** OAuth 2.0 token for the current user. */
  @com.google.api.client.util.Key("oauth_token")
  private java.lang.String oauthToken;

  /**
   * OAuth 2.0 token for the current user.
   */
  public java.lang.String getOauthToken() {
    return oauthToken;
  }

  /** OAuth 2.0 token for the current user. */
  public CloudDatabaseMigrationServiceRequest<T> setOauthToken(java.lang.String oauthToken) {
    this.oauthToken = oauthToken;
    return this;
  }

  /** Returns response with indentations and line breaks. */
  @com.google.api.client.util.Key
  private java.lang.Boolean prettyPrint;

  /**
   * Returns response with indentations and line breaks. [default: true]
   */
  public java.lang.Boolean getPrettyPrint() {
    return prettyPrint;
  }

  /** Returns response with indentations and line breaks. */
  public CloudDatabaseMigrationServiceRequest<T> setPrettyPrint(java.lang.Boolean prettyPrint) {
    this.prettyPrint = prettyPrint;
    return this;
  }

  /**
   * Available to use for quota purposes for server-side applications. Can be any arbitrary string
   * assigned to a user, but should not exceed 40 characters.
   */
  @com.google.api.client.util.Key
  private java.lang.String quotaUser;

  /**
   * Available to use for quota purposes for server-side applications. Can be any arbitrary string
   * assigned to a user, but should not exceed 40 characters.
   */
  public java.lang.String getQuotaUser() {
    return quotaUser;
  }

  /**
   * Available to use for quota purposes for server-side applications. Can be any arbitrary string
   * assigned to a user, but should not exceed 40 characters.
   */
  public CloudDatabaseMigrationServiceRequest<T> setQuotaUser(java.lang.String quotaUser) {
    this.quotaUser = quotaUser;
    return this;
  }

  /** Legacy upload protocol for media (e.g. "media", "multipart"). */
  @com.google.api.client.util.Key
  private java.lang.String uploadType;

  /**
   * Legacy upload protocol for media (e.g. "media", "multipart").
   */
  public java.lang.String getUploadType() {
    return uploadType;
  }

  /** Legacy upload protocol for media (e.g. "media", "multipart"). */
  public CloudDatabaseMigrationServiceRequest<T> setUploadType(java.lang.String uploadType) {
    this.uploadType = uploadType;
    return this;
  }

  /** Upload protocol for media (e.g. "raw", "multipart"). */
  @com.google.api.client.util.Key("upload_protocol")
  private java.lang.String uploadProtocol;

  /**
   * Upload protocol for media (e.g. "raw", "multipart").
   */
  public java.lang.String getUploadProtocol() {
    return uploadProtocol;
  }

  /** Upload protocol for media (e.g. "raw", "multipart"). */
  public CloudDatabaseMigrationServiceRequest<T> setUploadProtocol(java.lang.String uploadProtocol) {
    this.uploadProtocol = uploadProtocol;
    return this;
  }

  /** Narrows the inherited client accessor to the concrete generated client type. */
  @Override
  public final CloudDatabaseMigrationService getAbstractGoogleClient() {
    return (CloudDatabaseMigrationService) super.getAbstractGoogleClient();
  }

  // Covariant overrides below exist so chained calls keep this request's concrete type.

  @Override
  public CloudDatabaseMigrationServiceRequest<T> setDisableGZipContent(boolean disableGZipContent) {
    return (CloudDatabaseMigrationServiceRequest<T>) super.setDisableGZipContent(disableGZipContent);
  }

  @Override
  public CloudDatabaseMigrationServiceRequest<T> setRequestHeaders(com.google.api.client.http.HttpHeaders headers) {
    return (CloudDatabaseMigrationServiceRequest<T>) super.setRequestHeaders(headers);
  }

  @Override
  public CloudDatabaseMigrationServiceRequest<T> set(String parameterName, Object value) {
    return (CloudDatabaseMigrationServiceRequest<T>) super.set(parameterName, value);
  }
}
/* * Copyright 2005 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.core.rule; import org.drools.core.WorkingMemory; import org.drools.core.common.InternalFactHandle; import org.drools.core.common.InternalWorkingMemory; import org.drools.core.reteoo.LeftTuple; import org.drools.core.spi.AcceptsReadAccessor; import org.drools.core.spi.CompiledInvoker; import org.drools.core.spi.Evaluator; import org.drools.core.spi.FieldValue; import org.drools.core.spi.InternalReadAccessor; import org.drools.core.spi.ReadAccessor; import org.drools.core.spi.Restriction; import org.drools.core.spi.ReturnValueExpression; import org.drools.core.spi.ReturnValueExpression.SafeReturnValueExpression; import org.drools.core.spi.Tuple; import org.drools.core.spi.Wireable; import org.kie.internal.security.KiePolicyHelper; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; public class ReturnValueRestriction implements Restriction, AcceptsReadAccessor, Wireable { private static final long serialVersionUID = 510l; private ReturnValueExpression expression; private String[] requiredGlobals; private Declaration[] requiredDeclarations; private Declaration[] previousDeclarations; private Declaration[] localDeclarations; private Evaluator evaluator; private InternalReadAccessor readAccessor; private static final 
Declaration[] noRequiredDeclarations = new Declaration[]{}; private static final String[] noRequiredGlobals = new String[]{}; private List<ReturnValueRestriction> cloned = Collections.<ReturnValueRestriction> emptyList(); public ReturnValueRestriction() { } public ReturnValueRestriction(final InternalReadAccessor fieldExtractor, final Declaration[] previousDeclarations, final Declaration[] localDeclarations, final String[] requiredGlobals, final Evaluator evaluator) { this( fieldExtractor, null, previousDeclarations, localDeclarations, requiredGlobals, evaluator ); } public ReturnValueRestriction(final InternalReadAccessor fieldExtractor, final ReturnValueExpression returnValueExpression, final Declaration[] previousDeclarations, final Declaration[] localDeclarations, final String[] requiredGlobals, final Evaluator evaluator) { this.expression = returnValueExpression; this.readAccessor = fieldExtractor; if ( previousDeclarations != null ) { this.previousDeclarations = previousDeclarations; } else { this.previousDeclarations = ReturnValueRestriction.noRequiredDeclarations; } if ( localDeclarations != null ) { this.localDeclarations = localDeclarations; } else { this.localDeclarations = ReturnValueRestriction.noRequiredDeclarations; } if ( requiredGlobals != null ) { this.requiredGlobals = requiredGlobals; } else { this.requiredGlobals = ReturnValueRestriction.noRequiredGlobals; } this.evaluator = evaluator; this.requiredDeclarations = new Declaration[this.previousDeclarations.length + this.localDeclarations.length]; System.arraycopy( this.previousDeclarations, 0, this.requiredDeclarations, 0, this.previousDeclarations.length ); System.arraycopy( this.localDeclarations, 0, this.requiredDeclarations, this.previousDeclarations.length, this.localDeclarations.length ); } @SuppressWarnings("unchecked") public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { expression = (ReturnValueExpression) in.readObject(); requiredGlobals = (String[]) 
in.readObject(); requiredDeclarations = (Declaration[]) in.readObject(); previousDeclarations = (Declaration[]) in.readObject(); localDeclarations = (Declaration[]) in.readObject(); evaluator = (Evaluator) in.readObject(); readAccessor = (InternalReadAccessor) in.readObject(); this.cloned = (List<ReturnValueRestriction>) in.readObject(); } public void writeExternal(ObjectOutput out) throws IOException { if ( this.expression instanceof CompiledInvoker ) { out.writeObject( null ); } else { out.writeObject( this.expression ); } out.writeObject( requiredGlobals ); out.writeObject( requiredDeclarations ); out.writeObject( previousDeclarations ); out.writeObject( localDeclarations ); out.writeObject( evaluator ); out.writeObject( readAccessor ); out.writeObject( this.cloned ); } public void setReadAccessor(InternalReadAccessor readAccessor) { this.readAccessor = readAccessor; } public Declaration[] getRequiredDeclarations() { return this.requiredDeclarations; } public Declaration[] getPreviousDeclarations() { return this.previousDeclarations; } public Declaration[] getLocalDeclarations() { return this.localDeclarations; } public String[] getRequiredGlobals() { return this.requiredGlobals; } public void replaceDeclaration(Declaration oldDecl, Declaration newDecl) { for ( int i = 0; i < this.requiredDeclarations.length; i++ ) { if ( this.requiredDeclarations[i].equals( oldDecl ) ) { this.requiredDeclarations[i] = newDecl; } } for ( int i = 0; i < this.previousDeclarations.length; i++ ) { if ( this.previousDeclarations[i].equals( oldDecl ) ) { this.previousDeclarations[i] = newDecl; } } for ( int i = 0; i < this.localDeclarations.length; i++ ) { if ( this.localDeclarations[i].equals( oldDecl ) ) { this.localDeclarations[i] = newDecl; } } this.expression.replaceDeclaration( oldDecl, newDecl ); } public void wire(Object object) { setReturnValueExpression( KiePolicyHelper.isPolicyEnabled() ? 
new SafeReturnValueExpression( (ReturnValueExpression) object ) : (ReturnValueExpression) object ); for ( ReturnValueRestriction clone : this.cloned ) { clone.wire( object ); } } public void setReturnValueExpression(final ReturnValueExpression expression) { this.expression = expression; } public ReturnValueExpression getExpression() { return this.expression; } public Evaluator getEvaluator() { return this.evaluator; } public boolean isTemporal() { return this.evaluator.isTemporal(); } public boolean isAllowed(final InternalReadAccessor readAccessor, final InternalFactHandle handle, final Tuple tuple, final WorkingMemory workingMemory, final ContextEntry context) { try { return this.evaluator.evaluate( (InternalWorkingMemory) workingMemory, this.readAccessor, handle, this.expression.evaluate( handle, tuple, this.previousDeclarations, this.localDeclarations, workingMemory, ((ReturnValueContextEntry) context).dialectContext ) ); } catch ( final Exception e ) { throw new RuntimeException( e ); } } public boolean isAllowed(final InternalReadAccessor extractor, final InternalFactHandle handle, final InternalWorkingMemory workingMemory, final ContextEntry context) { try { ReturnValueContextEntry ctx = (ReturnValueContextEntry) context; FieldValue value = this.expression.evaluate( handle, null, this.previousDeclarations, this.localDeclarations, workingMemory, ctx.dialectContext ); return this.evaluator.evaluate( workingMemory, this.readAccessor, handle, value ); } catch ( final Exception e ) { throw new RuntimeException( e ); } } public boolean isAllowedCachedLeft(final ContextEntry context, final InternalFactHandle handle) { try { ReturnValueContextEntry ctx = (ReturnValueContextEntry) context; FieldValue value = this.expression.evaluate( handle, ctx.tuple, this.previousDeclarations, this.localDeclarations, ctx.workingMemory, ctx.dialectContext ); return this.evaluator.evaluate( ctx.workingMemory, this.readAccessor, handle, value ); } catch ( final Exception e ) { throw 
new RuntimeException( e ); } } public boolean isAllowedCachedRight(final Tuple tuple, final ContextEntry context) { try { ReturnValueContextEntry ctx = (ReturnValueContextEntry) context; FieldValue value = this.expression.evaluate( ctx.handle, tuple, this.previousDeclarations, this.localDeclarations, ctx.workingMemory, ctx.dialectContext ); return this.evaluator.evaluate( ctx.workingMemory, this.readAccessor, ctx.handle, value ); } catch ( final Exception e ) { throw new RuntimeException( e ); } } public int hashCode() { final int PRIME = 31; int result = 1; result = PRIME * result + this.evaluator.hashCode(); result = PRIME * result + ((this.expression != null) ? this.expression.hashCode() : 0); result = PRIME * result + ReturnValueRestriction.hashCode( this.localDeclarations ); result = PRIME * result + ReturnValueRestriction.hashCode( this.previousDeclarations ); result = PRIME * result + ReturnValueRestriction.hashCode( this.requiredGlobals ); return result; } public boolean equals(final Object object) { if ( object == this ) { return true; } if ( object == null || object.getClass() != ReturnValueRestriction.class ) { return false; } final ReturnValueRestriction other = (ReturnValueRestriction) object; if ( this.localDeclarations.length != other.localDeclarations.length ) { return false; } if ( this.previousDeclarations.length != other.previousDeclarations.length ) { return false; } if ( this.requiredGlobals.length != other.requiredGlobals.length ) { return false; } if ( !Arrays.equals( this.localDeclarations, other.localDeclarations ) ) { return false; } if ( !Arrays.equals( this.previousDeclarations, other.previousDeclarations ) ) { return false; } return Arrays.equals(this.requiredGlobals, other.requiredGlobals) && this.evaluator.equals(other.evaluator) && this.expression.equals(other.expression); } private static int hashCode(final Object[] array) { final int PRIME = 31; if ( array == null ) { return 0; } int result = 1; for (Object anArray : array) { 
result = PRIME * result + (anArray == null ? 0 : anArray.hashCode()); } return result; } public ContextEntry createContextEntry() { ReturnValueContextEntry ctx = new ReturnValueContextEntry( this.readAccessor, this.previousDeclarations, this.localDeclarations ); ctx.dialectContext = this.expression.createContext(); return ctx; } public ReturnValueRestriction clone() { Declaration[] previous = new Declaration[this.previousDeclarations.length]; for ( int i = 0; i < previous.length; i++ ) { previous[i] = this.previousDeclarations[i].clone(); } Declaration[] local = new Declaration[this.localDeclarations.length]; for ( int i = 0; i < local.length; i++ ) { local[i] = this.localDeclarations[i].clone(); } ReturnValueRestriction clone = new ReturnValueRestriction( this.readAccessor, this.expression, previous, local, this.requiredGlobals, this.evaluator ); if ( this.cloned == Collections.EMPTY_LIST ) { this.cloned = new ArrayList<ReturnValueRestriction>( 1 ); } this.cloned.add( clone ); return clone; } public static class ReturnValueContextEntry implements ContextEntry { private static final long serialVersionUID = 510l; public ReadAccessor fieldExtractor; public InternalFactHandle handle; public Tuple tuple; public InternalWorkingMemory workingMemory; public Declaration[] previousDeclarations; public Declaration[] localDeclarations; private ContextEntry entry; public Object dialectContext; public ReturnValueContextEntry() { } public ReturnValueContextEntry(final ReadAccessor fieldExtractor, final Declaration[] previousDeclarations, final Declaration[] localDeclarations) { this.fieldExtractor = fieldExtractor; this.previousDeclarations = previousDeclarations; this.localDeclarations = localDeclarations; } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { fieldExtractor = (ReadAccessor) in.readObject(); handle = (InternalFactHandle) in.readObject(); tuple = (LeftTuple) in.readObject(); workingMemory = (InternalWorkingMemory) 
in.readObject(); previousDeclarations = (Declaration[]) in.readObject(); localDeclarations = (Declaration[]) in.readObject(); entry = (ContextEntry) in.readObject(); dialectContext = in.readObject(); } public void writeExternal(ObjectOutput out) throws IOException { out.writeObject( fieldExtractor ); out.writeObject( handle ); out.writeObject( tuple ); out.writeObject( workingMemory ); out.writeObject( previousDeclarations ); out.writeObject( localDeclarations ); out.writeObject( entry ); out.writeObject( dialectContext ); } public ContextEntry getNext() { return this.entry; } public void setNext(final ContextEntry entry) { this.entry = entry; } public void updateFromFactHandle(final InternalWorkingMemory workingMemory, final InternalFactHandle handle) { this.workingMemory = workingMemory; this.handle = handle; } public void updateFromTuple(final InternalWorkingMemory workingMemory, final Tuple tuple) { this.workingMemory = workingMemory; this.tuple = tuple; } /* (non-Javadoc) * @see org.kie.rule.ReturnValueContextEntry#getFieldExtractor() */ public ReadAccessor getFieldExtractor() { return this.fieldExtractor; } /* (non-Javadoc) * @see org.kie.rule.ReturnValueContextEntry#getObject() */ public InternalFactHandle getHandle() { return this.handle; } /* (non-Javadoc) * @see org.kie.rule.ReturnValueContextEntry#getRequiredDeclarations() */ public Declaration[] getPreviousDeclarations() { return this.previousDeclarations; } public Declaration[] getLocalDeclarations() { return this.localDeclarations; } /* (non-Javadoc) * @see org.kie.rule.ReturnValueContextEntry#getWorkingMemory() */ public InternalWorkingMemory getWorkingMemory() { return this.workingMemory; } public void resetTuple() { this.tuple = null; } public void resetFactHandle() { this.handle = null; } } }
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 * Changes may cause incorrect behavior and will be lost if the code is
 * regenerated.
 */

package fixtures.url;

import com.microsoft.rest.ServiceCall;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceResponse;
import fixtures.url.models.ErrorException;
import java.io.IOException;
import rx.Observable;

/**
 * An instance of this class provides access to all the operations defined
 * in PathItems.
 *
 * Each operation is exposed in three flavours: a blocking call returning a
 * {@link ServiceResponse}, a callback-based async call returning a
 * {@link ServiceCall}, and an {@link Observable}-based async call.
 */
public interface PathItems {
    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @throws ErrorException exception thrown from REST call
     * @throws IOException exception thrown from serialization/deserialization
     * @throws IllegalArgumentException exception thrown from invalid parameters
     * @return the {@link ServiceResponse} object if successful.
     */
    ServiceResponse<Void> getAllWithValues(String localStringPath, String pathItemStringPath) throws ErrorException, IOException, IllegalArgumentException;

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    ServiceCall<Void> getAllWithValuesAsync(String localStringPath, String pathItemStringPath, final ServiceCallback<Void> serviceCallback);

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param localStringQuery should contain value 'localStringQuery'
     * @param pathItemStringQuery A string value 'pathItemStringQuery' that appears as a query parameter
     * @throws ErrorException exception thrown from REST call
     * @throws IOException exception thrown from serialization/deserialization
     * @throws IllegalArgumentException exception thrown from invalid parameters
     * @return the {@link ServiceResponse} object if successful.
     */
    ServiceResponse<Void> getAllWithValues(String localStringPath, String pathItemStringPath, String localStringQuery, String pathItemStringQuery) throws ErrorException, IOException, IllegalArgumentException;

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param localStringQuery should contain value 'localStringQuery'
     * @param pathItemStringQuery A string value 'pathItemStringQuery' that appears as a query parameter
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    ServiceCall<Void> getAllWithValuesAsync(String localStringPath, String pathItemStringPath, String localStringQuery, String pathItemStringQuery, final ServiceCallback<Void> serviceCallback);

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param localStringQuery should contain value 'localStringQuery'
     * @param pathItemStringQuery A string value 'pathItemStringQuery' that appears as a query parameter
     * @return the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> getAllWithValuesAsync(String localStringPath, String pathItemStringPath, String localStringQuery, String pathItemStringQuery);

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @throws ErrorException exception thrown from REST call
     * @throws IOException exception thrown from serialization/deserialization
     * @throws IllegalArgumentException exception thrown from invalid parameters
     * @return the {@link ServiceResponse} object if successful.
     */
    ServiceResponse<Void> getGlobalQueryNull(String localStringPath, String pathItemStringPath) throws ErrorException, IOException, IllegalArgumentException;

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    ServiceCall<Void> getGlobalQueryNullAsync(String localStringPath, String pathItemStringPath, final ServiceCallback<Void> serviceCallback);

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param localStringQuery should contain value 'localStringQuery'
     * @param pathItemStringQuery A string value 'pathItemStringQuery' that appears as a query parameter
     * @throws ErrorException exception thrown from REST call
     * @throws IOException exception thrown from serialization/deserialization
     * @throws IllegalArgumentException exception thrown from invalid parameters
     * @return the {@link ServiceResponse} object if successful.
     */
    ServiceResponse<Void> getGlobalQueryNull(String localStringPath, String pathItemStringPath, String localStringQuery, String pathItemStringQuery) throws ErrorException, IOException, IllegalArgumentException;

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param localStringQuery should contain value 'localStringQuery'
     * @param pathItemStringQuery A string value 'pathItemStringQuery' that appears as a query parameter
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    ServiceCall<Void> getGlobalQueryNullAsync(String localStringPath, String pathItemStringPath, String localStringQuery, String pathItemStringQuery, final ServiceCallback<Void> serviceCallback);

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param localStringQuery should contain value 'localStringQuery'
     * @param pathItemStringQuery A string value 'pathItemStringQuery' that appears as a query parameter
     * @return the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> getGlobalQueryNullAsync(String localStringPath, String pathItemStringPath, String localStringQuery, String pathItemStringQuery);

    /**
     * send globalStringPath=globalStringPath, pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery=null.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @throws ErrorException exception thrown from REST call
     * @throws IOException exception thrown from serialization/deserialization
     * @throws IllegalArgumentException exception thrown from invalid parameters
     * @return the {@link ServiceResponse} object if successful.
     */
    ServiceResponse<Void> getGlobalAndLocalQueryNull(String localStringPath, String pathItemStringPath) throws ErrorException, IOException, IllegalArgumentException;

    /**
     * send globalStringPath=globalStringPath, pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery=null.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    ServiceCall<Void> getGlobalAndLocalQueryNullAsync(String localStringPath, String pathItemStringPath, final ServiceCallback<Void> serviceCallback);

    /**
     * send globalStringPath=globalStringPath, pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery=null.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param localStringQuery should contain null value
     * @param pathItemStringQuery A string value 'pathItemStringQuery' that appears as a query parameter
     * @throws ErrorException exception thrown from REST call
     * @throws IOException exception thrown from serialization/deserialization
     * @throws IllegalArgumentException exception thrown from invalid parameters
     * @return the {@link ServiceResponse} object if successful.
     */
    ServiceResponse<Void> getGlobalAndLocalQueryNull(String localStringPath, String pathItemStringPath, String localStringQuery, String pathItemStringQuery) throws ErrorException, IOException, IllegalArgumentException;

    /**
     * send globalStringPath=globalStringPath, pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery=null.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param localStringQuery should contain null value
     * @param pathItemStringQuery A string value 'pathItemStringQuery' that appears as a query parameter
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    ServiceCall<Void> getGlobalAndLocalQueryNullAsync(String localStringPath, String pathItemStringPath, String localStringQuery, String pathItemStringQuery, final ServiceCallback<Void> serviceCallback);

    /**
     * send globalStringPath=globalStringPath, pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery=null.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param localStringQuery should contain null value
     * @param pathItemStringQuery A string value 'pathItemStringQuery' that appears as a query parameter
     * @return the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> getGlobalAndLocalQueryNullAsync(String localStringPath, String pathItemStringPath, String localStringQuery, String pathItemStringQuery);

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery=null, localStringQuery=null.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @throws ErrorException exception thrown from REST call
     * @throws IOException exception thrown from serialization/deserialization
     * @throws IllegalArgumentException exception thrown from invalid parameters
     * @return the {@link ServiceResponse} object if successful.
     */
    ServiceResponse<Void> getLocalPathItemQueryNull(String localStringPath, String pathItemStringPath) throws ErrorException, IOException, IllegalArgumentException;

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery=null, localStringQuery=null.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    ServiceCall<Void> getLocalPathItemQueryNullAsync(String localStringPath, String pathItemStringPath, final ServiceCallback<Void> serviceCallback);

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery=null, localStringQuery=null.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param localStringQuery should contain value null
     * @param pathItemStringQuery should contain value null
     * @throws ErrorException exception thrown from REST call
     * @throws IOException exception thrown from serialization/deserialization
     * @throws IllegalArgumentException exception thrown from invalid parameters
     * @return the {@link ServiceResponse} object if successful.
     */
    ServiceResponse<Void> getLocalPathItemQueryNull(String localStringPath, String pathItemStringPath, String localStringQuery, String pathItemStringQuery) throws ErrorException, IOException, IllegalArgumentException;

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery=null, localStringQuery=null.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param localStringQuery should contain value null
     * @param pathItemStringQuery should contain value null
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    ServiceCall<Void> getLocalPathItemQueryNullAsync(String localStringPath, String pathItemStringPath, String localStringQuery, String pathItemStringQuery, final ServiceCallback<Void> serviceCallback);

    /**
     * send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery=null, localStringQuery=null.
     *
     * @param localStringPath should contain value 'localStringPath'
     * @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
     * @param localStringQuery should contain value null
     * @param pathItemStringQuery should contain value null
     * @return the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> getLocalPathItemQueryNullAsync(String localStringPath, String pathItemStringPath, String localStringQuery, String pathItemStringQuery);

}
/*
 * Copyright 2016-2017 The Sponge authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.openksavi.sponge.groovy.core;

import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import groovy.lang.Binding;
import groovy.lang.GroovyObject;
import groovy.lang.GroovyRuntimeException;
import groovy.lang.GroovyShell;
import groovy.lang.MetaMethod;
import groovy.lang.MissingPropertyException;
import groovy.lang.Script;
import groovy.util.GroovyScriptEngine;
import groovy.util.ResourceException;
import groovy.util.ScriptException;

import org.apache.commons.lang3.ClassUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.codehaus.groovy.control.CompilerConfiguration;
import org.codehaus.groovy.control.customizers.ImportCustomizer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.openksavi.sponge.SpongeException;
import org.openksavi.sponge.action.ActionBuilder;
import org.openksavi.sponge.core.engine.BaseSpongeEngine;
import org.openksavi.sponge.core.kb.BaseScriptKnowledgeBaseInterpreter;
import org.openksavi.sponge.core.kb.CachedScriptClassInstancePovider;
import org.openksavi.sponge.core.kb.ScriptClassInstanceProvider;
import org.openksavi.sponge.core.plugin.BasePlugin;
import org.openksavi.sponge.core.util.SpongeUtils;
import org.openksavi.sponge.core.util.exception.SpongeExceptionUtils;
import org.openksavi.sponge.engine.SpongeEngine;
import org.openksavi.sponge.groovy.GroovyActionBuilder;
import org.openksavi.sponge.groovy.GroovyConstants;
import org.openksavi.sponge.groovy.GroovyRule;
import org.openksavi.sponge.kb.KnowledgeBase;
import org.openksavi.sponge.kb.KnowledgeBaseConstants;
import org.openksavi.sponge.kb.KnowledgeBaseScript;
import org.openksavi.sponge.kb.ScriptKnowledgeBaseInterpreter;
import org.openksavi.sponge.plugin.Plugin;
import org.openksavi.sponge.rule.Rule;

/**
 * Knowledge base interpreter supporting knowledge base to be defined in the Groovy language.
 */
public class GroovyKnowledgeBaseInterpreter extends BaseScriptKnowledgeBaseInterpreter {

    private static final Logger logger = LoggerFactory.getLogger(GroovyKnowledgeBaseInterpreter.class);

    /** Name of the engine property holding an optional, PROP_PATH_SEPARATOR-delimited Groovy classpath. */
    public static final String PROP_CLASSPATH = "groovy.classpath";

    /** Groovy shell. This is the interface to Groovy used by the engine. */
    private GroovyShell shell;

    /** Shared variable binding backed by a ConcurrentHashMap (see createBinding()). */
    private Binding binding;

    /** Loaded knowledge base scripts; the most recently loaded script is kept first (see doLoad()). */
    private List<Script> scripts;

    public GroovyKnowledgeBaseInterpreter(SpongeEngine engine, KnowledgeBase knowledgeBase) {
        super(new GroovyKnowledgeBaseEngineOperations((BaseSpongeEngine) engine, knowledgeBase), GroovyConstants.TYPE);
    }

    /**
     * Prepares the Groovy shell: registers script-specific processor classes, configures imports for the simplified
     * and standard classes, creates the binding and an empty (synchronized) script list, and applies the optional
     * classpath from the {@code groovy.classpath} engine property.
     */
    @Override
    protected void prepareInterpreter() {
        overwriteProcessorClass(Rule.class, GroovyRule.class);
        overwriteProcessorBuilderClass(ActionBuilder.class, GroovyActionBuilder.class);

        ImportCustomizer importCustomizer = new ImportCustomizer();
        getSimplifiedImportClasses()
                .forEach((interfaceClass, scriptClass) -> addImport(importCustomizer, scriptClass, interfaceClass.getSimpleName()));
        addImport(importCustomizer, BasePlugin.class, Plugin.class.getSimpleName());

        getStandardImportClasses().forEach(cls -> addImport(importCustomizer, cls));

        CompilerConfiguration configuration = new CompilerConfiguration();
        configuration.addCompilationCustomizers(importCustomizer);

        binding = createBinding();
        shell = new GroovyShell(binding, configuration);

        scripts = Collections.synchronizedList(new ArrayList<>());

        setVariable(KnowledgeBaseConstants.VAR_ENGINE_OPERATIONS, getEngineOperations());

        setClasspath(getEngineOperations() != null ? getEngineOperations().getEngine() : null);
    }

    /**
     * Creates the variable binding. Uses a ConcurrentHashMap so concurrent variable access is safe.
     *
     * @return a new binding.
     */
    protected Binding createBinding() {
        return new Binding(new ConcurrentHashMap<>());
    }

    /**
     * Clears the interpreter state.
     */
    @Override
    public void onClear() {
        synchronized (interpteterSynchro) {
            invalidateCache();

            if (scripts != null) {
                scripts.clear();
                scripts = null;
            }

            shell = null;
            binding = null;
        }
    }

    /**
     * Adds the configured classpath entries (if any) to the shell's class loader.
     *
     * @param engine the engine; ignored when {@code null}.
     */
    private void setClasspath(SpongeEngine engine) {
        // Delegate to createClasspath() so the classpath property is parsed in exactly one place.
        createClasspath(engine).forEach(path -> shell.getClassLoader().addClasspath(path));
    }

    /**
     * Sets a variable in the binding and propagates it to every loaded script.
     *
     * @param name the variable name.
     * @param value the variable value.
     */
    @Override
    public void setVariable(String name, Object value) {
        try {
            binding.setProperty(name, value);
            scripts.forEach(script -> script.setProperty(name, value));
        } catch (Throwable e) {
            throw SpongeUtils.wrapException(this, e);
        }
    }

    /**
     * Result {@code null} means that there is no variable. Result other than {@code null} means that there is a variable (that may possibly
     * be {@code null}).
     *
     * @param name the name of the variable.
     * @return a holder for a variable.
     */
    protected Mutable<Object> doGetVariable(String name) {
        // First look for the variable as a property of any loaded script.
        List<Object> variables =
                scripts.stream().filter(script -> script.getMetaClass().hasProperty(script.getMetaClass().getTheClass(), name) != null)
                        .map(script -> script.getProperty(name)).collect(Collectors.toList());

        if (variables.isEmpty()) {
            // Fall back to the shared binding.
            try {
                return new MutableObject<>(binding.getProperty(name));
            } catch (MissingPropertyException e) {
                return null; // This means that no variable has been found!
            }
        }

        return new MutableObject<>(variables.get(0));
    }

    /**
     * Returns the value of the variable or throws an exception if the variable is not defined.
     *
     * @param name the variable name.
     * @return the variable value (possibly {@code null}).
     */
    @Override
    public Object getVariable(String name) {
        try {
            Mutable<Object> variable = doGetVariable(name);
            if (variable == null) {
                throw new SpongeException("Variable '" + name + "' not found in any of the scripts");
            }

            return variable.getValue();
        } catch (Throwable e) {
            throw SpongeUtils.wrapException("getVariable", this, e);
        }
    }

    /**
     * Returns {@code true} if the variable exists (even with a {@code null} value).
     *
     * @param name the variable name.
     * @return {@code true} if the variable exists.
     */
    @Override
    public boolean hasVariable(String name) {
        try {
            return doGetVariable(name) != null;
        } catch (Throwable e) {
            throw SpongeUtils.wrapException("hasVariable", this, e);
        }
    }

    /**
     * Adds import from the package.
     *
     * @param importCustomizer Groovy import customizer.
     * @param clazz class to be imported.
     */
    protected void addImport(ImportCustomizer importCustomizer, Class<?> clazz) {
        addImport(importCustomizer, clazz, clazz.getSimpleName());
    }

    /**
     * Adds an aliased import.
     *
     * @param importCustomizer Groovy import customizer.
     * @param clazz class to be imported.
     * @param alias the alias under which the class is visible in scripts.
     */
    protected void addImport(ImportCustomizer importCustomizer, Class<?> clazz, String alias) {
        importCustomizer.addImport(alias, clazz.getName());
    }

    /**
     * Invokes the named function if it is defined in any script; returns the default value otherwise.
     *
     * @param name the function name.
     * @param defaultValue the value returned when the function is not defined.
     * @return the function result or the default value.
     */
    @SuppressWarnings("unchecked")
    @Override
    public <T> T invokeOptionalFunction(String name, T defaultValue) {
        try {
            return (T) doInvokeFunction(name, true, defaultValue, null);
        } catch (Throwable e) {
            throw SpongeUtils.wrapException(name, this, e);
        }
    }

    /**
     * Invokes the named function, throwing an exception if it is not defined in any script.
     *
     * @param name the function name.
     * @param cls the expected result class.
     * @param args the function arguments.
     * @return the function result.
     */
    @SuppressWarnings("unchecked")
    @Override
    public <T> T invokeFunction(String name, Class<T> cls, Object... args) {
        try {
            return (T) doInvokeFunction(name, false, null, args);
        } catch (Throwable e) {
            throw SpongeUtils.wrapException(name, this, e);
        }
    }

    /**
     * Looks up the named function in the loaded scripts and invokes the first match. Because doLoad() prepends new
     * scripts, the first match corresponds to the most recently defined function of that name.
     *
     * @param name the function name.
     * @param optional if {@code true}, a missing function yields {@code defaultValue} instead of an exception.
     * @param defaultValue the value returned for a missing optional function.
     * @param args the function arguments ({@code null} is treated as no arguments).
     * @return the invocation result or the default value.
     */
    protected Object doInvokeFunction(String name, boolean optional, Object defaultValue, Object[] args) {
        Object result = null;
        boolean invoked = false;

        for (Script script : scripts) {
            MetaMethod method = script.getMetaClass().getMetaMethod(name, args != null ? args : new Object[0]);
            if (method != null) {
                if (invoked) {
                    // Invoke only the last function of the same name. This is required for compatibility with other supported
                    // scripting languages.
                    break;
                }

                result = script.invokeMethod(name, args);
                invoked = true;
            }
        }

        if (!invoked) {
            if (optional) {
                return defaultValue;
            } else {
                throw new SpongeException("Missing function '" + name + "'");
            }
        }

        return result;
    }

    @Override
    public boolean isKnowledgeBaseException(Throwable exception) {
        return SpongeExceptionUtils.containsException(exception, GroovyRuntimeException.class);
    }

    @Override
    public Throwable getJavaException(Throwable knowledgeBaseException) {
        return SpongeExceptionUtils.getException(knowledgeBaseException, GroovyRuntimeException.class);
    }

    /**
     * Returns {@code null} if not script-based processor.
     */
    @Override
    public String getScriptKnowledgeBaseProcessorClassName(Object processorClass) {
        if (processorClass instanceof GroovyClassWrapper) {
            GroovyClassWrapper classWrapper = (GroovyClassWrapper) processorClass;

            return classWrapper.getWrappedClass().getName();
        }

        return null;
    }

    /**
     * Evaluates a Groovy expression in the shell.
     *
     * @param code the Groovy code.
     * @return the evaluation result.
     */
    @SuppressWarnings("unchecked")
    @Override
    public <T> T eval(String code) {
        try {
            return (T) shell.evaluate(code);
        } catch (Throwable e) {
            throw SpongeUtils.wrapException("eval", this, e);
        }
    }

    /**
     * Evaluates Groovy code from a reader.
     *
     * @param reader the source reader.
     * @param filename the logical file name (used in error reporting).
     * @return the evaluation result.
     */
    @SuppressWarnings("unchecked")
    @Override
    public <T> T eval(Reader reader, String filename) {
        try {
            return (T) shell.evaluate(reader, filename);
        } catch (Throwable e) {
            throw SpongeUtils.wrapException(filename, this, e);
        }
    }

    /**
     * Invokes a method on a Groovy object.
     *
     * @param target the Groovy object.
     * @param name the method name.
     * @param args the method arguments.
     * @return the invocation result.
     */
    @Override
    public Object invokeMethod(Object target, String name, Object... args) {
        try {
            return ((GroovyObject) target).invokeMethod(name, args);
        } catch (Throwable e) {
            throw SpongeUtils.wrapException(target + "." + name, this, e);
        }
    }

    @Override
    protected ScriptKnowledgeBaseInterpreter createInterpreterInstance(SpongeEngine engine, KnowledgeBase knowledgeBase) {
        return new GroovyKnowledgeBaseInterpreter(engine, knowledgeBase);
    }

    /**
     * Clears the loaded script list before the base class reloads the knowledge base scripts.
     */
    @Override
    protected void doReload(List<KnowledgeBaseScript> scripts) {
        if (this.scripts != null) {
            this.scripts.clear();
        }

        super.doReload(scripts);
    }

    /**
     * Parses and runs a single knowledge base script.
     */
    @Override
    protected void doLoad(Reader reader, String name) {
        Script script = shell.parse(reader, name);
        script.setBinding(binding);
        script.run();

        // Add the last script as the first.
        scripts.add(0, script);
    }

    /**
     * Builds the classpath entry list from the {@code groovy.classpath} engine property.
     *
     * @param engine the engine; {@code null} yields an empty list.
     * @return the (possibly empty) list of classpath entries.
     */
    private List<String> createClasspath(SpongeEngine engine) {
        List<String> result = new ArrayList<>();
        if (engine != null) {
            String classpath = getEngineOperations().getProperty(PROP_CLASSPATH, null);
            if (classpath != null) {
                result.addAll(Arrays.asList(StringUtils.split(classpath, PROP_PATH_SEPARATOR)));
            }
        }

        return result;
    }

    /**
     * Reloads the given class through the shell's class loader.
     *
     * @param clazz the class to reload.
     */
    public void reloadClass(Class<?> clazz) {
        try {
            shell.getClassLoader().loadClass(clazz.getName());
        } catch (ClassNotFoundException e) {
            throw SpongeUtils.wrapException(this, e);
        }
    }

    /**
     * Reloads and runs the named script using a fresh GroovyScriptEngine that shares the shell's class loader.
     *
     * @param scriptName the script name.
     * @return the reloaded script.
     */
    public Script reloadScript(String scriptName) {
        try {
            invalidateCache();

            GroovyScriptEngine groovy = new GroovyScriptEngine(createClasspath(getEngineOperations().getEngine()).toArray(new String[0]),
                    shell.getClassLoader());
            Script script = groovy.createScript(scriptName, binding);
            script.run();

            return script;
        } catch (IOException | ResourceException | ScriptException e) {
            throw SpongeUtils.wrapException(this, e);
        }
    }

    @Override
    public void scanToAutoEnable() {
        List<String> autoEnabled = new ArrayList<>();

        // Java-based processor classes (not returned by shell.getClassLoader().getLoadedClasses()) are not auto-enabled.
        Stream.of(shell.getClassLoader().getLoadedClasses()).forEachOrdered(cls -> {
            if (getProcessorClasses().values().stream().filter(processorClass -> ClassUtils.isAssignable(cls, processorClass)).findFirst()
                    .isPresent()) {
                String name = cls.getName();
                if (!isProcessorAbstract(name)) {
                    autoEnabled.add(name);
                    ((GroovyKnowledgeBaseEngineOperations) getEngineOperations()).enable(cls);
                }
            }
        });

        if (logger.isDebugEnabled() && !autoEnabled.isEmpty()) {
            logger.debug("Auto-enabling: {}", autoEnabled);
        }
    }

    @SuppressWarnings({ "unchecked" })
    @Override
    protected <T> ScriptClassInstanceProvider<T> createScriptClassInstancePovider() {
        return new CachedScriptClassInstancePovider<Script, T>(getEngineOperations().getEngine(), (expression) -> shell.parse(expression),
                "new %s()", (script, javaClass) -> (T) script.run());
    }
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.usages.impl; import com.intellij.find.FindManager; import com.intellij.icons.AllIcons; import com.intellij.ide.*; import com.intellij.ide.actions.CloseTabToolbarAction; import com.intellij.navigation.NavigationItem; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.ide.CopyPasteManager; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.Task; import com.intellij.openapi.progress.util.ProgressWrapper; import com.intellij.openapi.progress.util.TooManyUsagesStatus; import com.intellij.openapi.project.IndexNotReadyException; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.ui.SimpleToolWindowPanel; import com.intellij.openapi.ui.Splitter; import com.intellij.openapi.util.*; import com.intellij.openapi.vfs.ReadonlyStatusHandler; import com.intellij.openapi.vfs.VfsUtilCore; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.pom.Navigatable; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.SmartPointerManager; import com.intellij.psi.SmartPsiElementPointer; 
import com.intellij.psi.impl.PsiDocumentManagerBase; import com.intellij.ui.*; import com.intellij.ui.components.JBTabbedPane; import com.intellij.ui.content.Content; import com.intellij.ui.treeStructure.Tree; import com.intellij.usageView.UsageInfo; import com.intellij.usageView.UsageViewBundle; import com.intellij.usageView.UsageViewManager; import com.intellij.usages.*; import com.intellij.usages.rules.*; import com.intellij.util.*; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.Convertor; import com.intellij.util.containers.TransferToEDTQueue; import com.intellij.util.enumeration.EmptyEnumeration; import com.intellij.util.messages.MessageBusConnection; import com.intellij.util.ui.DialogUtil; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.tree.TreeUtil; import gnu.trove.THashSet; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.event.*; import javax.swing.plaf.TreeUI; import javax.swing.plaf.basic.BasicTreeUI; import javax.swing.tree.*; import java.awt.*; import java.awt.datatransfer.StringSelection; import java.awt.event.*; import java.util.*; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; /** * @author max */ public class UsageViewImpl implements UsageView, UsageModelTracker.UsageModelTrackerListener { @NonNls public static final String SHOW_RECENT_FIND_USAGES_ACTION_ID = "UsageView.ShowRecentFindUsages"; private final UsageNodeTreeBuilder myBuilder; private final MyPanel myRootPanel; @NotNull private final JTree myTree; private Content myContent; private final UsageViewPresentation myPresentation; private final UsageTarget[] myTargets; private final Factory<UsageSearcher> myUsageSearcherFactory; private final Project myProject; private volatile boolean mySearchInProgress = true; private final ExporterToTextFile 
myTextFileExporter = new ExporterToTextFile(this); private final Alarm myUpdateAlarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD); private final UsageModelTracker myModelTracker; private final Map<Usage, UsageNode> myUsageNodes = new ConcurrentHashMap<Usage, UsageNode>(); public static final UsageNode NULL_NODE = new UsageNode(NullUsage.INSTANCE, new UsageViewTreeModelBuilder(new UsageViewPresentation(), UsageTarget.EMPTY_ARRAY)); private final ButtonPanel myButtonPanel = new ButtonPanel(); private volatile boolean isDisposed; private volatile boolean myChangesDetected = false; public static final Comparator<Usage> USAGE_COMPARATOR = new Comparator<Usage>() { @Override public int compare(final Usage o1, final Usage o2) { if (o1 == o2) return 0; if (o1 == NULL_NODE) return -1; if (o2 == NULL_NODE) return 1; if (o1 instanceof Comparable && o2 instanceof Comparable) { final int selfcompared = ((Comparable<Usage>)o1).compareTo(o2); if (selfcompared != 0) return selfcompared; if (o1 instanceof UsageInFile && o2 instanceof UsageInFile) { UsageInFile u1 = (UsageInFile)o1; UsageInFile u2 = (UsageInFile)o2; VirtualFile f1 = u1.getFile(); VirtualFile f2 = u2.getFile(); if (f1 != null && f1.isValid() && f2 != null && f2.isValid()) { return f1.getPresentableUrl().compareTo(f2.getPresentableUrl()); } } return 0; } return o1.toString().compareTo(o2.toString()); } }; @NonNls private static final String HELP_ID = "ideaInterface.find"; private UsageContextPanel myCurrentUsageContextPanel; private List<UsageContextPanel.Provider> myUsageContextPanelProviders; private UsageContextPanel.Provider myCurrentUsageContextProvider; private JPanel myCentralPanel; private final GroupNode myRoot; private final UsageViewTreeModelBuilder myModel; private final Object lock = new Object(); private Splitter myPreviewSplitter; private volatile ProgressIndicator associatedProgress; // the progress that current find usages is running under // true if usages tree is currently expanding // (either at the 
end of find usages thanks to the 'expand usages after find' setting or // because the user pressed 'expand all' button. During this, some ugly hacks applied // to speed up the expanding (see getExpandedDescendants() here and UsageViewTreeCellRenderer.customizeCellRenderer()) private boolean expandingAll; private final UsageViewTreeCellRenderer myUsageViewTreeCellRenderer; UsageViewImpl(@NotNull final Project project, @NotNull UsageViewPresentation presentation, @NotNull UsageTarget[] targets, Factory<UsageSearcher> usageSearcherFactory) { myPresentation = presentation; myTargets = targets; myUsageSearcherFactory = usageSearcherFactory; myProject = project; myTree = new Tree() { { ToolTipManager.sharedInstance().registerComponent(this); } @Override public String getToolTipText(MouseEvent e) { TreePath path = getPathForLocation(e.getX(), e.getY()); if (path != null) { if (getCellRenderer() instanceof UsageViewTreeCellRenderer) { return UsageViewTreeCellRenderer.getTooltipFromPresentation(path.getLastPathComponent()); } } return null; } @Override public boolean isPathEditable(final TreePath path) { return path.getLastPathComponent() instanceof UsageViewTreeModelBuilder.TargetsRootNode; } // hack to avoid quadratic expandAll() @Override public Enumeration<TreePath> getExpandedDescendants(TreePath parent) { return expandingAll ? 
EmptyEnumeration.<TreePath>getInstance() : super.getExpandedDescendants(parent); } }; myRootPanel = new MyPanel(myTree); Disposer.register(this, myRootPanel); myModelTracker = new UsageModelTracker(project); Disposer.register(this, myModelTracker); myModel = new UsageViewTreeModelBuilder(myPresentation, targets); myRoot = (GroupNode)myModel.getRoot(); myBuilder = new UsageNodeTreeBuilder(myTargets, getActiveGroupingRules(project), getActiveFilteringRules(project), myRoot, myProject); final MessageBusConnection messageBusConnection = myProject.getMessageBus().connect(this); messageBusConnection.subscribe(UsageFilteringRuleProvider.RULES_CHANGED, new Runnable() { @Override public void run() { rulesChanged(); } }); myUsageViewTreeCellRenderer = new UsageViewTreeCellRenderer(this); if (!myPresentation.isDetachedMode()) { UIUtil.invokeLaterIfNeeded(new Runnable() { @Override public void run() { // lock here to avoid concurrent execution of this init and dispose in other thread synchronized (lock) { if (isDisposed) return; myTree.setModel(myModel); myRootPanel.setLayout(new BorderLayout()); SimpleToolWindowPanel toolWindowPanel = new SimpleToolWindowPanel(false, true); myRootPanel.add(toolWindowPanel, BorderLayout.CENTER); JPanel toolbarPanel = new JPanel(new BorderLayout()); toolbarPanel.add(createActionsToolbar(), BorderLayout.WEST); toolbarPanel.add(createFiltersToolbar(), BorderLayout.CENTER); toolWindowPanel.setToolbar(toolbarPanel); myCentralPanel = new JPanel(new BorderLayout()); setupCentralPanel(); initTree(); toolWindowPanel.setContent(myCentralPanel); myTree.setCellRenderer(myUsageViewTreeCellRenderer); collapseAll(); myModelTracker.addListener(UsageViewImpl.this); if (myPresentation.isShowCancelButton()) { addButtonToLowerPane(new Runnable() { @Override public void run() { close(); } }, UsageViewBundle.message("usage.view.cancel.button")); } myTree.getSelectionModel().addTreeSelectionListener(new TreeSelectionListener() { @Override public void 
valueChanged(final TreeSelectionEvent e) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { if (isDisposed || myProject.isDisposed()) return; updateOnSelectionChanged(); } }); } }); } } }); } myTransferToEDTQueue = new TransferToEDTQueue<Runnable>("Insert usages", new Processor<Runnable>() { @Override public boolean process(Runnable runnable) { runnable.run(); return true; } }, new Condition<Object>() { @Override public boolean value(Object o) { return isDisposed || project.isDisposed(); } },200); } protected boolean searchHasBeenCancelled() { ProgressIndicator progress = associatedProgress; return progress != null && progress.isCanceled(); } protected void cancelCurrentSearch() { ProgressIndicator progress = associatedProgress; if (progress != null) { ProgressWrapper.unwrap(progress).cancel(); } } private void clearRendererCache() { // clear renderer cache of node preferred size TreeUI ui = myTree.getUI(); if (ui instanceof BasicTreeUI) { AbstractLayoutCache treeState = ReflectionUtil.getField(BasicTreeUI.class, ui, AbstractLayoutCache.class, "treeState"); Rectangle visibleRect = myTree.getVisibleRect(); int rowForLocation = myTree.getClosestRowForLocation(0, visibleRect.y); int visibleRowCount = getVisibleRowCount(); for (int i = rowForLocation + visibleRowCount + 1; i >= rowForLocation; i--) { final TreePath eachPath = myTree.getPathForRow(i); if (eachPath == null) continue; treeState.invalidatePathBounds(eachPath); } myTree.repaint(visibleRect); } else { myTree.setCellRenderer(myUsageViewTreeCellRenderer); } } private int getVisibleRowCount() { // myTree.getVisibleRowCount returns 20 return TreeUtil.getVisibleRowCountForFixedRowHeight(myTree); } private void setupCentralPanel() { myCentralPanel.removeAll(); disposeUsageContextPanels(); JScrollPane treePane = ScrollPaneFactory.createScrollPane(myTree); // add reaction to scrolling: // since the UsageViewTreeCellRenderer ignores invisible nodes (outside the viewport), their preferred size 
is incorrect
// and we need to recalculate them when the node scrolled into the visible rectangle
treePane.getViewport().addChangeListener(new ChangeListener() {
      @Override
      public void stateChanged(ChangeEvent e) {
        clearRendererCache();
      }
    });
    // expanding/collapsing changes which rows are visible, so drop cached renderings then too
    myTree.addTreeExpansionListener(new TreeExpansionListener() {
      @Override
      public void treeExpanded(TreeExpansionEvent event) {
        clearRendererCache();
      }

      @Override
      public void treeCollapsed(TreeExpansionEvent event) {
        clearRendererCache();
      }
    });

    myPreviewSplitter = new Splitter(false, 0.5f, 0.1f, 0.9f);
    myPreviewSplitter.setFirstComponent(treePane);
    myCentralPanel.add(myPreviewSplitter, BorderLayout.CENTER);

    if (UsageViewSettings.getInstance().IS_PREVIEW_USAGES) {
      myPreviewSplitter.setProportion(UsageViewSettings.getInstance().PREVIEW_USAGES_SPLITTER_PROPORTIONS);
      treePane.putClientProperty(UIUtil.KEEP_BORDER_SIDES, SideBorder.RIGHT);
      final JBTabbedPane tabbedPane = new JBTabbedPane(SwingConstants.BOTTOM){
        @NotNull
        @Override
        protected Insets getInsetsForTabComponent() {
          return new Insets(0,0,0,0);
        }
      };

      UsageContextPanel.Provider[] extensions = Extensions.getExtensions(UsageContextPanel.Provider.EP_NAME, myProject);
      // keep only context-panel providers applicable to this particular usage view
      myUsageContextPanelProviders = ContainerUtil.filter(extensions, new Condition<UsageContextPanel.Provider>() {
        @Override
        public boolean value(UsageContextPanel.Provider provider) {
          return provider.isAvailableFor(UsageViewImpl.this);
        }
      });
      for (UsageContextPanel.Provider provider : myUsageContextPanelProviders) {
        JComponent component;
        if (myCurrentUsageContextProvider == null || myCurrentUsageContextProvider == provider) {
          myCurrentUsageContextProvider = provider;
          myCurrentUsageContextPanel = provider.create(this);
          component = myCurrentUsageContextPanel.createComponent();
        }
        else {
          // only the selected tab gets a real panel; other tabs hold a placeholder
          // until selected (see tabSelected(), which rebuilds the central panel)
          component = new JLabel();
        }
        tabbedPane.addTab(provider.getTabTitle(), component);
      }
      int index = myUsageContextPanelProviders.indexOf(myCurrentUsageContextProvider);
      tabbedPane.setSelectedIndex(index);
      tabbedPane.addChangeListener(new ChangeListener() {
        @Override
        public void stateChanged(ChangeEvent e) {
          int currentIndex = tabbedPane.getSelectedIndex();
          UsageContextPanel.Provider selectedProvider = myUsageContextPanelProviders.get(currentIndex);
          if (selectedProvider != myCurrentUsageContextProvider) {
            tabSelected(selectedProvider);
          }
        }
      });
      tabbedPane.setBorder(IdeBorderFactory.createBorder(SideBorder.LEFT));
      myPreviewSplitter.setSecondComponent(tabbedPane);
    }
    else {
      myPreviewSplitter.setProportion(1);
    }

    myCentralPanel.add(myButtonPanel, BorderLayout.SOUTH);

    myRootPanel.revalidate();
    myRootPanel.repaint();
  }

  // Switch the preview to the newly selected context tab and rebuild the central panel.
  private void tabSelected(@NotNull final UsageContextPanel.Provider provider) {
    myCurrentUsageContextProvider = provider;
    setupCentralPanel();
    updateOnSelectionChanged();
  }

  // Persist splitter proportions, then dispose the currently shown context panel, if any.
  private void disposeUsageContextPanels() {
    if (myCurrentUsageContextPanel != null) {
      saveSplitterProportions();
      Disposer.dispose(myCurrentUsageContextPanel);
      myCurrentUsageContextPanel = null;
    }
  }

  // Collects the currently active filtering rules from all registered providers.
  private static UsageFilteringRule[] getActiveFilteringRules(final Project project) {
    final UsageFilteringRuleProvider[] providers = Extensions.getExtensions(UsageFilteringRuleProvider.EP_NAME);
    List<UsageFilteringRule> list = new ArrayList<UsageFilteringRule>(providers.length);
    for (UsageFilteringRuleProvider provider : providers) {
      ContainerUtil.addAll(list, provider.getActiveRules(project));
    }
    return list.toArray(new UsageFilteringRule[list.size()]);
  }

  // Collects active grouping rules from all providers, sorted by rank; rules that
  // do not declare a rank (non-OrderableUsageGroupingRule) sort last.
  private static UsageGroupingRule[] getActiveGroupingRules(@NotNull final Project project) {
    final UsageGroupingRuleProvider[] providers = Extensions.getExtensions(UsageGroupingRuleProvider.EP_NAME);
    List<UsageGroupingRule> list = new ArrayList<UsageGroupingRule>(providers.length);
    for (UsageGroupingRuleProvider provider : providers) {
      ContainerUtil.addAll(list, provider.getActiveRules(project));
    }

    Collections.sort(list, new Comparator<UsageGroupingRule>() {
      @Override
      public int compare(final UsageGroupingRule o1, final UsageGroupingRule o2) {
        return getRank(o1) - getRank(o2);
      }

      private int getRank(final UsageGroupingRule rule) {
        if (rule instanceof OrderableUsageGroupingRule) {
          return ((OrderableUsageGroupingRule)rule).getRank();
        }

        return Integer.MAX_VALUE;
      }
    });

    return list.toArray(new UsageGroupingRule[list.size()]);
  }

  @Override
  public void modelChanged(boolean isPropertyChange) {
    if (!isPropertyChange) {
      // remembered so that "perform operation" buttons can warn/offer a re-run later
      myChangesDetected = true;
    }
    updateLater();
  }

  // One-time configuration of the usages tree: actions, navigation, expansion handling, speed search.
  private void initTree() {
    myTree.setRootVisible(false);
    myTree.setShowsRootHandles(true);
    SmartExpander.installOn(myTree);
    TreeUtil.installActions(myTree);
    EditSourceOnDoubleClickHandler.install(myTree);
    // ENTER navigates to the usage (or to the node's navigatable for leaf non-usage nodes)
    myTree.addKeyListener(new KeyAdapter() {
      @Override
      public void keyPressed(KeyEvent e) {
        if (KeyEvent.VK_ENTER == e.getKeyCode()) {
          TreePath leadSelectionPath = myTree.getLeadSelectionPath();
          if (leadSelectionPath == null) return;

          DefaultMutableTreeNode node = (DefaultMutableTreeNode)leadSelectionPath.getLastPathComponent();
          if (node instanceof UsageNode) {
            final Usage usage = ((UsageNode)node).getUsage();
            usage.navigate(false);
            usage.highlightInEditor();
          }
          else if (node.isLeaf()) {
            Navigatable navigatable = getNavigatableForNode(node);
            if (navigatable != null && navigatable.canNavigate()) {
              navigatable.navigate(false);
            }
          }
        }
      }
    });

    TreeUtil.selectFirstNode(myTree);
    PopupHandler.installPopupHandler(myTree, IdeActions.GROUP_USAGE_VIEW_POPUP, ActionPlaces.USAGE_VIEW_POPUP);

    myTree.addTreeExpansionListener(new TreeExpansionListener() {
      @Override
      public void treeExpanded(TreeExpansionEvent event) {
        TreePath path = event.getPath();
        Object component = path.getLastPathComponent();
        if (!(component instanceof Node)) return;
        Node node = (Node)component;
        // lazily refresh nodes that just became visible; skipped during bulk expand-all
        if (!expandingAll && node.needsUpdate()) {
          checkNodeValidity(node, path);
        }
      }

      @Override
      public void treeCollapsed(TreeExpansionEvent event) {
      }
    });

    TreeUIHelper.getInstance().installTreeSpeedSearch(myTree, new Convertor<TreePath, String>() {
      @Override
      public String convert(TreePath o) {
        Object value = o.getLastPathComponent();
        TreeCellRenderer renderer =
          myTree.getCellRenderer();
        if (renderer instanceof UsageViewTreeCellRenderer) {
          UsageViewTreeCellRenderer coloredRenderer = (UsageViewTreeCellRenderer)renderer;
          return coloredRenderer.getPlainTextForNode(value);
        }
        return value == null ? null : value.toString();
      }
    }, true);
  }

  // Toolbar with the main usage-view actions (re-run, close, expand/collapse, export, ...).
  @NotNull
  private JComponent createActionsToolbar() {
    DefaultActionGroup group = new DefaultActionGroup() {
      @Override
      public void update(AnActionEvent e) {
        super.update(e);
        // keep the lower-pane buttons' enabled state in sync with the search state
        myButtonPanel.update();
      }

      @Override
      public boolean isDumbAware() {
        return true;
      }
    };

    AnAction[] actions = createActions();
    for (final AnAction action : actions) {
      if (action != null) {
        group.add(action);
      }
    }
    return toUsageViewToolbar(group);
  }

  @NotNull
  private JComponent toUsageViewToolbar(@NotNull DefaultActionGroup group) {
    ActionToolbar actionToolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.USAGE_VIEW_TOOLBAR, group, false);
    actionToolbar.setTargetComponent(myRootPanel);
    return actionToolbar.getComponent();
  }

  // Toolbar with grouping and filtering toggles plus the preview/sort options.
  @NotNull
  private JComponent createFiltersToolbar() {
    final DefaultActionGroup group = new DefaultActionGroup();

    final AnAction[] groupingActions = createGroupingActions();
    for (AnAction groupingAction : groupingActions) {
      group.add(groupingAction);
    }

    addFilteringActions(group);
    group.add(new PreviewUsageAction(this));

    group.add(new SortMembersAlphabeticallyAction(this));
    return toUsageViewToolbar(group);
  }

  // Adds merge-duplicate-lines (when available) and provider-contributed filtering actions.
  public void addFilteringActions(@NotNull DefaultActionGroup group) {
    final JComponent component = getComponent();

    if (getPresentation().isMergeDupLinesAvailable()) {
      final MergeDupLines mergeDupLines = new MergeDupLines();
      mergeDupLines.registerCustomShortcutSet(mergeDupLines.getShortcutSet(), component, this);
      group.add(mergeDupLines);
    }

    final UsageFilteringRuleProvider[] providers = Extensions.getExtensions(UsageFilteringRuleProvider.EP_NAME);
    for (UsageFilteringRuleProvider provider : providers) {
      AnAction[] actions = provider.createFilteringActions(this);
      for (AnAction action : actions) {
        group.add(action);
      }
    }
  }

  // Ties the disposable's lifetime to this view's lifetime.
  public void scheduleDisposeOnClose(@NotNull Disposable disposable) {
    Disposer.register(this, disposable);
  }

  // Builds the action list for the main toolbar; null entries are filtered out by the caller.
  @NotNull
  private AnAction[] createActions() {
    final TreeExpander treeExpander = new TreeExpander() {
      @Override
      public void expandAll() {
        UsageViewImpl.this.expandAll();
        UsageViewSettings.getInstance().setExpanded(true);
      }

      @Override
      public boolean canExpand() {
        return true;
      }

      @Override
      public void collapseAll() {
        UsageViewImpl.this.collapseAll();
        UsageViewSettings.getInstance().setExpanded(false);
      }

      @Override
      public boolean canCollapse() {
        return true;
      }
    };

    CommonActionsManager actionsManager = CommonActionsManager.getInstance();

    final JComponent component = getComponent();

    final AnAction expandAllAction = actionsManager.createExpandAllAction(treeExpander, component);
    final AnAction collapseAllAction = actionsManager.createCollapseAllAction(treeExpander, component);

    // shortcuts must be unregistered when the view closes, or they would leak on the component
    scheduleDisposeOnClose(new Disposable() {
      @Override
      public void dispose() {
        expandAllAction.unregisterCustomShortcutSet(component);
        collapseAllAction.unregisterCustomShortcutSet(component);
      }
    });

    return new AnAction[] {
      canShowSettings() ?
      showSettings() : null,
      ActionManager.getInstance().getAction("UsageView.Rerun"),
      new CloseAction(),
      ActionManager.getInstance().getAction("PinToolwindowTab"),
      createRecentFindUsagesAction(),
      expandAllAction,
      collapseAllAction,
      actionsManager.createPrevOccurenceAction(myRootPanel),
      actionsManager.createNextOccurenceAction(myRootPanel),
      actionsManager.installAutoscrollToSourceHandler(myProject, myTree, new MyAutoScrollToSourceOptionProvider()),
      actionsManager.createExportToTextFileAction(myTextFileExporter),
      actionsManager.createHelpAction(HELP_ID)
    };
  }

  // Settings are only offered when the primary target can show a find-usages dialog.
  private boolean canShowSettings() {
    if (myTargets.length == 0) return false;
    NavigationItem target = myTargets[0];
    return target instanceof ConfigurableUsageTarget;
  }

  // Action that opens the find-usages settings dialog for the current targets.
  @NotNull
  private AnAction showSettings() {
    final ConfigurableUsageTarget configurableUsageTarget = getConfigurableTarget(myTargets);
    String description = configurableUsageTarget == null ? "Show find usages settings dialog" : "Show settings for "+configurableUsageTarget.getLongDescriptiveName();
    return new AnAction("Settings...", description, AllIcons.General.ProjectSettings) {
      {
        // instance initializer: bind the target-specific (or the global) shortcut to this action
        KeyboardShortcut shortcut = configurableUsageTarget == null ? getShowUsagesWithSettingsShortcut() : configurableUsageTarget.getShortcut();
        if (shortcut != null) {
          registerCustomShortcutSet(new CustomShortcutSet(shortcut), getComponent());
        }
      }

      @Override
      public void actionPerformed(AnActionEvent e) {
        FindManager.getInstance(getProject()).showSettingsAndFindUsages(myTargets);
      }
    };
  }

  // Returns the first target when it is configurable, otherwise null.
  private static ConfigurableUsageTarget getConfigurableTarget(@NotNull UsageTarget[] targets) {
    ConfigurableUsageTarget configurableUsageTarget = null;
    if (targets.length != 0) {
      NavigationItem target = targets[0];
      if (target instanceof ConfigurableUsageTarget) {
        configurableUsageTarget = (ConfigurableUsageTarget)target;
      }
    }
    return configurableUsageTarget;
  }

  @NotNull
  private AnAction createRecentFindUsagesAction() {
    AnAction action = ActionManager.getInstance().getAction(SHOW_RECENT_FIND_USAGES_ACTION_ID);
    action.registerCustomShortcutSet(action.getShortcutSet(), getComponent());
    return action;
  }

  // Grouping toggles contributed by all UsageGroupingRuleProvider extensions.
  @NotNull
  private AnAction[] createGroupingActions() {
    final UsageGroupingRuleProvider[] providers = Extensions.getExtensions(UsageGroupingRuleProvider.EP_NAME);
    List<AnAction> list = new ArrayList<AnAction>(providers.length);
    for (UsageGroupingRuleProvider provider : providers) {
      ContainerUtil.addAll(list, provider.createGroupingActions(this));
    }
    return list.toArray(new AnAction[list.size()]);
  }

  // Rebuilds the whole tree after grouping/filtering rules changed, preserving
  // expansion/selection state and the excluded set as much as possible. EDT only.
  private void rulesChanged() {
    ApplicationManager.getApplication().assertIsDispatchThread();
    final List<UsageState> states = new ArrayList<UsageState>();
    captureUsagesExpandState(new TreePath(myTree.getModel().getRoot()), states);
    final List<Usage> allUsages = new ArrayList<Usage>(myUsageNodes.keySet());
    Collections.sort(allUsages, USAGE_COMPARATOR);
    final Set<Usage> excludedUsages = getExcludedUsages();
    reset();
    myBuilder.setGroupingRules(getActiveGroupingRules(myProject));
    myBuilder.setFilteringRules(getActiveFilteringRules(myProject));
    ApplicationManager.getApplication().runReadAction(new Runnable() {
      @Override
      public void run() {
        for (Usage usage : allUsages) {
          if
          (!usage.isValid()) {
            continue;
          }
          if (usage instanceof MergeableUsage) {
            // un-merge previously merged usages so they can be re-merged under the new rules
            ((MergeableUsage)usage).reset();
          }
          appendUsage(usage);
        }
      }
    });
    excludeUsages(excludedUsages.toArray(new Usage[excludedUsages.size()]));
    if (myCentralPanel != null) {
      setupCentralPanel();
    }
    SwingUtilities.invokeLater(new Runnable() {
      @Override
      public void run() {
        if (isDisposed) return;
        restoreUsageExpandState(states);
        updateImmediately();
      }
    });
  }

  // Records, for every usage under the expanded path, whether it is currently selected.
  private void captureUsagesExpandState(TreePath pathFrom, final Collection<UsageState> states) {
    if (!myTree.isExpanded(pathFrom)) {
      return;
    }
    final DefaultMutableTreeNode node = (DefaultMutableTreeNode)pathFrom.getLastPathComponent();
    final int childCount = node.getChildCount();
    for (int idx = 0; idx < childCount; idx++) {
      final TreeNode child = node.getChildAt(idx);
      if (child instanceof UsageNode) {
        final Usage usage = ((UsageNode)child).getUsage();
        states.add(new UsageState(usage, myTree.getSelectionModel().isPathSelected(pathFrom.pathByAddingChild(child))));
      }
      else {
        captureUsagesExpandState(pathFrom.pathByAddingChild(child), states);
      }
    }
  }

  private void restoreUsageExpandState(@NotNull Collection<UsageState> states) {
    //always expand the last level group
    final DefaultMutableTreeNode root = (DefaultMutableTreeNode)myTree.getModel().getRoot();
    for (int i = root.getChildCount() - 1; i >= 0; i--) {
      final DefaultMutableTreeNode child = (DefaultMutableTreeNode)root.getChildAt(i);
      if (child instanceof GroupNode){
        final TreePath treePath = new TreePath(child.getPath());
        myTree.expandPath(treePath);
      }
    }
    myTree.getSelectionModel().clearSelection();
    for (final UsageState usageState : states) {
      usageState.restore();
    }
  }

  private void expandAll() {
    // suppress the per-node validity checks done by the expansion listener during bulk expand
    expandingAll = true;
    try {
      TreeUtil.expandAll(myTree);
    }
    finally {
      expandingAll = false;
    }
  }

  private void collapseAll() {
    TreeUtil.collapseAll(myTree, 3);
    TreeUtil.expand(myTree, 2);
  }

  public DefaultMutableTreeNode getModelRoot() {
    return (DefaultMutableTreeNode)myTree.getModel().getRoot();
  }

  // Moves keyboard focus into the usages tree.
  public void select() {
    if (myTree != null) {
      myTree.requestFocusInWindow();
    }
  }

  @NotNull
  public Project getProject() {
    return myProject;
  }

  @Nullable
  public static KeyboardShortcut getShowUsagesWithSettingsShortcut() {
    return ActionManager.getInstance().getKeyboardShortcut("ShowSettingsAndFindUsages");
  }

  // Prefers the target-specific shortcut when the first target is configurable.
  static KeyboardShortcut getShowUsagesWithSettingsShortcut(@NotNull UsageTarget[] targets) {
    ConfigurableUsageTarget configurableTarget = getConfigurableTarget(targets);
    return configurableTarget == null ? getShowUsagesWithSettingsShortcut() : configurableTarget.getShortcut();
  }

  void associateProgress(@NotNull ProgressIndicator indicator) {
    associatedProgress = indicator;
  }

  // Toolbar action closing this usage view tab; hidden until the view gets its Content.
  private class CloseAction extends CloseTabToolbarAction {
    @Override
    public void update(AnActionEvent e) {
      super.update(e);
      e.getPresentation().setVisible(myContent != null);
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      close();
    }
  }

  // Toggle for merging usages that sit on the same line (Ctrl+F shortcut).
  private class MergeDupLines extends RuleAction {
    private MergeDupLines() {
      super(UsageViewImpl.this, UsageViewBundle.message("action.merge.same.line"), AllIcons.Toolbar.Filterdups);
      setShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_F, InputEvent.CTRL_DOWN_MASK)));
    }

    @Override
    protected boolean getOptionValue() {
      return UsageViewSettings.getInstance().isFilterDuplicatedLine();
    }

    @Override
    protected void setOptionValue(boolean value) {
      UsageViewSettings.getInstance().setFilterDuplicatedLine(value);
    }
  }

  public void refreshUsages() {
    reset();
    doReRun();
  }

  // Re-runs the search as a background task, re-populating this view incrementally
  // and enforcing the too-many-usages limit.
  private void doReRun() {
    final AtomicInteger usageCountWithoutDefinition = new AtomicInteger(0);
    final Project project = myProject;
    Task.Backgroundable task = new Task.Backgroundable(project, UsageViewManagerImpl.getProgressTitle(myPresentation)) {
      @Override
      public void run(@NotNull final ProgressIndicator indicator) {
        final TooManyUsagesStatus tooManyUsagesStatus = TooManyUsagesStatus.createFor(indicator);
        setSearchInProgress(true);
        associateProgress(indicator);
        myChangesDetected = false;
        UsageSearcher
        usageSearcher = myUsageSearcherFactory.create();
        usageSearcher.generate(new Processor<Usage>() {
          @Override
          public boolean process(final Usage usage) {
            if (searchHasBeenCancelled()) return false;
            TooManyUsagesStatus.getFrom(indicator).pauseProcessingIfTooManyUsages();
            // self-usages (the declaration itself) are shown but not counted towards the limit
            boolean incrementCounter = !com.intellij.usages.UsageViewManager.isSelfUsage(usage, myTargets);

            if (incrementCounter) {
              final int usageCount = usageCountWithoutDefinition.incrementAndGet();
              if (usageCount > UsageLimitUtil.USAGES_LIMIT) {
                if (tooManyUsagesStatus.switchTooManyUsagesStatus()) {
                  UsageViewManagerImpl.showTooManyUsagesWarning(project, tooManyUsagesStatus, indicator, getPresentation(), usageCountWithoutDefinition.get(), UsageViewImpl.this);
                }
              }
              ApplicationManager.getApplication().runReadAction(new Runnable() {
                @Override
                public void run() {
                  appendUsage(usage);
                }
              });
            }
            return !indicator.isCanceled();
          }
        });
        drainQueuedUsageNodes();
        setSearchInProgress(false);
      }
    };
    ProgressManager.getInstance().run(task);
  }

  // Clears all collected usages and the model; must run on the EDT.
  private void reset() {
    ApplicationManager.getApplication().assertIsDispatchThread();
    myUsageNodes.clear();
    myModel.reset();
    if (!myPresentation.isDetachedMode()) {
      SwingUtilities.invokeLater(new Runnable() {
        @Override
        public void run() {
          if (isDisposed) return;
          TreeUtil.expand(myTree, 2);
        }
      });
    }
  }

  // Queue of tree-mutation runnables batched onto the EDT.
  private final TransferToEDTQueue<Runnable> myTransferToEDTQueue;

  // Flushes all pending tree updates; must be called from a background thread.
  public void drainQueuedUsageNodes() {
    assert !ApplicationManager.getApplication().isDispatchThread() : Thread.currentThread();
    UIUtil.invokeAndWaitIfNeeded(new Runnable() {
      @Override
      public void run() {
        myTransferToEDTQueue.drain();
      }
    });
  }

  @Override
  public void appendUsage(@NotNull Usage usage) {
    doAppendUsage(usage);
  }

  // Adds the usage to the tree builder; returns the created node, or null if the
  // usage was invalid or filtered out.
  @Nullable
  public UsageNode doAppendUsage(@NotNull Usage usage) {
    // invoke in ReadAction to be sure that usages are not invalidated while the tree is being built
    ApplicationManager.getApplication().assertReadAccessAllowed();
    if (!usage.isValid()) {
      // because the view is built incrementally, the usage may be already invalid, so need to filter such cases
      return null;
    }
    UsageNode node = myBuilder.appendUsage(usage, new Consumer<Runnable>() {
      @Override
      public void consume(Runnable runnable) {
        myTransferToEDTQueue.offer(runnable);
      }
    });
    if (node != null) {
      // update and cache flags while the node is still hot
      node.update(this);
    }
    // NULL_NODE is a sentinel: the usage is known but produced no visible node
    myUsageNodes.put(usage, node == null ? NULL_NODE : node);
    return node;
  }

  @Override
  public void removeUsage(@NotNull Usage usage) {
    final UsageNode node = myUsageNodes.remove(usage);
    if (node != NULL_NODE && node != null && !myPresentation.isDetachedMode()) {
      UIUtil.invokeLaterIfNeeded(new Runnable() {
        @Override
        public void run() {
          if (isDisposed) return;
          TreeModel treeModel = myTree.getModel();
          ((DefaultTreeModel)treeModel).removeNodeFromParent(node);
          ((GroupNode)myTree.getModel().getRoot()).removeUsage(node);
        }
      });
    }
  }

  // Removes many usages at once with a single model reload instead of per-node events.
  @Override
  public void removeUsagesBulk(@NotNull Collection<Usage> usages) {
    final Set<UsageNode> nodes = new THashSet<UsageNode>(usages.size());
    for (Usage usage : usages) {
      UsageNode node = myUsageNodes.remove(usage);
      if (node != null && node != NULL_NODE) {
        nodes.add(node);
      }
    }
    if (!nodes.isEmpty() && !myPresentation.isDetachedMode()) {
      UIUtil.invokeLaterIfNeeded(new Runnable() {
        @Override
        public void run() {
          if (isDisposed) return;
          DefaultTreeModel treeModel = (DefaultTreeModel)myTree.getModel();
          for (UsageNode node : nodes) {
            MutableTreeNode parent = (MutableTreeNode)node.getParent();
            int childIndex = parent.getIndex(node);
            if (childIndex != -1) {
              parent.remove(childIndex);
            }
          }
          ((GroupNode)myTree.getModel().getRoot()).removeUsagesBulk(nodes);
          treeModel.reload();
        }
      });
    }
  }

  @Override
  public void includeUsages(@NotNull Usage[] usages) {
    List<TreeNode> nodes = new ArrayList<TreeNode>(usages.length);
    for (Usage usage : usages) {
      final UsageNode node = myUsageNodes.get(usage);
      if (node != NULL_NODE && node != null) {
        node.setUsageExcluded(false);
        nodes.add(node);
      }
    }
    updateImmediatelyNodesUpToRoot(nodes);
  }

  @Override
  public void
  excludeUsages(@NotNull Usage[] usages) {
    List<TreeNode> nodes = new ArrayList<TreeNode>(usages.length);
    for (Usage usage : usages) {
      final UsageNode node = myUsageNodes.get(usage);
      if (node != NULL_NODE && node != null) {
        node.setUsageExcluded(true);
        nodes.add(node);
      }
    }
    updateImmediatelyNodesUpToRoot(nodes);
  }

  @Override
  public void selectUsages(@NotNull Usage[] usages) {
    List<TreePath> paths = new LinkedList<TreePath>();

    for (Usage usage : usages) {
      final UsageNode node = myUsageNodes.get(usage);

      if (node != NULL_NODE && node != null) {
        paths.add(new TreePath(node.getPath()));
      }
    }

    myTree.setSelectionPaths(paths.toArray(new TreePath[paths.size()]));
    if (!paths.isEmpty()) myTree.scrollPathToVisible(paths.get(0));
  }

  @Override
  @NotNull
  public JComponent getComponent() {
    return myRootPanel;
  }

  @Override
  public int getUsagesCount() {
    return myUsageNodes.size();
  }

  void setContent(@NotNull Content content) {
    myContent = content;
    content.setDisposer(this);
  }

  private void updateImmediately() {
    if (myProject.isDisposed()) return;
    TreeNode root = (TreeNode)myTree.getModel().getRoot();
    checkNodeValidity(root, new TreePath(root));
    updateOnSelectionChanged();
  }

  // Updates the given nodes and all their ancestors up to (excluding) the root.
  private void updateImmediatelyNodesUpToRoot(@NotNull List<TreeNode> nodes) {
    if (myProject.isDisposed()) return;
    TreeNode root = (TreeNode)myTree.getModel().getRoot();
    // NOTE: the list is intentionally appended to while iterating — parents get queued for update too
    for (int i=0; i<nodes.size(); i++) {
      TreeNode node = nodes.get(i);
      if (node instanceof Node) {
        ((Node)node).update(this);
        TreeNode parent = node.getParent();
        if (parent != root && parent != null) {
          nodes.add(parent);
        }
      }
    }
    updateImmediately();
  }

  private void updateOnSelectionChanged() {
    if (myCurrentUsageContextPanel != null) {
      try {
        myCurrentUsageContextPanel.updateLayout(getSelectedUsageInfos());
      }
      catch (IndexNotReadyException ignore) {
        // indexing in progress; the layout will refresh on a later update
      }
    }
  }

  // Recursively refreshes nodes, restricted to what is (or may become) visible on screen.
  private void checkNodeValidity(@NotNull TreeNode node, @NotNull TreePath path) {
    boolean shouldCheckChildren = true;
    if (myTree.isCollapsed(path)) {
      if (node instanceof Node) {
        ((Node)node).markNeedUpdate();
      }
      shouldCheckChildren = false; // optimization: do not call expensive update() on invisible node
    }
    UsageViewTreeCellRenderer.RowLocation isVisible = myUsageViewTreeCellRenderer.isRowVisible(myTree.getRowForPath(new TreePath(((DefaultMutableTreeNode)node).getPath())), myTree.getVisibleRect());

    // if row is below visible rectangle, no sense to update it or any children
    if (shouldCheckChildren && isVisible != UsageViewTreeCellRenderer.RowLocation.AFTER_VISIBLE_RECT) {
      for (int i=0; i < node.getChildCount(); i++) {
        TreeNode child = node.getChildAt(i);
        checkNodeValidity(child, path.pathByAddingChild(child));
      }
    }

    // call update last, to let children a chance to update their cache first
    if (node instanceof Node && node != getModelRoot() && isVisible == UsageViewTreeCellRenderer.RowLocation.INSIDE_VISIBLE_RECT) {
      try {
        ((Node)node).update(this);
      }
      catch (IndexNotReadyException ignore) {
      }
    }
  }

  // Debounced (300 ms) full refresh, deferred until all documents are committed.
  private void updateLater() {
    myUpdateAlarm.cancelAllRequests();
    myUpdateAlarm.addRequest(new Runnable() {
      @Override
      public void run() {
        if (myProject.isDisposed()) return;
        PsiDocumentManagerBase documentManager = (PsiDocumentManagerBase)PsiDocumentManager.getInstance(myProject);
        documentManager.cancelAndRunWhenAllCommitted("UpdateUsageView", new Runnable() {
          @Override
          public void run() {
            updateImmediately();
          }
        });
      }
    }, 300);
  }

  @Override
  public void close() {
    cancelCurrentSearch();
    UsageViewManager.getInstance(myProject).closeContent(myContent);
  }

  private void saveSplitterProportions() {
    UsageViewSettings.getInstance().PREVIEW_USAGES_SPLITTER_PROPORTIONS = myPreviewSplitter.getProportion();
  }

  @Override
  public void dispose() {
    disposeUsageContextPanels();
    synchronized (lock) {
      isDisposed = true;
      ToolTipManager.sharedInstance().unregisterComponent(myTree);
      myModelTracker.removeListener(this);
      myUpdateAlarm.cancelAllRequests();
    }
    disposeSmartPointers();
  }

  // Releases smart pointers held by usage-info adapters so PSI can be garbage collected.
  private void disposeSmartPointers() {
    SmartPointerManager pointerManager = SmartPointerManager.getInstance(getProject());
    for (Usage
    usage : myUsageNodes.keySet()) {
      if (usage instanceof UsageInfo2UsageAdapter) {
        SmartPsiElementPointer<?> pointer = ((UsageInfo2UsageAdapter)usage).getUsageInfo().getSmartPointer();
        pointerManager.removePointer(pointer);
      }
    }
  }

  @Override
  public boolean isSearchInProgress() {
    return mySearchInProgress;
  }

  // When the search finishes, select the first usage node unless the user already
  // moved the selection, and optionally expand the tree (bounded to avoid huge expands).
  public void setSearchInProgress(boolean searchInProgress) {
    mySearchInProgress = searchInProgress;
    if (!myPresentation.isDetachedMode()) {
      myTransferToEDTQueue.offer(new Runnable() {
        @Override
        public void run() {
          if (isDisposed) return;
          final UsageNode firstUsageNode = myModel.getFirstUsageNode();
          if (firstUsageNode == null) return;

          Node node = getSelectedNode();
          if (node != null && !Comparing.equal(new TreePath(node.getPath()), TreeUtil.getFirstNodePath(myTree))) {
            // user has selected node already
            return;
          }
          showNode(firstUsageNode);
          if (UsageViewSettings.getInstance().isExpanded() && myUsageNodes.size() < 10000) {
            expandAll();
          }
        }
      });
    }
  }

  public boolean isDisposed() {
    return isDisposed;
  }

  // Expands the node's parent and selects the node, asynchronously on the EDT.
  private void showNode(@NotNull final UsageNode node) {
    if (!myPresentation.isDetachedMode()) {
      UIUtil.invokeLaterIfNeeded(new Runnable() {
        @Override
        public void run() {
          if (isDisposed) return;
          TreePath usagePath = new TreePath(node.getPath());
          myTree.expandPath(usagePath.getParentPath());
          TreeUtil.selectPath(myTree, usagePath);
        }
      });
    }
  }

  @Override
  public void addButtonToLowerPane(@NotNull Runnable runnable, @NotNull String text) {
    int index = myButtonPanel.getComponentCount();
    // keep the cancel button last on non-Mac platforms
    if (!SystemInfo.isMac && index > 0 && myPresentation.isShowCancelButton()) index--;
    myButtonPanel.addButtonRunnable(index, runnable, text);
  }

  @Override
  public void addButtonToLowerPane(@NotNull final Runnable runnable, @NotNull String text, char mnemonic) {
    // implemented method is deprecated, so, it just calls non-deprecated overloading one
    addButtonToLowerPane(runnable, text);
  }

  @Override
  public void addPerformOperationAction(@NotNull final Runnable processRunnable, final String commandName, final String cannotMakeString, @NotNull String shortDescription) {
    addPerformOperationAction(processRunnable, commandName, cannotMakeString, shortDescription, true);
  }

  @Override
  public void addPerformOperationAction(@NotNull Runnable processRunnable, String commandName, String cannotMakeString, @NotNull String shortDescription, boolean checkReadOnlyStatus) {
    addButtonToLowerPane(newPerformOperationRunnable(processRunnable, commandName, cannotMakeString, checkReadOnlyStatus), shortDescription);
  }

  public MyPerformOperationRunnable newPerformOperationRunnable(Runnable processRunnable, String commandName, String cannotMakeString, boolean checkReadOnlyStatus) {
    return new MyPerformOperationRunnable(cannotMakeString, processRunnable, commandName, checkReadOnlyStatus);
  }

  private boolean allTargetsAreValid() {
    for (UsageTarget target : myTargets) {
      if (!target.isValid()) {
        return false;
      }
    }

    return true;
  }

  @NotNull
  @Override
  public UsageViewPresentation getPresentation() {
    return myPresentation;
  }

  public boolean canPerformReRun() {
    return myUsageSearcherFactory != null && allTargetsAreValid();
  }

  // Asks the platform to make all files containing non-excluded read-only usages writable.
  private boolean checkReadonlyUsages() {
    final Set<VirtualFile> readOnlyUsages = getReadOnlyUsagesFiles();

    return readOnlyUsages.isEmpty() ||
           !ReadonlyStatusHandler.getInstance(myProject).ensureFilesWritable(VfsUtilCore.toVirtualFileArray(readOnlyUsages)).hasReadonlyFiles();
  }

  @NotNull
  private Set<Usage> getReadOnlyUsages() {
    final Set<Usage> result = new THashSet<Usage>();
    final Set<Map.Entry<Usage,UsageNode>> usages = myUsageNodes.entrySet();
    for (Map.Entry<Usage, UsageNode> entry : usages) {
      Usage usage = entry.getKey();
      UsageNode node = entry.getValue();
      if (node != null && node != NULL_NODE && !node.isExcluded() && usage.isReadOnly()) {
        result.add(usage);
      }
    }
    return result;
  }

  // Files of the read-only usages plus the files of all targets.
  @NotNull
  private Set<VirtualFile> getReadOnlyUsagesFiles() {
    Set<Usage> usages = getReadOnlyUsages();
    Set<VirtualFile> result = new THashSet<VirtualFile>();
    for (Usage usage : usages) {
      if (usage instanceof UsageInFile)
      {
        UsageInFile usageInFile = (UsageInFile)usage;
        VirtualFile file = usageInFile.getFile();
        if (file != null) result.add(file);
      }

      if (usage instanceof UsageInFiles) {
        UsageInFiles usageInFiles = (UsageInFiles)usage;
        ContainerUtil.addAll(result, usageInFiles.getFiles());
      }
    }
    for (UsageTarget target : myTargets) {
      VirtualFile[] files = target.getFiles();
      if (files == null) continue;
      ContainerUtil.addAll(result, files);
    }
    return result;
  }

  @Override
  @NotNull
  public Set<Usage> getExcludedUsages() {
    Set<Usage> result = new THashSet<Usage>();
    for (Map.Entry<Usage, UsageNode> entry : myUsageNodes.entrySet()) {
      UsageNode node = entry.getValue();
      Usage usage = entry.getKey();
      if (node == NULL_NODE || node == null) {
        continue;
      }
      if (node.isExcluded()) {
        result.add(usage);
      }
    }

    return result;
  }

  // The node at the lead selection path, or null if nothing (or a non-Node) is selected.
  @Nullable
  private Node getSelectedNode() {
    TreePath leadSelectionPath = myTree.getLeadSelectionPath();
    if (leadSelectionPath == null) return null;

    DefaultMutableTreeNode node = (DefaultMutableTreeNode)leadSelectionPath.getLastPathComponent();
    return node instanceof Node ? (Node)node : null;
  }

  @Nullable
  private Node[] getSelectedNodes() {
    TreePath[] leadSelectionPath = myTree.getSelectionPaths();
    if (leadSelectionPath == null || leadSelectionPath.length == 0) return null;

    final List<Node> result = new ArrayList<Node>();
    for (TreePath comp : leadSelectionPath) {
      final Object lastPathComponent = comp.getLastPathComponent();
      if (lastPathComponent instanceof Node) {
        final Node node = (Node)lastPathComponent;
        result.add(node);
      }
    }
    return result.isEmpty() ? null : result.toArray(new Node[result.size()]);
  }

  // All usages in the subtrees of the selected paths; null when there is no selection.
  @Override
  @Nullable
  public Set<Usage> getSelectedUsages() {
    TreePath[] selectionPaths = myTree.getSelectionPaths();
    if (selectionPaths == null) {
      return null;
    }

    Set<Usage> usages = new THashSet<Usage>();
    for (TreePath selectionPath : selectionPaths) {
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)selectionPath.getLastPathComponent();
      collectUsages(node, usages);
    }

    return usages;
  }

  @Override
  @NotNull
  public Set<Usage> getUsages() {
    return myUsageNodes.keySet();
  }

  @Override
  @NotNull
  public List<Usage> getSortedUsages() {
    List<Usage> usages = new ArrayList<Usage>(getUsages());
    Collections.sort(usages, USAGE_COMPARATOR);
    return usages;
  }

  // Depth-first collection of all usages in the subtree rooted at node (inclusive).
  private static void collectUsages(@NotNull DefaultMutableTreeNode node, @NotNull Set<Usage> usages) {
    if (node instanceof UsageNode) {
      UsageNode usageNode = (UsageNode)node;
      final Usage usage = usageNode.getUsage();
      usages.add(usage);
    }

    Enumeration enumeration = node.children();
    while (enumeration.hasMoreElements()) {
      DefaultMutableTreeNode child = (DefaultMutableTreeNode)enumeration.nextElement();
      collectUsages(child, usages);
    }
  }

  // Valid targets among the selected target nodes; null when none are selected.
  @Nullable
  private UsageTarget[] getSelectedUsageTargets() {
    TreePath[] selectionPaths = myTree.getSelectionPaths();
    if (selectionPaths == null) return null;

    Set<UsageTarget> targets = new THashSet<UsageTarget>();
    for (TreePath selectionPath : selectionPaths) {
      Object lastPathComponent = selectionPath.getLastPathComponent();
      if (lastPathComponent instanceof UsageTargetNode) {
        UsageTargetNode usageTargetNode = (UsageTargetNode)lastPathComponent;
        UsageTarget target = usageTargetNode.getTarget();
        if (target.isValid()) {
          targets.add(target);
        }
      }
    }

    return targets.isEmpty() ?
           null : targets.toArray(new UsageTarget[targets.size()]);
  }

  @Nullable
  private static Navigatable getNavigatableForNode(@NotNull DefaultMutableTreeNode node) {
    Object userObject = node.getUserObject();
    if (userObject instanceof Navigatable) {
      final Navigatable navigatable = (Navigatable)userObject;
      return navigatable.canNavigate() ? navigatable : null;
    }
    return null;
  }

  /* nodes with non-valid data are not included */
  private static Navigatable[] getNavigatablesForNodes(Node[] nodes) {
    if (nodes == null) {
      return null;
    }

    final List<Navigatable> result = new ArrayList<Navigatable>();

    for (final Node node : nodes) {
      /*
      if (!node.isDataValid()) {
        continue;
      }
      */
      Object userObject = node.getUserObject();

      if (userObject instanceof Navigatable) {
        result.add((Navigatable)userObject);
      }
    }

    return result.toArray(new Navigatable[result.size()]);
  }

  public boolean areTargetsValid() {
    return myModel.areTargetsValid();
  }

  // Root panel: hosts the tree and implements data-context queries, occurrence
  // navigation and the copy action for the usage view.
  private class MyPanel extends JPanel implements TypeSafeDataProvider, OccurenceNavigator,Disposable, CopyProvider {
    // nulled on dispose; all delegating methods below are null-safe for that reason
    @Nullable private OccurenceNavigatorSupport mySupport;

    private MyPanel(@NotNull JTree tree) {
      mySupport = new OccurenceNavigatorSupport(tree) {
        @Override
        protected Navigatable createDescriptorForNode(DefaultMutableTreeNode node) {
          // only leaf, non-excluded nodes participate in prev/next occurrence navigation
          if (node.getChildCount() > 0) return null;
          if (node instanceof Node && ((Node)node).isExcluded()) return null;
          return getNavigatableForNode(node);
        }

        @Override
        public String getNextOccurenceActionName() {
          return UsageViewBundle.message("action.next.occurrence");
        }

        @Override
        public String getPreviousOccurenceActionName() {
          return UsageViewBundle.message("action.previous.occurrence");
        }
      };
    }

    @Override
    public void dispose() {
      mySupport = null;
    }

    @Override
    public boolean hasNextOccurence() {
      return mySupport != null && mySupport.hasNextOccurence();
    }

    @Override
    public boolean hasPreviousOccurence() {
      return mySupport != null && mySupport.hasPreviousOccurence();
    }

    @Override
    public OccurenceInfo goNextOccurence() {
      return mySupport != null ? mySupport.goNextOccurence() : null;
    }

    @Override
    public OccurenceInfo goPreviousOccurence() {
      return mySupport != null ? mySupport.goPreviousOccurence() : null;
    }

    @Override
    public String getNextOccurenceActionName() {
      return mySupport != null ? mySupport.getNextOccurenceActionName() : "";
    }

    @Override
    public String getPreviousOccurenceActionName() {
      return mySupport != null ? mySupport.getPreviousOccurenceActionName() : "";
    }

    @Override
    public void performCopy(@NotNull DataContext dataContext) {
      final Node selectedNode = getSelectedNode();
      assert selectedNode != null;
      final String plainText = selectedNode.getText(UsageViewImpl.this);
      CopyPasteManager.getInstance().setContents(new StringSelection(plainText.trim()));
    }

    @Override
    public boolean isCopyEnabled(@NotNull DataContext dataContext) {
      return getSelectedNode() != null;
    }

    @Override
    public boolean isCopyVisible(@NotNull DataContext dataContext) {
      return true;
    }

    // Supplies data for the platform's data-context lookups (project, usages, files, ...).
    @Override
    public void calcData(final DataKey key, final DataSink sink) {
      Node node = getSelectedNode();

      if (key == CommonDataKeys.PROJECT) {
        sink.put(CommonDataKeys.PROJECT, myProject);
      }
      else if (key == USAGE_VIEW_KEY) {
        sink.put(USAGE_VIEW_KEY, UsageViewImpl.this);
      }
      else if (key == CommonDataKeys.NAVIGATABLE_ARRAY) {
        sink.put(CommonDataKeys.NAVIGATABLE_ARRAY, getNavigatablesForNodes(getSelectedNodes()));
      }
      else if (key == PlatformDataKeys.EXPORTER_TO_TEXT_FILE) {
        sink.put(PlatformDataKeys.EXPORTER_TO_TEXT_FILE, myTextFileExporter);
      }
      else if (key == USAGES_KEY) {
        final Set<Usage> selectedUsages = getSelectedUsages();
        sink.put(USAGES_KEY, selectedUsages != null ? selectedUsages.toArray(new Usage[selectedUsages.size()]) : null);
      }
      else if (key == USAGE_TARGETS_KEY) {
        sink.put(USAGE_TARGETS_KEY, getSelectedUsageTargets());
      }
      else if (key == CommonDataKeys.VIRTUAL_FILE_ARRAY) {
        final Set<Usage> usages = getSelectedUsages();
        Usage[] ua = usages != null ? usages.toArray(new Usage[usages.size()]) : null;
        VirtualFile[] data = UsageDataUtil.provideVirtualFileArray(ua, getSelectedUsageTargets());
        sink.put(CommonDataKeys.VIRTUAL_FILE_ARRAY, data);
      }
      else if (key == PlatformDataKeys.HELP_ID) {
        sink.put(PlatformDataKeys.HELP_ID, HELP_ID);
      }
      else if (key == PlatformDataKeys.COPY_PROVIDER) {
        sink.put(PlatformDataKeys.COPY_PROVIDER, this);
      }
      else if (node != null) {
        // fall back to the selected node's user object when it can provide the requested data
        Object userObject = node.getUserObject();
        if (userObject instanceof TypeSafeDataProvider) {
          ((TypeSafeDataProvider)userObject).calcData(key, sink);
        }
        else if (userObject instanceof DataProvider) {
          DataProvider dataProvider = (DataProvider)userObject;
          Object data = dataProvider.getData(key.getName());
          if (data != null) {
            sink.put(key, data);
          }
        }
      }
    }
  }

  // Bridges the autoscroll-to-source toggle to the persisted usage-view settings.
  private static class MyAutoScrollToSourceOptionProvider implements AutoScrollToSourceOptionProvider {
    @Override
    public boolean isAutoScrollMode() {
      return UsageViewSettings.getInstance().IS_AUTOSCROLL_TO_SOURCE;
    }

    @Override
    public void setAutoScrollMode(boolean state) {
      UsageViewSettings.getInstance().IS_AUTOSCROLL_TO_SOURCE = state;
    }
  }

  // Bottom pane hosting the operation buttons added via addButtonToLowerPane().
  private final class ButtonPanel extends JPanel {
    private ButtonPanel() {
      setLayout(new FlowLayout(FlowLayout.LEFT, 8, 0));
    }

    private void addButtonRunnable(int index, final Runnable runnable, String text) {
      // the separating border appears lazily, only once the first button is added
      if (getBorder() == null) setBorder(IdeBorderFactory.createBorder(SideBorder.TOP));
      final JButton button = new JButton(UIUtil.replaceMnemonicAmpersand(text));
      DialogUtil.registerMnemonic(button);
      button.setFocusable(false);
      button.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
          runnable.run();
        }
      });

      add(button, index);

      invalidate();
      if (getParent() != null) {
        getParent().validate();
      }
    }

    // Buttons are disabled while the search is still running.
    void update() {
      for (int i = 0; i < getComponentCount(); ++i) {
        Component component = getComponent(i);
        if (component instanceof JButton) {
          final JButton button = (JButton)component;
          button.setEnabled(!isSearchInProgress());
        }
      }
    }
  }

  private class
  UsageState {
    private final Usage myUsage;
    private final boolean mySelected;

    // Snapshot of one usage's selection state, used to restore the tree after a rebuild.
    private UsageState(@NotNull Usage usage, boolean isSelected) {
      myUsage = usage;
      mySelected = isSelected;
    }

    // Re-expands the usage's parent group and re-selects the usage if it was selected.
    public void restore() {
      final UsageNode node = myUsageNodes.get(myUsage);
      if (node == NULL_NODE || node == null) {
        return;
      }
      final DefaultMutableTreeNode parentGroupingNode = (DefaultMutableTreeNode)node.getParent();
      if (parentGroupingNode != null) {
        final TreePath treePath = new TreePath(parentGroupingNode.getPath());
        myTree.expandPath(treePath);
        if (mySelected) {
          myTree.addSelectionPath(treePath.pathByAddingChild(node));
        }
      }
    }
  }

  // Wraps a user operation: checks writability, warns about detected changes
  // (offering a re-run), then closes the view and executes the operation as a command.
  private class MyPerformOperationRunnable implements Runnable {
    private final String myCannotMakeString;
    private final Runnable myProcessRunnable;
    private final String myCommandName;
    private final boolean myCheckReadOnlyStatus;

    private MyPerformOperationRunnable(final String cannotMakeString, final Runnable processRunnable, final String commandName, boolean checkReadOnlyStatus) {
      myCannotMakeString = cannotMakeString;
      myProcessRunnable = processRunnable;
      myCommandName = commandName;
      myCheckReadOnlyStatus = checkReadOnlyStatus;
    }

    @Override
    public void run() {
      if (myCheckReadOnlyStatus && !checkReadonlyUsages()) return;
      PsiDocumentManager.getInstance(myProject).commitAllDocuments();
      if (myCannotMakeString != null && myChangesDetected) {
        String title = UsageViewBundle.message("changes.detected.error.title");
        if (canPerformReRun()) {
          String[] options = {UsageViewBundle.message("action.description.rerun"), UsageViewBundle.message("usage.view.cancel.button")};
          String message = myCannotMakeString + "\n\n" + UsageViewBundle.message("dialog.rerun.search");
          int answer = Messages.showOkCancelDialog(myProject, message, title, options[0], options[1], Messages.getErrorIcon());

          if (answer == Messages.OK) {
            refreshUsages();
          }
        }
        else {
          Messages.showMessageDialog(myProject, myCannotMakeString, title, Messages.getErrorIcon());
          //todo[myakovlev] request focus to tree
          //myUsageView.getTree().requestFocus();
        }
        return;
      }

      close();

      CommandProcessor.getInstance().executeCommand(
          myProject, new Runnable() {
            @Override
            public void run() {
              myProcessRunnable.run();
            }
          },
          myCommandName,
          null
      );
    }
  }

  private List<UsageInfo> getSelectedUsageInfos() {
    return USAGE_INFO_LIST_KEY.getData(DataManager.getInstance().getDataContext(myRootPanel));
  }

  public GroupNode getRoot() {
    return myRoot;
  }

  public boolean isVisible(@NotNull Usage usage) {
    return myBuilder != null && myBuilder.isVisible(usage);
  }

  @NotNull
  public UsageTarget[] getTargets() {
    return myTargets;
  }
}
/*
 * Copyright 2007 Sascha Weinreuter
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.intellij.plugins.relaxNG;

import com.intellij.codeHighlighting.Pass;
import com.intellij.codeInsight.daemon.impl.HighlightInfo;
import com.intellij.codeInspection.*;
import com.intellij.codeInspection.htmlInspections.RequiredAttributesInspection;
import com.intellij.javaee.ExternalResourceManagerEx;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiReference;
import com.intellij.testFramework.ExpectedHighlightingData;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.testFramework.UsefulTestCase;
import com.intellij.testFramework.builders.EmptyModuleFixtureBuilder;
import com.intellij.testFramework.fixtures.CodeInsightTestFixture;
import com.intellij.testFramework.fixtures.IdeaProjectTestFixture;
import com.intellij.testFramework.fixtures.IdeaTestFixtureFactory;
import com.intellij.testFramework.fixtures.TestFixtureBuilder;
import com.intellij.testFramework.fixtures.impl.CodeInsightTestFixtureImpl;
import com.intellij.util.ArrayUtil;
import org.intellij.plugins.relaxNG.inspections.RngDomInspection;
import org.intellij.plugins.testUtil.IdeaCodeInsightTestCase;
import org.intellij.plugins.testUtil.ResourceUtil;
import org.jetbrains.annotations.NotNull;

import java.util.Collection;
import java.util.Collections;

/**
 * Base fixture for RELAX NG plugin tests: creates a {@link CodeInsightTestFixture},
 * enables the plugin's inspections, and offers helpers for highlighting,
 * completion, rename and quick-fix tests driven by files under the shared
 * test-data root.
 */
public abstract class HighlightingTestBase extends UsefulTestCase implements IdeaCodeInsightTestCase {

  /** Fixture created in {@link #setUp()} and torn down (and nulled) in {@link #tearDown()}. */
  protected CodeInsightTestFixture myTestFixture;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    final IdeaTestFixtureFactory factory = IdeaTestFixtureFactory.getFixtureFactory();
    myTestFixture = createFixture(factory);

    // Test data path must be configured before the fixture is set up.
    myTestFixture.setTestDataPath(getTestDataBasePath() + getTestDataPath());

    Class<? extends LocalInspectionTool>[] inspectionClasses = new DefaultInspectionProvider().getInspectionClasses();
    // Convention: tests whose name contains "Inspection" also get the plugin's full inspection set.
    if (getName().contains("Inspection")) {
      inspectionClasses = ArrayUtil.mergeArrays(inspectionClasses, ApplicationLoader.getInspectionClasses());
    }

    myTestFixture.setUp();
    myTestFixture.enableInspections(inspectionClasses);

    // Copying test resources and registering ignored resources both require a write action.
    WriteAction.runAndWait(() -> {
      ResourceUtil.copyFiles(HighlightingTestBase.this);
      init();
    });
  }

  /** Converts a test-data-relative path into a system-dependent absolute path. */
  protected static String toAbsolutePath(String relativeTestDataPath) {
    return FileUtil.toSystemDependentName(getTestDataBasePath() + relativeTestDataPath);
  }

  /** Root of the relaxng test data inside the community sources. */
  public static String getTestDataBasePath() {
    return PlatformTestUtil.getCommunityPath() + "/xml/relaxng/testData/";
  }

  /**
   * Creates the default (light, project-less) fixture. Subclasses may override,
   * e.g. to use {@link #createContentFixture(IdeaTestFixtureFactory)} instead.
   */
  protected CodeInsightTestFixture createFixture(@NotNull IdeaTestFixtureFactory factory) {
    final TestFixtureBuilder<IdeaProjectTestFixture> builder = factory.createLightFixtureBuilder();
    final IdeaProjectTestFixture fixture = builder.getFixture();
    return factory.createCodeInsightFixture(fixture);
  }

  /**
   * Creates a heavier fixture with an empty module whose content/source root is
   * the fixture's temp directory — for tests that need real content roots.
   */
  protected CodeInsightTestFixture createContentFixture(IdeaTestFixtureFactory factory) {
    final TestFixtureBuilder<IdeaProjectTestFixture> builder = factory.createFixtureBuilder(getName());
    final EmptyModuleFixtureBuilder moduleBuilder = builder.addModule(EmptyModuleFixtureBuilder.class);
    final IdeaProjectTestFixture fixture = builder.getFixture();
    final CodeInsightTestFixture testFixture = factory.createCodeInsightFixture(fixture);
    final String root = testFixture.getTempDirPath();
    moduleBuilder.addContentRoot(root);
    moduleBuilder.addSourceRoot("/");
    return testFixture;
  }

  @Override
  public CodeInsightTestFixture getFixture() {
    return myTestFixture;
  }

  /** Path of this test's data, relative to {@link #getTestDataBasePath()}. */
  @Override
  public abstract String getTestDataPath();

  /** Hook invoked inside a write action during {@link #setUp()}; default ignores a test-only resource URI. */
  protected void init() {
    ExternalResourceManagerEx.getInstanceEx().addIgnoredResources(Collections.singletonList("urn:test:undefined"), getTestRootDisposable());
  }

  @Override
  protected void tearDown() throws Exception {
    try {
      myTestFixture.tearDown();
    }
    catch (Throwable e) {
      // Don't mask the primary test failure with teardown problems.
      addSuppressedException(e);
    }
    finally {
      myTestFixture = null;
      super.tearDown();
    }
  }

  /** Runs highlighting on {@code s} without the external-tool pass. */
  protected void doHighlightingTest(String s) {
    doCustomHighlighting(s, true, false);
//    myTestFixture.testHighlighting(true, false, true, s);
  }

  /** Runs only the external-tool highlighting pass on {@code name}. */
  protected void doExternalToolHighlighting(String name) {
    doCustomHighlighting(name, true, true);
  }

  protected void doCustomHighlighting(String name, final boolean checkWeakWarnings, final Boolean includeExternalToolPass) {
    myTestFixture.configureByFile(name);

    doCustomHighlighting(checkWeakWarnings, includeExternalToolPass);
  }

  /**
   * Compares actual highlighting of the currently configured file against the
   * expectations embedded in the document markup.
   *
   * @param includeExternalToolPass {@code true}/{@code null} = run only the external-tool
   *                                pass; {@code false} = run everything else (see {@link #doHighlighting}).
   */
  protected void doCustomHighlighting(boolean checkWeakWarnings, Boolean includeExternalToolPass) {
    final PsiFile file = myTestFixture.getFile();
    final Document doc = myTestFixture.getEditor().getDocument();
    ExpectedHighlightingData data = new ExpectedHighlightingData(doc, true, checkWeakWarnings, false, file);
    data.init();
    PsiDocumentManager.getInstance(myTestFixture.getProject()).commitAllDocuments();
    Collection<HighlightInfo> highlights1 = doHighlighting(includeExternalToolPass);
    data.checkResult(highlights1, doc.getText());
  }

  /**
   * Collects highlight infos, ignoring a pass set chosen by {@code externalToolPass}:
   * {@code null} or {@code true} ignores the regular passes (leaving external tools),
   * {@code false} ignores the external-tool pass.
   */
  @NotNull
  protected Collection<HighlightInfo> doHighlighting(final Boolean externalToolPass) {
    final Project project = myTestFixture.getProject();
    PsiDocumentManager.getInstance(project).commitAllDocuments();

    final Editor editor = myTestFixture.getEditor();
    int[] ignore = externalToolPass == null || externalToolPass ? new int[]{
      Pass.LINE_MARKERS,
      Pass.LOCAL_INSPECTIONS,
      Pass.POPUP_HINTS,
      Pass.UPDATE_ALL,
      Pass.UPDATE_FOLDING,
    } : new int[]{Pass.EXTERNAL_TOOLS};
    return CodeInsightTestFixtureImpl.instantiateAndRun(myTestFixture.getFile(), editor, ignore, false);
  }

  /** Completion test using the {@code name.ext -> name_after.ext} file convention. */
  protected void doTestCompletion(String name, String ext) {
    myTestFixture.testCompletion(name + "." + ext, name + "_after." + ext);
  }

  protected void doTestCompletion(String before, String... variants) {
    myTestFixture.testCompletionVariants(before, variants);
  }

  protected void doTestCompletion(String before) {
    doTestCompletion(before, "xml");
  }

  /** Rename test using the {@code name.ext -> name_after.ext} file convention. */
  protected void doTestRename(String name, String ext, String newName) {
    myTestFixture.testRename(name + "." + ext, name + "_after." + ext, newName);
  }

  /**
   * Asserts the reference at the caret is unresolved, offers exactly one quick fix,
   * applies it, and compares against {@code file_after.ext}.
   */
  @SuppressWarnings({ "deprecation"})
  protected void doTestQuickFix(String file, String ext) {
    final PsiReference psiReference = myTestFixture.getReferenceAtCaretPositionWithAssertion(file + "." + ext);
    assertNull("Reference", psiReference.resolve());
    assertTrue(psiReference.getClass().getName() + " is not a QuickFixProvider", psiReference instanceof LocalQuickFixProvider);

    final LocalQuickFix[] fixes = ((LocalQuickFixProvider)psiReference).getQuickFixes();
    assertTrue("One action expected", fixes != null && fixes.length == 1);

    final Project project = myTestFixture.getProject();
    final ProblemDescriptor problemDescriptor = InspectionManager.getInstance(project).createProblemDescriptor(psiReference.getElement(), "foo", fixes, ProblemHighlightType.GENERIC_ERROR_OR_WARNING, true);
    WriteCommandAction.writeCommandAction(project, myTestFixture.getFile()).run(() -> fixes[0].applyFix(project, problemDescriptor));
    myTestFixture.checkResultByFile(file + "_after." + ext);
  }

  /** Inspections enabled for every test in this hierarchy. */
  private static class DefaultInspectionProvider implements InspectionToolProvider {
    @NotNull
    @Override
    public Class[] getInspectionClasses() {
      return new Class[]{
              RngDomInspection.class,
              RequiredAttributesInspection.class
      };
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sling.testing.mock.sling;

import static org.apache.sling.api.adapter.AdapterFactory.ADAPTABLE_CLASSES;
import static org.apache.sling.api.adapter.AdapterFactory.ADAPTER_CLASSES;

import java.util.ArrayList;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.ReferencePolicy;
import org.apache.felix.scr.annotations.Service;
import org.apache.sling.api.SlingConstants;
import org.apache.sling.api.adapter.AdapterFactory;
import org.apache.sling.api.adapter.AdapterManager;
import org.apache.sling.commons.osgi.PropertiesUtil;
import org.osgi.framework.ServiceReference;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventAdmin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This is a copy of org.apache.sling.adapter.internal.AdapterManagerImpl from Sling Adapter 2.1.0,
 * with all calls to SyntheticResource.setAdapterManager/unsetAdapterManager disabled, because this would
 * break the {@link ThreadsafeMockAdapterManagerWrapper} concept.
 */
@Component(immediate=true)
@Service
@Reference(name="AdapterFactory", referenceInterface=AdapterFactory.class,
    cardinality=ReferenceCardinality.OPTIONAL_MULTIPLE, policy=ReferencePolicy.DYNAMIC)
public class MockAdapterManagerImpl implements AdapterManager {

    private final Logger log = LoggerFactory.getLogger(getClass());

    /**
     * The OSGi <code>ComponentContext</code> to retrieve
     * {@link AdapterFactory} service instances.
     */
    // volatile: read without a lock in bindAdapterFactory() to detect "not yet activated"
    private volatile ComponentContext context;

    /**
     * A list of {@link AdapterFactory} services bound to this manager before
     * the manager has been activated. These bound services will be accessed as
     * soon as the manager is being activated.
     */
    // Guarded by synchronized(boundAdapterFactories); drained in activate().
    private final List<ServiceReference> boundAdapterFactories = new LinkedList<ServiceReference>();

    /**
     * A map of {@link AdapterFactoryDescriptorMap} instances. The map is
     * indexed by the fully qualified class names listed in the
     * {@link AdapterFactory#ADAPTABLE_CLASSES} property of the
     * {@link AdapterFactory} services.
     *
     * @see AdapterFactoryDescriptorMap
     */
    // Guarded by synchronized(descriptors) for map access; each contained
    // AdapterFactoryDescriptorMap is additionally locked on itself.
    private final Map<String, AdapterFactoryDescriptorMap> descriptors = new HashMap<String, AdapterFactoryDescriptorMap>();

    /**
     * Matrix of {@link AdapterFactoryDescriptor} instances primarily indexed by the fully
     * qualified name of the class to be adapted and secondarily indexed by the
     * fully qualified name of the class to adapt to (the target class).
     * <p>
     * This cache is built on demand by calling the
     * {@link #getAdapterFactories(Class)} method. It is cleared
     * whenever an adapter factory is registered or unregistered.
     */
    private final ConcurrentMap<String, Map<String, List<AdapterFactoryDescriptor>>> factoryCache =
        new ConcurrentHashMap<String, Map<String, List<AdapterFactoryDescriptor>>>();

    /**
     * The service tracker for the event admin
     */
    @Reference(cardinality=ReferenceCardinality.OPTIONAL_UNARY, policy=ReferencePolicy.DYNAMIC)
    private volatile EventAdmin eventAdmin;

    // ---------- AdapterManager interface -------------------------------------

    /**
     * Returns the adapted <code>adaptable</code> or <code>null</code> if
     * the object cannot be adapted.
     *
     * @see org.apache.sling.api.adapter.AdapterManager#getAdapter(java.lang.Object, java.lang.Class)
     */
    public <AdapterType> AdapterType getAdapter(final Object adaptable, final Class<AdapterType> type) {
        // get the adapter factories for the type of adaptable object
        final Map<String, List<AdapterFactoryDescriptor>> factories = getAdapterFactories(adaptable.getClass());

        // get the factory for the target type
        final List<AdapterFactoryDescriptor> descList = factories.get(type.getName());

        if (descList != null && descList.size() > 0) {
            // Descriptors are ordered by service ranking; first non-null adaptation wins.
            for (AdapterFactoryDescriptor desc : descList) {
                final AdapterFactory factory = desc == null ? null : desc.getFactory();
                // have the factory adapt the adaptable if the factory exists
                if (factory != null) {
                    log.debug("Trying adapter factory {} to map {} to {}",
                        new Object [] { factory, adaptable, type });
                    AdapterType adaptedObject = factory.getAdapter(adaptable, type);
                    if (adaptedObject != null) {
                        log.debug("Using adapter factory {} to map {} to {}",
                            new Object [] { factory, adaptable, type });
                        return adaptedObject;
                    }
                }
            }
        }

        // no factory has been found, so we cannot adapt
        log.debug("No adapter factory found to map {} to {}", adaptable, type);

        return null;
    }

    // ----------- SCR integration ---------------------------------------------

    /**
     * Activate the manager.
     * Bind all already registered factories
     * @param context Component context
     */
    protected void activate(final ComponentContext context) {
        this.context = context;

        // register all adapter factories bound before activation
        final List<ServiceReference> refs;
        synchronized ( this.boundAdapterFactories ) {
            refs = new ArrayList<ServiceReference>(this.boundAdapterFactories);
            boundAdapterFactories.clear();
        }
        for (final ServiceReference reference : refs) {
            registerAdapterFactory(context, reference);
        }

        // final "enable" this manager by setting the instance
        // DISABLED IN THIS COPY OF CLASS
        //SyntheticResource.setAdapterManager(this);
    }

    /**
     * Deactivate
     * @param context Not used
     */
    protected void deactivate(final ComponentContext context) {
        // DISABLED IN THIS COPY OF CLASS
        //SyntheticResource.unsetAdapterManager(this);
        this.context = null;
    }

    /**
     * Bind a new adapter factory.
     * @param reference Service reference
     */
    protected void bindAdapterFactory(final ServiceReference reference) {
        boolean create = true;
        // Before activation there is no ComponentContext yet; queue the
        // reference so activate() can register it later.
        if (context == null) {
            synchronized ( this.boundAdapterFactories ) {
                // re-check under the lock to avoid races with activate()
                if (context == null) {
                    boundAdapterFactories.add(reference);
                    create = false;
                }
            }
        }
        if ( create ) {
            registerAdapterFactory(context, reference);
        }
    }

    /**
     * Unbind an adapter factory.
     * @param reference Service reference
     */
    protected void unbindAdapterFactory(final ServiceReference reference) {
        unregisterAdapterFactory(reference);
    }

    // ---------- unit testing stuff only --------------------------------------

    /**
     * Returns the active adapter factories of this manager.
     * <p>
     * <strong><em>THIS METHOD IS FOR UNIT TESTING ONLY. IT MAY BE REMOVED OR
     * MODIFIED WITHOUT NOTICE.</em></strong>
     */
    Map<String, AdapterFactoryDescriptorMap> getFactories() {
        return descriptors;
    }

    /**
     * Returns the current adapter factory cache.
     * <p>
     * <strong><em>THIS METHOD IS FOR UNIT TESTING ONLY. IT MAY BE REMOVED OR
     * MODIFIED WITHOUT NOTICE.</em></strong>
     */
    Map<String, Map<String, List<AdapterFactoryDescriptor>>> getFactoryCache() {
        return factoryCache;
    }

    /**
     * Registers the {@link AdapterFactory} referred to by the service
     * <code>reference</code> in the registry.
     */
    private void registerAdapterFactory(final ComponentContext context,
            final ServiceReference reference) {
        final String[] adaptables = PropertiesUtil.toStringArray(reference.getProperty(ADAPTABLE_CLASSES));
        final String[] adapters = PropertiesUtil.toStringArray(reference.getProperty(ADAPTER_CLASSES));

        // a factory without both properties is unusable; ignore it
        if (adaptables == null || adaptables.length == 0
            || adapters == null || adapters.length == 0) {
            return;
        }

        final AdapterFactoryDescriptor factoryDesc = new AdapterFactoryDescriptor(context, reference, adapters);

        for (final String adaptable : adaptables) {
            AdapterFactoryDescriptorMap adfMap = null;
            synchronized ( this.descriptors ) {
                adfMap = descriptors.get(adaptable);
                if (adfMap == null) {
                    adfMap = new AdapterFactoryDescriptorMap();
                    descriptors.put(adaptable, adfMap);
                }
            }
            synchronized ( adfMap ) {
                adfMap.put(reference, factoryDesc);
            }
        }

        // clear the factory cache to force rebuild on next access
        this.factoryCache.clear();

        // send event
        final EventAdmin localEA = this.eventAdmin;
        if ( localEA != null ) {
            final Dictionary<String, Object> props = new Hashtable<String, Object>();
            props.put(SlingConstants.PROPERTY_ADAPTABLE_CLASSES, adaptables);
            props.put(SlingConstants.PROPERTY_ADAPTER_CLASSES, adapters);
            localEA.postEvent(new Event(SlingConstants.TOPIC_ADAPTER_FACTORY_ADDED, props));
        }
    }

    /**
     * Unregisters the {@link AdapterFactory} referred to by the service
     * <code>reference</code> from the registry.
     */
    private void unregisterAdapterFactory(final ServiceReference reference) {
        // the factory might still be queued from before activation
        synchronized ( this.boundAdapterFactories ) {
            boundAdapterFactories.remove(reference);
        }
        final String[] adaptables = PropertiesUtil.toStringArray(reference.getProperty(ADAPTABLE_CLASSES));
        final String[] adapters = PropertiesUtil.toStringArray(reference.getProperty(ADAPTER_CLASSES));

        if (adaptables == null || adaptables.length == 0
            || adapters == null || adapters.length == 0) {
            return;
        }

        boolean factoriesModified = false;
        AdapterFactoryDescriptorMap adfMap = null;

        for (final String adaptable : adaptables) {
            synchronized ( this.descriptors ) {
                adfMap = this.descriptors.get(adaptable);
            }
            if (adfMap != null) {
                synchronized ( adfMap ) {
                    factoriesModified |= (adfMap.remove(reference) != null);
                }
            }
        }

        // only remove cache if some adapter factories have actually been
        // removed
        if (factoriesModified) {
            this.factoryCache.clear();
        }

        // send event
        final EventAdmin localEA = this.eventAdmin;
        if ( localEA != null ) {
            final Dictionary<String, Object> props = new Hashtable<String, Object>();
            props.put(SlingConstants.PROPERTY_ADAPTABLE_CLASSES, adaptables);
            props.put(SlingConstants.PROPERTY_ADAPTER_CLASSES, adapters);
            localEA.postEvent(new Event(SlingConstants.TOPIC_ADAPTER_FACTORY_REMOVED, props));
        }
    }

    /**
     * Returns the map of adapter factories index by adapter (target) class name
     * for the given adaptable <code>clazz</code>. If no adapter exists for
     * the <code>clazz</code> and empty map is returned.
     *
     * @param clazz The adaptable <code>Class</code> for which to return the
     *            adapter factory map by target class name.
     * @return The map of adapter factories by target class name. The map may be
     *         empty if there is no adapter factory for the adaptable
     *         <code>clazz</code>.
     */
    private Map<String, List<AdapterFactoryDescriptor>> getAdapterFactories(final Class<?> clazz) {
        final String className = clazz.getName();
        Map<String, List<AdapterFactoryDescriptor>> entry = this.factoryCache.get(className);
        if (entry == null) {
            // create entry
            // NOTE(review): check-then-act on the ConcurrentMap — two threads may
            // build the entry concurrently and the later put wins; benign since
            // both build equivalent maps (same as the upstream Sling code).
            entry = createAdapterFactoryMap(clazz);
            this.factoryCache.put(className, entry);
        }

        return entry;
    }

    /**
     * Creates a new target adapter factory map for the given <code>clazz</code>.
     * First all factories defined to support the adaptable class by
     * registration are taken. Next all factories for the implemented interfaces
     * and finally all base class factories are copied.
     *
     * @param clazz The adaptable <code>Class</code> for which to build the
     *            adapter factory map by target class name.
     * @return The map of adapter factories by target class name. The map may be
     *         empty if there is no adapter factory for the adaptable
     *         <code>clazz</code>.
     */
    private Map<String, List<AdapterFactoryDescriptor>> createAdapterFactoryMap(final Class<?> clazz) {
        final Map<String, List<AdapterFactoryDescriptor>> afm = new HashMap<String, List<AdapterFactoryDescriptor>>();

        // AdapterFactories for this class
        AdapterFactoryDescriptorMap afdMap = null;
        synchronized ( this.descriptors ) {
            afdMap = this.descriptors.get(clazz.getName());
        }
        if (afdMap != null) {
            final List<AdapterFactoryDescriptor> afdSet;
            // snapshot under the per-map lock before iterating
            synchronized ( afdMap ) {
                afdSet = new ArrayList<AdapterFactoryDescriptor>(afdMap.values());
            }
            for (final AdapterFactoryDescriptor afd : afdSet) {
                final String[] adapters = afd.getAdapters();
                for (final String adapter : adapters) {
                    // to handle service ranking, we add to the end of the list or create a new list
                    List<AdapterFactoryDescriptor> factoryDescriptors = afm.get(adapter);
                    if (factoryDescriptors == null) {
                        factoryDescriptors = new ArrayList<AdapterFactoryDescriptor>();
                        afm.put(adapter, factoryDescriptors);
                    }
                    factoryDescriptors.add(afd);
                }
            }
        }

        // AdapterFactories for the interfaces
        final Class<?>[] interfaces = clazz.getInterfaces();
        for (final Class<?> iFace : interfaces) {
            copyAdapterFactories(afm, iFace);
        }

        // AdapterFactories for the super class
        final Class<?> superClazz = clazz.getSuperclass();
        if (superClazz != null) {
            copyAdapterFactories(afm, superClazz);
        }

        return afm;
    }

    /**
     * Copies all adapter factories for the given <code>clazz</code> from the
     * <code>cache</code> to the <code>dest</code> map except for those
     * factories whose target class already exists in the <code>dest</code>
     * map.
     *
     * @param dest The map of target class name to adapter factory into which
     *            additional factories are copied. Existing factories are not
     *            replaced.
     * @param clazz The adaptable class whose adapter factories are considered
     *            for adding into <code>dest</code>.
     */
    private void copyAdapterFactories(final Map<String, List<AdapterFactoryDescriptor>> dest,
            final Class<?> clazz) {

        // get the adapter factories for the adaptable clazz
        // (recurses indirectly via getAdapterFactories -> createAdapterFactoryMap)
        final Map<String, List<AdapterFactoryDescriptor>> scMap = getAdapterFactories(clazz);

        // for each target class copy the entry to dest and put it in the list or create the list
        for (Map.Entry<String, List<AdapterFactoryDescriptor>> entry : scMap.entrySet()) {
            List<AdapterFactoryDescriptor> factoryDescriptors = dest.get(entry.getKey());
            if (factoryDescriptors == null) {
                factoryDescriptors = new ArrayList<AdapterFactoryDescriptor>();
                dest.put(entry.getKey(), factoryDescriptors);
            }
            for (AdapterFactoryDescriptor descriptor : entry.getValue()) {
                factoryDescriptors.add(descriptor);
            }
        }
    }

    /**
     * The <code>AdapterFactoryDescriptor</code> is an entry in the
     * {@link AdapterFactoryDescriptorMap} conveying the list of adapter (target)
     * types and the respective {@link AdapterFactory}.
     */
    private static class AdapterFactoryDescriptor {

        // lazily resolved service instance; volatile so a resolved factory is
        // visible to all threads (duplicate resolution is harmless)
        private volatile AdapterFactory factory;

        private final String[] adapters;

        private final ServiceReference reference;

        private final ComponentContext context;

        public AdapterFactoryDescriptor(
                final ComponentContext context,
                final ServiceReference reference,
                final String[] adapters) {
            this.reference = reference;
            this.context = context;
            this.adapters = adapters;
        }

        public AdapterFactory getFactory() {
            if ( factory == null ) {
                factory = (AdapterFactory) context.locateService(
                        "AdapterFactory", reference);
            }
            return factory;
        }

        public String[] getAdapters() {
            return adapters;
        }
    }

    /**
     * The <code>AdapterFactoryDescriptorMap</code> is a sorted map of
     * {@link AdapterFactoryDescriptor} instances indexed (and ordered) by their
     * {@link ServiceReference}. This map is used to organize the
     * registered {@link org.apache.sling.api.adapter.AdapterFactory} services for
     * a given adaptable type.
     * <p>
     * Each entry in the map is a {@link AdapterFactoryDescriptor} thus enabling the
     * registration of multiple factories for the same (adaptable, adapter) type
     * tuple. Of course only the first entry (this is the reason for having a sorted
     * map) for such a given tuple is actually being used. If that first instance is
     * removed the eventual second instance may actually be used instead.
     */
    private static class AdapterFactoryDescriptorMap extends TreeMap<ServiceReference, AdapterFactoryDescriptor> {

        private static final long serialVersionUID = 2L;

    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.memory; import com.facebook.presto.ExceededCpuLimitException; import com.facebook.presto.execution.LocationFactory; import com.facebook.presto.execution.QueryExecution; import com.facebook.presto.execution.QueryId; import com.facebook.presto.execution.QueryIdGenerator; import com.facebook.presto.execution.QueryManagerConfig; import com.facebook.presto.server.ServerConfig; import com.facebook.presto.spi.Node; import com.facebook.presto.spi.NodeManager; import com.facebook.presto.spi.PrestoException; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.airlift.http.client.HttpClient; import io.airlift.json.JsonCodec; import io.airlift.log.Logger; import io.airlift.units.DataSize; import io.airlift.units.Duration; import org.weakref.jmx.JmxException; import org.weakref.jmx.MBeanExporter; import org.weakref.jmx.Managed; import org.weakref.jmx.ObjectNames; import javax.annotation.PreDestroy; import javax.annotation.concurrent.GuardedBy; import javax.inject.Inject; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import static com.facebook.presto.ExceededMemoryLimitException.exceededGlobalLimit; import static 
com.facebook.presto.SystemSessionProperties.RESOURCE_OVERCOMMIT; import static com.facebook.presto.SystemSessionProperties.getQueryMaxCpuTime; import static com.facebook.presto.SystemSessionProperties.getQueryMaxMemory; import static com.facebook.presto.SystemSessionProperties.resourceOvercommit; import static com.facebook.presto.memory.LocalMemoryManager.GENERAL_POOL; import static com.facebook.presto.memory.LocalMemoryManager.RESERVED_POOL; import static com.facebook.presto.spi.NodeState.ACTIVE; import static com.facebook.presto.spi.NodeState.SHUTTING_DOWN; import static com.facebook.presto.spi.StandardErrorCode.CLUSTER_OUT_OF_MEMORY; import static com.facebook.presto.util.ImmutableCollectors.toImmutableList; import static com.facebook.presto.util.ImmutableCollectors.toImmutableSet; import static com.google.common.collect.Sets.difference; import static io.airlift.units.DataSize.Unit.BYTE; import static io.airlift.units.DataSize.succinctDataSize; import static io.airlift.units.Duration.nanosSince; import static java.lang.String.format; import static java.util.Objects.requireNonNull; public class ClusterMemoryManager { private static final Logger log = Logger.get(ClusterMemoryManager.class); private final NodeManager nodeManager; private final LocationFactory locationFactory; private final HttpClient httpClient; private final MBeanExporter exporter; private final JsonCodec<MemoryInfo> memoryInfoCodec; private final JsonCodec<MemoryPoolAssignmentsRequest> assignmentsRequestJsonCodec; private final DataSize maxQueryMemory; private final Duration maxQueryCpuTime; private final boolean enabled; private final boolean killOnOutOfMemory; private final Duration killOnOutOfMemoryDelay; private final String coordinatorId; private final AtomicLong memoryPoolAssignmentsVersion = new AtomicLong(); private final AtomicLong clusterMemoryUsageBytes = new AtomicLong(); private final AtomicLong clusterMemoryBytes = new AtomicLong(); private final AtomicLong 
queriesKilledDueToOutOfMemory = new AtomicLong();

    // Per-worker memory endpoints, keyed by node identifier. Only mutated from
    // methods invoked under the instance lock (process()).
    private final Map<String, RemoteNodeMemory> nodes = new HashMap<>();

    // Cluster-wide view of each memory pool, exported as an MBean.
    @GuardedBy("this")
    private final Map<MemoryPoolId, ClusterMemoryPool> pools = new HashMap<>();

    // Timestamp (nanos) of the last process() pass where the cluster was NOT out of memory.
    @GuardedBy("this")
    private long lastTimeNotOutOfMemory = System.nanoTime();

    // The last query this manager killed due to cluster OOM; used to avoid killing
    // another query before the previous kill has actually freed memory.
    @GuardedBy("this")
    private QueryId lastKilledQuery;

    @Inject
    public ClusterMemoryManager(
            @ForMemoryManager HttpClient httpClient,
            NodeManager nodeManager,
            LocationFactory locationFactory,
            MBeanExporter exporter,
            JsonCodec<MemoryInfo> memoryInfoCodec,
            JsonCodec<MemoryPoolAssignmentsRequest> assignmentsRequestJsonCodec,
            QueryIdGenerator queryIdGenerator,
            ServerConfig serverConfig,
            MemoryManagerConfig config,
            QueryManagerConfig queryManagerConfig)
    {
        requireNonNull(config, "config is null");
        this.nodeManager = requireNonNull(nodeManager, "nodeManager is null");
        this.locationFactory = requireNonNull(locationFactory, "locationFactory is null");
        this.httpClient = requireNonNull(httpClient, "httpClient is null");
        this.exporter = requireNonNull(exporter, "exporter is null");
        this.memoryInfoCodec = requireNonNull(memoryInfoCodec, "memoryInfoCodec is null");
        this.assignmentsRequestJsonCodec = requireNonNull(assignmentsRequestJsonCodec, "assignmentsRequestJsonCodec is null");
        this.maxQueryMemory = config.getMaxQueryMemory();
        this.maxQueryCpuTime = queryManagerConfig.getQueryMaxCpuTime();
        this.coordinatorId = queryIdGenerator.getCoordinatorId();
        // The manager is only active on the coordinator.
        this.enabled = serverConfig.isCoordinator();
        this.killOnOutOfMemoryDelay = config.getKillOnOutOfMemoryDelay();
        this.killOnOutOfMemory = config.isKillOnOutOfMemory();
    }

    /**
     * Periodic driver of cluster memory management. For each running query it:
     * enforces the per-query memory limit (killing violators, with a special
     * carve-out for resource-overcommit queries), optionally kills the biggest
     * general-pool query when the cluster has been out of memory for longer than
     * {@code killOnOutOfMemoryDelay}, republishes pool assignments to all workers,
     * and enforces per-query CPU time limits.
     */
    public synchronized void process(Iterable<QueryExecution> queries)
    {
        if (!enabled) {
            return;
        }

        boolean outOfMemory = isClusterOutOfMemory();
        if (!outOfMemory) {
            lastTimeNotOutOfMemory = System.nanoTime();
        }

        boolean queryKilled = false;
        long totalBytes = 0;
        for (QueryExecution query : queries) {
            long bytes = query.getTotalMemoryReservation();
            DataSize sessionMaxQueryMemory = getQueryMaxMemory(query.getSession());
            // The effective limit is the smaller of the system-wide and session limits.
            long queryMemoryLimit = Math.min(maxQueryMemory.toBytes(), sessionMaxQueryMemory.toBytes());
            totalBytes += bytes;
            if (bytes > queryMemoryLimit) {
                if (resourceOvercommit(query.getSession())) {
                    // If a query has requested resource overcommit, only kill it if the cluster has run out of memory
                    if (outOfMemory) {
                        DataSize memory = succinctDataSize(bytes, BYTE);
                        query.fail(new PrestoException(CLUSTER_OUT_OF_MEMORY,
                                format("The cluster is out of memory, you set %s=true, and your query is using %s of memory, so it was killed.", RESOURCE_OVERCOMMIT, memory)));
                        queryKilled = true;
                    }
                }
                else {
                    DataSize maxMemory = succinctDataSize(queryMemoryLimit, BYTE);
                    query.fail(exceededGlobalLimit(maxMemory));
                    queryKilled = true;
                }
            }
        }
        clusterMemoryUsageBytes.set(totalBytes);

        if (killOnOutOfMemory) {
            boolean shouldKillQuery = nanosSince(lastTimeNotOutOfMemory).compareTo(killOnOutOfMemoryDelay) > 0 && outOfMemory;
            boolean lastKilledQueryIsGone = (lastKilledQuery == null);
            if (!lastKilledQueryIsGone) {
                ClusterMemoryPool generalPool = pools.get(GENERAL_POOL);
                if (generalPool != null) {
                    // FIX: the previously killed query has "gone away" only once it no longer
                    // holds a reservation in the general pool. The original code was missing
                    // the negation, which inverted the check and could either kill a second
                    // query before the first kill freed any memory, or never kill again.
                    lastKilledQueryIsGone = !generalPool.getQueryMemoryReservations().containsKey(lastKilledQuery);
                }
            }
            // Only kill if the OOM condition has persisted, the previous kill has taken
            // effect, and this pass has not already killed a query for other reasons.
            if (shouldKillQuery && lastKilledQueryIsGone && !queryKilled) {
                // Kill the biggest query in the general pool
                QueryExecution biggestQuery = null;
                long maxMemory = -1;
                for (QueryExecution query : queries) {
                    long bytesUsed = query.getTotalMemoryReservation();
                    if (bytesUsed > maxMemory && query.getMemoryPool().getId().equals(GENERAL_POOL)) {
                        biggestQuery = query;
                        maxMemory = bytesUsed;
                    }
                }
                if (biggestQuery != null) {
                    biggestQuery.fail(new PrestoException(CLUSTER_OUT_OF_MEMORY, "The cluster is out of memory, and your query was killed. Please try again in a few minutes."));
                    queriesKilledDueToOutOfMemory.incrementAndGet();
                    lastKilledQuery = biggestQuery.getQueryId();
                }
            }
        }

        Map<MemoryPoolId, Integer> countByPool = new HashMap<>();
        for (QueryExecution query : queries) {
            MemoryPoolId id = query.getMemoryPool().getId();
            countByPool.put(id, countByPool.getOrDefault(id, 0) + 1);
        }

        updatePools(countByPool);

        updateNodes(updateAssignments(queries));

        // check if CPU usage is over limit
        for (QueryExecution query : queries) {
            Duration cpuTime = query.getTotalCpuTime();
            Duration sessionLimit = getQueryMaxCpuTime(query.getSession());
            Duration limit = maxQueryCpuTime.compareTo(sessionLimit) < 0 ? maxQueryCpuTime : sessionLimit;
            if (cpuTime.compareTo(limit) > 0) {
                query.fail(new ExceededCpuLimitException(limit));
            }
        }
    }

    /** Snapshot of the current pools, for tests only. */
    @VisibleForTesting
    synchronized Map<MemoryPoolId, ClusterMemoryPool> getPools()
    {
        return ImmutableMap.copyOf(pools);
    }

    /**
     * The cluster is considered out of memory when the reserved pool is in use
     * (a query has been promoted into it) and the general pool is blocked on at
     * least one node.
     */
    private boolean isClusterOutOfMemory()
    {
        ClusterMemoryPool reservedPool = pools.get(RESERVED_POOL);
        ClusterMemoryPool generalPool = pools.get(GENERAL_POOL);
        return reservedPool != null &&
                generalPool != null &&
                reservedPool.getAssignedQueries() > 0 &&
                generalPool.getBlockedNodes() > 0;
    }

    /**
     * Computes the next versioned pool-assignment request. If the reserved pool is
     * empty while the general pool is blocked, promotes the single biggest
     * non-overcommit query into the reserved pool.
     */
    private MemoryPoolAssignmentsRequest updateAssignments(Iterable<QueryExecution> queries)
    {
        ClusterMemoryPool reservedPool = pools.get(RESERVED_POOL);
        ClusterMemoryPool generalPool = pools.get(GENERAL_POOL);
        long version = memoryPoolAssignmentsVersion.incrementAndGet();
        // Check that all previous assignments have propagated to the visible nodes. This doesn't account for temporary network issues,
        // and is more of a safety check than a guarantee
        if (reservedPool != null && generalPool != null && allAssignmentsHavePropagated(queries)) {
            if (reservedPool.getAssignedQueries() == 0 && generalPool.getBlockedNodes() > 0) {
                QueryExecution biggestQuery = null;
                long maxMemory = -1;
                for (QueryExecution queryExecution : queries) {
                    if (resourceOvercommit(queryExecution.getSession())) {
                        // Don't promote queries that requested resource overcommit to the reserved pool,
                        // since their memory usage is unbounded.
                        continue;
                    }
                    long bytesUsed = queryExecution.getTotalMemoryReservation();
                    if (bytesUsed > maxMemory) {
                        biggestQuery = queryExecution;
                        maxMemory = bytesUsed;
                    }
                }
                if (biggestQuery != null) {
                    biggestQuery.setMemoryPool(new VersionedMemoryPoolId(RESERVED_POOL, version));
                }
            }
        }

        ImmutableList.Builder<MemoryPoolAssignment> assignments = ImmutableList.builder();
        for (QueryExecution queryExecution : queries) {
            assignments.add(new MemoryPoolAssignment(queryExecution.getQueryId(), queryExecution.getMemoryPool().getId()));
        }
        return new MemoryPoolAssignmentsRequest(coordinatorId, version, assignments.build());
    }

    /**
     * True when every visible node has acknowledged an assignment version at least as
     * new as the oldest assignment version carried by the running queries.
     */
    private boolean allAssignmentsHavePropagated(Iterable<QueryExecution> queries)
    {
        if (nodes.isEmpty()) {
            // Assignments can't have propagated, if there are no visible nodes.
            return false;
        }
        long newestAssignment = ImmutableList.copyOf(queries).stream()
                .map(QueryExecution::getMemoryPool)
                .mapToLong(VersionedMemoryPoolId::getVersion)
                .min()
                .orElse(-1);

        long mostOutOfDateNode = nodes.values().stream()
                .mapToLong(RemoteNodeMemory::getCurrentAssignmentVersion)
                .min()
                .orElse(Long.MAX_VALUE);

        return newestAssignment <= mostOutOfDateNode;
    }

    /**
     * Reconciles the node map with the current cluster membership and asynchronously
     * pushes the given assignments to every live node. Called only from process(),
     * i.e. under the instance lock.
     */
    private void updateNodes(MemoryPoolAssignmentsRequest assignments)
    {
        // FIX: parameterize the builder (was raw ImmutableSet.Builder, which forced an
        // unchecked conversion to Set<Node>).
        ImmutableSet.Builder<Node> builder = ImmutableSet.builder();
        Set<Node> aliveNodes = builder
                .addAll(nodeManager.getNodes(ACTIVE))
                .addAll(nodeManager.getNodes(SHUTTING_DOWN))
                .build();

        ImmutableSet<String> aliveNodeIds = aliveNodes.stream()
                .map(Node::getNodeIdentifier)
                .collect(toImmutableSet());

        // Remove nodes that don't exist anymore
        // Make a copy to materialize the set difference
        Set<String> deadNodes = ImmutableSet.copyOf(difference(nodes.keySet(), aliveNodeIds));
        nodes.keySet().removeAll(deadNodes);

        // Add new nodes
        for (Node node : aliveNodes) {
            if (!nodes.containsKey(node.getNodeIdentifier())) {
                nodes.put(node.getNodeIdentifier(), new RemoteNodeMemory(httpClient, memoryInfoCodec, assignmentsRequestJsonCodec, locationFactory.createMemoryInfoLocation(node)));
            }
        }

        // Schedule refresh
        for (RemoteNodeMemory node : nodes.values()) {
            node.asyncRefresh(assignments);
        }
    }

    /**
     * Rebuilds the cluster-wide pool view from the most recent per-node memory info,
     * exporting newly seen pools as MBeans and unexporting vanished ones.
     */
    private synchronized void updatePools(Map<MemoryPoolId, Integer> queryCounts)
    {
        // Update view of cluster memory and pools
        List<MemoryInfo> nodeMemoryInfos = nodes.values().stream()
                .map(RemoteNodeMemory::getInfo)
                .filter(Optional::isPresent)
                .map(Optional::get)
                .collect(toImmutableList());

        long totalClusterMemory = nodeMemoryInfos.stream()
                .map(MemoryInfo::getTotalNodeMemory)
                .mapToLong(DataSize::toBytes)
                .sum();
        clusterMemoryBytes.set(totalClusterMemory);

        Set<MemoryPoolId> activePoolIds = nodeMemoryInfos.stream()
                .flatMap(info -> info.getPools().keySet().stream())
                .collect(toImmutableSet());

        // Make a copy to materialize the set difference
        Set<MemoryPoolId> removedPools = ImmutableSet.copyOf(difference(pools.keySet(), activePoolIds));
        for (MemoryPoolId removed : removedPools) {
            unexport(pools.get(removed));
            pools.remove(removed);
        }
        for (MemoryPoolId id : activePoolIds) {
            ClusterMemoryPool pool = pools.computeIfAbsent(id, poolId -> {
                ClusterMemoryPool newPool = new ClusterMemoryPool(poolId);
                String objectName = ObjectNames.builder(ClusterMemoryPool.class, newPool.getId().toString()).build();
                try {
                    exporter.export(objectName, newPool);
                }
                catch (JmxException e) {
                    log.error(e, "Error exporting memory pool %s", poolId);
                }
                return newPool;
            });
            pool.update(nodeMemoryInfos, queryCounts.getOrDefault(pool.getId(), 0));
        }
    }

    /** Unexports all pool MBeans and clears the pool map on shutdown. */
    @PreDestroy
    public synchronized void destroy()
    {
        for (ClusterMemoryPool pool : pools.values()) {
            unexport(pool);
        }
        pools.clear();
    }

    private void unexport(ClusterMemoryPool pool)
    {
        try {
            String objectName = ObjectNames.builder(ClusterMemoryPool.class, pool.getId().toString()).build();
            exporter.unexport(objectName);
        }
        catch (JmxException e) {
            log.error(e, "Failed to unexport pool %s", pool.getId());
        }
    }

    @Managed
    public long getClusterMemoryUsageBytes()
    {
        return clusterMemoryUsageBytes.get();
    }

    @Managed
    public long getClusterMemoryBytes()
    {
        return clusterMemoryBytes.get();
    }

    @Managed
    public long getQueriesKilledDueToOutOfMemory()
    {
        return queriesKilledDueToOutOfMemory.get();
    }
}
/* * Copyright 2011-2019 B2i Healthcare Pte Ltd, http://b2i.sg * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.b2international.snowowl.snomed.datastore.taxonomy; import java.io.IOException; import java.util.Map; import org.eclipse.emf.cdo.common.id.CDOID; import org.eclipse.emf.cdo.common.revision.delta.CDOFeatureDelta; import org.eclipse.emf.cdo.common.revision.delta.CDORevisionDelta; import org.eclipse.emf.cdo.common.revision.delta.CDOSetFeatureDelta; import org.eclipse.emf.cdo.transaction.CDOTransaction; import org.eclipse.emf.ecore.EObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.b2international.commons.CompareUtils; import com.b2international.index.revision.RevisionSearcher; import com.b2international.snowowl.core.api.SnowowlRuntimeException; import com.b2international.snowowl.datastore.CDOCommitChangeSet; import com.b2international.snowowl.datastore.ICDOCommitChangeSet; import com.b2international.snowowl.datastore.cdo.CDOIDUtils; import com.b2international.snowowl.snomed.Concept; import com.b2international.snowowl.snomed.Relationship; import com.b2international.snowowl.snomed.SnomedConstants.Concepts; import com.b2international.snowowl.snomed.SnomedPackage; import com.b2international.snowowl.snomed.datastore.index.entry.SnomedConceptDocument; import com.b2international.snowowl.snomed.datastore.index.entry.SnomedOWLRelationshipDocument; import com.b2international.snowowl.snomed.datastore.index.entry.SnomedRefSetMemberIndexEntry; 
import com.b2international.snowowl.snomed.datastore.index.entry.SnomedRelationshipIndexEntry;
import com.b2international.snowowl.snomed.datastore.request.SnomedOWLExpressionConverter;
import com.b2international.snowowl.snomed.datastore.request.SnomedOWLExpressionConverterResult;
import com.b2international.snowowl.snomed.snomedrefset.SnomedOWLExpressionRefSetMember;
import com.b2international.snowowl.snomed.snomedrefset.SnomedRefSetPackage;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;

/**
 * Applies the component changes of a single CDO commit (new/dirty/detached
 * concepts, relationships and — for the stated view — OWL axiom members) to a
 * {@link TaxonomyGraph}, then rebuilds the taxonomy.
 *
 * @since 4.7
 */
public class TaxonomyGraphUpdater {

    private static final Logger LOGGER = LoggerFactory.getLogger("repository");

    // The commit change set this updater processes.
    private final ICDOCommitChangeSet commitChangeSet;
    // Which characteristic type (stated vs. inferred) this graph tracks.
    private final String characteristicTypeId;
    // Index searcher used to resolve detached components from their storage keys.
    private final RevisionSearcher searcher;
    // Converts OWL expressions into relationship-like documents (stated view only).
    private final SnomedOWLExpressionConverter expressionConverter;

    /**
     * Convenience constructor that derives the commit change set from a live CDO
     * transaction (new/dirty/detached objects and revision deltas).
     */
    public TaxonomyGraphUpdater(RevisionSearcher searcher, SnomedOWLExpressionConverter expressionConverter, CDOTransaction transaction, String characteristicTypeId) {
        this(searcher, expressionConverter, new CDOCommitChangeSet(transaction, transaction.getSession().getUserID(), transaction.getCommitComment(), transaction.getNewObjects().values(), transaction.getDirtyObjects().values(), Maps.transformValues(transaction.getDetachedObjects(), EObject::eClass), transaction.getRevisionDeltas(), -1L), characteristicTypeId);
    }

    public TaxonomyGraphUpdater(RevisionSearcher searcher, SnomedOWLExpressionConverter expressionConverter, ICDOCommitChangeSet commitChangeSet, String characteristicTypeId) {
        this.searcher = searcher;
        this.expressionConverter = expressionConverter;
        this.commitChangeSet = commitChangeSet;
        this.characteristicTypeId = characteristicTypeId;
    }

    /**
     * Applies the commit change set to {@code graphToUpdate} and rebuilds it.
     *
     * @param graphToUpdate the taxonomy graph to mutate in place
     * @return the status of the rebuilt taxonomy (see {@link TaxonomyGraph#update()})
     */
    public TaxonomyGraphStatus update(final TaxonomyGraph graphToUpdate) {
        LOGGER.trace("Processing changes taxonomic information.");
        //here we have to consider changes triggered by repository state revert
        //this point the following might happen:
        //SNOMED CT concept and/or relationship will be contained by both deleted and new collections
        //with same business (SCT ID) but different primary ID (CDO ID) [this is the way how we handle object resurrection]
        //we decided, to order changes by primary keys. as primary IDs are provided in sequence, one could assume
        //that the larger primary ID happens later, and that is the truth
        //but as deletion always happens later than addition, we only have to take care of deletion
        //so if the deletion is about to erase something that has the same SCT ID but more recent (larger)
        //primary key, we just ignore it when building the taxonomy.
        final Iterable<Concept> newConcepts = commitChangeSet.getNewComponents(Concept.class);
        final Iterable<Concept> dirtyConcepts = commitChangeSet.getDirtyComponents(Concept.class);
        final Iterable<CDOID> deletedConceptStorageKeys = commitChangeSet.getDetachedComponents(SnomedPackage.Literals.CONCEPT);
        final Iterable<Relationship> newRelationships = commitChangeSet.getNewComponents(Relationship.class);
        final Iterable<Relationship> dirtyRelationships = commitChangeSet.getDirtyComponents(Relationship.class);
        final Iterable<CDOID> deletedRelationships = commitChangeSet.getDetachedComponents(SnomedPackage.Literals.RELATIONSHIP);

        //SCT ID - relationships
        final Map<String, Relationship> _newRelationships = Maps.newHashMap(Maps.uniqueIndex(newRelationships, Relationship::getId));

        //SCT ID - concepts
        final Map<String, Concept> _newConcepts = Maps.newHashMap(Maps.uniqueIndex(newConcepts, Concept::getId));

        // Register edges for new and modified relationships first, so resurrected
        // components are present before deletions are weighed against them.
        for (final Relationship newRelationship : newRelationships) {
            updateEdge(newRelationship, graphToUpdate);
        }

        for (final Relationship dirtyRelationship : dirtyRelationships) {
            updateEdge(dirtyRelationship, graphToUpdate);
        }

        // lookup all deleted relationship documents
        final Iterable<SnomedRelationshipIndexEntry> deletedRelationshipEntries;
        try {
            deletedRelationshipEntries = searcher.get(SnomedRelationshipIndexEntry.class, CDOIDUtils.createCdoIdToLong(deletedRelationships));
        } catch (IOException e) {
            throw new SnowowlRuntimeException(e);
        }

        for (final SnomedRelationshipIndexEntry relationship : deletedRelationshipEntries) {
            final String relationshipId = relationship.getId();
            //same relationship as new and detached
            if (_newRelationships.containsKey(relationshipId)) {
                final Relationship newRelationship = _newRelationships.get(relationshipId);
                final String typeId = newRelationship.getType().getId();
                //ignore everything but IS_As
                if (Concepts.IS_A.equals(typeId)) {
                    //check source and destination as well
                    if (relationship.getSourceId().equals(newRelationship.getSource().getId())
                            && relationship.getDestinationId().equals(newRelationship.getDestination().getId())) {
                        //and if the new relationship has more recent (larger CDO ID), ignore deletion
                        if (CDOIDUtils.asLong(newRelationship.cdoID()) > relationship.getStorageKey()) {
                            continue;
                        }
                    }
                }
            }
            graphToUpdate.removeEdge(relationship.getId());
        }

        // OWL axiom members only contribute IS A edges to the stated taxonomy.
        if (Concepts.STATED_RELATIONSHIP.equals(characteristicTypeId)) {
            final Iterable<SnomedOWLExpressionRefSetMember> newOwlMembers = commitChangeSet.getNewComponents(SnomedOWLExpressionRefSetMember.class);
            final Iterable<SnomedOWLExpressionRefSetMember> dirtyOwlMembers = commitChangeSet.getDirtyComponents(SnomedOWLExpressionRefSetMember.class);
            for (SnomedOWLExpressionRefSetMember owlMember : Iterables.concat(newOwlMembers, dirtyOwlMembers)) {
                updateEdge(owlMember, graphToUpdate);
            }

            final Iterable<CDOID> deletedOwlAxioms = commitChangeSet.getDetachedComponents(SnomedRefSetPackage.Literals.SNOMED_OWL_EXPRESSION_REF_SET_MEMBER);
            // look up all deleted owl axioms
            final Iterable<SnomedRefSetMemberIndexEntry> deletedAxiomEntries;
            try {
                deletedAxiomEntries = searcher.get(SnomedRefSetMemberIndexEntry.class, CDOIDUtils.createCdoIdToLong(deletedOwlAxioms));
            } catch (IOException e) {
                throw new SnowowlRuntimeException(e);
            }

            for (final SnomedRefSetMemberIndexEntry detachedOwlMember : deletedAxiomEntries) {
                graphToUpdate.removeEdge(detachedOwlMember.getId());
            }
        }

        for (final Concept newConcept : newConcepts) {
            updateConcept(newConcept, graphToUpdate);
        }

        try {
            final Iterable<SnomedConceptDocument> deletedConcepts = searcher.get(SnomedConceptDocument.class, CDOIDUtils.createCdoIdToLong(deletedConceptStorageKeys));
            for (final SnomedConceptDocument concept : deletedConcepts) {
                //consider the same as for relationship
                //we have to decide if deletion is the 'stronger' modification or not
                final String conceptId = concept.getId();

                //same concept as addition and deletion
                if (_newConcepts.containsKey(conceptId)) {
                    final Concept newConcept = _newConcepts.get(conceptId);
                    //check whether new concept has more recent (larger CDO ID) or not, ignore deletion
                    if (CDOIDUtils.asLong(newConcept.cdoID()) > concept.getStorageKey()) {
                        continue;
                    }
                }

                //else delete it
                graphToUpdate.removeNode(conceptId);
            }
        } catch (IOException e) {
            throw new SnowowlRuntimeException(e);
        }

        // Re-add nodes for concepts whose "active" flag flipped from false to true.
        for (final Concept dirtyConcept : dirtyConcepts) {
            final CDORevisionDelta revisionDelta = commitChangeSet.getRevisionDeltas().get(dirtyConcept.cdoID());
            if (revisionDelta == null) {
                continue;
            }
            final CDOFeatureDelta changeStatusDelta = revisionDelta.getFeatureDelta(SnomedPackage.Literals.COMPONENT__ACTIVE);
            if (changeStatusDelta instanceof CDOSetFeatureDelta) {
                CDOSetFeatureDelta delta = (CDOSetFeatureDelta) changeStatusDelta;
                final Boolean oldValue;
                if (delta.getOldValue() instanceof Boolean) {
                    oldValue = (Boolean) delta.getOldValue();
                } else if (CDOSetFeatureDelta.UNSPECIFIED == delta.getOldValue()) {
                    // UNSPECIFIED means the feature had no previous value; treat as inactive.
                    oldValue = false;
                } else {
                    throw new RuntimeException("Unknown old value type: " + delta.getOldValue());
                }
                final Boolean newValue = (Boolean) delta.getValue();
                if (Boolean.FALSE == oldValue && Boolean.TRUE == newValue) {
                    // make sure the node is part of the new tree
                    graphToUpdate.addNode(dirtyConcept.getId());
                }
            }
        }

        LOGGER.trace("Rebuilding taxonomic information based on the changes.");
        return graphToUpdate.update();
    }

    /**
     * Adds/updates the edges contributed by an OWL axiom member: its IS A
     * destinations when active and convertible, otherwise removes the edge.
     */
    private void updateEdge(SnomedOWLExpressionRefSetMember owlMember, TaxonomyGraph graphToUpdate) {
        if (owlMember.isActive()) {
            SnomedOWLExpressionConverterResult result = expressionConverter.toSnomedOWLRelationships(owlMember.getReferencedComponentId(), owlMember.getOwlExpression());
            if (!CompareUtils.isEmpty(result.getClassAxiomRelationships())) {
                // Only IS A relationships from the class axiom contribute taxonomy edges.
                final long[] destinationIds = result.getClassAxiomRelationships().stream()
                        .filter(r -> Concepts.IS_A.equals(r.getTypeId()))
                        .map(SnomedOWLRelationshipDocument::getDestinationId)
                        .mapToLong(Long::parseLong)
                        .toArray();
                graphToUpdate.addEdge(owlMember.getUuid(), Long.parseLong(owlMember.getReferencedComponentId()), destinationIds);
            } else {
                graphToUpdate.removeEdge(owlMember.getUuid());
            }
        } else {
            graphToUpdate.removeEdge(owlMember.getUuid());
        }
    }

    /** Adds a node for an active concept; inactive concepts are left untouched here. */
    private void updateConcept(Concept concept, TaxonomyGraph graphToUpdate) {
        if (concept.isActive()) {
            graphToUpdate.addNode(concept.getId());
        }
    }

    /**
     * Adds an edge for an active IS A relationship of the tracked characteristic
     * type; removes the edge when the relationship is inactive.
     */
    private void updateEdge(final Relationship relationship, TaxonomyGraph graphToUpdate) {
        if (!relationship.isActive()) {
            graphToUpdate.removeEdge(relationship.getId());
        } else if (Concepts.IS_A.equals(relationship.getType().getId()) && characteristicTypeId.equals(relationship.getCharacteristicType().getId())) {
            graphToUpdate.addEdge(
                    relationship.getId(),
                    Long.parseLong(relationship.getSource().getId()),
                    new long[] { Long.parseLong(relationship.getDestination().getId()) }
            );
        }
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.eventbridge.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result object for the ListReplays API call, carrying the matched replays and
 * an optional pagination token.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/eventbridge-2015-10-07/ListReplays" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListReplaysResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * An array of <code>Replay</code> objects that contain information about the replay.
     * </p>
     */
    private java.util.List<Replay> replays;
    /**
     * <p>
     * The token returned by a previous call to retrieve the next set of results.
     * </p>
     */
    private String nextToken;

    /**
     * @return An array of <code>Replay</code> objects that contain information about the replay, or {@code null}.
     */
    public java.util.List<Replay> getReplays() {
        return this.replays;
    }

    /**
     * Replaces the stored replay list with a defensive copy of the given collection.
     *
     * @param replays
     *        An array of <code>Replay</code> objects that contain information about the replay; {@code null} clears
     *        the list.
     */
    public void setReplays(java.util.Collection<Replay> replays) {
        this.replays = (replays == null) ? null : new java.util.ArrayList<Replay>(replays);
    }

    /**
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setReplays(java.util.Collection)} or {@link #withReplays(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param replays
     *        An array of <code>Replay</code> objects that contain information about the replay.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListReplaysResult withReplays(Replay... replays) {
        if (this.replays == null) {
            this.replays = new java.util.ArrayList<Replay>(replays.length);
        }
        java.util.Collections.addAll(this.replays, replays);
        return this;
    }

    /**
     * Fluent variant of {@link #setReplays(java.util.Collection)}.
     *
     * @param replays
     *        An array of <code>Replay</code> objects that contain information about the replay.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListReplaysResult withReplays(java.util.Collection<Replay> replays) {
        setReplays(replays);
        return this;
    }

    /**
     * @param nextToken
     *        The token returned by a previous call to retrieve the next set of results.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * @return The token returned by a previous call to retrieve the next set of results.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken
     *        The token returned by a previous call to retrieve the next set of results.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListReplaysResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getReplays() != null) {
            sb.append("Replays: ").append(getReplays()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so this also covers the null case.
        if (!(obj instanceof ListReplaysResult)) {
            return false;
        }
        ListReplaysResult other = (ListReplaysResult) obj;
        if ((this.getReplays() == null) != (other.getReplays() == null)) {
            return false;
        }
        if (this.getReplays() != null && !this.getReplays().equals(other.getReplays())) {
            return false;
        }
        if ((this.getNextToken() == null) != (other.getNextToken() == null)) {
            return false;
        }
        if (this.getNextToken() != null && !this.getNextToken().equals(other.getNextToken())) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        int result = 1;
        result = 31 * result + ((getReplays() == null) ? 0 : getReplays().hashCode());
        result = 31 * result + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return result;
    }

    @Override
    public ListReplaysResult clone() {
        try {
            return (ListReplaysResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

}
package org.bouncycastle.jce.provider.test;

import java.math.BigInteger;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.security.Security;
import java.security.Signature;
import java.security.spec.RSAPrivateCrtKeySpec;
import java.security.spec.RSAPublicKeySpec;
import java.util.Hashtable;
import java.util.Vector;

import org.bouncycastle.asn1.DERObjectIdentifier;
import org.bouncycastle.asn1.DEROctetString;
import org.bouncycastle.asn1.DERSet;
import org.bouncycastle.asn1.cryptopro.CryptoProObjectIdentifiers;
import org.bouncycastle.asn1.pkcs.Attribute;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.asn1.x509.BasicConstraints;
import org.bouncycastle.asn1.x509.KeyUsage;
import org.bouncycastle.asn1.x509.SubjectKeyIdentifier;
import org.bouncycastle.asn1.x509.X509Extension;
import org.bouncycastle.asn1.x509.X509Extensions;
import org.bouncycastle.asn1.x509.X509Name;
import org.bouncycastle.asn1.x9.X9ObjectIdentifiers;
import org.bouncycastle.jce.ECGOST3410NamedCurveTable;
import org.bouncycastle.jce.ECNamedCurveTable;
import org.bouncycastle.jce.PKCS10CertificationRequest;
import org.bouncycastle.jce.X509Principal;
import org.bouncycastle.jce.interfaces.ECPointEncoder;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.jce.spec.ECNamedCurveParameterSpec;
import org.bouncycastle.jce.spec.ECParameterSpec;
import org.bouncycastle.jce.spec.ECPrivateKeySpec;
import org.bouncycastle.jce.spec.ECPublicKeySpec;
import org.bouncycastle.math.ec.ECCurve;
import org.bouncycastle.util.encoders.Base64;
import org.bouncycastle.util.encoders.Hex;
import org.bouncycastle.util.test.SimpleTest;
import org.bouncycastle.x509.extension.SubjectKeyIdentifierStructure;

/**
 * Round-trip tests for {@link PKCS10CertificationRequest}: generation,
 * encode/decode, signature verification and signature-algorithm OID checks for
 * various key types (RSA/PSS, EC, ECGOST3410).
 */
public class PKCS10CertRequestTest
    extends SimpleTest
{
    // Pre-encoded GOST R 34.10-2001 (EC) PKCS#10 requests for the CryptoPro A/B/C
    // and "exchange" XchA/XchB parameter sets, used as decode/verify fixtures.
    private byte[] gost3410EC_A = Base64.decode(
        "MIIBOzCB6wIBADB/MQ0wCwYDVQQDEwR0ZXN0MRUwEwYDVQQKEwxEZW1vcyBDbyBMdGQxHjAcBgNV"
       +"BAsTFUNyeXB0b2dyYXBoeSBkaXZpc2lvbjEPMA0GA1UEBxMGTW9zY293MQswCQYDVQQGEwJydTEZ"
       +"MBcGCSqGSIb3DQEJARYKc2RiQGRvbC5ydTBjMBwGBiqFAwICEzASBgcqhQMCAiMBBgcqhQMCAh4B"
       +"A0MABEBYx0P2D7YuuZo5HgdIAUKAXcLBDZ+4LYFgbKjrfStVfH59lc40BQ2FZ7M703hLpXK8GiBQ"
       +"GEYpKaAuQZnMIpByoAAwCAYGKoUDAgIDA0EAgXMcTrhdOY2Er2tHOSAgnMezqrYxocZTWhxmW5Rl"
       +"JY6lbXH5rndCn4swFzXU+YhgAsJv1wQBaoZEWRl5WV4/nA==");

    private byte[] gost3410EC_B = Base64.decode(
        "MIIBPTCB7QIBADCBgDENMAsGA1UEAxMEdGVzdDEWMBQGA1UEChMNRGVtb3MgQ28gTHRkLjEeMBwG"
       +"A1UECxMVQ3J5cHRvZ3JhcGh5IGRpdmlzaW9uMQ8wDQYDVQQHEwZNb3Njb3cxCzAJBgNVBAYTAnJ1"
       +"MRkwFwYJKoZIhvcNAQkBFgpzZGJAZG9sLnJ1MGMwHAYGKoUDAgITMBIGByqFAwICIwIGByqFAwIC"
       +"HgEDQwAEQI5SLoWT7dZVilbV9j5B/fyIDuDs6x4pjqNC2TtFYbpRHrk/Wc5g/mcHvD80tsm5o1C7"
       +"7cizNzkvAVUM4VT4Dz6gADAIBgYqhQMCAgMDQQAoT5TwJ8o+bSrxckymyo3diwG7ZbSytX4sRiKy"
       +"wXPWRS9LlBvPO2NqwpS2HUnxSU8rzfL9fJcybATf7Yt1OEVq");

    private byte[] gost3410EC_C = Base64.decode(
        "MIIBRDCB9AIBADCBhzEVMBMGA1UEAxMMdGVzdCByZXF1ZXN0MRUwEwYDVQQKEwxEZW1vcyBDbyBM"
       +"dGQxHjAcBgNVBAsTFUNyeXB0b2dyYXBoeSBkaXZpc2lvbjEPMA0GA1UEBxMGTW9zY293MQswCQYD"
       +"VQQGEwJydTEZMBcGCSqGSIb3DQEJARYKc2RiQGRvbC5ydTBjMBwGBiqFAwICEzASBgcqhQMCAiMD"
       +"BgcqhQMCAh4BA0MABEBcmGh7OmR4iqqj+ycYo1S1fS7r5PhisSQU2Ezuz8wmmmR2zeTZkdMYCOBa"
       +"UTMNms0msW3wuYDho7nTDNscHTB5oAAwCAYGKoUDAgIDA0EAVoOMbfyo1Un4Ss7WQrUjHJoiaYW8"
       +"Ime5LeGGU2iW3ieAv6es/FdMrwTKkqn5dhd3aL/itFg5oQbhyfXw5yw/QQ==");

    private byte[] gost3410EC_ExA = Base64.decode(
         "MIIBOzCB6wIBADB/MQ0wCwYDVQQDEwR0ZXN0MRUwEwYDVQQKEwxEZW1vcyBDbyBMdGQxHjAcBgNV"
       + "BAsTFUNyeXB0b2dyYXBoeSBkaXZpc2lvbjEPMA0GA1UEBxMGTW9zY293MQswCQYDVQQGEwJydTEZ"
       + "MBcGCSqGSIb3DQEJARYKc2RiQGRvbC5ydTBjMBwGBiqFAwICEzASBgcqhQMCAiQABgcqhQMCAh4B"
       + "A0MABEDkqNT/3f8NHj6EUiWnK4JbVZBh31bEpkwq9z3jf0u8ZndG56Vt+K1ZB6EpFxLT7hSIos0w"
       + "weZ2YuTZ4w43OgodoAAwCAYGKoUDAgIDA0EASk/IUXWxoi6NtcUGVF23VRV1L3undB4sRZLp4Vho"
       + "gQ7m3CMbZFfJ2cPu6QyarseXGYHmazoirH5lGjEo535c1g==");

    private byte[] gost3410EC_ExB = Base64.decode(
         "MIIBPTCB7QIBADCBgDENMAsGA1UEAxMEdGVzdDEWMBQGA1UEChMNRGVtb3MgQ28gTHRkLjEeMBwG"
       + "A1UECxMVQ3J5cHRvZ3JhcGh5IGRpdmlzaW9uMQ8wDQYDVQQHEwZNb3Njb3cxCzAJBgNVBAYTAnJ1"
       + "MRkwFwYJKoZIhvcNAQkBFgpzZGJAZG9sLnJ1MGMwHAYGKoUDAgITMBIGByqFAwICJAEGByqFAwIC"
       + "HgEDQwAEQMBWYUKPy/1Kxad9ChAmgoSWSYOQxRnXo7KEGLU5RNSXA4qMUvArWzvhav+EYUfTbWLh"
       + "09nELDyHt2XQcvgQHnSgADAIBgYqhQMCAgMDQQAdaNhgH/ElHp64mbMaEo1tPCg9Q22McxpH8rCz"
       + "E0QBpF4H5mSSQVGI5OAXHToetnNuh7gHHSynyCupYDEHTbkZ");

    public String getName()
    {
        return "PKCS10CertRequest";
    }

    /**
     * Generates a fresh key pair of the given type/size, builds and signs a
     * request with a fixed subject, then re-parses the encoding and checks both
     * the signature and the round-tripped public key.
     */
    private void generationTest(int keySize, String keyName, String sigName, String provider)
        throws Exception
    {
        KeyPairGenerator kpg = KeyPairGenerator.getInstance(keyName, "BC");

        kpg.initialize(keySize);

        KeyPair kp = kpg.genKeyPair();

        // Build the subject DN with an explicit RDN order.
        Hashtable attrs = new Hashtable();

        attrs.put(X509Principal.C, "AU");
        attrs.put(X509Principal.O, "The Legion of the Bouncy Castle");
        attrs.put(X509Principal.L, "Melbourne");
        attrs.put(X509Principal.ST, "Victoria");
        attrs.put(X509Principal.EmailAddress, "feedback-crypto@bouncycastle.org");

        Vector order = new Vector();

        order.addElement(X509Principal.C);
        order.addElement(X509Principal.O);
        order.addElement(X509Principal.L);
        order.addElement(X509Principal.ST);
        order.addElement(X509Principal.EmailAddress);

        X509Name subject = new X509Name(order, attrs);

        PKCS10CertificationRequest req1 = new PKCS10CertificationRequest(
                                                    sigName,
                                                    subject,
                                                    kp.getPublic(),
                                                    null,
                                                    kp.getPrivate(), provider);

        // Round-trip through the DER encoding.
        byte[] bytes = req1.getEncoded();

        PKCS10CertificationRequest req2 = new PKCS10CertificationRequest(bytes);

        if (!req2.verify(provider))
        {
            fail(sigName + ": Failed verify check.");
        }

        if (!req2.getPublicKey(provider).equals(req1.getPublicKey(provider)))
        {
            fail(keyName + ": Failed public key check.");
        }
    }

    /*
     * we generate a self signed certificate for the sake of testing - SHA224withECDSA
     */
    // Named-curve variant: checks signing/verification both with compressed (the
    // default) and uncompressed point encoding, and validates the signature OID
    // and absent parameters.
    private void createECRequest(String algorithm, DERObjectIdentifier algOid, DERObjectIdentifier curveOid)
        throws Exception
    {
        ECNamedCurveParameterSpec spec = ECNamedCurveTable.getParameterSpec(curveOid.getId());
        KeyPairGenerator ecGen = KeyPairGenerator.getInstance("ECDSA", "BC");
        ecGen.initialize(spec);

        //
        // set up the keys
        //
        PrivateKey privKey;
        PublicKey pubKey;

        KeyPair pair = ecGen.generateKeyPair();

        privKey = pair.getPrivate();
        pubKey = pair.getPublic();

        PKCS10CertificationRequest req = new PKCS10CertificationRequest(
                        algorithm, new X509Name("CN=XXX"), pubKey, null, privKey);
        if (!req.verify())
        {
            fail("Failed verify check EC.");
        }

        req = new PKCS10CertificationRequest(req.getEncoded());
        if (!req.verify())
        {
            fail("Failed verify check EC encoded.");
        }

        //
        // try with point compression turned off
        //
        ((ECPointEncoder)pubKey).setPointFormat("UNCOMPRESSED");

        req = new PKCS10CertificationRequest(
                        algorithm, new X509Name("CN=XXX"), pubKey, null, privKey);
        if (!req.verify())
        {
            fail("Failed verify check EC uncompressed.");
        }

        req = new PKCS10CertificationRequest(req.getEncoded());
        if (!req.verify())
        {
            fail("Failed verify check EC uncompressed encoded.");
        }

        if (!req.getSignatureAlgorithm().getObjectId().equals(algOid))
        {
            fail("ECDSA oid incorrect.");
        }

        if (req.getSignatureAlgorithm().getParameters() != null)
        {
            fail("ECDSA parameters incorrect.");
        }

        // Verify the raw signature against the CertificationRequestInfo directly.
        Signature sig = Signature.getInstance(algorithm, "BC");

        sig.initVerify(pubKey);
        sig.update(req.getCertificationRequestInfo().getEncoded());

        if (!sig.verify(req.getSignature().getBytes()))
        {
            fail("signature not mapped correctly.");
        }
    }

    // Explicit-parameter variant: builds a fixed prime-field curve and key pair
    // from hard-coded constants (presumably a P-521-class curve — the constants
    // are taken as given), then runs the same sign/encode/verify checks.
    private void createECRequest(String algorithm, DERObjectIdentifier algOid)
        throws Exception
    {
        ECCurve.Fp curve = new ECCurve.Fp(
            new BigInteger("6864797660130609714981900799081393217269435300143305409394463459185543183397656052122559640661454554977296311391480858037121987999716643812574028291115057151"), // q (or p)
            new BigInteger("01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC", 16), // a
            new BigInteger("0051953EB9618E1C9A1F929A21A0B68540EEA2DA725B99B315F3B8B489918EF109E156193951EC7E937B1652C0BD3BB1BF073573DF883D2C34F1EF451FD46B503F00", 16)); // b

        ECParameterSpec spec = new ECParameterSpec(
            curve,
            curve.decodePoint(Hex.decode("0200C6858E06B70404E9CD9E3ECB662395B4429C648139053FB521F828AF606B4D3DBAA14B5E77EFE75928FE1DC127A2FFA8DE3348B3C1856A429BF97E7E31C2E5BD66")), // G
            new BigInteger("01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFA51868783BF2F966B7FCC0148F709A5D03BB5C9B8899C47AEBB6FB71E91386409", 16)); // n

        ECPrivateKeySpec privKeySpec = new ECPrivateKeySpec(
            new BigInteger("5769183828869504557786041598510887460263120754767955773309066354712783118202294874205844512909370791582896372147797293913785865682804434049019366394746072023"), // d
            spec);

        ECPublicKeySpec pubKeySpec = new ECPublicKeySpec(
            curve.decodePoint(Hex.decode("02006BFDD2C9278B63C92D6624F151C9D7A822CC75BD983B17D25D74C26740380022D3D8FAF304781E416175EADF4ED6E2B47142D2454A7AC7801DD803CF44A4D1F0AC")), // Q
            spec);

        //
        // set up the keys
        //
        PrivateKey privKey;
        PublicKey pubKey;

        KeyFactory fact = KeyFactory.getInstance("ECDSA", "BC");

        privKey = fact.generatePrivate(privKeySpec);
        pubKey = fact.generatePublic(pubKeySpec);

        PKCS10CertificationRequest req = new PKCS10CertificationRequest(
                        algorithm, new X509Name("CN=XXX"), pubKey, null, privKey);
        if (!req.verify())
        {
            fail("Failed verify check EC.");
        }

        req = new PKCS10CertificationRequest(req.getEncoded());
        if (!req.verify())
        {
            fail("Failed verify check EC encoded.");
        }

        //
        // try with point compression turned off
        //
        ((ECPointEncoder)pubKey).setPointFormat("UNCOMPRESSED");

        req = new PKCS10CertificationRequest(
                        algorithm, new X509Name("CN=XXX"), pubKey, null, privKey);
        if (!req.verify())
        {
            fail("Failed verify check EC uncompressed.");
        }

        req = new PKCS10CertificationRequest(req.getEncoded());
        if (!req.verify())
        {
            fail("Failed verify check EC uncompressed encoded.");
        }

        if (!req.getSignatureAlgorithm().getObjectId().equals(algOid))
        {
            fail("ECDSA oid incorrect.");
        }

        if (req.getSignatureAlgorithm().getParameters() != null)
        {
            fail("ECDSA parameters incorrect.");
        }

        // Verify the raw signature against the CertificationRequestInfo directly.
        Signature sig = Signature.getInstance(algorithm, "BC");

        sig.initVerify(pubKey);
        sig.update(req.getCertificationRequestInfo().getEncoded());

        if (!sig.verify(req.getSignature().getBytes()))
        {
            fail("signature not mapped correctly.");
        }
    }

    /**
     * Generates an ECGOST3410 key pair on the GostR3410-2001-CryptoPro-A curve
     * and checks signing, re-encoding, the GOST signature OID and the raw
     * signature mapping.
     */
    private void createECGOSTRequest()
        throws Exception
    {
        String algorithm = "GOST3411withECGOST3410";
        KeyPairGenerator ecGostKpg = KeyPairGenerator.getInstance("ECGOST3410", "BC");

        ecGostKpg.initialize(ECGOST3410NamedCurveTable.getParameterSpec("GostR3410-2001-CryptoPro-A"), new SecureRandom());

        //
        // set up the keys
        //
        KeyPair pair = ecGostKpg.generateKeyPair();
        PrivateKey privKey = pair.getPrivate();
        PublicKey pubKey = pair.getPublic();

        PKCS10CertificationRequest req = new PKCS10CertificationRequest(
                        algorithm, new X509Name("CN=XXX"), pubKey, null, privKey);
        if (!req.verify())
        {
            fail("Failed verify check EC.");
        }

        req = new PKCS10CertificationRequest(req.getEncoded());
        if (!req.verify())
        {
            fail("Failed verify check EC encoded.");
        }

        if (!req.getSignatureAlgorithm().getObjectId().equals(CryptoProObjectIdentifiers.gostR3411_94_with_gostR3410_2001))
        {
            fail("ECGOST oid incorrect.");
        }

        if (req.getSignatureAlgorithm().getParameters() != null)
        {
            fail("ECGOST parameters incorrect.");
        }

        Signature sig = Signature.getInstance(algorithm, "BC");

        sig.initVerify(pubKey);
        sig.update(req.getCertificationRequestInfo().getEncoded());

        if (!sig.verify(req.getSignature().getBytes()))
        {
            fail("signature not mapped correctly.");
        }
    }

    // RSA-PSS variant using a fixed 1024-bit CRT key pair built from hard-coded
    // constants. NOTE(review): this method is truncated in the visible chunk —
    // it continues past the end of this source excerpt.
    private void createPSSTest(String algorithm)
        throws Exception
    {
        RSAPublicKeySpec pubKeySpec = new RSAPublicKeySpec(
            new BigInteger("a56e4a0e701017589a5187dc7ea841d156f2ec0e36ad52a44dfeb1e61f7ad991d8c51056ffedb162b4c0f283a12a88a394dff526ab7291cbb307ceabfce0b1dfd5cd9508096d5b2b8b6df5d671ef6377c0921cb23c270a70e2598e6ff89d19f105acc2d3f0cb35f29280e1386b6f64c4ef22e1e1f20d0ce8cffb2249bd9a2137",16),
            new BigInteger("010001",16));

        RSAPrivateCrtKeySpec privKeySpec = new RSAPrivateCrtKeySpec(
            new BigInteger("a56e4a0e701017589a5187dc7ea841d156f2ec0e36ad52a44dfeb1e61f7ad991d8c51056ffedb162b4c0f283a12a88a394dff526ab7291cbb307ceabfce0b1dfd5cd9508096d5b2b8b6df5d671ef6377c0921cb23c270a70e2598e6ff89d19f105acc2d3f0cb35f29280e1386b6f64c4ef22e1e1f20d0ce8cffb2249bd9a2137",16),
            new BigInteger("010001",16),
            new BigInteger("33a5042a90b27d4f5451ca9bbbd0b44771a101af884340aef9885f2a4bbe92e894a724ac3c568c8f97853ad07c0266c8c6a3ca0929f1e8f11231884429fc4d9ae55fee896a10ce707c3ed7e734e44727a39574501a532683109c2abacaba283c31b4bd2f53c3ee37e352cee34f9e503bd80c0622ad79c6dcee883547c6a3b325",16),
            new BigInteger("e7e8942720a877517273a356053ea2a1bc0c94aa72d55c6e86296b2dfc967948c0a72cbccca7eacb35706e09a1df55a1535bd9b3cc34160b3b6dcd3eda8e6443",16),
            new BigInteger("b69dca1cf7d4d7ec81e75b90fcca874abcde123fd2700180aa90479b6e48de8d67ed24f9f19d85ba275874f542cd20dc723e6963364a1f9425452b269a6799fd",16),
            new BigInteger("28fa13938655be1f8a159cbaca5a72ea190c30089e19cd274a556f36c4f6e19f554b34c077790427bbdd8dd3ede2448328f385d81b30e8e43b2fffa027861979",16),
            new BigInteger("1a8b38f398fa712049898d7fb79ee0a77668791299cdfa09efc0e507acb21ed74301ef5bfd48be455eaeb6e1678255827580a8e4e8e14151d1510a82a3f2e729",16),
            new BigInteger("27156aba4126d24a81f3a528cbfb27f56886f840a9f6e86e17a44b94fe9319584b8e22fdde1e5a2e3bd8aa5ba8d8584194eb2190acf832b847f13a3d24a79f4d",16));

        KeyFactory fact = KeyFactory.getInstance("RSA", "BC");

        PrivateKey privKey = fact.generatePrivate(privKeySpec);
        PublicKey pubKey = fact.generatePublic(pubKeySpec);

        PKCS10CertificationRequest req = new PKCS10CertificationRequest(
                        algorithm, new X509Name("CN=XXX"), pubKey, null, privKey);
        if (!req.verify())
        {
            fail("Failed verify check PSS.");
        }

        req = new PKCS10CertificationRequest(req.getEncoded());
        if (!req.verify())
        {
            fail("Failed verify check PSS encoded.");
        }

        if (!req.getSignatureAlgorithm().getObjectId().equals(PKCSObjectIdentifiers.id_RSASSA_PSS))
        {
            fail("PSS 
oid incorrect."); } if (req.getSignatureAlgorithm().getParameters() == null) { fail("PSS parameters incorrect."); } Signature sig = Signature.getInstance(algorithm, "BC"); sig.initVerify(pubKey); sig.update(req.getCertificationRequestInfo().getEncoded()); if (!sig.verify(req.getSignature().getBytes())) { fail("signature not mapped correctly."); } } // previous code found to cause a NullPointerException private void nullPointerTest() throws Exception { KeyPairGenerator keyGen = KeyPairGenerator.getInstance("RSA", "BC"); keyGen.initialize(1024, new SecureRandom()); KeyPair pair = keyGen.generateKeyPair(); Vector oids = new Vector(); Vector values = new Vector(); oids.add(X509Extensions.BasicConstraints); values.add(new X509Extension(true, new DEROctetString(new BasicConstraints(true)))); oids.add(X509Extensions.KeyUsage); values.add(new X509Extension(true, new DEROctetString( new KeyUsage(KeyUsage.keyCertSign | KeyUsage.cRLSign)))); SubjectKeyIdentifier subjectKeyIdentifier = new SubjectKeyIdentifierStructure(pair.getPublic()); X509Extension ski = new X509Extension(false, new DEROctetString(subjectKeyIdentifier)); oids.add(X509Extensions.SubjectKeyIdentifier); values.add(ski); Attribute attribute = new Attribute(PKCSObjectIdentifiers.pkcs_9_at_extensionRequest, new DERSet(new X509Extensions(oids, values))); PKCS10CertificationRequest p1 = new PKCS10CertificationRequest( "SHA1WithRSA", new X509Principal("cn=csr"), pair.getPublic(), new DERSet(attribute), pair.getPrivate(), "BC"); PKCS10CertificationRequest p2 = new PKCS10CertificationRequest( "SHA1WithRSA", new X509Principal("cn=csr"), pair.getPublic(), new DERSet(attribute), pair.getPrivate(), "BC"); if (!p1.equals(p2)) { fail("cert request comparison failed"); } } public void performTest() throws Exception { generationTest(512, "RSA", "SHA1withRSA", "BC"); generationTest(512, "GOST3410", "GOST3411withGOST3410", "BC"); if (Security.getProvider("SunRsaSign") != null) { generationTest(512, "RSA", "SHA1withRSA", 
"SunRsaSign"); } // elliptic curve GOST A parameter set PKCS10CertificationRequest req = new PKCS10CertificationRequest(gost3410EC_A); if (!req.verify()) { fail("Failed verify check gost3410EC_A."); } // elliptic curve GOST B parameter set req = new PKCS10CertificationRequest(gost3410EC_B); if (!req.verify()) { fail("Failed verify check gost3410EC_B."); } // elliptic curve GOST C parameter set req = new PKCS10CertificationRequest(gost3410EC_C); if (!req.verify()) { fail("Failed verify check gost3410EC_C."); } // elliptic curve GOST ExA parameter set req = new PKCS10CertificationRequest(gost3410EC_ExA); if (!req.verify()) { fail("Failed verify check gost3410EC_ExA."); } // elliptic curve GOST ExB parameter set req = new PKCS10CertificationRequest(gost3410EC_ExB); if (!req.verify()) { fail("Failed verify check gost3410EC_ExA."); } // elliptic curve openSSL KeyPairGenerator g = KeyPairGenerator.getInstance("ECDSA", "BC"); ECCurve curve = new ECCurve.Fp( new BigInteger("883423532389192164791648750360308885314476597252960362792450860609699839"), // q new BigInteger("7fffffffffffffffffffffff7fffffffffff8000000000007ffffffffffc", 16), // a new BigInteger("6b016c3bdcf18941d0d654921475ca71a9db2fb27d1d37796185c2942c0a", 16)); // b ECParameterSpec ecSpec = new ECParameterSpec( curve, curve.decodePoint(Hex.decode("020ffa963cdca8816ccc33b8642bedf905c3d358573d3f27fbbd3b3cb9aaaf")), // G new BigInteger("883423532389192164791648750360308884807550341691627752275345424702807307")); // n g.initialize(ecSpec, new SecureRandom()); KeyPair kp = g.generateKeyPair(); req = new PKCS10CertificationRequest( "ECDSAWITHSHA1", new X509Name("CN=XXX"), kp.getPublic(), null, kp.getPrivate()); if (!req.verify()) { fail("Failed verify check EC."); } createECRequest("SHA1withECDSA", X9ObjectIdentifiers.ecdsa_with_SHA1); createECRequest("SHA224withECDSA", X9ObjectIdentifiers.ecdsa_with_SHA224); createECRequest("SHA256withECDSA", X9ObjectIdentifiers.ecdsa_with_SHA256); 
createECRequest("SHA384withECDSA", X9ObjectIdentifiers.ecdsa_with_SHA384); createECRequest("SHA512withECDSA", X9ObjectIdentifiers.ecdsa_with_SHA512); createECRequest("SHA1withECDSA", X9ObjectIdentifiers.ecdsa_with_SHA1, new DERObjectIdentifier("1.3.132.0.34")); createECGOSTRequest(); createPSSTest("SHA1withRSAandMGF1"); createPSSTest("SHA224withRSAandMGF1"); createPSSTest("SHA256withRSAandMGF1"); createPSSTest("SHA384withRSAandMGF1"); nullPointerTest(); } public static void main( String[] args) { Security.addProvider(new BouncyCastleProvider()); runTest(new PKCS10CertRequestTest()); } }
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.handler.logging; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufHolder; import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelHandler; import io.netty.channel.ChannelHandler.Sharable; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPromise; import io.netty.util.internal.logging.InternalLogLevel; import io.netty.util.internal.logging.InternalLogger; import io.netty.util.internal.logging.InternalLoggerFactory; import java.net.SocketAddress; import static io.netty.buffer.ByteBufUtil.appendPrettyHexDump; import static io.netty.util.internal.StringUtil.NEWLINE; /** * A {@link ChannelHandler} that logs all events using a logging framework. * By default, all events are logged at <tt>DEBUG</tt> level. */ @Sharable public class LoggingHandler extends ChannelDuplexHandler { private static final LogLevel DEFAULT_LEVEL = LogLevel.DEBUG; protected final InternalLogger logger; protected final InternalLogLevel internalLevel; private final LogLevel level; /** * Creates a new instance whose logger name is the fully qualified class * name of the instance with hex dump enabled. */ public LoggingHandler() { this(DEFAULT_LEVEL); } /** * Creates a new instance whose logger name is the fully qualified class * name of the instance. 
* * @param level the log level */ public LoggingHandler(LogLevel level) { if (level == null) { throw new NullPointerException("level"); } logger = InternalLoggerFactory.getInstance(getClass()); this.level = level; internalLevel = level.toInternalLevel(); } /** * Creates a new instance with the specified logger name and with hex dump * enabled. */ public LoggingHandler(Class<?> clazz) { this(clazz, DEFAULT_LEVEL); } /** * Creates a new instance with the specified logger name. * * @param level the log level */ public LoggingHandler(Class<?> clazz, LogLevel level) { if (clazz == null) { throw new NullPointerException("clazz"); } if (level == null) { throw new NullPointerException("level"); } logger = InternalLoggerFactory.getInstance(clazz); this.level = level; internalLevel = level.toInternalLevel(); } /** * Creates a new instance with the specified logger name. */ public LoggingHandler(String name) { this(name, DEFAULT_LEVEL); } /** * Creates a new instance with the specified logger name. 
* * @param level the log level */ public LoggingHandler(String name, LogLevel level) { if (name == null) { throw new NullPointerException("name"); } if (level == null) { throw new NullPointerException("level"); } logger = InternalLoggerFactory.getInstance(name); this.level = level; internalLevel = level.toInternalLevel(); } /** * Returns the {@link LogLevel} that this handler uses to log */ public LogLevel level() { return level; } protected String format(ChannelHandlerContext ctx, String message) { String chStr = ctx.channel().toString(); return new StringBuilder(chStr.length() + message.length() + 1) .append(chStr) .append(' ') .append(message) .toString(); } @Override public void channelRegistered(ChannelHandlerContext ctx) throws Exception { if (logger.isEnabled(internalLevel)) { logger.log(internalLevel, format(ctx, "REGISTERED")); } super.channelRegistered(ctx); } @Override public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { if (logger.isEnabled(internalLevel)) { logger.log(internalLevel, format(ctx, "UNREGISTERED")); } super.channelUnregistered(ctx); } @Override public void channelActive(ChannelHandlerContext ctx) throws Exception { if (logger.isEnabled(internalLevel)) { logger.log(internalLevel, format(ctx, "ACTIVE")); } super.channelActive(ctx); } @Override public void channelInactive(ChannelHandlerContext ctx) throws Exception { if (logger.isEnabled(internalLevel)) { logger.log(internalLevel, format(ctx, "INACTIVE")); } super.channelInactive(ctx); } @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { if (logger.isEnabled(internalLevel)) { logger.log(internalLevel, format(ctx, "EXCEPTION: " + cause), cause); } super.exceptionCaught(ctx, cause); } @Override public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { if (logger.isEnabled(internalLevel)) { logger.log(internalLevel, format(ctx, "USER_EVENT: " + evt)); } super.userEventTriggered(ctx, evt); } 
@Override public void bind(ChannelHandlerContext ctx, SocketAddress localAddress, ChannelPromise promise) throws Exception { if (logger.isEnabled(internalLevel)) { logger.log(internalLevel, format(ctx, "BIND(" + localAddress + ')')); } super.bind(ctx, localAddress, promise); } @Override public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) throws Exception { if (logger.isEnabled(internalLevel)) { logger.log(internalLevel, format(ctx, "CONNECT(" + remoteAddress + ", " + localAddress + ')')); } super.connect(ctx, remoteAddress, localAddress, promise); } @Override public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { if (logger.isEnabled(internalLevel)) { logger.log(internalLevel, format(ctx, "DISCONNECT()")); } super.disconnect(ctx, promise); } @Override public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { if (logger.isEnabled(internalLevel)) { logger.log(internalLevel, format(ctx, "CLOSE()")); } super.close(ctx, promise); } @Override public void deregister(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { if (logger.isEnabled(internalLevel)) { logger.log(internalLevel, format(ctx, "DEREGISTER()")); } super.deregister(ctx, promise); } @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { logMessage(ctx, "RECEIVED", msg); ctx.fireChannelRead(msg); } @Override public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { logMessage(ctx, "WRITE", msg); ctx.write(msg, promise); } @Override public void flush(ChannelHandlerContext ctx) throws Exception { if (logger.isEnabled(internalLevel)) { logger.log(internalLevel, format(ctx, "FLUSH")); } ctx.flush(); } private void logMessage(ChannelHandlerContext ctx, String eventName, Object msg) { if (logger.isEnabled(internalLevel)) { logger.log(internalLevel, format(ctx, 
formatMessage(eventName, msg))); } } protected String formatMessage(String eventName, Object msg) { if (msg instanceof ByteBuf) { return formatByteBuf(eventName, (ByteBuf) msg); } else if (msg instanceof ByteBufHolder) { return formatByteBufHolder(eventName, (ByteBufHolder) msg); } else { return formatNonByteBuf(eventName, msg); } } /** * Returns a String which contains all details to log the {@link ByteBuf} */ protected String formatByteBuf(String eventName, ByteBuf msg) { int length = msg.readableBytes(); if (length == 0) { StringBuilder buf = new StringBuilder(eventName.length() + 4); buf.append(eventName).append(": 0B"); return buf.toString(); } else { int rows = length / 16 + (length % 15 == 0? 0 : 1) + 4; StringBuilder buf = new StringBuilder(eventName.length() + 2 + 10 + 1 + 2 + rows * 80); buf.append(eventName).append(": ").append(length).append('B').append(NEWLINE); appendPrettyHexDump(buf, msg); return buf.toString(); } } /** * Returns a String which contains all details to log the {@link Object} */ protected String formatNonByteBuf(String eventName, Object msg) { return eventName + ": " + msg; } /** * Returns a String which contains all details to log the {@link ByteBufHolder}. * * By default this method just delegates to {@link #formatByteBuf(String, ByteBuf)}, * using the content of the {@link ByteBufHolder}. Sub-classes may override this. */ protected String formatByteBufHolder(String eventName, ByteBufHolder msg) { String msgStr = msg.toString(); ByteBuf content = msg.content(); int length = content.readableBytes(); if (length == 0) { StringBuilder buf = new StringBuilder(eventName.length() + 2 + msgStr.length() + 4); buf.append(eventName).append(", ").append(msgStr).append(", 0B"); return buf.toString(); } else { int rows = length / 16 + (length % 15 == 0? 
0 : 1) + 4; StringBuilder buf = new StringBuilder( eventName.length() + 2 + msgStr.length() + 2 + 10 + 1 + 2 + rows * 80); buf.append(eventName).append(": ") .append(msgStr).append(", ").append(length).append('B').append(NEWLINE); appendPrettyHexDump(buf, content); return buf.toString(); } } }
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.inline;

import com.intellij.JavaTestUtil;
import com.intellij.codeInsight.TargetElementUtil;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.PsiReference;
import com.intellij.psi.PsiReferenceExpression;
import com.intellij.refactoring.BaseRefactoringProcessor;
import com.intellij.refactoring.LightRefactoringTestCase;
import com.intellij.refactoring.MockInlineMethodOptions;
import com.intellij.refactoring.util.InlineUtil;
import com.intellij.testFramework.IdeaTestUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

/**
 * Tests for the "Inline Method" refactoring. Each testXxx method loads
 * /refactoring/inlineMethod/Xxx.java (the test name is derived reflectively
 * from the method name), runs the refactoring at the caret, and compares
 * against the matching ".after" file.
 */
public class InlineMethodTest extends LightRefactoringTestCase {
  @NotNull
  @Override
  protected String getTestDataPath() {
    return JavaTestUtil.getJavaTestDataPath();
  }

  public void testInlineParms() throws Exception { doTest(); }
  public void testInlineWithQualifier() throws Exception { doTest(); }
  public void testInlineWithQualifierFromSuper() throws Exception { doTest(); }
  public void testTry() throws Exception { doTest(); }
  public void testTrySynchronized() throws Exception { doTest(); }
  public void testStaticSynchronized() throws Exception { doTest(); }
  public void testSuperInsideHierarchy() throws Exception { doTest(); }
  public void testSideEffect() throws Exception { doTest(); }
  public void testInlineWithTry() throws Exception { doTest(); }
  public void testVoidWithReturn() throws Exception { doTest(); }
  public void testVoidWithReturn1() throws Exception { doTest(); }
  public void testScr10884() throws Exception { doTest(); }
  public void testFinalParameters() throws Exception { doTest(); }
  public void testFinalParameters1() throws Exception { doTest(); }
  public void testScr13831() throws Exception { doTest(); }
  public void testNameClash() throws Exception { doTest(); }
  public void testArrayAccess() throws Exception { doTest(); }
  public void testConflictingField() throws Exception { doTest(); }
  public void testCallInFor() throws Exception { doTest(); }
  public void testSCR20655() throws Exception { doTest(); }
  public void testFieldInitializer() throws Exception { doTest(); }
  public void testMethodCallInOtherAnonymousOrInner() throws Exception { doTest(); }
  public void testStaticFieldInitializer() throws Exception { doTest(); }
  public void testSCR22644() throws Exception { doTest(); }
  public void testCallUnderIf() throws Exception { doTest(); }
  //This gives extra 'result' local variable, currently I don't see a way to cope with it, todo: think about addional inline possibilities
  //public void testLocalVariableResult() throws Exception { doTest(); }
  public void testSCR31093() throws Exception { doTest(); }
  public void testSCR37742() throws Exception { doTest(); }
  public void testChainingConstructor() throws Exception { doTest(); }
  public void testChainingConstructor1() throws Exception { doTest(); }
  public void testNestedCall() throws Exception { doTest(); }
  public void testIDEADEV3672() throws Exception { doTest(); }
  public void testIDEADEV5806() throws Exception { doTest(); }
  public void testIDEADEV6807() throws Exception { doTest(); }
  public void testIDEADEV12616() throws Exception { doTest(); }
  public void testVarargs() throws Exception { doTest(); }
  public void testVarargs1() throws Exception { doTest(); }
  public void testFlatVarargs() throws Exception {doTest();}
  public void testFlatVarargs1() throws Exception {doTest();}
  public void testEnumConstructor() throws Exception { doTest(); }
  public void testEnumConstantConstructorParameter() throws Exception {  // IDEADEV-26133
    doTest();
  }
  public void testEnumConstantConstructorParameterComplex() throws Exception {  // IDEADEV-26133
    doTest();
  }
  public void testEnumConstantConstructorParameterComplex2() throws Exception {  // IDEADEV-26133
    doTest();
  }
  public void testConstantInChainingConstructor() throws Exception {   // IDEADEV-28136
    doTest();
  }
  public void testReplaceParameterWithArgumentForConstructor() throws Exception {   // IDEADEV-23652
    doTest();
  }
  public void testTailCallReturn() throws Exception {  // IDEADEV-27983
    doTest();
  }
  public void testTailCallSimple() throws Exception {  // IDEADEV-27983
    doTest();
  }
  public void testTailComment() throws Exception {   //IDEADEV-33638
    doTest();
  }
  public void testInferredType() throws Exception {
    // diamond/inference behavior requires a 1.7 language level
    setLanguageLevel(LanguageLevel.JDK_1_7);
    doTest();
  }
  public void testReplaceGenericsInside() throws Exception { doTest(); }
  public void testStaticMethodWithoutParams() throws Exception { doTest(); }
  public void testWithSuperInside() throws Exception { doTest(); }
  public void testRawSubstitution() throws Exception { doTest(); }
  public void testSubstitution() throws Exception { doTest(); }
  public void testParamNameConflictsWithLocalVar() throws Exception { doTest(); }
  public void testArrayTypeInferenceFromVarargs() throws Exception { doTest(); }
  public void testSuperMethodInAnonymousClass() throws Exception { doTest(); }
  public void testInlineAnonymousClassWithPrivateMethodInside() throws Exception { doTest(); }
  public void testChainedConstructor() throws Exception { doTestInlineThisOnly(); }
  public void testChainedConstructor1() throws Exception { doTest(); }

  public void testMethodUsedInJavadoc() throws Exception { doTestConflict("Inlined method is used in javadoc"); }
  public void testNotAStatement() throws Exception { doTestConflict("Inlined result would contain parse errors"); }

  public void testInSuperCall() throws Exception {
    doTestConflict("Inline cannot be applied to multiline method in constructor call");
  }

  // Runs doTest() expecting the refactoring to be rejected with exactly
  // the given conflict message.
  private void doTestConflict(final String conflict) throws Exception {
    try {
      doTest();
      fail("Conflict was not detected");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      assertEquals(conflict, e.getMessage());
    }
  }

  public void testInlineRunnableRun() throws Exception { doTestInlineThisOnly(); }
  public void testOneLineLambdaVoidCompatibleToBlock() throws Exception { doTestInlineThisOnly(); }
  public void testOneLineLambdaValueCompatibleToBlock() throws Exception { doTestInlineThisOnly(); }
  public void testOneLineLambdaVoidCompatibleOneLine() throws Exception { doTestInlineThisOnly(); }
  public void testOneLineLambdaValueCompatibleOneLine() throws Exception { doTestInlineThisOnly(); }
  public void testOnMethodReference() throws Exception { doTestInlineThisOnly(); }

  public void testNonCodeUsage() throws Exception { doTest(true); }
  public void testMethodInsideChangeIfStatement() throws Exception { doTest(); }
  public void testSameVarMethodNames() throws Exception { doTest(); }
  public void testThisNameConflict() throws Exception { doTest(); }
  public void testReturnStatementWithoutBraces() throws Exception { doTestInlineThisOnly(); }

  // Inlines only the call site at the caret, leaving the method declaration.
  private void doTestInlineThisOnly() {
    @NonNls String fileName = "/refactoring/inlineMethod/" + getTestName(false) + ".java";
    configureByFile(fileName);
    performAction(new MockInlineMethodOptions(){
      @Override
      public boolean isInlineThisOnly() {
        return true;
      }
    }, false);
    checkResultByFile(fileName + ".after");
  }

  private void doTest() throws Exception {
    doTest(false);
  }

  // nonCode = also process usages in comments/strings/non-code files.
  private void doTest(final boolean nonCode) throws Exception {
    String name = getTestName(false);
    @NonNls String fileName = "/refactoring/inlineMethod/" + name + ".java";
    configureByFile(fileName);
    performAction(nonCode);
    checkResultByFile(fileName + ".after");
  }

  private void performAction(final boolean nonCode) {
    performAction(new MockInlineMethodOptions(), nonCode);
  }

  // Resolves the method at the caret, sanity-checks its returns, and runs
  // InlineMethodProcessor with the given options.
  private void performAction(final InlineOptions options, final boolean nonCode) {
    PsiElement element = TargetElementUtil
      .findTargetElement(myEditor, TargetElementUtil.ELEMENT_NAME_ACCEPTED | TargetElementUtil.REFERENCED_ELEMENT_ACCEPTED);
    final PsiReference ref = myFile.findReferenceAt(myEditor.getCaretModel().getOffset());
    PsiReferenceExpression refExpr = ref instanceof PsiReferenceExpression ? (PsiReferenceExpression)ref : null;
    assertTrue(element instanceof PsiMethod);
    PsiMethod method = (PsiMethod)element;
    final boolean condition = InlineMethodProcessor.checkBadReturns(method) && !InlineUtil.allUsagesAreTailCalls(method);
    assertFalse("Bad returns found", condition);
    final InlineMethodProcessor processor =
      new InlineMethodProcessor(getProject(), method, refExpr, myEditor, options.isInlineThisOnly(), nonCode, nonCode);
    processor.run();
  }

  @Override
  protected Sdk getProjectJDK() {
    return IdeaTestUtil.getMockJdk18();
  }
}
/**
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package backport.java.util.concurrent.atomic;

/*
 * Written by Doug Lea with assistance from members of JCP JSR-166
 * Expert Group and released to the public domain, as explained at
 * http://creativecommons.org/publicdomain/zero/1.0/
 *
 * From http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/jsr166e/
 */

import java.util.Random;

/**
 * A package-local class holding common representation and mechanics for classes supporting dynamic striping on 64bit
 * values. The class extends Number so that concrete subclasses must publicly do so.
 */
abstract class Striped64 extends Number {
    /*
     * This class maintains a lazily-initialized table of atomically updated variables, plus an extra "base" field. The
     * table size is a power of two. Indexing uses masked per-thread hash codes. Nearly all declarations in this class
     * are package-private, accessed directly by subclasses.
     *
     * Table entries are of class Cell; a variant of AtomicLong padded to reduce cache contention on most processors.
     * Padding is overkill for most Atomics because they are usually irregularly scattered in memory and thus don't
     * interfere much with each other. But Atomic objects residing in arrays will tend to be placed adjacent to each
     * other, and so will most often share cache lines (with a huge negative performance impact) without this
     * precaution.
     *
     * In part because Cells are relatively large, we avoid creating them until they are needed. When there is no
     * contention, all updates are made to the base field. Upon first contention (a failed CAS on base update), the
     * table is initialized to size 2. The table size is doubled upon further contention until reaching the nearest
     * power of two greater than or equal to the number of CPUS. Table slots remain empty (null) until they are needed.
     *
     * A single spinlock ("busy") is used for initializing and resizing the table, as well as populating slots with new
     * Cells. There is no need for a blocking lock: When the lock is not available, threads try other slots (or the
     * base). During these retries, there is increased contention and reduced locality, which is still better than
     * alternatives.
     *
     * Per-thread hash codes are initialized to random values. Contention and/or table collisions are indicated by
     * failed CASes when performing an update operation (see method retryUpdate). Upon a collision, if the table size is
     * less than the capacity, it is doubled in size unless some other thread holds the lock. If a hashed slot is empty,
     * and lock is available, a new Cell is created. Otherwise, if the slot exists, a CAS is tried. Retries proceed by
     * "double hashing", using a secondary hash (Marsaglia XorShift) to try to find a free slot.
     *
     * The table size is capped because, when there are more threads than CPUs, supposing that each thread were bound to
     * a CPU, there would exist a perfect hash function mapping threads to slots that eliminates collisions. When we
     * reach capacity, we search for this mapping by randomly varying the hash codes of colliding threads. Because
     * search is random, and collisions only become known via CAS failures, convergence can be slow, and because threads
     * are typically not bound to CPUS forever, may not occur at all. However, despite these limitations, observed
     * contention rates are typically low in these cases.
     *
     * It is possible for a Cell to become unused when threads that once hashed to it terminate, as well as in the case
     * where doubling the table causes no thread to hash to it under expanded mask. We do not try to detect or remove
     * such cells, under the assumption that for long-running instances, observed contention levels will recur, so the
     * cells will eventually be needed again; and for short-lived ones, it does not matter.
     */

    private static final long serialVersionUID = -3403386352761423917L;

    /**
     * Padded variant of AtomicLong supporting only raw accesses plus CAS. The value field is placed between pads,
     * hoping that the JVM doesn't reorder them.
     *
     * JVM intrinsics note: It would be possible to use a release-only form of CAS here, if it were provided.
     */
    static final class Cell {
        volatile long p0, p1, p2, p3, p4, p5, p6;
        volatile long value;
        volatile long q0, q1, q2, q3, q4, q5, q6;

        Cell(long x) {
            value = x;
        }

        final boolean cas(long cmp, long val) {
            return UNSAFE.compareAndSwapLong(this, valueOffset, cmp, val);
        }

        // Unsafe mechanics
        private static final sun.misc.Unsafe UNSAFE;
        private static final long valueOffset;
        static {
            try {
                // getUnsafe() is presumably a reflective helper defined later in
                // this file — TODO confirm against the rest of the class.
                UNSAFE = getUnsafe();
                Class<?> ak = Cell.class;
                valueOffset = UNSAFE.objectFieldOffset(ak.getDeclaredField("value"));
            } catch (Exception e) {
                throw new Error(e);
            }
        }
    }

    /**
     * Holder for the thread-local hash code. The code is initially random, but may be set to a different value upon
     * collisions.
     */
    static final class HashCode {
        static final Random rng = new Random();
        int code;

        HashCode() {
            int h = rng.nextInt(); // Avoid zero to allow xorShift rehash
            code = (h == 0) ? 1 : h;
        }
    }

    /**
     * The corresponding ThreadLocal class
     */
    static final class ThreadHashCode extends ThreadLocal<HashCode> {
        @Override
        public HashCode initialValue() {
            return new HashCode();
        }
    }

    /**
     * Static per-thread hash codes. Shared across all instances to reduce ThreadLocal pollution and because adjustments
     * due to collisions in one table are likely to be appropriate for others.
     */
    static final ThreadHashCode threadHashCode = new ThreadHashCode();

    /** Number of CPUS, to place bound on table size */
    static final int NCPU = Runtime.getRuntime().availableProcessors();

    /**
     * Table of cells. When non-null, size is a power of 2.
     */
    transient volatile Cell[] cells;

    /**
     * Base value, used mainly when there is no contention, but also as a fallback during table initialization races.
     * Updated via CAS.
     */
    transient volatile long base;

    /**
     * Spinlock (locked via CAS) used when resizing and/or creating Cells.
     */
    transient volatile int busy;

    /**
     * Package-private default constructor
     */
    Striped64() {
    }

    /**
     * CASes the base field.
     */
    final boolean casBase(long cmp, long val) {
        return UNSAFE.compareAndSwapLong(this, baseOffset, cmp, val);
    }

    /**
     * CASes the busy field from 0 to 1 to acquire lock.
     */
    final boolean casBusy() {
        return UNSAFE.compareAndSwapInt(this, busyOffset, 0, 1);
    }

    /**
     * Computes the function of current and new value. Subclasses should open-code this update function for most uses,
     * but the virtualized form is needed within retryUpdate.
     *
     * @param currentValue the current value (of either base or a cell)
     * @param newValue the argument from a user update call
     * @return result of the update function
     */
    abstract long fn(long currentValue, long newValue);

    /**
     * Handles cases of updates involving initialization, resizing, creating new Cells, and/or contention. See above for
     * explanation. This method suffers the usual non-modularity problems of optimistic retry code, relying on rechecked
     * sets of reads.
     *
     * @param x the value
     * @param hc the hash code holder
     * @param wasUncontended false if CAS failed before call
     */
    final void retryUpdate(long x, HashCode hc, boolean wasUncontended) {
        int h = hc.code;
        boolean collide = false; // True if last slot nonempty
        for (;;) {
            Cell[] as; Cell a; int n; long v;
            if ((as = cells) != null && (n = as.length) > 0) {
                if ((a = as[(n - 1) & h]) == null) {
                    if (busy == 0) { // Try to attach new Cell
                        Cell r = new Cell(x); // Optimistically create
                        if (busy == 0 && casBusy()) {
                            boolean created = false;
                            try { // Recheck under lock
                                Cell[] rs; int m, j;
                                if ((rs = cells) != null && (m = rs.length) > 0 && rs[j = (m - 1) & h] == null) {
                                    rs[j] = r;
                                    created = true;
                                }
                            } finally {
                                busy = 0;
                            }
                            if (created)
                                break;
                            continue; // Slot is now non-empty
                        }
                    }
                    collide = false;
                }
                else if (!wasUncontended) // CAS already known to fail
                    wasUncontended = true; // Continue after rehash
                else if (a.cas(v = a.value, fn(v, x)))
                    break;
                else if (n >= NCPU || cells != as)
                    collide = false; // At max size or stale
                else if (!collide)
                    collide = true;
                else if (busy == 0 && casBusy()) {
                    try {
                        if (cells == as) { // Expand table unless stale
                            Cell[] rs = new Cell[n << 1];
                            for (int i = 0; i < n; ++i)
                                rs[i] = as[i];
                            cells = rs;
                        }
                    } finally {
                        busy = 0;
                    }
                    collide = false;
                    continue; // Retry with expanded table
                }
                h ^= h << 13; // Rehash
                h ^= h >>> 17;
                h ^= h << 5;
            }
            else if (busy == 0 && cells == as && casBusy()) {
                boolean init = false;
                try { // Initialize table
                    if (cells == as) {
                        Cell[] rs = new Cell[2];
                        rs[h & 1] = new Cell(x);
                        cells = rs;
                        init = true;
                    }
                } finally {
                    busy = 0;
                }
                if (init)
                    break;
            }
            else if (casBase(v = base, fn(v, x)))
                break; // Fall back on using base
        }
        hc.code = h; // Record index for next time
    }

    /**
     * Sets base and all cells to the given value.
*/ final void internalReset(long initialValue) { Cell[] as = cells; base = initialValue; if (as != null) { int n = as.length; for (int i = 0; i < n; ++i) { Cell a = as[i]; if (a != null) a.value = initialValue; } } } // Unsafe mechanics private static final sun.misc.Unsafe UNSAFE; private static final long baseOffset; private static final long busyOffset; static { try { UNSAFE = getUnsafe(); Class<?> sk = Striped64.class; baseOffset = UNSAFE.objectFieldOffset(sk.getDeclaredField("base")); busyOffset = UNSAFE.objectFieldOffset(sk.getDeclaredField("busy")); } catch (Exception e) { throw new Error(e); } } /** * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package. Replace with a simple call to * Unsafe.getUnsafe when integrating into a jdk. * * @return a sun.misc.Unsafe */ private static sun.misc.Unsafe getUnsafe() { try { return sun.misc.Unsafe.getUnsafe(); } catch (SecurityException se) { try { return java.security.AccessController .doPrivileged(new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() { @Override public sun.misc.Unsafe run() throws Exception { java.lang.reflect.Field f = sun.misc.Unsafe.class.getDeclaredField("theUnsafe"); f.setAccessible(true); return (sun.misc.Unsafe) f.get(null); } }); } catch (java.security.PrivilegedActionException e) { throw new RuntimeException("Could not initialize intrinsics", e.getCause()); } } } }
/*
 * ====================================================================
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */

package org.apache.http.message;

import java.util.NoSuchElementException;

import org.apache.http.HeaderIterator;
import org.apache.http.ParseException;
import org.apache.http.TokenIterator;
import org.apache.http.annotation.NotThreadSafe;
import org.apache.http.util.Args;

/**
 * Basic implementation of a {@link TokenIterator}.
 * This implementation parses <tt>#token</tt> sequences as
 * defined by RFC 2616, section 2.
 * It extends that definition somewhat beyond US-ASCII.
 *
 * @since 4.0
 */
@NotThreadSafe
public class BasicTokenIterator implements TokenIterator {

    /** The HTTP separator characters. Defined in RFC 2616, section 2.2.
     */
    // the order of the characters here is adjusted to put the
    // most likely candidates at the beginning of the collection
    public final static String HTTP_SEPARATORS = " ,;=()<>@:\\\"/[]?{}\t";

    /** The iterator from which to obtain the next header. */
    protected final HeaderIterator headerIt;

    /**
     * The value of the current header.
     * This is the header value that includes {@link #currentToken}.
     * Undefined if the iteration is over.
     */
    protected String currentHeader;

    /**
     * The token to be returned by the next call to {@link #nextToken()}.
     * <code>null</code> if the iteration is over.
     */
    protected String currentToken;

    /**
     * The position after {@link #currentToken} in {@link #currentHeader}.
     * Undefined if the iteration is over.
     */
    protected int searchPos;

    /**
     * Creates a new instance of {@link BasicTokenIterator}.
     *
     * @param headerIterator the iterator for the headers to tokenize
     */
    public BasicTokenIterator(final HeaderIterator headerIterator) {
        super();
        this.headerIt = Args.notNull(headerIterator, "Header iterator");
        // Prime the iteration; -1 tells findNext to fetch the first header.
        this.searchPos = findNext(-1);
    }

    // non-javadoc, see interface TokenIterator
    public boolean hasNext() {
        return this.currentToken != null;
    }

    /**
     * Obtains the next token from this iteration.
     *
     * @return the next token in this iteration
     *
     * @throws NoSuchElementException if the iteration is already over
     * @throws ParseException if an invalid header value is encountered
     */
    public String nextToken()
        throws NoSuchElementException, ParseException {

        final String token = this.currentToken;
        if (token == null) {
            throw new NoSuchElementException("Iteration already finished.");
        }
        // Advance to the token after this one; may trigger ParseException.
        this.searchPos = findNext(this.searchPos);
        return token;
    }

    /**
     * Returns the next token.
     * Same as {@link #nextToken}, but with generic return type.
     *
     * @return the next token in this iteration
     *
     * @throws NoSuchElementException if there are no more tokens
     * @throws ParseException if an invalid header value is encountered
     */
    public final Object next()
        throws NoSuchElementException, ParseException {
        return nextToken();
    }

    /**
     * Removing tokens is not supported.
     *
     * @throws UnsupportedOperationException always
     */
    public final void remove()
        throws UnsupportedOperationException {
        throw new UnsupportedOperationException
            ("Removing tokens is not supported.");
    }

    /**
     * Determines the next token.
     * If found, the token is stored in {@link #currentToken}.
     * The return value indicates the position after the token
     * in {@link #currentHeader}. If necessary, the next header
     * will be obtained from {@link #headerIt}.
     * If not found, {@link #currentToken} is set to <code>null</code>.
     *
     * @param pos the position in the current header at which to
     *            start the search, -1 to search in the first header
     *
     * @return  the position after the found token in the current header, or
     *          negative if there was no next token
     *
     * @throws ParseException if an invalid header value is encountered
     */
    protected int findNext(final int pos) throws ParseException {
        int from;
        if (pos < 0) {
            // invoked from the constructor: fetch the first header, if any
            if (!this.headerIt.hasNext()) {
                return -1;
            }
            this.currentHeader = this.headerIt.nextHeader().getValue();
            from = 0;
        } else {
            // a token was returned before, so a separator must follow it
            from = findTokenSeparator(pos);
        }

        final int start = findTokenStart(from);
        if (start < 0) {
            // no further token in any remaining header
            this.currentToken = null;
            return -1;
        }

        final int end = findTokenEnd(start);
        this.currentToken = createToken(this.currentHeader, start, end);
        return end;
    }

    /**
     * Creates a new token to be returned.
     * Called from {@link #findNext findNext} after the token is identified.
     * The default implementation simply calls
     * {@link java.lang.String#substring String.substring}.
     * <br/>
     * If header values are significantly longer than tokens, and some
     * tokens are permanently referenced by the application, there can
     * be problems with garbage collection. A substring will hold a
     * reference to the full characters of the original string and
     * therefore occupies more memory than might be expected.
     * To avoid this, override this method and create a new string
     * instead of a substring.
     *
     * @param value     the full header value from which to create a token
     * @param start     the index of the first token character
     * @param end       the index after the last token character
     *
     * @return a string representing the token identified by the arguments
     */
    protected String createToken(final String value, final int start, final int end) {
        return value.substring(start, end);
    }

    /**
     * Determines the starting position of the next token.
     * This method will iterate over headers if necessary.
     *
     * @param pos       the position in the current header at which to
     *                  start the search
     *
     * @return  the position of the token start in the current header,
     *          negative if no token start could be found
     */
    protected int findTokenStart(final int pos) {
        int index = Args.notNegative(pos, "Search position");
        while (this.currentHeader != null) {
            final int limit = this.currentHeader.length();
            while (index < limit) {
                final char c = this.currentHeader.charAt(index);
                // NOTE: the whitespace/separator check must come first; some
                // Unicode space characters also pass isTokenChar.
                if (isTokenSeparator(c) || isWhitespace(c)) {
                    // whitespace and token separators are skipped
                    index++;
                } else if (isTokenChar(c)) {
                    // found the start of a token
                    return index;
                } else {
                    throw new ParseException
                        ("Invalid character before token (pos " + index +
                         "): " + this.currentHeader);
                }
            }
            // header exhausted without finding a token: advance to next header
            if (this.headerIt.hasNext()) {
                this.currentHeader = this.headerIt.nextHeader().getValue();
                index = 0;
            } else {
                this.currentHeader = null;
            }
        }
        return -1;
    }

    /**
     * Determines the position of the next token separator.
     * Because of multi-header joining rules, the end of a
     * header value is a token separator. This method does
     * therefore not need to iterate over headers.
     *
     * @param pos       the position in the current header at which to
     *                  start the search
     *
     * @return  the position of a token separator in the current header,
     *          or at the end
     *
     * @throws ParseException
     *         if a new token is found before a token separator.
     *         RFC 2616, section 2.1 explicitly requires a comma between
     *         tokens for <tt>#</tt>.
     */
    protected int findTokenSeparator(final int pos) {
        int index = Args.notNegative(pos, "Search position");
        final int limit = this.currentHeader.length();
        while (index < limit) {
            final char c = this.currentHeader.charAt(index);
            if (isTokenSeparator(c)) {
                break;
            }
            if (isWhitespace(c)) {
                index++;
            } else if (isTokenChar(c)) {
                throw new ParseException
                    ("Tokens without separator (pos " + index +
                     "): " + this.currentHeader);
            } else {
                throw new ParseException
                    ("Invalid character after token (pos " + index +
                     "): " + this.currentHeader);
            }
        }
        return index;
    }

    /**
     * Determines the ending position of the current token.
     * This method will not leave the current header value,
     * since the end of the header value is a token boundary.
     *
     * @param from      the position of the first character of the token
     *
     * @return  the position after the last character of the token.
     *          The behavior is undefined if <code>from</code> does not
     *          point to a token character in the current header value.
     */
    protected int findTokenEnd(final int from) {
        Args.notNegative(from, "Search position");
        final int limit = this.currentHeader.length();
        int end = from + 1;
        while (end < limit && isTokenChar(this.currentHeader.charAt(end))) {
            end++;
        }
        return end;
    }

    /**
     * Checks whether a character is a token separator.
     * RFC 2616, section 2.1 defines comma as the separator for
     * <tt>#token</tt> sequences. The end of a header value will
     * also separate tokens, but that is not a character check.
     *
     * @param ch        the character to check
     *
     * @return  <code>true</code> if the character is a token separator,
     *          <code>false</code> otherwise
     */
    protected boolean isTokenSeparator(final char ch) {
        return ch == ',';
    }

    /**
     * Checks whether a character is a whitespace character.
     * RFC 2616, section 2.2 defines space and horizontal tab as whitespace.
     * The optional preceding line break is irrelevant, since header
     * continuation is handled transparently when parsing messages.
     *
     * @param ch        the character to check
     *
     * @return  <code>true</code> if the character is whitespace,
     *          <code>false</code> otherwise
     */
    protected boolean isWhitespace(final char ch) {
        // Character.isWhitespace(ch) is deliberately not used here, since it
        // accepts many control characters that are not whitespace per RFC 2616
        return ch == '\t' || Character.isSpaceChar(ch);
    }

    /**
     * Checks whether a character is a valid token character.
     * Whitespace, control characters, and HTTP separators are not
     * valid token characters. The HTTP specification (RFC 2616, section 2.2)
     * defines tokens only for the US-ASCII character set, this
     * method extends the definition to other character sets.
     *
     * @param ch        the character to check
     *
     * @return  <code>true</code> if the character is a valid token start,
     *          <code>false</code> otherwise
     */
    protected boolean isTokenChar(final char ch) {
        // common sense extension of ALPHA + DIGIT
        if (Character.isLetterOrDigit(ch)) {
            return true;
        }
        // common sense extension of CTL, plus the explicit separator list
        if (Character.isISOControl(ch) || isHttpSeparator(ch)) {
            return false;
        }
        // RFC 2616, section 2.2 defines a token character as
        // "any CHAR except CTLs or separators". The controls
        // and separators are included in the checks above.
        // This will yield unexpected results for Unicode format characters.
        // If that is a problem, overwrite isHttpSeparator(char) to filter
        // out the false positives.
        return true;
    }

    /**
     * Checks whether a character is an HTTP separator.
     * The implementation in this class checks only for the HTTP separators
     * defined in RFC 2616, section 2.2. If you need to detect other
     * separators beyond the US-ASCII character set, override this method.
     *
     * @param ch        the character to check
     *
     * @return  <code>true</code> if the character is an HTTP separator
     */
    protected boolean isHttpSeparator(final char ch) {
        return HTTP_SEPARATORS.indexOf(ch) >= 0;
    }

} // class BasicTokenIterator
/* * Copyright (C) 2007 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.CollectPreconditions.checkNonnegative; import static com.google.common.collect.CollectPreconditions.checkRemove; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.base.MoreObjects; import com.google.common.primitives.Ints; import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.Comparator; import java.util.ConcurrentModificationException; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.function.ObjIntConsumer; import org.checkerframework.checker.nullness.qual.Nullable; /** * A multiset which maintains the ordering of its elements, according to either their natural order * or an explicit {@link Comparator}. In all cases, this implementation uses {@link * Comparable#compareTo} or {@link Comparator#compare} instead of {@link Object#equals} to determine * equivalence of instances. 
* * <p><b>Warning:</b> The comparison must be <i>consistent with equals</i> as explained by the * {@link Comparable} class specification. Otherwise, the resulting multiset will violate the {@link * java.util.Collection} contract, which is specified in terms of {@link Object#equals}. * * <p>See the Guava User Guide article on <a href= * "https://github.com/google/guava/wiki/NewCollectionTypesExplained#multiset"> {@code * Multiset}</a>. * * @author Louis Wasserman * @author Jared Levy * @since 2.0 */ @GwtCompatible(emulated = true) public final class TreeMultiset<E> extends AbstractSortedMultiset<E> implements Serializable { /** * Creates a new, empty multiset, sorted according to the elements' natural order. All elements * inserted into the multiset must implement the {@code Comparable} interface. Furthermore, all * such elements must be <i>mutually comparable</i>: {@code e1.compareTo(e2)} must not throw a * {@code ClassCastException} for any elements {@code e1} and {@code e2} in the multiset. If the * user attempts to add an element to the multiset that violates this constraint (for example, the * user attempts to add a string element to a set whose elements are integers), the {@code * add(Object)} call will throw a {@code ClassCastException}. * * <p>The type specification is {@code <E extends Comparable>}, instead of the more specific * {@code <E extends Comparable<? super E>>}, to support classes defined without generics. */ public static <E extends Comparable> TreeMultiset<E> create() { return new TreeMultiset<E>(Ordering.natural()); } /** * Creates a new, empty multiset, sorted according to the specified comparator. All elements * inserted into the multiset must be <i>mutually comparable</i> by the specified comparator: * {@code comparator.compare(e1, e2)} must not throw a {@code ClassCastException} for any elements * {@code e1} and {@code e2} in the multiset. 
If the user attempts to add an element to the * multiset that violates this constraint, the {@code add(Object)} call will throw a {@code * ClassCastException}. * * @param comparator the comparator that will be used to sort this multiset. A null value * indicates that the elements' <i>natural ordering</i> should be used. */ @SuppressWarnings("unchecked") public static <E> TreeMultiset<E> create(@Nullable Comparator<? super E> comparator) { return (comparator == null) ? new TreeMultiset<E>((Comparator) Ordering.natural()) : new TreeMultiset<E>(comparator); } /** * Creates an empty multiset containing the given initial elements, sorted according to the * elements' natural order. * * <p>This implementation is highly efficient when {@code elements} is itself a {@link Multiset}. * * <p>The type specification is {@code <E extends Comparable>}, instead of the more specific * {@code <E extends Comparable<? super E>>}, to support classes defined without generics. */ public static <E extends Comparable> TreeMultiset<E> create(Iterable<? extends E> elements) { TreeMultiset<E> multiset = create(); Iterables.addAll(multiset, elements); return multiset; } private final transient Reference<AvlNode<E>> rootReference; private final transient GeneralRange<E> range; private final transient AvlNode<E> header; TreeMultiset(Reference<AvlNode<E>> rootReference, GeneralRange<E> range, AvlNode<E> endLink) { super(range.comparator()); this.rootReference = rootReference; this.range = range; this.header = endLink; } TreeMultiset(Comparator<? super E> comparator) { super(comparator); this.range = GeneralRange.all(comparator); this.header = new AvlNode<E>(null, 1); successor(header, header); this.rootReference = new Reference<>(); } /** A function which can be summed across a subtree. */ private enum Aggregate { SIZE { @Override int nodeAggregate(AvlNode<?> node) { return node.elemCount; } @Override long treeAggregate(@Nullable AvlNode<?> root) { return (root == null) ? 
0 : root.totalCount; } }, DISTINCT { @Override int nodeAggregate(AvlNode<?> node) { return 1; } @Override long treeAggregate(@Nullable AvlNode<?> root) { return (root == null) ? 0 : root.distinctElements; } }; abstract int nodeAggregate(AvlNode<?> node); abstract long treeAggregate(@Nullable AvlNode<?> root); } private long aggregateForEntries(Aggregate aggr) { AvlNode<E> root = rootReference.get(); long total = aggr.treeAggregate(root); if (range.hasLowerBound()) { total -= aggregateBelowRange(aggr, root); } if (range.hasUpperBound()) { total -= aggregateAboveRange(aggr, root); } return total; } private long aggregateBelowRange(Aggregate aggr, @Nullable AvlNode<E> node) { if (node == null) { return 0; } int cmp = comparator().compare(range.getLowerEndpoint(), node.elem); if (cmp < 0) { return aggregateBelowRange(aggr, node.left); } else if (cmp == 0) { switch (range.getLowerBoundType()) { case OPEN: return aggr.nodeAggregate(node) + aggr.treeAggregate(node.left); case CLOSED: return aggr.treeAggregate(node.left); default: throw new AssertionError(); } } else { return aggr.treeAggregate(node.left) + aggr.nodeAggregate(node) + aggregateBelowRange(aggr, node.right); } } private long aggregateAboveRange(Aggregate aggr, @Nullable AvlNode<E> node) { if (node == null) { return 0; } int cmp = comparator().compare(range.getUpperEndpoint(), node.elem); if (cmp > 0) { return aggregateAboveRange(aggr, node.right); } else if (cmp == 0) { switch (range.getUpperBoundType()) { case OPEN: return aggr.nodeAggregate(node) + aggr.treeAggregate(node.right); case CLOSED: return aggr.treeAggregate(node.right); default: throw new AssertionError(); } } else { return aggr.treeAggregate(node.right) + aggr.nodeAggregate(node) + aggregateAboveRange(aggr, node.left); } } @Override public int size() { return Ints.saturatedCast(aggregateForEntries(Aggregate.SIZE)); } @Override int distinctElements() { return Ints.saturatedCast(aggregateForEntries(Aggregate.DISTINCT)); } static int 
distinctElements(@Nullable AvlNode<?> node) { return (node == null) ? 0 : node.distinctElements; } @Override public int count(@Nullable Object element) { try { @SuppressWarnings("unchecked") E e = (E) element; AvlNode<E> root = rootReference.get(); if (!range.contains(e) || root == null) { return 0; } return root.count(comparator(), e); } catch (ClassCastException | NullPointerException e) { return 0; } } @CanIgnoreReturnValue @Override public int add(@Nullable E element, int occurrences) { checkNonnegative(occurrences, "occurrences"); if (occurrences == 0) { return count(element); } checkArgument(range.contains(element)); AvlNode<E> root = rootReference.get(); if (root == null) { comparator().compare(element, element); AvlNode<E> newRoot = new AvlNode<E>(element, occurrences); successor(header, newRoot, header); rootReference.checkAndSet(root, newRoot); return 0; } int[] result = new int[1]; // used as a mutable int reference to hold result AvlNode<E> newRoot = root.add(comparator(), element, occurrences, result); rootReference.checkAndSet(root, newRoot); return result[0]; } @CanIgnoreReturnValue @Override public int remove(@Nullable Object element, int occurrences) { checkNonnegative(occurrences, "occurrences"); if (occurrences == 0) { return count(element); } AvlNode<E> root = rootReference.get(); int[] result = new int[1]; // used as a mutable int reference to hold result AvlNode<E> newRoot; try { @SuppressWarnings("unchecked") E e = (E) element; if (!range.contains(e) || root == null) { return 0; } newRoot = root.remove(comparator(), e, occurrences, result); } catch (ClassCastException | NullPointerException e) { return 0; } rootReference.checkAndSet(root, newRoot); return result[0]; } @CanIgnoreReturnValue @Override public int setCount(@Nullable E element, int count) { checkNonnegative(count, "count"); if (!range.contains(element)) { checkArgument(count == 0); return 0; } AvlNode<E> root = rootReference.get(); if (root == null) { if (count > 0) { add(element, 
count); } return 0; } int[] result = new int[1]; // used as a mutable int reference to hold result AvlNode<E> newRoot = root.setCount(comparator(), element, count, result); rootReference.checkAndSet(root, newRoot); return result[0]; } @CanIgnoreReturnValue @Override public boolean setCount(@Nullable E element, int oldCount, int newCount) { checkNonnegative(newCount, "newCount"); checkNonnegative(oldCount, "oldCount"); checkArgument(range.contains(element)); AvlNode<E> root = rootReference.get(); if (root == null) { if (oldCount == 0) { if (newCount > 0) { add(element, newCount); } return true; } else { return false; } } int[] result = new int[1]; // used as a mutable int reference to hold result AvlNode<E> newRoot = root.setCount(comparator(), element, oldCount, newCount, result); rootReference.checkAndSet(root, newRoot); return result[0] == oldCount; } @Override public void clear() { if (!range.hasLowerBound() && !range.hasUpperBound()) { // We can do this in O(n) rather than removing one by one, which could force rebalancing. for (AvlNode<E> current = header.succ; current != header; ) { AvlNode<E> next = current.succ; current.elemCount = 0; // Also clear these fields so that one deleted Entry doesn't retain all elements. current.left = null; current.right = null; current.pred = null; current.succ = null; current = next; } successor(header, header); rootReference.clear(); } else { // TODO(cpovirk): Perhaps we can optimize in this case, too? Iterators.clear(entryIterator()); } } private Entry<E> wrapEntry(final AvlNode<E> baseEntry) { return new Multisets.AbstractEntry<E>() { @Override public E getElement() { return baseEntry.getElement(); } @Override public int getCount() { int result = baseEntry.getCount(); if (result == 0) { return count(getElement()); } else { return result; } } }; } /** Returns the first node in the tree that is in range. 
*/ private @Nullable AvlNode<E> firstNode() { AvlNode<E> root = rootReference.get(); if (root == null) { return null; } AvlNode<E> node; if (range.hasLowerBound()) { E endpoint = range.getLowerEndpoint(); node = rootReference.get().ceiling(comparator(), endpoint); if (node == null) { return null; } if (range.getLowerBoundType() == BoundType.OPEN && comparator().compare(endpoint, node.getElement()) == 0) { node = node.succ; } } else { node = header.succ; } return (node == header || !range.contains(node.getElement())) ? null : node; } private @Nullable AvlNode<E> lastNode() { AvlNode<E> root = rootReference.get(); if (root == null) { return null; } AvlNode<E> node; if (range.hasUpperBound()) { E endpoint = range.getUpperEndpoint(); node = rootReference.get().floor(comparator(), endpoint); if (node == null) { return null; } if (range.getUpperBoundType() == BoundType.OPEN && comparator().compare(endpoint, node.getElement()) == 0) { node = node.pred; } } else { node = header.pred; } return (node == header || !range.contains(node.getElement())) ? 
null : node; } @Override Iterator<E> elementIterator() { return Multisets.elementIterator(entryIterator()); } @Override Iterator<Entry<E>> entryIterator() { return new Iterator<Entry<E>>() { AvlNode<E> current = firstNode(); @Nullable Entry<E> prevEntry; @Override public boolean hasNext() { if (current == null) { return false; } else if (range.tooHigh(current.getElement())) { current = null; return false; } else { return true; } } @Override public Entry<E> next() { if (!hasNext()) { throw new NoSuchElementException(); } Entry<E> result = wrapEntry(current); prevEntry = result; if (current.succ == header) { current = null; } else { current = current.succ; } return result; } @Override public void remove() { checkRemove(prevEntry != null); setCount(prevEntry.getElement(), 0); prevEntry = null; } }; } @Override Iterator<Entry<E>> descendingEntryIterator() { return new Iterator<Entry<E>>() { AvlNode<E> current = lastNode(); Entry<E> prevEntry = null; @Override public boolean hasNext() { if (current == null) { return false; } else if (range.tooLow(current.getElement())) { current = null; return false; } else { return true; } } @Override public Entry<E> next() { if (!hasNext()) { throw new NoSuchElementException(); } Entry<E> result = wrapEntry(current); prevEntry = result; if (current.pred == header) { current = null; } else { current = current.pred; } return result; } @Override public void remove() { checkRemove(prevEntry != null); setCount(prevEntry.getElement(), 0); prevEntry = null; } }; } @Override public void forEachEntry(ObjIntConsumer<? 
super E> action) { checkNotNull(action); for (AvlNode<E> node = firstNode(); node != header && node != null && !range.tooHigh(node.getElement()); node = node.succ) { action.accept(node.getElement(), node.getCount()); } } @Override public Iterator<E> iterator() { return Multisets.iteratorImpl(this); } @Override public SortedMultiset<E> headMultiset(@Nullable E upperBound, BoundType boundType) { return new TreeMultiset<E>( rootReference, range.intersect(GeneralRange.upTo(comparator(), upperBound, boundType)), header); } @Override public SortedMultiset<E> tailMultiset(@Nullable E lowerBound, BoundType boundType) { return new TreeMultiset<E>( rootReference, range.intersect(GeneralRange.downTo(comparator(), lowerBound, boundType)), header); } private static final class Reference<T> { private @Nullable T value; public @Nullable T get() { return value; } public void checkAndSet(@Nullable T expected, T newValue) { if (value != expected) { throw new ConcurrentModificationException(); } value = newValue; } void clear() { value = null; } } private static final class AvlNode<E> { private final @Nullable E elem; // elemCount is 0 iff this node has been deleted. private int elemCount; private int distinctElements; private long totalCount; private int height; private @Nullable AvlNode<E> left; private @Nullable AvlNode<E> right; private @Nullable AvlNode<E> pred; private @Nullable AvlNode<E> succ; AvlNode(@Nullable E elem, int elemCount) { checkArgument(elemCount > 0); this.elem = elem; this.elemCount = elemCount; this.totalCount = elemCount; this.distinctElements = 1; this.height = 1; this.left = null; this.right = null; } public int count(Comparator<? super E> comparator, E e) { int cmp = comparator.compare(e, elem); if (cmp < 0) { return (left == null) ? 0 : left.count(comparator, e); } else if (cmp > 0) { return (right == null) ? 
0 : right.count(comparator, e); } else { return elemCount; } } private AvlNode<E> addRightChild(E e, int count) { right = new AvlNode<E>(e, count); successor(this, right, succ); height = Math.max(2, height); distinctElements++; totalCount += count; return this; } private AvlNode<E> addLeftChild(E e, int count) { left = new AvlNode<E>(e, count); successor(pred, left, this); height = Math.max(2, height); distinctElements++; totalCount += count; return this; } AvlNode<E> add(Comparator<? super E> comparator, @Nullable E e, int count, int[] result) { /* * It speeds things up considerably to unconditionally add count to totalCount here, * but that destroys failure atomicity in the case of count overflow. =( */ int cmp = comparator.compare(e, elem); if (cmp < 0) { AvlNode<E> initLeft = left; if (initLeft == null) { result[0] = 0; return addLeftChild(e, count); } int initHeight = initLeft.height; left = initLeft.add(comparator, e, count, result); if (result[0] == 0) { distinctElements++; } this.totalCount += count; return (left.height == initHeight) ? this : rebalance(); } else if (cmp > 0) { AvlNode<E> initRight = right; if (initRight == null) { result[0] = 0; return addRightChild(e, count); } int initHeight = initRight.height; right = initRight.add(comparator, e, count, result); if (result[0] == 0) { distinctElements++; } this.totalCount += count; return (right.height == initHeight) ? this : rebalance(); } // adding count to me! No rebalance possible. result[0] = elemCount; long resultCount = (long) elemCount + count; checkArgument(resultCount <= Integer.MAX_VALUE); this.elemCount += count; this.totalCount += count; return this; } AvlNode<E> remove(Comparator<? 
super E> comparator, @Nullable E e, int count, int[] result) { int cmp = comparator.compare(e, elem); if (cmp < 0) { AvlNode<E> initLeft = left; if (initLeft == null) { result[0] = 0; return this; } left = initLeft.remove(comparator, e, count, result); if (result[0] > 0) { if (count >= result[0]) { this.distinctElements--; this.totalCount -= result[0]; } else { this.totalCount -= count; } } return (result[0] == 0) ? this : rebalance(); } else if (cmp > 0) { AvlNode<E> initRight = right; if (initRight == null) { result[0] = 0; return this; } right = initRight.remove(comparator, e, count, result); if (result[0] > 0) { if (count >= result[0]) { this.distinctElements--; this.totalCount -= result[0]; } else { this.totalCount -= count; } } return rebalance(); } // removing count from me! result[0] = elemCount; if (count >= elemCount) { return deleteMe(); } else { this.elemCount -= count; this.totalCount -= count; return this; } } AvlNode<E> setCount(Comparator<? super E> comparator, @Nullable E e, int count, int[] result) { int cmp = comparator.compare(e, elem); if (cmp < 0) { AvlNode<E> initLeft = left; if (initLeft == null) { result[0] = 0; return (count > 0) ? addLeftChild(e, count) : this; } left = initLeft.setCount(comparator, e, count, result); if (count == 0 && result[0] != 0) { this.distinctElements--; } else if (count > 0 && result[0] == 0) { this.distinctElements++; } this.totalCount += count - result[0]; return rebalance(); } else if (cmp > 0) { AvlNode<E> initRight = right; if (initRight == null) { result[0] = 0; return (count > 0) ? 
addRightChild(e, count) : this; } right = initRight.setCount(comparator, e, count, result); if (count == 0 && result[0] != 0) { this.distinctElements--; } else if (count > 0 && result[0] == 0) { this.distinctElements++; } this.totalCount += count - result[0]; return rebalance(); } // setting my count result[0] = elemCount; if (count == 0) { return deleteMe(); } this.totalCount += count - elemCount; this.elemCount = count; return this; } AvlNode<E> setCount( Comparator<? super E> comparator, @Nullable E e, int expectedCount, int newCount, int[] result) { int cmp = comparator.compare(e, elem); if (cmp < 0) { AvlNode<E> initLeft = left; if (initLeft == null) { result[0] = 0; if (expectedCount == 0 && newCount > 0) { return addLeftChild(e, newCount); } return this; } left = initLeft.setCount(comparator, e, expectedCount, newCount, result); if (result[0] == expectedCount) { if (newCount == 0 && result[0] != 0) { this.distinctElements--; } else if (newCount > 0 && result[0] == 0) { this.distinctElements++; } this.totalCount += newCount - result[0]; } return rebalance(); } else if (cmp > 0) { AvlNode<E> initRight = right; if (initRight == null) { result[0] = 0; if (expectedCount == 0 && newCount > 0) { return addRightChild(e, newCount); } return this; } right = initRight.setCount(comparator, e, expectedCount, newCount, result); if (result[0] == expectedCount) { if (newCount == 0 && result[0] != 0) { this.distinctElements--; } else if (newCount > 0 && result[0] == 0) { this.distinctElements++; } this.totalCount += newCount - result[0]; } return rebalance(); } // setting my count result[0] = elemCount; if (expectedCount == elemCount) { if (newCount == 0) { return deleteMe(); } this.totalCount += newCount - elemCount; this.elemCount = newCount; } return this; } private AvlNode<E> deleteMe() { int oldElemCount = this.elemCount; this.elemCount = 0; successor(pred, succ); if (left == null) { return right; } else if (right == null) { return left; } else if (left.height >= 
right.height) { AvlNode<E> newTop = pred; // newTop is the maximum node in my left subtree newTop.left = left.removeMax(newTop); newTop.right = right; newTop.distinctElements = distinctElements - 1; newTop.totalCount = totalCount - oldElemCount; return newTop.rebalance(); } else { AvlNode<E> newTop = succ; newTop.right = right.removeMin(newTop); newTop.left = left; newTop.distinctElements = distinctElements - 1; newTop.totalCount = totalCount - oldElemCount; return newTop.rebalance(); } } // Removes the minimum node from this subtree to be reused elsewhere private AvlNode<E> removeMin(AvlNode<E> node) { if (left == null) { return right; } else { left = left.removeMin(node); distinctElements--; totalCount -= node.elemCount; return rebalance(); } } // Removes the maximum node from this subtree to be reused elsewhere private AvlNode<E> removeMax(AvlNode<E> node) { if (right == null) { return left; } else { right = right.removeMax(node); distinctElements--; totalCount -= node.elemCount; return rebalance(); } } private void recomputeMultiset() { this.distinctElements = 1 + TreeMultiset.distinctElements(left) + TreeMultiset.distinctElements(right); this.totalCount = elemCount + totalCount(left) + totalCount(right); } private void recomputeHeight() { this.height = 1 + Math.max(height(left), height(right)); } private void recompute() { recomputeMultiset(); recomputeHeight(); } private AvlNode<E> rebalance() { switch (balanceFactor()) { case -2: if (right.balanceFactor() > 0) { right = right.rotateRight(); } return rotateLeft(); case 2: if (left.balanceFactor() < 0) { left = left.rotateLeft(); } return rotateRight(); default: recomputeHeight(); return this; } } private int balanceFactor() { return height(left) - height(right); } private AvlNode<E> rotateLeft() { checkState(right != null); AvlNode<E> newTop = right; this.right = newTop.left; newTop.left = this; newTop.totalCount = this.totalCount; newTop.distinctElements = this.distinctElements; this.recompute(); 
newTop.recomputeHeight(); return newTop; } private AvlNode<E> rotateRight() { checkState(left != null); AvlNode<E> newTop = left; this.left = newTop.right; newTop.right = this; newTop.totalCount = this.totalCount; newTop.distinctElements = this.distinctElements; this.recompute(); newTop.recomputeHeight(); return newTop; } private static long totalCount(@Nullable AvlNode<?> node) { return (node == null) ? 0 : node.totalCount; } private static int height(@Nullable AvlNode<?> node) { return (node == null) ? 0 : node.height; } private @Nullable AvlNode<E> ceiling(Comparator<? super E> comparator, E e) { int cmp = comparator.compare(e, elem); if (cmp < 0) { return (left == null) ? this : MoreObjects.firstNonNull(left.ceiling(comparator, e), this); } else if (cmp == 0) { return this; } else { return (right == null) ? null : right.ceiling(comparator, e); } } private @Nullable AvlNode<E> floor(Comparator<? super E> comparator, E e) { int cmp = comparator.compare(e, elem); if (cmp > 0) { return (right == null) ? this : MoreObjects.firstNonNull(right.floor(comparator, e), this); } else if (cmp == 0) { return this; } else { return (left == null) ? null : left.floor(comparator, e); } } E getElement() { return elem; } int getCount() { return elemCount; } @Override public String toString() { return Multisets.immutableEntry(getElement(), getCount()).toString(); } } private static <T> void successor(AvlNode<T> a, AvlNode<T> b) { a.succ = b; b.pred = a; } private static <T> void successor(AvlNode<T> a, AvlNode<T> b, AvlNode<T> c) { successor(a, b); successor(b, c); } /* * TODO(jlevy): Decide whether entrySet() should return entries with an equals() method that * calls the comparator to compare the two keys. If that change is made, * AbstractMultiset.equals() can simply check whether two multisets have equal entry sets. 
*/ /** * @serialData the comparator, the number of distinct elements, the first element, its count, the * second element, its count, and so on */ @GwtIncompatible // java.io.ObjectOutputStream private void writeObject(ObjectOutputStream stream) throws IOException { stream.defaultWriteObject(); stream.writeObject(elementSet().comparator()); Serialization.writeMultiset(this, stream); } @GwtIncompatible // java.io.ObjectInputStream private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { stream.defaultReadObject(); @SuppressWarnings("unchecked") // reading data stored by writeObject Comparator<? super E> comparator = (Comparator<? super E>) stream.readObject(); Serialization.getFieldSetter(AbstractSortedMultiset.class, "comparator").set(this, comparator); Serialization.getFieldSetter(TreeMultiset.class, "range") .set(this, GeneralRange.all(comparator)); Serialization.getFieldSetter(TreeMultiset.class, "rootReference") .set(this, new Reference<AvlNode<E>>()); AvlNode<E> header = new AvlNode<E>(null, 1); Serialization.getFieldSetter(TreeMultiset.class, "header").set(this, header); successor(header, header); Serialization.populateMultiset(this, stream); } @GwtIncompatible // not needed in emulated source private static final long serialVersionUID = 1; }
package org.apache.lucene.benchmark.byTask.feeds;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.benchmark.byTask.utils.Config;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.Locale;
import java.util.Stack;

/**
 * A {@link ContentSource} using the Dir collection for its input. Supports
 * the following configuration parameters (on top of {@link ContentSource}):
 * <ul>
 * <li><b>work.dir</b> - specifies the working directory. Required if "docs.dir"
 * denotes a relative path (<b>default=work</b>).
 * <li><b>docs.dir</b> - specifies the directory the Dir collection. Can be set
 * to a relative path if "work.dir" is also specified (<b>default=dir-out</b>).
 * </ul>
 */
public class DirContentSource extends ContentSource {

  /** Per-thread date parsing state ({@link SimpleDateFormat} is not thread-safe). */
  private static final class DateFormatInfo {
    DateFormat df;
    ParsePosition pos;
  }

  /**
   * Iterator over the .txt files under a directory tree, in a deterministic
   * (length-then-lexicographic, effectively zero-padded numeric) order.
   */
  public static class Iterator implements java.util.Iterator<File> {

    static class Comparator implements java.util.Comparator<File> {
      @Override
      public int compare(File _a, File _b) {
        String a = _a.toString();
        String b = _b.toString();
        // Left-pad the shorter path with '0' so that numeric file names
        // compare in numeric order (e.g. "9" before "10").
        int diff = a.length() - b.length();

        if (diff > 0) {
          while (diff-- > 0) {
            b = "0" + b;
          }
        } else if (diff < 0) {
          diff = -diff;
          while (diff-- > 0) {
            a = "0" + a;
          }
        }

        /* note it's reversed because we're going to push,
           which reverses again */
        return b.compareTo(a);
      }
    }

    int count = 0;
    Stack<File> stack = new Stack<>();

    /* this seems silly ... there must be a better way ...
       not that this is good, but can it matter? */
    Comparator c = new Comparator();

    public Iterator(File f) {
      push(f);
    }

    // Descend until the top of the stack is a plain file (or the stack is empty).
    void find() {
      if (stack.empty()) {
        return;
      }
      if (!(stack.peek()).isDirectory()) {
        return;
      }
      File f = stack.pop();
      push(f);
    }

    void push(File f) {
      push(f.listFiles(new FileFilter() {
        @Override
        public boolean accept(File file) {
          return file.isDirectory();
        }
      }));
      push(f.listFiles(new FileFilter() {
        @Override
        public boolean accept(File file) {
          return file.getName().endsWith(".txt");
        }
      }));
      find();
    }

    void push(File[] files) {
      // File.listFiles() returns null when the path is not a directory or an
      // I/O error occurs; guard against it instead of NPE'ing in Arrays.sort.
      if (files == null) {
        return;
      }
      Arrays.sort(files, c);
      for (int i = 0; i < files.length; i++) {
        // System.err.println("push " + files[i]);
        stack.push(files[i]);
      }
    }

    public int getCount() {
      return count;
    }

    @Override
    public boolean hasNext() {
      return stack.size() > 0;
    }

    @Override
    public File next() {
      assert hasNext();
      count++;
      File object = stack.pop();
      // System.err.println("pop " + object);
      find();
      return object;
    }

    @Override
    public void remove() {
      throw new RuntimeException("cannot");
    }
  }

  private ThreadLocal<DateFormatInfo> dateFormat = new ThreadLocal<>();
  private File dataDir = null;
  private int iteration = 0;
  private Iterator inputFiles = null;

  // get/initiate a thread-local simple date format (must do so
  // because SimpleDateFormat is not thread-safe).
  private DateFormatInfo getDateFormatInfo() {
    DateFormatInfo dfi = dateFormat.get();
    if (dfi == null) {
      dfi = new DateFormatInfo();
      dfi.pos = new ParsePosition(0);
      // date format: 30-MAR-1987 14:22:36.87
      dfi.df = new SimpleDateFormat("dd-MMM-yyyy kk:mm:ss.SSS", Locale.ROOT);
      dfi.df.setLenient(true);
      dateFormat.set(dfi);
    }
    return dfi;
  }

  /** Parses a document date line; returns null if unparseable (lenient ParsePosition parse). */
  private Date parseDate(String dateStr) {
    DateFormatInfo dfi = getDateFormatInfo();
    dfi.pos.setIndex(0);
    dfi.pos.setErrorIndex(-1);
    return dfi.df.parse(dateStr.trim(), dfi.pos);
  }

  @Override
  public void close() throws IOException {
    inputFiles = null;
  }

  @Override
  public DocData getNextDocData(DocData docData) throws NoMoreDataException, IOException {
    File f = null;
    String name = null;
    synchronized (this) {
      if (!inputFiles.hasNext()) {
        // exhausted files, start a new round, unless forever set to false.
        if (!forever) {
          throw new NoMoreDataException();
        }
        inputFiles = new Iterator(dataDir);
        iteration++;
      }
      f = inputFiles.next();
      // System.err.println(f);
      name = f.getCanonicalPath() + "_" + iteration;
    }

    // File layout: line 1 = date, line 3 = title, rest (from line 5) = body.
    // try-with-resources so the reader is closed even if a readLine() throws
    // (the original leaked the stream on I/O error).
    String dateStr;
    String title;
    StringBuilder bodyBuf = new StringBuilder(1024);
    try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(new FileInputStream(f), StandardCharsets.UTF_8))) {
      String line = null;
      dateStr = reader.readLine();
      reader.readLine(); // skip an empty line
      title = reader.readLine();
      reader.readLine(); // skip an empty line
      while ((line = reader.readLine()) != null) {
        bodyBuf.append(line).append(' ');
      }
    }
    addBytes(f.length());

    Date date = parseDate(dateStr);

    docData.clear();
    docData.setName(name);
    docData.setBody(bodyBuf.toString());
    docData.setTitle(title);
    docData.setDate(date);
    return docData;
  }

  @Override
  public synchronized void resetInputs() throws IOException {
    super.resetInputs();
    inputFiles = new Iterator(dataDir);
    iteration = 0;
  }

  @Override
  public void setConfig(Config config) {
    super.setConfig(config);

    File workDir = new File(config.get("work.dir", "work"));
    String d = config.get("docs.dir", "dir-out");
    dataDir = new File(d);
    if (!dataDir.isAbsolute()) {
      dataDir = new File(workDir, d);
    }

    inputFiles = new Iterator(dataDir);

    // The original "inputFiles == null" test was dead code (a constructor
    // never yields null, so the error below was unreachable). Check what the
    // message actually promises: that at least one .txt file was found.
    if (!inputFiles.hasNext()) {
      throw new RuntimeException("No txt files in dataDir: " + dataDir.getAbsolutePath());
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.io.snowflake.test;

import com.google.api.gax.paging.Page;
import com.google.cloud.storage.Blob;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Stream;
import java.util.zip.GZIPInputStream;
import javax.sql.DataSource;
import org.apache.beam.sdk.io.common.IOTestPipelineOptions;
import org.apache.beam.sdk.io.common.TestRow;
import org.apache.beam.sdk.io.snowflake.SnowflakeIO;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.values.KV;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** Shared helpers for Snowflake IO tests: SQL execution, CSV mappers, key files, GCS cleanup. */
@SuppressWarnings({
  "rawtypes", // TODO(https://issues.apache.org/jira/browse/BEAM-10556)
})
public class TestUtils {

  private static final Logger LOG = LoggerFactory.getLogger(TestUtils.class);

  private static final String VALID_PRIVATE_KEY_FILE_NAME = "valid_test_rsa_key.p8";
  private static final String INVALID_PRIVATE_KEY_FILE_NAME = "invalid_test_rsa_key.p8";
  private static final String PRIVATE_KEY_PASSPHRASE = "snowflake";

  public interface SnowflakeIOITPipelineOptions
      extends IOTestPipelineOptions, TestSnowflakePipelineOptions {}

  /** Opens a connection from the data source and runs the given query on it. */
  public static ResultSet runConnectionWithStatement(DataSource dataSource, String query)
      throws SQLException {
    Connection connection = dataSource.getConnection();
    return runStatement(query, connection);
  }

  /**
   * Executes the query and returns its {@link ResultSet}.
   *
   * <p>NOTE(review): the statement and connection are closed in the finally block BEFORE the
   * ResultSet is consumed; per JDBC, closing a Statement also closes its ResultSet, so callers may
   * receive a closed result. Behavior kept as-is for test compatibility — confirm with callers.
   */
  public static ResultSet runStatement(String query, Connection connection) throws SQLException {
    PreparedStatement statement = connection.prepareStatement(query);
    try {
      return statement.executeQuery();
    } finally {
      statement.close();
      connection.close();
    }
  }

  /** Recursively deletes the given directory; failures are logged, not rethrown. */
  public static void removeTempDir(String dir) {
    Path path = Paths.get(dir);
    try (Stream<Path> stream = Files.walk(path)) {
      // reverseOrder so children are deleted before their parent directories
      stream.sorted(Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete);
    } catch (IOException e) {
      LOG.info("Not able to remove files");
    }
  }

  /** Order-insensitive list equality (same size, mutual containment). */
  public static boolean areListsEqual(List<?> expected, List<?> actual) {
    return expected.size() == actual.size()
        && expected.containsAll(actual)
        && actual.containsAll(expected);
  }

  /**
   * Renders the values as a comma-separated Snowflake row, single-quoting non-null entries and
   * leaving nulls as empty fields. Returns "" for an empty array.
   */
  public static String toSnowflakeRow(String[] strings) {
    // Guard: the original for(;;) loop read strings[0] unconditionally and
    // threw ArrayIndexOutOfBoundsException for a zero-length input.
    if (strings.length == 0) {
      return "";
    }
    int iMax = strings.length - 1;
    StringBuilder b = new StringBuilder();
    for (int i = 0; ; i++) {
      if (strings[i] != null) {
        b.append(String.format("'%s'", strings[i]));
      }
      if (i == iMax) {
        return b.toString();
      }
      b.append(",");
    }
  }

  public static SnowflakeIO.UserDataMapper<Long> getCsvMapper() {
    return (SnowflakeIO.UserDataMapper<Long>)
        recordLine -> new String[] {recordLine.toString()};
  }

  public static SnowflakeIO.UserDataMapper<KV<String, Long>> getLongCsvMapperKV() {
    return (SnowflakeIO.UserDataMapper<KV<String, Long>>)
        recordLine -> new Long[] {recordLine.getValue()};
  }

  public static SnowflakeIO.UserDataMapper<Long> getLongCsvMapper() {
    return (SnowflakeIO.UserDataMapper<Long>) recordLine -> new Long[] {recordLine};
  }

  public static SnowflakeIO.CsvMapper<TestRow> getTestRowCsvMapper() {
    return (SnowflakeIO.CsvMapper<TestRow>)
        parts -> TestRow.create(Integer.valueOf(parts[0]), parts[1]);
  }

  public static SnowflakeIO.UserDataMapper<TestRow> getTestRowDataMapper() {
    return (SnowflakeIO.UserDataMapper<TestRow>)
        (TestRow element) -> new Object[] {element.id(), element.name()};
  }

  public static SnowflakeIO.UserDataMapper<String[]> getLStringCsvMapper() {
    return (SnowflakeIO.UserDataMapper<String[]>) recordLine -> recordLine;
  }

  public static SnowflakeIO.UserDataMapper<String> getStringCsvMapper() {
    return (SnowflakeIO.UserDataMapper<String>) recordLine -> new String[] {recordLine};
  }

  /** Maps a Long element to a KV of its string form and its long value. */
  public static class ParseToKv extends DoFn<Long, KV<String, Long>> {
    @ProcessElement
    public void processElement(ProcessContext c) {
      KV stringIntKV = KV.of(c.element().toString(), c.element().longValue());
      c.output(stringIntKV);
    }
  }

  /** Reads all lines of a gzipped UTF-8 text file. */
  public static List<String> readGZIPFile(String file) {
    List<String> lines = new ArrayList<>();
    // try-with-resources: the original never closed the gzip/file streams,
    // leaking file handles on every call.
    try (BufferedReader br =
        new BufferedReader(
            new InputStreamReader(
                new GZIPInputStream(new FileInputStream(file)), StandardCharsets.UTF_8))) {
      String line;
      while ((line = br.readLine()) != null) {
        lines.add(line);
      }
    } catch (IOException e) {
      throw new RuntimeException("Failed to read file", e);
    }
    return lines;
  }

  public static String getInvalidPrivateKeyPath(Class c) {
    return getPrivateKeyPath(c, INVALID_PRIVATE_KEY_FILE_NAME);
  }

  public static String getValidPrivateKeyPath(Class c) {
    return getPrivateKeyPath(c, VALID_PRIVATE_KEY_FILE_NAME);
  }

  public static String getRawValidPrivateKey(Class c) throws IOException {
    byte[] keyBytes = Files.readAllBytes(Paths.get(getValidPrivateKeyPath(c)));
    return new String(keyBytes, StandardCharsets.UTF_8);
  }

  public static String getPrivateKeyPassphrase() {
    return PRIVATE_KEY_PASSPHRASE;
  }

  /** Resolves a classpath resource to an absolute filesystem path. */
  private static String getPrivateKeyPath(Class c, String path) {
    ClassLoader classLoader = c.getClassLoader();
    File file = new File(classLoader.getResource(path).getFile());
    return file.getAbsolutePath();
  }

  /** Deletes all blobs in the staging bucket, optionally limited to the given directory prefix. */
  public static void clearStagingBucket(String stagingBucketName, String directory) {
    Storage storage = StorageOptions.getDefaultInstance().getService();
    Page<Blob> blobs;
    if (directory != null) {
      blobs = storage.list(stagingBucketName, Storage.BlobListOption.prefix(directory));
    } else {
      blobs = storage.list(stagingBucketName);
    }
    for (Blob blob : blobs.iterateAll()) {
      storage.delete(blob.getBlobId());
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.sqoop.mapreduce.db.netezza;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.SQLException;

import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.sqoop.config.ConfigurationHelper;
import org.apache.sqoop.io.NamedFifo;
import org.apache.sqoop.lib.DelimiterSet;
import org.apache.sqoop.manager.DirectNetezzaManager;
import org.apache.sqoop.mapreduce.db.DBConfiguration;
import org.apache.sqoop.util.FileUploader;
import org.apache.sqoop.util.PerfCounters;
import org.apache.sqoop.util.TaskId;

import com.cloudera.sqoop.mapreduce.AutoProgressMapper;

/**
 * Netezza import mapper using external tables.
 */
public abstract class NetezzaExternalTableImportMapper<K, V> extends
    AutoProgressMapper<Integer, NullWritable, K, V> {
  /**
   * Create a named FIFO, and start Netezza import connected to that FIFO. A
   * File object representing the FIFO is in 'fifoFile'.
   */
  private Configuration conf;
  private DBConfiguration dbc;
  private File fifoFile;
  private int numMappers;
  private Connection con;
  private BufferedReader recordReader;
  public static final Log LOG = LogFactory
      .getLog(NetezzaExternalTableImportMapper.class.getName());
  private NetezzaJDBCStatementRunner extTableThread;
  private PerfCounters counter;
  private String localLogDir = null;
  private String logDir = null;
  private File taskAttemptDir = null;

  /**
   * Builds the CREATE EXTERNAL TABLE ... AS SELECT statement that streams this
   * mapper's data slice (DATASLICEID % numMappers == myId) into the FIFO.
   */
  private String getSqlStatement(int myId) throws IOException {
    char fd = (char) conf.getInt(DelimiterSet.OUTPUT_FIELD_DELIM_KEY, ',');
    char qc = (char) conf.getInt(DelimiterSet.OUTPUT_ENCLOSED_BY_KEY, 0);
    char ec = (char) conf.getInt(DelimiterSet.OUTPUT_ESCAPED_BY_KEY, 0);
    String nullValue = conf.get(DirectNetezzaManager.NETEZZA_NULL_VALUE);

    boolean ctrlChars =
        conf.getBoolean(DirectNetezzaManager.NETEZZA_CTRL_CHARS_OPT, false);
    boolean truncString =
        conf.getBoolean(DirectNetezzaManager.NETEZZA_TRUNC_STRING_OPT, false);

    int errorThreshold = conf.getInt(
        DirectNetezzaManager.NETEZZA_ERROR_THRESHOLD_OPT, 1);
    String logDir = conf.get(DirectNetezzaManager.NETEZZA_LOG_DIR_OPT);

    String[] cols = dbc.getOutputFieldNames();
    String inputConds = dbc.getInputConditions();
    StringBuilder sqlStmt = new StringBuilder(2048);

    sqlStmt.append("CREATE EXTERNAL TABLE '");
    sqlStmt.append(fifoFile.getAbsolutePath());
    sqlStmt.append("' USING (REMOTESOURCE 'JDBC' ");
    sqlStmt.append(" BOOLSTYLE 'T_F' ");
    sqlStmt.append(" CRINSTRING FALSE ");
    if (ctrlChars) {
      sqlStmt.append(" CTRLCHARS TRUE ");
    }
    if (truncString) {
      sqlStmt.append(" TRUNCSTRING TRUE ");
    }
    // DELIMITER takes the numeric code point of the field delimiter.
    sqlStmt.append(" DELIMITER ");
    sqlStmt.append(Integer.toString(fd));
    sqlStmt.append(" ENCODING 'internal' ");
    if (ec > 0) {
      sqlStmt.append(" ESCAPECHAR '\\' ");
    }
    sqlStmt.append(" FORMAT 'Text' ");
    sqlStmt.append(" INCLUDEZEROSECONDS TRUE ");
    sqlStmt.append(" NULLVALUE '");
    if (nullValue != null) {
      sqlStmt.append(nullValue);
    } else {
      sqlStmt.append("null");
    }
    sqlStmt.append("' ");
    if (qc > 0) {
      switch (qc) {
        case '\'':
          sqlStmt.append(" QUOTEDVALUE SINGLE ");
          break;
        case '\"':
          sqlStmt.append(" QUOTEDVALUE DOUBLE ");
          break;
        default:
          LOG.warn("Unsupported enclosed by character: " + qc + " - ignoring.");
      }
    }

    sqlStmt.append(" MAXERRORS ").append(errorThreshold);

    File logDirPath = new File(taskAttemptDir, localLogDir);
    logDirPath.mkdirs();
    if (logDirPath.canWrite() && logDirPath.isDirectory()) {
      sqlStmt.append(" LOGDIR ")
          .append(logDirPath.getAbsolutePath()).append(' ');
    } else {
      throw new IOException("Unable to create log directory specified");
    }

    sqlStmt.append(") AS SELECT ");
    if (cols == null || cols.length == 0) {
      sqlStmt.append('*');
    } else {
      sqlStmt.append(cols[0]).append(' ');
      // BUGFIX: start at 1 — cols[0] has already been appended above; the
      // previous i = 0 start duplicated the first column in the SELECT list.
      for (int i = 1; i < cols.length; ++i) {
        sqlStmt.append(',').append(cols[i]);
      }
    }
    sqlStmt.append(" FROM ").append(dbc.getInputTableName()).append(' ');
    sqlStmt.append("WHERE (DATASLICEID % ");
    sqlStmt.append(numMappers).append(") = ").append(myId);
    if (inputConds != null && inputConds.length() > 0) {
      sqlStmt.append(" AND ( ").append(inputConds).append(')');
    }

    String stmt = sqlStmt.toString();
    LOG.debug("SQL generated for external table import for data slice " + myId
        + "=" + stmt);

    return stmt;
  }

  /**
   * Creates the FIFO for this data slice, launches the JDBC statement runner
   * that feeds it, and opens the reader end. The reader must be opened before
   * the writer side blocks on the FIFO.
   */
  private void initNetezzaExternalTableImport(int myId) throws IOException {

    taskAttemptDir = TaskId.getLocalWorkPath(conf);

    this.fifoFile = new File(taskAttemptDir, ("nzexttable-" + myId + ".txt"));
    String filename = fifoFile.toString();
    NamedFifo nf;
    // Create the FIFO itself.
    try {
      nf = new NamedFifo(this.fifoFile);
      nf.create();
    } catch (IOException ioe) {
      // Command failed.
      LOG.error("Could not create FIFO file " + filename);
      this.fifoFile = null;
      throw new IOException(
          "Could not create FIFO for netezza external table import", ioe);
    }

    String sqlStmt = getSqlStatement(myId);
    boolean cleanup = false;
    try {
      con = dbc.getConnection();
      extTableThread = new NetezzaJDBCStatementRunner(Thread.currentThread(),
          con, sqlStmt);
    } catch (SQLException sqle) {
      cleanup = true;
      throw new IOException(sqle);
    } catch (ClassNotFoundException cnfe) {
      // Also close the connection on this path; the original left
      // cleanup == false here, leaking any already-opened connection.
      cleanup = true;
      throw new IOException(cnfe);
    } finally {
      if (con != null && cleanup) {
        try {
          con.close();
        } catch (Exception e) {
          LOG.debug("Exception closing connection " + e.getMessage());
        }
      }
      con = null;
    }

    extTableThread.start();
    // We need to start the reader end first
    // NOTE(review): platform default charset; Netezza side uses
    // ENCODING 'internal' — confirm before forcing UTF-8 here.
    recordReader = new BufferedReader(new InputStreamReader(
        new FileInputStream(nf.getFile())));
  }

  /** Subclasses convert one delimited record into the mapper's output types. */
  abstract protected void writeRecord(Text text, Context context)
      throws IOException, InterruptedException;

  public void map(Integer dataSliceId, NullWritable val, Context context)
      throws IOException, InterruptedException {
    conf = context.getConfiguration();
    localLogDir =
        DirectNetezzaManager.getLocalLogDir(context.getTaskAttemptID());
    logDir = conf.get(DirectNetezzaManager.NETEZZA_LOG_DIR_OPT);

    dbc = new DBConfiguration(conf);
    numMappers = ConfigurationHelper.getConfNumMaps(conf);
    char rd = (char) conf.getInt(DelimiterSet.OUTPUT_RECORD_DELIM_KEY, '\n');
    initNetezzaExternalTableImport(dataSliceId);
    counter = new PerfCounters();
    counter.startClock();
    Text outputRecord = new Text();
    if (extTableThread.isAlive()) {
      try {
        String inputRecord = recordReader.readLine();
        while (inputRecord != null) {
          if (Thread.interrupted()) {
            if (!extTableThread.isAlive()) {
              break;
            }
          }
          outputRecord.set(inputRecord + rd);
          // May be we should set the output to be String for faster performance
          // There is no real benefit in changing it to Text and then
          // converting it back in our case
          writeRecord(outputRecord, context);
          counter.addBytes(1 + inputRecord.length());
          inputRecord = recordReader.readLine();
        }
      } finally {
        recordReader.close();
        extTableThread.join();
        counter.stopClock();
        LOG.info("Transferred " + counter.toString());
        if (extTableThread.hasExceptions()) {
          extTableThread.printException();
          throw new IOException(extTableThread.getException());
        }
        FileUploader.uploadFilesToDFS(taskAttemptDir.getAbsolutePath(),
            localLogDir, logDir, context.getJobID().toString(), conf);
      }
    }
  }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.idea.devkit.actions;

import com.intellij.CommonBundle;
import com.intellij.ide.actions.CreateFileAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.OrderEntry;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.roots.ui.configuration.ChooseModulesDialog;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.JavaDirectoryService;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.xml.XmlFile;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.devkit.DevKitBundle;
import org.jetbrains.idea.devkit.module.PluginModuleType;

import java.io.File;
import java.util.*;

/**
 * Static helpers used by DevKit "create ..." actions to locate plugin descriptors
 * ({@code plugin.xml}) and to create classes inside package directories.
 */
public final class DevkitActionsUtil {
  private static final Logger LOG = Logger.getInstance(DevkitActionsUtil.class);

  private DevkitActionsUtil() {
  }

  /**
   * Searches plugin descriptors that belong to modules having dependencies on the specified directory.<br>
   * If the directory belongs to a plugin module, its plugin descriptor is returned immediately.<br>
   * Otherwise dependencies on this directory are analysed. In case of multiple plugin descriptors found,
   * a dialog is shown to select the interesting ones.
   *
   * @param directory directory to analyse dependencies on.
   * @return null if the selection dialog has been cancelled, selected plugin descriptor otherwise.
   */
  @Nullable
  public static XmlFile choosePluginModuleDescriptor(@NotNull PsiDirectory directory) {
    Project project = directory.getProject();
    Module module = getModule(directory);
    if (module != null) {
      List<XmlFile> xmlFiles = choosePluginModuleDescriptors(module);
      if (xmlFiles == null) {
        // Selection dialog cancelled by the user.
        return null;
      }
      if (!xmlFiles.isEmpty()) {
        // Single-selection dialog mode guarantees at most one chosen descriptor.
        assert xmlFiles.size() == 1;
        return xmlFiles.get(0);
      }
    }
    // No plugin descriptor reachable from this directory — tell the user.
    Messages.showMessageDialog(project, DevKitBundle.message("error.no.plugin.xml"),
                               CommonBundle.getErrorTitle(), Messages.getErrorIcon());
    return null;
  }

  /**
   * Resolves plugin descriptors for modules that are candidates for {@code module}.
   *
   * @return empty list if nothing was found, {@code null} if the selection dialog was cancelled.
   */
  @Nullable
  private static List<XmlFile> choosePluginModuleDescriptors(@NotNull Module module) {
    List<Module> pluginModules = getCandidatePluginModules(module);
    if (pluginModules.isEmpty()) {
      return Collections.emptyList();
    }
    if (pluginModules.size() == 1) {
      XmlFile pluginXml = PluginModuleType.getPluginXml(pluginModules.get(0));
      if (pluginXml != null) {
        return Collections.singletonList(pluginXml);
      }
      return Collections.emptyList();
    }
    List<Module> selectedModules = showPluginModuleSelectionDialog(module.getProject(), pluginModules);
    if (selectedModules != null) {
      return ContainerUtil.mapNotNull(selectedModules, PluginModuleType::getPluginXml);
    }
    return null;
  }

  /**
   * Shows a single-selection dialog over the given plugin modules.
   *
   * @return the chosen modules, or {@code null} if the dialog was cancelled.
   */
  @Nullable
  private static List<Module> showPluginModuleSelectionDialog(@NotNull Project project, @NotNull List<Module> pluginModules) {
    String message = DevKitBundle.message("select.plugin.module.to.patch");
    ChoosePluginModuleDialog chooseModulesDialog = new ChoosePluginModuleDialog(project, pluginModules, message, null);
    chooseModulesDialog.setSingleSelectionMode();
    chooseModulesDialog.show();

    List<Module> selectedModules = chooseModulesDialog.getChosenElements();
    if (selectedModules.isEmpty()) {
      return null; // Dialog has been cancelled
    }
    return selectedModules;
  }

  /**
   * Returns all modules that depend on the current one and have plugin descriptors.<br>
   * If the module itself is a plugin module, it is returned immediately.
   */
  @NotNull
  public static List<Module> getCandidatePluginModules(@NotNull Module module) {
    XmlFile currentModulePluginXml = PluginModuleType.getPluginXml(module);
    if (currentModulePluginXml != null) {
      return Collections.singletonList(module);
    }

    List<Module> candidateModules = PluginModuleType.getCandidateModules(module);
    // Keep only candidates that actually have a plugin descriptor.
    candidateModules.removeIf(m -> PluginModuleType.getPluginXml(m) == null);
    return candidateModules;
  }

  /**
   * Verifies that a class with the given (possibly qualified) name may be created under {@code directory}.
   *
   * @param directory root directory for the (sub-)package of {@code name}.
   * @param name      short or dot-qualified class name.
   * @throws IncorrectOperationException if the class cannot be created in the resolved directory.
   */
  public static void checkCanCreateClass(PsiDirectory directory, String name) {
    PsiDirectory currentDir = directory;
    String packageName = StringUtil.getPackageName(name);
    if (!packageName.isEmpty()) {
      for (String dir : packageName.split("\\.")) {
        PsiDirectory childDir = currentDir.findSubdirectory(dir);
        if (childDir == null) {
          // Missing package directories will be created later; nothing to validate yet.
          return;
        }
        currentDir = childDir;
      }
    }
    JavaDirectoryService.getInstance().checkCreateClass(currentDir, StringUtil.getShortName(name));
  }

  public static PsiClass createSingleClass(String name, String classTemplateName, PsiDirectory directory) {
    return createSingleClass(name, classTemplateName, directory, Collections.emptyMap());
  }

  /**
   * Creates a class from the given file template, creating intermediate package directories
   * for a dot-qualified {@code name} as needed.
   */
  public static PsiClass createSingleClass(String name, String classTemplateName, PsiDirectory directory,
                                           @NotNull Map<String, String> properties) {
    if (name.contains(".")) {
      String[] names = name.split("\\.");
      for (int i = 0; i < names.length - 1; i++) {
        directory = CreateFileAction.findOrCreateSubdirectory(directory, names[i]);
      }
      name = names[names.length - 1];
    }
    return JavaDirectoryService.getInstance().createClass(directory, name, classTemplateName, false, properties);
  }

  /**
   * Finds the module owning {@code dir}. For library directories the owner modules are
   * sorted by module dependency order and the first one is returned.
   */
  @Nullable
  private static Module getModule(PsiDirectory dir) {
    Project project = dir.getProject();
    ProjectFileIndex fileIndex = ProjectRootManager.getInstance(project).getFileIndex();

    VirtualFile vFile = dir.getVirtualFile();
    if (fileIndex.isInLibrary(vFile)) {
      List<OrderEntry> orderEntries = fileIndex.getOrderEntriesForFile(vFile);
      if (orderEntries.isEmpty()) {
        return null;
      }
      Set<Module> modules = new HashSet<>();
      for (OrderEntry orderEntry : orderEntries) {
        modules.add(orderEntry.getOwnerModule());
      }
      Module[] candidates = modules.toArray(Module.EMPTY_ARRAY);
      // Pick the most "upstream" owner according to project dependency order.
      Arrays.sort(candidates, ModuleManager.getInstance(project).moduleDependencyComparator());
      return candidates[0];
    }
    return fileIndex.getModuleForFile(vFile);
  }

  /** Module chooser that shows each module's plugin.xml location (project-relative when possible). */
  private static class ChoosePluginModuleDialog extends ChooseModulesDialog {
    ChoosePluginModuleDialog(Project project, List<? extends Module> items, String title, @Nullable String description) {
      super(project, items, title, description);
    }

    @Override
    protected String getItemLocation(Module item) {
      XmlFile pluginXml = PluginModuleType.getPluginXml(item);
      if (pluginXml == null) {
        return null;
      }

      VirtualFile virtualFile = pluginXml.getVirtualFile();
      VirtualFile projectPath = item.getProject().getBaseDir();
      if (virtualFile == null) {
        LOG.warn("Unexpected null plugin.xml VirtualFile for module: " + item);
      }
      if (projectPath == null) {
        LOG.warn("Unexpected null project basedir VirtualFile for module: " + item);
      }
      if (virtualFile == null || projectPath == null) {
        return null;
      }

      if (VfsUtilCore.isAncestor(projectPath, virtualFile, false)) {
        return VfsUtilCore.getRelativePath(virtualFile, projectPath, File.separatorChar);
      }
      return virtualFile.getPresentableUrl();
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSInputStream;
import org.apache.hadoop.hdfs.DFSOutputStream;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.client.HdfsDataInputStream;
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
import org.apache.hadoop.hdfs.client.impl.CorruptFileBlockIterator;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
import org.apache.hadoop.util.Progressable;

/**
 * The {@link AbstractFileSystem} implementation for the {@code hdfs://} scheme.
 * All operations delegate to a {@link DFSClient} bound to the namenode named
 * in the file system URI.
 */
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class Hdfs extends AbstractFileSystem {

  DFSClient dfs;
  // Whether opened streams verify block checksums; toggled via setVerifyChecksum().
  private boolean verifyChecksum = true;

  static {
    HdfsConfiguration.init();
  }

  /**
   * This constructor has the signature needed by
   * {@link AbstractFileSystem#createFileSystem(URI, Configuration)}
   *
   * @param theUri which must be that of Hdfs
   * @param conf configuration
   * @throws IOException
   */
  Hdfs(final URI theUri, final Configuration conf) throws IOException, URISyntaxException {
    super(theUri, HdfsConstants.HDFS_URI_SCHEME, true,
        HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT);

    if (!theUri.getScheme().equalsIgnoreCase(HdfsConstants.HDFS_URI_SCHEME)) {
      throw new IllegalArgumentException("Passed URI's scheme is not for Hdfs");
    }
    String host = theUri.getHost();
    if (host == null) {
      throw new IOException("Incomplete HDFS URI, no host: " + theUri);
    }

    this.dfs = new DFSClient(theUri, conf, getStatistics());
  }

  @Override
  public int getUriDefaultPort() {
    return HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT;
  }

  @Override
  public HdfsDataOutputStream createInternal(Path f,
      EnumSet<CreateFlag> createFlag, FsPermission absolutePermission,
      int bufferSize, short replication, long blockSize, Progressable progress,
      ChecksumOpt checksumOpt, boolean createParent) throws IOException {

    final DFSOutputStream dfsos = dfs.primitiveCreate(getUriPath(f),
        absolutePermission, createFlag, createParent, replication, blockSize,
        progress, bufferSize, checksumOpt);
    return dfs.createWrappedOutputStream(dfsos, statistics,
        dfsos.getInitialLen());
  }

  @Override
  public boolean delete(Path f, boolean recursive)
      throws IOException, UnresolvedLinkException {
    return dfs.delete(getUriPath(f), recursive);
  }

  /**
   * The returned BlockLocation will have different formats for replicated
   * and erasure coded file.
   *
   * Please refer to
   * {@link FileContext#getFileBlockLocations(Path, long, long)}
   * for more details.
   */
  @Override
  public BlockLocation[] getFileBlockLocations(Path p, long start, long len)
      throws IOException, UnresolvedLinkException {
    return dfs.getBlockLocations(getUriPath(p), start, len);
  }

  @Override
  public FileChecksum getFileChecksum(Path f)
      throws IOException, UnresolvedLinkException {
    return dfs.getFileChecksumWithCombineMode(getUriPath(f), Long.MAX_VALUE);
  }

  /**
   * {@inheritDoc}
   *
   * If the given path is a symlink, the path will be resolved to a target path
   * and it will get the resolved path's FileStatus object. It will not be
   * represented as a symlink and isDirectory API returns true if the resolved
   * path is a directory, false otherwise.
   */
  @Override
  public FileStatus getFileStatus(Path f)
      throws IOException, UnresolvedLinkException {
    HdfsFileStatus fi = dfs.getFileInfo(getUriPath(f));
    if (fi != null) {
      return fi.makeQualified(getUri(), f);
    } else {
      throw new FileNotFoundException("File does not exist: " + f.toString());
    }
  }

  /**
   * Synchronize client metadata state with Active NameNode.
   * <p>
   * In HA the client synchronizes its state with the Active NameNode
   * in order to guarantee subsequent read consistency from Observer Nodes.
   * @throws IOException
   */
  @Override
  public void msync() throws IOException {
    dfs.msync();
  }

  @Override
  public FileStatus getFileLinkStatus(Path f)
      throws IOException, UnresolvedLinkException {
    HdfsFileStatus fi = dfs.getFileLinkInfo(getUriPath(f));
    if (fi != null) {
      return fi.makeQualified(getUri(), f);
    } else {
      throw new FileNotFoundException("File does not exist: " + f);
    }
  }

  @Override
  public FsStatus getFsStatus() throws IOException {
    return dfs.getDiskStatus();
  }

  @Override
  @Deprecated
  public FsServerDefaults getServerDefaults() throws IOException {
    return dfs.getServerDefaults();
  }

  @Override
  public FsServerDefaults getServerDefaults(final Path f) throws IOException {
    return dfs.getServerDefaults();
  }

  /**
   * The BlockLocation of returned LocatedFileStatus will have different
   * formats for replicated and erasure coded file.
   * Please refer to
   * {@link FileContext#getFileBlockLocations(Path, long, long)} for
   * more details.
   */
  @Override
  public RemoteIterator<LocatedFileStatus> listLocatedStatus(
      final Path p) throws FileNotFoundException, IOException {
    return new DirListingIterator<LocatedFileStatus>(p, true) {
      @Override
      public LocatedFileStatus next() throws IOException {
        return ((HdfsLocatedFileStatus)getNext()).makeQualifiedLocated(
            getUri(), p);
      }
    };
  }

  @Override
  public RemoteIterator<FileStatus> listStatusIterator(final Path f)
      throws AccessControlException, FileNotFoundException,
      UnresolvedLinkException, IOException {
    return new DirListingIterator<FileStatus>(f, false) {
      @Override
      public FileStatus next() throws IOException {
        return getNext().makeQualified(getUri(), f);
      }
    };
  }

  /**
   * This class defines an iterator that returns
   * the file status of each file/subdirectory of a directory
   *
   * if needLocation, status contains block location if it is a file
   * throws a RuntimeException with the error as its cause.
   *
   * @param <T> the type of the file status
   */
  abstract private class DirListingIterator<T extends FileStatus>
      implements RemoteIterator<T> {
    // Current batch of entries fetched from the namenode; null once exhausted.
    private DirectoryListing thisListing;
    // Index of the next entry within the current batch.
    private int i;
    final private String src;
    // Whether returned statuses should include block locations (files only).
    final private boolean needLocation;

    private DirListingIterator(Path p, boolean needLocation)
        throws IOException {
      this.src = Hdfs.this.getUriPath(p);
      this.needLocation = needLocation;

      // fetch the first batch of entries in the directory
      thisListing = dfs.listPaths(
          src, HdfsFileStatus.EMPTY_NAME, needLocation);
      if (thisListing == null) { // the directory does not exist
        throw new FileNotFoundException("File " + src + " does not exist.");
      }
    }

    @Override
    public boolean hasNext() throws IOException {
      if (thisListing == null) {
        return false;
      }
      if (i >= thisListing.getPartialListing().length
          && thisListing.hasMore()) {
        // current listing is exhausted & fetch a new listing
        thisListing = dfs.listPaths(src,
            thisListing.getLastName(), needLocation);
        if (thisListing == null) {
          // Directory was deleted between batches.
          throw new FileNotFoundException("File " + src + " does not exist.");
        }
        i = 0;
      }
      return (i < thisListing.getPartialListing().length);
    }

    /**
     * Get the next item in the list
     * @return the next item in the list
     *
     * @throws IOException if there is any error
     * @throws NoSuchElementException if no more entry is available
     */
    public HdfsFileStatus getNext() throws IOException {
      if (hasNext()) {
        return thisListing.getPartialListing()[i++];
      }
      throw new NoSuchElementException("No more entry in " + src);
    }
  }

  /**
   * {@inheritDoc}
   *
   * If any of the the immediate children of the given path f is a symlink, the
   * returned FileStatus object of that children would be represented as a
   * symlink. It will not be resolved to the target path and will not get the
   * target path FileStatus object. The target path will be available via
   * getSymlink on that children's FileStatus object. Since it represents as
   * symlink, isDirectory on that children's FileStatus will return false.
   *
   * If you want to get the FileStatus of target path for that children, you may
   * want to use GetFileStatus API with that children's symlink path. Please see
   * {@link Hdfs#getFileStatus(Path f)}
   */
  @Override
  public FileStatus[] listStatus(Path f)
      throws IOException, UnresolvedLinkException {
    String src = getUriPath(f);

    // fetch the first batch of entries in the directory
    DirectoryListing thisListing = dfs.listPaths(
        src, HdfsFileStatus.EMPTY_NAME);

    if (thisListing == null) { // the directory does not exist
      throw new FileNotFoundException("File " + f + " does not exist.");
    }

    HdfsFileStatus[] partialListing = thisListing.getPartialListing();
    if (!thisListing.hasMore()) { // got all entries of the directory
      FileStatus[] stats = new FileStatus[partialListing.length];
      for (int i = 0; i < partialListing.length; i++) {
        stats[i] = partialListing[i].makeQualified(getUri(), f);
      }
      return stats;
    }

    // The directory size is too big that it needs to fetch more
    // estimate the total number of entries in the directory
    int totalNumEntries =
        partialListing.length + thisListing.getRemainingEntries();
    ArrayList<FileStatus> listing = new ArrayList<FileStatus>(totalNumEntries);
    // add the first batch of entries to the array list
    for (HdfsFileStatus fileStatus : partialListing) {
      listing.add(fileStatus.makeQualified(getUri(), f));
    }

    // now fetch more entries
    do {
      thisListing = dfs.listPaths(src, thisListing.getLastName());

      if (thisListing == null) {
        // the directory is deleted
        throw new FileNotFoundException("File " + f + " does not exist.");
      }

      partialListing = thisListing.getPartialListing();
      for (HdfsFileStatus fileStatus : partialListing) {
        listing.add(fileStatus.makeQualified(getUri(), f));
      }
    } while (thisListing.hasMore());

    return listing.toArray(new FileStatus[listing.size()]);
  }

  @Override
  public RemoteIterator<Path> listCorruptFileBlocks(Path path)
      throws IOException {
    return new CorruptFileBlockIterator(dfs, path);
  }

  @Override
  public void mkdir(Path dir, FsPermission permission, boolean createParent)
      throws IOException, UnresolvedLinkException {
    dfs.primitiveMkdir(getUriPath(dir), permission, createParent);
  }

  @SuppressWarnings("deprecation")
  @Override
  public HdfsDataInputStream open(Path f, int bufferSize)
      throws IOException, UnresolvedLinkException {
    final DFSInputStream dfsis = dfs.open(getUriPath(f),
        bufferSize, verifyChecksum);
    return dfs.createWrappedInputStream(dfsis);
  }

  @Override
  public boolean truncate(Path f, long newLength)
      throws IOException, UnresolvedLinkException {
    return dfs.truncate(getUriPath(f), newLength);
  }

  @Override
  public void renameInternal(Path src, Path dst)
      throws IOException, UnresolvedLinkException {
    dfs.rename(getUriPath(src), getUriPath(dst), Options.Rename.NONE);
  }

  @Override
  public void renameInternal(Path src, Path dst, boolean overwrite)
      throws IOException, UnresolvedLinkException {
    dfs.rename(getUriPath(src), getUriPath(dst),
        overwrite ? Options.Rename.OVERWRITE : Options.Rename.NONE);
  }

  @Override
  public void setOwner(Path f, String username, String groupname)
      throws IOException, UnresolvedLinkException {
    dfs.setOwner(getUriPath(f), username, groupname);
  }

  @Override
  public void setPermission(Path f, FsPermission permission)
      throws IOException, UnresolvedLinkException {
    dfs.setPermission(getUriPath(f), permission);
  }

  @Override
  public boolean setReplication(Path f, short replication)
      throws IOException, UnresolvedLinkException {
    return dfs.setReplication(getUriPath(f), replication);
  }

  @Override
  public void setTimes(Path f, long mtime, long atime)
      throws IOException, UnresolvedLinkException {
    dfs.setTimes(getUriPath(f), mtime, atime);
  }

  @Override
  public void setVerifyChecksum(boolean verifyChecksum) throws IOException {
    this.verifyChecksum = verifyChecksum;
  }

  @Override
  public boolean supportsSymlinks() {
    return true;
  }

  @Override
  public void createSymlink(Path target, Path link, boolean createParent)
      throws IOException, UnresolvedLinkException {
    dfs.createSymlink(target.toString(), getUriPath(link), createParent);
  }

  @Override
  public Path getLinkTarget(Path p) throws IOException {
    return new Path(dfs.getLinkTarget(getUriPath(p)));
  }

  @Override
  public String getCanonicalServiceName() {
    return dfs.getCanonicalServiceName();
  }

  @Override //AbstractFileSystem
  public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
    Token<DelegationTokenIdentifier> result = dfs
        .getDelegationToken(renewer == null ? null : new Text(renewer));
    List<Token<?>> tokenList = new ArrayList<Token<?>>();
    tokenList.add(result);
    return tokenList;
  }

  @Override
  public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
      throws IOException {
    dfs.modifyAclEntries(getUriPath(path), aclSpec);
  }

  @Override
  public void removeAclEntries(Path path, List<AclEntry> aclSpec)
      throws IOException {
    dfs.removeAclEntries(getUriPath(path), aclSpec);
  }

  @Override
  public void removeDefaultAcl(Path path) throws IOException {
    dfs.removeDefaultAcl(getUriPath(path));
  }

  @Override
  public void removeAcl(Path path) throws IOException {
    dfs.removeAcl(getUriPath(path));
  }

  @Override
  public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
    dfs.setAcl(getUriPath(path), aclSpec);
  }

  @Override
  public AclStatus getAclStatus(Path path) throws IOException {
    return dfs.getAclStatus(getUriPath(path));
  }

  @Override
  public void setXAttr(Path path, String name, byte[] value,
      EnumSet<XAttrSetFlag> flag) throws IOException {
    dfs.setXAttr(getUriPath(path), name, value, flag);
  }

  @Override
  public byte[] getXAttr(Path path, String name) throws IOException {
    return dfs.getXAttr(getUriPath(path), name);
  }

  @Override
  public Map<String, byte[]> getXAttrs(Path path) throws IOException {
    return dfs.getXAttrs(getUriPath(path));
  }

  @Override
  public Map<String, byte[]> getXAttrs(Path path, List<String> names)
      throws IOException {
    return dfs.getXAttrs(getUriPath(path), names);
  }

  @Override
  public List<String> listXAttrs(Path path) throws IOException {
    return dfs.listXAttrs(getUriPath(path));
  }

  @Override
  public void removeXAttr(Path path, String name) throws IOException {
    dfs.removeXAttr(getUriPath(path), name);
  }

  @Override
  public void access(Path path, final FsAction mode) throws IOException {
    dfs.checkAccess(getUriPath(path), mode);
  }

  @Override
  public void satisfyStoragePolicy(Path path) throws IOException {
    dfs.satisfyStoragePolicy(getUriPath(path));
  }

  @Override
  public void setStoragePolicy(Path path, String policyName)
      throws IOException {
    dfs.setStoragePolicy(getUriPath(path), policyName);
  }

  @Override
  public void unsetStoragePolicy(final Path src) throws IOException {
    dfs.unsetStoragePolicy(getUriPath(src));
  }

  @Override
  public BlockStoragePolicySpi getStoragePolicy(Path src) throws IOException {
    return dfs.getStoragePolicy(getUriPath(src));
  }

  @Override
  public Collection<? extends BlockStoragePolicySpi> getAllStoragePolicies()
      throws IOException {
    return Arrays.asList(dfs.getStoragePolicies());
  }

  /**
   * Renew an existing delegation token.
   *
   * @param token delegation token obtained earlier
   * @return the new expiration time
   * @throws InvalidToken
   * @throws IOException
   * @deprecated Use Token.renew instead.
   */
  @Deprecated
  @SuppressWarnings("unchecked")
  public long renewDelegationToken(
      Token<? extends AbstractDelegationTokenIdentifier> token)
      throws InvalidToken, IOException {
    return dfs.renewDelegationToken((Token<DelegationTokenIdentifier>) token);
  }

  /**
   * Cancel an existing delegation token.
   *
   * @param token delegation token
   * @throws InvalidToken
   * @throws IOException
   * @deprecated Use Token.cancel instead.
   */
  @Deprecated
  @SuppressWarnings("unchecked")
  public void cancelDelegationToken(
      Token<? extends AbstractDelegationTokenIdentifier> token)
      throws InvalidToken, IOException {
    dfs.cancelDelegationToken((Token<DelegationTokenIdentifier>) token);
  }

  @Override
  public Path createSnapshot(final Path path, final String snapshotName)
      throws IOException {
    return new Path(dfs.createSnapshot(getUriPath(path), snapshotName));
  }

  @Override
  public void renameSnapshot(final Path path, final String snapshotOldName,
      final String snapshotNewName) throws IOException {
    dfs.renameSnapshot(getUriPath(path), snapshotOldName, snapshotNewName);
  }

  @Override
  public void deleteSnapshot(final Path snapshotDir, final String snapshotName)
      throws IOException {
    dfs.deleteSnapshot(getUriPath(snapshotDir), snapshotName);
  }
}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.java.codeInsight; import com.intellij.JavaTestUtil; import com.intellij.ide.DataManager; import com.intellij.lang.java.JavaLanguage; import com.intellij.openapi.actionSystem.IdeActions; import com.intellij.openapi.editor.actionSystem.EditorActionHandler; import com.intellij.openapi.editor.actionSystem.EditorActionManager; import com.intellij.openapi.fileTypes.StdFileTypes; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.codeStyle.CodeStyleSettingsManager; import com.intellij.psi.codeStyle.CommonCodeStyleSettings; import com.intellij.testFramework.LightCodeInsightTestCase; import org.jdom.Element; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; public class JoinLinesTest extends LightCodeInsightTestCase { @NotNull @Override protected String getTestDataPath() { return JavaTestUtil.getJavaTestDataPath(); } public void testNormal() { doTest(); } public void testStringLiteral() { doTest(); } public void testLiteralSCR4989() { doTest(); } public void testSCR3493() { CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()); boolean use_tab_character = settings.useTabCharacter(null); boolean smart_tabs = settings.isSmartTabs(null); try { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = true; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = true; doTest(); } finally { 
settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = smart_tabs; } } public void testSCR3493a() { CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()); boolean use_tab_character = settings.useTabCharacter(null); boolean smart_tabs = settings.isSmartTabs(null); try { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = true; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = true; doTest(); } finally { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = smart_tabs; } } public void testSCR3493b() { CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()); boolean use_tab_character = settings.useTabCharacter(null); boolean smart_tabs = settings.isSmartTabs(null); try { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = true; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = true; doTest(); } finally { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = smart_tabs; } } public void testSCR3493c() { CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()); boolean use_tab_character = settings.useTabCharacter(null); boolean smart_tabs = settings.isSmartTabs(null); try { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = true; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = true; doTest(); } finally { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = smart_tabs; } } public void testSCR3493d() { CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()); boolean use_tab_character = settings.useTabCharacter(null); boolean smart_tabs = settings.isSmartTabs(null); 
try { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = true; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = true; doTest(); } finally { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = smart_tabs; } } public void testSCR3493e() { CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()); boolean use_tab_character = settings.useTabCharacter(null); boolean smart_tabs = settings.isSmartTabs(null); try { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = true; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = true; doTest(); } finally { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = smart_tabs; } } public void testSCR5959() { doTest(); } public void testSCR6299() { doTest(); } public void testLocalVar() { doTest(); } public void testSlashComment() { doTest(); } public void testDocComment() { doTest(); } public void testOnEmptyLine() { doTest(); } public void testCollapseClass() { doTest(); } public void testSCR10386() { doTest(); } public void testDeclarationWithInitializer() {doTest(); } public void testUnwrapCodeBlock1() { CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()); boolean use_tab_character = settings.useTabCharacter(null); boolean smart_tabs = settings.isSmartTabs(null); int old = settings.IF_BRACE_FORCE; try { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = true; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = true; settings.getCommonSettings(JavaLanguage.INSTANCE).IF_BRACE_FORCE = CommonCodeStyleSettings.FORCE_BRACES_IF_MULTILINE; doTest(); } finally { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = smart_tabs; 
settings.getCommonSettings(JavaLanguage.INSTANCE).IF_BRACE_FORCE = old; } } public void testUnwrapCodeBlock2() { CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()); boolean use_tab_character = settings.useTabCharacter(null); boolean smart_tabs = settings.isSmartTabs(null); int old = settings.IF_BRACE_FORCE; try { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = true; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = true; settings.getCommonSettings(JavaLanguage.INSTANCE).IF_BRACE_FORCE = CommonCodeStyleSettings.FORCE_BRACES_ALWAYS; doTest(); } finally { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = smart_tabs; settings.getCommonSettings(JavaLanguage.INSTANCE).IF_BRACE_FORCE = old; } } public void testAssignmentExpression() { doTest(); } public void testReformatInsertsNewlines() { CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()); final Element root = new Element("fake"); settings.writeExternal(root); try { settings.getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = true; settings.getIndentOptions(StdFileTypes.JAVA).SMART_TABS = true; settings.IF_BRACE_FORCE = CommonCodeStyleSettings.FORCE_BRACES_ALWAYS; settings.METHOD_BRACE_STYLE = CommonCodeStyleSettings.NEXT_LINE; doTest(); } finally { settings.readExternal(root); } } public void testForceBrace() { CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()); int old = settings.IF_BRACE_FORCE; try { settings.IF_BRACE_FORCE = CommonCodeStyleSettings.FORCE_BRACES_ALWAYS; doTest(); } finally { settings.IF_BRACE_FORCE = old; } } public void testWrongWrapping() { CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()); settings.setDefaultRightMargin(80); settings.CALL_PARAMETERS_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED; settings.ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true; doTest(); } 
public void testSubsequentJoiningAndUnexpectedTextRemoval() { // Inspired by IDEA-65342 CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()); settings.setDefaultRightMargin(50); settings.getCommonSettings(JavaLanguage.INSTANCE).CALL_PARAMETERS_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED; doTest(2); } public void testLeaveTrailingComment() { doTest(); } public void testConvertComment() { doTest(); } public void testJoiningMethodCallWhenItDoesntFit() { CommonCodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()).getCommonSettings(JavaLanguage.INSTANCE); settings.METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED; settings.RIGHT_MARGIN = 20; doTest(); } public void testMultipleBlockComments() { doTest(); } public void testPreserveSpaceIfOnNewLineOptionEnabled() { CommonCodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject()).getCommonSettings(JavaLanguage.INSTANCE); settings.CATCH_ON_NEW_LINE = true; doTest(); } private void doTest() { doTest(".java"); } private void doTest(int times) { doTest(".java", times); } private void doTest(@NonNls final String ext) { doTest(ext, 1); } private void doTest(@NonNls final String ext, int times) { @NonNls String path = "/codeInsight/joinLines/"; configureByFile(path + getTestName(false) + ext); while (times-- > 0) { performAction(); } checkResultByFile(path + getTestName(false) + "_after" + ext); } private void performAction() { EditorActionManager actionManager = EditorActionManager.getInstance(); EditorActionHandler actionHandler = actionManager.getActionHandler(IdeActions.ACTION_EDITOR_JOIN_LINES); actionHandler.execute(getEditor(), DataManager.getInstance().getDataContext()); } }
package com.awn.common.process.utils;

import java.util.Locale;

/**
 * DoubleUtils
 *
 * Null-tolerant helpers around {@link Double}. The class's contract is that a
 * null argument never causes an exception: it maps to an empty string, a null
 * result, or (for {@link #add}) is treated as "absent" so the other operands
 * still sum. {@code abs}/{@code floor}/{@code ceil}/{@code round} previously
 * auto-unboxed and threw NullPointerException on null, inconsistently with the
 * rest of the API; they now return null instead (backward compatible — callers
 * could not have relied on the NPE for a result).
 */
public final class DoubleUtils {

    /**
     * private constructor — utility class, not instantiable
     */
    private DoubleUtils() {
    }

    /**
     * @param double1
     *            Double
     * @return String representation of argument. Empty string if null.
     */
    public static String toString(final Double double1) {
        return double1 == null ? "" : double1.toString();
    }

    /**
     * @param double1
     *            Double
     * @param format
     *            String (a {@link String#format} pattern, e.g. "%.2f")
     * @param locale
     *            String (an IETF BCP 47 language tag, e.g. "en-US")
     * @return String representation of argument with specified format and
     *         locale. Empty string if double1 is null.
     */
    public static String toString(final Double double1, final String format, final String locale) {
        return double1 == null ? "" : String.format(Locale.forLanguageTag(locale), format, double1);
    }

    /**
     * @param double1
     *            Double
     * @param double2
     *            Double
     * @return the value 0 if double2 is numerically equal to double1; a value
     *         less than 0 if double1 is numerically less than double2; and a
     *         value greater than 0 if double1 is numerically greater than
     *         double2. Null in case of null argument(s).
     */
    public static Integer compareTo(final Double double1, final Double double2) {
        return double1 == null || double2 == null ? null : double1.compareTo(double2);
    }

    /**
     * @param double1
     *            Double
     * @param double2
     *            Double
     * @return true if double1 is numerically greater than double2, otherwise
     *         false. Null in case of null argument(s).
     */
    public static Boolean isGreaterThan(final Double double1, final Double double2) {
        // The comparison already yields a boolean; no need for "? true : false".
        return double1 == null || double2 == null ? null : double1.compareTo(double2) > 0;
    }

    /**
     * @param double1
     *            Double
     * @param double2
     *            Double
     * @return true if double1 is numerically lower than double2, otherwise
     *         false. Null in case of null argument(s).
     */
    public static Boolean isLowerThan(final Double double1, final Double double2) {
        return double1 == null || double2 == null ? null : double1.compareTo(double2) < 0;
    }

    /**
     * @param double1
     *            Double
     * @return the absolute value of the argument. Null in case of null
     *         argument (previously threw NullPointerException via unboxing).
     */
    public static Double abs(final Double double1) {
        return double1 == null ? null : Math.abs(double1);
    }

    /**
     * @param double1
     *            Double
     * @return the largest (closest to positive infinity) floating-point value
     *         that is less than or equal to the argument and is equal to a
     *         mathematical integer. Null in case of null argument.
     */
    public static Double floor(final Double double1) {
        return double1 == null ? null : Math.floor(double1);
    }

    /**
     * @param double1
     *            Double
     * @return the smallest (closest to negative infinity) floating-point value
     *         that is greater than or equal to the argument and is equal to a
     *         mathematical integer. Null in case of null argument.
     */
    public static Double ceil(final Double double1) {
        return double1 == null ? null : Math.ceil(double1);
    }

    /**
     * @param double1
     *            Double
     * @return the value of the argument rounded to the nearest long value.
     *         Null in case of null argument.
     */
    public static Long round(final Double double1) {
        return double1 == null ? null : Math.round(double1);
    }

    /**
     * @param double1
     *            Double
     * @param double2
     *            Double
     * @return the sum of the two arguments, handling null (ex : 3 + null = 3)
     */
    public static Double add(final Double double1, final Double double2) {
        return double1 == null ? double2 : double2 == null ? double1 : double1 + double2;
    }

    /**
     * @param double1
     *            Double
     * @param double2
     *            Double
     * @param double3
     *            Double
     * @return the sum of the three arguments, handling null (ex : 3 + 2 + null
     *         = 5)
     */
    public static Double add(final Double double1, final Double double2, final Double double3) {
        return double3 == null ? add(double1, double2) : add(double1, double2) + double3;
    }

    /**
     * @param double1
     *            Double
     * @param double2
     *            Double
     * @param double3
     *            Double
     * @param double4
     *            Double
     * @return the sum of the four arguments, handling null (ex : 3 + 2 + 6 +
     *         null = 11)
     */
    public static Double add(final Double double1, final Double double2, final Double double3,
            final Double double4) {
        return double4 == null ? add(double1, double2, double3) : add(double1, double2, double3) + double4;
    }

    /**
     * @param double1
     *            Double
     * @param double2
     *            Double
     * @param double3
     *            Double
     * @param double4
     *            Double
     * @param double5
     *            Double
     * @return the sum of the five arguments, handling null (ex : 3 + 2 + 6 + 1
     *         + null = 12)
     */
    public static Double add(final Double double1, final Double double2, final Double double3,
            final Double double4, final Double double5) {
        return double5 == null ? add(double1, double2, double3, double4)
                : add(double1, double2, double3, double4) + double5;
    }

    /**
     * NOTE: method name keeps its historical misspelling ("substract") for
     * API compatibility with existing callers.
     *
     * @param double1
     *            Double
     * @param double2
     *            Double
     * @return the difference between double1 and double2. Null in case of null
     *         argument(s)
     */
    public static Double substract(final Double double1, final Double double2) {
        return double1 == null || double2 == null ? null : double1 - double2;
    }

    /**
     * @param double1
     *            Double
     * @param double2
     *            Double
     * @return the product of double1 and double2. Null in case of null
     *         argument(s)
     */
    public static Double multiply(final Double double1, final Double double2) {
        return double1 == null || double2 == null ? null : double1 * double2;
    }

    /**
     * @param double1
     *            Double
     * @param double2
     *            Double
     * @return the division of double1 by double2. Null in case of null
     *         argument(s). Note: a zero divisor yields an IEEE-754 infinity or
     *         NaN, not an exception.
     */
    public static Double divide(final Double double1, final Double double2) {
        return double1 == null || double2 == null ? null : double1 / double2;
    }

    /**
     * @param double1
     *            Double
     * @return true if argument is null or argument == 0
     */
    public static Boolean isNullOrZeroValue(final Double double1) {
        return double1 == null || double1 == 0;
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ui.mac;

import com.apple.eawt.*;
import com.intellij.ide.ui.UISettings;
import com.intellij.ide.ui.UISettingsListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ex.ApplicationInfoEx;
import com.intellij.openapi.application.impl.ApplicationInfoImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.BuildNumber;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.wm.impl.IdeFrameDecorator;
import com.intellij.openapi.wm.impl.IdeFrameImpl;
import com.intellij.ui.CustomProtocolHandler;
import com.intellij.ui.mac.foundation.Foundation;
import com.intellij.ui.mac.foundation.ID;
import com.intellij.ui.mac.foundation.MacUtil;
import com.intellij.util.EventDispatcher;
import com.intellij.util.Function;
import com.sun.jna.Callback;
import com.sun.jna.Pointer;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import java.awt.*;
import java.lang.reflect.Method;
import java.util.EventListener;
import java.util.LinkedList;
import java.util.concurrent.atomic.AtomicInteger;

import static com.intellij.ui.mac.foundation.Foundation.invoke;

/**
 * macOS-specific decoration of the main IDE frame: native (Lion+) fullscreen
 * support and, on older macOS, a native NSToolbar mirror for the IDE
 * toolbar/navbar visibility setting. Also installs the custom-protocol URI
 * handler once per application.
 *
 * User: spLeaner
 */
public class MacMainFrameDecorator extends IdeFrameDecorator implements UISettingsListener {
  private static final Logger LOG = Logger.getInstance("#com.intellij.ui.mac.MacMainFrameDecorator");

  // Serializes fullscreen toggles so only one AppKit transition is in flight
  // at a time (see FullscreenQueue below).
  private final FullscreenQueue<Runnable> myFullscreenQueue = new FullscreenQueue<Runnable>();

  private final EventDispatcher<FSListener> myDispatcher = EventDispatcher.create(FSListener.class);

  // Marker interface so EventDispatcher can multicast FullScreenListener events.
  private interface FSListener extends FullScreenListener, EventListener {}
  private static class FSAdapter extends FullScreenAdapter implements FSListener {}

  /**
   * Queue of fullscreen-transition tasks. While AppKit is processing a
   * transition (waitingForAppKit == true) new tasks are enqueued; each
   * enter/exit notification pops the next task via runFromQueue().
   */
  private static class FullscreenQueue <T extends Runnable> {

    private boolean waitingForAppKit = false;
    private LinkedList<Runnable> queueModel = new LinkedList<Runnable>();

    synchronized void runOrEnqueue (final T runnable) {
      if (waitingForAppKit) {
        enqueue(runnable);
      }
      else {
        ApplicationManager.getApplication().invokeLater(runnable);
        waitingForAppKit = true;
      }
    }

    synchronized private void enqueue (final T runnable) {
      queueModel.add(runnable);
    }

    synchronized void runFromQueue () {
      if (!queueModel.isEmpty()) {
        queueModel.remove().run();
        waitingForAppKit = true;
      }
      else {
        waitingForAppKit = false;
      }
    }
  }

  // Fullscreen listener delivers event too late,
  // so we use method swizzling here: the original AWTWindow selector is kept
  // under an "old..." name and invoked first (see
  // replaceNativeFullscreenListenerCallback()).
  private final Callback windowWillEnterFullScreenCallBack = new Callback() {
    public void callback(ID self, ID nsNotification) {
      invoke(self, "oldWindowWillEnterFullScreen:", nsNotification);
      enterFullscreen();
    }
  };

  private void enterFullscreen() {
    myInFullScreen = true;
    myFrame.storeFullScreenStateIfNeeded(true);
    // A transition finished — release the next queued toggle, if any.
    myFullscreenQueue.runFromQueue();
  }

  private final Callback windowWillExitFullScreenCallBack = new Callback() {
    public void callback(ID self, ID nsNotification) {
      invoke(self, "oldWindowWillExitFullScreen:", nsNotification);
      exitFullscreen();
    }
  };

  private void exitFullscreen() {
    myInFullScreen = false;
    myFrame.storeFullScreenStateIfNeeded(false);

    JRootPane rootPane = myFrame.getRootPane();
    if (rootPane != null) rootPane.putClientProperty(FULL_SCREEN, null);

    myFullscreenQueue.runFromQueue();
  }

  // Client-property key set on the root pane while the frame is fullscreen.
  public static final String FULL_SCREEN = "Idea.Is.In.FullScreen.Mode.Now";

  private static boolean HAS_FULLSCREEN_UTILITIES;

  private static Method requestToggleFullScreenMethod;

  static {
    // Probe for the Apple eAWT fullscreen API reflectively so the class still
    // loads on JDKs/OS versions that lack it.
    try {
      Class.forName("com.apple.eawt.FullScreenUtilities");
      requestToggleFullScreenMethod = Application.class.getMethod("requestToggleFullScreen", Window.class);
      HAS_FULLSCREEN_UTILITIES = true;
    }
    catch (Exception e) {
      HAS_FULLSCREEN_UTILITIES = false;
    }
  }

  public static final boolean FULL_SCREEN_AVAILABLE = SystemInfo.isJavaVersionAtLeast("1.6.0_29") && HAS_FULLSCREEN_UTILITIES;

  // Mirrors the native toolbar's visibility; shared across all frames.
  private static boolean SHOWN = false;

  // Called from native code when the NSToolbar's setVisible: is invoked;
  // pushes the new state back into the IDE settings on the EDT.
  private static Callback SET_VISIBLE_CALLBACK = new Callback() {
    public void callback(ID caller, ID selector, ID value) {
      SHOWN = value.intValue() == 1;
      SwingUtilities.invokeLater(CURRENT_SETTER);
    }
  };

  private static Callback IS_VISIBLE = new Callback() {
    public boolean callback(ID caller) {
      return SHOWN;
    }
  };

  // Used to generate a unique Objective-C class name per decorated frame.
  private static AtomicInteger UNIQUE_COUNTER = new AtomicInteger(0);

  public static final Runnable TOOLBAR_SETTER = new Runnable() {
    @Override
    public void run() {
      final UISettings settings = UISettings.getInstance();
      settings.SHOW_MAIN_TOOLBAR = SHOWN;
      settings.fireUISettingsChanged();
    }
  };

  public static final Runnable NAVBAR_SETTER = new Runnable() {
    @Override
    public void run() {
      final UISettings settings = UISettings.getInstance();
      settings.SHOW_NAVIGATION_BAR = SHOWN;
      settings.fireUISettingsChanged();
    }
  };

  @SuppressWarnings("Convert2Lambda")
  public static final Function<Object, Boolean> NAVBAR_GETTER = new Function<Object, Boolean>() {
    @Override
    public Boolean fun(Object o) {
      return UISettings.getInstance().SHOW_NAVIGATION_BAR;
    }
  };

  @SuppressWarnings("Convert2Lambda")
  public static final Function<Object, Boolean> TOOLBAR_GETTER = new Function<Object, Boolean>() {
    @Override
    public Boolean fun(Object o) {
      return UISettings.getInstance().SHOW_MAIN_TOOLBAR;
    }
  };

  // Chosen once, by the first decorator created: either the toolbar pair or
  // the navbar pair, depending on the navBar constructor flag.
  private static Runnable CURRENT_SETTER = null;
  private static Function<Object, Boolean> CURRENT_GETTER = null;
  private static CustomProtocolHandler ourProtocolHandler = null;

  private boolean myInFullScreen;

  /**
   * @param frame  the IDE frame to decorate
   * @param navBar when true the native toolbar mirrors the navigation bar
   *               setting instead of the main toolbar setting
   */
  public MacMainFrameDecorator(@NotNull final IdeFrameImpl frame, final boolean navBar) {
    super(frame);

    if (CURRENT_SETTER == null) {
      CURRENT_SETTER = navBar ? NAVBAR_SETTER : TOOLBAR_SETTER;
      CURRENT_GETTER = navBar ? NAVBAR_GETTER : TOOLBAR_GETTER;
      SHOWN = CURRENT_GETTER.fun(null);
    }

    UISettings.getInstance().addUISettingsListener(this, this);

    final ID pool = invoke("NSAutoreleasePool", "new");

    int v = UNIQUE_COUNTER.incrementAndGet();

    try {
      if (SystemInfo.isMacOSLion) {
        if (!FULL_SCREEN_AVAILABLE) return;

        FullScreenUtilities.setWindowCanFullScreen(frame, true);

        // Native fullscreen listener can be set only once
        FullScreenUtilities.addFullScreenListenerTo(frame, new FullScreenListener() {
          @Override
          public void windowEnteringFullScreen(AppEvent.FullScreenEvent event) {
            myDispatcher.getMulticaster().windowEnteringFullScreen(event);
          }

          @Override
          public void windowEnteredFullScreen(AppEvent.FullScreenEvent event) {
            myDispatcher.getMulticaster().windowEnteredFullScreen(event);
          }

          @Override
          public void windowExitingFullScreen(AppEvent.FullScreenEvent event) {
            myDispatcher.getMulticaster().windowExitingFullScreen(event);
          }

          @Override
          public void windowExitedFullScreen(AppEvent.FullScreenEvent event) {
            myDispatcher.getMulticaster().windowExitedFullScreen(event);
          }
        });

        myDispatcher.addListener(new FSAdapter() {
          @Override
          public void windowEnteredFullScreen(AppEvent.FullScreenEvent event) {
            // We can get the notification when the frame has been disposed
            JRootPane rootPane = frame.getRootPane();
            if (rootPane != null) rootPane.putClientProperty(FULL_SCREEN, Boolean.TRUE);
            enterFullscreen();
            myFrame.validate();
          }

          @Override
          public void windowExitedFullScreen(AppEvent.FullScreenEvent event) {
            // We can get the notification when the frame has been disposed
            if (myFrame == null/* || ORACLE_BUG_ID_8003173*/) return;
            exitFullscreen();
            myFrame.validate();
          }
        });
      }
      else {
        // Pre-Lion: no native fullscreen. Instead register a custom NSToolbar
        // subclass whose visibility is bridged to the IDE toolbar/navbar setting.
        final ID window = MacUtil.findWindowForTitle(frame.getTitle());
        if (window == null) return;

        // toggle toolbar
        String className = "IdeaToolbar" + v;
        final ID ownToolbar = Foundation.allocateObjcClassPair(Foundation.getObjcClass("NSToolbar"), className);
        Foundation.registerObjcClassPair(ownToolbar);

        final ID toolbar = invoke(invoke(className, "alloc"), "initWithIdentifier:", Foundation.nsString(className));
        Foundation.cfRetain(toolbar);

        invoke(toolbar, "setVisible:", 0); // hide native toolbar by default

        Foundation.addMethod(ownToolbar, Foundation.createSelector("setVisible:"), SET_VISIBLE_CALLBACK, "v*");
        Foundation.addMethod(ownToolbar, Foundation.createSelector("isVisible"), IS_VISIBLE, "B*");

        Foundation.executeOnMainThread(new Runnable() {
          @Override
          public void run() {
            invoke(window, "setToolbar:", toolbar);
            invoke(window, "setShowsToolbarButton:", 1);
          }
        }, true, true);
      }
    }
    finally {
      invoke(pool, "release");
    }

    if (ourProtocolHandler == null) {
      // install uri handler (application-wide, only once)
      final ID mainBundle = invoke("NSBundle", "mainBundle");
      final ID urlTypes = invoke(mainBundle, "objectForInfoDictionaryKey:", Foundation.nsString("CFBundleURLTypes"));
      final ApplicationInfoEx info = ApplicationInfoImpl.getShadowInstance();
      final BuildNumber build = info != null ? info.getBuild() : null;
      if (urlTypes.equals(ID.NIL) && build != null && !build.isSnapshot()) {
        LOG.warn("no url bundle present. \n" +
                 "To use platform protocol handler to open external links specify required protocols in the mac app layout section of the build file\n" +
                 "Example: args.urlSchemes = [\"your-protocol\"] will handle following links: your-protocol://open?file=file&line=line");
        return;
      }
      ourProtocolHandler = new CustomProtocolHandler();
      Application.getApplication().setOpenURIHandler(new OpenURIHandler() {
        @Override
        public void openURI(AppEvent.OpenURIEvent event) {
          ourProtocolHandler.openLink(event.getURI());
        }
      });
    }
  }

  // Swizzles AWTWindow's fullscreen-transition selectors: the originals are
  // re-registered under "old..." names and our callbacks installed in their
  // place, so we are notified before AWT's own (too-late) listener.
  // NOTE(review): no call site is visible in this file — presumably invoked
  // reflectively or from another class; confirm before removing.
  private void replaceNativeFullscreenListenerCallback() {
    ID awtWindow = Foundation.getObjcClass("AWTWindow");

    Pointer windowWillEnterFullScreenMethod = Foundation.createSelector("windowWillEnterFullScreen:");
    ID originalWindowWillEnterFullScreen = Foundation.class_replaceMethod(awtWindow, windowWillEnterFullScreenMethod,
                                                                          windowWillEnterFullScreenCallBack, "v@::@");
    Foundation.addMethodByID(awtWindow, Foundation.createSelector("oldWindowWillEnterFullScreen:"),
                             originalWindowWillEnterFullScreen, "v@::@");

    Pointer windowWillExitFullScreenMethod = Foundation.createSelector("windowWillExitFullScreen:");
    ID originalWindowWillExitFullScreen = Foundation.class_replaceMethod(awtWindow, windowWillExitFullScreenMethod,
                                                                         windowWillExitFullScreenCallBack, "v@::@");
    Foundation.addMethodByID(awtWindow, Foundation.createSelector("oldWindowWillExitFullScreen:"),
                             originalWindowWillExitFullScreen, "v@::@");
  }

  @Override
  public void uiSettingsChanged(final UISettings source) {
    // Keep the cached native-toolbar visibility in sync with the IDE setting.
    if (CURRENT_GETTER != null) {
      SHOWN = CURRENT_GETTER.fun(null);
    }
  }

  @Override
  public boolean isInFullScreen() {
    return myInFullScreen;
  }

  /**
   * Requests a fullscreen enter/exit. Returns REJECTED when unsupported or
   * already in the requested state; otherwise the callback is completed when
   * the matching transition notification arrives.
   */
  @Override
  public ActionCallback toggleFullScreen(final boolean state) {
    if (!SystemInfo.isMacOSLion || myFrame == null || myInFullScreen == state) return ActionCallback.REJECTED;

    final ActionCallback callback = new ActionCallback();

    myDispatcher.addListener(new FSAdapter() {
      @Override
      public void windowExitedFullScreen(AppEvent.FullScreenEvent event) {
        callback.setDone();
        myDispatcher.removeListener(this);
      }

      @Override
      public void windowEnteredFullScreen(AppEvent.FullScreenEvent event) {
        callback.setDone();
        myDispatcher.removeListener(this);
      }
    });

    myFullscreenQueue.runOrEnqueue(new Runnable() {
      @Override
      public void run() {
        toggleFullScreenNow();
      }
    });
    return callback;
  }

  public void toggleFullScreenNow() {
    try {
      // Reflective call resolved in the static initializer above.
      requestToggleFullScreenMethod.invoke(Application.getApplication(), myFrame);
    }
    catch (Exception e) {
      LOG.error(e);
    }
  }
}
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.openwire.tool.marshallers;

import java.io.File;
import java.io.FileWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;

import org.apache.activemq.openwire.tool.MultiSourceGenerator;
import org.codehaus.jam.JAnnotation;
import org.codehaus.jam.JAnnotationValue;
import org.codehaus.jam.JClass;
import org.codehaus.jam.JProperty;

/**
 * Generates the C++ (.h) marshaller headers for the OpenWire protocol plus
 * the MarshallerFactory header, one per concrete command class.
 *
 * @version $Revision: 381410 $
 */
public class AmqCppMarshallingHeadersGenerator extends MultiSourceGenerator {

    protected String targetDir="./src/main";
    protected List<JClass> concreteClasses = new ArrayList<JClass>();
    protected File factoryFile;
    protected String factoryFileName = "MarshallerFactory";
    protected String indent = " ";

    /**
     * Overrides the base class init since we don't mark any marshaller classes as
     * being manually maintained.
     */
    protected void initialiseManuallyMaintainedClasses() {
    }

    /**
     * Returns all the valid properties available on the current class. Overrides the
     * method in {@link MultiSourceGenerator} to add filtering on the Openwire Version
     * number so that we can rerun this tool for older versions and produce an exact
     * match to what was previously generated.
     *
     * @return List of Properties valid for the current {@link JClass} and Openwire version.
     */
    public List<JProperty> getProperties() {
        List<JProperty> answer = new ArrayList<JProperty>();
        JProperty[] properties = jclass.getDeclaredProperties();

        for (int i = 0; i < properties.length; i++) {
            JProperty property = properties[i];
            if (isValidProperty(property)) {
                JAnnotation annotation = property.getAnnotation("openwire:property");
                JAnnotationValue version = annotation.getValue("version");
                // Skip properties introduced after the version being generated.
                if( version.asInt() <= this.getOpenwireVersion() ) {
                    answer.add(property);
                }
            }
        }
        return answer;
    }

    /**
     * Runs the generator over all classes, then emits the MarshallerFactory header.
     */
    public Object run() {
        filePostFix = getFilePostFix();
        if (destDir == null) {
            destDir = new File(targetDir+"/activemq/wireformat/openwire/marshal/generated");
        }
        Object answer = super.run();
        processFactory();
        return answer;
    }

    /** Records every non-abstract class so the factory can register its marshaller. */
    protected void processClass(JClass jclass) {
        super.processClass(jclass);
        if (!jclass.isAbstract()) {
            concreteClasses.add(jclass);
        }
    }

    protected String getClassName(JClass jclass) {
        return super.getClassName(jclass) + "Marshaller";
    }

    /**
     * Maps a command class to the marshaller base class its generated
     * marshaller should extend. All JMS message flavours share MessageMarshaller;
     * anything unmapped falls back to its superclass's marshaller or to
     * BaseDataStreamMarshaller.
     */
    protected String getBaseClassName(JClass jclass) {
        String answer = jclass.getSimpleName();

        if( answer.equals("ActiveMQTextMessage") ) {
            answer = "MessageMarshaller";
        } else if( answer.equals("ActiveMQBytesMessage") ) {
            answer = "MessageMarshaller";
        } else if( answer.equals("ActiveMQMapMessage") ) {
            answer = "MessageMarshaller";
        } else if( answer.equals("ActiveMQObjectMessage") ) {
            answer = "MessageMarshaller";
        } else if( answer.equals("ActiveMQStreamMessage") ) {
            answer = "MessageMarshaller";
        } else if( answer.equals("ActiveMQBlobMessage") ) {
            answer = "MessageMarshaller";
        }

        // We didn't map it directly so we turn it into something generic.
        if( answer.equals( jclass.getSimpleName() ) ) {
            answer = "BaseDataStreamMarshaller";

            JClass superclass = jclass.getSuperclass();
            if (superclass != null) {
                String superName = superclass.getSimpleName();
                if (!superName.equals("Object") &&
                    !superName.equals("JNDIBaseStorable") &&
                    !superName.equals("DataStructureSupport")) {
                    answer = superName + "Marshaller";
                }
            }
            return answer;
        }

        return answer;
    }

    /**
     * @return true when the given class requires marshal-aware handling (all
     *         JMS message flavours do), delegating to the superclass otherwise.
     */
    public boolean isMarshallAware(JClass j) {
        // FIX: previously this compared the 'jclass' field instead of the
        // parameter 'j' (and had the typo "ActiveMBlobMessage", making that
        // branch dead). Visible call sites pass the current jclass, so
        // behavior is unchanged for them.
        String answer = j.getSimpleName();

        if( answer.equals("ActiveMQTextMessage") ) {
            return true;
        } else if( answer.equals("ActiveMQBytesMessage") ) {
            return true;
        } else if( answer.equals("ActiveMQMapMessage") ) {
            return true;
        } else if( answer.equals("ActiveMQObjectMessage") ) {
            return true;
        } else if( answer.equals("ActiveMQStreamMessage") ) {
            return true;
        } else if( answer.equals("ActiveMQBlobMessage") ) {
            return true;
        } else {
            return super.isMarshallAware(j);
        }
    }

    protected String getFilePostFix() {
        return ".h";
    }

    /**
     * Maps a Java type (as seen by JAM) to the corresponding C++ type used in
     * the generated AMQ-CPP headers.
     */
    public String toCppType(JClass type) {
        String name = type.getSimpleName();
        if (name.equals("String")) {
            return "std::string";
        }
        else if( type.isArrayType() ) {
            if( name.equals( "byte[]" ) )
                name = "unsigned char[]";

            JClass arrayClass = type.getArrayComponentType();

            // Primitive arrays become vectors of values; object arrays become
            // vectors of pointers.
            if( arrayClass.isPrimitiveType() ) {
                return "std::vector<" + name.substring(0, name.length()-2) + ">";
            } else {
                return "std::vector<" + name.substring(0, name.length()-2) + "*>";
            }
        }
        else if( name.equals( "Throwable" ) || name.equals( "Exception" ) ) {
            return "BrokerError";
        }
        else if( name.equals("BaseDataStructure" ) ){
            return "DataStructure";
        }
        else if( name.equals("ByteSequence") ) {
            return "std::vector<char>";
        }
        else if( name.equals("boolean") ) {
            return "bool";
        }
        else if( name.equals("long") ) {
            return "long long";
        }
        else if( name.equals("byte") ) {
            return "unsigned char";
        }
        else if( !type.isPrimitiveType() ) {
            return name;
        }
        else {
            return name;
        }
    }

    /** Writes the standard ASF license header into the generated file. */
    protected void generateLicence(PrintWriter out) {
        out.println("/*");
        out.println(" * Licensed to the Apache Software Foundation (ASF) under one or more");
        out.println(" * contributor license agreements. See the NOTICE file distributed with");
        out.println(" * this work for additional information regarding copyright ownership.");
        out.println(" * The ASF licenses this file to You under the Apache License, Version 2.0");
        out.println(" * (the \"License\"); you may not use this file except in compliance with");
        out.println(" * the License. You may obtain a copy of the License at");
        out.println(" *");
        out.println(" * http://www.apache.org/licenses/LICENSE-2.0");
        out.println(" *");
        out.println(" * Unless required by applicable law or agreed to in writing, software");
        out.println(" * distributed under the License is distributed on an \"AS IS\" BASIS,");
        out.println(" * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.");
        out.println(" * See the License for the specific language governing permissions and");
        out.println(" * limitations under the License.");
        out.println(" */");
    }

    /**
     * Emits one complete marshaller header (.h) for the current class:
     * include guard, includes, and the marshaller class declaration.
     * NOTE(review): the guard spelling "MARSAHAL" is a historical typo baked
     * into already-generated files; kept as-is so regenerated output matches.
     */
    protected void generateFile(PrintWriter out) throws Exception {
        generateLicence(out);

        out.println("");
        out.println("#ifndef _ACTIVEMQ_WIREFORMAT_OPENWIRE_MARSAHAL_GENERATED_"+className.toUpperCase()+"_H_");
        out.println("#define _ACTIVEMQ_WIREFORMAT_OPENWIRE_MARSAHAL_GENERATED_"+className.toUpperCase()+"_H_");
        out.println("");
        out.println("// Turn off warning message for ignored exception specification");
        out.println("#ifdef _MSC_VER");
        out.println("#pragma warning( disable : 4290 )");
        out.println("#endif");
        out.println("");

        // The root marshaller lives one directory up from the generated ones.
        if( baseClass.equals("BaseDataStreamMarshaller") ) {
            out.println("#include <activemq/wireformat/openwire/marshal/"+baseClass+".h>");
        } else {
            out.println("#include <activemq/wireformat/openwire/marshal/generated/"+baseClass+".h>");
        }

        out.println("");
        out.println("#include <decaf/io/DataInputStream.h>");
        out.println("#include <decaf/io/DataOutputStream.h>");
        out.println("#include <decaf/io/IOException.h>");
        out.println("#include <activemq/util/Config.h>");
        out.println("#include <activemq/commands/DataStructure.h>");
        out.println("#include <activemq/wireformat/openwire/OpenWireFormat.h>");
        out.println("#include <activemq/wireformat/openwire/utils/BooleanStream.h>");
        out.println("");
        out.println("namespace activemq {");
        out.println("namespace wireformat {");
        out.println("namespace openwire {");
        out.println("namespace marshal {");
        out.println("namespace generated {");
        out.println("");
        out.println(" /**");
        out.println(" * Marshaling code for Open Wire Format for "+className);
        out.println(" *");
        out.println(" * NOTE!: This file is auto generated - do not modify!");
        out.println(" * if you need to make a change, please see the Java Classes");
        out.println(" * in the activemq-openwire-generator module");
        out.println(" */");
        out.println(" class AMQCPP_API "+className+" : public "+baseClass+" {");
        out.println(" public:");
        out.println("");
        out.println(" "+className+"() {}");
        out.println(" virtual ~"+className+"() {}");
        out.println("");

        // Only concrete commands get factory hooks.
        if( !isAbstractClass() ) {
            out.println(" virtual commands::DataStructure* createObject() const;");
            out.println("");
            out.println(" virtual unsigned char getDataStructureType() const;");
            out.println("");
        }

        out.println(" virtual void tightUnmarshal(OpenWireFormat* wireFormat,");
        out.println(" commands::DataStructure* dataStructure,");
        out.println(" decaf::io::DataInputStream* dataIn,");
        out.println(" utils::BooleanStream* bs);");
        out.println("");
        out.println(" virtual int tightMarshal1(OpenWireFormat* wireFormat,");
        out.println(" commands::DataStructure* dataStructure,");
        out.println(" utils::BooleanStream* bs);");
        out.println("");
        out.println(" virtual void tightMarshal2(OpenWireFormat* wireFormat,");
        out.println(" commands::DataStructure* dataStructure,");
        out.println(" decaf::io::DataOutputStream* dataOut,");
        out.println(" utils::BooleanStream* bs);");
        out.println("");
        out.println(" virtual void looseUnmarshal(OpenWireFormat* wireFormat,");
        out.println(" commands::DataStructure* dataStructure,");
        out.println(" decaf::io::DataInputStream* dataIn);");
        out.println("");
        out.println(" virtual void looseMarshal(OpenWireFormat* wireFormat,");
        out.println(" commands::DataStructure* dataStructure,");
        out.println(" decaf::io::DataOutputStream* dataOut);");
        out.println("");
        out.println(" };");
        out.println("");
        out.println("}}}}}");
        out.println("");
        out.println("#endif /*_ACTIVEMQ_WIREFORMAT_OPENWIRE_MARSAHAL_GENERATED_"+className.toUpperCase()+"_H_*/");
        out.println("");
    }

    /** Writes the MarshallerFactory header file, closing the writer in all cases. */
    protected void processFactory() {
        if (factoryFile == null) {
            factoryFile = new File(destDir, factoryFileName + filePostFix);
        }
        PrintWriter out = null;
        try {
            out = new PrintWriter(new FileWriter(factoryFile));
            generateFactory(out);
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            if (out != null) {
                out.close();
            }
        }
    }

    /**
     * Emits the MarshallerFactory header.
     * NOTE(review): the closing #endif comment spells the guard differently
     * from the opening #ifndef/#define (MARSAHAL/MARSHALERFACTORY vs
     * MARSHAL/MARSHALLERFACTORY). The guard itself is consistent, so this is
     * cosmetic; left untouched to keep generated output byte-identical.
     */
    public void generateFactory(PrintWriter out) {
        generateLicence(out);
        out.println("#ifndef _ACTIVEMQ_WIREFORMAT_OPENWIRE_MARSAHAL_GENERATED_MARSHALERFACTORY_H_");
        out.println("#define _ACTIVEMQ_WIREFORMAT_OPENWIRE_MARSAHAL_GENERATED_MARSHALERFACTORY_H_");
        out.println("");
        out.println("// Turn off warning message for ignored exception specification");
        out.println("#ifdef _MSC_VER");
        out.println("#pragma warning( disable : 4290 )");
        out.println("#endif");
        out.println("");
        out.println("#include <activemq/wireformat/openwire/OpenWireFormat.h>");
        out.println("");
        out.println("namespace activemq {");
        out.println("namespace wireformat {");
        out.println("namespace openwire {");
        out.println("namespace marshal {");
        out.println("namespace generated {");
        out.println("");
        out.println(" /**");
        out.println(" * Used to create marshallers for a specific version of the wire");
        out.println(" * protocol.");
        out.println(" *");
        out.println(" * NOTE!: This file is auto generated - do not modify!");
        out.println(" * if you need to make a change, please see the Groovy scripts");
        out.println(" * in the activemq-openwire-generator module");
        out.println(" */");
        out.println(" class MarshallerFactory {");
        out.println(" public:");
        out.println("");
        out.println(" virtual ~MarshallerFactory() {};");
        out.println("");
        out.println(" virtual void configure(OpenWireFormat* format);");
        out.println("");
        out.println(" };");
        out.println("");
        out.println("}}}}}");
        out.println("");
        out.println("#endif /*_ACTIVEMQ_WIREFORMAT_OPENWIRE_MARSHAL_GENERATED_MARSHALLERFACTORY_H_*/");
    }

    public List<JClass> getConcreteClasses() {
        return concreteClasses;
    }

    public void setConcreteClasses(List<JClass> concreteClasses) {
        this.concreteClasses = concreteClasses;
    }

    public String getTargetDir() {
        return targetDir;
    }

    public void setTargetDir(String targetDir) {
        this.targetDir = targetDir;
    }
}
package io.fabric8.openshift.api.model.hive.v1; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import com.fasterxml.jackson.annotation.JsonAnyGetter; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import io.fabric8.kubernetes.api.model.Container; import io.fabric8.kubernetes.api.model.IntOrString; import io.fabric8.kubernetes.api.model.KubernetesResource; import io.fabric8.kubernetes.api.model.LabelSelector; import io.fabric8.kubernetes.api.model.ObjectMeta; import io.fabric8.kubernetes.api.model.ObjectReference; import io.fabric8.kubernetes.api.model.PersistentVolumeClaim; import io.fabric8.kubernetes.api.model.PodTemplateSpec; import io.fabric8.kubernetes.api.model.ResourceRequirements; import io.sundr.builder.annotations.Buildable; import io.sundr.builder.annotations.BuildableReference; import lombok.EqualsAndHashCode; import lombok.Setter; import lombok.ToString; import lombok.experimental.Accessors; @JsonDeserialize(using = com.fasterxml.jackson.databind.JsonDeserializer.None.class) @JsonInclude(JsonInclude.Include.NON_NULL) @JsonPropertyOrder({ "apiVersion", "kind", "metadata", "apiURL", "certificateBundles", "cliImage", "conditions", "installRestarts", "installStartedTimestamp", "installVersion", "installedTimestamp", "installerImage", "platformStatus", "powerState", "provisionRef", "webConsoleURL" }) @ToString @EqualsAndHashCode @Setter @Accessors(prefix = { "_", "" }) @Buildable(editableEnabled = false, validationEnabled = false, generateBuilderPackage = false, lazyCollectionInitEnabled = false, builderPackage = "io.fabric8.kubernetes.api.builder", refs = { @BuildableReference(ObjectMeta.class), 
@BuildableReference(LabelSelector.class), @BuildableReference(Container.class), @BuildableReference(PodTemplateSpec.class), @BuildableReference(ResourceRequirements.class), @BuildableReference(IntOrString.class), @BuildableReference(ObjectReference.class), @BuildableReference(io.fabric8.kubernetes.api.model.LocalObjectReference.class), @BuildableReference(PersistentVolumeClaim.class) }) public class ClusterDeploymentStatus implements KubernetesResource { @JsonProperty("apiURL") private java.lang.String apiURL; @JsonProperty("certificateBundles") @JsonInclude(JsonInclude.Include.NON_EMPTY) private List<CertificateBundleStatus> certificateBundles = new ArrayList<CertificateBundleStatus>(); @JsonProperty("cliImage") private java.lang.String cliImage; @JsonProperty("conditions") @JsonInclude(JsonInclude.Include.NON_EMPTY) private List<ClusterDeploymentCondition> conditions = new ArrayList<ClusterDeploymentCondition>(); @JsonProperty("installRestarts") private Integer installRestarts; @JsonProperty("installStartedTimestamp") private String installStartedTimestamp; @JsonProperty("installVersion") private java.lang.String installVersion; @JsonProperty("installedTimestamp") private String installedTimestamp; @JsonProperty("installerImage") private java.lang.String installerImage; @JsonProperty("platformStatus") private PlatformStatus platformStatus; @JsonProperty("powerState") private java.lang.String powerState; @JsonProperty("provisionRef") private io.fabric8.kubernetes.api.model.LocalObjectReference provisionRef; @JsonProperty("webConsoleURL") private java.lang.String webConsoleURL; @JsonIgnore private Map<java.lang.String, Object> additionalProperties = new HashMap<java.lang.String, Object>(); /** * No args constructor for use in serialization * */ public ClusterDeploymentStatus() { } /** * * @param cliImage * @param webConsoleURL * @param provisionRef * @param installerImage * @param installStartedTimestamp * @param platformStatus * @param certificateBundles * @param 
installVersion * @param installedTimestamp * @param powerState * @param apiURL * @param installRestarts * @param conditions */ public ClusterDeploymentStatus(java.lang.String apiURL, List<CertificateBundleStatus> certificateBundles, java.lang.String cliImage, List<ClusterDeploymentCondition> conditions, Integer installRestarts, String installStartedTimestamp, java.lang.String installVersion, String installedTimestamp, java.lang.String installerImage, PlatformStatus platformStatus, java.lang.String powerState, io.fabric8.kubernetes.api.model.LocalObjectReference provisionRef, java.lang.String webConsoleURL) { super(); this.apiURL = apiURL; this.certificateBundles = certificateBundles; this.cliImage = cliImage; this.conditions = conditions; this.installRestarts = installRestarts; this.installStartedTimestamp = installStartedTimestamp; this.installVersion = installVersion; this.installedTimestamp = installedTimestamp; this.installerImage = installerImage; this.platformStatus = platformStatus; this.powerState = powerState; this.provisionRef = provisionRef; this.webConsoleURL = webConsoleURL; } @JsonProperty("apiURL") public java.lang.String getApiURL() { return apiURL; } @JsonProperty("apiURL") public void setApiURL(java.lang.String apiURL) { this.apiURL = apiURL; } @JsonProperty("certificateBundles") public List<CertificateBundleStatus> getCertificateBundles() { return certificateBundles; } @JsonProperty("certificateBundles") public void setCertificateBundles(List<CertificateBundleStatus> certificateBundles) { this.certificateBundles = certificateBundles; } @JsonProperty("cliImage") public java.lang.String getCliImage() { return cliImage; } @JsonProperty("cliImage") public void setCliImage(java.lang.String cliImage) { this.cliImage = cliImage; } @JsonProperty("conditions") public List<ClusterDeploymentCondition> getConditions() { return conditions; } @JsonProperty("conditions") public void setConditions(List<ClusterDeploymentCondition> conditions) { this.conditions = 
conditions; } @JsonProperty("installRestarts") public Integer getInstallRestarts() { return installRestarts; } @JsonProperty("installRestarts") public void setInstallRestarts(Integer installRestarts) { this.installRestarts = installRestarts; } @JsonProperty("installStartedTimestamp") public String getInstallStartedTimestamp() { return installStartedTimestamp; } @JsonProperty("installStartedTimestamp") public void setInstallStartedTimestamp(String installStartedTimestamp) { this.installStartedTimestamp = installStartedTimestamp; } @JsonProperty("installVersion") public java.lang.String getInstallVersion() { return installVersion; } @JsonProperty("installVersion") public void setInstallVersion(java.lang.String installVersion) { this.installVersion = installVersion; } @JsonProperty("installedTimestamp") public String getInstalledTimestamp() { return installedTimestamp; } @JsonProperty("installedTimestamp") public void setInstalledTimestamp(String installedTimestamp) { this.installedTimestamp = installedTimestamp; } @JsonProperty("installerImage") public java.lang.String getInstallerImage() { return installerImage; } @JsonProperty("installerImage") public void setInstallerImage(java.lang.String installerImage) { this.installerImage = installerImage; } @JsonProperty("platformStatus") public PlatformStatus getPlatformStatus() { return platformStatus; } @JsonProperty("platformStatus") public void setPlatformStatus(PlatformStatus platformStatus) { this.platformStatus = platformStatus; } @JsonProperty("powerState") public java.lang.String getPowerState() { return powerState; } @JsonProperty("powerState") public void setPowerState(java.lang.String powerState) { this.powerState = powerState; } @JsonProperty("provisionRef") public io.fabric8.kubernetes.api.model.LocalObjectReference getProvisionRef() { return provisionRef; } @JsonProperty("provisionRef") public void setProvisionRef(io.fabric8.kubernetes.api.model.LocalObjectReference provisionRef) { this.provisionRef = 
provisionRef; } @JsonProperty("webConsoleURL") public java.lang.String getWebConsoleURL() { return webConsoleURL; } @JsonProperty("webConsoleURL") public void setWebConsoleURL(java.lang.String webConsoleURL) { this.webConsoleURL = webConsoleURL; } @JsonAnyGetter public Map<java.lang.String, Object> getAdditionalProperties() { return this.additionalProperties; } @JsonAnySetter public void setAdditionalProperty(java.lang.String name, Object value) { this.additionalProperties.put(name, value); } }
/* * Copyright 2015 Marco Semiao * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package io.spotnext.maven.mojo; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.lang.instrument.ClassFileTransformer; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.stream.Collectors; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.DependencyResolutionRequiredException; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.plugins.annotations.ResolutionScope; import org.apache.maven.project.MavenProject; import org.sonatype.plexus.build.incremental.BuildContext; import ch.qos.logback.core.util.CloseUtil; import 
edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import io.spotnext.maven.Constants; import io.spotnext.maven.util.JarTransformer; import io.spotnext.support.util.FileUtils; import io.spotnext.support.weaving.AbstractBaseClassTransformer; /** * <p> * TransformTypesMojo class. * </p> * * @see <a href="http://marcosemiao4j.wordpress.com">Marco4J</a> * @author Marco Semiao * @since 1.0 */ @Mojo(name = "transform-types", defaultPhase = LifecyclePhase.PROCESS_CLASSES, requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME) public class TransformTypesMojo extends AbstractMojo { @Component protected BuildContext buildContext; @Parameter(defaultValue = "${project}", readonly = true, required = true) private MavenProject project; @Parameter(property = "classFileTransformers", required = true) private List<String> classFileTransformers; @Parameter(property = "debug", required = false) private boolean debug = false; @Parameter(property = "skip", required = false) private boolean skip = false; @Parameter private boolean includeJars; /** {@inheritDoc} */ @Override public void execute() throws MojoExecutionException { if (skip) { getLog().info("Skipping type transformation!"); return; } trackExecution("start"); final ClassLoader classLoader = getClassloader(); final List<ClassFileTransformer> transformers = getClassFileTransformers(classLoader); List<File> classFiles = FileUtils.getFiles(project.getBuild().getOutputDirectory(), f -> f.getAbsolutePath().endsWith(".class")); getLog().debug("Found class files for processing: " + classFiles.stream().map(f -> f.getName()).collect(Collectors.joining(", "))); if (CollectionUtils.isNotEmpty(transformers)) { if (CollectionUtils.isNotEmpty(classFiles)) { getLog().info(String.format("Transforming %s classes", classFiles.size())); for (final File f : classFiles) { if (f.getName().endsWith(Constants.CLASS_EXTENSION)) { String relativeClassFilePath = StringUtils.remove(f.getPath(), project.getBuild().getOutputDirectory()); 
relativeClassFilePath = StringUtils.removeStart(relativeClassFilePath, "/"); final String className = relativeClassFilePath.substring(0, relativeClassFilePath.length() - Constants.CLASS_EXTENSION.length()); trackExecution("Loading class: " + f.getAbsolutePath()); byte[] byteCode; try { byteCode = Files.readAllBytes(f.toPath()); } catch (final IOException e) { String message = String.format("Can't read bytecode for class %s", className); buildContext.addMessage(f, 0, 0, message, BuildContext.SEVERITY_ERROR, e); throw new IllegalStateException(message, e); } trackExecution("Loaded class: " + f.getAbsolutePath()); for (final ClassFileTransformer t : transformers) { try { // log exceptions into separate folder, to be able to inspect them even if Eclipse swallows them ... if (t instanceof AbstractBaseClassTransformer) { ((AbstractBaseClassTransformer) t).setErrorLogger(this::logError); } // returns null if nothing has been transformed byteCode = t.transform(classLoader, className, null, null, byteCode); } catch (final Exception e) { String exception = "Exception during transformation of class: " + f.getAbsolutePath() + "\n" + e.getMessage(); trackExecution(exception); String message = String.format("Can't transform class %s, transformer %s: %s", className, t.getClass().getSimpleName(), ExceptionUtils.getStackTrace(e)); buildContext.addMessage(f, 0, 0, message, BuildContext.SEVERITY_ERROR, e); throw new MojoExecutionException(exception, e); } } if (byteCode != null && byteCode.length > 0) { try { Files.write(f.toPath(), byteCode, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING); trackExecution("Saved transformed class: " + f.getAbsolutePath()); } catch (final IOException e) { String message = "Could not write modified class: " + relativeClassFilePath; buildContext.addMessage(f, 0, 0, message, BuildContext.SEVERITY_ERROR, e); throw new IllegalStateException(message); } finally { buildContext.refresh(f); getLog().info("Applied 
transformation to type: " + f.getAbsolutePath()); } } else { trackExecution("No changes made for class: " + f.getAbsolutePath()); getLog().debug("No transformation was applied to type: " + f.getAbsolutePath()); } } } } else { getLog().info("No class files found"); } trackExecution("All classes in build output folder transformed"); if (includeJars) { final String packaging = project.getPackaging(); final Artifact artifact = project.getArtifact(); if ("jar".equals(packaging) && artifact != null) { try { final File source = artifact.getFile(); if (source.isFile()) { final File destination = new File(source.getParent(), "instrument.jar"); final JarTransformer transformer = new JarTransformer(getLog(), classLoader, Arrays.asList(source), transformers); transformer.transform(destination); final File sourceRename = new File(source.getParent(), "notransform-" + source.getName()); if (source.renameTo(sourceRename)) { throw new MojoExecutionException(String.format("Could not move %s to %s", source.toString(), sourceRename.toString())); } if (destination.renameTo(sourceRename)) { throw new MojoExecutionException(String.format("Could not move %s to %s", destination.toString(), sourceRename.toString())); } buildContext.refresh(destination); } } catch (final Exception e) { buildContext.addMessage(artifact.getFile(), 0, 0, e.getMessage(), BuildContext.SEVERITY_ERROR, e); throw new MojoExecutionException(e.getMessage(), e); } } else { getLog().debug(String.format("Artifact %s not a jar file", artifact != null ? 
(artifact.getGroupId() + ":" + artifact.getArtifactId()) : "<null>")); } } } else { getLog().info("No class transformers configured"); } } @SuppressFBWarnings(value = "DM_DEFAULT_ENCODING", justification = "false positive") protected void logError(Throwable cause) { final String tempFolder = System.getProperty("java.io.tmpdir"); final Path logFilePath = Paths.get(tempFolder, "spot-transform.types.log"); final File logFile = logFilePath.toFile(); if (logFile.canWrite()) { FileWriter writer = null; try { writer = new FileWriter(logFile); writer.write(cause.getMessage()); writer.write(ExceptionUtils.getStackTrace(cause)); } catch (Exception e) { getLog().error("Can't log to separete error log file " + logFilePath.toString()); } finally { CloseUtil.closeQuietly(writer); } } } private ClassLoader getClassloader() throws MojoExecutionException { try { final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); final List<URL> classPathUrls = getClasspath(); final URLClassLoader urlClassLoader = URLClassLoader.newInstance( classPathUrls.toArray(new URL[classPathUrls.size()]), contextClassLoader); trackExecution("Classpath: " + classPathUrls.stream().map(u -> u.toString()).collect(Collectors.joining(","))); Thread.currentThread().setContextClassLoader(urlClassLoader); return urlClassLoader; } catch (final Exception e) { throw new MojoExecutionException(e.getMessage(), e); } } private List<URL> getClasspath() throws MalformedURLException, DependencyResolutionRequiredException { final List<URL> classPathUrls = new ArrayList<URL>(); final List<String> classpathElements = project.getRuntimeClasspathElements(); // add build output folder to classpath classpathElements.add(computeDir(project.getBuild().getOutputDirectory())); for (final String path : classpathElements) { classPathUrls.add(new File(path).toURI().toURL()); } return classPathUrls; } private String computeDir(String dir) { return new File(dir).getAbsolutePath(); } private 
List<ClassFileTransformer> getClassFileTransformers(final ClassLoader cl) throws MojoExecutionException { try { final List<URL> classPathUrls = getClasspath(); if (CollectionUtils.isNotEmpty(classFileTransformers)) { final List<ClassFileTransformer> list = new ArrayList<>(classFileTransformers.size()); for (final String classFileTransformer : classFileTransformers) { final Class<?> clazz = cl.loadClass(classFileTransformer); final ClassFileTransformer transformer = (ClassFileTransformer) clazz.newInstance(); if (transformer instanceof AbstractBaseClassTransformer) { final AbstractBaseClassTransformer baseClassTransformer = ((AbstractBaseClassTransformer) transformer); baseClassTransformer.addClassPaths(project.getBuild().getOutputDirectory()); baseClassTransformer.addClassPaths(classPathUrls.stream().map(u -> u.getFile()).collect(Collectors.toList())); } list.add(transformer); } return list; } else { getLog().warn("No class file transformers configured!"); return Collections.emptyList(); } } catch (final Exception e) { throw new MojoExecutionException(e.getMessage(), e); } } private void trackExecution(String message) throws IllegalStateException { if (debug) { try { File tempDir = FileUtils.getTempDirectory(); Files.write(Paths.get(tempDir.getAbsolutePath(), "transform-classes.log"), (new Date().toString() + ": " + message + "\n").getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE, StandardOpenOption.APPEND); } catch (Exception e) { throw new IllegalStateException("error", e); } } } }
package org.apache.maven.plugin.ear; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.zip.ZipException; import org.apache.maven.archiver.MavenArchiveConfiguration; import org.apache.maven.archiver.MavenArchiver; import org.apache.maven.execution.MavenSession; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugin.ear.util.EarMavenArchiver; import org.apache.maven.plugin.ear.util.JavaEEVersion; import org.apache.maven.plugin.ear.util.ModuleIdentifierValidator; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.plugins.annotations.ResolutionScope; import org.apache.maven.project.MavenProjectHelper; import org.apache.maven.shared.filtering.MavenFileFilter; import org.apache.maven.shared.filtering.MavenFilteringException; import 
org.apache.maven.shared.filtering.MavenResourcesExecution; import org.apache.maven.shared.filtering.MavenResourcesFiltering; import org.apache.maven.shared.utils.io.FileUtils; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.UnArchiver; import org.codehaus.plexus.archiver.jar.JarArchiver; import org.codehaus.plexus.archiver.jar.Manifest; import org.codehaus.plexus.archiver.jar.Manifest.Attribute; import org.codehaus.plexus.archiver.jar.ManifestException; import org.codehaus.plexus.archiver.manager.ArchiverManager; import org.codehaus.plexus.archiver.manager.NoSuchArchiverException; import org.codehaus.plexus.archiver.zip.ZipArchiver; import org.codehaus.plexus.archiver.zip.ZipUnArchiver; import org.codehaus.plexus.util.DirectoryScanner; import org.codehaus.plexus.util.StringUtils; /** * Builds J2EE Enterprise Archive (EAR) files. * * @author <a href="snicoll@apache.org">Stephane Nicoll</a> * @version $Id$ */ // CHECKSTYLE_OFF: LineLength @Mojo( name = "ear", defaultPhase = LifecyclePhase.PACKAGE, threadSafe = true, requiresDependencyResolution = ResolutionScope.TEST ) // CHECKSTYLE_ON: LineLength public class EarMojo extends AbstractEarMojo { /** * Single directory for extra files to include in the EAR. */ @Parameter( defaultValue = "${basedir}/src/main/application", required = true ) private File earSourceDirectory; /** * The comma separated list of tokens to include in the EAR. */ @Parameter( alias = "includes", defaultValue = "**" ) private String earSourceIncludes; /** * The comma separated list of tokens to exclude from the EAR. */ @Parameter( alias = "excludes" ) private String earSourceExcludes; /** * Specify that the EAR sources should be filtered. * * @since 2.3.2 */ @Parameter( defaultValue = "false" ) private boolean filtering; /** * Filters (property files) to include during the interpolation of the pom.xml. 
* * @since 2.3.2 */ @Parameter private List<String> filters; /** * A list of file extensions that should not be filtered if filtering is enabled. * * @since 2.3.2 */ @Parameter private List<String> nonFilteredFileExtensions; /** * To escape interpolated value with Windows path c:\foo\bar will be replaced with c:\\foo\\bar. * * @since 2.3.2 */ @Parameter( property = "maven.ear.escapedBackslashesInFilePath", defaultValue = "false" ) private boolean escapedBackslashesInFilePath; /** * Expression preceded with this String won't be interpolated \${foo} will be replaced with ${foo}. * * @since 2.3.2 */ @Parameter( property = "maven.ear.escapeString" ) protected String escapeString; /** * In case of using the {@link #skinnyWars} and {@link #defaultLibBundleDir} usually the * classpath will be modified. * By settings this option {@code true} you can change this and keep the classpath untouched. * This option has been introduced to keep the backward compatibility with earlier versions * of the plugin. * * @since 2.10 */ @Parameter( defaultValue = "false" ) private boolean skipClassPathModification; /** * The location of the manifest file to be used within the EAR file. If no value if specified, the default location * in the workDirectory is taken. If the file does not exist, a manifest will be generated automatically. */ @Parameter private File manifestFile; /** * The location of a custom application.xml file to be used within the EAR file. */ @Parameter private String applicationXml; /** * The directory for the generated EAR. */ @Parameter( defaultValue = "${project.build.directory}", required = true ) private String outputDirectory; /** * The name of the EAR file to generate. */ @Parameter( alias = "earName", defaultValue = "${project.build.finalName}", required = true ) private String finalName; /** * The comma separated list of artifact's type(s) to unpack by default. */ @Parameter private String unpackTypes; /** * Classifier to add to the artifact generated. 
If given, the artifact will be an attachment instead. */ @Parameter private String classifier; /** * A comma separated list of tokens to exclude when packaging the EAR. By default nothing is excluded. Note that you * can use the Java Regular Expressions engine to include and exclude specific pattern using the expression * %regex[]. Hint: read the about (?!Pattern). * * @since 2.7 */ @Parameter private String packagingExcludes; /** * A comma separated list of tokens to include when packaging the EAR. By default everything is included. Note that * you can use the Java Regular Expressions engine to include and exclude specific pattern using the expression * %regex[]. * * @since 2.7 */ @Parameter private String packagingIncludes; /** * Whether to create skinny WARs or not. A skinny WAR is a WAR that does not have all of its dependencies in * WEB-INF/lib. Instead those dependencies are shared between the WARs through the EAR. * * @since 2.7 */ @Parameter( property = "maven.ear.skinnyWars", defaultValue = "false" ) private boolean skinnyWars; /** * The Jar archiver. */ @Component( role = Archiver.class, hint = "jar" ) private JarArchiver jarArchiver; /** * The Zip archiver. */ @Component( role = Archiver.class, hint = "zip" ) private ZipArchiver zipArchiver; /** * The Zip Un archiver. */ @Component( role = UnArchiver.class, hint = "zip" ) private ZipUnArchiver zipUnArchiver; /** * The archive configuration to use. See <a href="http://maven.apache.org/shared/maven-archiver/index.html">Maven * Archiver Reference</a>. */ @Parameter private MavenArchiveConfiguration archive = new MavenArchiveConfiguration(); /** */ @Component private MavenProjectHelper projectHelper; /** * The archive manager. 
*/ @Component private ArchiverManager archiverManager; /** */ @Component( role = MavenFileFilter.class, hint = "default" ) private MavenFileFilter mavenFileFilter; /** */ @Component( role = MavenResourcesFiltering.class, hint = "default" ) private MavenResourcesFiltering mavenResourcesFiltering; /** * @since 2.3.2 */ @Parameter( defaultValue = "${session}", readonly = true, required = true ) private MavenSession session; private List<FileUtils.FilterWrapper> filterWrappers; /** * @since 2.9 */ @Parameter( property = "maven.ear.useJvmChmod", defaultValue = "true" ) private boolean useJvmChmod = true; /** * The list of artifacts is checked and if you set this to {@code true} the build will fail if duplicate artifacts * have been found within the build configuration. * * @since 2.10 */ // TODO: This can be removed if we change to full unique identifiers in EAR (next major version!) @Parameter( defaultValue = "false", property = "maven.ear.duplicateArtifactsBreakTheBuild" ) private boolean duplicateArtifactsBreakTheBuild; private void checkModuleUniqueness() throws MojoExecutionException { ModuleIdentifierValidator miv = new ModuleIdentifierValidator( getModules() ); miv.checkForDuplicateArtifacts(); if ( miv.existDuplicateArtifacts() ) { Map<String, List<EarModule>> duplicateArtifacts = miv.getDuplicateArtifacts(); for ( Entry<String, List<EarModule>> entry : duplicateArtifacts.entrySet() ) { getLog().warn( "The artifactId " + entry.getKey() + " exists more than once in the modules list." ); for ( EarModule earModule : entry.getValue() ) { getLog().warn( " --> " + earModule.getArtifact().getId() + " (" + earModule.getType() + ")" ); } } getLog().warn( "HINT: This can be simply solved by using the <fileNameMapping>full</fileNameMapping>" ); if ( duplicateArtifactsBreakTheBuild ) { // CHECKSTYLE_OFF: LineLength throw new MojoExecutionException( "The build contains duplicate artifacts which result in unpredictable ear content." 
); // CHECKSTYLE_ON: LineLength } } } /** {@inheritDoc} */ public void execute() throws MojoExecutionException, MojoFailureException { // Initializes ear modules super.execute(); zipArchiver.setUseJvmChmod( useJvmChmod ); zipUnArchiver.setUseJvmChmod( useJvmChmod ); final JavaEEVersion javaEEVersion = JavaEEVersion.getJavaEEVersion( version ); // Initializes unpack types List<String> unpackTypesList = createUnpackList(); // Copy modules copyModules( javaEEVersion, unpackTypesList ); // Copy source files try { File earSourceDir = earSourceDirectory; if ( earSourceDir.exists() ) { getLog().info( "Copy ear sources to " + getWorkDirectory().getAbsolutePath() ); String[] fileNames = getEarFiles( earSourceDir ); for ( String fileName : fileNames ) { copyFile( new File( earSourceDir, fileName ), new File( getWorkDirectory(), fileName ) ); } } if ( applicationXml != null && !"".equals( applicationXml ) ) { // rename to application.xml getLog().info( "Including custom application.xml[" + applicationXml + "]" ); File metaInfDir = new File( getWorkDirectory(), META_INF ); copyFile( new File( applicationXml ), new File( metaInfDir, "/application.xml" ) ); } } catch ( IOException e ) { throw new MojoExecutionException( "Error copying EAR sources", e ); } catch ( MavenFilteringException e ) { throw new MojoExecutionException( "Error filtering EAR sources", e ); } // Check if deployment descriptor is there File ddFile = new File( getWorkDirectory(), APPLICATION_XML_URI ); if ( !ddFile.exists() && ( javaEEVersion.lt( JavaEEVersion.FIVE ) ) ) { // CHECKSTYLE_OFF: LineLength throw new MojoExecutionException( "Deployment descriptor: " + ddFile.getAbsolutePath() + " does not exist." 
); // CHECKSTYLE_ON: LineLength } try { File earFile = getEarFile( outputDirectory, finalName, classifier ); final MavenArchiver archiver = new EarMavenArchiver( getModules() ); final JarArchiver theJarArchiver = getJarArchiver(); getLog().debug( "Jar archiver implementation [" + theJarArchiver.getClass().getName() + "]" ); archiver.setArchiver( theJarArchiver ); archiver.setOutputFile( earFile ); // Include custom manifest if necessary includeCustomManifestFile(); getLog().debug( "Excluding " + Arrays.asList( getPackagingExcludes() ) + " from the generated EAR." ); getLog().debug( "Including " + Arrays.asList( getPackagingIncludes() ) + " in the generated EAR." ); archiver.getArchiver().addDirectory( getWorkDirectory(), getPackagingIncludes(), getPackagingExcludes() ); archiver.createArchive( session, getProject(), archive ); if ( classifier != null ) { projectHelper.attachArtifact( getProject(), "ear", classifier, earFile ); } else { getProject().getArtifact().setFile( earFile ); } } catch ( Exception e ) { throw new MojoExecutionException( "Error assembling EAR", e ); } } private void copyModules( final JavaEEVersion javaEEVersion, List<String> unpackTypesList ) throws MojoExecutionException, MojoFailureException { try { // TODO: With the next major release the modules // should be identified by a unique id instead of the // the artifactId's only which means this // check can be removed. // http://jira.codehaus.org/browse/MEAR-209 checkModuleUniqueness(); for ( EarModule module : getModules() ) { if ( module instanceof JavaModule ) { getLog().warn( "JavaModule is deprecated (" + module + "), please use JarModule instead." ); } if ( module instanceof Ejb3Module ) { getLog().warn( "Ejb3Module is deprecated (" + module + "), please use EjbModule instead." 
); } final File sourceFile = module.getArtifact().getFile(); final File destinationFile = buildDestinationFile( getWorkDirectory(), module.getUri() ); if ( !sourceFile.isFile() ) { throw new MojoExecutionException( "Cannot copy a directory: " + sourceFile.getAbsolutePath() + "; Did you package/install " + module.getArtifact() + "?" ); } if ( destinationFile.getCanonicalPath().equals( sourceFile.getCanonicalPath() ) ) { getLog().info( "Skipping artifact [" + module + "], as it already exists at [" + module.getUri() + "]" ); continue; } // If the module is within the unpack list, make sure that no unpack wasn't forced (null or true) // If the module is not in the unpack list, it should be true // CHECKSTYLE_OFF: LineLength if ( ( unpackTypesList.contains( module.getType() ) && ( module.shouldUnpack() == null || module.shouldUnpack() ) ) || ( module.shouldUnpack() != null && module.shouldUnpack() ) ) // CHECKSTYLE_ON: LineLength { getLog().info( "Copying artifact [" + module + "] to [" + module.getUri() + "] (unpacked)" ); // Make sure that the destination is a directory to avoid plexus nasty stuff :) destinationFile.mkdirs(); unpack( sourceFile, destinationFile ); if ( skinnyWars && module.changeManifestClasspath() ) { changeManifestClasspath( module, destinationFile, javaEEVersion ); } } else { if ( sourceFile.lastModified() > destinationFile.lastModified() ) { getLog().info( "Copying artifact [" + module + "] to [" + module.getUri() + "]" ); FileUtils.copyFile( sourceFile, destinationFile ); if ( skinnyWars && module.changeManifestClasspath() ) { changeManifestClasspath( module, destinationFile, javaEEVersion ); } } else { getLog().debug( "Skipping artifact [" + module + "], as it is already up to date at [" + module.getUri() + "]" ); } } } } catch ( IOException e ) { throw new MojoExecutionException( "Error copying EAR modules", e ); } catch ( ArchiverException e ) { throw new MojoExecutionException( "Error unpacking EAR modules", e ); } catch ( 
NoSuchArchiverException e ) { throw new MojoExecutionException( "No Archiver found for EAR modules", e ); } } private List<String> createUnpackList() throws MojoExecutionException { List<String> unpackTypesList = new ArrayList<String>(); if ( unpackTypes != null ) { unpackTypesList = Arrays.asList( unpackTypes.split( "," ) ); for ( String type : unpackTypesList ) { if ( !EarModuleFactory.STANDARD_ARTIFACT_TYPE.contains( type ) ) { throw new MojoExecutionException( "Invalid type [" + type + "] supported types are " + EarModuleFactory.STANDARD_ARTIFACT_TYPE ); } } getLog().debug( "Initialized unpack types " + unpackTypesList ); } return unpackTypesList; } /** * @return {@link #applicationXml} */ public String getApplicationXml() { return applicationXml; } /** * @param applicationXml {@link #applicationXml} */ public void setApplicationXml( String applicationXml ) { this.applicationXml = applicationXml; } /** * Returns a string array of the excludes to be used when assembling/copying the ear. * * @return an array of tokens to exclude */ protected String[] getExcludes() { List<String> excludeList = new ArrayList<String>( FileUtils.getDefaultExcludesAsList() ); if ( earSourceExcludes != null && !"".equals( earSourceExcludes ) ) { excludeList.addAll( Arrays.asList( StringUtils.split( earSourceExcludes, "," ) ) ); } // if applicationXml is specified, omit the one in the source directory if ( getApplicationXml() != null && !"".equals( getApplicationXml() ) ) { excludeList.add( "**/" + META_INF + "/application.xml" ); } return excludeList.toArray( new String[excludeList.size()] ); } /** * Returns a string array of the includes to be used when assembling/copying the ear. * * @return an array of tokens to include */ protected String[] getIncludes() { return StringUtils.split( StringUtils.defaultString( earSourceIncludes ), "," ); } /** * @return The array with the packaging excludes. 
*/ public String[] getPackagingExcludes() { if ( StringUtils.isEmpty( packagingExcludes ) ) { return new String[0]; } else { return StringUtils.split( packagingExcludes, "," ); } } /** * @param packagingExcludes {@link #packagingExcludes} */ public void setPackagingExcludes( String packagingExcludes ) { this.packagingExcludes = packagingExcludes; } /** * @return The arrays with the includes. */ public String[] getPackagingIncludes() { if ( StringUtils.isEmpty( packagingIncludes ) ) { return new String[] { "**" }; } else { return StringUtils.split( packagingIncludes, "," ); } } /** * @param packagingIncludes {@link #packagingIncludes} */ public void setPackagingIncludes( String packagingIncludes ) { this.packagingIncludes = packagingIncludes; } private static File buildDestinationFile( File buildDir, String uri ) { return new File( buildDir, uri ); } private void includeCustomManifestFile() { if ( manifestFile == null ) { manifestFile = new File( getWorkDirectory(), "META-INF/MANIFEST.MF" ); } if ( !manifestFile.exists() ) { getLog().info( "Could not find manifest file: " + manifestFile + " - Generating one" ); } else { getLog().info( "Including custom manifest file [" + manifestFile + "]" ); archive.setManifestFile( manifestFile ); } } /** * Returns the EAR file to generate, based on an optional classifier. * * @param basedir the output directory * @param finalName the name of the ear file * @param classifier an optional classifier * @return the EAR file to generate */ private static File getEarFile( String basedir, String finalName, String classifier ) { if ( classifier == null ) { classifier = ""; } else if ( classifier.trim().length() > 0 && !classifier.startsWith( "-" ) ) { classifier = "-" + classifier; } return new File( basedir, finalName + classifier + ".ear" ); } /** * Returns a list of filenames that should be copied over to the destination directory. 
* * @param sourceDir the directory to be scanned * @return the array of filenames, relative to the sourceDir */ private String[] getEarFiles( File sourceDir ) { DirectoryScanner scanner = new DirectoryScanner(); scanner.setBasedir( sourceDir ); scanner.setExcludes( getExcludes() ); scanner.addDefaultExcludes(); scanner.setIncludes( getIncludes() ); scanner.scan(); return scanner.getIncludedFiles(); } /** * Unpacks the module into the EAR structure. * * @param source File to be unpacked. * @param destDir Location where to put the unpacked files. * @throws NoSuchArchiverException In case of we don't have an appropriate archiver. * @throws IOException In case of a general IOException. */ public void unpack( File source, File destDir ) throws NoSuchArchiverException, IOException { UnArchiver unArchiver = archiverManager.getUnArchiver( "zip" ); unArchiver.setSourceFile( source ); unArchiver.setDestDirectory( destDir ); // Extract the module unArchiver.extract(); } /** * Returns the {@link JarArchiver} implementation used to package the EAR file. * <p/> * By default the archiver is obtained from the Plexus container. * * @return the archiver */ protected JarArchiver getJarArchiver() { return jarArchiver; } private void copyFile( File source, File target ) throws MavenFilteringException, IOException, MojoExecutionException { if ( filtering && !isNonFilteredExtension( source.getName() ) ) { // Silly that we have to do this ourselves if ( target.getParentFile() != null && !target.getParentFile().exists() ) { target.getParentFile().mkdirs(); } mavenFileFilter.copyFile( source, target, true, getFilterWrappers(), encoding ); } else { FileUtils.copyFile( source, target ); } } /** * @param fileName The name of the file which should be checked. * @return {@code true} if the name is part of the non filtered extensions {@code false} otherwise. 
*/ public boolean isNonFilteredExtension( String fileName ) { return !mavenResourcesFiltering.filteredFileExtension( fileName, nonFilteredFileExtensions ); } private List<FileUtils.FilterWrapper> getFilterWrappers() throws MojoExecutionException { if ( filterWrappers == null ) { try { MavenResourcesExecution mavenResourcesExecution = new MavenResourcesExecution(); mavenResourcesExecution.setMavenProject( getProject() ); mavenResourcesExecution.setEscapedBackslashesInFilePath( escapedBackslashesInFilePath ); mavenResourcesExecution.setFilters( filters ); mavenResourcesExecution.setEscapeString( escapeString ); filterWrappers = mavenFileFilter.getDefaultFilterWrappers( mavenResourcesExecution ); } catch ( MavenFilteringException e ) { getLog().error( "Fail to build filtering wrappers " + e.getMessage() ); throw new MojoExecutionException( e.getMessage(), e ); } } return filterWrappers; } private void changeManifestClasspath( EarModule module, File original, JavaEEVersion javaEEVersion ) throws MojoFailureException { try { File workDirectory; // Handle the case that the destination might be a directory (project-038) if ( original.isFile() ) { // Create a temporary work directory // MEAR-167 use uri as directory to prevent merging of artifacts with the same artifactId workDirectory = new File( new File( getTempFolder(), "temp" ), module.getUri() ); workDirectory.mkdirs(); getLog().debug( "Created a temporary work directory: " + workDirectory.getAbsolutePath() ); // Unpack the archive to a temporary work directory zipUnArchiver.setSourceFile( original ); zipUnArchiver.setDestDirectory( workDirectory ); zipUnArchiver.extract(); } else { workDirectory = original; } // Create a META-INF/MANIFEST.MF file if it doesn't exist (project-038) File metaInfDirectory = new File( workDirectory, "META-INF" ); boolean newMetaInfCreated = metaInfDirectory.mkdirs(); if ( newMetaInfCreated ) { // CHECKSTYLE_OFF: LineLength getLog().debug( "This project did not have a META-INF directory 
before, so a new directory was created." ); // CHECKSTYLE_ON: LineLength } File newCreatedManifestFile = new File( metaInfDirectory, "MANIFEST.MF" ); boolean newManifestCreated = newCreatedManifestFile.createNewFile(); if ( newManifestCreated ) { // CHECKSTYLE_OFF: LineLength getLog().debug( "This project did not have a META-INF/MANIFEST.MF file before, so a new file was created." ); // CHECKSTYLE_ON: LineLength } // Read the manifest from disk Manifest mf = new Manifest( new FileReader( newCreatedManifestFile ) ); Attribute classPath = mf.getMainSection().getAttribute( "Class-Path" ); List<String> classPathElements = new ArrayList<String>(); if ( classPath != null ) { classPathElements.addAll( Arrays.asList( classPath.getValue().split( " " ) ) ); } else { classPath = new Attribute( "Class-Path", "" ); } // Modify the classpath entries in the manifest for ( EarModule o : getModules() ) { if ( o instanceof JarModule ) { JarModule jm = (JarModule) o; if ( module.getLibDir() != null ) { // MEAR-189: // We use the original name, cause in case of fileNameMapping to no-version/full // we could not not delete it and it will end up in the resulting EAR and the WAR // will not be cleaned up. 
File artifact = new File( new File( workDirectory, module.getLibDir() ), jm.getOriginalBundleFileName() ); if ( artifact.exists() ) { getLog().debug( " -> Artifact to delete: " + artifact ); if ( !artifact.delete() ) { getLog().error( "Could not delete '" + artifact + "'" ); } } } if ( classPathElements.contains( jm.getBundleFileName() ) ) { classPathElements.set( classPathElements.indexOf( jm.getBundleFileName() ), jm.getUri() ); } else { if ( !skipClassPathModification ) { classPathElements.add( jm.getUri() ); } else { if ( javaEEVersion.lt( JavaEEVersion.FIVE ) || defaultLibBundleDir == null ) { classPathElements.add( jm.getUri() ); } } } } } classPath.setValue( StringUtils.join( classPathElements.iterator(), " " ) ); mf.getMainSection().addConfiguredAttribute( classPath ); // Write the manifest to disk PrintWriter pw = new PrintWriter( newCreatedManifestFile ); mf.write( pw ); pw.close(); if ( original.isFile() ) { // Pack up the archive again from the work directory if ( !original.delete() ) { getLog().error( "Could not delete original artifact file " + original ); } getLog().debug( "Zipping module" ); zipArchiver.setDestFile( original ); zipArchiver.addDirectory( workDirectory ); zipArchiver.createArchive(); } } catch ( ManifestException e ) { throw new MojoFailureException( e.getMessage() ); } catch ( ZipException e ) { throw new MojoFailureException( e.getMessage() ); } catch ( IOException e ) { throw new MojoFailureException( e.getMessage() ); } catch ( ArchiverException e ) { throw new MojoFailureException( e.getMessage() ); } } }
/**
 * Copyright (C) 2011-2012 Turn, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.turn.ttorrent.common.protocol;

import com.turn.ttorrent.client.SharedTorrent;

import java.nio.ByteBuffer;
import java.text.ParseException;
import java.util.BitSet;

/**
 * BitTorrent peer protocol messages representations.
 *
 * <p>
 * This class and its <em>*Messages</em> subclasses provide POJO
 * representations of the peer protocol messages, along with easy parsing from
 * an input ByteBuffer to quickly get a usable representation of an incoming
 * message.
 * </p>
 *
 * @author mpetazzoni
 * @see <a href="http://wiki.theory.org/BitTorrentSpecification#Peer_wire_protocol_.28TCP.29">BitTorrent peer wire protocol</a>
 */
public abstract class PeerMessage {

    /** The size, in bytes, of the length field in a message (one 32-bit
     * integer). */
    public static final int MESSAGE_LENGTH_FIELD_SIZE = 4;

    /**
     * Message type.
     *
     * <p>
     * Note that the keep-alive messages don't actually have an type ID defined
     * in the protocol as they are of length 0.
     * </p>
     */
    public enum Type {
        KEEP_ALIVE(-1),
        CHOKE(0),
        UNCHOKE(1),
        INTERESTED(2),
        NOT_INTERESTED(3),
        HAVE(4),
        BITFIELD(5),
        REQUEST(6),
        PIECE(7),
        CANCEL(8);

        // Wire identifier of the message type (KEEP_ALIVE has no real ID; -1 is a sentinel).
        private byte id;

        Type(int id) {
            this.id = (byte)id;
        }

        // NOTE(review): this is an overload taking a byte, NOT an override of
        // Object.equals(Object); Type.get() below relies on this overload.
        public boolean equals(byte c) {
            return this.id == c;
        }

        public byte getTypeByte() {
            return this.id;
        }

        /** Looks up a message type by its wire identifier; returns null when unknown. */
        public static Type get(byte c) {
            for (Type t : Type.values()) {
                if (t.equals(c)) {
                    return t;
                }
            }
            return null;
        }
    };

    // The message's protocol type and its raw (rewound) wire payload.
    private final Type type;
    private final ByteBuffer data;

    private PeerMessage(Type type, ByteBuffer data) {
        this.type = type;
        this.data = data;
        // Reset position so every crafted/parsed message starts readable from byte 0.
        this.data.rewind();
    }

    public Type getType() {
        return this.type;
    }

    /**
     * Returns a {@link ByteBuffer} backed by the same data as this message.
     *
     * <p>
     * This method returns a duplicate of the buffer stored in this {@link
     * PeerMessage} object to allow for multiple consumers to read from the
     * same message without conflicting access to the buffer's position, mark
     * and limit.
     * </p>
     */
    public ByteBuffer getData() {
        return this.data.duplicate();
    }

    /**
     * Validate that this message makes sense for the torrent it's related to.
     *
     * <p>
     * This method is meant to be overloaded by distinct message types, where
     * it makes sense. Otherwise, it defaults to true.
     * </p>
     *
     * @param torrent The torrent this message is about.
     */
    public PeerMessage validate(SharedTorrent torrent)
        throws MessageValidationException {
        return this;
    }

    public String toString() {
        return this.getType().name();
    }

    /**
     * Parse the given buffer into a peer protocol message.
     *
     * <p>
     * Parses the provided byte array and builds the corresponding PeerMessage
     * subclass object.
     * </p>
     *
     * @param buffer The byte buffer containing the message data.
     * @param torrent The torrent this message is about.
     * @return A PeerMessage subclass instance.
     * @throws ParseException When the message is invalid, can't be parsed or
     * does not match the protocol requirements.
     */
    public static PeerMessage parse(ByteBuffer buffer, SharedTorrent torrent)
        throws ParseException {
        // First 4 bytes: message length. Zero length means keep-alive.
        int length = buffer.getInt();
        if (length == 0) {
            return KeepAliveMessage.parse(buffer, torrent);
        } else if (length != buffer.remaining()) {
            throw new ParseException("Message size did not match announced " +
                    "size!", 0);
        }

        // Next byte: message type ID.
        Type type = Type.get(buffer.get());
        if (type == null) {
            throw new ParseException("Unknown message ID!",
                buffer.position()-1);
        }

        // Dispatch on type; slice() hands each parser the payload after the ID byte.
        switch (type) {
            case CHOKE:
                return ChokeMessage.parse(buffer.slice(), torrent);
            case UNCHOKE:
                return UnchokeMessage.parse(buffer.slice(), torrent);
            case INTERESTED:
                return InterestedMessage.parse(buffer.slice(), torrent);
            case NOT_INTERESTED:
                return NotInterestedMessage.parse(buffer.slice(), torrent);
            case HAVE:
                return HaveMessage.parse(buffer.slice(), torrent);
            case BITFIELD:
                return BitfieldMessage.parse(buffer.slice(), torrent);
            case REQUEST:
                return RequestMessage.parse(buffer.slice(), torrent);
            case PIECE:
                return PieceMessage.parse(buffer.slice(), torrent);
            case CANCEL:
                return CancelMessage.parse(buffer.slice(), torrent);
            default:
                throw new IllegalStateException("Message type should have " +
                        "been properly defined by now.");
        }
    }

    /** Thrown when a parsed message fails its per-torrent validation. */
    public static class MessageValidationException extends ParseException {

        static final long serialVersionUID = -1;

        public MessageValidationException(PeerMessage m) {
            super("Message " + m + " is not valid!", 0);
        }
    }

    /**
     * Keep alive message.
     *
     * &lt;len=0000&gt;
     */
    public static class KeepAliveMessage extends PeerMessage {

        private static final int BASE_SIZE = 0;

        private KeepAliveMessage(ByteBuffer buffer) {
            super(Type.KEEP_ALIVE, buffer);
        }

        public static KeepAliveMessage parse(ByteBuffer buffer,
                SharedTorrent torrent) throws MessageValidationException {
            return (KeepAliveMessage)new KeepAliveMessage(buffer)
                .validate(torrent);
        }

        public static KeepAliveMessage craft() {
            ByteBuffer buffer = ByteBuffer.allocateDirect(
                MESSAGE_LENGTH_FIELD_SIZE + KeepAliveMessage.BASE_SIZE);
            buffer.putInt(KeepAliveMessage.BASE_SIZE);
            return new KeepAliveMessage(buffer);
        }
    }

    /**
     * Choke message.
     *
     * &lt;len=0001&gt;&lt;id=0&gt;
     */
    public static class ChokeMessage extends PeerMessage {

        private static final int BASE_SIZE = 1;

        private ChokeMessage(ByteBuffer buffer) {
            super(Type.CHOKE, buffer);
        }

        public static ChokeMessage parse(ByteBuffer buffer,
                SharedTorrent torrent) throws MessageValidationException {
            return (ChokeMessage)new ChokeMessage(buffer)
                .validate(torrent);
        }

        public static ChokeMessage craft() {
            ByteBuffer buffer = ByteBuffer.allocateDirect(
                MESSAGE_LENGTH_FIELD_SIZE + ChokeMessage.BASE_SIZE);
            buffer.putInt(ChokeMessage.BASE_SIZE);
            buffer.put(PeerMessage.Type.CHOKE.getTypeByte());
            return new ChokeMessage(buffer);
        }
    }

    /**
     * Unchoke message.
     *
     * &lt;len=0001&gt;&lt;id=1&gt;
     */
    public static class UnchokeMessage extends PeerMessage {

        private static final int BASE_SIZE = 1;

        private UnchokeMessage(ByteBuffer buffer) {
            super(Type.UNCHOKE, buffer);
        }

        public static UnchokeMessage parse(ByteBuffer buffer,
                SharedTorrent torrent) throws MessageValidationException {
            return (UnchokeMessage)new UnchokeMessage(buffer)
                .validate(torrent);
        }

        public static UnchokeMessage craft() {
            ByteBuffer buffer = ByteBuffer.allocateDirect(
                MESSAGE_LENGTH_FIELD_SIZE + UnchokeMessage.BASE_SIZE);
            buffer.putInt(UnchokeMessage.BASE_SIZE);
            buffer.put(PeerMessage.Type.UNCHOKE.getTypeByte());
            return new UnchokeMessage(buffer);
        }
    }

    /**
     * Interested message.
     *
     * &lt;len=0001&gt;&lt;id=2&gt;
     */
    public static class InterestedMessage extends PeerMessage {

        private static final int BASE_SIZE = 1;

        private InterestedMessage(ByteBuffer buffer) {
            super(Type.INTERESTED, buffer);
        }

        public static InterestedMessage parse(ByteBuffer buffer,
                SharedTorrent torrent) throws MessageValidationException {
            return (InterestedMessage)new InterestedMessage(buffer)
                .validate(torrent);
        }

        public static InterestedMessage craft() {
            ByteBuffer buffer = ByteBuffer.allocateDirect(
                MESSAGE_LENGTH_FIELD_SIZE + InterestedMessage.BASE_SIZE);
            buffer.putInt(InterestedMessage.BASE_SIZE);
            buffer.put(PeerMessage.Type.INTERESTED.getTypeByte());
            return new InterestedMessage(buffer);
        }
    }

    /**
     * Not interested message.
     *
     * &lt;len=0001&gt;&lt;id=3&gt;
     */
    public static class NotInterestedMessage extends PeerMessage {

        private static final int BASE_SIZE = 1;

        private NotInterestedMessage(ByteBuffer buffer) {
            super(Type.NOT_INTERESTED, buffer);
        }

        public static NotInterestedMessage parse(ByteBuffer buffer,
                SharedTorrent torrent) throws MessageValidationException {
            return (NotInterestedMessage)new NotInterestedMessage(buffer)
                .validate(torrent);
        }

        public static NotInterestedMessage craft() {
            ByteBuffer buffer = ByteBuffer.allocateDirect(
                MESSAGE_LENGTH_FIELD_SIZE + NotInterestedMessage.BASE_SIZE);
            buffer.putInt(NotInterestedMessage.BASE_SIZE);
            buffer.put(PeerMessage.Type.NOT_INTERESTED.getTypeByte());
            return new NotInterestedMessage(buffer);
        }
    }

    /**
     * Have message.
     *
     * &lt;len=0005&gt;&lt;id=4&gt;&lt;piece index=xxxx&gt;
     */
    public static class HaveMessage extends PeerMessage {

        private static final int BASE_SIZE = 5;

        private int piece;

        private HaveMessage(ByteBuffer buffer, int piece) {
            super(Type.HAVE, buffer);
            this.piece = piece;
        }

        public int getPieceIndex() {
            return this.piece;
        }

        @Override
        public HaveMessage validate(SharedTorrent torrent)
            throws MessageValidationException {
            // The announced piece index must exist in the torrent.
            if (this.piece >= 0 && this.piece < torrent.getPieceCount()) {
                return this;
            }

            throw new MessageValidationException(this);
        }

        public static HaveMessage parse(ByteBuffer buffer,
                SharedTorrent torrent) throws MessageValidationException {
            return new HaveMessage(buffer, buffer.getInt())
                .validate(torrent);
        }

        public static HaveMessage craft(int piece) {
            ByteBuffer buffer = ByteBuffer.allocateDirect(
                MESSAGE_LENGTH_FIELD_SIZE + HaveMessage.BASE_SIZE);
            buffer.putInt(HaveMessage.BASE_SIZE);
            buffer.put(PeerMessage.Type.HAVE.getTypeByte());
            buffer.putInt(piece);
            return new HaveMessage(buffer, piece);
        }

        public String toString() {
            return super.toString() + " #" + this.getPieceIndex();
        }
    }

    /**
     * Bitfield message.
     *
     * &lt;len=0001+X&gt;&lt;id=5&gt;&lt;bitfield&gt;
     */
    public static class BitfieldMessage extends PeerMessage {

        private static final int BASE_SIZE = 1;

        private BitSet bitfield;

        private BitfieldMessage(ByteBuffer buffer, BitSet bitfield) {
            super(Type.BITFIELD, buffer);
            this.bitfield = bitfield;
        }

        public BitSet getBitfield() {
            return this.bitfield;
        }

        @Override
        public BitfieldMessage validate(SharedTorrent torrent)
            throws MessageValidationException {
            if (this.bitfield.length() <= torrent.getPieceCount()) {
                return this;
            }

            throw new MessageValidationException(this);
        }

        public static BitfieldMessage parse(ByteBuffer buffer,
                SharedTorrent torrent) throws MessageValidationException {
            // Decode big-endian-within-byte bitfield: bit 7 of byte 0 is piece 0.
            BitSet bitfield = new BitSet(buffer.remaining()*8);
            for (int i=0; i < buffer.remaining()*8; i++) {
                if ((buffer.get(i/8) & (1 << (7 -(i % 8)))) > 0) {
                    bitfield.set(i);
                }
            }

            return new BitfieldMessage(buffer, bitfield)
                .validate(torrent);
        }

        public static BitfieldMessage craft(BitSet availablePieces) {
            // Encode set bits back into the wire layout (MSB-first within each byte).
            byte[] bitfield = new byte[
                (int) Math.ceil((double)availablePieces.length()/8)];
            for (int i=availablePieces.nextSetBit(0); i >= 0;
                    i=availablePieces.nextSetBit(i+1)) {
                bitfield[i/8] |= 1 << (7 -(i % 8));
            }

            ByteBuffer buffer = ByteBuffer.allocateDirect(
                MESSAGE_LENGTH_FIELD_SIZE + BitfieldMessage.BASE_SIZE + bitfield.length);
            buffer.putInt(BitfieldMessage.BASE_SIZE + bitfield.length);
            buffer.put(PeerMessage.Type.BITFIELD.getTypeByte());
            buffer.put(ByteBuffer.wrap(bitfield));
            return new BitfieldMessage(buffer, availablePieces);
        }

        public String toString() {
            return super.toString() + " " + this.getBitfield().cardinality();
        }
    }

    /**
     * Request message.
     *
     * &lt;len=00013&gt;&lt;id=6&gt;&lt;piece index&gt;&lt;block offset&gt;&lt;block length&gt;
     */
    public static class RequestMessage extends PeerMessage {

        private static final int BASE_SIZE = 13;

        /** Default block size is 2^14 bytes, or 16kB. */
        public static final int DEFAULT_REQUEST_SIZE = 16384;

        /** Max block request size is 2^17 bytes, or 131kB.
         */
        public static final int MAX_REQUEST_SIZE = 131072;

        private int piece;
        private int offset;
        private int length;

        private RequestMessage(ByteBuffer buffer, int piece,
                int offset, int length) {
            super(Type.REQUEST, buffer);
            this.piece = piece;
            this.offset = offset;
            this.length = length;
        }

        public int getPiece() {
            return this.piece;
        }

        public int getOffset() {
            return this.offset;
        }

        public int getLength() {
            return this.length;
        }

        @Override
        public RequestMessage validate(SharedTorrent torrent)
            throws MessageValidationException {
            // The requested range must fall entirely within the piece.
            if (this.piece >= 0 && this.piece < torrent.getPieceCount() &&
                this.offset + this.length <=
                    torrent.getPiece(this.piece).size()) {
                return this;
            }

            throw new MessageValidationException(this);
        }

        public static RequestMessage parse(ByteBuffer buffer,
                SharedTorrent torrent) throws MessageValidationException {
            int piece = buffer.getInt();
            int offset = buffer.getInt();
            int length = buffer.getInt();
            return new RequestMessage(buffer, piece,
                    offset, length).validate(torrent);
        }

        public static RequestMessage craft(int piece, int offset, int length) {
            ByteBuffer buffer = ByteBuffer.allocateDirect(
                MESSAGE_LENGTH_FIELD_SIZE + RequestMessage.BASE_SIZE);
            buffer.putInt(RequestMessage.BASE_SIZE);
            buffer.put(PeerMessage.Type.REQUEST.getTypeByte());
            buffer.putInt(piece);
            buffer.putInt(offset);
            buffer.putInt(length);
            return new RequestMessage(buffer, piece, offset, length);
        }

        public String toString() {
            return super.toString() + " #" + this.getPiece() +
                " (" + this.getLength() + "@" + this.getOffset() + ")";
        }
    }

    /**
     * Piece message.
     *
     * &lt;len=0009+X&gt;&lt;id=7&gt;&lt;piece index&gt;&lt;block offset&gt;&lt;block data&gt;
     */
    public static class PieceMessage extends PeerMessage {

        private static final int BASE_SIZE = 9;

        private int piece;
        private int offset;
        private ByteBuffer block;

        private PieceMessage(ByteBuffer buffer, int piece,
                int offset, ByteBuffer block) {
            super(Type.PIECE, buffer);
            this.piece = piece;
            this.offset = offset;
            this.block = block;
        }

        public int getPiece() {
            return this.piece;
        }

        public int getOffset() {
            return this.offset;
        }

        public ByteBuffer getBlock() {
            return this.block;
        }

        @Override
        public PieceMessage validate(SharedTorrent torrent)
            throws MessageValidationException {
            if (this.piece >= 0 && this.piece < torrent.getPieceCount() &&
                this.offset + this.block.limit() <=
                    torrent.getPiece(this.piece).size()) {
                return this;
            }

            throw new MessageValidationException(this);
        }

        public static PieceMessage parse(ByteBuffer buffer,
                SharedTorrent torrent) throws MessageValidationException {
            int piece = buffer.getInt();
            int offset = buffer.getInt();
            ByteBuffer block = buffer.slice();
            return new PieceMessage(buffer, piece, offset, block)
                .validate(torrent);
        }

        public static PieceMessage craft(int piece, int offset,
                ByteBuffer block) {
            ByteBuffer buffer = ByteBuffer.allocateDirect(
                MESSAGE_LENGTH_FIELD_SIZE + PieceMessage.BASE_SIZE +
                block.capacity());
            buffer.putInt(PieceMessage.BASE_SIZE + block.capacity());
            buffer.put(PeerMessage.Type.PIECE.getTypeByte());
            buffer.putInt(piece);
            buffer.putInt(offset);
            // NOTE(review): put(block) advances block's position to its limit;
            // the stored 'block' reference is left fully consumed. Callers of
            // getBlock() after craft() see an empty-remaining buffer — confirm
            // whether callers rewind first.
            buffer.put(block);
            return new PieceMessage(buffer, piece, offset, block);
        }

        public String toString() {
            return super.toString() + " #" + this.getPiece() +
                " (" + this.getBlock().capacity() + "@" + this.getOffset() + ")";
        }
    }

    /**
     * Cancel message.
     *
     * &lt;len=00013&gt;&lt;id=8&gt;&lt;piece index&gt;&lt;block offset&gt;&lt;block length&gt;
     */
    public static class CancelMessage extends PeerMessage {

        private static final int BASE_SIZE = 13;

        private int piece;
        private int offset;
        private int length;

        private CancelMessage(ByteBuffer buffer, int piece,
                int offset, int length) {
            super(Type.CANCEL, buffer);
            this.piece = piece;
            this.offset = offset;
            this.length = length;
        }

        public int getPiece() {
            return this.piece;
        }

        public int getOffset() {
            return this.offset;
        }

        public int getLength() {
            return this.length;
        }

        @Override
        public CancelMessage validate(SharedTorrent torrent)
            throws MessageValidationException {
            if (this.piece >= 0 && this.piece < torrent.getPieceCount() &&
                this.offset + this.length <=
                    torrent.getPiece(this.piece).size()) {
                return this;
            }

            throw new MessageValidationException(this);
        }

        public static CancelMessage parse(ByteBuffer buffer,
                SharedTorrent torrent) throws MessageValidationException {
            int piece = buffer.getInt();
            int offset = buffer.getInt();
            int length = buffer.getInt();
            return new CancelMessage(buffer, piece,
                    offset, length).validate(torrent);
        }

        public static CancelMessage craft(int piece, int offset, int length) {
            ByteBuffer buffer = ByteBuffer.allocateDirect(
                MESSAGE_LENGTH_FIELD_SIZE + CancelMessage.BASE_SIZE);
            buffer.putInt(CancelMessage.BASE_SIZE);
            buffer.put(PeerMessage.Type.CANCEL.getTypeByte());
            buffer.putInt(piece);
            buffer.putInt(offset);
            buffer.putInt(length);
            return new CancelMessage(buffer, piece, offset, length);
        }

        public String toString() {
            return super.toString() + " #" + this.getPiece() +
                " (" + this.getLength() + "@" + this.getOffset() + ")";
        }
    }
}
package com.fsck.k9.activity.setup;

import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.text.method.DigitsKeyListener;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.*;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.CompoundButton.OnCheckedChangeListener;
import com.fsck.k9.*;
import com.fsck.k9.account.AccountCreator;
import com.fsck.k9.activity.K9Activity;
import com.fsck.k9.activity.setup.AccountSetupCheckSettings.CheckDirection;
import com.fsck.k9.helper.Utility;
import com.fsck.k9.mail.AuthType;
import com.fsck.k9.mail.ServerSettings.Type;
import com.fsck.k9.mail.ConnectionSecurity;
import com.fsck.k9.mail.ServerSettings;
import com.fsck.k9.mail.Transport;
import com.fsck.k9.view.ClientCertificateSpinner;
import com.fsck.k9.view.ClientCertificateSpinner.OnClientCertificateChangedListener;

import java.net.URI;
import java.net.URISyntaxException;

/**
 * Activity for entering/editing an account's outgoing (SMTP) server settings:
 * host, port, connection security, authentication type and credentials.
 *
 * Fields are validated on every user change (see {@link #validateFields()}),
 * which enables/disables the "Next" button. Spinner listeners are attached
 * only after state restoration (see {@link #initializeViewListeners()}) so
 * programmatic initialization does not clobber restored values.
 */
public class AccountSetupOutgoing extends K9Activity
    implements OnClickListener, OnCheckedChangeListener {
    private static final String EXTRA_ACCOUNT = "account";
    private static final String EXTRA_MAKE_DEFAULT = "makeDefault";

    // Keys for the saved-instance-state bundle.
    private static final String STATE_SECURITY_TYPE_POSITION = "stateSecurityTypePosition";
    private static final String STATE_AUTH_TYPE_POSITION = "authTypePosition";

    private EditText mUsernameView;
    private EditText mPasswordView;
    private ClientCertificateSpinner mClientCertificateSpinner;
    private TextView mClientCertificateLabelView;
    private TextView mPasswordLabelView;
    private EditText mServerView;
    private EditText mPortView;
    // Last port value we consider "current"; used to restore the field when an
    // invalid security/auth combination is rolled back in validateFields().
    private String mCurrentPortViewSetting;
    private CheckBox mRequireLoginView;
    private ViewGroup mRequireLoginSettingsView;
    private Spinner mSecurityTypeView;
    // Our own record of the spinner selections, so listeners can distinguish
    // genuine user input from programmatic (re)initialization.
    private int mCurrentSecurityTypeViewPosition;
    private Spinner mAuthTypeView;
    private int mCurrentAuthTypeViewPosition;
    private AuthTypeAdapter mAuthTypeAdapter;
    private Button mNextButton;
    private Account mAccount;
    private boolean mMakeDefault;

    /** Launches this screen as part of the account-creation wizard. */
    public static void actionOutgoingSettings(Context context, Account account, boolean makeDefault) {
        Intent i = new Intent(context, AccountSetupOutgoing.class);
        i.putExtra(EXTRA_ACCOUNT, account.getUuid());
        i.putExtra(EXTRA_MAKE_DEFAULT, makeDefault);
        context.startActivity(i);
    }

    /** Launches this screen to edit an existing account's outgoing settings. */
    public static void actionEditOutgoingSettings(Context context, Account account) {
        context.startActivity(intentActionEditOutgoingSettings(context, account));
    }

    /** Builds the ACTION_EDIT intent used by {@link #actionEditOutgoingSettings}. */
    public static Intent intentActionEditOutgoingSettings(Context context, Account account) {
        Intent i = new Intent(context, AccountSetupOutgoing.class);
        i.setAction(Intent.ACTION_EDIT);
        i.putExtra(EXTRA_ACCOUNT, account.getUuid());
        return i;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.account_setup_outgoing);

        String accountUuid = getIntent().getStringExtra(EXTRA_ACCOUNT);
        mAccount = Preferences.getPreferences(this).getAccount(accountUuid);

        try {
            // WebDAV (Exchange) accounts send through the same server as they
            // receive from, so the outgoing screen is skipped entirely.
            if (new URI(mAccount.getStoreUri()).getScheme().startsWith("webdav")) {
                mAccount.setTransportUri(mAccount.getStoreUri());
                AccountSetupCheckSettings.actionCheckSettings(this, mAccount, CheckDirection.OUTGOING);
            }
        } catch (URISyntaxException e) {
            // The store URI is generated by the app itself and should always
            // parse; log instead of silently printing a stack trace.
            Log.e(K9.LOG_TAG, "Couldn't parse store URI", e);
        }

        mUsernameView = (EditText)findViewById(R.id.account_username);
        mPasswordView = (EditText)findViewById(R.id.account_password);
        mClientCertificateSpinner = (ClientCertificateSpinner)findViewById(R.id.account_client_certificate_spinner);
        mClientCertificateLabelView = (TextView)findViewById(R.id.account_client_certificate_label);
        mPasswordLabelView = (TextView)findViewById(R.id.account_password_label);
        mServerView = (EditText)findViewById(R.id.account_server);
        mPortView = (EditText)findViewById(R.id.account_port);
        mRequireLoginView = (CheckBox)findViewById(R.id.account_require_login);
        mRequireLoginSettingsView = (ViewGroup)findViewById(R.id.account_require_login_settings);
        mSecurityTypeView = (Spinner)findViewById(R.id.account_security_type);
        mAuthTypeView = (Spinner)findViewById(R.id.account_auth_type);
        mNextButton = (Button)findViewById(R.id.next);

        mNextButton.setOnClickListener(this);

        mSecurityTypeView.setAdapter(ConnectionSecurityAdapter.get(this));

        mAuthTypeAdapter = AuthTypeAdapter.get(this);
        mAuthTypeView.setAdapter(mAuthTypeAdapter);

        /*
         * Only allow digits in the port field.
         */
        mPortView.setKeyListener(DigitsKeyListener.getInstance("0123456789"));

        // (mAccount was already loaded from the intent extra above; the former
        // duplicate lookup flagged with a FIXME has been removed.)
        mMakeDefault = getIntent().getBooleanExtra(EXTRA_MAKE_DEFAULT, false);

        /*
         * If we're being reloaded we override the original account with the one
         * we saved
         */
        if (savedInstanceState != null && savedInstanceState.containsKey(EXTRA_ACCOUNT)) {
            accountUuid = savedInstanceState.getString(EXTRA_ACCOUNT);
            mAccount = Preferences.getPreferences(this).getAccount(accountUuid);
        }

        try {
            ServerSettings settings = Transport.decodeTransportUri(mAccount.getTransportUri());

            updateAuthPlainTextFromSecurityType(settings.connectionSecurity);

            if (savedInstanceState == null) {
                // The first item is selected if settings.authenticationType is null or is not in mAuthTypeAdapter
                mCurrentAuthTypeViewPosition = mAuthTypeAdapter.getAuthPosition(settings.authenticationType);
            } else {
                mCurrentAuthTypeViewPosition = savedInstanceState.getInt(STATE_AUTH_TYPE_POSITION);
            }
            mAuthTypeView.setSelection(mCurrentAuthTypeViewPosition, false);
            updateViewFromAuthType();

            // Select currently configured security type
            if (savedInstanceState == null) {
                mCurrentSecurityTypeViewPosition = settings.connectionSecurity.ordinal();
            } else {
                /*
                 * Restore the spinner state now, before calling
                 * setOnItemSelectedListener(), thus avoiding a call to
                 * onItemSelected(). Then, when the system restores the state
                 * (again) in onRestoreInstanceState(), The system will see that
                 * the new state is the same as the current state (set here), so
                 * once again onItemSelected() will not be called.
                 */
                mCurrentSecurityTypeViewPosition = savedInstanceState.getInt(STATE_SECURITY_TYPE_POSITION);
            }
            mSecurityTypeView.setSelection(mCurrentSecurityTypeViewPosition, false);

            if (settings.username != null && !settings.username.isEmpty()) {
                mUsernameView.setText(settings.username);
                mRequireLoginView.setChecked(true);
                mRequireLoginSettingsView.setVisibility(View.VISIBLE);
            }

            if (settings.password != null) {
                mPasswordView.setText(settings.password);
            }

            if (settings.clientCertificateAlias != null) {
                mClientCertificateSpinner.setAlias(settings.clientCertificateAlias);
            }

            if (settings.host != null) {
                mServerView.setText(settings.host);
            }

            if (settings.port != -1) {
                // Use String.valueOf() rather than String.format("%d", ...):
                // the latter is locale-sensitive and can produce non-ASCII
                // digits the DigitsKeyListener above would reject.
                mPortView.setText(String.valueOf(settings.port));
            } else {
                updatePortFromSecurityType();
            }
            mCurrentPortViewSetting = mPortView.getText().toString();
        } catch (Exception e) {
            /*
             * We should always be able to parse our own settings.
             */
            failure(e);
        }
    }

    /**
     * Called at the end of either {@code onCreate()} or
     * {@code onRestoreInstanceState()}, after the views have been initialized,
     * so that the listeners are not triggered during the view initialization.
     * This avoids needless calls to {@code validateFields()} which is called
     * immediately after this is called.
     */
    private void initializeViewListeners() {
        /*
         * Updates the port when the user changes the security type. This allows
         * us to show a reasonable default which the user can change.
         */
        mSecurityTypeView.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                /*
                 * We keep our own record of the spinner state so we
                 * know for sure that onItemSelected() was called
                 * because of user input, not because of spinner
                 * state initialization. This assures that the port
                 * will not be replaced with a default value except
                 * on user input.
                 */
                if (mCurrentSecurityTypeViewPosition != position) {
                    updatePortFromSecurityType();

                    boolean isInsecure = (ConnectionSecurity.NONE == getSelectedSecurity());
                    boolean isAuthExternal = (AuthType.EXTERNAL == getSelectedAuthType());
                    boolean loginNotRequired = !mRequireLoginView.isChecked();

                    /*
                     * If the user selects ConnectionSecurity.NONE, a
                     * warning would normally pop up if the authentication
                     * is AuthType.EXTERNAL (i.e., using client
                     * certificates). But such a warning is irrelevant if
                     * login is not required. So to avoid such a warning
                     * (generated in validateFields()) under those
                     * conditions, we change the (irrelevant) authentication
                     * method to PLAIN.
                     */
                    if (isInsecure && isAuthExternal && loginNotRequired) {
                        OnItemSelectedListener onItemSelectedListener = mAuthTypeView.getOnItemSelectedListener();
                        mAuthTypeView.setOnItemSelectedListener(null);
                        mCurrentAuthTypeViewPosition = mAuthTypeAdapter.getAuthPosition(AuthType.PLAIN);
                        mAuthTypeView.setSelection(mCurrentAuthTypeViewPosition, false);
                        mAuthTypeView.setOnItemSelectedListener(onItemSelectedListener);
                        updateViewFromAuthType();
                    }

                    validateFields();
                }
            }

            @Override
            public void onNothingSelected(AdapterView<?> parent) { /* unused */ }
        });

        mAuthTypeView.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                if (mCurrentAuthTypeViewPosition == position) {
                    return;
                }

                updateViewFromAuthType();
                validateFields();
                AuthType selection = getSelectedAuthType();

                // Have the user select (or confirm) the client certificate
                if (AuthType.EXTERNAL == selection) {
                    // This may again invoke validateFields()
                    mClientCertificateSpinner.chooseCertificate();
                } else {
                    mPasswordView.requestFocus();
                }
            }

            @Override
            public void onNothingSelected(AdapterView<?> parent) { /* unused */ }
        });

        mRequireLoginView.setOnCheckedChangeListener(this);
        mClientCertificateSpinner.setOnClientCertificateChangedListener(clientCertificateChangedListener);
        mUsernameView.addTextChangedListener(validationTextWatcher);
        mPasswordView.addTextChangedListener(validationTextWatcher);
        mServerView.addTextChangedListener(validationTextWatcher);
        mPortView.addTextChangedListener(validationTextWatcher);
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putString(EXTRA_ACCOUNT, mAccount.getUuid());
        outState.putInt(STATE_SECURITY_TYPE_POSITION, mCurrentSecurityTypeViewPosition);
        outState.putInt(STATE_AUTH_TYPE_POSITION, mCurrentAuthTypeViewPosition);
    }

    @Override
    protected void onRestoreInstanceState(Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);

        // Keep the login-details section's visibility in sync with the
        // restored checkbox state.
        if (mRequireLoginView.isChecked()) {
            mRequireLoginSettingsView.setVisibility(View.VISIBLE);
        } else {
            mRequireLoginSettingsView.setVisibility(View.GONE);
        }
    }

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);

        /*
         * We didn't want the listeners active while the state was being restored
         * because they could overwrite the restored port with a default port when
         * the security type was restored.
         */
        initializeViewListeners();
        validateFields();
    }

    /**
     * Shows/hides password field and client certificate spinner
     */
    private void updateViewFromAuthType() {
        AuthType authType = getSelectedAuthType();
        boolean isAuthTypeExternal = (AuthType.EXTERNAL == authType);

        if (isAuthTypeExternal) {
            // hide password fields, show client certificate fields
            mPasswordView.setVisibility(View.GONE);
            mPasswordLabelView.setVisibility(View.GONE);
            mClientCertificateLabelView.setVisibility(View.VISIBLE);
            mClientCertificateSpinner.setVisibility(View.VISIBLE);
        } else {
            // show password fields, hide client certificate fields
            mPasswordView.setVisibility(View.VISIBLE);
            mPasswordLabelView.setVisibility(View.VISIBLE);
            mClientCertificateLabelView.setVisibility(View.GONE);
            mClientCertificateSpinner.setVisibility(View.GONE);
        }
    }

    /**
     * This is invoked only when the user makes changes to a widget, not when
     * widgets are changed programmatically.  (The logic is simpler when you know
     * that this is the last thing called after an input change.)
     */
    private void validateFields() {
        AuthType authType = getSelectedAuthType();
        boolean isAuthTypeExternal = (AuthType.EXTERNAL == authType);

        ConnectionSecurity connectionSecurity = getSelectedSecurity();
        boolean hasConnectionSecurity = (connectionSecurity != ConnectionSecurity.NONE);

        if (isAuthTypeExternal && !hasConnectionSecurity) {

            // Notify user of an invalid combination of AuthType.EXTERNAL & ConnectionSecurity.NONE
            String toastText = getString(R.string.account_setup_outgoing_invalid_setting_combo_notice,
                    getString(R.string.account_setup_incoming_auth_type_label),
                    AuthType.EXTERNAL.toString(),
                    getString(R.string.account_setup_incoming_security_label),
                    ConnectionSecurity.NONE.toString());
            Toast.makeText(this, toastText, Toast.LENGTH_LONG).show();

            // Reset the views back to their previous settings without recursing through here again
            OnItemSelectedListener onItemSelectedListener = mAuthTypeView.getOnItemSelectedListener();
            mAuthTypeView.setOnItemSelectedListener(null);
            mAuthTypeView.setSelection(mCurrentAuthTypeViewPosition, false);
            mAuthTypeView.setOnItemSelectedListener(onItemSelectedListener);
            updateViewFromAuthType();

            onItemSelectedListener = mSecurityTypeView.getOnItemSelectedListener();
            mSecurityTypeView.setOnItemSelectedListener(null);
            mSecurityTypeView.setSelection(mCurrentSecurityTypeViewPosition, false);
            mSecurityTypeView.setOnItemSelectedListener(onItemSelectedListener);
            updateAuthPlainTextFromSecurityType(getSelectedSecurity());

            mPortView.removeTextChangedListener(validationTextWatcher);
            mPortView.setText(mCurrentPortViewSetting);
            mPortView.addTextChangedListener(validationTextWatcher);

            // Re-read the (now rolled-back) selections before computing the
            // Next-button state below.
            authType = getSelectedAuthType();
            isAuthTypeExternal = (AuthType.EXTERNAL == authType);

            connectionSecurity = getSelectedSecurity();
            hasConnectionSecurity = (connectionSecurity != ConnectionSecurity.NONE);
        } else {
            mCurrentAuthTypeViewPosition = mAuthTypeView.getSelectedItemPosition();
            mCurrentSecurityTypeViewPosition = mSecurityTypeView.getSelectedItemPosition();
            mCurrentPortViewSetting = mPortView.getText().toString();
        }

        boolean hasValidCertificateAlias = mClientCertificateSpinner.getAlias() != null;
        boolean hasValidUserName = Utility.requiredFieldValid(mUsernameView);

        boolean hasValidPasswordSettings = hasValidUserName
                && !isAuthTypeExternal
                && Utility.requiredFieldValid(mPasswordView);

        boolean hasValidExternalAuthSettings = hasValidUserName
                && isAuthTypeExternal
                && hasConnectionSecurity
                && hasValidCertificateAlias;

        mNextButton
                .setEnabled(Utility.domainFieldValid(mServerView)
                        && Utility.requiredFieldValid(mPortView)
                        && (!mRequireLoginView.isChecked()
                                || hasValidPasswordSettings || hasValidExternalAuthSettings));
        Utility.setCompoundDrawablesAlpha(mNextButton, mNextButton.isEnabled() ? 255 : 128);
    }

    /** Replaces the port field with the default port for the selected security type. */
    private void updatePortFromSecurityType() {
        ConnectionSecurity securityType = getSelectedSecurity();
        updateAuthPlainTextFromSecurityType(securityType);

        // Remove listener so as not to trigger validateFields() which is called
        // elsewhere as a result of user interaction.
        mPortView.removeTextChangedListener(validationTextWatcher);
        mPortView.setText(String.valueOf(AccountCreator.getDefaultPort(securityType, Type.SMTP)));
        mPortView.addTextChangedListener(validationTextWatcher);
    }

    /** Relabels the PLAIN auth entry as "insecure" when no connection security is used. */
    private void updateAuthPlainTextFromSecurityType(ConnectionSecurity securityType) {
        mAuthTypeAdapter.useInsecureText(securityType == ConnectionSecurity.NONE);
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Result from AccountSetupCheckSettings; on success either finish the
        // edit or continue with the account-creation wizard.
        if (resultCode == RESULT_OK) {
            if (Intent.ACTION_EDIT.equals(getIntent().getAction())) {
                mAccount.save(Preferences.getPreferences(this));
                finish();
            } else {
                AccountSetupOptions.actionOptions(this, mAccount, mMakeDefault);
                finish();
            }
        }
    }

    /**
     * Builds the transport URI from the form fields, stores it on the account
     * and launches the outgoing-settings check.
     */
    protected void onNext() {
        ConnectionSecurity securityType = getSelectedSecurity();
        String uri;
        String username = null;
        String password = null;
        String clientCertificateAlias = null;
        AuthType authType = null;
        if (mRequireLoginView.isChecked()) {
            username = mUsernameView.getText().toString();

            authType = getSelectedAuthType();
            if (AuthType.EXTERNAL == authType) {
                clientCertificateAlias = mClientCertificateSpinner.getAlias();
            } else {
                password = mPasswordView.getText().toString();
            }
        }

        String newHost = mServerView.getText().toString();
        int newPort = Integer.parseInt(mPortView.getText().toString());
        ServerSettings server = new ServerSettings(Type.SMTP, newHost, newPort, securityType, authType, username, password, clientCertificateAlias);
        uri = Transport.createTransportUri(server);
        // Any certificate the user accepted for a previous host/port no longer applies.
        mAccount.deleteCertificate(newHost, newPort, CheckDirection.OUTGOING);
        mAccount.setTransportUri(uri);
        AccountSetupCheckSettings.actionCheckSettings(this, mAccount, CheckDirection.OUTGOING);
    }

    public void onClick(View v) {
        switch (v.getId()) {
        case R.id.next:
            onNext();
            break;
        }
    }

    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
        mRequireLoginSettingsView.setVisibility(isChecked ? View.VISIBLE : View.GONE);
        validateFields();
    }

    /** Logs the error and shows a "bad URI" toast to the user. */
    private void failure(Exception use) {
        Log.e(K9.LOG_TAG, "Failure", use);
        String toastText = getString(R.string.account_setup_bad_uri, use.getMessage());

        Toast toast = Toast.makeText(getApplication(), toastText, Toast.LENGTH_LONG);
        toast.show();
    }

    /*
     * Calls validateFields() which enables or disables the Next button
     * based on the fields' validity.
     */
    TextWatcher validationTextWatcher = new TextWatcher() {
        public void afterTextChanged(Editable s) {
            validateFields();
        }

        public void beforeTextChanged(CharSequence s, int start, int count, int after) {
        }

        public void onTextChanged(CharSequence s, int start, int before, int count) {
        }
    };

    OnClientCertificateChangedListener clientCertificateChangedListener = new OnClientCertificateChangedListener() {
        @Override
        public void onClientCertificateChanged(String alias) {
            validateFields();
        }
    };

    private AuthType getSelectedAuthType() {
        AuthTypeHolder holder = (AuthTypeHolder) mAuthTypeView.getSelectedItem();
        return holder.authType;
    }

    private ConnectionSecurity getSelectedSecurity() {
        ConnectionSecurityHolder holder = (ConnectionSecurityHolder) mSecurityTypeView.getSelectedItem();
        return holder.connectionSecurity;
    }
}
package anytimeExactBeliefPropagation;

import static anytimeExactBeliefPropagation.ModelGenerator.IsingModel;
import static com.sri.ai.expresso.helper.Expressions.parse;
import static com.sri.ai.util.Util.println;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import anytimeExactBeliefPropagation.Model.BFS;
import anytimeExactBeliefPropagation.Model.Model;
import anytimeExactBeliefPropagation.Model.Node.FactorNode;

import com.sri.ai.expresso.api.Expression;
import com.sri.ai.grinder.api.Context;
import com.sri.ai.grinder.api.Theory;
import com.sri.ai.grinder.core.TrueContext;
import com.sri.ai.grinder.library.bounds.Bound;
import com.sri.ai.grinder.library.bounds.Bounds;
import com.sri.ai.grinder.theory.compound.CompoundTheory;
import com.sri.ai.grinder.theory.differencearithmetic.DifferenceArithmeticTheory;
import com.sri.ai.grinder.theory.equality.EqualityTheory;
import com.sri.ai.grinder.theory.linearrealarithmetic.LinearRealArithmeticTheory;
import com.sri.ai.grinder.theory.propositional.PropositionalTheory;
import com.sri.ai.grinder.theory.tuple.TupleTheory;
import com.sri.ai.util.base.Pair;
import com.sri.ai.util.base.Triple;

/**
 * Experiment driver comparing incremental (anytime) belief propagation with
 * conditioning against exact inference (SGDPLL) on several graphical models,
 * writing per-iteration statistics to a CSV file.
 */
public class Tests {
	public static void main(String[] args) {
		// Theory initialization
		Theory theory = new CompoundTheory(
				new EqualityTheory(false, true),
				new DifferenceArithmeticTheory(false, false),
				new LinearRealArithmeticTheory(false, false),
				new TupleTheory(),
				new PropositionalTheory());
		Context context = new TrueContext(theory);
		context = context.extendWithSymbolsAndTypes("A", "Boolean");

		Model m;
		String modelName;

		// Sanity check: build a 4x4 Ising model and run exact inference once.
		Triple<Set<Expression>, Context, Expression> a = IsingModel(4, 4, context, parse("Boolean"));
		println(a);
		m = new Model(a, theory, true);
		Expression b = ModelGenerator.lveCalculation(m);
		println(b);

		// (Previously-commented-out experiment configurations — line, tree,
		// random and larger Ising models — were removed; see version control
		// history to recover them.)

		modelName = "Ising Model";
		List<List<TupleOfData>> listOdModelsToPrintInFile = new ArrayList<>();

		modelName = "Binary Tree Model";
		m = new Model(IsingModel(4, 4, context, parse("Boolean")), theory, true);
		List<TupleOfData> btree = testing(modelName, m, 4, 5);
		listOdModelsToPrintInFile.add(btree);
		println("ok");

		testingAndWritingToFile(modelName + ".csv", listOdModelsToPrintInFile);
	}

	/**
	 * Runs incremental BP on {@code m}, printing per-iteration bound
	 * information (and a final SGDPLL result) instead of collecting it.
	 * Kept for manual experiments; not called from {@link #main}.
	 *
	 * @param modelName label used in the printed output
	 * @param m         model to explore
	 * @param printAll  if true, print statistics for every iteration
	 */
	private static void testFunction(String modelName, Model m, boolean printAll) {
		Iterator<FactorNode> bfsExpander = new BFS(m);
		IncrementalBeliefPropagationWithConditioning sbp = new IncrementalBeliefPropagationWithConditioning(m);

		println("Exploring " + modelName);
		Bound inferenceResult = null;
		double totalTime = 0;
		while (bfsExpander.hasNext()) {
			long tStart = System.currentTimeMillis();
			inferenceResult = sbp.expandAndComputeInference(bfsExpander);
			long tEnd = System.currentTimeMillis();
			long tDelta = tEnd - tStart;
			double time = tDelta / 1000.0;
			totalTime += time;

			// ModelGenerator.printModel(m, false);
			if (printAll) {
				println("Number of ExtremePoints : " + inferenceResult.getArguments().size());
				Pair<Double, Double> minAndMaxProbabilityofQueryequalsTrue =
						ModelGenerator.maxMinProbability(inferenceResult, m);
				println("Minimal probability of Query = true : "
						+ minAndMaxProbabilityofQueryequalsTrue.first
						+ "\nMaximal probability of Query = true :"
						+ minAndMaxProbabilityofQueryequalsTrue.second
						+ "\nLength of interval (that is, (max - min)) : "
						+ (minAndMaxProbabilityofQueryequalsTrue.second - minAndMaxProbabilityofQueryequalsTrue.first)
						+ "\nTime to compute this iteration:" + time + ". Toatal time : " + totalTime);
				println("----------------- AllExplored : " + m.AllExplored() + "-----------------");
			}
		}

		if (!printAll)
			println(inferenceResult);

		println("Computation with SGDPLL");
		long tStart = System.currentTimeMillis();
		Expression LVE = ModelGenerator.lveCalculation(m);
		long tEnd = System.currentTimeMillis();
		long tDelta = tEnd - tStart;
		double time = tDelta / 1000.0;
		println(LVE + "\n" + "\nTime to compute:" + time);
	}

	/**
	 * Older variant of {@link #testing} that does not accumulate total time.
	 * Kept for comparison; prefer {@link #testing} for new experiments.
	 */
	public static List<TupleOfData> testing3(String modelName, Model m, Integer... parameter) {
		List<TupleOfData> result = new ArrayList<TupleOfData>();
		int id = 0;

		m.clearExploredGraph();
		Iterator<FactorNode> bfsExpander = new BFS(m);
		IncrementalBeliefPropagationWithConditioning sbp = new IncrementalBeliefPropagationWithConditioning(m);

		// One TupleOfData row per S-BP expansion step.
		while (bfsExpander.hasNext()) {
			TupleOfData t = new TupleOfData();

			long tStart = System.currentTimeMillis();
			Bound inferenceResult = sbp.expandAndComputeInference(bfsExpander);
			long tEnd = System.currentTimeMillis();
			long tDelta = tEnd - tStart;
			t.time = tDelta / 1000.0;

			t.typeOfComputationUsed = "S-BP";
			t.graphicalModelName = modelName;
			t.id = id++;
			t.numberOfExtremePoints = inferenceResult.getArguments().size();
			Pair<Double, Double> minAndMaxProbabilityofQueryequalsTrue =
					ModelGenerator.maxMinProbability(inferenceResult, m);
			t.minAndMaxProbabilityofQueryequalsTrue = minAndMaxProbabilityofQueryequalsTrue.first;
			t.maxAndMaxProbabilityofQueryequalsTrue = minAndMaxProbabilityofQueryequalsTrue.second;
			t.IntervalLength = t.maxAndMaxProbabilityofQueryequalsTrue - t.minAndMaxProbabilityofQueryequalsTrue;
			t.allExplored = m.AllExplored();

			for (int i = 0; i < parameter.length && i < t.parameter.length; i++) {
				t.parameter[i] = parameter[i];
			}

			result.add(t);
			println("....");
		}

		// Final row: exact inference with SGDPLL, for comparison.
		TupleOfData t = new TupleOfData();
		long tStart = System.currentTimeMillis();
		Expression inferenceLVE = ModelGenerator.lveCalculation(m);
		Bound EncapsulatingInference = Bounds.makeSingleElementBound(inferenceLVE, true);
		Pair<Double, Double> minAndMaxProbabilityofQueryequalsTrue =
				ModelGenerator.maxMinProbability(EncapsulatingInference, m);
		long tEnd = System.currentTimeMillis();
		long tDelta = tEnd - tStart;
		t.time = tDelta / 1000.0;

		t.minAndMaxProbabilityofQueryequalsTrue = minAndMaxProbabilityofQueryequalsTrue.first;
		t.maxAndMaxProbabilityofQueryequalsTrue = minAndMaxProbabilityofQueryequalsTrue.second;
		t.typeOfComputationUsed = "SGDPLL";
		t.graphicalModelName = modelName;
		t.id = id++;
		t.numberOfExtremePoints = 0;
		t.IntervalLength = 0;
		t.allExplored = true;

		for (int i = 0; i < parameter.length && i < t.parameter.length; i++) {
			t.parameter[i] = parameter[i];
		}

		result.add(t);

		println("------------------------------------------------------------");
		return result;
	}

	/**
	 * This tests a model and, instead of printing information, stores it in a list of data structures.
	 * Each element of the list corresponds to an iteration of the algorithm;
	 * a final element holds the exact SGDPLL result for comparison.
	 *
	 * @param modelName label stored in each row
	 * @param m         model to explore
	 * @param parameter free-form model parameters (e.g. grid dimensions) copied into each row
	 * @return one {@code TupleOfData} per S-BP iteration, plus one for SGDPLL
	 */
	public static List<TupleOfData> testing(String modelName, Model m, Integer... parameter) {
		List<TupleOfData> result = new ArrayList<TupleOfData>();
		int id = 0;

		m.clearExploredGraph();
		Iterator<FactorNode> bfsExpander = new BFS(m);
		IncrementalBeliefPropagationWithConditioning sbp = new IncrementalBeliefPropagationWithConditioning(m);
		double tTotalTime = 0;

		// One TupleOfData row per S-BP expansion step, with cumulative time.
		while (bfsExpander.hasNext()) {
			TupleOfData t = new TupleOfData();

			long tStart = System.currentTimeMillis();
			Bound inferenceResult = sbp.expandAndComputeInference(bfsExpander);
			long tEnd = System.currentTimeMillis();
			long tDelta = tEnd - tStart;
			t.time = tDelta / 1000.0;
			tTotalTime += tDelta / 1000.0;
			// t.totalTime starts at 0 for a fresh row, so "+=" is effectively an assignment.
			t.totalTime += tTotalTime;

			t.typeOfComputationUsed = "S-BP";
			t.graphicalModelName = modelName;
			t.id = id++;
			t.numberOfExtremePoints = inferenceResult.getArguments().size();
			Pair<Double, Double> minAndMaxProbabilityofQueryequalsTrue =
					ModelGenerator.maxMinProbability(inferenceResult, m);
			t.minAndMaxProbabilityofQueryequalsTrue = minAndMaxProbabilityofQueryequalsTrue.first;
			t.maxAndMaxProbabilityofQueryequalsTrue = minAndMaxProbabilityofQueryequalsTrue.second;
			t.IntervalLength = t.maxAndMaxProbabilityofQueryequalsTrue - t.minAndMaxProbabilityofQueryequalsTrue;
			t.allExplored = m.AllExplored();

			for (int i = 0; i < parameter.length && i < t.parameter.length; i++) {
				t.parameter[i] = parameter[i];
			}

			result.add(t);
			println("....");
		}

		// Final row: exact inference with SGDPLL, for comparison.
		TupleOfData t = new TupleOfData();
		long tStart = System.currentTimeMillis();
		Expression inferenceLVE = ModelGenerator.lveCalculation(m);
		Bound encapsulatingInference = Bounds.makeSingleElementBound(inferenceLVE, true);
		Pair<Double, Double> minAndMaxProbabilityofQueryequalsTrue =
				ModelGenerator.maxMinProbability(encapsulatingInference, m);
		long tEnd = System.currentTimeMillis();
		long tDelta = tEnd - tStart;
		t.time = tDelta / 1000.0;
		t.totalTime = t.time;

		t.minAndMaxProbabilityofQueryequalsTrue = minAndMaxProbabilityofQueryequalsTrue.first;
		t.maxAndMaxProbabilityofQueryequalsTrue = minAndMaxProbabilityofQueryequalsTrue.second;
		t.typeOfComputationUsed = "SGDPLL";
		t.graphicalModelName = modelName;
		t.id = id++;
		t.numberOfExtremePoints = 0;
		t.IntervalLength = 0;
		t.allExplored = true;

		for (int i = 0; i < parameter.length && i < t.parameter.length; i++) {
			t.parameter[i] = parameter[i];
		}

		result.add(t);

		println("------------------------- Done -----------------------------------");
		return result;
	}

	/**
	 * This prints in a file the content of trying many different models.
	 * The idea is to test many different models (with the function {@code testing}) and have them printed in the same .csv file.
	 *
	 * Uses try-with-resources so the writer is closed even on failure; I/O
	 * errors are reported to stderr instead of being silently swallowed.
	 *
	 * @param filename     output CSV file name
	 * @param testedModels one list of rows per tested model
	 */
	public static void testingAndWritingToFile(String filename, List<List<TupleOfData>> testedModels) {
		try (PrintWriter writer = new PrintWriter(filename, "UTF-8")) {
			// print head of dataset
			writer.println("Id,"
					+ "typeOfComputationUsed,"
					+ "graphicalModelName,"
					+ "minAndMaxProbabilityofQueryequalsTrue,"
					+ "maxAndMaxProbabilityofQueryequalsTrue,"
					+ "IntervalLength,"
					+ "numberOfExtremePoints,"
					+ "allExplored,"
					+ "time,"
					+ "totaltime,"
					+ "Parameter 1,"
					+ "Parameter 2,"
					+ "Parameter 3,"
					+ "Parameter 4,"
					+ "Parameter 5");
			// printLines
			for (List<TupleOfData> l : testedModels) {
				for (TupleOfData t : l) {
					writer.print(t.id + "," + t.typeOfComputationUsed + "," + t.graphicalModelName + ","
							+ t.minAndMaxProbabilityofQueryequalsTrue + ","
							+ t.maxAndMaxProbabilityofQueryequalsTrue + ","
							+ t.IntervalLength + ","
							+ t.numberOfExtremePoints + ","
							+ t.allExplored + ","
							+ t.time + ","
							+ t.totalTime);
					for (int i = 0; i < t.parameter.length; i++) {
						writer.print("," + t.parameter[i]);
					}
					writer.println();
				}
			}
		} catch (IOException e) {
			// Formerly an empty catch ("// do something"); at least tell the
			// experimenter that their results were not written.
			System.err.println("Failed to write results to " + filename + ": " + e.getMessage());
			e.printStackTrace();
		}
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.lens.cube.metadata; import java.util.*; import org.apache.lens.cube.error.LensCubeErrorCode; import org.apache.lens.server.api.error.LensException; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Table; import com.google.common.collect.Lists; public class DerivedCube extends AbstractCubeTable implements CubeInterface { private static final List<FieldSchema> COLUMNS = new ArrayList<FieldSchema>(); static { COLUMNS.add(new FieldSchema("dummy", "string", "dummy column")); } private final Cube parent; private final Set<String> measures = new HashSet<String>(); private final Set<String> dimensions = new HashSet<String>(); public DerivedCube(String name, Set<String> measures, Set<String> dimensions, Cube parent) throws LensException { this(name, measures, dimensions, new HashMap<String, String>(), 0L, parent); } public DerivedCube(String name, Set<String> measures, Set<String> dimensions, Map<String, String> properties, double weight, Cube parent) throws LensException { super(name, COLUMNS, properties, weight); for (String msr : 
measures) { this.measures.add(msr.toLowerCase()); } for (String dim : dimensions) { this.dimensions.add(dim.toLowerCase()); } this.parent = parent; validate(); addProperties(); } public void validate() throws LensException { List<String> measuresNotInParentCube = Lists.newArrayList(); List<String> dimAttributesNotInParentCube = Lists.newArrayList(); for (String msr : measures) { if (parent.getMeasureByName(msr) == null) { measuresNotInParentCube.add(msr); } } for (String dim : dimensions) { if (parent.getDimAttributeByName(dim) == null) { dimAttributesNotInParentCube.add(dim); } } StringBuilder validationErrorStringBuilder = new StringBuilder(); String sep = ""; boolean invalid = false; if (!measuresNotInParentCube.isEmpty()) { validationErrorStringBuilder.append(sep).append("Measures ").append(measuresNotInParentCube); sep = " and "; invalid = true; } if (!dimAttributesNotInParentCube.isEmpty()) { validationErrorStringBuilder.append(sep).append("Dim Attributes ").append(dimAttributesNotInParentCube); invalid = true; } if (invalid) { throw new LensException(LensCubeErrorCode.ERROR_IN_ENTITY_DEFINITION.getLensErrorInfo(), "Derived cube invalid: " + validationErrorStringBuilder.append(" were not present in " + "parent cube ") .append(parent)); } } public DerivedCube(Table tbl, Cube parent) { super(tbl); this.measures.addAll(getMeasures(getName(), getProperties())); this.dimensions.addAll(getDimensions(getName(), getProperties())); this.parent = parent; } private Set<CubeMeasure> cachedMeasures = new HashSet<CubeMeasure>(); private Set<CubeDimAttribute> cachedDims = new HashSet<CubeDimAttribute>(); public Set<CubeMeasure> getMeasures() { synchronized (measures) { if (cachedMeasures.isEmpty()) { for (String msr : measures) { cachedMeasures.add(parent.getMeasureByName(msr)); } } } return cachedMeasures; } public Set<CubeDimAttribute> getDimAttributes() { synchronized (dimensions) { if (cachedDims.isEmpty()) { for (String dim : dimensions) { 
cachedDims.add(parent.getDimAttributeByName(dim)); } } } return cachedDims; } @Override public CubeTableType getTableType() { return CubeTableType.CUBE; } @Override public Set<String> getStorages() { return null; } @Override public void addProperties() { super.addProperties(); getProperties().put(MetastoreUtil.getCubeMeasureListKey(getName()), StringUtils.join(measures, ",").toLowerCase()); getProperties().put(MetastoreUtil.getCubeDimensionListKey(getName()), StringUtils.join(dimensions, ",").toLowerCase()); getProperties().put(MetastoreUtil.getParentCubeNameKey(getName()), parent.getName().toLowerCase()); getProperties().put(MetastoreUtil.getParentCubeNameKey(getName()), parent.getName().toLowerCase()); } public static Set<String> getMeasures(String name, Map<String, String> props) { Set<String> measures = new HashSet<String>(); String measureStr = props.get(MetastoreUtil.getCubeMeasureListKey(name)); measures.addAll(Arrays.asList(StringUtils.split(measureStr, ','))); return measures; } public Set<String> getTimedDimensions() { String str = getProperties().get(MetastoreUtil.getCubeTimedDimensionListKey(getName())); if (str != null) { Set<String> timedDimensions = new HashSet<String>(); timedDimensions.addAll(Arrays.asList(StringUtils.split(str, ','))); return timedDimensions; } else { return parent.getTimedDimensions(); } } public static Set<String> getDimensions(String name, Map<String, String> props) { Set<String> dimensions = new HashSet<String>(); String dimStr = props.get(MetastoreUtil.getCubeDimensionListKey(name)); dimensions.addAll(Arrays.asList(StringUtils.split(dimStr, ','))); return dimensions; } public Cube getParent() { return parent; } @Override public int hashCode() { return super.hashCode(); } @Override public boolean equals(Object obj) { if (!super.equals(obj)) { return false; } DerivedCube other = (DerivedCube) obj; if (!this.getParent().equals(other.getParent())) { return false; } if (this.getMeasureNames() == null) { if (other.getMeasureNames() 
!= null) { return false; } } else if (!this.getMeasureNames().equals(other.getMeasureNames())) { return false; } if (this.getDimAttributeNames() == null) { if (other.getDimAttributeNames() != null) { return false; } } else if (!this.getDimAttributeNames().equals(other.getDimAttributeNames())) { return false; } return true; } public CubeDimAttribute getDimAttributeByName(String dimension) { if (dimensions.contains(dimension.toLowerCase())) { return parent.getDimAttributeByName(dimension); } return null; } public CubeMeasure getMeasureByName(String measure) { if (measures.contains(measure.toLowerCase())) { return parent.getMeasureByName(measure); } return null; } public CubeColumn getColumnByName(String column) { CubeColumn cubeCol = (CubeColumn) getMeasureByName(column); if (cubeCol == null) { cubeCol = (CubeColumn) getDimAttributeByName(column); } return cubeCol; } /** * Add a new measure * * @param measure * @throws HiveException */ public void addMeasure(String measure) throws HiveException { measures.add(measure.toLowerCase()); getProperties().put(MetastoreUtil.getCubeMeasureListKey(getName()), StringUtils.join(measures, ",").toLowerCase()); } /** * Add a new dimension * * @param dimension * @throws HiveException */ public void addDimension(String dimension) throws HiveException { dimensions.add(dimension.toLowerCase()); getProperties().put(MetastoreUtil.getCubeDimensionListKey(getName()), StringUtils.join(dimensions, ",").toLowerCase()); } /** * Remove the dimension with name specified * * @param dimName */ public void removeDimension(String dimName) { dimensions.remove(dimName.toLowerCase()); getProperties().put(MetastoreUtil.getCubeDimensionListKey(getName()), StringUtils.join(dimensions, ",").toLowerCase()); } /** * Remove the measure with name specified * * @param msrName */ public void removeMeasure(String msrName) { measures.remove(msrName.toLowerCase()); getProperties().put(MetastoreUtil.getCubeMeasureListKey(getName()), StringUtils.join(measures, 
",").toLowerCase()); } @Override public boolean isDerivedCube() { return true; } @Override public Set<String> getMeasureNames() { return measures; } @Override public Set<String> getDimAttributeNames() { Set<String> dimNames = new HashSet<String>(); for (CubeDimAttribute f : getDimAttributes()) { MetastoreUtil.addColumnNames(f, dimNames); } return dimNames; } @Override public boolean allFieldsQueriable() { return true; } @Override public Set<ExprColumn> getExpressions() { return null; } @Override public ExprColumn getExpressionByName(String exprName) { return null; } @Override public Set<String> getAllFieldNames() { Set<String> fieldNames = new HashSet<String>(); fieldNames.addAll(getMeasureNames()); fieldNames.addAll(getDimAttributeNames()); fieldNames.addAll(getTimedDimensions()); return fieldNames; } @Override public Set<String> getExpressionNames() { // TODO Auto-generated method stub return null; } @Override public Set<JoinChain> getJoinChains() { // TODO Auto-generated method stub return null; } @Override public JoinChain getChainByName(String chainName) { // TODO Auto-generated method stub return null; } @Override public Set<String> getJoinChainNames() { // TODO Auto-generated method stub return null; } }
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/tensorboard_data.proto
// NOTE(review): generated code — any change belongs in the .proto, not here.

package com.google.cloud.aiplatform.v1beta1;

/**
 *
 *
 * <pre>
 * One blob (e.g, image, graph) viewable on a blob metric plot.
 * </pre>
 *
 * Protobuf type {@code google.cloud.aiplatform.v1beta1.TensorboardBlob}
 */
public final class TensorboardBlob extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.TensorboardBlob)
    TensorboardBlobOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use TensorboardBlob.newBuilder() to construct.
  private TensorboardBlob(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: empty id, empty data bytes.
  private TensorboardBlob() {
    id_ = "";
    data_ = com.google.protobuf.ByteString.EMPTY;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new TensorboardBlob();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tags until EOF (tag 0), filling
  // id_ (field 1, string) and data_ (field 2, bytes); anything else is kept
  // in unknownFields.
  private TensorboardBlob(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              java.lang.String s = input.readStringRequireUtf8();

              id_ = s;
              break;
            }
          case 18:
            {
              data_ = input.readBytes();
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.TensorboardDataProto
        .internal_static_google_cloud_aiplatform_v1beta1_TensorboardBlob_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.TensorboardDataProto
        .internal_static_google_cloud_aiplatform_v1beta1_TensorboardBlob_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.TensorboardBlob.class,
            com.google.cloud.aiplatform.v1beta1.TensorboardBlob.Builder.class);
  }

  public static final int ID_FIELD_NUMBER = 1;
  // Stored as Object: may hold a String or a ByteString; lazily converted and
  // cached by the accessors below (standard generated-code idiom).
  private volatile java.lang.Object id_;
  /**
   *
   *
   * <pre>
   * Output only. A URI safe key uniquely identifying a blob. Can be used to locate the blob
   * stored in the Cloud Storage bucket of the consumer project.
   * </pre>
   *
   * <code>string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The id.
   */
  @java.lang.Override
  public java.lang.String getId() {
    java.lang.Object ref = id_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      id_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Output only. A URI safe key uniquely identifying a blob. Can be used to locate the blob
   * stored in the Cloud Storage bucket of the consumer project.
   * </pre>
   *
   * <code>string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The bytes for id.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getIdBytes() {
    java.lang.Object ref = id_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      id_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int DATA_FIELD_NUMBER = 2;
  private com.google.protobuf.ByteString data_;
  /**
   *
   *
   * <pre>
   * Optional. The bytes of the blob is not present unless it's returned by the
   * ReadTensorboardBlobData endpoint.
   * </pre>
   *
   * <code>bytes data = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The data.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getData() {
    return data_;
  }

  private byte memoizedIsInitialized = -1;

  // No required fields in this message, so initialization always succeeds;
  // the result is memoized in memoizedIsInitialized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only non-default fields (proto3 semantics).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_);
    }
    if (!data_.isEmpty()) {
      output.writeBytes(2, data_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_);
    }
    if (!data_.isEmpty()) {
      size += com.google.protobuf.CodedOutputStream.computeBytesSize(2, data_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.TensorboardBlob)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1beta1.TensorboardBlob other =
        (com.google.cloud.aiplatform.v1beta1.TensorboardBlob) obj;

    if (!getId().equals(other.getId())) return false;
    if (!getData().equals(other.getData())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + ID_FIELD_NUMBER;
    hash = (53 * hash) + getId().hashCode();
    hash = (37 * hash) + DATA_FIELD_NUMBER;
    hash = (53 * hash) + getData().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // --- Standard generated parse entry points for every supported input type. ---

  public static com.google.cloud.aiplatform.v1beta1.TensorboardBlob parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.TensorboardBlob parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.TensorboardBlob parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.TensorboardBlob parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.TensorboardBlob parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.TensorboardBlob parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.TensorboardBlob parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.TensorboardBlob parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.TensorboardBlob parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.TensorboardBlob parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.TensorboardBlob parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.TensorboardBlob parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.aiplatform.v1beta1.TensorboardBlob prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * One blob (e.g, image, graph) viewable on a blob metric plot.
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1beta1.TensorboardBlob}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.TensorboardBlob)
      com.google.cloud.aiplatform.v1beta1.TensorboardBlobOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.TensorboardDataProto
          .internal_static_google_cloud_aiplatform_v1beta1_TensorboardBlob_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1beta1.TensorboardDataProto
          .internal_static_google_cloud_aiplatform_v1beta1_TensorboardBlob_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1beta1.TensorboardBlob.class,
              com.google.cloud.aiplatform.v1beta1.TensorboardBlob.Builder.class);
    }

    // Construct using com.google.cloud.aiplatform.v1beta1.TensorboardBlob.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    // Resets both fields to their proto3 defaults.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      id_ = "";

      data_ = com.google.protobuf.ByteString.EMPTY;

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1beta1.TensorboardDataProto
          .internal_static_google_cloud_aiplatform_v1beta1_TensorboardBlob_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.TensorboardBlob getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1beta1.TensorboardBlob.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.TensorboardBlob build() {
      com.google.cloud.aiplatform.v1beta1.TensorboardBlob result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.TensorboardBlob buildPartial() {
      com.google.cloud.aiplatform.v1beta1.TensorboardBlob result =
          new com.google.cloud.aiplatform.v1beta1.TensorboardBlob(this);
      result.id_ = id_;
      result.data_ = data_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1beta1.TensorboardBlob) {
        return mergeFrom((com.google.cloud.aiplatform.v1beta1.TensorboardBlob) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge: a field is copied only when it is non-default in `other`.
    public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.TensorboardBlob other) {
      if (other == com.google.cloud.aiplatform.v1beta1.TensorboardBlob.getDefaultInstance())
        return this;
      if (!other.getId().isEmpty()) {
        id_ = other.id_;
        onChanged();
      }
      if (other.getData() != com.google.protobuf.ByteString.EMPTY) {
        setData(other.getData());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Parses from the wire and merges into this builder; on failure, anything
    // parsed before the error is still merged (see finally block).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.aiplatform.v1beta1.TensorboardBlob parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.aiplatform.v1beta1.TensorboardBlob) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object id_ = "";
    /**
     *
     *
     * <pre>
     * Output only. A URI safe key uniquely identifying a blob. Can be used to locate the blob
     * stored in the Cloud Storage bucket of the consumer project.
     * </pre>
     *
     * <code>string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The id.
     */
    public java.lang.String getId() {
      java.lang.Object ref = id_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        id_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. A URI safe key uniquely identifying a blob. Can be used to locate the blob
     * stored in the Cloud Storage bucket of the consumer project.
     * </pre>
     *
     * <code>string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bytes for id.
     */
    public com.google.protobuf.ByteString getIdBytes() {
      java.lang.Object ref = id_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        id_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. A URI safe key uniquely identifying a blob. Can be used to locate the blob
     * stored in the Cloud Storage bucket of the consumer project.
     * </pre>
     *
     * <code>string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The id to set.
     * @return This builder for chaining.
     */
    public Builder setId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      id_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. A URI safe key uniquely identifying a blob. Can be used to locate the blob
     * stored in the Cloud Storage bucket of the consumer project.
     * </pre>
     *
     * <code>string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearId() {

      id_ = getDefaultInstance().getId();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. A URI safe key uniquely identifying a blob. Can be used to locate the blob
     * stored in the Cloud Storage bucket of the consumer project.
     * </pre>
     *
     * <code>string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The bytes for id to set.
     * @return This builder for chaining.
     */
    public Builder setIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      id_ = value;
      onChanged();
      return this;
    }

    private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
    /**
     *
     *
     * <pre>
     * Optional. The bytes of the blob is not present unless it's returned by the
     * ReadTensorboardBlobData endpoint.
     * </pre>
     *
     * <code>bytes data = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The data.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getData() {
      return data_;
    }
    /**
     *
     *
     * <pre>
     * Optional. The bytes of the blob is not present unless it's returned by the
     * ReadTensorboardBlobData endpoint.
     * </pre>
     *
     * <code>bytes data = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The data to set.
     * @return This builder for chaining.
     */
    public Builder setData(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }

      data_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The bytes of the blob is not present unless it's returned by the
     * ReadTensorboardBlobData endpoint.
     * </pre>
     *
     * <code>bytes data = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearData() {

      data_ = getDefaultInstance().getData();
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.TensorboardBlob)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.TensorboardBlob)
  private static final com.google.cloud.aiplatform.v1beta1.TensorboardBlob DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.TensorboardBlob();
  }

  public static com.google.cloud.aiplatform.v1beta1.TensorboardBlob getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<TensorboardBlob> PARSER =
      new com.google.protobuf.AbstractParser<TensorboardBlob>() {
        @java.lang.Override
        public TensorboardBlob parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new TensorboardBlob(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<TensorboardBlob> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<TensorboardBlob> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.TensorboardBlob getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */ /* This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package processing.app; import processing.app.debug.MessageConsumer; import processing.core.*; import static processing.app.I18n._; import java.awt.*; import java.awt.event.*; import javax.swing.*; import javax.swing.border.*; import javax.swing.event.*; import javax.swing.text.*; import java.io.*; import java.net.*; import java.util.*; public class SerialMonitor extends JFrame implements MessageConsumer { private Serial serial; private String port; private JTextArea textArea; private JScrollPane scrollPane; private JTextField textField; private JButton sendButton; private JCheckBox autoscrollBox; private JComboBox lineEndings; private JComboBox serialRates; private int serialRate; public SerialMonitor(String port) { super(port); this.port = port; addWindowListener(new WindowAdapter() { public void windowClosing(WindowEvent e) { closeSerialPort(); } }); // obvious, no? 
KeyStroke wc = Editor.WINDOW_CLOSE_KEYSTROKE; getRootPane().getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT).put(wc, "close"); getRootPane().getActionMap().put("close", new AbstractAction() { public void actionPerformed(ActionEvent e) { closeSerialPort(); setVisible(false); }}); getContentPane().setLayout(new BorderLayout()); Font consoleFont = Theme.getFont("console.font"); Font editorFont = Preferences.getFont("editor.font"); Font font = new Font(consoleFont.getName(), consoleFont.getStyle(), editorFont.getSize()); textArea = new JTextArea(16, 40); textArea.setEditable(false); textArea.setFont(font); // don't automatically update the caret. that way we can manually decide // whether or not to do so based on the autoscroll checkbox. ((DefaultCaret)textArea.getCaret()).setUpdatePolicy(DefaultCaret.NEVER_UPDATE); scrollPane = new JScrollPane(textArea); getContentPane().add(scrollPane, BorderLayout.CENTER); JPanel pane = new JPanel(); pane.setLayout(new BoxLayout(pane, BoxLayout.X_AXIS)); pane.setBorder(new EmptyBorder(4, 4, 4, 4)); textField = new JTextField(40); textField.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { send(textField.getText()); textField.setText(""); }}); sendButton = new JButton(_("Send")); sendButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { send(textField.getText()); textField.setText(""); }}); pane.add(textField); pane.add(Box.createRigidArea(new Dimension(4, 0))); pane.add(sendButton); getContentPane().add(pane, BorderLayout.NORTH); pane = new JPanel(); pane.setLayout(new BoxLayout(pane, BoxLayout.X_AXIS)); pane.setBorder(new EmptyBorder(4, 4, 4, 4)); autoscrollBox = new JCheckBox(_("Autoscroll"), true); lineEndings = new JComboBox(new String[] { _("No line ending"), _("Newline"), _("Carriage return"), _("Both NL & CR") }); lineEndings.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { 
Preferences.setInteger("serial.line_ending", lineEndings.getSelectedIndex()); } }); if (Preferences.get("serial.line_ending") != null) { lineEndings.setSelectedIndex(Preferences.getInteger("serial.line_ending")); } lineEndings.setMaximumSize(lineEndings.getMinimumSize()); String[] serialRateStrings = { "300","1200","2400","4800","9600","14400", "19200","28800","38400","57600","115200" }; serialRates = new JComboBox(); for (int i = 0; i < serialRateStrings.length; i++) serialRates.addItem(serialRateStrings[i] + " " + _("baud")); serialRate = Preferences.getInteger("serial.debug_rate"); serialRates.setSelectedItem(serialRate + " " + _("baud")); serialRates.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { String wholeString = (String) serialRates.getSelectedItem(); String rateString = wholeString.substring(0, wholeString.indexOf(' ')); serialRate = Integer.parseInt(rateString); Preferences.set("serial.debug_rate", rateString); closeSerialPort(); try { openSerialPort(); } catch (SerialException e) { System.err.println(e); } }}); serialRates.setMaximumSize(serialRates.getMinimumSize()); pane.add(autoscrollBox); pane.add(Box.createHorizontalGlue()); pane.add(lineEndings); pane.add(Box.createRigidArea(new Dimension(8, 0))); pane.add(serialRates); getContentPane().add(pane, BorderLayout.SOUTH); pack(); Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); if (Preferences.get("last.screen.height") != null) { // if screen size has changed, the window coordinates no longer // make sense, so don't use them unless they're identical int screenW = Preferences.getInteger("last.screen.width"); int screenH = Preferences.getInteger("last.screen.height"); if ((screen.width == screenW) && (screen.height == screenH)) { String locationStr = Preferences.get("last.serial.location"); if (locationStr != null) { int[] location = PApplet.parseInt(PApplet.split(locationStr, ',')); setPlacement(location); } } } } protected void 
setPlacement(int[] location) {
    // Restore a previously saved window geometry: {x, y, width, height}.
    setBounds(location[0], location[1], location[2], location[3]);
  }

  /** Captures the current window geometry as {x, y, width, height}. */
  protected int[] getPlacement() {
    int[] location = new int[4];

    // Get the dimensions of the Frame
    Rectangle bounds = getBounds();
    location[0] = bounds.x;
    location[1] = bounds.y;
    location[2] = bounds.width;
    location[3] = bounds.height;

    return location;
  }

  /**
   * Writes the given text to the serial port, appending the line ending
   * currently selected in the combo box (0 = none, 1 = \n, 2 = \r, 3 = \r\n).
   * No-op when no port is open.
   */
  private void send(String s) {
    if (serial != null) {
      switch (lineEndings.getSelectedIndex()) {
        case 1: s += "\n"; break;
        case 2: s += "\r"; break;
        case 3: s += "\r\n"; break;
      }
      serial.write(s);
    }
  }

  /**
   * Opens the serial connection, with Teensyduino-specific handling:
   * optional restart command, safe-baud-rate substitution, and an optional
   * fake (socket-backed) serial gateway. No-op if already open.
   */
  public void openSerialPort() throws SerialException {
    if (serial != null) return;
    if (Base.isTeensyduino() == false) {
      serial = new Serial(port, serialRate);
    } else {
      // only do this weird stuff if we're sure it's teensy!
      if (Base.getBoardMenuPreferenceBoolean("serial.restart_cmd")) {
        RestartCommand r = new RestartCommand(port);
        serial = r.getSerial();
      }
      String fake = Base.getBoardMenuPreference("fake_serial");
      if (fake == null) {
        if (Base.getBoardMenuPreferenceBoolean("serial.safe_baud_rates_only")) {
          // Some boards can't handle these rates; map to the nearest safe one.
          if (serialRate == 14400) serialRate = 19200;
          if (serialRate == 28800) serialRate = 38400;
        }
        if (serial == null) {
          serial = new Serial(port, serialRate);
        } else {
          // RestartCommand already opened the port; just adjust the baud.
          serial.setBaud(serialRate);
        }
      } else {
        serial = new FakeSerial(fake);
      }
    }
    serial.addListener(this);
  }

  /**
   * Saves the window placement, clears the text area, and releases the port.
   * Safe to call when no port is open.
   */
  public void closeSerialPort() {
    if (serial != null) {
      int[] location = getPlacement();
      String locationStr = PApplet.join(PApplet.str(location), ",");
      Preferences.set("last.serial.location", locationStr);
      textArea.setText("");
      serial.dispose();
      serial = null;
    }
  }

  /**
   * Serial-data callback; appends the received text on the EDT and follows
   * the caret to the end when autoscroll is enabled.
   */
  public void message(final String s) {
    SwingUtilities.invokeLater(new Runnable() {
      public void run() {
        textArea.append(s);
        if (autoscrollBox.isSelected()) {
          textArea.setCaretPosition(textArea.getDocument().getLength());
        }
      }});
  }
}

/**
 * A Serial stand-in that talks to a local "gateway" helper process over a
 * loopback TCP socket instead of a real serial port. The gateway is started
 * on demand and torn down via a JVM shutdown hook.
 */
class FakeSerial extends Serial {
  Socket sock=null;
  inputListener listener=null;
  // Candidate loopback ports the gateway may be listening on.
  int[] addrlist = {28541,4984,18924,16924,27183,31091};
  // Shared gateway process; static so only one is ever launched per JVM.
  static Process gateway=null;
  static boolean gateway_shutdown_scheduled=false;

  /**
   * Tries to connect to an already-running gateway; if that fails, attempts
   * to start one (at most twice) and retries, giving up after 4 rounds.
   */
  public FakeSerial(String name) throws SerialException {
    super("fake serial");
    int attempt=1;
    do {
      if (gateway_connect(name)) return;
      if (attempt <= 2 && !gateway_start(name)) {
        System.err.println("Error starting " + name);
      }
      delay_20ms();
    } while (++attempt < 4);
    throw new SerialException("no connection");
  }

  /**
   * Attempts to connect to the gateway on each candidate port and verify the
   * welcome banner (the gateway echoes its own name). Returns true on the
   * first verified connection.
   */
  private boolean gateway_connect(String name) {
    int namelen = name.length();
    byte[] buf = new byte[namelen];
    byte[] namebuf = name.getBytes();
    InetAddress local;
    try {
      byte[] loop = new byte[] {127, 0, 0, 1};
      local = InetAddress.getByAddress("localhost", loop);
    } catch (Exception e) {
      sock = null;
      return false;
    }
    for (int i=0; i<addrlist.length; i++) {
      try {
        sock = new Socket();
        InetSocketAddress addr = new InetSocketAddress(local, addrlist[i]);
        sock.connect(addr, 50); // if none, should timeout instantly
        // but windows will wait up to 1 sec!
        input = sock.getInputStream();
        output = sock.getOutputStream();
      } catch (Exception e) {
        // NOTE(review): a connect failure aborts the whole scan instead of
        // trying the remaining ports in addrlist — looks like it should
        // `continue`; confirm intended behavior before changing.
        sock = null;
        return false;
      }
      // check for welcome message
      try {
        int wait = 0;
        // Poll up to ~120ms for the banner to arrive.
        while (input.available() < namelen) {
          if (++wait > 6) throw new Exception();
          delay_20ms();
        }
        input.read(buf, 0, namelen);
        String id = new String(buf, 0, namelen);
        for (int n=0; n<namelen; n++) {
          if (buf[n] != namebuf[n]) throw new Exception();
        }
      } catch (Exception e) {
        // mistakenly connected to some other program!
        close_sock();
        continue;
      }
      return true;
    }
    sock = null;
    return false;
  }

  /** Closes the socket, ignoring errors, and clears the reference. */
  private void close_sock() {
    try {
      sock.close();
    } catch (Exception e) { }
    sock = null;
  }

  /** Sleeps ~20ms; interruption is deliberately ignored (best-effort delay). */
  private void delay_20ms() {
    try {
      Thread.sleep(20);
    } catch (Exception e) { }
  }

  /** Stops the reader thread, closes the socket, and kills the gateway. */
  public void dispose() {
    if (listener != null) {
      listener.interrupt();
      listener.consumer = null;
      listener = null;
    }
    if (sock != null) {
      try {
        sock.close();
      } catch (Exception e) { }
      sock = null;
    }
    dispose_gateway();
  }

  /** Destroys the shared gateway process, if any (also run by shutdown hook). */
  public static void dispose_gateway() {
    if (gateway != null) {
      gateway.destroy();
      gateway = null;
    }
  }

  /**
   * Launches the gateway helper from the hardware tools directory and
   * registers a one-time shutdown hook to clean it up.
   */
  private boolean gateway_start(String cmd) {
    String path = Base.getHardwarePath() + "/tools/";
    try {
      gateway = Runtime.getRuntime().exec(path + cmd);
      if (!gateway_shutdown_scheduled) {
        Runtime.getRuntime().addShutdownHook(new Thread() {
          public void run() {
            FakeSerial.dispose_gateway();
          }
        });
        gateway_shutdown_scheduled = true;
      }
    } catch (Exception e) {
      gateway = null;
      return false;
    }
    return true;
  }

  /**
   * Starts (or restarts) the background reader thread that forwards socket
   * data to the given consumer. No-op when not connected.
   */
  public void addListener(MessageConsumer c) {
    if (sock == null) return;
    if (listener != null) listener.interrupt();
    listener = new inputListener();
    listener.input = input;
    listener.consumer = c;
    listener.start();
  }

  /** Writes raw bytes to the gateway socket; I/O errors are swallowed. */
  public void write(byte bytes[]) {
    if (output == null) return;
    if (bytes.length > 0) {
      try {
        output.write(bytes, 0, bytes.length);
      } catch (IOException e) { }
    }
  }

  /** DTR has no meaning for the socket-backed fake port; intentionally empty. */
  public void setDTR(boolean state) {
  }

  /** The fake port never enumerates real devices. */
  static public ArrayList<String> list() {
    return new ArrayList<String>();
  }
}

/**
 * Issues a board restart: either by opening the real port at the magic
 * 150 baud "restart" rate, or (in fake-serial mode) by running the
 * external teensy_restart tool.
 */
class RestartCommand {
  private Process restarter=null;
  private Serial s=null;

  public RestartCommand(String port) {
    if (Base.getBoardMenuPreference("fake_serial") == null) {
      try {
        // 150 baud is the sentinel rate the bootloader treats as "restart".
        s = new Serial(port, 150);
      } catch (SerialException e) {
        s = null;
      }
    } else {
      String path = Base.getHardwarePath() + "/tools/";
      try {
        restarter = Runtime.getRuntime().exec(path + "teensy_restart");
      } catch (Exception e) { }
    }
  }

  /** Returns the port opened for the restart, or null in fake-serial mode. */
  public Serial getSerial() {
    return s;
  }
}

/**
 * Background thread that pumps bytes from an InputStream to a
 * MessageConsumer until EOF, error, or interruption.
 */
class inputListener extends Thread {
  MessageConsumer consumer;
  InputStream input;

  public void run() {
    byte[] buffer = new byte[1024];
    int num, errcount=0; // NOTE(review): errcount is never used
    try {
      while (true) {
        num = input.read(buffer);
        if (num <= 0) break; // EOF or closed socket
        consumer.message(new String(buffer, 0, num));
      }
    } catch (Exception e) { }
  }
}
package br.com.uol.pagseguro.api.preapproval;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.modules.junit4.PowerMockRunner;

import java.io.IOException;
import java.math.BigDecimal;
import java.text.DateFormat;
import java.text.SimpleDateFormat;

import br.com.uol.pagseguro.api.Resource4Test;
import br.com.uol.pagseguro.api.common.domain.ShippingType;
import br.com.uol.pagseguro.api.common.domain.builder.AddressBuilder;
import br.com.uol.pagseguro.api.common.domain.builder.DateRangeBuilder;
import br.com.uol.pagseguro.api.common.domain.builder.ParameterBuilder;
import br.com.uol.pagseguro.api.common.domain.builder.PaymentItemBuilder;
import br.com.uol.pagseguro.api.common.domain.builder.PhoneBuilder;
import br.com.uol.pagseguro.api.common.domain.builder.PreApprovalBuilder;
import br.com.uol.pagseguro.api.common.domain.builder.SenderBuilder;
import br.com.uol.pagseguro.api.common.domain.builder.ShippingBuilder;
import br.com.uol.pagseguro.api.common.domain.enums.Currency;
import br.com.uol.pagseguro.api.exception.PagSeguroBadRequestException;
import br.com.uol.pagseguro.api.exception.PagSeguroLibException;
import br.com.uol.pagseguro.api.exception.ServerError;
import br.com.uol.pagseguro.api.exception.ServerErrors;
import br.com.uol.pagseguro.api.http.HttpMethod;
import br.com.uol.pagseguro.api.http.HttpRequestBody;
import br.com.uol.pagseguro.api.http.HttpResponse;
import br.com.uol.pagseguro.api.preapproval.cancel.CancelledPreApproval;
import br.com.uol.pagseguro.api.preapproval.cancel.PreApprovalCancellation;
import br.com.uol.pagseguro.api.preapproval.cancel.PreApprovalCancellationBuilder;

import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyMap;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@link PreApprovalsResource}: registration, charging and
 * cancellation of pre-approvals against a mocked HTTP client.
 *
 * @author PagSeguro Internet Ltda.
 */
@RunWith(PowerMockRunner.class)
public class PreApprovalsResourceTest extends Resource4Test {

  private PreApprovalsResource preApprovalsResource;

  private PreApprovalRegistration preApprovalRegistration;

  private PreApprovalCharging preApprovalCharging;

  private PreApprovalCancellation preApprovalCancellation;

  private DateFormat dateFormat;

  /**
   * Builds the resource under test plus fully-populated registration,
   * charging and cancellation fixtures.
   */
  @Before
  public void setUp() throws Exception {
    preApprovalsResource = new PreApprovalsResource(pagSeguro, httpClient);
    dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
    // BigDecimal amounts use the String constructor so the fixture values are
    // exact (new BigDecimal(9.99) would carry binary-fraction noise).
    preApprovalRegistration = new PreApprovalRegistrationBuilder()
        .withRedirectURL("redirectUrl")
        .withNotificationURL("notificationUrl")
        .withCurrency(Currency.BRL)
        .withExtraAmount(new BigDecimal("9.99"))
        .withReference("reference")
        .withShipping(new ShippingBuilder()
            .withAddress(new AddressBuilder()
                .withCountry("BRA")
                .withState("PA")
                .withPostalCode("99999999")
                .withCity("city")
                .withDistrict("district")
                .withStreet("street")
                .withNumber("999")
                .withComplement("complement"))
            .withType(ShippingType.Type.SEDEX)
            .withCost(new BigDecimal("99.99"))
        )
        .withSender(new SenderBuilder()
            .withEmail("email@email.com")
            .withName("name")
            .withPhone(new PhoneBuilder()
                .withAreaCode("16")
                .withNumber("123456789")
            )
            .withAddress(new AddressBuilder()
                .withCountry("BRA")
                .withState("PA")
                .withPostalCode("99999999")
                .withCity("city")
                .withDistrict("district")
                .withStreet("street")
                .withNumber("999")
                .withComplement("complement")
            )
            .withCPF("99999999999")
            .withHash("hash")
        )
        .withPreApproval(new PreApprovalBuilder()
            .withCharge("charge")
            .withName("name")
            .withDetails("details")
            .withAmountPerPayment(new BigDecimal("99.99"))
            .withMaxAmountPerPayment(new BigDecimal("99.99"))
            .withMaxTotalAmount(new BigDecimal("99.99"))
            .withMaxAmountPerPeriod(new BigDecimal("99.99"))
            .withMaxPaymentsPerPeriod(4)
            .withPeriod("period")
            .withDateRange(new DateRangeBuilder()
                .between(dateFormat.parse("2016/11/09 00:00:00"),
                    dateFormat.parse("2016/11/09 23:59:59"))
            )
        )
        .addParameter(new ParameterBuilder()
            .withName("param1")
            .withValue("value1")
        )
        .build();
    preApprovalCharging = new PreApprovalChargingBuilder()
        .withCode("code")
        .withReference("reference")
        .addItem(new PaymentItemBuilder()
            .withId("2")
            .withDescription("description2")
            .withAmount(new BigDecimal("99.99"))
            .withQuantity(7)
            .withWeight(123)
            .withShippingCost(new BigDecimal("99.99"))
        )
        .addParameter(new ParameterBuilder()
            .withName("param1")
            .withValue("value1")
        )
        .build();
    preApprovalCancellation = new PreApprovalCancellationBuilder()
        .withCode("code")
        .addParameter(new ParameterBuilder()
            .withName("param1")
            .withValue("value1")
        )
        .build();
  }

  /** A 200 response yields the pre-approval code and the sandbox redirect URL. */
  @Test
  public void shouldRegister() throws Exception {
    String responseAsString = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>" +
        "<preApprovalRequest>" +
        "<code>code</code>" +
        "<date>2016-11-09T00:00:00.000-03:00</date>" +
        "</preApprovalRequest>";
    HttpResponse response = new HttpResponse(200, responseAsString);
    when(httpClient.execute(any(HttpMethod.class), anyString(), anyMap(),
        any(HttpRequestBody.class))).thenReturn(response);

    RegisteredPreApproval registeredPreApproval = preApprovalsResource
        .register(preApprovalRegistration);

    assertEquals("code", registeredPreApproval.getPreApprovalCode());
    assertEquals("https://sandbox.pagseguro.uol.com.br/v2/pre-approvals/request.html?code=" +
        "code", registeredPreApproval.getRedirectURL());
  }

  /** A 400 response must surface the server error code and message. */
  @Test
  public void shouldThrowsBadRequestOnRegister() {
    try {
      String responseAsString = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>" +
          "<errors>" +
          "<error>" +
          "<code>00001</code>" +
          "<message>Currency is required.</message>" +
          "</error>" +
          "</errors>";
      HttpResponse response = new HttpResponse(400, responseAsString);
      when(httpClient.execute(any(HttpMethod.class), anyString(), anyMap(),
          any(HttpRequestBody.class))).thenReturn(response);

      preApprovalsResource.register(preApprovalRegistration);
      // Previously the test passed silently when no exception was thrown.
      fail("Expected PagSeguroBadRequestException");
    } catch (PagSeguroBadRequestException e) {
      ServerErrors errors = e.getErrors();
      ServerError error = errors.getErrors().iterator().next();
      // Integer.valueOf(1): the old `new Integer(00001)` was an octal literal
      // (value 1) boxed through a deprecated constructor.
      assertEquals(Integer.valueOf(1), error.getCode());
      assertEquals("Currency is required.", error.getMessage());
    } catch (Exception e) {
      // Previously swallowed; an unexpected exception must fail the test.
      fail("Unexpected exception: " + e);
    }
  }

  /** An I/O failure from the HTTP layer is wrapped in PagSeguroLibException. */
  @Test(expected = PagSeguroLibException.class)
  public void shouldThrowsErrorLibOnRegister() throws Exception {
    when(httpClient.execute(any(HttpMethod.class), anyString(), anyMap(),
        any(HttpRequestBody.class))).thenThrow(new IOException());

    preApprovalsResource.register(preApprovalRegistration);
  }

  /** A 200 response yields the transaction code of the charge. */
  @Test
  public void shouldCharge() throws Exception {
    String responseAsString = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>" +
        "<result>" +
        "<transactionCode>code</transactionCode>" +
        "<date>2016-11-09T00:00:00.000-02:00</date>" +
        "</result>";
    HttpResponse response = new HttpResponse(200, responseAsString);
    when(httpClient.execute(any(HttpMethod.class), anyString(), anyMap(),
        any(HttpRequestBody.class))).thenReturn(response);

    ChargedPreApproval chargedPreApproval = preApprovalsResource.charge(preApprovalCharging);

    assertEquals("code", chargedPreApproval.getTransactionCode());
  }

  /** A 400 response on charge must surface the server error code and message. */
  @Test
  public void shouldThrowsBadRequestOnCharge() {
    try {
      String responseAsString = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>" +
          "<errors>" +
          "<error>" +
          "<code>00001</code>" +
          "<message>Code is required.</message>" +
          "</error>" +
          "</errors>";
      HttpResponse response = new HttpResponse(400, responseAsString);
      when(httpClient.execute(any(HttpMethod.class), anyString(), anyMap(),
          any(HttpRequestBody.class))).thenReturn(response);

      preApprovalsResource.charge(preApprovalCharging);
      fail("Expected PagSeguroBadRequestException");
    } catch (PagSeguroBadRequestException e) {
      ServerErrors errors = e.getErrors();
      ServerError error = errors.getErrors().iterator().next();
      assertEquals(Integer.valueOf(1), error.getCode());
      assertEquals("Code is required.", error.getMessage());
    } catch (Exception e) {
      fail("Unexpected exception: " + e);
    }
  }

  /** An I/O failure on charge is wrapped in PagSeguroLibException. */
  @Test(expected = PagSeguroLibException.class)
  public void shouldThrowsErrorLibOnCharge() throws Exception {
    when(httpClient.execute(any(HttpMethod.class), anyString(), anyMap(),
        any(HttpRequestBody.class))).thenThrow(new IOException());

    preApprovalsResource.charge(preApprovalCharging);
  }

  /** A 200 response yields the cancellation status. */
  @Test
  public void shouldCancel() throws Exception {
    String responseAsString = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>" +
        "<result>" +
        "<date>2016-11-09T00:00:00.000-02:00</date>" +
        "<status>OK</status>" +
        "</result>";
    HttpResponse response = new HttpResponse(200, responseAsString);
    when(httpClient.execute(any(HttpMethod.class), anyString(), anyMap(),
        any(HttpRequestBody.class))).thenReturn(response);

    CancelledPreApproval cancelledPreApproval = preApprovalsResource
        .cancel(preApprovalCancellation);

    assertEquals("OK", cancelledPreApproval.getTransactionStatus());
  }

  /** A 400 response on cancel must surface the server error code and message. */
  @Test
  public void shouldThrowsBadRequestOnCancel() {
    try {
      String responseAsString = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>" +
          "<errors>" +
          "<error>" +
          "<code>00001</code>" +
          "<message>Code is required.</message>" +
          "</error>" +
          "</errors>";
      HttpResponse response = new HttpResponse(400, responseAsString);
      when(httpClient.execute(any(HttpMethod.class), anyString(), anyMap(),
          any(HttpRequestBody.class))).thenReturn(response);

      preApprovalsResource.cancel(preApprovalCancellation);
      fail("Expected PagSeguroBadRequestException");
    } catch (PagSeguroBadRequestException e) {
      ServerErrors errors = e.getErrors();
      ServerError error = errors.getErrors().iterator().next();
      assertEquals(Integer.valueOf(1), error.getCode());
      assertEquals("Code is required.", error.getMessage());
    } catch (Exception e) {
      fail("Unexpected exception: " + e);
    }
  }

  /** An I/O failure on cancel is wrapped in PagSeguroLibException. */
  @Test(expected = PagSeguroLibException.class)
  public void shouldThrowsErrorLibOnCancel() throws Exception {
    when(httpClient.execute(any(HttpMethod.class), anyString(), anyMap(),
        any(HttpRequestBody.class))).thenThrow(new IOException());

    preApprovalsResource.cancel(preApprovalCancellation);
  }
}
/*
 * Copyright 2016 Netbrasoft
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package br.com.netbrasoft.gnuob.generic.security;

import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.ACCESS_COLUMN_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.DESCRIPTION_COLUMN_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.GNUOB_ROLES_COLUMN_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.GNUOB_USERS_GNUOB_GROUPS_TABLE_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.GNUOB_USERS_GNUOB_SITES_TABLE_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.GNUOB_USERS_ID_COLUMN_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.GROUPS_ID_COLUMN_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.ID_COLUMN_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.NAME_COLUMN_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.PASSWORD_COLUMN_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.PASSWORD_REGEX;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.ROLE;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.ROOT_COLUMN_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.SITES_ID_COLUMN_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.USER_ENTITY_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.USER_TABLE_NAME;
import static br.com.netbrasoft.gnuob.generic.NetbrasoftSoapConstants.ZERO;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Sets.newHashSet;
import static java.lang.String.format;
import static java.util.stream.Collectors.counting;
import static javax.persistence.CascadeType.PERSIST;
import static javax.persistence.EnumType.STRING;
import static javax.persistence.FetchType.EAGER;

import java.util.Set;

import javax.persistence.Cacheable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.Enumerated;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;

import org.apache.velocity.context.Context;

import br.com.netbrasoft.gnuob.exception.GNUOpenBusinessServiceException;
import br.com.netbrasoft.gnuob.generic.content.contexts.IContextVisitor;

/**
 * JPA entity for an application user, carrying its access rule, credentials
 * and the groups, roles and sites it is associated with.
 */
@Cacheable(value = true)
@Entity(name = USER_ENTITY_NAME)
@Table(name = USER_TABLE_NAME)
@XmlRootElement(name = USER_ENTITY_NAME)
public class User extends AbstractAccess {

  private static final long serialVersionUID = 2439569681567208145L;

  private Rule access;
  private String description;
  private Set<Group> groups;
  private String name;
  private String password;
  private Set<Role> roles;
  private Boolean root;
  private Set<Site> sites;

  /** Creates a user with empty (mutable) group, role and site sets. */
  public User() {
    groups = newHashSet();
    roles = newHashSet();
    sites = newHashSet();
  }

  private User(final String name) {
    this();
    this.name = name;
  }

  public static User getInstance() {
    return new User();
  }

  public static User getInstance(final String name) {
    return new User(name);
  }

  /**
   * A user is detached when its own state or any associated site or group
   * is detached from the persistence context.
   */
  @Transient
  @Override
  public boolean isDetached() {
    return newArrayList(isAbstractTypeDetached(), isSitesDetached(), isGroupsDetached()).stream()
        .filter(e -> e.booleanValue()).count() > ZERO;
  }

  @Transient
  private boolean isSitesDetached() {
    return sites != null && sites.stream().filter(e -> e.isDetached()).collect(counting()).intValue() > ZERO;
  }

  @Transient
  private boolean isGroupsDetached() {
    return groups != null && groups.stream().filter(e -> e.isDetached()).collect(counting()).intValue() > ZERO;
  }

  /**
   * Validates the password against the configured policy before persisting.
   *
   * @throws GNUOpenBusinessServiceException when the password is missing or
   *         does not match {@code PASSWORD_REGEX}. (The null check avoids the
   *         NullPointerException the original code threw for a missing
   *         password; callers now always get the meaningful service error.)
   */
  @Override
  public void prePersist() {
    if (password == null || !password.matches(PASSWORD_REGEX)) {
      throw new GNUOpenBusinessServiceException(
          format("Given user [%s] doesn't contain a valid password, verify that the given password is valid", name));
    }
  }

  /** Updates are validated with the same rules as inserts. */
  @Override
  public void preUpdate() {
    prePersist();
  }

  @Override
  public Context accept(final IContextVisitor visitor) {
    return visitor.visit(this);
  }

  @Column(name = ACCESS_COLUMN_NAME, nullable = false)
  @Enumerated(STRING)
  public Rule getAccess() {
    return access;
  }

  @XmlElement
  @Column(name = DESCRIPTION_COLUMN_NAME)
  public String getDescription() {
    return description;
  }

  @ManyToMany(cascade = {PERSIST}, fetch = EAGER)
  @JoinTable(name = GNUOB_USERS_GNUOB_GROUPS_TABLE_NAME,
      joinColumns = {@JoinColumn(name = GNUOB_USERS_ID_COLUMN_NAME, referencedColumnName = ID_COLUMN_NAME)},
      inverseJoinColumns = {@JoinColumn(name = GROUPS_ID_COLUMN_NAME, referencedColumnName = ID_COLUMN_NAME)})
  public Set<Group> getGroups() {
    return groups;
  }

  @XmlElement(required = true)
  @Column(name = NAME_COLUMN_NAME, nullable = false, unique = true)
  public String getName() {
    return name;
  }

  @XmlElement(required = true)
  @Column(name = PASSWORD_COLUMN_NAME, nullable = false)
  public String getPassword() {
    return password;
  }

  @ElementCollection(targetClass = Role.class, fetch = EAGER)
  @JoinTable(name = GNUOB_ROLES_COLUMN_NAME,
      joinColumns = @JoinColumn(name = GNUOB_USERS_ID_COLUMN_NAME))
  @Column(name = ROLE)
  @Enumerated(STRING)
  public Set<Role> getRoles() {
    return roles;
  }

  // Root flag is persisted but never exposed through the XML binding.
  @XmlTransient
  @Column(name = ROOT_COLUMN_NAME)
  public Boolean getRoot() {
    return root;
  }

  @ManyToMany(cascade = {PERSIST}, fetch = EAGER)
  @JoinTable(name = GNUOB_USERS_GNUOB_SITES_TABLE_NAME,
      joinColumns = {@JoinColumn(name = GNUOB_USERS_ID_COLUMN_NAME, referencedColumnName = ID_COLUMN_NAME)},
      inverseJoinColumns = {@JoinColumn(name = SITES_ID_COLUMN_NAME, referencedColumnName = ID_COLUMN_NAME)})
  public Set<Site> getSites() {
    return sites;
  }

  public void setAccess(final Rule access) {
    this.access = access;
  }

  public void setDescription(final String description) {
    this.description = description;
  }

  public void setGroups(final Set<Group> groups) {
    this.groups = groups;
  }

  public void setName(final String name) {
    this.name = name;
  }

  public void setPassword(final String password) {
    this.password = password;
  }

  public void setRoles(final Set<Role> roles) {
    this.roles = roles;
  }

  public void setRoot(final Boolean root) {
    this.root = root;
  }

  public void setSites(final Set<Site> sites) {
    this.sites = sites;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.io.network.buffer; import org.apache.flink.util.TestLogger; import org.apache.flink.shaded.guava18.com.google.common.collect.Lists; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.powermock.api.mockito.PowerMockito.spy; /** * Tests for the {@link LocalBufferPool}. 
*/
public class LocalBufferPoolTest extends TestLogger {

	private static final int numBuffers = 1024;

	private static final int memorySegmentSize = 128;

	private NetworkBufferPool networkBufferPool;

	private BufferPool localBufferPool;

	private static final ExecutorService executor = Executors.newCachedThreadPool();

	/** Fresh global pool plus a one-buffer local pool before each test. */
	@Before
	public void setupLocalBufferPool() {
		networkBufferPool = new NetworkBufferPool(numBuffers, memorySegmentSize, 1);
		localBufferPool = new LocalBufferPool(networkBufferPool, 1);

		assertEquals(0, localBufferPool.getNumberOfAvailableMemorySegments());
	}

	/** Every test must hand all segments back to the global pool. */
	@After
	public void destroyAndVerifyAllBuffersReturned() throws IOException {
		if (!localBufferPool.isDestroyed()) {
			localBufferPool.lazyDestroy();
		}

		String msg = "Did not return all buffers to memory segment pool after test.";
		assertEquals(msg, numBuffers, networkBufferPool.getNumberOfAvailableMemorySegments());
		// no other local buffer pools used than the one above, but call just in case
		networkBufferPool.destroyAllBufferPools();
		networkBufferPool.destroy();
	}

	@AfterClass
	public static void shutdownExecutor() {
		executor.shutdownNow();
	}

	/** Requesting beyond the pool size yields null instead of blocking. */
	@Test
	public void testRequestMoreThanAvailable() throws IOException {
		localBufferPool.setNumBuffers(numBuffers);

		List<Buffer> requests = new ArrayList<Buffer>(numBuffers);

		for (int i = 1; i <= numBuffers; i++) {
			Buffer buffer = localBufferPool.requestBuffer();

			assertEquals(i, getNumRequestedFromMemorySegmentPool());
			assertNotNull(buffer);

			requests.add(buffer);
		}

		{
			// One more...
			Buffer buffer = localBufferPool.requestBuffer();
			assertEquals(numBuffers, getNumRequestedFromMemorySegmentPool());
			assertNull(buffer);
		}

		for (Buffer buffer : requests) {
			buffer.recycleBuffer();
		}
	}

	/** A destroyed pool rejects further requests. */
	@Test
	public void testRequestAfterDestroy() throws IOException {
		localBufferPool.lazyDestroy();

		try {
			localBufferPool.requestBuffer();
			fail("Call should have failed with an IllegalStateException");
		} catch (IllegalStateException e) {
			// we expect exactly that
		}
	}

	/** Buffers recycled after destroy go straight back to the global pool. */
	@Test
	public void testRecycleAfterDestroy() throws IOException {
		localBufferPool.setNumBuffers(numBuffers);

		List<Buffer> requests = new ArrayList<Buffer>(numBuffers);

		for (int i = 0; i < numBuffers; i++) {
			requests.add(localBufferPool.requestBuffer());
		}

		localBufferPool.lazyDestroy();

		// All buffers have been requested, but can not be returned yet.
		assertEquals(numBuffers, getNumRequestedFromMemorySegmentPool());

		// Recycle should return buffers to memory segment pool
		for (Buffer buffer : requests) {
			buffer.recycleBuffer();
		}
	}

	/** Shrinking the pool returns excess buffers lazily as they are recycled. */
	@Test
	public void testRecycleExcessBuffersAfterRecycling() throws Exception {
		localBufferPool.setNumBuffers(numBuffers);

		List<Buffer> requests = new ArrayList<Buffer>(numBuffers);

		// Request all buffers
		for (int i = 1; i <= numBuffers; i++) {
			requests.add(localBufferPool.requestBuffer());
		}

		assertEquals(numBuffers, getNumRequestedFromMemorySegmentPool());

		// Reduce the number of buffers in the local pool
		localBufferPool.setNumBuffers(numBuffers / 2);

		// Need to wait until we recycle the buffers
		assertEquals(numBuffers, getNumRequestedFromMemorySegmentPool());

		for (int i = 1; i < numBuffers / 2; i++) {
			requests.remove(0).recycleBuffer();
			assertEquals(numBuffers - i, getNumRequestedFromMemorySegmentPool());
		}

		for (Buffer buffer : requests) {
			buffer.recycleBuffer();
		}
	}

	/** Shrinking an idle pool releases excess available segments immediately. */
	@Test
	public void testRecycleExcessBuffersAfterChangingNumBuffers() throws Exception {
		localBufferPool.setNumBuffers(numBuffers);

		List<Buffer> requests = new ArrayList<Buffer>(numBuffers);

		// Request all buffers
		for (int i = 1; i <= numBuffers; i++) {
			requests.add(localBufferPool.requestBuffer());
		}

		// Recycle all
		for (Buffer buffer : requests) {
			buffer.recycleBuffer();
		}

		assertEquals(numBuffers, localBufferPool.getNumberOfAvailableMemorySegments());

		localBufferPool.setNumBuffers(numBuffers / 2);

		assertEquals(numBuffers / 2, localBufferPool.getNumberOfAvailableMemorySegments());
	}

	/** The pool refuses to shrink below its required minimum of one buffer. */
	@Test(expected = IllegalArgumentException.class)
	public void testSetLessThanRequiredNumBuffers() throws IOException {
		localBufferPool.setNumBuffers(1);

		localBufferPool.setNumBuffers(0);
	}

	// ------------------------------------------------------------------------
	// Pending requests and integration with buffer futures
	// ------------------------------------------------------------------------

	@Test
	public void testPendingRequestWithListenersAfterRecycle() throws Exception {
		BufferListener twoTimesListener = createBufferListener(2);
		BufferListener oneTimeListener = createBufferListener(1);

		localBufferPool.setNumBuffers(2);

		Buffer available1 = localBufferPool.requestBuffer();
		Buffer available2 = localBufferPool.requestBuffer();

		assertNull(localBufferPool.requestBuffer());

		assertTrue(localBufferPool.addBufferListener(twoTimesListener));
		assertTrue(localBufferPool.addBufferListener(oneTimeListener));

		// Recycle the first buffer to notify both of the above listeners once
		// and the twoTimesListener will be added into the registeredListeners
		// queue of buffer pool again
		available1.recycleBuffer();

		verify(oneTimeListener, times(1)).notifyBufferAvailable(any(Buffer.class));
		verify(twoTimesListener, times(1)).notifyBufferAvailable(any(Buffer.class));

		// Recycle the second buffer to only notify the twoTimesListener
		available2.recycleBuffer();

		verify(oneTimeListener, times(1)).notifyBufferAvailable(any(Buffer.class));
		verify(twoTimesListener, times(2)).notifyBufferAvailable(any(Buffer.class));
	}

	/** Destroying the pool notifies pending listeners instead of serving them. */
	@Test
	@SuppressWarnings("unchecked")
	public void testCancelPendingRequestsAfterDestroy() throws IOException {
		BufferListener listener = Mockito.mock(BufferListener.class);

		localBufferPool.setNumBuffers(1);

		Buffer available = localBufferPool.requestBuffer();
		Buffer unavailable = localBufferPool.requestBuffer();

		assertNull(unavailable);

		localBufferPool.addBufferListener(listener);

		localBufferPool.lazyDestroy();

		available.recycleBuffer();

		verify(listener, times(1)).notifyBufferDestroyed();
	}

	// ------------------------------------------------------------------------
	// Concurrent requests
	// ------------------------------------------------------------------------

	@Test
	@SuppressWarnings("unchecked")
	public void testConcurrentRequestRecycle() throws ExecutionException, InterruptedException, IOException {
		int numConcurrentTasks = 128;
		int numBuffersToRequestPerTask = 1024;

		localBufferPool.setNumBuffers(numConcurrentTasks);

		Future<Boolean>[] taskResults = new Future[numConcurrentTasks];
		for (int i = 0; i < numConcurrentTasks; i++) {
			taskResults[i] = executor.submit(new BufferRequesterTask(localBufferPool, numBuffersToRequestPerTask));
		}

		for (int i = 0; i < numConcurrentTasks; i++) {
			assertTrue(taskResults[i].get());
		}
	}

	/** A blocking request must fail fast when the pool is destroyed under it. */
	@Test
	public void testDestroyDuringBlockingRequest() throws Exception {
		// Config
		final int numberOfBuffers = 1;

		localBufferPool.setNumBuffers(numberOfBuffers);

		final CountDownLatch sync = new CountDownLatch(1);

		final Callable<List<Buffer>> requester = new Callable<List<Buffer>>() {

			// Request all buffers in a blocking manner.
			@Override
			public List<Buffer> call() throws Exception {
				final List<Buffer> requested = Lists.newArrayList();

				// Request all available buffers
				for (int i = 0; i < numberOfBuffers; i++) {
					requested.add(localBufferPool.requestBufferBlocking());
				}

				// Notify that we've requested all buffers
				sync.countDown();

				// Try to request the next buffer (but pool should be destroyed either right before
				// the request or more likely during the request).
				try {
					localBufferPool.requestBufferBlocking();
					fail("Call should have failed with an IllegalStateException");
				} catch (IllegalStateException e) {
					// we expect exactly that
				}

				return requested;
			}
		};

		Future<List<Buffer>> f = executor.submit(requester);

		sync.await();

		localBufferPool.lazyDestroy();

		// Increase the likelihood that the requested is currently in the request call
		Thread.sleep(50);

		// This should return immediately if everything works as expected
		List<Buffer> requestedBuffers = f.get(60, TimeUnit.SECONDS);

		for (Buffer buffer : requestedBuffers) {
			buffer.recycleBuffer();
		}
	}

	/** Exercises a pool with both a minimum (1) and a maximum (2) size. */
	@Test
	public void testBoundedBuffer() throws Exception {
		localBufferPool.lazyDestroy();

		localBufferPool = new LocalBufferPool(networkBufferPool, 1, 2);
		assertEquals(0, localBufferPool.getNumberOfAvailableMemorySegments());
		assertEquals(2, localBufferPool.getMaxNumberOfMemorySegments());

		Buffer buffer1, buffer2;

		// check min number of buffers:
		localBufferPool.setNumBuffers(1);
		assertEquals(0, localBufferPool.getNumberOfAvailableMemorySegments());
		assertNotNull(buffer1 = localBufferPool.requestBuffer());
		assertEquals(0, localBufferPool.getNumberOfAvailableMemorySegments());
		assertNull(localBufferPool.requestBuffer());
		assertEquals(0, localBufferPool.getNumberOfAvailableMemorySegments());
		buffer1.recycleBuffer();
		assertEquals(1, localBufferPool.getNumberOfAvailableMemorySegments());

		// check max number of buffers:
		localBufferPool.setNumBuffers(2);
		assertEquals(1, localBufferPool.getNumberOfAvailableMemorySegments());
		assertNotNull(buffer1 = localBufferPool.requestBuffer());
		assertEquals(0, localBufferPool.getNumberOfAvailableMemorySegments());
		assertNotNull(buffer2 = localBufferPool.requestBuffer());
		assertEquals(0, localBufferPool.getNumberOfAvailableMemorySegments());
		assertNull(localBufferPool.requestBuffer());
		assertEquals(0, localBufferPool.getNumberOfAvailableMemorySegments());
		buffer1.recycleBuffer();
		assertEquals(1, localBufferPool.getNumberOfAvailableMemorySegments());
		buffer2.recycleBuffer();
		assertEquals(2, localBufferPool.getNumberOfAvailableMemorySegments());

		// try to set too large buffer size:
		localBufferPool.setNumBuffers(3);
		assertEquals(2, localBufferPool.getNumberOfAvailableMemorySegments());
		assertNotNull(buffer1 = localBufferPool.requestBuffer());
		assertEquals(1, localBufferPool.getNumberOfAvailableMemorySegments());
		assertNotNull(buffer2 = localBufferPool.requestBuffer());
		assertEquals(0, localBufferPool.getNumberOfAvailableMemorySegments());
		assertNull(localBufferPool.requestBuffer());
		assertEquals(0, localBufferPool.getNumberOfAvailableMemorySegments());
		buffer1.recycleBuffer();
		assertEquals(1, localBufferPool.getNumberOfAvailableMemorySegments());
		buffer2.recycleBuffer();
		assertEquals(2, localBufferPool.getNumberOfAvailableMemorySegments());

		// decrease size again
		localBufferPool.setNumBuffers(1);
		assertEquals(1, localBufferPool.getNumberOfAvailableMemorySegments());
		assertNotNull(buffer1 = localBufferPool.requestBuffer());
		assertEquals(0, localBufferPool.getNumberOfAvailableMemorySegments());
		assertNull(localBufferPool.requestBuffer());
		buffer1.recycleBuffer();
		assertEquals(1, localBufferPool.getNumberOfAvailableMemorySegments());
	}

	// ------------------------------------------------------------------------
	// Helpers
	// ------------------------------------------------------------------------

	/** Number of segments the global pool has currently handed out. */
	private int getNumRequestedFromMemorySegmentPool() {
		return networkBufferPool.getTotalNumberOfMemorySegments() - networkBufferPool.getNumberOfAvailableMemorySegments();
	}

	/**
	 * A spied listener that re-requests the buffer (BUFFER_USED_NEED_MORE)
	 * until it has been notified {@code notificationTimes} times.
	 */
	private BufferListener createBufferListener(int notificationTimes) {
		return spy(new BufferListener() {
			AtomicInteger times = new AtomicInteger(0);

			@Override
			public NotificationResult notifyBufferAvailable(Buffer buffer) {
				int newCount = times.incrementAndGet();
				buffer.recycleBuffer();
				if (newCount < notificationTimes) {
					return NotificationResult.BUFFER_USED_NEED_MORE;
				} else {
					return NotificationResult.BUFFER_USED_NO_NEED_MORE;
				}
			}

			@Override
			public void notifyBufferDestroyed() {
			}
		});
	}

	/** Requests and immediately recycles N buffers; false on any failure. */
	private static class BufferRequesterTask implements Callable<Boolean> {

		private final BufferProvider bufferProvider;

		private final int numBuffersToRequest;

		private BufferRequesterTask(BufferProvider bufferProvider, int numBuffersToRequest) {
			this.bufferProvider = bufferProvider;
			this.numBuffersToRequest = numBuffersToRequest;
		}

		@Override
		public Boolean call() throws Exception {
			try {
				for (int i = 0; i < numBuffersToRequest; i++) {
					Buffer buffer = bufferProvider.requestBufferBlocking();
					buffer.recycleBuffer();
				}
			} catch (Throwable t) {
				return false;
			}

			return true;
		}
	}
}
// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * AdGroupAdLabel.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter. */ package com.google.api.ads.adwords.axis.v201809.cm; /** * Manages the labels associated with an AdGroupAd. */ public class AdGroupAdLabel implements java.io.Serializable { /* The id of the adgroup containing the ad that the label to be * applied to. * <span class="constraint Required">This field is required * and should not be {@code null} when it is contained within {@link * Operator}s : ADD, REMOVE.</span> */ private java.lang.Long adGroupId; /* The id of the ad that the label to be applied to. * <span class="constraint Required">This field is required * and should not be {@code null} when it is contained within {@link * Operator}s : ADD, REMOVE.</span> */ private java.lang.Long adId; /* The id of an existing label to be applied to the adgroup ad. 
* <span class="constraint Required">This field is required and should * not be {@code null} when it is contained within {@link Operator}s * : ADD, REMOVE.</span> */ private java.lang.Long labelId; public AdGroupAdLabel() { } public AdGroupAdLabel( java.lang.Long adGroupId, java.lang.Long adId, java.lang.Long labelId) { this.adGroupId = adGroupId; this.adId = adId; this.labelId = labelId; } @Override public String toString() { return com.google.common.base.MoreObjects.toStringHelper(this.getClass()) .omitNullValues() .add("adGroupId", getAdGroupId()) .add("adId", getAdId()) .add("labelId", getLabelId()) .toString(); } /** * Gets the adGroupId value for this AdGroupAdLabel. * * @return adGroupId * The id of the adgroup containing the ad that the label to be * applied to. * <span class="constraint Required">This field is required * and should not be {@code null} when it is contained within {@link * Operator}s : ADD, REMOVE.</span> */ public java.lang.Long getAdGroupId() { return adGroupId; } /** * Sets the adGroupId value for this AdGroupAdLabel. * * @param adGroupId * The id of the adgroup containing the ad that the label to be * applied to. * <span class="constraint Required">This field is required * and should not be {@code null} when it is contained within {@link * Operator}s : ADD, REMOVE.</span> */ public void setAdGroupId(java.lang.Long adGroupId) { this.adGroupId = adGroupId; } /** * Gets the adId value for this AdGroupAdLabel. * * @return adId * The id of the ad that the label to be applied to. * <span class="constraint Required">This field is required * and should not be {@code null} when it is contained within {@link * Operator}s : ADD, REMOVE.</span> */ public java.lang.Long getAdId() { return adId; } /** * Sets the adId value for this AdGroupAdLabel. * * @param adId * The id of the ad that the label to be applied to. 
* <span class="constraint Required">This field is required * and should not be {@code null} when it is contained within {@link * Operator}s : ADD, REMOVE.</span> */ public void setAdId(java.lang.Long adId) { this.adId = adId; } /** * Gets the labelId value for this AdGroupAdLabel. * * @return labelId * The id of an existing label to be applied to the adgroup ad. * <span class="constraint Required">This field is required and should * not be {@code null} when it is contained within {@link Operator}s * : ADD, REMOVE.</span> */ public java.lang.Long getLabelId() { return labelId; } /** * Sets the labelId value for this AdGroupAdLabel. * * @param labelId * The id of an existing label to be applied to the adgroup ad. * <span class="constraint Required">This field is required and should * not be {@code null} when it is contained within {@link Operator}s * : ADD, REMOVE.</span> */ public void setLabelId(java.lang.Long labelId) { this.labelId = labelId; } private java.lang.Object __equalsCalc = null; public synchronized boolean equals(java.lang.Object obj) { if (!(obj instanceof AdGroupAdLabel)) return false; AdGroupAdLabel other = (AdGroupAdLabel) obj; if (obj == null) return false; if (this == obj) return true; if (__equalsCalc != null) { return (__equalsCalc == obj); } __equalsCalc = obj; boolean _equals; _equals = true && ((this.adGroupId==null && other.getAdGroupId()==null) || (this.adGroupId!=null && this.adGroupId.equals(other.getAdGroupId()))) && ((this.adId==null && other.getAdId()==null) || (this.adId!=null && this.adId.equals(other.getAdId()))) && ((this.labelId==null && other.getLabelId()==null) || (this.labelId!=null && this.labelId.equals(other.getLabelId()))); __equalsCalc = null; return _equals; } private boolean __hashCodeCalc = false; public synchronized int hashCode() { if (__hashCodeCalc) { return 0; } __hashCodeCalc = true; int _hashCode = 1; if (getAdGroupId() != null) { _hashCode += getAdGroupId().hashCode(); } if (getAdId() != null) { _hashCode += 
getAdId().hashCode(); } if (getLabelId() != null) { _hashCode += getLabelId().hashCode(); } __hashCodeCalc = false; return _hashCode; } // Type metadata private static org.apache.axis.description.TypeDesc typeDesc = new org.apache.axis.description.TypeDesc(AdGroupAdLabel.class, true); static { typeDesc.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "AdGroupAdLabel")); org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("adGroupId"); elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "adGroupId")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("adId"); elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "adId")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("labelId"); elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "labelId")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); } /** * Return type metadata object */ public static org.apache.axis.description.TypeDesc getTypeDesc() { return typeDesc; } /** * Get Custom Serializer */ public static org.apache.axis.encoding.Serializer getSerializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new 
org.apache.axis.encoding.ser.BeanSerializer( _javaType, _xmlType, typeDesc); } /** * Get Custom Deserializer */ public static org.apache.axis.encoding.Deserializer getDeserializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.BeanDeserializer( _javaType, _xmlType, typeDesc); } }
/*
 * Copyright 2015-2018 Igor Maznitsa.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.igormaznitsa.mindmap.print;

import com.igormaznitsa.mindmap.swing.services.UIComponentFactoryProvider;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.List;
import java.util.ResourceBundle;
import javax.annotation.Nonnull;
import javax.swing.Box;
import javax.swing.ButtonGroup;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JSeparator;
import javax.swing.JSpinner;
import javax.swing.SpinnerNumberModel;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

/**
 * Panel to tune parameters of print.
 *
 * <p>Edits a private copy of the {@link MMDPrintOptions} passed to the constructor;
 * the caller retrieves the tuned values via {@link #getOptions()}. The panel offers
 * four mutually exclusive scale modes (zoom percentage, fit-width, fit-height,
 * fit-to-single-page), each enabling only its own input control.
 *
 * @since 1.4.1
 */
public final class MMDPrintOptionsPanel extends JPanel {

  private static final long serialVersionUID = 4095304247486153265L;
  // Localized UI strings shared with the mind-map Swing panel bundle.
  private static final ResourceBundle BUNDLE = java.util.ResourceBundle.getBundle("com/igormaznitsa/mindmap/swing/panel/Bundle");

  // Working copy of the print options; mutated in place as the user changes controls.
  private final MMDPrintOptions options;

  // One radio button per scale mode; grouped in the constructor so they are exclusive.
  private final JRadioButton radioZoomTo = UIComponentFactoryProvider.findInstance().makeRadioButton();
  private final JRadioButton radioFitWidthTo = UIComponentFactoryProvider.findInstance().makeRadioButton();
  private final JRadioButton radioFitHeightTo = UIComponentFactoryProvider.findInstance().makeRadioButton();
  private final JRadioButton radioFitToPage = UIComponentFactoryProvider.findInstance().makeRadioButton();
  // Zoom percentage selector: items "25 %" .. "500 %" in steps of 25 (see constructor).
  private final JComboBox comboZoom = UIComponentFactoryProvider.findInstance().makeComboBox();
  // Page-count spinners for the fit-width / fit-height modes, range 1..100.
  private final JSpinner spinnerFitWidth = UIComponentFactoryProvider.findInstance().makeSpinner();
  private final JSpinner spinnerFitHeight = UIComponentFactoryProvider.findInstance().makeSpinner();

  /**
   * Builds the panel, initialized from (a copy of) the given options.
   *
   * @param options initial print options; not modified by this panel
   */
  public MMDPrintOptionsPanel(@Nonnull final MMDPrintOptions options) {
    super(new GridBagLayout());
    // Defensive copy so the caller's options object is never touched directly.
    this.options = new MMDPrintOptions(options);

    this.radioZoomTo.setText(BUNDLE.getString("MMDPrintOptionsPanel.ZoomTo"));
    this.radioFitWidthTo.setText(BUNDLE.getString("MMDPrintOptionsPanel.FitWithTo"));
    this.radioFitHeightTo.setText(BUNDLE.getString("MMDPrintOptionsPanel.FitHeightTo"));
    this.radioFitToPage.setText(BUNDLE.getString("MMDPrintOptionsPanel.FitToPage"));

    this.spinnerFitHeight.setModel(new SpinnerNumberModel(1, 1, 100, 1));
    this.spinnerFitWidth.setModel(new SpinnerNumberModel(1, 1, 100, 1));

    // Zoom choices: "25 %", "50 %", ... "500 %".
    final List<String> zoom = new ArrayList<>();
    for (int i = 25; i <= 500; i += 25) {
      zoom.add(i + " %");
    }
    this.comboZoom.setModel(new DefaultComboBoxModel(zoom.toArray()));

    final GridBagConstraints gbc = new GridBagConstraints();
    gbc.fill = GridBagConstraints.HORIZONTAL;
    gbc.weightx = 1;
    gbc.gridx = 0;
    gbc.gridy = 0;

    // Section title followed by a separator stretched across the remaining width.
    final JLabel titleLabel = UIComponentFactoryProvider.findInstance().makeLabel();
    titleLabel.setText(BUNDLE.getString("MMDPrintOptionsPanel.ZoomSectionTitle") + ' ');
    this.add(titleLabel, gbc);
    gbc.gridx = 1;
    gbc.weightx = 1000;
    this.add(new JSeparator(JSeparator.HORIZONTAL), gbc);

    gbc.weightx = 1;
    gbc.gridx = 0;
    gbc.gridy = 1;
    gbc.gridwidth = 2;
    gbc.fill = GridBagConstraints.BOTH;
    this.add(makeZoomPanel(), gbc);

    // Make the four scale-mode radios mutually exclusive.
    final ButtonGroup radioGroup = new ButtonGroup();
    radioGroup.add(this.radioFitHeightTo);
    radioGroup.add(this.radioFitWidthTo);
    radioGroup.add(this.radioZoomTo);
    radioGroup.add(this.radioFitToPage);

    // Reflect the initial scale type in the radios and enable only the matching control.
    selectZoomButton();
    enableZoomComponentsForState();

    // Any change on any control re-reads the whole UI state into `options`.
    final ChangeListener zoomChangeListener = new ChangeListener() {
      @Override
      public void stateChanged(@Nonnull ChangeEvent e) {
        fillZoomData();
      }
    };

    final ActionListener zoomActionListener = new ActionListener() {
      @Override
      public void actionPerformed(@Nonnull ActionEvent e) {
        fillZoomData();
      }
    };

    this.radioFitHeightTo.addActionListener(zoomActionListener);
    this.radioFitWidthTo.addActionListener(zoomActionListener);
    this.radioZoomTo.addActionListener(zoomActionListener);
    this.radioFitToPage.addActionListener(zoomActionListener);
    this.comboZoom.addActionListener(zoomActionListener);
    this.spinnerFitHeight.addChangeListener(zoomChangeListener);
    this.spinnerFitWidth.addChangeListener(zoomChangeListener);

    this.doLayout();
  }

  /**
   * Lays out the scale-mode controls (radios, zoom combo, page spinners)
   * and seeds them from the current option values.
   *
   * @return the assembled sub-panel, never null
   */
  @Nonnull
  private JPanel makeZoomPanel() {
    final JPanel result = UIComponentFactoryProvider.findInstance().makePanel();
    result.setLayout(new GridBagLayout());

    final GridBagConstraints gbc = new GridBagConstraints();
    gbc.gridx = 0;
    gbc.gridy = 0;
    gbc.anchor = GridBagConstraints.WEST;
    gbc.fill = GridBagConstraints.HORIZONTAL;
    result.add(this.radioZoomTo, gbc);
    gbc.gridx = 1;
    result.add(this.comboZoom, gbc);
    gbc.gridx = 0;
    gbc.gridy = 1;
    gbc.gridwidth = 2;
    result.add(this.radioFitToPage, gbc);

    // Horizontal struts in column 2 keep a fixed gap between the two control groups.
    gbc.anchor = GridBagConstraints.EAST;
    gbc.gridwidth = 1;
    gbc.gridx = 2;
    gbc.gridy = 0;
    result.add(Box.createHorizontalStrut(32), gbc);
    gbc.gridy = 1;
    result.add(Box.createHorizontalStrut(32), gbc);

    // Row 0, right group: fit-width radio + spinner + "page(s)" label.
    gbc.gridx = 3;
    gbc.gridy = 0;
    result.add(this.radioFitWidthTo, gbc);
    gbc.gridx = 4;
    result.add(this.spinnerFitWidth, gbc);
    gbc.gridx = 5;
    final JLabel page1 = UIComponentFactoryProvider.findInstance().makeLabel();
    page1.setText(' ' + BUNDLE.getString("MMDPrintOptionsPanel.Page_s"));
    result.add(page1, gbc);

    // Row 1, right group: fit-height radio + spinner + "page(s)" label.
    gbc.gridx = 3;
    gbc.gridy = 1;
    result.add(this.radioFitHeightTo, gbc);
    gbc.gridx = 4;
    gbc.gridy = 1;
    result.add(this.spinnerFitHeight, gbc);
    gbc.gridx = 5;
    final JLabel page2 = UIComponentFactoryProvider.findInstance().makeLabel();
    page2.setText(' ' + BUNDLE.getString("MMDPrintOptionsPanel.Page_s"));
    result.add(page2, gbc);

    // Map scale (e.g. 1.0 == 100%) onto a combo index (25% steps), clamped to the model range.
    this.comboZoom.setSelectedIndex(Math.max(0, Math.min(this.comboZoom.getModel().getSize() - 1, (int) Math.round(this.options.getScale() * 100 / 25) - 1)));
    this.spinnerFitWidth.getModel().setValue(this.options.getPagesInRow());
    this.spinnerFitHeight.getModel().setValue(this.options.getPagesInColumn());

    enableZoomComponentsForState();

    return result;
  }

  /** Selects the radio button matching the current scale type in the options. */
  private void selectZoomButton() {
    switch (this.options.getScaleType()) {
      case ZOOM:
        this.radioZoomTo.setSelected(true);
        break;
      case FIT_HEIGHT_TO_PAGES:
        this.radioFitHeightTo.setSelected(true);
        break;
      case FIT_WIDTH_TO_PAGES:
        this.radioFitWidthTo.setSelected(true);
        break;
      case FIT_TO_SINGLE_PAGE:
        this.radioFitToPage.setSelected(true);
        break;
    }
  }

  /**
   * Reads the current state of every control back into {@code options}
   * and re-enables the control belonging to the selected scale mode.
   * Registered as the single handler for all control events.
   */
  private void fillZoomData() {
    this.options.setPagesInColumn((Integer) this.spinnerFitHeight.getValue());
    this.options.setPagesInRow((Integer) this.spinnerFitWidth.getValue());
    // Inverse of the index mapping in makeZoomPanel(): index 0 -> 0.25, index 3 -> 1.0, ...
    this.options.setScale(((double) (this.comboZoom.getSelectedIndex() + 1) * 25) / 100.0d);

    if (this.radioFitHeightTo.isSelected()) {
      this.options.setScaleType(MMDPrintOptions.ScaleType.FIT_HEIGHT_TO_PAGES);
    } else if (this.radioFitToPage.isSelected()) {
      this.options.setScaleType(MMDPrintOptions.ScaleType.FIT_TO_SINGLE_PAGE);
    } else if (this.radioFitWidthTo.isSelected()) {
      this.options.setScaleType(MMDPrintOptions.ScaleType.FIT_WIDTH_TO_PAGES);
    } else if (this.radioZoomTo.isSelected()) {
      this.options.setScaleType(MMDPrintOptions.ScaleType.ZOOM);
    }
    enableZoomComponentsForState();
  }

  /**
   * Disables all mode-specific inputs, then re-enables only the one that
   * belongs to the current scale type (FIT_TO_SINGLE_PAGE needs no input).
   */
  private void enableZoomComponentsForState() {
    this.comboZoom.setEnabled(false);
    this.spinnerFitHeight.setEnabled(false);
    this.spinnerFitWidth.setEnabled(false);
    switch (this.options.getScaleType()) {
      case ZOOM:
        this.comboZoom.setEnabled(true);
        break;
      case FIT_HEIGHT_TO_PAGES:
        this.spinnerFitHeight.setEnabled(true);
        break;
      case FIT_WIDTH_TO_PAGES:
        this.spinnerFitWidth.setEnabled(true);
        break;
      case FIT_TO_SINGLE_PAGE:
        break;
      default:
        throw new Error("Unexpected state");
    }
  }

  /**
   * Returns the panel's working options object reflecting the current UI state.
   * Note this is the live internal instance, not a copy.
   */
  @Nonnull
  public MMDPrintOptions getOptions() {
    return this.options;
  }
}
/*
 *
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */

package org.apache.flex.compiler.internal.mxml;

import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

import javax.xml.parsers.DocumentBuilderFactory;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

import org.apache.flex.compiler.mxml.IMXMLManifestManager;
import org.apache.flex.compiler.mxml.IMXMLNamespaceMapping;
import org.apache.flex.compiler.problems.ICompilerProblem;
import org.apache.flex.compiler.problems.ManifestProblem;
import org.apache.flex.compiler.common.XMLName;
import org.apache.flex.compiler.filespecs.IFileSpecification;
import org.apache.flex.compiler.internal.projects.FlexProject;
import org.apache.flex.swc.ISWCComponent;
import org.apache.flex.swc.ISWC;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;

/**
 * Each {@code FlexProject} has an {@code MXMLManifestManager} to resolve MXML
 * tags to ActionScript classes, using the {@code <component>} tags inside
 * SWCs' catalog.xml files and any manifest files associated with the project.
 * This manager must be recreated whenever the library path, or a manifest
 * file, changes.
 */
public class MXMLManifestManager implements IMXMLManifestManager
{
    /**
     * Helper method to get the class name from the
     * class info. Takes care of checking if the class
     * info is null.
     *
     * @param classInfo may be null.
     * @return The name of the class if classInfo is not null,
     * null otherwise.
     */
    static String getClassName(ClassInfo classInfo)
    {
        return classInfo != null ? classInfo.className : null;
    }

    /**
     * Constructor.
     *
     * @param project The {@code FlexProject} for which this manifest manager
     * provides MXML-tag-to-ActionScript-classname mappings.
     */
    public MXMLManifestManager(FlexProject project)
    {
        // Loop over all the SWCs on the library path.
        for (ISWC swc : project.getLibraries())
        {
            addSWC(swc);
        }

        // Loop over all the manifest files that MXML namespace URIs
        // are mapped to.
        for (IMXMLNamespaceMapping namespaceMapping : project.getNamespaceMappings())
        {
            addManifest(project, namespaceMapping.getURI(),
                        namespaceMapping.getManifestFileName());
        }
    }

    // Maps an MXML tag name to a fully-qualified classname
    // such as "spark.components.Button"; null values in this map
    // indicate that there were inconsistent manifest entries
    // for the tag name.
    private Map<XMLName, ClassInfo> lookupMap = new HashMap<XMLName, ClassInfo>();

    // Maps a tag name to a fully-qualified class name. This map only contains
    // manifest entries where 'lookupOnly' is true. This is only really needed
    // for manifests specified in the -include-namespace option.
    private Map<XMLName, String> lookupOnlyMap = new HashMap<XMLName, String>();

    /**
     * Maps a fully qualified classname such as "spark.components.Button" to
     * an MXML tag name such as "&lt;s:Button&gt;".
     */
    private SetMultimap<String, XMLName> reverseLookupMap =
            HashMultimap.<String, XMLName>create();

    // Maps an MXML tag name to a list of (qname, path) duples,
    // for reporting inconsistencies or duplications between manifests.
    private HashMap<XMLName, ArrayList<ProblemEntry>> problemMap =
        new HashMap<XMLName, ArrayList<ProblemEntry>>();

    //
    // Object overrides
    //

    /**
     * For debugging only.
     * Lists all of the MXML-tag-to-ActionScript-classname mappings,
     * in sorted order. Useful for debugging manifest problems.
     */
    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder();

        TreeSet<XMLName> keys = new TreeSet<XMLName>(lookupMap.keySet());
        for (XMLName key : keys)
        {
            sb.append(key);
            sb.append(" -> ");
            sb.append(lookupMap.get(key));
            sb.append(", lookupOnly = ");
            sb.append(isLookupOnly(key));
            sb.append('\n');
        }

        return sb.toString();
    }

    //
    // IMXMLManifestManager implementations
    //

    @Override
    public String resolve(XMLName tagName)
    {
        // Returns null both for unknown tags and for tags nulled out
        // because of inconsistent manifest entries.
        return getClassName(lookupMap.get(tagName));
    }

    @Override
    public boolean isLookupOnly(XMLName tagName)
    {
        return lookupOnlyMap.get(tagName) != null;
    }

    @Override
    public Collection<XMLName> getTagNamesForClass(String className)
    {
        Collection<XMLName> result = reverseLookupMap.get(className);
        // Guava's SetMultimap.get() returns an empty set rather than null
        // for missing keys, so the null branch is a defensive fallback.
        if (result == null)
            return Collections.emptySet();
        else
            return Collections.unmodifiableCollection(result);
    }

    @Override
    public Collection<String> getQualifiedNamesForNamespaces(
            Set<String> namespaceURIs, boolean manifestEntriesOnly)
    {
        HashSet<String> qualifiedNames = new HashSet<String>();
        for (Map.Entry<XMLName, ClassInfo> entry : lookupMap.entrySet())
        {
            if (namespaceURIs.contains(entry.getKey().getXMLNamespace()))
            {
                ClassInfo classInfo = entry.getValue();
                // classInfo is null for tags with inconsistent mappings;
                // skip those. When manifestEntriesOnly is set, also skip
                // entries that came only from SWC catalogs.
                if (classInfo != null &&
                    (!manifestEntriesOnly || classInfo.fromManifest))
                {
                    qualifiedNames.add(classInfo.className);
                }
            }
        }
        return qualifiedNames;
    }

    //
    // Other methods
    //

    /**
     * Adds all the {@code <component>} mappings from one SWC's catalog.xml
     * to this manager.
     */
    private void addSWC(ISWC swc)
    {
        File swcFile = swc.getSWCFile();

        // Loop over all the <component> tags in the catalog.xml file
        // inside each SWC.
        for (ISWCComponent component : swc.getComponents())
        {
            String uri = component.getURI();
            String name = component.getName();
            XMLName tagName = new XMLName(uri, name);
            String qname = component.getQName();

            // Add the mapping info in the <component> tag
            // to the maps of this manifest manager.
            add(tagName, qname, swcFile.getAbsolutePath(), false);
        }
    }

    /**
     * Parses one manifest file and adds its {@code <component>} mappings,
     * associating each tag with the given namespace URI.
     */
    private void addManifest(FlexProject project, String uri, String manifestFileName)
    {
        Document manifestDocument = null;

        IFileSpecification manifestFileSpec =
            project.getWorkspace().getFileSpecification(manifestFileName);

        try
        {
            // NOTE(review): the parser is not hardened against external
            // entities (XXE); manifests appear to be treated as trusted
            // project files here - confirm before exposing to untrusted input.
            DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
            documentBuilderFactory.setIgnoringElementContentWhitespace(true);
            documentBuilderFactory.setCoalescing(true);
            documentBuilderFactory.setIgnoringComments(true);
            manifestDocument = documentBuilderFactory.newDocumentBuilder().parse(
                new InputSource(manifestFileSpec.createReader()));
        }
        catch (Exception e)
        {
            // TODO Report a problem.
            // Parse failures fall through to the println below;
            // manifestDocument stays null.
        }

        if (manifestDocument != null)
        {
            NodeList components = manifestDocument.getElementsByTagName("component");
            for (int i = 0; i < components.getLength(); i++)
            {
                Element component = (Element)components.item(i);
                if (component != null)
                {
                    // DOM getAttribute() returns "" (not null) for a missing
                    // attribute, so the null checks below never trigger.
                    String id = component.getAttribute("id");
                    if (id != null)
                    {
                        // TODO Why are we checking for dots in the tag name?
                        int lastDot = id.lastIndexOf(".");
                        if (lastDot != -1)
                            id = id.substring(lastDot + 1);
                    }
                    XMLName tagName = new XMLName(uri, id);
                    String className = component.getAttribute("class");
                    if (className != null)
                        className = className.replaceAll("/", ".");
                    String lookupOnlyStr = component.getAttribute("lookupOnly");
                    // A missing attribute yields "", which Boolean.valueOf()
                    // maps to false - the intended default.
                    boolean lookupOnly = lookupOnlyStr == null ? false : Boolean.valueOf(lookupOnlyStr).booleanValue();

                    if (id != null && className != null)
                    {
                        add(tagName, className, manifestFileName, true);
                        if (lookupOnly)
                            addLookupOnly(tagName, className);
                    }
                }
            }
        }
        else
        {
            // NOTE(review): consider routing this through the compiler's
            // problem reporting instead of stdout.
            System.out.println("Unable to parse " + manifestFileName);
        }
    }

    /**
     * Adds a mapping to this manifest manager.
     *
     * @param tagName An {@code XMLName} for an MXML tag.
     *
     * @param className The fully-qualified ActionScript classname
     * to which this tag maps.
     *
     * @param fileName The SWC or manifest file in which the mapping was declared.
     *
     * @param fromManifest true if the mapping came from a manifest file,
     * false if it came from a SWC catalog.
     */
    private void add(XMLName tagName, String className, String fileName,
                     boolean fromManifest)
    {
        ClassInfo existingInfo = lookupMap.get(tagName);

        if (!lookupMap.containsKey(tagName))
        {
            // The first manifest entry associating a className
            // with this tagName is being added.
            // The ClassInfo keeps track of whether it came
            // from the catalog of a SWC or from a manifest file.
            lookupMap.put(tagName, new ClassInfo(className, fromManifest));
            reverseLookupMap.put(className, tagName);
            return;
        }
        else
        {
            // A particular mapping might come first from a SWC and later
            // from a manifest. In that case, change the fromManifest flag to true;
            // otherwise getQualifiedNamesForNamespaces() won't return the
            // right names and COMPC won't link in all the classes that
            // were in manifests.
            // existingInfo is null when earlier inconsistent entries already
            // nulled out this tag; guard so we don't NPE on a third
            // conflicting mapping (previous code dereferenced unconditionally).
            if (fromManifest && existingInfo != null)
                existingInfo.fromManifest = true;
        }

        // If subsequent classNames added for this tagName aren't consistent,
        // null out the className in this map so that the tag won't
        // resolve to a class.
        String oldClassName = getClassName(existingInfo);
        if (className.equals(oldClassName))
            return;
        lookupMap.put(tagName, null);
        reverseLookupMap.remove(oldClassName, tagName);

        // Record the conflicting (qname, file) pair for getProblems().
        ProblemEntry entry = new ProblemEntry(className, fileName);
        ArrayList<ProblemEntry> list = problemMap.get(tagName);
        if (list == null)
        {
            list = new ArrayList<ProblemEntry>();
            problemMap.put(tagName, list);
        }
        list.add(entry);
    }

    /**
     * Adds a 'lookupOnly' mapping to this manifest manager.
     *
     * @param tagName An {@code XMLName} for an MXML tag.
     *
     * @param className The fully-qualified ActionScript classname
     * to which this tag maps.
     */
    private void addLookupOnly(XMLName tagName, String className)
    {
        if (!lookupOnlyMap.containsKey(tagName))
        {
            // The first manifest entry associating a className
            // with this tagName is being added; later duplicates are ignored.
            lookupOnlyMap.put(tagName, className);
        }
    }

    /**
     * Looks for inconsistent manifest mappings and returns
     * a collection of compiler problems for them.
     *
     * @return A collection of {@code ICompilerProblem} objects.
     */
    public Collection<ICompilerProblem> getProblems()
    {
        Collection<ICompilerProblem> problems = new HashSet<ICompilerProblem>();

        // Search the lookupMap for null values, which indicate
        // an inconsistent tagName->className mapping.
        for (XMLName key : lookupMap.keySet())
        {
            if (lookupMap.get(key) == null)
            {
                // The corresponding entry in the problemMap
                // has information about all the mapping of that tagName.
                List<ProblemEntry> list = problemMap.get(key);
                ICompilerProblem problem = new ManifestProblem(list);
                problems.add(problem);
            }
        }

        return problems;
    }

    /**
     * This inner class stores information about a class in a namespace mapping.
     */
    private static class ClassInfo
    {
        /**
         * Constructor.
         *
         * @param className fully qualified class name.
         * @param fromManifest true if the class name came from a manifest
         * file entry, false otherwise.
         */
        ClassInfo(String className, boolean fromManifest)
        {
            this.className = className;
            this.fromManifest = fromManifest;
        }

        public String className;
        public boolean fromManifest;
    }

    /**
     * This inner class is a simple duple struct used to keep track
     * of all the manifest mappings for a particular tag.
     * For example, &lt;whatever:Foo&gt; might map to a.b.Foo in
     * X.swc and Y.swc but c.d.Foo in Z.swc.
     * We keep track of all of this so that we can create compiler
     * problems describing where the inconsistencies are.
     */
    private static class ProblemEntry
    {
        ProblemEntry(String className, String fileName)
        {
            this.className = className;
            this.fileName = fileName;
        }

        @SuppressWarnings("unused")
        public String className;

        @SuppressWarnings("unused")
        public String fileName;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.aries.subsystem.itests; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.apache.aries.subsystem.core.archive.Clause; import org.apache.aries.subsystem.core.archive.RequireCapabilityHeader; import org.junit.Before; import org.junit.Test; import org.osgi.framework.Bundle; import org.osgi.framework.Constants; import org.osgi.service.resolver.ResolutionException; import org.osgi.service.subsystem.Subsystem; import org.osgi.service.subsystem.SubsystemConstants; import org.osgi.service.subsystem.SubsystemException; /* * Contains a series of tests related to resolution. 
*/ public class ResolutionTest extends SubsystemTest { /* * Subsystem-SymbolicName: application.a.esa * Subsystem-Content: bundle.a.jar */ private static final String APPLICATION_A = "application.a.esa"; /* * Subsystem-SymbolicName: application.b.esa * Subsystem-Content: bundle.d.jar */ private static final String APPLICATION_B = "application.b.esa"; /* * Subsystem-SymbolicName: application.c.esa * Subsystem-Content: bundle.e.jar */ private static final String APPLICATION_C = "application.c.esa"; /* * Subsystem-SymbolicName: application.d.esa * Subsystem-Content: bundle.f.jar */ private static final String APPLICATION_D = "application.d.esa"; /* Subsystem-SymbolicName: application.e.esa * Subsystem-Content: bundle.g.jar */ private static final String APPLICATION_E = "application.e.esa"; /* * Bundle-SymbolicName: bundle.a.jar * Require-Capability: a */ private static final String BUNDLE_A = "bundle.a.jar"; /* * Bundle-SymbolicName: bundle.b.jar * Provide-Capability: a * Require-Capability: b */ private static final String BUNDLE_B = "bundle.b.jar"; /* * Bundle-SymbolicName: bundle.c.jar * Provide-Capability: b */ private static final String BUNDLE_C = "bundle.c.jar"; /* * Bundle-SymbolicName: bundle.d.jar * Bundle-RequiredExecutionEnvironment: JavaSE-100.100 */ private static final String BUNDLE_D = "bundle.d.jar"; /* * Bundle-SymbolicName: bundle.e.jar * Bundle-RequiredExecutionEnvironment: J2SE-1.4, J2SE-1.5, J2SE-1.6,JavaSE-1.7 */ private static final String BUNDLE_E = "bundle.e.jar"; /* * Bundle-SymbolicName: bundle.f.jar * Bundle-NativeCode: \ * native.file; osname=Linux; processor=x86, \ * native.file; osname=Linux; processor=x86-64, \ * native.file; osname=Win32; processor=x86, \ * native.file; osname=Win32; processor=x86-64, \ * native.file; osname="mac os x"; processor=x86-64 */ private static final String BUNDLE_F = "bundle.f.jar"; /* * Bundle-SymbolicName: bundle.f.jar * Bundle-NativeCode: \ * native.file; osname=noMatch; processor=noMatch */ private 
static final String BUNDLE_G = "bundle.g.jar";

/**
 * Builds every bundle and subsystem archive used by the tests in this class.
 * The {@code createdApplications} flag makes the (relatively expensive)
 * archive generation run only once per test run, even though this is a
 * {@code @Before} method.
 */
@Before
public void createApplications() throws Exception {
    if (createdApplications) {
        return;
    }
    createBundleA();
    createBundleB();
    createBundleC();
    createBundleD();
    createBundleE();
    createBundleF();
    createBundleG();
    createApplicationA();
    createApplicationB();
    createApplicationC();
    createApplicationD();
    createApplicationE();
    createdApplications = true;
}

// Application A: subsystem whose content is bundle A (requires capability "a").
private static void createApplicationA() throws IOException {
    createApplicationAManifest();
    createSubsystem(APPLICATION_A, BUNDLE_A);
}

private static void createApplicationAManifest() throws IOException {
    Map<String, String> attributes = new HashMap<String, String>();
    attributes.put(SubsystemConstants.SUBSYSTEM_SYMBOLICNAME, APPLICATION_A);
    createManifest(APPLICATION_A + ".mf", attributes);
}

// Application B: subsystem whose content is bundle D (unsatisfiable BREE).
private static void createApplicationB() throws IOException {
    createApplicationBManifest();
    createSubsystem(APPLICATION_B, BUNDLE_D);
}

private static void createApplicationBManifest() throws IOException {
    Map<String, String> attributes = new HashMap<String, String>();
    attributes.put(SubsystemConstants.SUBSYSTEM_SYMBOLICNAME, APPLICATION_B);
    createManifest(APPLICATION_B + ".mf", attributes);
}

// Application C: subsystem whose content is bundle E (several BREEs, at least
// one of which should be satisfiable on the test platform).
private static void createApplicationC() throws IOException {
    createApplicationCManifest();
    createSubsystem(APPLICATION_C, BUNDLE_E);
}

private static void createApplicationCManifest() throws IOException {
    Map<String, String> attributes = new HashMap<String, String>();
    attributes.put(SubsystemConstants.SUBSYSTEM_SYMBOLICNAME, APPLICATION_C);
    createManifest(APPLICATION_C + ".mf", attributes);
}

// Application D: subsystem whose content is bundle F (Bundle-NativeCode with
// clauses covering the common test platforms).
private static void createApplicationD() throws IOException {
    createApplicationDManifest();
    createSubsystem(APPLICATION_D, BUNDLE_F);
}

private static void createApplicationDManifest() throws IOException {
    Map<String, String> attributes = new HashMap<String, String>();
    attributes.put(SubsystemConstants.SUBSYSTEM_SYMBOLICNAME, APPLICATION_D);
    createManifest(APPLICATION_D + ".mf", attributes);
}

// Application E: subsystem whose content is bundle G (Bundle-NativeCode that
// matches no platform).
private static void createApplicationE() throws IOException {
    createApplicationEManifest();
    createSubsystem(APPLICATION_E, BUNDLE_G);
}

private static void createApplicationEManifest() throws IOException {
    Map<String, String> attributes = new HashMap<String, String>();
    attributes.put(SubsystemConstants.SUBSYSTEM_SYMBOLICNAME, APPLICATION_E);
    createManifest(APPLICATION_E + ".mf", attributes);
}

// Bundle A requires capability "a".
private void createBundleA() throws IOException {
    createBundle(name(BUNDLE_A), new Header(Constants.REQUIRE_CAPABILITY, "a"));
}

// Bundle B provides capability "a" and in turn requires capability "b".
private void createBundleB() throws IOException {
    createBundle(name(BUNDLE_B), provideCapability("a"), requireCapability("b"));
}

// Bundle C provides capability "b".
private void createBundleC() throws IOException {
    createBundle(name(BUNDLE_C), provideCapability("b"));
}

// Bundle D requires an execution environment that will never exist.
@SuppressWarnings("deprecation")
private void createBundleD() throws IOException {
    createBundle(name(BUNDLE_D),
            new Header(Constants.BUNDLE_REQUIREDEXECUTIONENVIRONMENT, "JavaSE-100.100"));
}

// Bundle E lists several execution environments; installation should succeed
// as long as one of them matches the running VM.
@SuppressWarnings("deprecation")
private void createBundleE() throws IOException {
    createBundle(name(BUNDLE_E),
            new Header(Constants.BUNDLE_REQUIREDEXECUTIONENVIRONMENT,
                    "J2SE-1.4, J2SE-1.5, J2SE-1.6,JavaSE-1.7"));
}

// Bundle F declares native code clauses for the platforms the test suite is
// expected to run on.
private void createBundleF() throws IOException {
    createBundle(Collections.singletonList("native.file"), name(BUNDLE_F),
            new Header(Constants.BUNDLE_NATIVECODE,
                    "native.file; osname=Linux; processor=x86,"
                            + "native.file; osname=Linux; processor=x86-64,"
                            + "native.file; osname=Win32; processor=x86,"
                            + "native.file; osname=Win32; processor=x86-64,"
                            + "native.file; osname=\"MacOSX\"; processor=x86-64"));
}

// Bundle G declares native code that matches no platform at all.
private void createBundleG() throws IOException {
    createBundle(Collections.singletonList("native.file"), name(BUNDLE_G),
            new Header(Constants.BUNDLE_NATIVECODE,
                    "native.file; osname=noMatch; processor=noMatch"));
}

/*
 * Test that the right regions are used when validating capabilities.
 *
 * Application A contains a content bundle requiring capability A. Bundle B
 * provides capability A and is available as an installable resource from a
 * repository service. Bundle B also requires capability B. Bundle C is an
 * already installed resource in the root subsystem providing capability B.
 * When validating capability A, the subsystem should use the root region as
 * the from region, and its own region as the to region. When validating
 * capability B, the subsystem should use the root region as the from region
 * as well as for the to region.
 */
@Test
public void testContentWithNonConstituentDependencyWithNonConstituentDependency() throws Exception {
    // Register a repository service containing bundle B requiring
    // capability B and providing capability A.
    registerRepositoryService(BUNDLE_B);
    Subsystem root = getRootSubsystem();
    // Install unmanaged bundle C providing capability B as a constituent
    // of the root subsystem.
    Bundle bundleC = installBundleFromFile(BUNDLE_C, root);
    try {
        // Install application A with content bundle A requiring
        // capability A.
        Subsystem applicationA = installSubsystemFromFile(APPLICATION_A);
        // Make sure the Require-Capability exists for capability a...
        assertHeaderExists(applicationA, Constants.REQUIRE_CAPABILITY);
        // ...but not for capability b.
        RequireCapabilityHeader header = new RequireCapabilityHeader(
                applicationA.getSubsystemHeaders(null).get(Constants.REQUIRE_CAPABILITY));
        assertEquals("Wrong number of clauses", 1, header.getClauses().size());
        Clause clause = header.getClauses().iterator().next();
        assertEquals("Wrong path", "a", clause.getPath());
        assertEquals("Wrong resolution directive",
                Constants.RESOLUTION_MANDATORY,
                clause.getDirective(Constants.RESOLUTION_DIRECTIVE).getValue());
        assertEquals("Wrong effective directive",
                Constants.EFFECTIVE_RESOLVE,
                clause.getDirective(Constants.EFFECTIVE_DIRECTIVE).getValue());
        try {
            // Make sure the runtime resolution works as well.
            applicationA.start();
        } catch (SubsystemException e) {
            fail("Application A should have started");
        } finally {
            stopAndUninstallSubsystemSilently(applicationA);
        }
    } catch (SubsystemException e) {
        fail("Application A should have installed: " + e.getMessage());
    } finally {
        uninstallSilently(bundleC);
    }
}

/*
 * BREE headers must be converted into osgi.ee requirements.
 *
 * The subsystem should fail to resolve and install if the required
 * execution environment is not present.
 */
@Test
public void testMissingBundleRequiredExecutionEnvironment() throws Exception {
    Subsystem applicationB = null;
    try {
        applicationB = installSubsystemFromFile(APPLICATION_B);
        fail("Missing BREE should result in installation failure");
    } catch (Exception e) {
        assertTrue("Installation failure should be due to resolution error",
                e.getCause() instanceof ResolutionException);
    } finally {
        uninstallSubsystemSilently(applicationB);
    }
}

/*
 * BREE headers must be converted into osgi.ee requirements.
 *
 * The subsystem should resolve and install if at least one of the specified
 * execution environments is present.
 */
@Test
public void testMultipleBundleRequiredExecutionEnvironments() throws Exception {
    Subsystem applicationC = null;
    try {
        applicationC = installSubsystemFromFile(APPLICATION_C);
    } catch (Exception e) {
        e.printStackTrace();
        fail("Installation should succeed when at least one BREE is present");
    } finally {
        uninstallSubsystemSilently(applicationC);
    }
}

// Install and start should both succeed when at least one Bundle-NativeCode
// clause matches the running platform.
@Test
public void testNativeCodeRequirement() throws Exception {
    Subsystem applicationD = null;
    try {
        applicationD = installSubsystemFromFile(APPLICATION_D);
        applicationD.start();
    } catch (Exception e) {
        e.printStackTrace();
        fail("Installation should succeed for Bundle-NativeCode");
    } finally {
        uninstallSubsystemSilently(applicationD);
    }
}

// A Bundle-NativeCode header with no matching clause: installation currently
// succeeds (see TODO below) and the failure only surfaces on start.
@Test
public void testMissingNativeCodeRequirement() throws Exception {
    Subsystem applicationE = null;
    try {
        applicationE = installSubsystemFromFile(APPLICATION_E);
        // TODO this should fail to install
    } catch (SubsystemException e) {
        e.printStackTrace();
        fail("Installation should succeed for Bundle-NativeCode");
    }
    try {
        applicationE.start();
        fail("Expected to fail to install");
    } catch (Exception e) {
        // expected
    } finally {
        uninstallSubsystemSilently(applicationE);
    }
}
}
package org.ovirt.engine.api.restapi.resource; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Locale; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.UriInfo; import org.easymock.classextension.EasyMock; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.ovirt.engine.api.common.invocation.Current; import org.ovirt.engine.api.common.security.auth.Principal; import org.ovirt.engine.api.model.BootDevice; import org.ovirt.engine.api.model.Capabilities; import org.ovirt.engine.api.model.CPU; import org.ovirt.engine.api.model.CustomProperty; import org.ovirt.engine.api.model.DiskFormat; import org.ovirt.engine.api.model.DiskInterface; import org.ovirt.engine.api.model.DiskType; import org.ovirt.engine.api.model.DisplayType; import org.ovirt.engine.api.model.ErrorHandlingOptions; import org.ovirt.engine.api.model.FenceType; import org.ovirt.engine.api.model.NicInterface; import org.ovirt.engine.api.model.OsType; import org.ovirt.engine.api.model.PowerManagement; import org.ovirt.engine.api.model.Option; import org.ovirt.engine.api.model.SchedulingPolicyType; import org.ovirt.engine.api.model.StorageDomainType; import org.ovirt.engine.api.model.StorageType; import org.ovirt.engine.api.model.Version; import org.ovirt.engine.api.model.VmAffinities; import org.ovirt.engine.api.model.VmType; import org.ovirt.engine.api.model.VersionCaps; import org.ovirt.engine.api.restapi.logging.MessageBundle; import org.ovirt.engine.api.restapi.util.SessionHelper; import org.ovirt.engine.api.restapi.utils.VersionUtils; import org.ovirt.engine.core.common.businessentities.ActionGroup; import org.ovirt.engine.core.common.config.Config; import org.ovirt.engine.core.common.interfaces.BackendLocal; import org.ovirt.engine.core.common.queries.ConfigurationValues; import org.ovirt.engine.core.common.queries.GetConfigurationValueParameters; import 
org.ovirt.engine.core.common.queries.VdcQueryType; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import static org.easymock.EasyMock.expect; import static org.powermock.api.easymock.PowerMock.createMock; import static org.powermock.api.easymock.PowerMock.mockStatic; import static org.powermock.api.easymock.PowerMock.replayAll; import static org.powermock.api.easymock.PowerMock.verifyAll; @RunWith(PowerMockRunner.class) @PrepareForTest( { Config.class }) public class BackendCapabilitiesResourceTest extends AbstractBackendResourceTest { BackendCapabilitiesResource resource; private static final Version VERSION_2_3 = new Version() {{ major = 2; minor = 3; }}; public BackendCapabilitiesResourceTest() { resource = new BackendCapabilitiesResource(); } protected BackendCapabilitiesResourceTest(BackendCapabilitiesResource resource) { this.resource = resource; } protected void setUriInfo(UriInfo uriInfo) { resource.setUriInfo(uriInfo); } @After public void tearDown() { verifyAll(); } @Ignore @Test public void testGet() throws Exception { mockStatic(Config.class); HashSet<org.ovirt.engine.core.compat.Version> supportedVersions = new HashSet<org.ovirt.engine.core.compat.Version>(); supportedVersions.add(new org.ovirt.engine.core.compat.Version(1, 5)); supportedVersions.add(new org.ovirt.engine.core.compat.Version(10, 3)); //expect(Config.GetValue(ConfigValues.SupportedClusterLevels)).andReturn(supportedVersions); setUpGetEntityExpectations(VdcQueryType.GetConfigurationValue, GetConfigurationValueParameters.class, new String[] { "ConfigValue"}, new Object[] { ConfigurationValues.SupportedClusterLevels}, supportedVersions); //expect(Config.GetValue(ConfigValues.ServerCPUList, "1.5")).andReturn("0:bar:0:foo"); setUpGetEntityExpectations(VdcQueryType.GetConfigurationValue, GetConfigurationValueParameters.class, new String[] { "Version", "ConfigValue" }, new Object[] { "1.5", ConfigurationValues.ServerCPUList 
}, "0:bar:0:foo"); //expect(Config.GetValue(ConfigValues.ServerCPUList, "10.3")).andReturn("15:foo:1,2,3:bar"); setUpGetEntityExpectations(VdcQueryType.GetConfigurationValue, GetConfigurationValueParameters.class, new String[] { "Version", "ConfigValue" }, new Object[] { "10.3", ConfigurationValues.ServerCPUList }, "15:foo:1,2,3:bar"); //expect(Config.GetValue(ConfigValues.VdsFenceOptionMapping, "1.5")).andReturn("foo:one=1,two=2"); setUpGetEntityExpectations(VdcQueryType.GetConfigurationValue, GetConfigurationValueParameters.class, new String[] { "Version", "ConfigValue" }, new Object[] { "1.5", ConfigurationValues.VdsFenceOptionMapping }, "foo:one=1,two=2"); //expect(Config.GetValue(ConfigValues.VdsFenceOptionTypes, "1.5")).andReturn("one=int,two=bool"); setUpGetEntityExpectations(VdcQueryType.GetConfigurationValue, GetConfigurationValueParameters.class, new String[] { "Version", "ConfigValue" }, new Object[] { "1.5", ConfigurationValues.VdsFenceOptionTypes }, "one=int,two=bool"); //expect(Config.GetValue(ConfigValues.VdsFenceOptionMapping, "10.3")).andReturn("foo:one=1,two=2"); setUpGetEntityExpectations(VdcQueryType.GetConfigurationValue, GetConfigurationValueParameters.class, new String[] { "Version", "ConfigValue" }, new Object[] { "10.3", ConfigurationValues.VdsFenceOptionMapping }, "foo:one=1,two=2"); //expect(Config.GetValue(ConfigValues.VdsFenceOptionTypes, "10.3")).andReturn("one=int,two=bool"); setUpGetEntityExpectations(VdcQueryType.GetConfigurationValue, GetConfigurationValueParameters.class, new String[] { "Version", "ConfigValue" }, new Object[] { "10.3", ConfigurationValues.VdsFenceOptionTypes }, "one=int,two=bool"); //expect(Config.GetValue(ConfigValues.LocalStorageEnabled, "1.5")).andReturn(Boolean.FALSE); setUpGetEntityExpectations(VdcQueryType.GetConfigurationValue, GetConfigurationValueParameters.class, new String[] { "Version", "ConfigValue" }, new Object[] { "1.5", ConfigurationValues.LocalStorageEnabled }, Boolean.FALSE); 
//expect(Config.GetValue(ConfigValues.LocalStorageEnabled, "10.3")).andReturn(Boolean.TRUE); setUpGetEntityExpectations(VdcQueryType.GetConfigurationValue, GetConfigurationValueParameters.class, new String[] { "Version", "ConfigValue" }, new Object[] { "10.3", ConfigurationValues.LocalStorageEnabled }, Boolean.TRUE); //expect(Config.GetValue(ConfigValues.PredefinedVMProperties, "1.5")).andReturn(""); setUpGetEntityExpectations(VdcQueryType.GetConfigurationValue, GetConfigurationValueParameters.class, new String[] { "Version", "ConfigValue" }, new Object[] { "1.5", ConfigurationValues.PredefinedVMProperties }, ""); //expect(Config.GetValue(ConfigValues.UserDefinedVMProperties, "1.5")).andReturn(""); setUpGetEntityExpectations(VdcQueryType.GetConfigurationValue, GetConfigurationValueParameters.class, new String[] { "Version", "ConfigValue" }, new Object[] { "1.5", ConfigurationValues.UserDefinedVMProperties }, ""); //expect(Config.GetValue(ConfigValues.PredefinedVMProperties, "10.3")).andReturn("foo=true|false"); setUpGetEntityExpectations(VdcQueryType.GetConfigurationValue, GetConfigurationValueParameters.class, new String[] { "Version", "ConfigValue" }, new Object[] { "10.3", ConfigurationValues.PredefinedVMProperties }, "foo=true|false"); //expect(Config.GetValue(ConfigValues.UserDefinedVMProperties, "10.3")).andReturn("bar=[a-z]"); setUpGetEntityExpectations(VdcQueryType.GetConfigurationValue, GetConfigurationValueParameters.class, new String[] { "Version", "ConfigValue" }, new Object[] { "10.3", ConfigurationValues.UserDefinedVMProperties }, "bar=[a-z]"); replayAll(); verifyCapabilities(resource.get()); } private void verifyCapabilities(Capabilities capabilities) { assertNotNull(capabilities); assertEquals(2, capabilities.getVersions().size()); verifyVersion(capabilities.getVersions().get(0), 1, 5, false, "bar", 0, false, false, false); verifyVersion(capabilities.getVersions().get(1), 10, 3, true, "foo", 15, true, true, true); verifyPermits(capabilities); 
verifySchedulingPolicies(capabilities); } private void verifyVersion(VersionCaps version, int major, int minor, boolean current, String cpuName, int cpuLevel, boolean localStorage, boolean hooks, boolean thp) { assertEquals(major, version.getMajor()); assertEquals(minor, version.getMinor()); assertEquals(current, version.isCurrent()); assertNotNull(version.getCPUs()); assertTrue(version.getCPUs().getCPUs().size() == 1); verifyCPU(version.getCPUs().getCPUs().get(0), cpuName, cpuLevel); assertNotNull(version.getPowerManagers()); assertEquals(1, version.getPowerManagers().getPowerManagers().size()); verifyPowerManagement(version.getPowerManagers().getPowerManagers().get(0)); verifyVmTypes(version.getVmTypes().getVmTypes()); verifyStorageTypes(version.getStorageTypes().getStorageTypes(), localStorage); verifyStorageDomainTypes(version.getStorageDomainTypes().getStorageDomainTypes()); verifyFenceTypes(version.getFenceTypes().getFenceTypes()); verifyBootDevices(version.getBootDevices().getBootDevices()); verifyDisplayTypes(version.getDisplayTypes().getDisplayTypes()); verifyNicTypes(version.getNicInterfaces().getNicInterfaces()); verifyDiskTypes(version.getDiskTypes().getDiskTypes()); verifyDiskFormats(version.getDiskFormats().getDiskFormats()); verifyDiskInterfaces(version.getDiskInterfaces().getDiskInterfaces()); verifyVmAffinities(version,version.getVmAffinities()); verifyMigrateOnErrorOptions(version, version.getErrorHandling()); verifyOsTypes(version.getOsTypes().getOsTypes()); if (hooks) { verifyHooksEnvs(version.getCustomProperties().getCustomProperty()); } if (thp) { assertNotNull(version.getFeatures()); assertNotNull(version.getFeatures().getTransparentHugepages()); } } private void verifyOsTypes(List<String> osTypes) { assertEquals(OsType.values().length, osTypes.size()); for (OsType osType : OsType.values()) { assertTrue(osTypes.contains(osType.value())); } } private void verifyVmAffinities(final VersionCaps version, VmAffinities vmAffinities) { 
if(VersionUtils.greaterOrEqual( new Version(){{major=version.getMajor();minor=version.getMinor();}}, VERSION_2_3)){ assertNotNull(vmAffinities); } else { assertEquals(null, vmAffinities); } } private void verifyMigrateOnErrorOptions(final VersionCaps version, ErrorHandlingOptions errorHandling) { if(greaterOrEqual( new Version(){{major=version.getMajor();minor=version.getMinor();}}, VERSION_2_3)){ assertNotNull(errorHandling); } else { assertNull(errorHandling); } } private void verifyCPU(CPU cpu, String name, Integer level) { assertNotNull(cpu); assertEquals(name, cpu.getId()); assertEquals(level, cpu.getLevel()); } private void verifyPowerManagement(PowerManagement pm) { assertNotNull(pm); assertEquals("foo", pm.getType()); assertEquals(2, pm.getOptions().getOptions().size()); verifyOption(pm.getOptions().getOptions().get(0), "one", "int"); verifyOption(pm.getOptions().getOptions().get(1), "two", "bool"); } private void verifyOption(Option option, String name, String type) { assertEquals(name, option.getName()); assertEquals(type, option.getType()); assertNull(option.getValue()); } private void verifyVmTypes(List<String> vmTypes) { assertEquals(VmType.values().length, vmTypes.size()); for (VmType vmType : VmType.values()) { assertTrue(vmTypes.contains(vmType.value())); } } private void verifyStorageTypes(List<String> storageTypes, boolean localStorage) { assertTrue(storageTypes.contains(StorageType.ISCSI.value())); assertTrue(storageTypes.contains(StorageType.FCP.value())); assertTrue(storageTypes.contains(StorageType.NFS.value())); if (localStorage) { assertTrue(storageTypes.contains(StorageType.LOCALFS.value())); } } private void verifyStorageDomainTypes(List<String> storageDomainTypes) { assertEquals(StorageDomainType.values().length, storageDomainTypes.size()); for (StorageDomainType storageDomainType : StorageDomainType.values()) { assertTrue(storageDomainTypes.contains(storageDomainType.value())); } } private void verifyFenceTypes(List<String> fenceTypes) { 
assertEquals(FenceType.values().length, fenceTypes.size()); for (FenceType fenceType : FenceType.values()) { assertTrue(fenceTypes.contains(fenceType.value())); } } private void verifyBootDevices(List<String> bootDevices) { assertEquals(BootDevice.values().length, bootDevices.size()); for (BootDevice bootDevice : BootDevice.values()) { assertTrue(bootDevices.contains(bootDevice.value())); } } private void verifyDisplayTypes(List<String> displayTypes) { assertEquals(DisplayType.values().length, displayTypes.size()); for (DisplayType displayType : DisplayType.values()) { assertTrue(displayTypes.contains(displayType.value())); } } private void verifyNicTypes(List<String> nicTypes) { assertEquals(NicInterface.values().length, nicTypes.size()); for (NicInterface nicType : NicInterface.values()) { assertTrue(nicTypes.contains(nicType.value())); } } private void verifyDiskTypes(List<String> diskTypes) { assertEquals(DiskType.values().length, diskTypes.size()); for (DiskType diskType : DiskType.values()) { assertTrue(diskTypes.contains(diskType.value())); } } private void verifyDiskFormats(List<String> diskFormats) { assertEquals(DiskFormat.values().length, diskFormats.size()); for (DiskFormat diskFormat : DiskFormat.values()) { assertTrue(diskFormats.contains(diskFormat.value())); } } private void verifyDiskInterfaces(List<String> diskInterfaces) { assertEquals(DiskInterface.values().length, diskInterfaces.size()); for (DiskInterface diskInterface : DiskInterface.values()) { assertTrue(diskInterfaces.contains(diskInterface.value())); } } private void verifyHooksEnvs(List<CustomProperty> envs) { assertEquals(2, envs.size()); verifyHooksEnv(envs.get(0), "foo", "true|false"); verifyHooksEnv(envs.get(1), "bar", "[a-z]"); } private void verifyHooksEnv(CustomProperty env, String name, String regexp) { assertNotNull(env); assertEquals(name, env.getName()); assertEquals(regexp, env.getRegexp()); } private void verifyPermits(Capabilities capabilities) { 
assertTrue(capabilities.isSetPermits()); assertTrue(capabilities.getPermits().isSetPermits()); assertFalse(capabilities.getPermits().getPermits().isEmpty()); assertEquals(ActionGroup.values().length, capabilities.getPermits().getPermits().size()); } private void verifySchedulingPolicies(Capabilities capabilities) { assertTrue(capabilities.isSetSchedulingPolicies()); assertTrue(capabilities.getSchedulingPolicies().isSetPolicy()); assertFalse(capabilities.getSchedulingPolicies().getPolicy().isEmpty()); assertEquals(SchedulingPolicyType.values().length, capabilities.getSchedulingPolicies().getPolicy().size()); for (SchedulingPolicyType policy : SchedulingPolicyType.values()) { assertTrue(capabilities.getSchedulingPolicies().getPolicy().contains(policy.value())); } } private boolean greaterOrEqual(Version a, Version b) { return a.getMajor() != b.getMajor() ? a.getMajor() >= b.getMajor() : a.getMinor() >= b.getMinor(); } @Override protected Object getEntity(int index) { // TODO Auto-generated method stub return null; } protected String getSessionId() { return resource.getSessionHelper().getSessionId(principal); } @Override protected void init() { resource.setBackend(backend); resource.setMappingLocator(mapperLocator); resource.setSessionHelper(sessionHelper); resource.setMessageBundle(messageBundle); resource.setHttpHeaders(httpHeaders); } @Before public void setUp() { control = EasyMock.createNiceControl(); current = createMock(Current.class); principal = new Principal(USER, SECRET, DOMAIN); expect(current.get(Principal.class)).andReturn(principal).anyTimes(); sessionHelper = new SessionHelper(); sessionHelper.setCurrent(current); resource.setSessionHelper(sessionHelper); backend = createMock(BackendLocal.class); resource.setBackend(backend); MessageBundle messageBundle = new MessageBundle(); messageBundle.setPath(BUNDLE_PATH); messageBundle.populate(); resource.setMessageBundle(messageBundle); httpHeaders = createMock(HttpHeaders.class); List<Locale> locales = new 
ArrayList<Locale>(); expect(httpHeaders.getAcceptableLanguages()).andReturn(locales).anyTimes(); resource.setHttpHeaders(httpHeaders); init(); } }
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.github.somi92.seecsk.domain; import com.github.somi92.sqldbb.annotations.Collection; import com.github.somi92.sqldbb.annotations.Column; import com.github.somi92.sqldbb.annotations.ForeignKey; import com.github.somi92.sqldbb.annotations.PrimaryKey; import com.github.somi92.sqldbb.annotations.Table; import java.util.ArrayList; import java.util.Date; import java.util.List; /** * * @author milos */ @Table("Clan") public class Clan { @PrimaryKey("idClan") private long idClan; @Column("brojLK") private String brojLK; @Column("imePrezime") private String imePrezime; @Column("pol") private char pol; @Column("email") private String email; @Column("adresa") private String adresa; @Column("brojTel") private String brojTel; @Column("datumRodjenja") private Date datumRodjenja; @Column("datumUclanjenja") private Date datumUclanjenja; @Column("napomena") private String napomena; @ForeignKey(column = "idGrupa", referencingTable = "Grupa", referencingColumn = "idGrupa", isCollectionItem = false) private Grupa grupa; @Collection(childEntityClass = Uplata.class, referencingField = "clan") private List<Uplata> uplate; @Collection(childEntityClass = Prisustvo.class, referencingField = "clan") private List<Prisustvo> prisustva; public Clan() { uplate = new ArrayList<>(); prisustva = new ArrayList<>(); brojLK = ""; imePrezime = ""; email = ""; adresa = ""; brojTel = ""; datumRodjenja = new Date(); datumUclanjenja = new Date(); napomena = ""; } public Clan(String brojLK, String imePrezime, char pol, String email, String adresa, String brojTel, Date datumRodjenja, Date datumUclanjenja, String napomena) { this(); this.brojLK = brojLK; this.imePrezime = imePrezime; this.pol = pol; this.email = email; this.adresa = adresa; this.brojTel = brojTel; this.datumRodjenja = datumRodjenja; this.datumUclanjenja 
= datumUclanjenja; this.napomena = napomena; } public long getIdClan() { return idClan; } public void setIdClan(long idClan) { this.idClan = idClan; } public String getBrojLK() { return brojLK; } public void setBrojLK(String brojLK) { this.brojLK = brojLK; } public String getImePrezime() { return imePrezime; } public void setImePrezime(String imePrezime) { this.imePrezime = imePrezime; } public char getPol() { return pol; } public void setPol(char pol) { this.pol = pol; } public String getEmail() { return email; } public void setEmail(String email) { this.email = email; } public String getAdresa() { return adresa; } public void setAdresa(String adresa) { this.adresa = adresa; } public String getBrojTel() { return brojTel; } public void setBrojTel(String brojTel) { this.brojTel = brojTel; } public Date getDatumRodjenja() { return datumRodjenja; } public void setDatumRodjenja(Date datumRodjenja) { this.datumRodjenja = datumRodjenja; } public Date getDatumUclanjenja() { return datumUclanjenja; } public void setDatumUclanjenja(Date datumUclanjenja) { this.datumUclanjenja = datumUclanjenja; } public String getNapomena() { return napomena; } public void setNapomena(String napomena) { this.napomena = napomena; } public Grupa getGrupa() { return grupa; } public void setGrupa(Grupa grupa) { this.grupa = grupa; // this.grupa.dodajClana(this); } public List<Uplata> getUplate() { return uplate; } public void setUplate(List<Uplata> uplate) { this.uplate = uplate; } public List<Prisustvo> getPrisustva() { return prisustva; } public void setPrisustva(List<Prisustvo> prisustva) { this.prisustva = prisustva; } @Override public int hashCode() { int hash = 3; hash = 73 * hash + (int) (this.idClan ^ (this.idClan >>> 32)); return hash; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Clan other = (Clan) obj; if (this.idClan != other.idClan) { return false; } return true; } @Override public 
String toString() { return imePrezime; } }
/* * Copyright 2014 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.JSDocInfoBuilder; import com.google.javascript.rhino.JSTypeExpression; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; /** * Rewrites static inheritance to explicitly copy inherited properties from superclass to * subclass so that the typechecker knows the subclass has those properties. 
* * <p>For example, the main transpilation passes will convert this ES6 code: * * <pre> * class Foo { static f() {} } * class Bar extends Foo {} * </pre> * * to this ES3 code: * * <pre> * function Foo() {} * Foo.f = function() {}; * function Bar() {} * $jscomp.inherits(Bar, Foo); * </pre> * * and then this class will convert that to * * <pre> * function Foo() {} * Foo.f = function() {}; * function Bar() {} * $jscomp.inherits(Bar, Foo); * Bar.f = Foo.f; * </pre> * * Additionally, there are getter and setter fields which are transpiled from: * * <pre> * class Foo { static get prop() { return 1; } } * class Bar extends Foo {} * </pre> * * to: * * <pre> * var Foo = function() {}; * Foo.prop; // stub declaration so that the type checker knows about prop * Object.defineProperties(Foo, {prop:{get:function() { return 1; }}}); * * var Bar = function() {}; * $jscomp.inherits(Bar, Foo); * </pre> * * The stub declaration of Foo.prop needs to be duplicated for Bar so that the type checker knows * that Bar also has this property. (ES5 clases don't have class-side inheritance). * * <pre> * var Bar = function() {}; * Bar.prop; * $jscomp.inherits(Bar, Foo); * </pre> * * <p>In order to gather the type checker declarations, this pass gathers all GETPROPs on * a class. In order to determine which of these are the stub declarations it filters them based * on names discovered in Object.defineProperties. Unfortunately, we cannot simply gather the * defined properties because they don't have the type information (JSDoc). The type information * is stored on the stub declarations so we must gather both to transpile correctly. * <p> * TODO(tdeegan): In the future the type information for getter/setter properties could be stored * in the defineProperties functions. It would reduce the complexity of this pass significantly. 
 *
 * @author mattloring@google.com (Matthew Loring)
 * @author tdeegan@google.com (Thomas Deegan)
 */
public final class Es6ToEs3ClassSideInheritance implements HotSwapCompilerPass {

  static final DiagnosticType DUPLICATE_CLASS =
      DiagnosticType.error(
          "DUPLICATE_CLASS",
          "Multiple classes cannot share the same name.");

  // Names that were declared as a constructor more than once. Inheritance from such a
  // name is ambiguous, so processInherits() reports DUPLICATE_CLASS for them.
  private final Set<String> duplicateClassNames = new HashSet<>();

  /**
   * Per-class bookkeeping collected by {@link FindStaticMembers}. The same instance is
   * shared between a class name and all of its aliases (see {@code setAlias}).
   */
  private static class JavascriptClass {
    // All static members to the class including get set properties.
    private Set<Node> staticMembers = new LinkedHashSet<>();
    // Collect all the static field accesses to the class.
    private Set<Node> staticFieldAccess = new LinkedHashSet<>();
    // Collect all get set properties as defined by Object.defineProperties(...)
    private Set<String> definedProperties = new LinkedHashSet<>();
  }

  private final AbstractCompiler compiler;

  // Maps every known class name AND every alias of it to the same JavascriptClass
  // instance. LinkedHashMap keeps deterministic iteration order for the compiler output.
  private final LinkedHashMap<String, JavascriptClass> classByAlias = new LinkedHashMap<>();

  public Es6ToEs3ClassSideInheritance(AbstractCompiler compiler) {
    this.compiler = compiler;
  }

  @Override
  public void process(Node externs, Node root) {
    // One traversal over externs + root collects classes, aliases, static members and
    // all inherits() calls; the actual rewriting happens afterwards in processInherits().
    FindStaticMembers findStaticMembers = new FindStaticMembers();
    TranspilationPasses.processTranspile(compiler, externs, findStaticMembers);
    TranspilationPasses.processTranspile(compiler, root, findStaticMembers);
    processInherits(findStaticMembers.inheritsCalls);
  }

  @Override
  public void hotSwapScript(Node scriptRoot, Node originalRoot) {
    // Same as process(), but only over the swapped script (no externs re-scan).
    FindStaticMembers findStaticMembers = new FindStaticMembers();
    TranspilationPasses.processTranspile(compiler, scriptRoot, findStaticMembers);
    processInherits(findStaticMembers.inheritsCalls);
  }

  /**
   * For each recorded inherits() call, copies the superclass's static members and
   * get/set property declarations onto the subclass. The last argument of the call is
   * the superclass name node, the one before it the subclass name node.
   */
  private void processInherits(List<Node> inheritsCalls) {
    for (Node inheritsCall : inheritsCalls) {
      Node superclassNameNode = inheritsCall.getLastChild();
      String superclassQname = superclassNameNode.getQualifiedName();
      Node subclassNameNode = superclassNameNode.getPrevious();
      String subclassQname = subclassNameNode.getQualifiedName();
      JavascriptClass superClass = classByAlias.get(superclassQname);
      JavascriptClass subClass = classByAlias.get(subclassQname);
      if (duplicateClassNames.contains(superclassQname)) {
        // A duplicated superclass name makes the alias bookkeeping unreliable; report
        // and abort the whole loop (return, not continue) rather than rewrite on bad data.
        compiler.report(JSError.make(inheritsCall, DUPLICATE_CLASS));
        return;
      }
      if (superClass == null || subClass == null) {
        // One side is not a class we tracked (e.g. externs-only or unrecognized); skip.
        continue;
      }
      copyStaticMembers(superClass, subClass, inheritsCall);
      copyDeclarations(superClass, subClass, inheritsCall);
    }
  }

  /**
   * When static get/set properties are transpiled, in addition to the Object.defineProperties, they
   * are declared with stub GETPROP declarations so that the type checker understands that these
   * properties exist on the class.
   * When subclassing, we also need to declare these properties on the subclass so that the type
   * checker knows they exist.
   */
  private void copyDeclarations(
      JavascriptClass superClass, JavascriptClass subClass, Node inheritsCall) {
    for (Node staticGetProp : superClass.staticFieldAccess) {
      checkState(staticGetProp.isGetProp());
      String memberName = staticGetProp.getLastChild().getString();
      // We only copy declarations that have corresponding Object.defineProperties
      if (!superClass.definedProperties.contains(memberName)) {
        continue;
      }
      // If the subclass already declares the property no need to redeclare it.
      if (isOverriden(subClass, memberName)) {
        continue;
      }
      // Build "SubClass.member;" as a declaration stub for the type checker.
      Node subclassNameNode = inheritsCall.getSecondChild();
      Node getprop = IR.getprop(subclassNameNode.cloneTree(), IR.string(memberName));

      JSDocInfoBuilder info = JSDocInfoBuilder.maybeCopyFrom(staticGetProp.getJSDocInfo());
      JSTypeExpression unknown = new JSTypeExpression(new Node(Token.QMARK), "<synthetic>");
      info.recordType(unknown); // In case there wasn't a type specified on the base class.
      info.addSuppression("visibility");
      getprop.setJSDocInfo(info.build());

      Node declaration = IR.exprResult(getprop);
      declaration.useSourceInfoIfMissingFromForTree(inheritsCall);
      // Insert the stub immediately BEFORE the inherits() expression statement.
      Node parent = inheritsCall.getParent();
      parent.getParent().addChildBefore(declaration, parent);
      compiler.reportChangeToEnclosingScope(parent);

      // Copy over field access so that subclasses of this subclass can also make the declarations
      if (!subClass.definedProperties.contains(memberName)) {
        subClass.staticFieldAccess.add(getprop);
        subClass.definedProperties.add(memberName);
      }
    }
  }

  /**
   * Copies plain static members (assignments like {@code Super.foo = ...}) onto the
   * subclass by emitting {@code Sub.foo = Super.foo;} right after the inherits() call.
   * Members backed by Object.defineProperties are handled by copyDeclarations() instead.
   */
  private void copyStaticMembers(
      JavascriptClass superClass, JavascriptClass subClass, Node inheritsCall) {
    for (Node staticMember : superClass.staticMembers) {
      checkState(staticMember.isAssign(), staticMember);
      String memberName = staticMember.getFirstChild().getLastChild().getString();
      if (superClass.definedProperties.contains(memberName)) {
        continue;
      }
      if (isOverriden(subClass, memberName)) {
        continue;
      }

      JSDocInfoBuilder info = JSDocInfoBuilder.maybeCopyFrom(staticMember.getJSDocInfo());
      Node function = staticMember.getLastChild();
      Node sourceInfoNode = function;
      if (function.isFunction()) {
        // For methods, carry over any per-parameter JSDoc so the copied alias
        // type-checks the same way as the original.
        sourceInfoNode = function.getFirstChild();
        Node params = NodeUtil.getFunctionParameters(function);
        checkState(params.isParamList(), params);
        for (Node param : params.children()) {
          if (param.getJSDocInfo() != null) {
            String name = param.getString();
            info.recordParameter(name, param.getJSDocInfo().getType());
          }
        }
      }

      // Build "SubClass.member = SuperClass.member;".
      Node subclassNameNode = inheritsCall.getSecondChild();
      Node superclassNameNode = subclassNameNode.getNext();
      Node assign =
          IR.assign(
              IR.getprop(subclassNameNode.cloneTree(), IR.string(memberName)),
              IR.getprop(superclassNameNode.cloneTree(), IR.string(memberName)));
      info.addSuppression("visibility");
      assign.setJSDocInfo(info.build());
      Node exprResult = IR.exprResult(assign);
      exprResult.useSourceInfoIfMissingFromForTree(sourceInfoNode);
      // Insert the copy immediately AFTER the inherits() expression statement.
      Node inheritsExpressionResult = inheritsCall.getParent();
      inheritsExpressionResult.getParent().addChildAfter(exprResult, inheritsExpressionResult);
      compiler.reportChangeToEnclosingScope(inheritsExpressionResult);

      // Add the static member to the subclass so that subclasses also copy this member.
      subClass.staticMembers.add(assign);
    }
  }

  /**
   * Returns true if the subclass already defines {@code memberName} itself, either as a
   * plain static assignment or as an Object.defineProperties property.
   */
  private boolean isOverriden(JavascriptClass subClass, String memberName) {
    for (Node subclassMember : subClass.staticMembers) {
      checkState(subclassMember.isAssign(), subclassMember);
      if (subclassMember.getFirstChild().getLastChild().getString().equals(memberName)) {
        // This subclass overrides the static method, so there is no need to copy the
        // method from the base class.
        return true;
      }
    }
    if (subClass.definedProperties.contains(memberName)) {
      return true;
    }
    return false;
  }

  /**
   * Returns true if node {@code n} refers to a tracked class (or class alias). For a
   * simple NAME node, a local variable of the same name shadows the class, so the
   * reference only counts when no local binding exists.
   */
  private boolean isReferenceToClass(NodeTraversal t, Node n) {
    String className = n.getQualifiedName();
    if (!classByAlias.containsKey(className)) {
      return false;
    }
    if (!n.isName()) {
      return true;
    }
    Var var = t.getScope().getVar(className);
    return var == null || !var.isLocal();
  }

  /**
   * Collection traversal: records constructor definitions, class aliases
   * (via VAR and ASSIGN), static member assignments, static field reads,
   * Object.defineProperties keys, and every inherits() call for later processing.
   */
  private class FindStaticMembers extends AbstractPostOrderCallback {
    private final List<Node> inheritsCalls = new LinkedList<>();

    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      switch (n.getToken()) {
        case CALL:
          if (n.getFirstChild().matchesQualifiedName(Es6RewriteClass.INHERITS)) {
            inheritsCalls.add(n);
          }
          if (NodeUtil.isObjectDefinePropertiesDefinition(n)) {
            visitDefinedPropertiesCall(t, n);
          }
          break;
        case VAR:
          visitVar(n);
          break;
        case ASSIGN:
          visitAssign(t, n);
          break;
        case GETPROP:
          // Only expression-statement reads (stub declarations) are recorded.
          if (parent.isExprResult()) {
            visitGetProp(t, n);
          }
          break;
        case FUNCTION:
          visitFunctionClassDef(n);
          break;
        default:
          break;
      }
    }

    // Records the property names a class defines via Object.defineProperties(Class, {...}).
    private void visitDefinedPropertiesCall(NodeTraversal t, Node definePropertiesCall) {
      Node object = definePropertiesCall.getSecondChild();
      if (isReferenceToClass(t, object)) {
        String className = object.getQualifiedName();
        JavascriptClass c = classByAlias.get(className);
        for (Node prop : NodeUtil.getObjectDefinedPropertiesKeys(definePropertiesCall)) {
          c.definedProperties.add(prop.getString());
        }
      }
    }

    // A function whose best JSDoc marks it @constructor starts tracking a new class;
    // a second definition under the same name flags it as a duplicate instead.
    private void visitFunctionClassDef(Node n) {
      JSDocInfo classInfo = NodeUtil.getBestJSDocInfo(n);
      if (classInfo != null && classInfo.isConstructor()) {
        String name = NodeUtil.getName(n);
        if (classByAlias.containsKey(name)) {
          duplicateClassNames.add(name);
        } else {
          classByAlias.put(name, new JavascriptClass());
        }
      }
    }

    // Makes `alias` share the original class's JavascriptClass instance.
    private void setAlias(String original, String alias) {
      checkArgument(classByAlias.containsKey(original));
      classByAlias.put(alias, classByAlias.get(original));
    }

    // Records a static field read like `Class.field;`.
    private void visitGetProp(NodeTraversal t, Node n) {
      Node classNode = n.getFirstChild();
      if (isReferenceToClass(t, classNode)) {
        classByAlias.get(classNode.getQualifiedName()).staticFieldAccess.add(n);
      }
    }

    private void visitAssign(NodeTraversal t, Node n) {
      // Alias for classes. We assume that the alias appears after the class declaration.
      String existingClassQname = n.getLastChild().getQualifiedName();
      if (existingClassQname != null && classByAlias.containsKey(existingClassQname)) {
        String alias = n.getFirstChild().getQualifiedName();
        if (alias != null) {
          setAlias(existingClassQname, alias);
        }
      } else if (n.getFirstChild().isGetProp()) {
        // Otherwise `Class.member = ...` is a static member definition.
        Node getProp = n.getFirstChild();
        Node classNode = getProp.getFirstChild();
        if (isReferenceToClass(t, classNode)) {
          classByAlias.get(classNode.getQualifiedName()).staticMembers.add(n);
        }
      }
    }

    // Handles alias declarations of the form `var A = KnownClass;`.
    private void visitVar(Node n) {
      Node child = n.getFirstChild();
      if (!child.hasChildren()) {
        return;
      }
      String maybeOriginalName = child.getFirstChild().getQualifiedName();
      if (classByAlias.containsKey(maybeOriginalName)) {
        String maybeAlias = child.getQualifiedName();
        if (maybeAlias != null) {
          setAlias(maybeOriginalName, maybeAlias);
        }
      }
    }
  }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.logic.v2016_06_01.implementation;

import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.management.logic.v2016_06_01.ErrorResponseException;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.Validator;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.HTTP;
import retrofit2.http.Path;
import retrofit2.http.PUT;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;

/**
 * An instance of this class provides access to all the operations defined
 * in Sessions.
 */
public class SessionsInner {
    /** The Retrofit service to perform REST calls. */
    private SessionsService service;
    /** The service client containing this operation class. */
    private LogicManagementClientImpl client;

    /**
     * Initializes an instance of SessionsInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public SessionsInner(Retrofit retrofit, LogicManagementClientImpl client) {
        this.service = retrofit.create(SessionsService.class);
        this.client = client;
    }

    /**
     * The interface defining all the services for Sessions to be
     * used by Retrofit to perform the actual REST calls.
     */
    interface SessionsService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.logic.v2016_06_01.Sessions listByIntegrationAccounts" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}/sessions")
        Observable<Response<ResponseBody>> listByIntegrationAccounts(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("integrationAccountName") String integrationAccountName, @Query("api-version") String apiVersion, @Query("$top") Integer top, @Query("$filter") String filter, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.logic.v2016_06_01.Sessions get" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}/sessions/{sessionName}")
        Observable<Response<ResponseBody>> get(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("integrationAccountName") String integrationAccountName, @Path("sessionName") String sessionName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.logic.v2016_06_01.Sessions createOrUpdate" })
        @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}/sessions/{sessionName}")
        Observable<Response<ResponseBody>> createOrUpdate(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("integrationAccountName") String integrationAccountName, @Path("sessionName") String sessionName, @Query("api-version") String apiVersion, @Body IntegrationAccountSessionInner session, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        // DELETE is declared via @HTTP with hasBody = true (plain @DELETE cannot carry a body).
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.logic.v2016_06_01.Sessions delete" })
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}/sessions/{sessionName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> delete(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("integrationAccountName") String integrationAccountName, @Path("sessionName") String sessionName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        // Follows an absolute next-page link returned by the service, hence @Url.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.logic.v2016_06_01.Sessions listByIntegrationAccountsNext" })
        @GET
        Observable<Response<ResponseBody>> listByIntegrationAccountsNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    }

    /**
     * Gets a list of integration account sessions.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;IntegrationAccountSessionInner&gt; object if successful.
     */
    public PagedList<IntegrationAccountSessionInner> listByIntegrationAccounts(final String resourceGroupName, final String integrationAccountName) {
        // Blocks on the first page; later pages are fetched lazily via nextPage().
        ServiceResponse<Page<IntegrationAccountSessionInner>> response = listByIntegrationAccountsSinglePageAsync(resourceGroupName, integrationAccountName).toBlocking().single();
        return new PagedList<IntegrationAccountSessionInner>(response.body()) {
            @Override
            public Page<IntegrationAccountSessionInner> nextPage(String nextPageLink) {
                return listByIntegrationAccountsNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Gets a list of integration account sessions.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<IntegrationAccountSessionInner>> listByIntegrationAccountsAsync(final String resourceGroupName, final String integrationAccountName, final ListOperationCallback<IntegrationAccountSessionInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listByIntegrationAccountsSinglePageAsync(resourceGroupName, integrationAccountName),
            new Func1<String, Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> call(String nextPageLink) {
                    return listByIntegrationAccountsNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }

    /**
     * Gets a list of integration account sessions.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;IntegrationAccountSessionInner&gt; object
     */
    public Observable<Page<IntegrationAccountSessionInner>> listByIntegrationAccountsAsync(final String resourceGroupName, final String integrationAccountName) {
        return listByIntegrationAccountsWithServiceResponseAsync(resourceGroupName, integrationAccountName)
            .map(new Func1<ServiceResponse<Page<IntegrationAccountSessionInner>>, Page<IntegrationAccountSessionInner>>() {
                @Override
                public Page<IntegrationAccountSessionInner> call(ServiceResponse<Page<IntegrationAccountSessionInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * Gets a list of integration account sessions.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;IntegrationAccountSessionInner&gt; object
     */
    public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> listByIntegrationAccountsWithServiceResponseAsync(final String resourceGroupName, final String integrationAccountName) {
        return listByIntegrationAccountsSinglePageAsync(resourceGroupName, integrationAccountName)
            .concatMap(new Func1<ServiceResponse<Page<IntegrationAccountSessionInner>>, Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> call(ServiceResponse<Page<IntegrationAccountSessionInner>> page) {
                    // Recursively chains the next page until nextPageLink is null.
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listByIntegrationAccountsNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }

    /**
     * Gets a list of integration account sessions.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;IntegrationAccountSessionInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> listByIntegrationAccountsSinglePageAsync(final String resourceGroupName, final String integrationAccountName) {
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (integrationAccountName == null) {
            throw new IllegalArgumentException("Parameter integrationAccountName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // This overload omits $top and $filter: null query parameters are not sent.
        final Integer top = null;
        final String filter = null;
        return service.listByIntegrationAccounts(this.client.subscriptionId(), resourceGroupName, integrationAccountName, this.client.apiVersion(), top, filter, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<IntegrationAccountSessionInner>> result = listByIntegrationAccountsDelegate(response);
                        return Observable.just(new ServiceResponse<Page<IntegrationAccountSessionInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    /**
     * Gets a list of integration account sessions.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param top The number of items to be included in the result.
     * @param filter The filter to apply on the operation. Options for filters include: ChangedTime.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;IntegrationAccountSessionInner&gt; object if successful.
     */
    public PagedList<IntegrationAccountSessionInner> listByIntegrationAccounts(final String resourceGroupName, final String integrationAccountName, final Integer top, final String filter) {
        // Blocks on the first page; later pages are fetched lazily via nextPage().
        ServiceResponse<Page<IntegrationAccountSessionInner>> response = listByIntegrationAccountsSinglePageAsync(resourceGroupName, integrationAccountName, top, filter).toBlocking().single();
        return new PagedList<IntegrationAccountSessionInner>(response.body()) {
            @Override
            public Page<IntegrationAccountSessionInner> nextPage(String nextPageLink) {
                return listByIntegrationAccountsNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Gets a list of integration account sessions.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param top The number of items to be included in the result.
     * @param filter The filter to apply on the operation. Options for filters include: ChangedTime.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<IntegrationAccountSessionInner>> listByIntegrationAccountsAsync(final String resourceGroupName, final String integrationAccountName, final Integer top, final String filter, final ListOperationCallback<IntegrationAccountSessionInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listByIntegrationAccountsSinglePageAsync(resourceGroupName, integrationAccountName, top, filter),
            new Func1<String, Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> call(String nextPageLink) {
                    return listByIntegrationAccountsNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }

    /**
     * Gets a list of integration account sessions.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param top The number of items to be included in the result.
     * @param filter The filter to apply on the operation. Options for filters include: ChangedTime.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;IntegrationAccountSessionInner&gt; object
     */
    public Observable<Page<IntegrationAccountSessionInner>> listByIntegrationAccountsAsync(final String resourceGroupName, final String integrationAccountName, final Integer top, final String filter) {
        return listByIntegrationAccountsWithServiceResponseAsync(resourceGroupName, integrationAccountName, top, filter)
            .map(new Func1<ServiceResponse<Page<IntegrationAccountSessionInner>>, Page<IntegrationAccountSessionInner>>() {
                @Override
                public Page<IntegrationAccountSessionInner> call(ServiceResponse<Page<IntegrationAccountSessionInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * Gets a list of integration account sessions.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param top The number of items to be included in the result.
     * @param filter The filter to apply on the operation. Options for filters include: ChangedTime.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;IntegrationAccountSessionInner&gt; object
     */
    public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> listByIntegrationAccountsWithServiceResponseAsync(final String resourceGroupName, final String integrationAccountName, final Integer top, final String filter) {
        return listByIntegrationAccountsSinglePageAsync(resourceGroupName, integrationAccountName, top, filter)
            .concatMap(new Func1<ServiceResponse<Page<IntegrationAccountSessionInner>>, Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> call(ServiceResponse<Page<IntegrationAccountSessionInner>> page) {
                    // Recursively chains the next page until nextPageLink is null.
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listByIntegrationAccountsNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }

    /**
     * Gets a list of integration account sessions.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param top The number of items to be included in the result.
     * @param filter The filter to apply on the operation. Options for filters include: ChangedTime.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;IntegrationAccountSessionInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> listByIntegrationAccountsSinglePageAsync(final String resourceGroupName, final String integrationAccountName, final Integer top, final String filter) {
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (integrationAccountName == null) {
            throw new IllegalArgumentException("Parameter integrationAccountName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.listByIntegrationAccounts(this.client.subscriptionId(), resourceGroupName, integrationAccountName, this.client.apiVersion(), top, filter, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<IntegrationAccountSessionInner>> result = listByIntegrationAccountsDelegate(response);
                        return Observable.just(new ServiceResponse<Page<IntegrationAccountSessionInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes a raw list response: only HTTP 200 is a success; everything else
    // is surfaced as ErrorResponseException.
    private ServiceResponse<PageImpl<IntegrationAccountSessionInner>> listByIntegrationAccountsDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<IntegrationAccountSessionInner>, ErrorResponseException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<IntegrationAccountSessionInner>>() { }.getType())
                .registerError(ErrorResponseException.class)
                .build(response);
    }

    /**
     * Gets an integration account session.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param sessionName The integration account session name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the IntegrationAccountSessionInner object if successful.
     */
    public IntegrationAccountSessionInner get(String resourceGroupName, String integrationAccountName, String sessionName) {
        return getWithServiceResponseAsync(resourceGroupName, integrationAccountName, sessionName).toBlocking().single().body();
    }

    /**
     * Gets an integration account session.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param sessionName The integration account session name.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<IntegrationAccountSessionInner> getAsync(String resourceGroupName, String integrationAccountName, String sessionName, final ServiceCallback<IntegrationAccountSessionInner> serviceCallback) {
        return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, integrationAccountName, sessionName), serviceCallback);
    }

    /**
     * Gets an integration account session.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param sessionName The integration account session name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the IntegrationAccountSessionInner object
     */
    public Observable<IntegrationAccountSessionInner> getAsync(String resourceGroupName, String integrationAccountName, String sessionName) {
        return getWithServiceResponseAsync(resourceGroupName, integrationAccountName, sessionName).map(new Func1<ServiceResponse<IntegrationAccountSessionInner>, IntegrationAccountSessionInner>() {
            @Override
            public IntegrationAccountSessionInner call(ServiceResponse<IntegrationAccountSessionInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Gets an integration account session.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param sessionName The integration account session name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the IntegrationAccountSessionInner object
     */
    public Observable<ServiceResponse<IntegrationAccountSessionInner>> getWithServiceResponseAsync(String resourceGroupName, String integrationAccountName, String sessionName) {
        // Client-side validation happens eagerly, before the Observable is built.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (integrationAccountName == null) {
            throw new IllegalArgumentException("Parameter integrationAccountName is required and cannot be null.");
        }
        if (sessionName == null) {
            throw new IllegalArgumentException("Parameter sessionName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.get(this.client.subscriptionId(), resourceGroupName, integrationAccountName, sessionName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<IntegrationAccountSessionInner>>>() {
                @Override
                public Observable<ServiceResponse<IntegrationAccountSessionInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<IntegrationAccountSessionInner> clientResponse = getDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes a get response: only HTTP 200 is a success; everything else
    // is surfaced as ErrorResponseException.
    private ServiceResponse<IntegrationAccountSessionInner> getDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<IntegrationAccountSessionInner, ErrorResponseException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<IntegrationAccountSessionInner>() { }.getType())
                .registerError(ErrorResponseException.class)
                .build(response);
    }

    /**
     * Creates or updates an integration account session.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param sessionName The integration account session name.
     * @param session The integration account session.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the IntegrationAccountSessionInner object if successful.
     */
    public IntegrationAccountSessionInner createOrUpdate(String resourceGroupName, String integrationAccountName, String sessionName, IntegrationAccountSessionInner session) {
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, integrationAccountName, sessionName, session).toBlocking().single().body();
    }

    /**
     * Creates or updates an integration account session.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param sessionName The integration account session name.
     * @param session The integration account session.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<IntegrationAccountSessionInner> createOrUpdateAsync(String resourceGroupName, String integrationAccountName, String sessionName, IntegrationAccountSessionInner session, final ServiceCallback<IntegrationAccountSessionInner> serviceCallback) {
        return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, integrationAccountName, sessionName, session), serviceCallback);
    }

    /**
     * Creates or updates an integration account session.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param sessionName The integration account session name.
     * @param session The integration account session.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the IntegrationAccountSessionInner object
     */
    public Observable<IntegrationAccountSessionInner> createOrUpdateAsync(String resourceGroupName, String integrationAccountName, String sessionName, IntegrationAccountSessionInner session) {
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, integrationAccountName, sessionName, session).map(new Func1<ServiceResponse<IntegrationAccountSessionInner>, IntegrationAccountSessionInner>() {
            @Override
            public IntegrationAccountSessionInner call(ServiceResponse<IntegrationAccountSessionInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Creates or updates an integration account session.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param sessionName The integration account session name.
     * @param session The integration account session.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the IntegrationAccountSessionInner object
     */
    public Observable<ServiceResponse<IntegrationAccountSessionInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String integrationAccountName, String sessionName, IntegrationAccountSessionInner session) {
        // Client-side validation happens eagerly, before the Observable is built.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (integrationAccountName == null) {
            throw new IllegalArgumentException("Parameter integrationAccountName is required and cannot be null.");
        }
        if (sessionName == null) {
            throw new IllegalArgumentException("Parameter sessionName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        if (session == null) {
            throw new IllegalArgumentException("Parameter session is required and cannot be null.");
        }
        // Structural validation of the request body (the only operation with one).
        Validator.validate(session);
        return service.createOrUpdate(this.client.subscriptionId(), resourceGroupName, integrationAccountName, sessionName, this.client.apiVersion(), session, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<IntegrationAccountSessionInner>>>() {
                @Override
                public Observable<ServiceResponse<IntegrationAccountSessionInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<IntegrationAccountSessionInner> clientResponse = createOrUpdateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes a createOrUpdate response: 200 (updated) and 201 (created) are both
    // successes; everything else is surfaced as ErrorResponseException.
    private ServiceResponse<IntegrationAccountSessionInner> createOrUpdateDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<IntegrationAccountSessionInner, ErrorResponseException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<IntegrationAccountSessionInner>() { }.getType())
                .register(201, new TypeToken<IntegrationAccountSessionInner>() { }.getType())
                .registerError(ErrorResponseException.class)
                .build(response);
    }

    /**
     * Deletes an integration account session.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param sessionName The integration account session name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void delete(String resourceGroupName, String integrationAccountName, String sessionName) {
        deleteWithServiceResponseAsync(resourceGroupName, integrationAccountName, sessionName).toBlocking().single().body();
    }

    /**
     * Deletes an integration account session.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param sessionName The integration account session name.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> deleteAsync(String resourceGroupName, String integrationAccountName, String sessionName, final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(deleteWithServiceResponseAsync(resourceGroupName, integrationAccountName, sessionName), serviceCallback);
    }

    /**
     * Deletes an integration account session.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param sessionName The integration account session name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<Void> deleteAsync(String resourceGroupName, String integrationAccountName, String sessionName) {
        return deleteWithServiceResponseAsync(resourceGroupName, integrationAccountName, sessionName).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * Deletes an integration account session.
* * @param resourceGroupName The resource group name. * @param integrationAccountName The integration account name. * @param sessionName The integration account session name. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceResponse} object if successful. */ public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String resourceGroupName, String integrationAccountName, String sessionName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (integrationAccountName == null) { throw new IllegalArgumentException("Parameter integrationAccountName is required and cannot be null."); } if (sessionName == null) { throw new IllegalArgumentException("Parameter sessionName is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } return service.delete(this.client.subscriptionId(), resourceGroupName, integrationAccountName, sessionName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() { @Override public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) { try { ServiceResponse<Void> clientResponse = deleteDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<Void> deleteDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<Void, ErrorResponseException>newInstance(this.client.serializerAdapter()) .register(200, 
new TypeToken<Void>() { }.getType()) .register(204, new TypeToken<Void>() { }.getType()) .registerError(ErrorResponseException.class) .build(response); } /** * Gets a list of integration account sessions. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @throws ErrorResponseException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the PagedList&lt;IntegrationAccountSessionInner&gt; object if successful. */ public PagedList<IntegrationAccountSessionInner> listByIntegrationAccountsNext(final String nextPageLink) { ServiceResponse<Page<IntegrationAccountSessionInner>> response = listByIntegrationAccountsNextSinglePageAsync(nextPageLink).toBlocking().single(); return new PagedList<IntegrationAccountSessionInner>(response.body()) { @Override public Page<IntegrationAccountSessionInner> nextPage(String nextPageLink) { return listByIntegrationAccountsNextSinglePageAsync(nextPageLink).toBlocking().single().body(); } }; } /** * Gets a list of integration account sessions. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @param serviceFuture the ServiceFuture object tracking the Retrofit calls * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<List<IntegrationAccountSessionInner>> listByIntegrationAccountsNextAsync(final String nextPageLink, final ServiceFuture<List<IntegrationAccountSessionInner>> serviceFuture, final ListOperationCallback<IntegrationAccountSessionInner> serviceCallback) { return AzureServiceFuture.fromPageResponse( listByIntegrationAccountsNextSinglePageAsync(nextPageLink), new Func1<String, Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>>>() { @Override public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> call(String nextPageLink) { return listByIntegrationAccountsNextSinglePageAsync(nextPageLink); } }, serviceCallback); } /** * Gets a list of integration account sessions. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;IntegrationAccountSessionInner&gt; object */ public Observable<Page<IntegrationAccountSessionInner>> listByIntegrationAccountsNextAsync(final String nextPageLink) { return listByIntegrationAccountsNextWithServiceResponseAsync(nextPageLink) .map(new Func1<ServiceResponse<Page<IntegrationAccountSessionInner>>, Page<IntegrationAccountSessionInner>>() { @Override public Page<IntegrationAccountSessionInner> call(ServiceResponse<Page<IntegrationAccountSessionInner>> response) { return response.body(); } }); } /** * Gets a list of integration account sessions. * * @param nextPageLink The NextLink from the previous successful call to List operation. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;IntegrationAccountSessionInner&gt; object */ public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> listByIntegrationAccountsNextWithServiceResponseAsync(final String nextPageLink) { return listByIntegrationAccountsNextSinglePageAsync(nextPageLink) .concatMap(new Func1<ServiceResponse<Page<IntegrationAccountSessionInner>>, Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>>>() { @Override public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> call(ServiceResponse<Page<IntegrationAccountSessionInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listByIntegrationAccountsNextWithServiceResponseAsync(nextPageLink)); } }); } /** * Gets a list of integration account sessions. * ServiceResponse<PageImpl<IntegrationAccountSessionInner>> * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;IntegrationAccountSessionInner&gt; object wrapped in {@link ServiceResponse} if successful. 
*/ public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> listByIntegrationAccountsNextSinglePageAsync(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null."); } String nextUrl = String.format("%s", nextPageLink); return service.listByIntegrationAccountsNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>>>() { @Override public Observable<ServiceResponse<Page<IntegrationAccountSessionInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<IntegrationAccountSessionInner>> result = listByIntegrationAccountsNextDelegate(response); return Observable.just(new ServiceResponse<Page<IntegrationAccountSessionInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<PageImpl<IntegrationAccountSessionInner>> listByIntegrationAccountsNextDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<PageImpl<IntegrationAccountSessionInner>, ErrorResponseException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<PageImpl<IntegrationAccountSessionInner>>() { }.getType()) .registerError(ErrorResponseException.class) .build(response); } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.bucket.composite; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.queries.SearchAfterSortedDocQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.LeafFieldComparator; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSelector; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.Weight; import org.apache.lucene.search.comparators.LongComparator; import org.apache.lucene.util.RoaringDocIdSet; import 
org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.index.IndexSortConfig;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.BucketCollector;
import org.elasticsearch.search.aggregations.CardinalityUpperBound;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.MultiBucketCollector;
import org.elasticsearch.search.aggregations.MultiBucketConsumerService;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
import org.elasticsearch.search.sort.SortAndFormats;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.LongUnaryOperator;
import java.util.stream.Collectors;

import static org.elasticsearch.search.aggregations.MultiBucketConsumerService.MAX_BUCKET_SETTING;

/**
 * Aggregator for the {@code composite} aggregation. Selects the top {@code size}
 * composite buckets in a first pass (optionally exploiting the index sort to
 * terminate collection early), then replays matching documents to the deferred
 * sub-aggregators in a second pass.
 */
final class CompositeAggregator extends BucketsAggregator {
    private final int size;
    private final List<String> sourceNames;
    private final int[] reverseMuls;
    private final List<DocValueFormat> formats;
    private final CompositeKey rawAfterKey;

    private final CompositeValuesSourceConfig[] sourceConfigs;
    private final SingleDimensionValuesSource<?>[] sources;
    private final CompositeValuesCollectorQueue queue;

    // One entry per fully collected leaf: the doc ids that may belong to a top bucket.
    private final List<Entry> entries = new ArrayList<>();
    private LeafReaderContext currentLeaf;
    private RoaringDocIdSet.Builder docIdSetBuilder;
    private BucketCollector deferredCollectors;

    // Set when collection was cut short because remaining docs cannot compete.
    private boolean earlyTerminated;

    CompositeAggregator(String name, AggregatorFactories factories, SearchContext context, Aggregator parent,
                        Map<String, Object> metadata, int size, CompositeValuesSourceConfig[] sourceConfigs,
                        CompositeKey rawAfterKey) throws IOException {
        super(name, factories, context, parent, CardinalityUpperBound.MANY, metadata);
        this.size = size;
        this.sourceNames = Arrays.stream(sourceConfigs).map(CompositeValuesSourceConfig::name).collect(Collectors.toList());
        this.reverseMuls = Arrays.stream(sourceConfigs).mapToInt(CompositeValuesSourceConfig::reverseMul).toArray();
        this.formats = Arrays.stream(sourceConfigs).map(CompositeValuesSourceConfig::format).collect(Collectors.toList());
        this.sources = new SingleDimensionValuesSource[sourceConfigs.length];
        // check that the provided size is not greater than the search.max_buckets setting
        int bucketLimit = context.aggregations().multiBucketConsumer().getLimit();
        if (size > bucketLimit) {
            throw new MultiBucketConsumerService.TooManyBucketsException("Trying to create too many buckets. Must be less than or equal" +
                " to: [" + bucketLimit + "] but was [" + size + "]. This limit can be set by changing the [" +
                MAX_BUCKET_SETTING.getKey() + "] cluster level setting.", bucketLimit);
        }
        this.sourceConfigs = sourceConfigs;
        for (int i = 0; i < sourceConfigs.length; i++) {
            this.sources[i] = sourceConfigs[i].createValuesSource(
                context.bigArrays(),
                context.searcher().getIndexReader(),
                size,
                this::addRequestCircuitBreakerBytes
            );
        }
        this.queue = new CompositeValuesCollectorQueue(context.bigArrays(), sources, size, rawAfterKey);
        this.rawAfterKey = rawAfterKey;
    }

    @Override
    protected void doClose() {
        // Release the queue first, then the per-dimension sources, even if one close throws.
        try {
            Releasables.close(queue);
        } finally {
            Releasables.close(sources);
        }
    }

    @Override
    protected void doPreCollection() throws IOException {
        // Sub-aggregations are deferred: they only run during the replay phase.
        List<BucketCollector> collectors = Arrays.asList(subAggregators);
        deferredCollectors = MultiBucketCollector.wrap(collectors);
        collectableSubAggregators = BucketCollector.NO_OP_COLLECTOR;
    }

    @Override
    protected void doPostCollection() throws IOException {
        finishLeaf();
    }

    @Override
    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
        // Composite aggregator must be at the top of the aggregation tree
        assert owningBucketOrds.length == 1 && owningBucketOrds[0] == 0L;
        if (deferredCollectors != NO_OP_COLLECTOR) {
            // Replay all documents that contain at least one top bucket (collected during the first pass).
            runDeferredCollections();
        }
        int num = Math.min(size, queue.size());
        final InternalComposite.InternalBucket[] buckets = new InternalComposite.InternalBucket[num];
        long[] bucketOrdsToCollect = new long[queue.size()];
        for (int i = 0; i < queue.size(); i++) {
            bucketOrdsToCollect[i] = i;
        }
        InternalAggregations[] subAggsForBuckets = buildSubAggsForBuckets(bucketOrdsToCollect);
        // Popping drains the queue smallest-last, so fill the array from the back to
        // end up with buckets in ascending key order.
        while (queue.size() > 0) {
            int slot = queue.pop();
            CompositeKey key = queue.toCompositeKey(slot);
            InternalAggregations aggs = subAggsForBuckets[slot];
            int docCount = queue.getDocCount(slot);
            buckets[queue.size()] = new InternalComposite.InternalBucket(sourceNames, formats, key, reverseMuls, docCount, aggs);
        }
        CompositeKey lastBucket = num > 0 ? buckets[num-1].getRawKey() : null;
        return new InternalAggregation[] {
            new InternalComposite(name, size, sourceNames, formats, Arrays.asList(buckets), lastBucket, reverseMuls,
                earlyTerminated, metadata())
        };
    }

    @Override
    public InternalAggregation buildEmptyAggregation() {
        return new InternalComposite(name, size, sourceNames, formats, Collections.emptyList(), null, reverseMuls,
            false, metadata());
    }

    // Seals the doc id set collected for the current leaf and queues it for replay.
    private void finishLeaf() {
        if (currentLeaf != null) {
            DocIdSet docIdSet = docIdSetBuilder.build();
            entries.add(new Entry(currentLeaf, docIdSet));
            currentLeaf = null;
            docIdSetBuilder = null;
        }
    }

    /** Return true if the provided field may have multiple values per document in the leaf **/
    private boolean isMaybeMultivalued(LeafReaderContext context, SortField sortField) throws IOException {
        SortField.Type type = IndexSortConfig.getSortFieldType(sortField);
        switch (type) {
            case STRING:
                final SortedSetDocValues v1 = context.reader().getSortedSetDocValues(sortField.getField());
                return v1 != null && DocValues.unwrapSingleton(v1) == null;

            case DOUBLE:
            case FLOAT:
            case LONG:
            case INT:
                final SortedNumericDocValues v2 = context.reader().getSortedNumericDocValues(sortField.getField());
                return v2 != null && DocValues.unwrapSingleton(v2) == null;

            default:
                // we have no clue whether the field is multi-valued or not so we assume it is.
                return true;
        }
    }

    /**
     * Returns the {@link Sort} prefix that is eligible to index sort
     * optimization and null if index sort is not applicable.
     */
    private Sort buildIndexSortPrefix(LeafReaderContext context) throws IOException {
        Sort indexSort = context.reader().getMetaData().getSort();
        if (indexSort == null) {
            return null;
        }
        List<SortField> sortFields = new ArrayList<>();
        int end = Math.min(indexSort.getSort().length, sourceConfigs.length);
        for (int i = 0; i < end; i++) {
            CompositeValuesSourceConfig sourceConfig = sourceConfigs[i];
            SingleDimensionValuesSource<?> source = sources[i];
            SortField indexSortField = indexSort.getSort()[i];
            if (source.fieldType == null
                    // TODO: can we handle missing bucket when using index sort optimization ?
                    || source.missingBucket
                    || indexSortField.getField().equals(source.fieldType.name()) == false
                    || isMaybeMultivalued(context, indexSortField)
                    || sourceConfig.hasScript()) {
                break;
            }

            if (indexSortField.getReverse() != (source.reverseMul == -1)) {
                if (i == 0) {
                    // the leading index sort matches the leading source field but the order is reversed
                    // so we don't check the other sources.
                    return new Sort(indexSortField);
                }
                break;
            }
            sortFields.add(indexSortField);
            if (sourceConfig.valuesSource() instanceof RoundingValuesSource) {
                // the rounding "squashes" many values together, that breaks the ordering of sub-values
                // so we ignore subsequent source even if they match the index sort.
                break;
            }
        }
        return sortFields.isEmpty() ? null : new Sort(sortFields.toArray(new SortField[0]));
    }

    /**
     * Return the number of leading sources that match the index sort.
     *
     * @param indexSortPrefix The index sort prefix that matches the sources
     * @return The length of the index sort prefix if the sort order matches
     *         or -1 if the leading index sort is in the reverse order of the
     *         leading source. A value of 0 indicates that the index sort is
     *         not applicable.
     */
    private int computeSortPrefixLen(Sort indexSortPrefix) {
        if (indexSortPrefix == null) {
            return 0;
        }
        if (indexSortPrefix.getSort()[0].getReverse() != (sources[0].reverseMul == -1)) {
            assert indexSortPrefix.getSort().length == 1;
            return -1;
        } else {
            return indexSortPrefix.getSort().length;
        }
    }

    /**
     * Rewrites the provided {@link Sort} to apply rounding on {@link SortField} that target
     * {@link RoundingValuesSource}.
     */
    private Sort applySortFieldRounding(Sort sort) {
        SortField[] sortFields = new SortField[sort.getSort().length];
        for (int i = 0; i < sort.getSort().length; i++) {
            if (sourceConfigs[i].valuesSource() instanceof RoundingValuesSource) {
                LongUnaryOperator round = ((RoundingValuesSource) sourceConfigs[i].valuesSource())::round;
                final SortedNumericSortField delegate = (SortedNumericSortField) sort.getSort()[i];
                // Wrap the sort field so comparisons see the ROUNDED value while equality and
                // hashing still delegate to the original field.
                sortFields[i] = new SortedNumericSortField(delegate.getField(), delegate.getNumericType(), delegate.getReverse()) {
                    @Override
                    public boolean equals(Object obj) {
                        return delegate.equals(obj);
                    }

                    @Override
                    public int hashCode() {
                        return delegate.hashCode();
                    }

                    @Override
                    public FieldComparator<?> getComparator(int numHits, int sortPos) {
                        return new LongComparator(1, delegate.getField(), (Long) missingValue, delegate.getReverse(), sortPos) {
                            @Override
                            public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
                                return new LongLeafComparator(context) {
                                    @Override
                                    protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
                                        NumericDocValues dvs = SortedNumericSelector.wrap(
                                            DocValues.getSortedNumeric(context.reader(), field),
                                            delegate.getSelector(),
                                            delegate.getNumericType());
                                        // View that rounds every value read from the underlying doc values.
                                        return new NumericDocValues() {
                                            @Override
                                            public long longValue() throws IOException {
                                                return round.applyAsLong(dvs.longValue());
                                            }

                                            @Override
                                            public boolean advanceExact(int target) throws IOException {
                                                return dvs.advanceExact(target);
                                            }

                                            @Override
                                            public int docID() {
                                                return dvs.docID();
                                            }

                                            @Override
                                            public int nextDoc() throws IOException {
                                                return dvs.nextDoc();
                                            }

                                            @Override
                                            public int advance(int target) throws IOException {
                                                return dvs.advance(target);
                                            }

                                            @Override
                                            public long cost() {
                                                return dvs.cost();
                                            }
                                        };
                                    }
                                };
                            }
                        };
                    }
                };
            } else {
                sortFields[i] = sort.getSort()[i];
            }
        }
        return new Sort(sortFields);
    }

    // Collects a leaf by jumping straight past the "after" key using a
    // SearchAfterSortedDocQuery on the (rounded) index sort prefix.
    private void processLeafFromQuery(LeafReaderContext ctx, Sort indexSortPrefix) throws IOException {
        DocValueFormat[] formats = new DocValueFormat[indexSortPrefix.getSort().length];
        for (int i = 0; i < formats.length; i++) {
            formats[i] = sources[i].format;
        }
        FieldDoc fieldDoc = SearchAfterBuilder.buildFieldDoc(new SortAndFormats(indexSortPrefix, formats),
            Arrays.copyOfRange(rawAfterKey.values(), 0, formats.length));
        if (indexSortPrefix.getSort().length < sources.length) {
            // include all docs that belong to the partial bucket
            fieldDoc.doc = -1;
        }
        BooleanQuery newQuery = new BooleanQuery.Builder()
            .add(context.query(), BooleanClause.Occur.MUST)
            .add(new SearchAfterSortedDocQuery(applySortFieldRounding(indexSortPrefix), fieldDoc), BooleanClause.Occur.FILTER)
            .build();
        Weight weight = context.searcher().createWeight(context.searcher().rewrite(newQuery), ScoreMode.COMPLETE_NO_SCORES, 1f);
        Scorer scorer = weight.scorer(ctx);
        if (scorer != null) {
            DocIdSetIterator docIt = scorer.iterator();
            final LeafBucketCollector inner =
                queue.getLeafCollector(ctx, getFirstPassCollector(docIdSetBuilder, indexSortPrefix.getSort().length));
            inner.setScorer(scorer);
            while (docIt.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
                inner.collect(docIt.docID());
            }
        }
    }

    @Override
    protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException {
        finishLeaf();

        boolean fillDocIdSet = deferredCollectors != NO_OP_COLLECTOR;

        Sort indexSortPrefix = buildIndexSortPrefix(ctx);
        int sortPrefixLen = computeSortPrefixLen(indexSortPrefix);

        SortedDocsProducer sortedDocsProducer = sortPrefixLen == 0 ?
            sources[0].createSortedDocsProducerOrNull(ctx.reader(), context.query()) : null;
        if (sortedDocsProducer != null) {
            // Visit documents sorted by the leading source of the composite definition and terminates
            // when the leading source value is guaranteed to be greater than the lowest composite bucket
            // in the queue.
            DocIdSet docIdSet = sortedDocsProducer.processLeaf(context.query(), queue, ctx, fillDocIdSet);
            if (fillDocIdSet) {
                entries.add(new Entry(ctx, docIdSet));
            }
            // We can bypass search entirely for this segment, the processing is done in the previous call.
            // Throwing this exception will terminate the execution of the search for this root aggregation,
            // see {@link MultiCollector} for more details on how we handle early termination in aggregations.
            earlyTerminated = true;
            throw new CollectionTerminatedException();
        } else {
            if (fillDocIdSet) {
                currentLeaf = ctx;
                docIdSetBuilder = new RoaringDocIdSet.Builder(ctx.reader().maxDoc());
            }
            if (rawAfterKey != null && sortPrefixLen > 0) {
                // We have an after key and index sort is applicable so we jump directly to the doc
                // that is after the index sort prefix using the rawAfterKey and we start collecting
                // document from there.
                processLeafFromQuery(ctx, indexSortPrefix);
                throw new CollectionTerminatedException();
            } else {
                final LeafBucketCollector inner = queue.getLeafCollector(ctx, getFirstPassCollector(docIdSetBuilder, sortPrefixLen));
                return new LeafBucketCollector() {
                    @Override
                    public void collect(int doc, long zeroBucket) throws IOException {
                        assert zeroBucket == 0L;
                        inner.collect(doc);
                    }
                };
            }
        }
    }

    /**
     * The first pass selects the top composite buckets from all matching documents.
     */
    private LeafBucketCollector getFirstPassCollector(RoaringDocIdSet.Builder builder, int indexSortPrefix) {
        return new LeafBucketCollector() {
            int lastDoc = -1;

            @Override
            public void collect(int doc, long bucket) throws IOException {
                try {
                    if (queue.addIfCompetitive(indexSortPrefix)) {
                        // Remember the doc (once) so the second pass can replay it.
                        if (builder != null && lastDoc != doc) {
                            builder.add(doc);
                            lastDoc = doc;
                        }
                    }
                } catch (CollectionTerminatedException exc) {
                    earlyTerminated = true;
                    throw exc;
                }
            }
        };
    }

    /**
     * Replay the documents that might contain a top bucket and pass top buckets to
     * the {@link #deferredCollectors}.
     */
    private void runDeferredCollections() throws IOException {
        final boolean needsScores = scoreMode().needsScores();
        Weight weight = null;
        if (needsScores) {
            Query query = context.query();
            weight = context.searcher().createWeight(context.searcher().rewrite(query), ScoreMode.COMPLETE, 1f);
        }
        deferredCollectors.preCollection();
        for (Entry entry : entries) {
            DocIdSetIterator docIdSetIterator = entry.docIdSet.iterator();
            if (docIdSetIterator == null) {
                continue;
            }
            final LeafBucketCollector subCollector = deferredCollectors.getLeafCollector(entry.context);
            final LeafBucketCollector collector = queue.getLeafCollector(entry.context, getSecondPassCollector(subCollector));
            DocIdSetIterator scorerIt = null;
            if (needsScores) {
                Scorer scorer = weight.scorer(entry.context);
                if (scorer != null) {
                    scorerIt = scorer.iterator();
                    subCollector.setScorer(scorer);
                }
            }
            int docID;
            while ((docID = docIdSetIterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
                if (needsScores) {
                    assert scorerIt != null && scorerIt.docID() < docID;
                    scorerIt.advance(docID);
                    // aggregations should only be replayed on matching documents
                    assert scorerIt.docID() == docID;
                }
                collector.collect(docID);
            }
        }
        deferredCollectors.postCollection();
    }

    /**
     * Replay the top buckets from the matching documents.
     */
    private LeafBucketCollector getSecondPassCollector(LeafBucketCollector subCollector) {
        return new LeafBucketCollector() {
            @Override
            public void collect(int doc, long zeroBucket) throws IOException {
                assert zeroBucket == 0;
                Integer slot = queue.compareCurrent();
                if (slot != null) {
                    // The candidate key is a top bucket.
                    // We can defer the collection of this document/bucket to the sub collector
                    subCollector.collect(doc, slot);
                }
            }
        };
    }

    // A fully collected leaf and the doc ids in it that may belong to a top bucket.
    private static class Entry {
        final LeafReaderContext context;
        final DocIdSet docIdSet;

        Entry(LeafReaderContext context, DocIdSet docIdSet) {
            this.context = context;
            this.docIdSet = docIdSet;
        }
    }
}
/*
 * Copyright 2004-2009 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.lucene.store.jdbc;

import java.util.Properties;

import org.apache.lucene.store.jdbc.handler.MarkDeleteFileEntryHandler;
import org.apache.lucene.store.jdbc.index.FetchOnBufferReadJdbcIndexInput;
import org.apache.lucene.store.jdbc.index.RAMAndFileJdbcIndexOutput;

/**
 * A file entry level settings. An abstract view of any type of setting that can be
 * used by the actual file entry handler that uses it.
 * <p/>
 * Holds the {@link #FILE_ENTRY_HANDLER_TYPE} that defines the type of the
 * {@link org.apache.lucene.store.jdbc.handler.FileEntryHandler} that will be created
 * and initialized with the settings.
 * <p/>
 * Default values for a newly instantiated instance are: {@link MarkDeleteFileEntryHandler} for
 * the {@link #FILE_ENTRY_HANDLER_TYPE} setting, {@link FetchOnBufferReadJdbcIndexInput} for the
 * {@link #INDEX_INPUT_TYPE_SETTING} setting, and {@link RAMAndFileJdbcIndexOutput} for the
 * {@link #INDEX_OUTPUT_TYPE_SETTING} setting.
 *
 * @author kimchy
 */
public class JdbcFileEntrySettings {

    /**
     * The class name of the {@link org.apache.lucene.store.IndexInput}. Only applies
     * to {@link org.apache.lucene.store.jdbc.handler.FileEntryHandler}s that use it.
     */
    public static final String INDEX_INPUT_TYPE_SETTING = "indexInput.type";

    /**
     * The class name of the {@link org.apache.lucene.store.IndexOutput}. Only applies
     * to {@link org.apache.lucene.store.jdbc.handler.FileEntryHandler}s that use it.
     */
    public static final String INDEX_OUTPUT_TYPE_SETTING = "indexOutput.type";

    /**
     * The class name of the {@link org.apache.lucene.store.jdbc.handler.FileEntryHandler}.
     */
    public static final String FILE_ENTRY_HANDLER_TYPE = "type";

    // Backing store for all settings; keys and values are plain strings.
    private final Properties settings = new Properties();

    /**
     * Creates a new file entry settings, and initializes it to default values.
     */
    public JdbcFileEntrySettings() {
        setClassSetting(JdbcFileEntrySettings.FILE_ENTRY_HANDLER_TYPE, MarkDeleteFileEntryHandler.class);
        setClassSetting(JdbcFileEntrySettings.INDEX_INPUT_TYPE_SETTING, FetchOnBufferReadJdbcIndexInput.class);
        setClassSetting(JdbcFileEntrySettings.INDEX_OUTPUT_TYPE_SETTING, RAMAndFileJdbcIndexOutput.class);
    }

    /**
     * Returns the inner java properties.
     */
    public Properties getProperties() {
        return settings;
    }

    /**
     * Returns the value that matches the given setting. <code>null</code> if no
     * setting is found.
     *
     * @param setting The setting name
     * @return The value of the setting, or <code>null</code> if none is found
     */
    public String getSetting(String setting) {
        return settings.getProperty(setting);
    }

    /**
     * Returns the value that matches the given setting. If none is found,
     * the default value is used.
     *
     * @param setting The setting name
     * @param defaultValue The default value to be used if no setting is found
     * @return The value of the setting, or defaultValue if none is found.
     */
    public String getSetting(String setting, String defaultValue) {
        return settings.getProperty(setting, defaultValue);
    }

    /**
     * Returns the float value that matches the given setting. If none is found,
     * the default value is used.
     *
     * @param setting The setting name
     * @param defaultValue The default value to be used if no setting is found
     * @return The value of the setting, or defaultValue if none is found.
     * @throws NumberFormatException if the stored value is not a parsable float
     */
    public float getSettingAsFloat(String setting, float defaultValue) {
        String sValue = getSetting(setting);
        if (sValue == null) {
            return defaultValue;
        }
        return Float.parseFloat(sValue);
    }

    /**
     * Returns the int value that matches the given setting. If none is found,
     * the default value is used.
     *
     * @param setting The setting name
     * @param defaultValue The default value to be used if no setting is found
     * @return The value of the setting, or defaultValue if none is found.
     * @throws NumberFormatException if the stored value is not a parsable int
     */
    public int getSettingAsInt(String setting, int defaultValue) {
        String sValue = getSetting(setting);
        if (sValue == null) {
            return defaultValue;
        }
        return Integer.parseInt(sValue);
    }

    /**
     * Returns the long value that matches the given setting. If none is found,
     * the default value is used.
     *
     * @param setting The setting name
     * @param defaultValue The default value to be used if no setting is found
     * @return The value of the setting, or defaultValue if none is found.
     * @throws NumberFormatException if the stored value is not a parsable long
     */
    public long getSettingAsLong(String setting, long defaultValue) {
        String sValue = getSetting(setting);
        if (sValue == null) {
            return defaultValue;
        }
        return Long.parseLong(sValue);
    }

    /**
     * Returns the class value that matches the given setting. If none is found,
     * the default value is used. Uses the current thread's context class loader.
     *
     * @param setting The setting name
     * @param defaultValue The default value to be used if no setting is found
     * @return The value of the setting, or defaultValue if none is found.
     * @throws ClassNotFoundException if the stored class name cannot be loaded
     */
    public Class<?> getSettingAsClass(String setting, Class<?> defaultValue) throws ClassNotFoundException {
        return getSettingAsClass(setting, defaultValue, Thread.currentThread().getContextClassLoader());
    }

    /**
     * Returns the class value that matches the given setting. If none is found,
     * the default value is used.
     *
     * @param setting The setting name
     * @param defaultValue The default value to be used if no setting is found
     * @param classLoader The class loader to be used to load the class
     * @return The value of the setting, or defaultValue if none is found.
     * @throws ClassNotFoundException if the stored class name cannot be loaded
     */
    public Class<?> getSettingAsClass(String setting, Class<?> defaultValue, ClassLoader classLoader)
            throws ClassNotFoundException {
        String sValue = getSetting(setting);
        if (sValue == null) {
            return defaultValue;
        }
        return Class.forName(sValue, true, classLoader);
    }

    /**
     * Returns the boolean value that matches the given setting. If none is found,
     * the default value is used.
     *
     * @param setting The setting name
     * @param defaultValue The default value to be used if no setting is found
     * @return The value of the setting, or defaultValue if none is found.
     */
    public boolean getSettingAsBoolean(String setting, boolean defaultValue) {
        String sValue = getSetting(setting);
        if (sValue == null) {
            return defaultValue;
        }
        // parseBoolean avoids the boxed Boolean created by Boolean.valueOf(...).booleanValue()
        return Boolean.parseBoolean(sValue);
    }

    /**
     * Stores a string setting. Returns this instance for call chaining.
     */
    public JdbcFileEntrySettings setSetting(String setting, String value) {
        this.settings.setProperty(setting, value);
        return this;
    }

    /**
     * Stores a boolean setting as its string form. Returns this instance for call chaining.
     */
    public JdbcFileEntrySettings setBooleanSetting(String setting, boolean value) {
        setSetting(setting, String.valueOf(value));
        return this;
    }

    /**
     * Stores a float setting as its string form. Returns this instance for call chaining.
     */
    public JdbcFileEntrySettings setFloatSetting(String setting, float value) {
        setSetting(setting, String.valueOf(value));
        return this;
    }

    /**
     * Stores an int setting as its string form. Returns this instance for call chaining.
     */
    public JdbcFileEntrySettings setIntSetting(String setting, int value) {
        setSetting(setting, String.valueOf(value));
        return this;
    }

    /**
     * Stores a long setting as its string form. Returns this instance for call chaining.
     */
    public JdbcFileEntrySettings setLongSetting(String setting, long value) {
        setSetting(setting, String.valueOf(value));
        return this;
    }

    /**
     * Stores a class setting as its fully qualified name. Returns this instance for call chaining.
     */
    public JdbcFileEntrySettings setClassSetting(String setting, Class<?> clazz) {
        setSetting(setting, clazz.getName());
        return this;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.carbondata.core.metadata.schema.table;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException;
import org.apache.carbondata.core.metadata.schema.datamap.DataMapClassProvider;
import org.apache.carbondata.core.metadata.schema.datamap.DataMapProperty;

import static org.apache.carbondata.core.constants.CarbonCommonConstants.INDEX_COLUMNS;

import com.google.gson.Gson;
import com.google.gson.annotations.SerializedName;
import org.apache.commons.lang.StringUtils;

/**
 * The schema of a datamap: its name, its provider, the table(s) it maps, and
 * free-form properties. Instances are both Java-serializable and
 * {@link Writable} (see {@link #write(DataOutput)} / {@link #readFields(DataInput)}).
 */
public class DataMapSchema implements Serializable, Writable {

  private static final long serialVersionUID = -8394577999061329687L;

  /** Name of this datamap. Also the identity used by {@link #equals(Object)}. */
  protected String dataMapName;

  /**
   * There are two kinds of DataMaps:
   * 1. Index DataMap: provider name is the class name of the implementation class of DataMapFactory
   * 2. OLAP DataMap: provider name is one of the {@link DataMapClassProvider#shortName}
   */
  // In the old schema version this field was named "className"; the alternate
  // name on the annotation lets Gson de-serialize old schemas into this field.
  @SerializedName(value = "providerName", alternate = "className")
  protected String providerName;

  /**
   * identifier of the mapped table
   */
  protected RelationIdentifier relationIdentifier;

  /**
   * Query which is used to create a datamap. This is optional in case of index datamap.
   */
  protected String ctasQuery;

  /**
   * relation properties; may be null until set via a setter or {@link #readFields(DataInput)}
   */
  protected Map<String, String> properties;

  /**
   * Identifiers of parent tables
   */
  protected List<RelationIdentifier> parentTables;

  /**
   * child table schema
   */
  protected TableSchema childSchema;

  public DataMapSchema(String dataMapName, String providerName) {
    this.dataMapName = dataMapName;
    this.providerName = providerName;
  }

  public DataMapSchema() {
  }

  public String getDataMapName() {
    return dataMapName;
  }

  public void setDataMapName(String dataMapName) {
    this.dataMapName = dataMapName;
  }

  public String getProviderName() {
    return providerName;
  }

  public void setProviderName(String providerName) {
    this.providerName = providerName;
  }

  public RelationIdentifier getRelationIdentifier() {
    return relationIdentifier;
  }

  public void setRelationIdentifier(RelationIdentifier relationIdentifier) {
    this.relationIdentifier = relationIdentifier;
  }

  public String getCtasQuery() {
    return ctasQuery;
  }

  public void setCtasQuery(String ctasQuery) {
    this.ctasQuery = ctasQuery;
  }

  public Map<String, String> getProperties() {
    return properties;
  }

  public void setProperties(Map<String, String> properties) {
    this.properties = properties;
  }

  /**
   * Deserializes a JSON object string into {@link #properties}. A null input
   * leaves the current properties untouched.
   */
  // Gson's fromJson(String, Class) cannot express Map<String, String>; the raw
  // Map.class cast is unavoidable here without a TypeToken, hence the suppression.
  @SuppressWarnings("unchecked")
  public void setPropertiesJson(Gson gson, String propertiesJson) {
    if (propertiesJson != null) {
      this.properties = gson.fromJson(propertiesJson, Map.class);
    }
  }

  public void setParentTables(List<RelationIdentifier> parentTables) {
    this.parentTables = parentTables;
  }

  public List<RelationIdentifier> getParentTables() {
    return parentTables;
  }

  public TableSchema getChildSchema() {
    return childSchema;
  }

  public void setChildSchema(TableSchema childSchema) {
    this.childSchema = childSchema;
  }

  /**
   * Return true if this datamap is an Index DataMap, i.e. its provider is not
   * one of the OLAP providers (preaggregate/timeseries/mv) and it has no CTAS query.
   */
  public boolean isIndexDataMap() {
    return !(providerName.equalsIgnoreCase(DataMapClassProvider.PREAGGREGATE.getShortName())
        || providerName.equalsIgnoreCase(DataMapClassProvider.TIMESERIES.getShortName())
        || providerName.equalsIgnoreCase(DataMapClassProvider.MV.getShortName())
        || ctasQuery != null);
  }

  /**
   * Return true if this datamap is lazy (created with DEFERRED REBUILD syntax).
   * Returns false when no properties have been set at all.
   */
  public boolean isLazy() {
    // Guard: properties may never have been initialized on this instance.
    if (properties == null) {
      return false;
    }
    String deferredRebuild = properties.get(DataMapProperty.DEFERRED_REBUILD);
    return deferredRebuild != null && deferredRebuild.equalsIgnoreCase("true");
  }

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeUTF(dataMapName);
    out.writeUTF(providerName);
    boolean isRelationIdentifierExists = null != relationIdentifier;
    out.writeBoolean(isRelationIdentifierExists);
    if (isRelationIdentifierExists) {
      this.relationIdentifier.write(out);
    }
    boolean isChildSchemaExists = null != this.childSchema;
    out.writeBoolean(isChildSchemaExists);
    if (isChildSchemaExists) {
      this.childSchema.write(out);
    }
    // Properties are written as a (size, key/value...) sequence; null maps are
    // written as size 0, so readFields always produces a non-null (maybe empty) map.
    if (properties == null) {
      out.writeShort(0);
    } else {
      out.writeShort(properties.size());
      for (Map.Entry<String, String> entry : properties.entrySet()) {
        out.writeUTF(entry.getKey());
        out.writeUTF(entry.getValue());
      }
    }
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    // Field order must mirror write(DataOutput) exactly.
    this.dataMapName = in.readUTF();
    this.providerName = in.readUTF();
    boolean isRelationIdentifierExists = in.readBoolean();
    if (isRelationIdentifierExists) {
      this.relationIdentifier = new RelationIdentifier(null, null, null);
      this.relationIdentifier.readFields(in);
    }
    boolean isChildSchemaExists = in.readBoolean();
    if (isChildSchemaExists) {
      this.childSchema = new TableSchema();
      this.childSchema.readFields(in);
    }
    int mapSize = in.readShort();
    this.properties = new HashMap<>(mapSize);
    for (int i = 0; i < mapSize; i++) {
      String key = in.readUTF();
      String value = in.readUTF();
      this.properties.put(key, value);
    }
  }

  /**
   * Return the list of column names configured via the INDEX_COLUMNS property.
   *
   * @throws MalformedDataMapCommandException if the property is missing or blank
   */
  public String[] getIndexColumns() throws MalformedDataMapCommandException {
    String columns = null;
    // Guard: properties may never have been initialized; treat as "not configured".
    if (properties != null) {
      columns = properties.get(INDEX_COLUMNS);
      if (columns == null) {
        // Accept the lower-cased property key as well (user-typed DMPROPERTIES).
        columns = properties.get(INDEX_COLUMNS.toLowerCase());
      }
    }
    if (columns == null) {
      throw new MalformedDataMapCommandException(INDEX_COLUMNS + " DMPROPERTY is required");
    } else if (StringUtils.isBlank(columns)) {
      throw new MalformedDataMapCommandException(INDEX_COLUMNS + " DMPROPERTY is blank");
    } else {
      // -1 keeps trailing empty strings so malformed input is surfaced downstream.
      return columns.split(",", -1);
    }
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    DataMapSchema that = (DataMapSchema) o;
    return Objects.equals(dataMapName, that.dataMapName);
  }

  @Override
  public int hashCode() {
    return Objects.hash(dataMapName);
  }
}
package net.ME1312.SubServers.Bungee.Host.Internal;

import net.ME1312.Galaxi.Library.Container.ContainedPair;
import net.ME1312.Galaxi.Library.Container.Container;
import net.ME1312.Galaxi.Library.Container.Value;
import net.ME1312.Galaxi.Library.Map.ObjectMap;
import net.ME1312.Galaxi.Library.Map.ObjectMapValue;
import net.ME1312.Galaxi.Library.Try;
import net.ME1312.Galaxi.Library.Util;
import net.ME1312.Galaxi.Library.Version.Version;
import net.ME1312.SubData.Server.SubDataClient;
import net.ME1312.SubServers.Bungee.Event.*;
import net.ME1312.SubServers.Bungee.Host.*;
import net.ME1312.SubServers.Bungee.Library.Compatibility.Logger;
import net.ME1312.SubServers.Bungee.Library.Exception.InvalidServerException;
import net.ME1312.SubServers.Bungee.Network.Packet.PacketOutExEditServer;
import net.ME1312.SubServers.Bungee.Network.Packet.PacketOutExEditServer.Edit;
import net.ME1312.SubServers.Bungee.SubAPI;
import net.ME1312.SubServers.Bungee.SubProxy;

import net.md_5.bungee.api.ChatColor;

import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.jar.JarFile;
import java.util.jar.JarInputStream;

/**
 * Internal SubServer Class.
 * <p>
 * Runs a subserver as a child OS process of this proxy: the process is spawned
 * via {@link ProcessBuilder}, commands are fed to it over its stdin, and its
 * lifecycle (start/stop/terminate/restart) is managed on a dedicated thread.
 */
public class InternalSubServer extends SubServerImpl {
    private InternalHost host;              // host this subserver belongs to
    private boolean enabled;                // local enabled flag (combined with host state in isEnabled())
    private Value<Boolean> log;             // mutable logging flag, shared with the logger
    private String dir;                     // directory path as configured (relative to host path)
    private File directory;                 // resolved working directory of the process
    private String executable;              // executable/launch string, parsed by Executable.parse
    private String stopcmd;                 // command written to the process's stdin to stop it
    private StopAction stopaction;          // what to do after the process exits
    private LinkedList<LoggedCommand> history; // commands sent so far; replayed into a freshly started process
    private Process process;                // live child process, or null when not running
    private InternalSubLogger logger;       // captures and relays the process's output
    private Thread thread;                  // thread running run(); null/dead when not running
    private BufferedWriter command;         // writer onto the process's stdin, or null when not running
    private boolean allowrestart;           // cleared when a stop was requested, so RESTART is skipped
    private boolean lock;                   // set between start() being accepted and the process spawning

    /**
     * Creates an Internal SubServer
     *
     * @param host Host
     * @param name Name
     * @param enabled Enabled Status
     * @param port Port Number
     * @param motd MOTD
     * @param log Logging Status
     * @param directory Directory
     * @param executable Executable String
     * @param stopcmd Stop Command
     * @param hidden Hidden Status
     * @param restricted Restricted Status
     * @throws InvalidServerException
     */
    public static InternalSubServer construct(InternalHost host, String name, boolean enabled, int port, String motd, boolean log, String directory,
                                              String executable, String stopcmd, boolean hidden, boolean restricted) throws InvalidServerException {
        try {
            // Prefer the newer super constructor; fall back to the boxed-port
            // variant when running against an older SubServerImpl binary.
            return new InternalSubServer(host, name, enabled, port, motd, log, directory, executable, stopcmd, hidden, restricted);
        } catch (NoSuchMethodError e) {
            return new InternalSubServer(host, name, enabled, (Integer) port, motd, log, directory, executable, stopcmd, hidden, restricted);
        }
    }

    /**
     * Super Method 2 (newest)
     * @see #construct(InternalHost, String, boolean, int, String, boolean, String, String, String, boolean, boolean) for method details
     */
    protected InternalSubServer(InternalHost host, String name, boolean enabled, int port, String motd, boolean log, String directory,
                                String executable, String stopcmd, boolean hidden, boolean restricted) throws InvalidServerException {
        super(host, name, port, motd, hidden, restricted);
        init(host, name, enabled, port, motd, log, directory, executable, stopcmd, hidden, restricted);
    }

    /**
     * Super Method 1 (oldest)
     * @see #construct(InternalHost, String, boolean, int, String, boolean, String, String, String, boolean, boolean) for method details
     */
    protected InternalSubServer(InternalHost host, String name, boolean enabled, Integer port, String motd, boolean log, String directory,
                                String executable, String stopcmd, boolean hidden, boolean restricted) throws InvalidServerException {
        super(host, name, port, motd, hidden, restricted);
        init(host, name, enabled, port, motd, log, directory, executable, stopcmd, hidden, restricted);
    }

    // Shared constructor body: stores configuration and, as a best-effort side
    // task, updates any bundled SubServers.Client jar found in the server's
    // plugins/ or mods/ folder when the embedded copy is newer.
    private void init(InternalHost host, String name, boolean enabled, int port, String motd, boolean log, String directory,
                      String executable, String stopcmd, boolean hidden, boolean restricted) throws InvalidServerException {
        Util.nullpo(host, name, enabled, port, motd, log, directory, executable, stopcmd, hidden, restricted);
        this.host = host;
        this.enabled = enabled;
        this.log = new Container<Boolean>(log);
        this.dir = directory;
        this.directory = new File(host.getPath(), directory);
        this.executable = executable;
        this.stopcmd = stopcmd;
        this.stopaction = StopAction.NONE;
        this.history = new LinkedList<LoggedCommand>();
        this.process = null;
        this.logger = new InternalSubLogger(null, this, getName(), this.log, null);
        this.thread = null;
        this.command = null;

        // Client jar may live in either location depending on server platform.
        final File[] locations = new File[]{
                new File(this.directory, "plugins/SubServers.Client.jar"),
                new File(this.directory, "mods/SubServers.Client.jar")
        };
        for (File location : locations) {
            if (location.exists()) {
                try {
                    JarInputStream updated = new JarInputStream(SubProxy.class.getResourceAsStream("/net/ME1312/SubServers/Bungee/Library/Files/client.jar"));
                    JarFile existing = new JarFile(location);
                    // Both jars must self-identify as SubServers.Client with a version
                    // (Specification-Title) before we consider replacing anything.
                    if (existing.getManifest().getMainAttributes().getValue("Implementation-Title") != null && existing.getManifest().getMainAttributes().getValue("Implementation-Title").startsWith("SubServers.Client")
                            && existing.getManifest().getMainAttributes().getValue("Specification-Title") != null
                            && updated.getManifest().getMainAttributes().getValue("Implementation-Title") != null && updated.getManifest().getMainAttributes().getValue("Implementation-Title").startsWith("SubServers.Client")
                            && updated.getManifest().getMainAttributes().getValue("Specification-Title") != null) {
                        if (new Version(existing.getManifest().getMainAttributes().getValue("Specification-Title")).compareTo(new Version(updated.getManifest().getMainAttributes().getValue("Specification-Title"))) < 0) {
                            // Embedded copy is newer: replace the installed jar.
                            location.delete();
                            Util.copyFromJar(SubProxy.class.getClassLoader(), "net/ME1312/SubServers/Bungee/Library/Files/client.jar", location.getPath());
                        }
                    }
                    existing.close();
                    updated.close();
                } catch (Throwable e) {
                    // Best-effort: a failed client update never blocks server creation.
                    System.out.println("Couldn't auto-update SubServers.Client for subserver: " + name);
                    e.printStackTrace();
                }
            }
        }
        this.lock = false;
    }

    // Package-private mutator for the inherited "registered" flag.
    void registered(boolean value) {
        registered = value;
    }

    // Package-private mutator for the inherited "updating" flag.
    void updating(boolean value) {
        updating = value;
    }

    // Body of the process-handler thread: spawns the child process, replays the
    // command history into its stdin, waits for exit, then performs the
    // configured StopAction. Runs until the process dies.
    private void run() {
        boolean locked = lock;
        allowrestart = true;
        started = false; // NOTE(review): inherited flag — presumably "reported as started"; confirm in SubServerImpl
        try {
            ProcessBuilder pb = new ProcessBuilder().command(Executable.parse(host.getCreator().getBashDirectory(), executable)).directory(directory);
            // Expose launch context to the child via environment variables.
            pb.environment().put("java", System.getProperty("java.home") + File.separator + "bin" + File.separator + "java");
            pb.environment().put("name", getName());
            pb.environment().put("host", host.getName());
            pb.environment().put("address", host.getAddress().getHostAddress());
            pb.environment().put("port", Integer.toString(getAddress().getPort()));
            logger.init();
            process = pb.start();
            Logger.get("SubServers").info("Now starting " + getName());
            logger.process = process;
            logger.start();
            lock = locked = false; // process exists now; release the start() lock
            command = new BufferedWriter(new OutputStreamWriter(process.getOutputStream()));
            // Replay previously issued commands into the new process.
            for (LoggedCommand command : history) if (process.isAlive()) {
                this.command.write(command.getCommand());
                this.command.newLine();
                this.command.flush();
            }
            if (process.isAlive()) process.waitFor();
        } catch (IOException | InterruptedException e) {
            e.printStackTrace();
            if (locked) lock = false;
            allowrestart = false;
        }
        logger.destroy();
        Logger.get("SubServers").info(getName() + " has stopped");
        process = null;
        command = null;
        history.clear();
        SubStoppedEvent event = new SubStoppedEvent(this);
        host.plugin.getPluginManager().callEvent(event);
        // Post-exit action: remove/recycle/delete the server, or schedule a restart.
        if (stopaction == StopAction.REMOVE_SERVER || stopaction == StopAction.RECYCLE_SERVER || stopaction == StopAction.DELETE_SERVER) {
            try {
                if (stopaction == StopAction.RECYCLE_SERVER) {
                    host.recycleSubServer(null, getName(), false, false);
                } else if (stopaction == StopAction.DELETE_SERVER) {
                    host.deleteSubServer(null, getName(), false, false);
                } else {
                    // REMOVE_SERVER: drop the persisted config entry, then deregister.
                    try {
                        if (host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                            host.plugin.servers.get().getMap("Servers").remove(getName());
                            host.plugin.servers.save();
                        }
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    host.removeSubServer(getName());
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        } else if (stopaction == StopAction.RESTART) {
            if (allowrestart) {
                // Restart from a fresh thread once this handler thread has fully exited.
                new Thread(() -> {
                    try {
                        while (thread != null && thread.isAlive()) {
                            Thread.sleep(250);
                        }
                        start();
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }, "SubServers.Bungee::Internal_Server_Restart_Handler(" + getName() + ')').start();
            }
        }
    }

    /**
     * Starts the subserver process on a new handler thread.
     * Refuses when locked, unavailable, disabled, already running, or when
     * incompatible servers are currently running; cancellable via SubStartEvent.
     */
    @Override
    public boolean start(UUID player) {
        if (!lock && isAvailable() && isEnabled() && !(thread != null && thread.isAlive()) && getCurrentIncompatibilities().size() == 0) {
            lock = true; // held until run() has actually spawned the process
            SubStartEvent event = new SubStartEvent(player, this);
            host.plugin.getPluginManager().callEvent(event);
            if (!event.isCancelled()) {
                (thread = new Thread(this::run, "SubServers.Bungee::Internal_Server_Process_Handler(" + getName() + ')')).start();
                return true;
            } else {
                lock = false;
                return false;
            }
        } else return false;
    }

    /**
     * Requests a graceful stop by writing the stop command to the process's
     * stdin. Cancellable via SubStopEvent; also suppresses any pending RESTART.
     */
    @Override
    public boolean stop(UUID player) {
        if (thread != null && thread.isAlive()) {
            SubStopEvent event = new SubStopEvent(player, this, false);
            host.plugin.getPluginManager().callEvent(event);
            if (!event.isCancelled()) {
                try {
                    allowrestart = false;
                    history.add(new LoggedCommand(player, stopcmd));
                    if (process != null && process.isAlive()) {
                        command.write(stopcmd);
                        command.newLine();
                        command.flush();
                    }
                    return true;
                } catch (IOException e) {
                    e.printStackTrace();
                    return false;
                }
            } else return false;
        } else return false;
    }

    /**
     * Forcibly kills the subserver process. Cancellable via SubStopEvent.
     */
    @Override
    public boolean terminate(UUID player) {
        if (thread != null && thread.isAlive()) {
            SubStopEvent event = new SubStopEvent(player, this, true);
            host.plugin.getPluginManager().callEvent(event);
            if (!event.isCancelled()) {
                allowrestart = false;
                if (process != null && process.isAlive()) Executable.terminate(process);
                return true;
            } else return false;
        } else return false;
    }

    /**
     * Sends a command line to the subserver's stdin. Player-issued commands
     * matching DISALLOWED_COMMANDS are rejected; sending the stop command
     * suppresses any pending RESTART.
     */
    @Override
    public boolean command(UUID player, String command) {
        Util.nullpo(command);
        if (thread != null && thread.isAlive()) {
            SubSendCommandEvent event = new SubSendCommandEvent(player, this, command, null);
            host.plugin.getPluginManager().callEvent(event);
            if (!event.isCancelled() && (player == null || !DISALLOWED_COMMANDS.matcher(command).find())) {
                try {
                    if (event.getCommand().equalsIgnoreCase(stopcmd)) allowrestart = false;
                    history.add(new LoggedCommand(player, event.getCommand()));
                    if (process != null && process.isAlive()) {
                        this.command.write(event.getCommand());
                        this.command.newLine();
                        this.command.flush();
                    }
                    return true;
                } catch (IOException e) {
                    e.printStackTrace();
                    return false;
                }
            } else return false;
        } else return false;
    }

    // Applies a batch of key/value edits to this server. Some keys ("name",
    // "host", "port") require re-constructing the server; in that case the new
    // instance becomes `forward`, this loop breaks, and the remaining (pending)
    // edits are forwarded to it. Returns the number of applied edits, or -1
    // when the server is unavailable. When `perma` is set, changes are also
    // persisted to the servers config file.
    @SuppressWarnings({"deprecation", "unchecked"})
    @Override
    protected int edit(UUID player, ObjectMap<String> edit, boolean perma) {
        if (isAvailable()) {
            int c = 0;                      // count of successfully applied edits
            boolean state = isRunning();    // desired final running state; may be overridden by "state" key
            SubServer forward = null;       // replacement server when a rebuild was required
            ObjectMap<String> pending = edit.clone(); // edits not yet consumed (for forwarding)
            for (String key : edit.getKeys()) {
                pending.remove(key);
                ObjectMapValue value = edit.get(key);
                boolean allowed = true;
                if (perma) {
                    // Permanent edits are individually cancellable by event listeners.
                    SubEditServerEvent event = new SubEditServerEvent(player, this, new ContainedPair<String, ObjectMapValue>(key, value));
                    host.plugin.getPluginManager().callEvent(event);
                    allowed = !event.isCancelled();
                }
                if (allowed) {
                    try {
                        switch (key.toLowerCase()) {
                            case "name":
                                // Rename requires deregistering and rebuilding the server.
                                if (value.isString() && host.removeSubServer(player, getName())) {
                                    SubServer server = host.constructSubServer(value.asString(), isEnabled(), getAddress().getPort(), getMotd(), isLogging(), getPath(), getExecutable(), getStopCommand(), isHidden(), isRestricted());
                                    if (server != null) {
                                        if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                            // Move the persisted config to the new name.
                                            ObjectMap<String> config = this.host.plugin.servers.get().getMap("Servers").getMap(getName());
                                            this.host.plugin.servers.get().getMap("Servers").remove(getName());
                                            this.host.plugin.servers.get().getMap("Servers").set(server.getName(), config);
                                            this.host.plugin.servers.save();
                                        }
                                        forward = server;
                                        c++;
                                    }
                                }
                                break;
                            case "display":
                                if (value.isString()) {
                                    setDisplayName(value.asString());
                                    logger.name = getDisplayName();
                                    if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                        if (getName().equals(getDisplayName())) {
                                            // Display equal to the name is the default: remove the override.
                                            this.host.plugin.servers.get().getMap("Servers").getMap(getName()).remove("Display");
                                        } else {
                                            this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Display", getDisplayName());
                                        }
                                        this.host.plugin.servers.save();
                                    }
                                    c++;
                                }
                                break;
                            case "enabled":
                                if (value.isBoolean()) {
                                    enabled = value.asBoolean();
                                    if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                        this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Enabled", isEnabled());
                                        this.host.plugin.servers.save();
                                    }
                                    c++;
                                }
                                break;
                            case "group":
                                if (value.isList()) {
                                    // Superclass "groups" field has no setter; written via reflection.
                                    Util.reflect(ServerImpl.class.getDeclaredField("groups"), this, value.asStringList());
                                    if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                        this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Group", value.asStringList());
                                        this.host.plugin.servers.save();
                                    }
                                    c++;
                                }
                                break;
                            case "host":
                                // Moving host requires deregistering and rebuilding on the target host.
                                if (value.isString() && host.removeSubServer(player, getName())) {
                                    SubServer server = this.host.plugin.api.getHost(value.asString()).constructSubServer(getName(), isEnabled(), getAddress().getPort(), getMotd(), isLogging(), getPath(), getExecutable(), getStopCommand(), isHidden(), isRestricted());
                                    if (server != null) {
                                        if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                            this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Host", server.getHost().getName());
                                            this.host.plugin.servers.save();
                                        }
                                        forward = server;
                                        c++;
                                    }
                                }
                                break;
                            case "template":
                                if (value.isString()) {
                                    setTemplate(value.asString());
                                    if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                        this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Template", value.asString());
                                        this.host.plugin.servers.save();
                                    }
                                    c++;
                                }
                                break;
                            case "port":
                                // Changing the port also requires a rebuild.
                                if (value.isNumber() && host.removeSubServer(player, getName())) {
                                    SubServer server = host.constructSubServer(getName(), isEnabled(), value.asInt(), getMotd(), isLogging(), getPath(), getExecutable(), getStopCommand(), isHidden(), isRestricted());
                                    if (server != null) {
                                        if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                            this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Port", server.getAddress().getPort());
                                            this.host.plugin.servers.save();
                                        }
                                        forward = server;
                                        c++;
                                    }
                                }
                                break;
                            case "motd":
                                if (value.isString()) {
                                    // Stored raw; applied with & color codes translated and escapes expanded.
                                    setMotd(ChatColor.translateAlternateColorCodes('&', Util.unescapeJavaString(value.asString())));
                                    if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                        this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Motd", value.asString());
                                        this.host.plugin.servers.save();
                                    }
                                    c++;
                                }
                                break;
                            case "log":
                                if (value.isBoolean()) {
                                    log.value(value.asBoolean());
                                    if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                        this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Log", isLogging());
                                        this.host.plugin.servers.save();
                                    }
                                    c++;
                                }
                                break;
                            case "dir":
                            case "directory":
                                if (value.isString()) {
                                    // Directory can't change under a live process: stop it first.
                                    if (isRunning()) {
                                        stop(player);
                                        waitFor();
                                    }
                                    dir = value.asString();
                                    directory = new File(getHost().getPath(), value.asString());
                                    if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                        this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Directory", getPath());
                                        this.host.plugin.servers.save();
                                    }
                                    c++;
                                }
                                break;
                            case "exec":
                            case "executable":
                                if (value.isString()) {
                                    // Executable can't change under a live process: stop it first.
                                    if (isRunning()) {
                                        stop(player);
                                        waitFor();
                                    }
                                    executable = value.asString();
                                    if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                        this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Executable", value.asString());
                                        this.host.plugin.servers.save();
                                    }
                                    c++;
                                }
                                break;
                            case "stop-cmd":
                            case "stop-command":
                                if (value.isString()) {
                                    stopcmd = value.asString();
                                    if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                        this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Stop-Command", getStopCommand());
                                        this.host.plugin.servers.save();
                                    }
                                    c++;
                                }
                                break;
                            case "stop-action":
                                if (value.isString()) {
                                    // Tolerant parse: "-" and " " normalize to "_"; invalid names are ignored.
                                    StopAction action = Try.all.get(() -> StopAction.valueOf(value.asString().toUpperCase().replace('-', '_').replace(' ', '_')));
                                    if (action != null) {
                                        stopaction = action;
                                        if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                            this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Stop-Action", getStopAction().toString());
                                            this.host.plugin.servers.save();
                                        }
                                        c++;
                                    }
                                }
                                break;
                            case "state":
                                // Not applied immediately; consumed after the loop (and forwarded on rebuild).
                                if (value.isBoolean()) {
                                    state = value.asBoolean();
                                }
                                break;
                            case "auto-run":
                            case "run-on-launch":
                                if (value.isBoolean()) {
                                    // Persisted-only setting; no in-memory field backs it.
                                    if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                        this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Run-On-Launch", value.asBoolean());
                                        this.host.plugin.servers.save();
                                    }
                                    c++;
                                }
                                break;
                            case "incompatible":
                                if (value.isList()) {
                                    // Replace the incompatibility list: clear all, then re-add named servers.
                                    for (SubServer oserver : getIncompatibilities()) toggleCompatibility(oserver);
                                    for (String oname : (List<String>) value.asStringList()) {
                                        SubServer oserver = host.plugin.api.getSubServer(oname);
                                        if (oserver != null && isCompatible(oserver)) toggleCompatibility(oserver);
                                    }
                                    if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                        this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Incompatible", value.asStringList());
                                        this.host.plugin.servers.save();
                                    }
                                    c++;
                                }
                                break;
                            case "restricted":
                                if (value.isBoolean()) {
                                    setRestricted(value.asBoolean());
                                    if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                        this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Restricted", isRestricted());
                                        this.host.plugin.servers.save();
                                    }
                                    c++;
                                }
                                break;
                            case "hidden":
                                if (value.isBoolean()) {
                                    setHidden(value.asBoolean());
                                    if (perma && this.host.plugin.servers.get().getMap("Servers").getKeys().contains(getName())) {
                                        this.host.plugin.servers.get().getMap("Servers").getMap(getName()).set("Hidden", isHidden());
                                        this.host.plugin.servers.save();
                                    }
                                    c++;
                                }
                                break;
                            case "whitelist":
                                if (value.isList()) {
                                    // Superclass "whitelist" field has no setter; written via reflection,
                                    // then broadcast to connected proxies over SubData.
                                    Util.reflect(ServerImpl.class.getDeclaredField("whitelist"), this, value.asUUIDList());
                                    if (isRegistered()) for (Proxy proxy : SubAPI.getInstance().getProxies().values()) if (proxy.getSubData()[0] != null) {
                                        ((SubDataClient) proxy.getSubData()[0]).sendPacket(new PacketOutExEditServer(this, Edit.WHITELIST_SET, value.asUUIDList()));
                                    }
                                    c++;
                                }
                                break;
                        }
                        if (forward != null) {
                            // A rebuild happened: copy derived state onto the replacement,
                            // register it, and forward the remaining edits to it.
                            forward.setStopAction(getStopAction());
                            if (!getName().equals(getDisplayName())) forward.setDisplayName(getDisplayName());
                            forward.setTemplate(getTemplate());
                            List<String> groups = new ArrayList<String>();
                            groups.addAll(getGroups());
                            for (String group : groups) {
                                removeGroup(group);
                                forward.addGroup(group);
                            }
                            for (SubServer server : getIncompatibilities()) {
                                toggleCompatibility(server);
                                forward.toggleCompatibility(server);
                            }
                            for (String extra : getExtra().getKeys()) forward.addExtra(extra, getExtra(extra));
                            forward.getHost().addSubServer(player, forward);
                            if (state) pending.set("state", true);
                            c += (perma)?forward.permaEdit(player, pending):forward.edit(player, pending);
                            break; // exits the key loop; remaining edits were just forwarded
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
            // Honor the requested running state when no rebuild took over.
            if (!isRunning() && forward == null && state) start(player);
            return c;
        } else return -1;
    }

    /**
     * Blocks until the process-handler thread has exited.
     */
    @Override
    public void waitFor() throws InterruptedException {
        while (thread != null && thread.isAlive()) {
            Thread.sleep(250);
        }
    }

    // Running means a live process, or the start() lock held (spawn in progress).
    @Override
    public boolean isRunning() {
        return (process != null && process.isAlive()) || lock;
    }

    @Override
    public void setDisplayName(String value) {
        super.setDisplayName(value);
        logger.name = getDisplayName(); // keep the logger's label in sync
    }

    @Override
    public Host getHost() {
        return host;
    }

    // Effective enabled state: both this server and its host must be enabled.
    @Override
    public boolean isEnabled() {
        return enabled && host.isEnabled();
    }

    @Override
    public void setEnabled(boolean value) {
        Util.nullpo(value);
        enabled = value;
    }

    @Override
    public boolean isLogging() {
        return log.value();
    }

    @Override
    public void setLogging(boolean value) {
        Util.nullpo(value);
        log.value(value);
    }

    @Override
    public SubLogger getLogger() {
        return logger;
    }

    // Defensive copy: callers cannot mutate the internal history.
    @Override
    public LinkedList<LoggedCommand> getCommandHistory() {
        return new LinkedList<LoggedCommand>(history);
    }

    @Override
    public String getPath() {
        return dir;
    }

    @Override
    public String getExecutable() {
        return executable;
    }

    @Override
    public String getStopCommand() {
        return stopcmd;
    }

    @Override
    public void setStopCommand(String value) {
        Util.nullpo(value);
        stopcmd = value;
    }

    @Override
    public StopAction getStopAction() {
        return stopaction;
    }

    @Override
    public void setStopAction(StopAction action) {
        Util.nullpo(action);
        stopaction = action;
    }
}
/*
 * Copyright 2016 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.config;

import com.thoughtworks.go.config.exceptions.GoConfigInvalidException;
import com.thoughtworks.go.config.registry.ConfigElementImplementationRegistry;
import com.thoughtworks.go.security.GoCipher;
import com.thoughtworks.go.util.GoConstants;
import com.thoughtworks.go.util.XmlUtils;
import org.apache.log4j.Logger;
import org.jdom.*;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import static com.thoughtworks.go.config.ConfigCache.annotationFor;
import static com.thoughtworks.go.config.ConfigCache.isAnnotationPresent;
import static com.thoughtworks.go.util.ExceptionUtils.bomb;
import static com.thoughtworks.go.util.ExceptionUtils.bombIf;
import static com.thoughtworks.go.util.ObjectUtil.nullSafeEquals;
import static com.thoughtworks.go.util.XmlUtils.buildXmlDocument;
import static java.text.MessageFormat.format;
import static org.apache.commons.io.IOUtils.toInputStream;

/**
 * Serializes the Go configuration model to XML by reflecting over fields annotated with
 * {@code @ConfigTag}, {@code @ConfigAttribute}, {@code @ConfigSubtag}, {@code @ConfigValue}
 * and {@code @ConfigCollection}.
 * <p>
 * Fields whose value equals the value on a freshly-instantiated default object are omitted
 * from the output unless the annotation demands {@code alwaysWrite}.
 */
public class MagicalGoConfigXmlWriter {
    private static final Logger LOGGER = Logger.getLogger(MagicalGoConfigXmlWriter.class);
    public static final String XML_NS = "http://www.w3.org/2001/XMLSchema-instance";

    private ConfigCache configCache;
    private final ConfigElementImplementationRegistry registry;

    public MagicalGoConfigXmlWriter(ConfigCache configCache, ConfigElementImplementationRegistry registry) {
        this.configCache = configCache;
        this.registry = registry;
    }

    /**
     * Builds an empty {@code <cruise>} document carrying the xsi namespace declaration,
     * any plugin-registered namespaces, the schema location(s) and the current schema version.
     */
    private Document createEmptyCruiseConfigDocument() {
        Element root = new Element("cruise");
        Namespace xsiNamespace = Namespace.getNamespace("xsi", XML_NS);
        root.addNamespaceDeclaration(xsiNamespace);
        registry.registerNamespacesInto(root);
        root.setAttribute("noNamespaceSchemaLocation", "cruise-config.xsd", xsiNamespace);
        String xsds = registry.xsds();
        if (!xsds.isEmpty()) {
            root.setAttribute("schemaLocation", xsds, xsiNamespace);
        }
        root.setAttribute("schemaVersion", Integer.toString(GoConstants.CONFIG_SCHEMA_VERSION));
        return new Document(root);
    }

    /**
     * Writes the full cruise config to {@code output} as XML.
     *
     * @param configForEdit the config to serialize; must be a purely local config (no merged partials)
     * @param output destination stream; not closed by this method
     * @param skipPreprocessingAndValidation when true, skips the preprocess/validate pass before writing
     * @throws GoConfigInvalidException if the config contains remote (merged) partials
     * @throws Exception if XSD or DOM validation of the generated document fails
     */
    public void write(CruiseConfig configForEdit, OutputStream output, boolean skipPreprocessingAndValidation) throws Exception {
        LOGGER.debug("[Serializing Config] Starting to write. Validation skipped? " + skipPreprocessingAndValidation);
        MagicalGoConfigXmlLoader loader = new MagicalGoConfigXmlLoader(configCache, registry);
        if (!configForEdit.getOrigin().isLocal()) {
            // Fixed typo in the user-facing message ("patials" -> "partials").
            throw new GoConfigInvalidException(configForEdit, "Attempted to save merged configuration with partials");
        }
        if (!skipPreprocessingAndValidation) {
            loader.preprocessAndValidate(configForEdit);
            LOGGER.debug("[Serializing Config] Done with cruise config validators.");
        }
        Document document = createEmptyCruiseConfigDocument();
        write(configForEdit, document.getRootElement(), configCache, registry);

        LOGGER.debug("[Serializing Config] XSD and DOM validation.");
        verifyXsdValid(document);
        MagicalGoConfigXmlLoader.validateDom(document.getRootElement(), registry);
        LOGGER.info("[Serializing Config] Generating config partial.");

        XmlUtils.writeXml(document, output);
        LOGGER.debug("[Serializing Config] Finished writing config partial.");
    }

    /** Round-trips the document through the XSD-validating parser to catch schema violations before writing. */
    private void verifyXsdValid(Document document) throws Exception {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        XmlUtils.writeXml(document, buffer);
        InputStream content = toInputStream(buffer.toString());
        buildXmlDocument(content, GoConfigSchema.getCurrentSchema(), registry.xsds());
    }

    /**
     * Serializes a single config domain object (which must carry {@code @ConfigTag}) to an XML string.
     * If the object is itself a {@code @ConfigCollection}, its items are serialized as child elements.
     */
    public String toXmlPartial(Object domainObject) {
        bombIf(!isAnnotationPresent(domainObject.getClass(), ConfigTag.class), "Object " + domainObject + " does not have a ConfigTag");
        Element element = elementFor(domainObject.getClass(), configCache);
        write(domainObject, element, configCache, registry);
        if (isAnnotationPresent(domainObject.getClass(), ConfigCollection.class) && domainObject instanceof Collection) {
            for (Object item : (Collection) domainObject) {
                if (isAnnotationPresent(item.getClass(), ConfigCollection.class) && item instanceof Collection) {
                    // Nested collections are flattened into the current element rather than wrapped.
                    new ExplicitCollectionXmlFieldWithValue(domainObject.getClass(), null, (Collection) item, configCache, registry).populate(element);
                    continue;
                }
                Element childElement = elementFor(item.getClass(), configCache);
                element.addContent(childElement);
                write(item, childElement, configCache, registry);
            }
        }
        try {
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            XmlUtils.writeXml(element, output);
            return output.toString();
        } catch (IOException e) {
            // Was: bomb("Unable to write xml to String") — the cause is now preserved.
            throw bomb("Unable to write xml to String", e);
        }
    }

    private static Namespace namespaceFor(ConfigTag annotation) {
        return Namespace.getNamespace(annotation.namespacePrefix(), annotation.namespaceURI());
    }

    /** Populates {@code element} from every non-default (or always-write) field of {@code o}. */
    private static void write(Object o, Element element, ConfigCache configCache, final ConfigElementImplementationRegistry registry) {
        for (XmlFieldWithValue xmlFieldWithValue : allFields(o, configCache, registry)) {
            if (xmlFieldWithValue.isDefault() && !xmlFieldWithValue.alwaysWrite()) {
                continue;
            }
            xmlFieldWithValue.populate(element);
        }
    }

    /** Classifies each annotated field of {@code o} into the matching XmlFieldWithValue flavour. */
    private static List<XmlFieldWithValue> allFields(Object o, ConfigCache configCache, final ConfigElementImplementationRegistry registry) {
        List<XmlFieldWithValue> list = new ArrayList<>();
        Class originalClass = o.getClass();
        for (GoConfigFieldWriter field : allFieldsWithInherited(originalClass, o, configCache, registry)) {
            Field configField = field.getConfigField();
            if (field.isImplicitCollection()) {
                list.add(new ImplicitCollectionXmlFieldWithValue(originalClass, configField, (Collection) field.getValue(), configCache, registry));
            } else if (field.isConfigCollection()) {
                list.add(new ExplicitCollectionXmlFieldWithValue(originalClass, configField, (Collection) field.getValue(), configCache, registry));
            } else if (field.isSubtag()) {
                list.add(new SubTagXmlFieldWithValue(originalClass, configField, field.getValue(), configCache, registry));
            } else if (field.isAttribute()) {
                final Object value = field.getValue();
                list.add(new AttributeXmlFieldWithValue(originalClass, configField, value, configCache, registry));
            } else if (field.isConfigValue()) {
                list.add(new ValueXmlFieldWithValue(configField, field.getValue(), originalClass, configCache, registry));
            }
        }
        return list;
    }

    private static List<GoConfigFieldWriter> allFieldsWithInherited(Class aClass, Object o, ConfigCache configCache, final ConfigElementImplementationRegistry registry) {
        return new GoConfigClassWriter(aClass, configCache, registry).getAllFields(o);
    }

    /**
     * One reflected field together with its current value; subclasses know how to
     * render that value into a JDOM element.
     */
    private abstract static class XmlFieldWithValue<T> {
        protected final Field field;
        protected final Class originalClass;
        protected final T value;
        protected final ConfigCache configCache;
        protected final ConfigElementImplementationRegistry registry;

        private XmlFieldWithValue(Class originalClass, Field field, T value, ConfigCache configCache, ConfigElementImplementationRegistry registry) {
            this.originalClass = originalClass;
            this.value = value;
            this.field = field;
            this.configCache = configCache;
            this.registry = registry;
        }

        /**
         * True when the current value equals the value on a default-constructed instance
         * of the declaring class. Any reflection/instantiation failure is deliberately
         * treated as "not default" so the field is still written out.
         */
        public boolean isDefault() {
            try {
                Object defaultObject = ConfigElementInstantiator.instantiateConfigElement(new GoCipher(), originalClass);
                Object defaultValue = field.get(defaultObject);
                return nullSafeEquals(value, defaultValue);
            } catch (Exception e) {
                return false;
            }
        }

        public abstract void populate(Element parent);

        public abstract boolean alwaysWrite();

        /**
         * Renders the value as a string. When the value's class carries
         * {@code @ConfigAttributeValue}, the named (possibly inherited) field is read
         * reflectively; otherwise {@code toString()} is used.
         */
        protected String valueString() {
            String valueString = null;
            ConfigAttributeValue attributeValue = value.getClass().getAnnotation(ConfigAttributeValue.class);
            if (attributeValue != null) {
                try {
                    Field field = getField(value.getClass(), attributeValue);
                    field.setAccessible(true);
                    valueString = field.get(value).toString();
                } catch (NoSuchFieldException | IllegalAccessException e) {
                    // Was a bare bomb(e) call; made the throw explicit so the control flow is obvious.
                    throw bomb(e);
                }
            } else {
                valueString = value.toString();
            }
            return valueString;
        }

        /** Walks up the class hierarchy looking for the field named by the annotation. */
        private Field getField(Class clazz, ConfigAttributeValue attributeValue) throws NoSuchFieldException {
            try {
                return clazz.getDeclaredField(attributeValue.fieldName());
            } catch (NoSuchFieldException e) {
                Class klass = clazz.getSuperclass();
                if (klass == null) {
                    throw e;
                }
                return getField(klass, attributeValue);
            }
        }
    }

    private static Element elementFor(Class<?> aClass, ConfigCache configCache) {
        ConfigTag configTag = annotationFor(aClass, ConfigTag.class);
        if (configTag == null) {
            throw bomb(format("Cannot get config tag for {0}", aClass));
        }
        return new Element(configTag.value(), namespaceFor(configTag));
    }

    private static class SubTagXmlFieldWithValue extends XmlFieldWithValue<Object> {
        public SubTagXmlFieldWithValue(Class oringinalClass, Field field, Object value, ConfigCache configCache, final ConfigElementImplementationRegistry registry) {
            super(oringinalClass, field, value, configCache, registry);
        }

        public void populate(Element parent) {
            Element child = elementFor(value.getClass(), configCache);
            parent.addContent(child);
            write(value, child, configCache, registry);
        }

        public boolean alwaysWrite() {
            return false;
        }
    }

    private static class AttributeXmlFieldWithValue extends XmlFieldWithValue<Object> {
        public AttributeXmlFieldWithValue(Class oringinalClass, Field field, Object current, ConfigCache configCache, final ConfigElementImplementationRegistry registry) {
            super(oringinalClass, field, current, configCache, registry);
        }

        public void populate(Element parent) {
            // Original had a redundant nested !isDefault() check making its second throw
            // unreachable; collapsed to the single reachable failure path.
            if (value == null && !isDefault()) {
                throw bomb(
                        format("Try to write null value into configuration! [{0}.{1}]", field.getDeclaringClass().getName(), field.getName()));
            }
            String attributeName = field.getAnnotation(ConfigAttribute.class).value();
            parent.setAttribute(new Attribute(attributeName, valueString()));
        }

        public boolean alwaysWrite() {
            return field.getAnnotation(ConfigAttribute.class).alwaysWrite();
        }
    }

    private static class ImplicitCollectionXmlFieldWithValue extends XmlFieldWithValue<Collection> {
        public ImplicitCollectionXmlFieldWithValue(
                Class oringinalClass, Field field, Collection value, ConfigCache configCache, final ConfigElementImplementationRegistry registry) {
            super(oringinalClass, field, value, configCache, registry);
        }

        /** Items go directly into {@code parent} with no wrapping container element. */
        public void populate(Element parent) {
            new CollectionXmlFieldWithValue(value, parent, originalClass, configCache, registry).populate();
        }

        public boolean alwaysWrite() {
            return false;
        }
    }

    /** Shared item-serialization logic for implicit and explicit collections. */
    private static class CollectionXmlFieldWithValue {
        private final Collection value;
        private final Element parent;
        private final Class originalClass;
        private final ConfigCache configCache;
        private final ConfigElementImplementationRegistry registry;

        public CollectionXmlFieldWithValue(Collection value, Element parent, Class originalClass, ConfigCache configCache, final ConfigElementImplementationRegistry registry) {
            this.value = value;
            this.parent = parent;
            this.originalClass = originalClass;
            this.configCache = configCache;
            this.registry = registry;
        }

        public void populate() {
            // Items already present on a default-constructed collection are not re-written.
            Collection defaultCollection = generateDefaultCollection();

            // The collection class itself may carry attribute/value fields of its own.
            for (XmlFieldWithValue xmlFieldWithValue : allFields(value, configCache, registry)) {
                if (!xmlFieldWithValue.isDefault()) {
                    xmlFieldWithValue.populate(parent);
                }
            }

            for (Object item : value) {
                if (defaultCollection.contains(item)) {
                    continue;
                }
                if (item.getClass().isAnnotationPresent(ConfigCollection.class) && item instanceof Collection) {
                    new ExplicitCollectionXmlFieldWithValue(originalClass, null, (Collection) item, configCache, registry).populate(parent);
                    continue;
                }
                Element childElement = elementFor(item.getClass(), configCache);
                parent.addContent(childElement);
                write(item, childElement, configCache, registry);
            }
        }

        /** Instantiates an empty collection of the same runtime class, used as the "default" baseline. */
        protected Collection generateDefaultCollection() {
            Class<? extends Collection> clazz = value.getClass();
            try {
                return clazz.newInstance();
            } catch (Exception e) {
                throw bomb("Error creating default instance of " + clazz.getName(), e);
            }
        }
    }

    private static class ExplicitCollectionXmlFieldWithValue extends XmlFieldWithValue<Collection> {
        public ExplicitCollectionXmlFieldWithValue(Class oringinalClass, Field field, Collection value, ConfigCache configCache, final ConfigElementImplementationRegistry registry) {
            super(oringinalClass, field, value, configCache, registry);
        }

        /** Items are wrapped in a container element named after the collection's ConfigTag. */
        public void populate(Element parent) {
            Element containerElement = elementFor(value.getClass(), configCache);
            new CollectionXmlFieldWithValue(value, containerElement, originalClass, configCache, registry).populate();
            parent.addContent(containerElement);
        }

        public boolean alwaysWrite() {
            return false;
        }
    }

    private static class ValueXmlFieldWithValue extends XmlFieldWithValue<Object> {
        private boolean requireCdata;

        public ValueXmlFieldWithValue(Field field, Object value, Class oringinalClass, ConfigCache configCache, final ConfigElementImplementationRegistry registry) {
            super(oringinalClass, field, value, configCache, registry);
            ConfigValue configValue = field.getAnnotation(ConfigValue.class);
            requireCdata = configValue.requireCdata();
        }

        public void populate(Element parent) {
            if (requireCdata) {
                parent.addContent(new CDATA(valueString()));
            } else {
                parent.setText(valueString());
            }
        }

        public boolean alwaysWrite() {
            return false;
        }
    }
}
package org.hl7.fhir.instance.model;

/*
  Copyright (c) 2011+, HL7, Inc.
  All rights reserved.

  Redistribution and use in source and binary forms, with or without modification,
  are permitted provided that the following conditions are met:

   * Redistributions of source code must retain the above copyright notice, this
     list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above copyright notice,
     this list of conditions and the following disclaimer in the documentation
     and/or other materials provided with the distribution.
   * Neither the name of HL7 nor the names of its contributors may be used to
     endorse or promote products derived from this software without specific
     prior written permission.

  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
  ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
  IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
  INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
  NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
  PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
  WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
  ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  POSSIBILITY OF SUCH DAMAGE.
*/

// Generated on Wed, Feb 18, 2015 12:09-0500 for FHIR v0.4.0

import java.util.*;

import org.hl7.fhir.instance.model.annotations.Child;
import org.hl7.fhir.instance.model.annotations.Description;
import org.hl7.fhir.instance.model.annotations.DatatypeDef;

/**
 * A time period defined by a start and end date and optionally time.
 */
@DatatypeDef(name="Period")
public class Period extends Type implements ICompositeType {

    /**
     * The start of the period. The boundary is inclusive.
     */
    @Child(name = "start", type = {DateTimeType.class}, order = 0, min = 0, max = 1)
    @Description(shortDefinition="Starting time with inclusive boundary", formalDefinition="The start of the period. The boundary is inclusive." )
    protected DateTimeType start;

    /**
     * The end of the period. If the end of the period is missing, it means that the period is ongoing.
     */
    @Child(name = "end", type = {DateTimeType.class}, order = 1, min = 0, max = 1)
    @Description(shortDefinition="End time with inclusive boundary, if not ongoing", formalDefinition="The end of the period. If the end of the period is missing, it means that the period is ongoing." )
    protected DateTimeType end;

    private static final long serialVersionUID = 649791751L;

    public Period() {
        super();
    }

    /**
     * @return {@link #start} (The start of the period. The boundary is inclusive.). This is the underlying object with id, value and extensions. The accessor "getStart" gives direct access to the value
     */
    public DateTimeType getStartElement() {
        if (this.start == null) {
            if (Configuration.errorOnAutoCreate()) {
                throw new Error("Attempt to auto-create Period.start");
            }
            if (Configuration.doAutoCreate()) {
                this.start = new DateTimeType(); // bb
            }
        }
        return this.start;
    }

    public boolean hasStartElement() {
        return this.start != null && !this.start.isEmpty();
    }

    public boolean hasStart() {
        return this.start != null && !this.start.isEmpty();
    }

    /**
     * @param value {@link #start} (The start of the period. The boundary is inclusive.). This is the underlying object with id, value and extensions. The accessor "getStart" gives direct access to the value
     */
    public Period setStartElement(DateTimeType value) {
        this.start = value;
        return this;
    }

    /**
     * @return The start of the period. The boundary is inclusive.
     */
    public Date getStart() {
        return this.start == null ? null : this.start.getValue();
    }

    /**
     * @param value The start of the period. The boundary is inclusive.
     */
    public Period setStart(Date value) {
        if (value == null) {
            // A null start clears the element entirely.
            this.start = null;
            return this;
        }
        if (this.start == null) {
            this.start = new DateTimeType();
        }
        this.start.setValue(value);
        return this;
    }

    /**
     * @return {@link #end} (The end of the period. If the end of the period is missing, it means that the period is ongoing.). This is the underlying object with id, value and extensions. The accessor "getEnd" gives direct access to the value
     */
    public DateTimeType getEndElement() {
        if (this.end == null) {
            if (Configuration.errorOnAutoCreate()) {
                throw new Error("Attempt to auto-create Period.end");
            }
            if (Configuration.doAutoCreate()) {
                this.end = new DateTimeType(); // bb
            }
        }
        return this.end;
    }

    public boolean hasEndElement() {
        return this.end != null && !this.end.isEmpty();
    }

    public boolean hasEnd() {
        return this.end != null && !this.end.isEmpty();
    }

    /**
     * @param value {@link #end} (The end of the period. If the end of the period is missing, it means that the period is ongoing.). This is the underlying object with id, value and extensions. The accessor "getEnd" gives direct access to the value
     */
    public Period setEndElement(DateTimeType value) {
        this.end = value;
        return this;
    }

    /**
     * @return The end of the period. If the end of the period is missing, it means that the period is ongoing.
     */
    public Date getEnd() {
        return this.end == null ? null : this.end.getValue();
    }

    /**
     * @param value The end of the period. If the end of the period is missing, it means that the period is ongoing.
     */
    public Period setEnd(Date value) {
        if (value == null) {
            // A null end clears the element entirely (period is ongoing).
            this.end = null;
            return this;
        }
        if (this.end == null) {
            this.end = new DateTimeType();
        }
        this.end.setValue(value);
        return this;
    }

    protected void listChildren(List<Property> childrenList) {
        super.listChildren(childrenList);
        childrenList.add(new Property("start", "dateTime", "The start of the period. The boundary is inclusive.", 0, java.lang.Integer.MAX_VALUE, start));
        childrenList.add(new Property("end", "dateTime", "The end of the period. If the end of the period is missing, it means that the period is ongoing.", 0, java.lang.Integer.MAX_VALUE, end));
    }

    public Period copy() {
        Period dst = new Period();
        copyValues(dst);
        dst.start = start == null ? null : start.copy();
        dst.end = end == null ? null : end.copy();
        return dst;
    }

    protected Period typedCopy() {
        return copy();
    }

    @Override
    public boolean equalsDeep(Base other) {
        // Superclass check first, then runtime-type check, then element-wise comparison.
        if (!super.equalsDeep(other) || !(other instanceof Period)) {
            return false;
        }
        Period o = (Period) other;
        return compareDeep(start, o.start, true) && compareDeep(end, o.end, true);
    }

    @Override
    public boolean equalsShallow(Base other) {
        if (!super.equalsShallow(other) || !(other instanceof Period)) {
            return false;
        }
        Period o = (Period) other;
        return compareValues(start, o.start, true) && compareValues(end, o.end, true);
    }

    public boolean isEmpty() {
        return super.isEmpty() && (start == null || start.isEmpty()) && (end == null || end.isEmpty())
          ;
    }

}
/*
 *  Copyright 2001-2011 Stephen Colebourne
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package org.joda.time.base;

import java.io.Serializable;
import java.util.Locale;

import org.joda.time.Chronology;
import org.joda.time.DateTimeField;
import org.joda.time.DateTimeUtils;
import org.joda.time.ReadablePartial;
import org.joda.time.convert.ConverterManager;
import org.joda.time.convert.PartialConverter;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

/**
 * BasePartial is an abstract implementation of ReadablePartial that stores
 * data in array and <code>Chronology</code> fields.
 * <p>
 * This class should generally not be used directly by API users.
 * The {@link org.joda.time.ReadablePartial} interface should be used when different
 * kinds of partial objects are to be referenced.
 * <p>
 * BasePartial subclasses may be mutable and not thread-safe.
 *
 * @author Stephen Colebourne
 * @since 1.0
 */
public abstract class BasePartial
        extends AbstractPartial
        implements ReadablePartial, Serializable {

    /** Serialization version */
    private static final long serialVersionUID = 2353678632973660L;

    /** The chronology in use; always stored in UTC (partials have no zone) */
    private final Chronology iChronology;
    /** The values of each field in this partial, indexed to match getFields() */
    private final int[] iValues;

    //-----------------------------------------------------------------------
    /**
     * Constructs a partial with the current time, using ISOChronology in
     * the default zone to extract the fields.
     * <p>
     * The constructor uses the default time zone, resulting in the local time
     * being initialised. Once the constructor is complete, all further calculations
     * are performed without reference to a timezone (by switching to UTC).
     */
    protected BasePartial() {
        this(DateTimeUtils.currentTimeMillis(), null);
    }

    /**
     * Constructs a partial with the current time, using the specified chronology
     * and zone to extract the fields.
     * <p>
     * The constructor uses the time zone of the chronology specified.
     * Once the constructor is complete, all further calculations are performed
     * without reference to a timezone (by switching to UTC).
     *
     * @param chronology  the chronology, null means ISOChronology in the default zone
     */
    protected BasePartial(Chronology chronology) {
        this(DateTimeUtils.currentTimeMillis(), chronology);
    }

    /**
     * Constructs a partial extracting the partial fields from the specified
     * milliseconds using the ISOChronology in the default zone.
     * <p>
     * The constructor uses the default time zone, resulting in the local time
     * being initialised. Once the constructor is complete, all further calculations
     * are performed without reference to a timezone (by switching to UTC).
     *
     * @param instant  the milliseconds from 1970-01-01T00:00:00Z
     */
    protected BasePartial(long instant) {
        this(instant, null);
    }

    /**
     * Constructs a partial extracting the partial fields from the specified
     * milliseconds using the chronology provided.
     * <p>
     * The constructor uses the time zone of the chronology specified.
     * Once the constructor is complete, all further calculations are performed
     * without reference to a timezone (by switching to UTC).
     *
     * @param instant  the milliseconds from 1970-01-01T00:00:00Z
     * @param chronology  the chronology, null means ISOChronology in the default zone
     */
    protected BasePartial(long instant, Chronology chronology) {
        super();
        // Resolve null to ISO-in-default-zone BEFORE extracting field values:
        // extraction uses the zoned chronology, storage uses its UTC variant.
        chronology = DateTimeUtils.getChronology(chronology);
        iChronology = chronology.withUTC();
        iValues = chronology.get(this, instant);
    }

    /**
     * Constructs a partial from an Object that represents a time, using the
     * specified chronology.
     * <p>
     * The recognised object types are defined in
     * {@link org.joda.time.convert.ConverterManager ConverterManager} and
     * include ReadableInstant, String, Calendar and Date.
     * <p>
     * The constructor uses the time zone of the chronology specified.
     * Once the constructor is complete, all further calculations are performed
     * without reference to a timezone (by switching to UTC).
     *
     * @param instant  the datetime object
     * @param chronology  the chronology, null means use converter
     * @throws IllegalArgumentException if the date is invalid
     */
    protected BasePartial(Object instant, Chronology chronology) {
        super();
        // The converter may supply a chronology when the caller passed null.
        PartialConverter converter = ConverterManager.getInstance().getPartialConverter(instant);
        chronology = converter.getChronology(instant, chronology);
        chronology = DateTimeUtils.getChronology(chronology);
        iChronology = chronology.withUTC();
        iValues = converter.getPartialValues(this, instant, chronology);
    }

    /**
     * Constructs a partial from an Object that represents a time, using the
     * specified chronology.
     * <p>
     * The recognised object types are defined in
     * {@link org.joda.time.convert.ConverterManager ConverterManager} and
     * include ReadableInstant, String, Calendar and Date.
     * <p>
     * The constructor uses the time zone of the chronology specified.
     * Once the constructor is complete, all further calculations are performed
     * without reference to a timezone (by switching to UTC).
     *
     * @param instant  the datetime object
     * @param chronology  the chronology, null means use converter
     * @param parser  if converting from a String, the given parser is preferred
     * @throws IllegalArgumentException if the date is invalid
     * @since 1.3
     */
    protected BasePartial(Object instant, Chronology chronology, DateTimeFormatter parser) {
        super();
        PartialConverter converter = ConverterManager.getInstance().getPartialConverter(instant);
        chronology = converter.getChronology(instant, chronology);
        chronology = DateTimeUtils.getChronology(chronology);
        iChronology = chronology.withUTC();
        iValues = converter.getPartialValues(this, instant, chronology, parser);
    }

    /**
     * Constructs a partial with specified time field values and chronology.
     * <p>
     * The constructor uses the time zone of the chronology specified.
     * Once the constructor is complete, all further calculations are performed
     * without reference to a timezone (by switching to UTC).
     * <p>
     * The array of values is assigned (not cloned) to the new instance.
     *
     * @param values  the new set of values
     * @param chronology  the chronology, null means ISOChronology in the default zone
     * @throws IllegalArgumentException if the values are invalid
     */
    protected BasePartial(int[] values, Chronology chronology) {
        super();
        chronology = DateTimeUtils.getChronology(chronology);
        iChronology = chronology.withUTC();
        // Validate against the zoned chronology before taking ownership of the array.
        chronology.validate(this, values);
        iValues = values;
    }

    /**
     * Private constructor to be used by subclasses only which performs no validation.
     * <p>
     * Data is assigned (not cloned) to the new instance.
     *
     * @param base  the base partial
     * @param values  the new set of values, not cloned, null means use base
     */
    protected BasePartial(BasePartial base, int[] values) {
        super();
        // No validation by design: the caller (a subclass) guarantees the values.
        iChronology = base.iChronology;
        iValues = values;
    }

    /**
     * Private constructor to be used by subclasses only which performs no validation.
     * <p>
     * Data is assigned (not cloned) to the new instance.
     * This should not be used by mutable subclasses.
     *
     * @param base  the base partial
     * @param chrono  the chronology to use, null means use base
     */
    protected BasePartial(BasePartial base, Chronology chrono) {
        super();
        // NOTE(review): despite the Javadoc, a null chrono would NPE here rather
        // than fall back to base's chronology — callers appear to pass non-null.
        iChronology = chrono.withUTC();
        iValues = base.iValues;
    }

    //-----------------------------------------------------------------------
    /**
     * Gets the value of the field at the specifed index.
     *
     * @param index  the index
     * @return the value
     * @throws IndexOutOfBoundsException if the index is invalid
     */
    public int getValue(int index) {
        return iValues[index];
    }

    /**
     * Gets an array of the value of each of the fields that this partial supports.
     * <p>
     * The fields are returned largest to smallest, for example Hour, Minute, Second.
     * Each value corresponds to the same array index as <code>getFields()</code>
     *
     * @return the current values of each field (cloned), largest to smallest
     */
    public int[] getValues() {
        // Defensive copy: callers must not be able to mutate internal state.
        return (int[]) iValues.clone();
    }

    /**
     * Gets the chronology of the partial which is never null.
     * <p>
     * The {@link org.joda.time.Chronology} is the calculation engine behind the partial and
     * provides conversion and validation of the fields in a particular calendar system.
     *
     * @return the chronology, never null
     */
    public Chronology getChronology() {
        return iChronology;
    }

    //-----------------------------------------------------------------------
    /**
     * Sets the value of the field at the specified index.
     * <p>
     * In version 2.0 and later, this method copies the array into the original.
     * This is because the instance variable has been changed to be final to satisfy the Java Memory Model.
     * This only impacts subclasses that are mutable.
     *
     * @param index  the index
     * @param value  the value to set
     * @throws IndexOutOfBoundsException if the index is invalid
     */
    protected void setValue(int index, int value) {
        DateTimeField field = getField(index);
        // field.set validates and may adjust dependent fields, returning a new array.
        int[] values = field.set(this, index, iValues, value);
        // Copy back in place because iValues is final (JMM safety).
        System.arraycopy(values, 0, iValues, 0, iValues.length);
    }

    /**
     * Sets the values of all fields.
     * <p>
     * In version 2.0 and later, this method copies the array into the original.
     * This is because the instance variable has been changed to be final to satisfy the Java Memory Model.
     * This only impacts subclasses that are mutable.
     *
     * @param values  the array of values
     */
    protected void setValues(int[] values) {
        // Validate the complete replacement set before overwriting in place.
        getChronology().validate(this, values);
        System.arraycopy(values, 0, iValues, 0, iValues.length);
    }

    //-----------------------------------------------------------------------
    /**
     * Output the date using the specified format pattern.
     *
     * @param pattern  the pattern specification, null means use <code>toString</code>
     * @see org.joda.time.format.DateTimeFormat
     */
    public String toString(String pattern) {
        if (pattern == null) {
            return toString();
        }
        return DateTimeFormat.forPattern(pattern).print(this);
    }

    /**
     * Output the date using the specified format pattern.
     *
     * @param pattern  the pattern specification, null means use <code>toString</code>
     * @param locale  Locale to use, null means default
     * @see org.joda.time.format.DateTimeFormat
     */
    public String toString(String pattern, Locale locale) throws IllegalArgumentException {
        if (pattern == null) {
            return toString();
        }
        return DateTimeFormat.forPattern(pattern).withLocale(locale).print(this);
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.db.compaction;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableSet;

import org.apache.cassandra.db.*;
import org.apache.cassandra.db.rows.UnfilteredRowIterator;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.io.sstable.Component;
import org.apache.cassandra.io.sstable.CorruptSSTableException;
import org.apache.cassandra.io.sstable.IndexSummary;
import org.apache.cassandra.io.sstable.KeyIterator;
import org.apache.cassandra.io.sstable.SSTableIdentityIterator;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.sstable.metadata.MetadataComponent;
import org.apache.cassandra.io.sstable.metadata.MetadataType;
import org.apache.cassandra.io.sstable.metadata.ValidationMetadata;
import org.apache.cassandra.io.util.DataIntegrityMetadata;
import org.apache.cassandra.io.util.DataIntegrityMetadata.FileDigestValidator;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.io.util.RandomAccessReader;
import org.apache.cassandra.schema.TableMetadata;
import org.apache.cassandra.service.ActiveRepairService;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.utils.BloomFilterSerializer;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.IFilter;
import org.apache.cassandra.utils.OutputHandler;
import org.apache.cassandra.utils.UUIDGen;

import java.io.BufferedInputStream;
import java.io.Closeable;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOError;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import java.util.function.Function;
import java.util.function.LongPredicate;

/**
 * Verifies the on-disk integrity of a single sstable: metadata, primary index,
 * index summary, bloom filter, data digest and (optionally) every row.
 *
 * A failed check funnels through {@link #markAndThrow()}, which may clear the
 * sstable's repaired status (so a repair re-streams it) and then throws either
 * a {@link CorruptSSTableException} (triggering the disk failure policy) or a
 * plain RuntimeException, depending on {@link Options#invokeDiskFailurePolicy}.
 */
public class Verifier implements Closeable
{
    private final ColumnFamilyStore cfs;
    private final SSTableReader sstable;

    // Provides the purge evaluator for the scrub-style read path; closed in verify()'s finally.
    private final CompactionController controller;
    private final RandomAccessReader dataFile;
    private final RandomAccessReader indexFile;
    private final VerifyInfo verifyInfo;
    private final RowIndexEntry.IndexSerializer rowIndexEntrySerializer;
    private final Options options;
    private final boolean isOffline;
    /**
     * Given a keyspace, return the set of local and pending token ranges. By default {@link StorageService#getLocalAndPendingRanges(String)}
     * is expected, but for the standalone verifier case we can't use that, so this is here to allow the CLI to provide
     * the token ranges.
     */
    private final Function<String, ? extends Collection<Range<Token>>> tokenLookup;

    // Count of rows that passed all checks during extended verification; reported at the end.
    private int goodRows;

    private final OutputHandler outputHandler;
    private FileDigestValidator validator;

    /** Convenience constructor that logs via {@link OutputHandler.LogOutput}. */
    public Verifier(ColumnFamilyStore cfs, SSTableReader sstable, boolean isOffline, Options options)
    {
        this(cfs, sstable, new OutputHandler.LogOutput(), isOffline, options);
    }

    /**
     * @param isOffline when true (standalone tool), the data reader is unthrottled;
     *                  when false, reads go through the compaction rate limiter.
     */
    public Verifier(ColumnFamilyStore cfs, SSTableReader sstable, OutputHandler outputHandler, boolean isOffline, Options options)
    {
        this.cfs = cfs;
        this.sstable = sstable;
        this.outputHandler = outputHandler;
        this.rowIndexEntrySerializer = sstable.descriptor.version.getSSTableFormat().getIndexSerializer(cfs.metadata(), sstable.descriptor.version, sstable.header);

        this.controller = new VerifyController(cfs);

        this.dataFile = isOffline
                        ? sstable.openDataReader()
                        : sstable.openDataReader(CompactionManager.instance.getRateLimiter());
        this.indexFile = RandomAccessReader.open(new File(sstable.descriptor.filenameFor(Component.PRIMARY_INDEX)));
        this.verifyInfo = new VerifyInfo(dataFile, sstable);
        this.options = options;
        this.isOffline = isOffline;
        this.tokenLookup = options.tokenLookup;
    }

    /**
     * Runs the verification pipeline. Checks run in increasing cost order:
     * version, metadata, index, summary, bloom filter, token ownership,
     * digest, and finally (unless {@code quick} or digest passed without
     * {@code extendedVerification}) a full row-by-row walk of the data file
     * cross-checked against the primary index.
     */
    public void verify()
    {
        boolean extended = options.extendedVerification;
        long rowStart = 0;

        outputHandler.output(String.format("Verifying %s (%s)", sstable, FBUtilities.prettyPrintMemory(dataFile.length())));
        if (options.checkVersion && !sstable.descriptor.version.isLatestVersion())
        {
            String msg = String.format("%s is not the latest version, run upgradesstables", sstable);
            outputHandler.output(msg);
            // don't use markAndThrow here because we don't want a CorruptSSTableException for this.
            throw new RuntimeException(msg);
        }

        outputHandler.output(String.format("Deserializing sstable metadata for %s ", sstable));
        try
        {
            EnumSet<MetadataType> types = EnumSet.of(MetadataType.VALIDATION, MetadataType.STATS, MetadataType.HEADER);
            Map<MetadataType, MetadataComponent> sstableMetadata = sstable.descriptor.getMetadataSerializer().deserialize(sstable.descriptor, types);
            if (sstableMetadata.containsKey(MetadataType.VALIDATION) &&
                !((ValidationMetadata)sstableMetadata.get(MetadataType.VALIDATION)).partitioner.equals(sstable.getPartitioner().getClass().getCanonicalName()))
                throw new IOException("Partitioner does not match validation metadata");
        }
        catch (Throwable t)
        {
            outputHandler.warn(t.getMessage());
            // false: metadata mismatch isn't fixed by repair, so don't touch repaired status
            markAndThrow(false);
        }

        try
        {
            outputHandler.debug("Deserializing index for "+sstable);
            deserializeIndex(sstable);
        }
        catch (Throwable t)
        {
            outputHandler.warn(t.getMessage());
            markAndThrow();
        }

        try
        {
            outputHandler.debug("Deserializing index summary for "+sstable);
            deserializeIndexSummary(sstable);
        }
        catch (Throwable t)
        {
            outputHandler.output("Index summary is corrupt - if it is removed it will get rebuilt on startup "+sstable.descriptor.filenameFor(Component.SUMMARY));
            outputHandler.warn(t.getMessage());
            // false: the summary is rebuildable, so corruption here doesn't warrant a repair
            markAndThrow(false);
        }

        try
        {
            outputHandler.debug("Deserializing bloom filter for "+sstable);
            deserializeBloomFilter(sstable);
        }
        catch (Throwable t)
        {
            outputHandler.warn(t.getMessage());
            markAndThrow();
        }

        if (options.checkOwnsTokens && !isOffline)
        {
            outputHandler.debug("Checking that all tokens are owned by the current node");
            try (KeyIterator iter = new KeyIterator(sstable.descriptor, sstable.metadata()))
            {
                List<Range<Token>> ownedRanges = Range.normalize(tokenLookup.apply(cfs.metadata.keyspace));
                if (ownedRanges.isEmpty())
                    return;
                RangeOwnHelper rangeOwnHelper = new RangeOwnHelper(ownedRanges);
                while (iter.hasNext())
                {
                    DecoratedKey key = iter.next();
                    rangeOwnHelper.validate(key);
                }
            }
            catch (Throwable t)
            {
                outputHandler.warn(t.getMessage());
                markAndThrow();
            }
        }

        if (options.quick)
            return;

        // Verify will use the Digest files, which works for both compressed and uncompressed sstables
        outputHandler.output(String.format("Checking computed hash of %s ", sstable));
        try
        {
            validator = null;

            if (new File(sstable.descriptor.filenameFor(Component.DIGEST)).exists())
            {
                validator = DataIntegrityMetadata.fileDigestValidator(sstable.descriptor);
                validator.validate();
            }
            else
            {
                // No digest to compare against -> forced to read every row below
                outputHandler.output("Data digest missing, assuming extended verification of disk values");
                extended = true;
            }
        }
        catch (IOException e)
        {
            outputHandler.warn(e.getMessage());
            markAndThrow();
        }
        finally
        {
            FileUtils.closeQuietly(validator);
        }

        if (!extended)
            return;

        outputHandler.output("Extended Verify requested, proceeding to inspect values");

        try
        {
            ByteBuffer nextIndexKey = ByteBufferUtil.readWithShortLength(indexFile);
            {
                // The first index entry must point at offset 0 of the data file
                long firstRowPositionFromIndex = rowIndexEntrySerializer.deserializePositionAndSkip(indexFile);
                if (firstRowPositionFromIndex != 0)
                    markAndThrow();
            }

            List<Range<Token>> ownedRanges = isOffline ? Collections.emptyList() : Range.normalize(tokenLookup.apply(cfs.metadata().keyspace));
            RangeOwnHelper rangeOwnHelper = new RangeOwnHelper(ownedRanges);
            DecoratedKey prevKey = null;

            while (!dataFile.isEOF())
            {

                if (verifyInfo.isStopRequested())
                    throw new CompactionInterruptedException(verifyInfo.getCompactionInfo());

                rowStart = dataFile.getFilePointer();
                outputHandler.debug("Reading row at " + rowStart);

                DecoratedKey key = null;
                try
                {
                    key = sstable.decorateKey(ByteBufferUtil.readWithShortLength(dataFile));
                }
                catch (Throwable th)
                {
                    throwIfFatal(th);
                    // check for null key below
                }

                if (options.checkOwnsTokens && ownedRanges.size() > 0)
                {
                    try
                    {
                        rangeOwnHelper.validate(key);
                    }
                    catch (Throwable t)
                    {
                        outputHandler.warn(String.format("Key %s in sstable %s not owned by local ranges %s", key, sstable, ownedRanges), t);
                        markAndThrow();
                    }
                }

                ByteBuffer currentIndexKey = nextIndexKey;
                long nextRowPositionFromIndex = 0;
                try
                {
                    // Advance the index in lock-step with the data file; at index EOF the
                    // "next" position is the end of the data file.
                    nextIndexKey = indexFile.isEOF() ? null : ByteBufferUtil.readWithShortLength(indexFile);
                    nextRowPositionFromIndex = indexFile.isEOF()
                                               ? dataFile.length()
                                               : rowIndexEntrySerializer.deserializePositionAndSkip(indexFile);
                }
                catch (Throwable th)
                {
                    markAndThrow();
                }

                long dataStart = dataFile.getFilePointer();
                // +2 skips the short key-length prefix preceding the key bytes
                long dataStartFromIndex = currentIndexKey == null
                                          ? -1
                                          : rowStart + 2 + currentIndexKey.remaining();

                long dataSize = nextRowPositionFromIndex - dataStartFromIndex;
                // avoid an NPE if key is null
                String keyName = key == null ? "(unreadable key)" : ByteBufferUtil.bytesToHex(key.getKey());
                outputHandler.debug(String.format("row %s is %s", keyName, FBUtilities.prettyPrintMemory(dataSize)));

                assert currentIndexKey != null || indexFile.isEOF();

                try
                {
                    if (key == null || dataSize > dataFile.length())
                        markAndThrow();

                    //mimic the scrub read path, intentionally unused
                    try (UnfilteredRowIterator iterator = SSTableIdentityIterator.create(sstable, dataFile, key))
                    {
                    }

                    // Keys must be strictly increasing and must agree with the index entry
                    if ( (prevKey != null && prevKey.compareTo(key) > 0) || !key.getKey().equals(currentIndexKey) || dataStart != dataStartFromIndex )
                        markAndThrow();

                    goodRows++;
                    prevKey = key;


                    outputHandler.debug(String.format("Row %s at %s valid, moving to next row at %s ", goodRows, rowStart, nextRowPositionFromIndex));
                    dataFile.seek(nextRowPositionFromIndex);
                }
                catch (Throwable th)
                {
                    markAndThrow();
                }
            }
        }
        catch (Throwable t)
        {
            throw Throwables.propagate(t);
        }
        finally
        {
            controller.close();
        }

        outputHandler.output("Verify of " + sstable + " succeeded. All " + goodRows + " rows read successfully");
    }

    /**
     * Use the fact that check(..) is called with sorted tokens - we keep a pointer in to the normalized ranges
     * and only bump the pointer if the key given is out of range. This is done to avoid calling .contains(..) many
     * times for each key (with vnodes for example)
     */
    @VisibleForTesting
    public static class RangeOwnHelper
    {
        private final List<Range<Token>> normalizedRanges;
        private int rangeIndex = 0;
        private DecoratedKey lastKey;

        public RangeOwnHelper(List<Range<Token>> normalizedRanges)
        {
            this.normalizedRanges = normalizedRanges;
            Range.assertNormalized(normalizedRanges);
        }

        /**
         * check if the given key is contained in any of the given ranges
         *
         * Must be called in sorted order - key should be increasing
         *
         * @param key the key
         * @throws RuntimeException if the key is not contained
         */
        public void validate(DecoratedKey key)
        {
            if (!check(key))
                throw new RuntimeException("Key " + key + " is not contained in the given ranges");
        }

        /**
         * check if the given key is contained in any of the given ranges
         *
         * Must be called in sorted order - key should be increasing
         *
         * @param key the key
         * @return boolean
         */
        public boolean check(DecoratedKey key)
        {
            assert lastKey == null || key.compareTo(lastKey) > 0;
            lastKey = key;

            if (normalizedRanges.isEmpty()) // handle tests etc where we don't have any ranges
                return true;

            if (rangeIndex > normalizedRanges.size() - 1)
                throw new IllegalStateException("RangeOwnHelper can only be used to find the first out-of-range-token");

            while (!normalizedRanges.get(rangeIndex).contains(key.getToken()))
            {
                rangeIndex++;
                if (rangeIndex > normalizedRanges.size() - 1)
                    return false;
            }

            return true;
        }
    }

    /**
     * Walks the entire primary index, reading each key and skipping its entry,
     * so any deserialization error surfaces as an exception.
     */
    private void deserializeIndex(SSTableReader sstable) throws IOException
    {
        try (RandomAccessReader primaryIndex = RandomAccessReader.open(new File(sstable.descriptor.filenameFor(Component.PRIMARY_INDEX))))
        {
            long indexSize = primaryIndex.length();

            while ((primaryIndex.getFilePointer()) != indexSize)
            {
                ByteBuffer key = ByteBufferUtil.readWithShortLength(primaryIndex);
                RowIndexEntry.Serializer.skip(primaryIndex, sstable.descriptor.version);
            }
        }
    }

    /**
     * Deserializes the summary file end-to-end (summary body plus the two
     * trailing first/last-key buffers) purely to detect corruption.
     */
    private void deserializeIndexSummary(SSTableReader sstable) throws IOException
    {
        File file = new File(sstable.descriptor.filenameFor(Component.SUMMARY));
        TableMetadata metadata = cfs.metadata();
        try (DataInputStream iStream = new DataInputStream(Files.newInputStream(file.toPath())))
        {
            try (IndexSummary indexSummary = IndexSummary.serializer.deserialize(iStream,
                                                               cfs.getPartitioner(),
                                                               metadata.params.minIndexInterval,
                                                               metadata.params.maxIndexInterval))
            {
                ByteBufferUtil.readWithLength(iStream);
                ByteBufferUtil.readWithLength(iStream);
            }
        }
    }

    /** Deserializes the bloom filter file; the result is discarded — only readability matters. */
    private void deserializeBloomFilter(SSTableReader sstable) throws IOException
    {
        try (DataInputStream stream = new DataInputStream(new BufferedInputStream(Files.newInputStream(Paths.get(sstable.descriptor.filenameFor(Component.FILTER)))));
             IFilter bf = BloomFilterSerializer.deserialize(stream, sstable.descriptor.version.hasOldBfFormat()))
        {
        }
    }

    /** Closes the data and index readers; safe to call even after a failed verify. */
    public void close()
    {
        FileUtils.closeQuietly(dataFile);
        FileUtils.closeQuietly(indexFile);
    }

    /** Rethrows JVM-fatal errors; AssertionError/IOError are treated as data corruption instead. */
    private void throwIfFatal(Throwable th)
    {
        if (th instanceof Error && !(th instanceof AssertionError || th instanceof IOError))
            throw (Error) th;
    }

    private void markAndThrow()
    {
        markAndThrow(true);
    }

    /**
     * Marks the sstable corrupt and throws. When {@code mutateRepaired} and the
     * mutateRepairStatus option are both set, the sstable is flagged unrepaired
     * first so an incremental repair will re-stream it; otherwise the error
     * message asks the operator for a full repair.
     */
    private void markAndThrow(boolean mutateRepaired)
    {
        if (mutateRepaired && options.mutateRepairStatus) // if we are able to mutate repaired flag, an incremental repair should be enough
        {
            try
            {
                sstable.descriptor.getMetadataSerializer().mutateRepairMetadata(sstable.descriptor, ActiveRepairService.UNREPAIRED_SSTABLE, sstable.getPendingRepair(), sstable.isTransient());
                sstable.reloadSSTableMetadata();
                cfs.getTracker().notifySSTableRepairedStatusChanged(Collections.singleton(sstable));
            }
            catch(IOException ioe)
            {
                // best-effort: verification still fails below even if the flag couldn't be cleared
                outputHandler.output("Error mutating repairedAt for SSTable " + sstable.getFilename() + ", as part of markAndThrow");
            }
        }
        Exception e = new Exception(String.format("Invalid SSTable %s, please force %srepair", sstable.getFilename(), (mutateRepaired && options.mutateRepairStatus) ? "" : "a full "));
        if (options.invokeDiskFailurePolicy)
            throw new CorruptSSTableException(e, sstable.getFilename());
        else
            throw new RuntimeException(e);
    }

    /** Progress holder consumed by the compaction metrics/monitoring machinery. */
    public CompactionInfo.Holder getVerifyInfo()
    {
        return verifyInfo;
    }

    /** Reports verification progress as data-file bytes read vs. total length. */
    private static class VerifyInfo extends CompactionInfo.Holder
    {
        private final RandomAccessReader dataFile;
        private final SSTableReader sstable;
        private final UUID verificationCompactionId;

        public VerifyInfo(RandomAccessReader dataFile, SSTableReader sstable)
        {
            this.dataFile = dataFile;
            this.sstable = sstable;
            verificationCompactionId = UUIDGen.getTimeUUID();
        }

        public CompactionInfo getCompactionInfo()
        {
            try
            {
                return new CompactionInfo(sstable.metadata(),
                                          OperationType.VERIFY,
                                          dataFile.getFilePointer(),
                                          dataFile.length(),
                                          verificationCompactionId,
                                          ImmutableSet.of(sstable));
            }
            catch (Exception e)
            {
                // NOTE(review): the cause is dropped here; original behavior preserved
                throw new RuntimeException();
            }
        }

        public boolean isGlobal()
        {
            return false;
        }
    }

    /** Controller that never purges tombstones — verification must see every cell as-is. */
    private static class VerifyController extends CompactionController
    {
        public VerifyController(ColumnFamilyStore cfs)
        {
            super(cfs, Integer.MAX_VALUE);
        }

        @Override
        public LongPredicate getPurgeEvaluator(DecoratedKey key)
        {
            return time -> false;
        }
    }

    public static Options.Builder options()
    {
        return new Options.Builder();
    }

    /** Immutable bundle of verification switches; build via {@link Verifier#options()}. */
    public static class Options
    {
        public final boolean invokeDiskFailurePolicy;
        public final boolean extendedVerification;
        public final boolean checkVersion;
        public final boolean mutateRepairStatus;
        public final boolean checkOwnsTokens;
        public final boolean quick;
        public final Function<String, ? extends Collection<Range<Token>>> tokenLookup;

        private Options(boolean invokeDiskFailurePolicy, boolean extendedVerification, boolean checkVersion, boolean mutateRepairStatus, boolean checkOwnsTokens, boolean quick, Function<String, ? extends Collection<Range<Token>>> tokenLookup)
        {
            this.invokeDiskFailurePolicy = invokeDiskFailurePolicy;
            this.extendedVerification = extendedVerification;
            this.checkVersion = checkVersion;
            this.mutateRepairStatus = mutateRepairStatus;
            this.checkOwnsTokens = checkOwnsTokens;
            this.quick = quick;
            this.tokenLookup = tokenLookup;
        }

        @Override
        public String toString()
        {
            return "Options{" +
                   "invokeDiskFailurePolicy=" + invokeDiskFailurePolicy +
                   ", extendedVerification=" + extendedVerification +
                   ", checkVersion=" + checkVersion +
                   ", mutateRepairStatus=" + mutateRepairStatus +
                   ", checkOwnsTokens=" + checkOwnsTokens +
                   ", quick=" + quick +
                   '}';
        }

        public static class Builder
        {
            private boolean invokeDiskFailurePolicy = false; // invoking disk failure policy can stop the node if we find a corrupt stable
            private boolean extendedVerification = false;
            private boolean checkVersion = false;
            private boolean mutateRepairStatus = false; // mutating repair status can be dangerous
            private boolean checkOwnsTokens = false;
            private boolean quick = false;
            private Function<String, ? extends Collection<Range<Token>>> tokenLookup = StorageService.instance::getLocalAndPendingRanges;

            public Builder invokeDiskFailurePolicy(boolean param)
            {
                this.invokeDiskFailurePolicy = param;
                return this;
            }

            public Builder extendedVerification(boolean param)
            {
                this.extendedVerification = param;
                return this;
            }

            public Builder checkVersion(boolean param)
            {
                this.checkVersion = param;
                return this;
            }

            public Builder mutateRepairStatus(boolean param)
            {
                this.mutateRepairStatus = param;
                return this;
            }

            public Builder checkOwnsTokens(boolean param)
            {
                this.checkOwnsTokens = param;
                return this;
            }

            public Builder quick(boolean param)
            {
                this.quick = param;
                return this;
            }

            public Builder tokenLookup(Function<String, ? extends Collection<Range<Token>>> tokenLookup)
            {
                this.tokenLookup = tokenLookup;
                return this;
            }

            public Options build()
            {
                return new Options(invokeDiskFailurePolicy, extendedVerification, checkVersion, mutateRepairStatus, checkOwnsTokens, quick, tokenLookup);
            }
        }
    }
}
package jToolkit4FixedPipeline.keyframeanim;

import static org.lwjgl.opengl.ARBMultitexture.GL_TEXTURE0_ARB;
import static org.lwjgl.opengl.ARBMultitexture.glActiveTextureARB;
import static org.lwjgl.opengl.GL11.*;

import java.awt.image.BufferedImage;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Arrays;
import java.util.logging.Level;
import java.util.logging.Logger;

import jToolkit4FixedPipeline.image.texture.TextureUploader;
import jToolkit4FixedPipeline.object.datastructure.*;
import jToolkit4FixedPipeline.object.datastructure.Face;
import jToolkit4FixedPipeline.object.instruments.OBJloader;
import jToolkit4FixedPipeline.vector.Vector2f;
import jToolkit4FixedPipeline.vector.Vector3f;
import jToolkit4FixedPipeline.vector.Vector4f;
import main.FixedPipeline;

import org.lwjgl.opengl.Display;

/**
 * Keyframe-animated goblin character rendered with fixed-pipeline OpenGL
 * display lists. Each animation frame is a separate OBJ model compiled into
 * its own display list; {@link #build()} selects which list to call based on
 * the global movement/jump/dead flags in {@link FixedPipeline}.
 *
 * ToDo: Change hardcoded paths! Further improvement is to convert this class as a generic, weakly connected code
 * @author Astemir Eleev
 */
public class Character {

    private int texture2; // char texture id
    private int texture3; // eyes texture id
    private String tex2 = "jToolkit/res/gobilnlwjglchar.png";
    private String tex3 = "jToolkit/res/gobilnlwjgleyes.png";

    private int displaylistch;      // NOTE(review): never written or drawn; only appears in toString()
    private int displaylistchar;    // standing (idle) frame
    private int displaylistcharj1;  // jump frame, low altitude
    private int displaylistcharj2;  // jump frame, high altitude (also used for dead-in-air)
    private int displaylistchar6;   // eyes overlay frame
    private int displaydead;        // dead body frame
    private int displaydeadeye;     // dead eyes frame
    private int[] displaylist;      // 21 forward/back walk-cycle frames (index 20 aliases 0)
    private int[] displaylistside;  // 10 sideways walk frames (odd slots alias the previous even slot)
    private OBJloader obj;          // NOTE(review): unused; OBJloader is only used statically in loadFrame

    private static String locj1 = "jToolkit/res/lwjglcharactergoblin18.obj";
    private static String locj2 = "jToolkit/res/lwjglcharactergoblin13.obj";
    private static String locg = "jToolkit/res/lwjglcharactergoblin10_000001.obj";
    private static String locge = "jToolkit/res/lwjglcharactergoblin12.obj";

    private Model m = null;

    // Frame cursors advanced by motionListener(); ranges stay within the arrays above.
    private int motioncounter = 0;
    private int motioncounterback = 20;
    private int motionsideright = 0;
    private int motionsideleft = 9;

    private Vector3f position = new Vector3f();
    private float rotationAngle;
    private Vector3f rotation = new Vector3f();
    private Vector3f scale = new Vector3f();
    private Vector4f color = new Vector4f();

    public Character() {
        loadTexture();
    }

    /**
     * This methods builds the character.
     * It catches call back from main pipeline
     *
     * Draws eyes first (texture3), then the body (texture2), picking display
     * lists from the FixedPipeline movement/jump/dead state flags.
     *
     * NOTE(review): the movement condition below mixes non-short-circuit | and &
     * with && — Java precedence groups it as
     * fwd | back | (right | (left && !isJump && height==0 & !dead)),
     * so forward/back/right alone bypass the jump/height/dead guards.
     * Presumably intentional given surrounding logic, but worth confirming.
     */
    public void build() {
        glActiveTextureARB(GL_TEXTURE0_ARB);
        glEnable(GL_TEXTURE_2D);
        //glCallList(displaylistchar);
        glBindTexture(GL_TEXTURE_2D, texture3);
        if (FixedPipeline.dead == false ) {
            glCallList(displaylistchar6);
        } else {
            if (FixedPipeline.height == 0)
                glCallList(displaydeadeye);
        }
        glBindTexture(GL_TEXTURE_2D, texture2);
        if (FixedPipeline.movingforward | FixedPipeline.movingback | FixedPipeline.movingsideright | FixedPipeline.movingsideleft && !FixedPipeline.isJump && FixedPipeline.height == 0 & FixedPipeline.dead == false) {
            if (FixedPipeline.movingforward)
                glCallList(displaylist[motioncounter]);
            if (FixedPipeline.movingback)
                glCallList(displaylist[motioncounterback]);
            if (FixedPipeline.movingsideright)
                glCallList(displaylistside[motionsideright]);
            if (FixedPipeline.movingsideleft)
                glCallList(displaylistside[motionsideleft]);
        } else {
            if (FixedPipeline.height == 0 & FixedPipeline.dead == false) {
                glCallList(displaylistchar);
            } else {
                if (FixedPipeline.height > 0 & FixedPipeline.height < 3 & FixedPipeline.dead == false) {
                    glCallList(displaylistcharj1);
                } else {
                    if (FixedPipeline.dead == false) {
                        //glCallList(displaylistcharj2);
                        glCallList(displaylistcharj2);
                    }
                }
            }
            if (FixedPipeline.dead & FixedPipeline.height ==0) {
                glCallList(displaydead);
            } else {
                if (FixedPipeline.dead & FixedPipeline.height > 0)
                    glCallList(displaylistcharj2);
            }
        }
    }

    /**
     * This method responds on call - back from the main FixedPipeline
     *
     * Advances the four frame cursors one step per call, wrapping each at its
     * array bounds (reset happens before the increment/decrement, so indices
     * stay in range: motioncounter 1..20, motioncounterback 19..0 wrapping,
     * motionsideright 1..9, motionsideleft 8..0 wrapping).
     */
    public void motionListener () {
        if (motioncounter == 20)
            motioncounter = 0;
        ++motioncounter;

        if (motioncounterback == 0)
            motioncounterback = 20;
        --motioncounterback;

        if (motionsideright == 9)
            motionsideright = 0;
        ++motionsideright;

        if (motionsideleft == 0)
            motionsideleft = 9;
        --motionsideleft;
    }

    /**
     * Resource dealloc
     *
     * Deletes the two textures and every display list. Note that aliased
     * side-walk slots are deleted once per slot; glDeleteLists on an already
     * deleted list name is what the original code does for the aliases.
     */
    public void destroy() {
        glDeleteTextures(texture2);
        glDeleteTextures(texture3);
        glDeleteLists(displaylistchar6, 1);
        glDeleteLists(displaylistchar, 1);
        glDeleteLists(displaylistcharj1, 1);
        glDeleteLists(displaylistcharj2, 1);
        for (int i = 0;i < displaylist.length;i++) {
            glDeleteLists(displaylist[i], 1);
        }
        for (int i = 0;i < displaylistside.length;i++) {
            glDeleteLists(displaylistside[i], 1);
        }
    }

    /**
     * This method loads obj file and put coords together as triangles
     * @param location - physical location of file
     *
     * On load failure the whole application is torn down (Display.destroy +
     * System.exit). Emits the model's faces as immediate-mode GL_TRIANGLES;
     * meant to be called between glNewList/glEndList. OBJ indices are 1-based,
     * hence the "- 1" on every lookup; V texture coordinates are flipped (1 - y).
     */
    public void loadFrame(String location) {
        try {
            m = OBJloader.loadModel(location);
        } catch (FileNotFoundException e) {
            Logger.getLogger(Character.class.getCanonicalName()).log(Level.WARNING, "Loading model exception", e);
            e.printStackTrace();
            Display.destroy();
            System.exit(1);
        } catch (IOException e) {
            Logger.getLogger(Character.class.getCanonicalName()).log(Level.WARNING, "IO exception", e);
            e.printStackTrace();
            Display.destroy();
            System.exit(1);
        }

        glBegin(GL_TRIANGLES);
        for (Face face : m.faces) {
            Vector2f t1 = m.textures.get((int) face.textures.x - 1);
            glTexCoord2f(t1.x, 1 - t1.y);
            Vector3f n1 = m.normals.get((int) face.normals.x - 1);
            glNormal3f(n1.x, n1.y, n1.z);
            Vector3f v1 = m.vertices.get((int) face.vertex.x - 1);
            glVertex3f(v1.x, v1.y, v1.z);

            Vector2f t2 = m.textures.get((int) face.textures.y - 1);
            glTexCoord2f(t2.x, 1 - t2.y);
            Vector3f n2 = m.normals.get((int) face.normals.y - 1);
            glNormal3f(n2.x, n2.y, n2.z);
            Vector3f v2 = m.vertices.get((int) face.vertex.y - 1);
            glVertex3f(v2.x, v2.y, v2.z);

            Vector2f t3 = m.textures.get((int) face.textures.z - 1);
            glTexCoord2f(t3.x, 1 - t3.y);
            Vector3f n3 = m.normals.get((int) face.normals.z - 1);
            glNormal3f(n3.x, n3.y, n3.z);
            Vector3f v3 = m.vertices.get((int) face.vertex.z - 1);
            glVertex3f(v3.x, v3.y, v3.z);
        }
        glEnd();
    }

    /**
     * Wrapper under build() method. It gives more control
     *
     * Applies translate/rotate/scale/color inside a matrix push/pop, then
     * delegates to build().
     */
    public void draw () {
        glPushMatrix();
        {
            glTranslatef(position.getX(), position.getY(), position.getZ());
            glRotatef(rotationAngle, rotation.getX(), rotation.getY(), rotation.getZ());
            glScalef(scale.getX(), scale.getY(), scale.getZ());
            glColor4f(color.getX(), color.getY(), color.getZ(), color.getW());
            // [self build]. It happens when you love 2 languages :)
            this.build();
        }
        glPopMatrix();
    }

    /**
     * ToDo: There are some problems with architecture. Change all the lines which have HARDCODED PATHS
     * Actually it's better to add for cycle for adding frame animation fragments
     *
     * Compiles every animation frame into display lists. Sideways frames are
     * mirrored by aliasing (odd slots reuse the preceding even slot's list);
     * walk frames 000005..000024 fill displaylist[0..19] with [20] aliasing [0].
     */
    public void createCharacter() {
        // load stand frame
        displaylistchar = glGenLists(1);
        glNewList(displaylistchar, GL_COMPILE);
        loadFrame(locg);
        glEndList();

        // load eye frame
        displaylistchar6 = glGenLists(1);
        glNewList(displaylistchar6, GL_COMPILE);
        loadFrame(locge);
        glEndList();

        // load two frames for jump
        displaylistcharj2 = glGenLists(1);
        glNewList(displaylistcharj2, GL_COMPILE);
        loadFrame(locj2);
        glEndList();

        Display.update();

        displaylistcharj1 = glGenLists(1);
        glNewList(displaylistcharj1, GL_COMPILE);
        loadFrame(locj1);
        glEndList();

        displaydead = glGenLists(1);
        glNewList(displaydead, GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblindead.obj");
        glEndList();

        displaydeadeye = glGenLists(1);
        glNewList(displaydeadeye, GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblindeadeye.obj");
        glEndList();

        // load frames for walking sideways
        displaylistside = new int[10];
        displaylistside[0] = glGenLists(1);
        glNewList(displaylistside[0], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin17_000001.obj");
        glEndList();
        displaylistside[1] = displaylistside[0];
        displaylistside[2] = glGenLists(1);
        glNewList(displaylistside[2], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin17_000002.obj");
        glEndList();
        displaylistside[3] = displaylistside[2];
        displaylistside[4] = glGenLists(1);
        glNewList(displaylistside[4], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin17_000003.obj");
        glEndList();
        displaylistside[5] = displaylistside[4];
        displaylistside[6] = glGenLists(1);
        glNewList(displaylistside[6], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin17_000004.obj");
        glEndList();
        displaylistside[7] = displaylistside[6];
        displaylistside[8] = displaylistside[0];
        displaylistside[9] = displaylistside[8];

        //load frames for walk
        displaylist = new int[21];
        displaylist[0] = glGenLists(1);
        glNewList(displaylist[0], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000005.obj");
        glEndList();
        displaylist[1] = glGenLists(1);
        glNewList(displaylist[1], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000006.obj");
        glEndList();
        displaylist[2] = glGenLists(1);
        glNewList(displaylist[2], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000007.obj");
        glEndList();
        displaylist[3] = glGenLists(1);
        glNewList(displaylist[3], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000008.obj");
        glEndList();
        displaylist[4] = glGenLists(1);
        glNewList(displaylist[4], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000009.obj");
        glEndList();
        displaylist[5] = glGenLists(1);
        glNewList(displaylist[5], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000010.obj");
        glEndList();
        displaylist[6] = glGenLists(1);
        glNewList(displaylist[6], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000011.obj");
        glEndList();
        displaylist[7] = glGenLists(1);
        glNewList(displaylist[7], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000012.obj");
        glEndList();
        displaylist[8] = glGenLists(1);
        glNewList(displaylist[8], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000013.obj");
        glEndList();
        displaylist[9] = glGenLists(1);
        glNewList(displaylist[9], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000014.obj");
        glEndList();
        displaylist[10] = glGenLists(1);
        glNewList(displaylist[10], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000015.obj");
        glEndList();
        displaylist[11] = glGenLists(1);
        glNewList(displaylist[11], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000016.obj");
        glEndList();
        displaylist[12] = glGenLists(1);
        glNewList(displaylist[12], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000017.obj");
        glEndList();
        displaylist[13] = glGenLists(1);
        glNewList(displaylist[13], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000018.obj");
        glEndList();
        displaylist[14] = glGenLists(1);
        glNewList(displaylist[14], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000019.obj");
        glEndList();
        displaylist[15] = glGenLists(1);
        glNewList(displaylist[15], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000020.obj");
        glEndList();
        displaylist[16] = glGenLists(1);
        glNewList(displaylist[16], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000021.obj");
        glEndList();
        displaylist[17] = glGenLists(1);
        glNewList(displaylist[17], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000022.obj");
        glEndList();
        displaylist[18] = glGenLists(1);
        glNewList(displaylist[18], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000023.obj");
        glEndList();
        displaylist[19] = glGenLists(1);
        glNewList(displaylist[19], GL_COMPILE);
        loadFrame("jToolkit/res/lwjglcharactergoblin11_000024.obj");
        glEndList();
        displaylist[20] = displaylist[0];
    }

    /** Loads the body and eye textures from the hardcoded PNG paths. */
    public void loadTexture() {
        // loader for textures
        BufferedImage image2 = TextureUploader.loadImage(tex2);
        texture2 = TextureUploader.loadTexture(image2);
        BufferedImage image3 = TextureUploader.loadImage(tex3);
        texture3 = TextureUploader.loadTexture(image3);
    }

    public Vector3f getPosition() {
        return position;
    }

    public void setPosition(Vector3f position) {
        this.position = position;
    }

    public float getRotationAngle() {
        return rotationAngle;
    }

    public void setRotationAngle(float rotationAngle) {
        this.rotationAngle = rotationAngle;
    }

    public Vector3f getRotation() {
        return rotation;
    }

    public void setRotation(Vector3f rotation) {
        this.rotation = rotation;
    }

    public Vector3f getScale() {
        return scale;
    }

    public void setScale(Vector3f scale) {
        this.scale = scale;
    }

    public Vector4f getColor() {
        return color;
    }

    public void setColor(Vector4f color) {
        this.color = color;
    }

    @Override
    public String toString() {
        return "Character{" +
                "texture2=" + texture2 +
                ", texture3=" + texture3 +
                ", tex2='" + tex2 + '\'' +
                ", tex3='" + tex3 + '\'' +
                ", displaylistch=" + displaylistch +
                ", displaylistchar=" + displaylistchar +
                ", displaylistcharj1=" + displaylistcharj1 +
                ", displaylistcharj2=" + displaylistcharj2 +
                ", displaylistchar6=" + displaylistchar6 +
                ", displaydead=" + displaydead +
                ", displaydeadeye=" + displaydeadeye +
                ", displaylist=" + Arrays.toString(displaylist) +
                ", displaylistside=" + Arrays.toString(displaylistside) +
                ", obj=" + obj +
                ", m=" + m +
                ", motioncounter=" + motioncounter +
                ", motioncounterback=" + motioncounterback +
                ", motionsideright=" + motionsideright +
                ", motionsideleft=" + motionsideleft +
                ", position=" + position +
                ", rotationAngle=" + rotationAngle +
                ", rotation=" + rotation +
                ", scale=" + scale +
                ", color=" + color +
                '}';
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.markup.parser; import java.util.Iterator; import java.util.Map; import org.apache.wicket.markup.parser.IXmlPullParser.HttpTagType; import org.apache.wicket.util.lang.Objects; import org.apache.wicket.util.string.AppendingStringBuffer; import org.apache.wicket.util.string.StringValue; import org.apache.wicket.util.string.Strings; import org.apache.wicket.util.value.IValueMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A subclass of MarkupElement which represents a tag including namespace and its optional * attributes. XmlTags are returned by the XML parser. * * @author Jonathan Locke */ public class XmlTag { /** Log. */ private static final Logger log = LoggerFactory.getLogger(XmlTag.class); /** * Enumerated type for different kinds of component tags. */ public static enum TagType { /** A close tag, like &lt;/TAG&gt;. */ CLOSE("CLOSE"), /** An open tag, like &lt;TAG componentId = "xyz"&gt;. */ OPEN("OPEN"), /** An open/close tag, like &lt;TAG componentId = "xyz"/&gt;. */ OPEN_CLOSE("OPEN_CLOSE"); private String name; TagType(final String name) { this.name = name; } } TextSegment text; /** Attribute map. 
*/
    private IValueMap attributes;

    /** Name of tag, such as "img" or "input". */
    String name;

    /** Namespace of the tag, if available, such as &lt;wicket:link ...&gt; */
    String namespace;

    /** The tag type (OPEN, CLOSE or OPEN_CLOSE). */
    TagType type;

    /** Any component tag that this tag closes. */
    private XmlTag closes;

    /** If mutable, the immutable tag that this tag is a mutable copy of. Self-referential by default. */
    private XmlTag copyOf = this;

    /** True if this tag is mutable, false otherwise. */
    private boolean isMutable = true;

    // NOTE(review): declared but never read or written in this class body — presumably set elsewhere; verify.
    private HttpTagType httpTagType;

    /**
     * Construct.
     */
    public XmlTag()
    {
        super();
    }

    /**
     * Construct.
     *
     * @param text
     *            The text segment this tag was parsed from
     * @param type
     *            The tag type (OPEN, CLOSE or OPEN_CLOSE)
     */
    public XmlTag(final TextSegment text, final TagType type)
    {
        this.text = text;
        this.type = type;
    }

    /**
     * Gets whether this tag closes the provided open tag.
     *
     * @param open
     *            The open tag
     * @return True if this tag closes the given open tag
     */
    public final boolean closes(final XmlTag open)
    {
        // Also matches when 'open' is a mutable copy of the tag this one closes.
        return (closes == open) || ((closes == open.copyOf) && (this != open));
    }

    /**
     * @param element
     * @return true, if namespace, name and attributes are the same
     */
    public final boolean equalTo(final XmlTag element)
    {
        final XmlTag that = element;
        if (!Objects.equal(getNamespace(), that.getNamespace()))
        {
            return false;
        }
        if (!getName().equals(that.getName()))
        {
            return false;
        }
        return getAttributes().equals(that.getAttributes());
    }

    /**
     * Gets a hashmap of this tag's attributes. Lazily created; a mutable copy inherits a copy of
     * the attributes of the immutable tag it was copied from.
     *
     * @return The tag's attributes
     */
    public IValueMap getAttributes()
    {
        if (attributes == null)
        {
            if ((copyOf == this) || (copyOf == null) || (copyOf.attributes == null))
            {
                attributes = new TagAttributes();
            }
            else
            {
                attributes = new TagAttributes(copyOf.attributes);
            }
        }
        return attributes;
    }

    /**
     * @return true if there are 1 or more attributes.
     */
    public boolean hasAttributes()
    {
        return attributes != null && attributes.size() > 0;
    }

    /**
     * Get the column number.
     *
     * @return Returns the columnNumber. 0 if the source text is no longer available.
     */
    public int getColumnNumber()
    {
        return (text != null ? text.columnNumber : 0);
    }

    /**
     * Gets the length of the tag in characters.
     *
     * @return The tag's length. 0 if the source text is no longer available.
     */
    public int getLength()
    {
        return (text != null ? text.text.length() : 0);
    }

    /**
     * Get the line number.
     *
     * @return Returns the lineNumber. 0 if the source text is no longer available.
     */
    public int getLineNumber()
    {
        return (text != null ? text.lineNumber : 0);
    }

    /**
     * Gets the name of the tag, for example the tag <code>&lt;b&gt;</code>'s name would be 'b'.
     *
     * @return The tag's name
     */
    public String getName()
    {
        return name;
    }

    /**
     * Namespace of the tag, if available. For example, &lt;wicket:link&gt;.
     *
     * @return The tag's namespace
     */
    public String getNamespace()
    {
        return namespace;
    }

    /**
     * Assuming this is a close tag, return the corresponding open tag
     *
     * @return The open tag. Null, if no open tag available
     */
    public final XmlTag getOpenTag()
    {
        return closes;
    }

    /**
     * Gets the location of the tag in the input string.
     *
     * @return Tag location (index in input string). 0 if the source text is no longer available.
     */
    public int getPos()
    {
        return (text != null ? text.pos : 0);
    }

    /**
     * Get a string attribute.
     *
     * @param key
     *            The key
     * @return The string value
     */
    public CharSequence getAttribute(final String key)
    {
        return getAttributes().getCharSequence(key);
    }

    /**
     * Get the tag type.
     *
     * @return the tag type (OPEN, CLOSE or OPEN_CLOSE).
     */
    public TagType getType()
    {
        return type;
    }

    /**
     * Gets whether this is a close tag.
     *
     * @return True if this tag is a close tag
     */
    public boolean isClose()
    {
        return type == TagType.CLOSE;
    }

    /**
     * @return True, if tag is mutable
     */
    public final boolean isMutable()
    {
        return isMutable;
    }

    /**
     * Gets whether this is an open tag.
     *
     * @return True if this tag is an open tag
     */
    public boolean isOpen()
    {
        return type == TagType.OPEN;
    }

    /**
     * Gets whether this tag is an open/ close tag.
     *
     * @return True if this tag is an open and a close tag
     */
    public boolean isOpenClose()
    {
        return type == TagType.OPEN_CLOSE;
    }

    /**
     * Makes this tag object immutable by making the attribute map unmodifiable. Immutable tags
     * cannot be made mutable again. They can only be copied into new mutable tag objects.
     *
     * @return this
     */
    public XmlTag makeImmutable()
    {
        if (isMutable)
        {
            isMutable = false;
            if (attributes != null)
            {
                attributes.makeImmutable();
                // NOTE(review): the source text is only dropped when attributes exist; afterwards
                // getPos()/getLineNumber() return 0 and toDebugString() would NPE on 'text.pos'
                // for such tags — confirm this asymmetry is intentional.
                text = null;
            }
        }
        return this;
    }

    /**
     * Gets this tag if it is already mutable, or a mutable copy of this tag if it is immutable.
     *
     * @return This tag if it is already mutable, or a mutable copy of this tag if it is immutable.
     */
    public XmlTag mutable()
    {
        if (isMutable)
        {
            return this;
        }
        else
        {
            final XmlTag tag = new XmlTag();
            copyPropertiesTo(tag);
            return tag;
        }
    }

    /**
     * Copies all internal properties from this tag to <code>dest</code>. This is basically cloning
     * without instance creation.
     *
     * @param dest
     *            tag whose properties will be set
     */
    void copyPropertiesTo(final XmlTag dest)
    {
        dest.namespace = namespace;
        dest.name = name;
        dest.text = text;
        dest.type = type;
        dest.isMutable = true;
        dest.closes = closes;
        dest.copyOf = copyOf;

        if (attributes != null)
        {
            // Defensive copy so the destination can mutate independently.
            dest.attributes = new TagAttributes(attributes);
        }
    }

    /**
     * Puts a boolean attribute.
     *
     * @param key
     *            The key
     * @param value
     *            The value
     * @return previous value associated with specified key, or null if there was no mapping for
     *         key. A null return can also indicate that the map previously associated null with the
     *         specified key, if the implementation supports null values.
     */
    public Object put(final String key, final boolean value)
    {
        return put(key, Boolean.toString(value));
    }

    /**
     * Puts an int attribute.
     *
     * @param key
     *            The key
     * @param value
     *            The value
     * @return previous value associated with specified key, or null if there was no mapping for
     *         key. A null return can also indicate that the map previously associated null with the
     *         specified key, if the implementation supports null values.
     */
    public Object put(final String key, final int value)
    {
        return put(key, Integer.toString(value));
    }

    /**
     * Puts a string attribute.
     *
     * @param key
     *            The key
     * @param value
     *            The value
     * @return previous value associated with specified key, or null if there was no mapping for
     *         key. A null return can also indicate that the map previously associated null with the
     *         specified key, if the implementation supports null values.
     */
    public Object put(final String key, final CharSequence value)
    {
        return getAttributes().put(key, value);
    }

    /**
     * Puts a {@link StringValue}attribute.
     *
     * @param key
     *            The key
     * @param value
     *            The value
     * @return previous value associated with specified key, or null if there was no mapping for
     *         key. A null return can also indicate that the map previously associated null with the
     *         specified key, if the implementation supports null values.
     */
    public Object put(final String key, final StringValue value)
    {
        return getAttributes().put(key, (value != null) ? value.toString() : null);
    }

    /**
     * Puts all attributes in map. Values are stored via their toString() form; null stays null.
     *
     * @param map
     *            A key/value map
     */
    public void putAll(final Map<String, Object> map)
    {
        for (final Map.Entry<String, Object> entry : map.entrySet())
        {
            Object value = entry.getValue();
            put(entry.getKey(), (value != null) ? value.toString() : null);
        }
    }

    /**
     * Removes an attribute.
     *
     * @param key
     *            The key to remove
     */
    public void remove(final String key)
    {
        getAttributes().remove(key);
    }

    /**
     * Sets the tag name.
     *
     * @param name
     *            New tag name
     * @throws UnsupportedOperationException
     *             if this tag is immutable
     */
    public void setName(final String name)
    {
        if (isMutable)
        {
            this.name = name;
        }
        else
        {
            throw new UnsupportedOperationException("Attempt to set name of immutable tag");
        }
    }

    /**
     * Sets the tag namespace.
     *
     * @param namespace
     *            New tag name
     * @throws UnsupportedOperationException
     *             if this tag is immutable
     */
    public void setNamespace(final String namespace)
    {
        if (isMutable)
        {
            this.namespace = namespace;
        }
        else
        {
            throw new UnsupportedOperationException("Attempt to set namespace of immutable tag");
        }
    }

    /**
     * Assuming this is a close tag, assign it's corresponding open tag.
     *
     * @param tag
     *            the open-tag
     * @throws RuntimeException
     *             if 'this' is not a close tag
     */
    public void setOpenTag(final XmlTag tag)
    {
        // NOTE(review): no close-tag check is actually performed here despite the javadoc — verify
        // whether callers guarantee the precondition.
        closes = tag;
    }

    /**
     * Sets type of this tag if it is not immutable.
     *
     * @param type
     *            The new type
     */
    public void setType(final TagType type)
    {
        if (isMutable)
        {
            this.type = type;
        }
        else
        {
            throw new UnsupportedOperationException("Attempt to set type of immutable tag");
        }
    }

    /**
     * Converts this object to a string representation.
     *
     * @return String version of this object
     */
    public String toDebugString()
    {
        // NOTE(review): dereferences 'text' without a null check — see makeImmutable().
        return "[Tag name = " + name + ", pos = " + text.pos + ", line = " + text.lineNumber +
            ", attributes = [" + getAttributes() + "], type = " + type + "]";
    }

    /**
     * Converts this object to a string representation.
     *
     * @return String version of this object
     */
    @Override
    public String toString()
    {
        return toCharSequence().toString();
    }

    /**
     * @return The string representation of the tag; the original markup when immutable and still
     *         available, otherwise rebuilt from the parsed parts.
     */
    public CharSequence toCharSequence()
    {
        if (!isMutable && (text != null))
        {
            return text.text;
        }

        return toXmlString(null);
    }

    /**
     * String representation with line and column number
     *
     * @return String version of this object
     */
    public String toUserDebugString()
    {
        return " '" + toString() + "' (line " + getLineNumber() + ", column " + getColumnNumber() +
            ")";
    }

    /**
     * Assuming some attributes have been changed, toXmlString() rebuilds the String on based on the
     * tags informations.
     *
     * @param attributeToBeIgnored
     *            attribute name to leave out (case-insensitive), or null to emit all
     * @return A xml string matching the tag
     */
    public CharSequence toXmlString(final String attributeToBeIgnored)
    {
        final AppendingStringBuffer buffer = new AppendingStringBuffer();

        buffer.append('<');

        if (type == TagType.CLOSE)
        {
            buffer.append('/');
        }

        if (namespace != null)
        {
            buffer.append(namespace);
            buffer.append(':');
        }

        buffer.append(name);

        final IValueMap attributes = getAttributes();
        if (attributes.size() > 0)
        {
            final Iterator<String> iterator = attributes.keySet().iterator();
            for (; iterator.hasNext();)
            {
                final String key = iterator.next();
                if ((key != null) &&
                    ((attributeToBeIgnored == null) || !key.equalsIgnoreCase(attributeToBeIgnored)))
                {
                    buffer.append(" ");
                    buffer.append(key);
                    CharSequence value = getAttribute(key);

                    // Attributes without values are possible, e.g. 'disabled'
                    if (value != null)
                    {
                        buffer.append("=\"");
                        value = Strings.escapeMarkup(value);
                        buffer.append(value);
                        buffer.append("\"");
                    }
                }
            }
        }

        if (type == TagType.OPEN_CLOSE)
        {
            buffer.append('/');
        }

        buffer.append('>');
        return buffer;
    }

    /**
     * Immutable record of where in the input a tag was parsed from, plus its raw markup text.
     */
    static class TextSegment
    {
        /** Column number. */
        final int columnNumber;

        /** Line number. */
        final int lineNumber;

        /** Position of this tag in the input that was parsed. */
        final int pos;

        /** Full text of tag. */
        final CharSequence text;

        TextSegment(final CharSequence text, final int pos, final int line, final int col)
        {
            this.text = text;
            this.pos = pos;
            lineNumber = line;
            columnNumber = col;
        }

        /**
         * @return The xml markup text
         */
        public final CharSequence getText()
        {
            return text;
        }

        /**
         * @see java.lang.Object#toString()
         */
        @Override
        public String toString()
        {
            return text.toString();
        }
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.devtools.build.lib.runtime.commands;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.FilesToRunProvider;
import com.google.devtools.build.lib.analysis.RunfilesSupport;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.RunUnder;
import com.google.devtools.build.lib.buildtool.BuildRequest;
import com.google.devtools.build.lib.buildtool.BuildRequest.BuildRequestOptions;
import com.google.devtools.build.lib.buildtool.BuildResult;
import com.google.devtools.build.lib.buildtool.BuildTool;
import com.google.devtools.build.lib.buildtool.OutputDirectoryLinksUtils;
import com.google.devtools.build.lib.buildtool.TargetValidator;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.Reporter;
import com.google.devtools.build.lib.exec.SymlinkTreeHelper;
import com.google.devtools.build.lib.packages.NonconfigurableAttributeMapper;
import com.google.devtools.build.lib.packages.OutputFile;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.packages.TargetUtils;
import com.google.devtools.build.lib.pkgcache.LoadingFailedException;
import com.google.devtools.build.lib.runtime.BlazeCommand;
import com.google.devtools.build.lib.runtime.BlazeRuntime;
import com.google.devtools.build.lib.runtime.Command;
import com.google.devtools.build.lib.runtime.CommandEnvironment;
import com.google.devtools.build.lib.shell.AbnormalTerminationException;
import com.google.devtools.build.lib.shell.BadExitStatusException;
import com.google.devtools.build.lib.shell.CommandException;
import com.google.devtools.build.lib.syntax.Type;
import com.google.devtools.build.lib.util.CommandBuilder;
import com.google.devtools.build.lib.util.CommandDescriptionForm;
import com.google.devtools.build.lib.util.CommandFailureUtils;
import com.google.devtools.build.lib.util.ExitCode;
import com.google.devtools.build.lib.util.FileType;
import com.google.devtools.build.lib.util.OptionsUtils;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.util.ShellEscaper;
import com.google.devtools.build.lib.util.io.OutErr;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParser;
import com.google.devtools.common.options.OptionsProvider;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * Builds and run a target with the given command line arguments.
 */
@Command(name = "run",
         builds = true,
         options = { RunCommand.RunOptions.class },
         inherits = { BuildCommand.class },
         shortDescription = "Runs the specified target.",
         help = "resource:run.txt",
         allowResidue = true,
         binaryStdOut = true,
         completion = "label-bin",
         binaryStdErr = true)
public class RunCommand implements BlazeCommand  {

  /** Options specific to the "run" command. */
  public static class RunOptions extends OptionsBase {
    @Option(name = "script_path",
        category = "run",
        defaultValue = "null",
        converter = OptionsUtils.PathFragmentConverter.class,
        help = "If set, write a shell script to the given file which invokes the "
            + "target. If this option is set, the target is not run from %{product}. "
            + "Use '%{product} run --script_path=foo //foo && foo' to invoke target '//foo' "
            + "This differs from '%{product} run //foo' in that the %{product} lock is released "
            + "and the executable is connected to the terminal's stdin.")
    public PathFragment scriptPath;
  }

  @VisibleForTesting
  public static final String SINGLE_TARGET_MESSAGE = "Blaze can only run a single target. "
      + "Do not use wildcards that match more than one target";
  @VisibleForTesting
  public static final String NO_TARGET_MESSAGE = "No targets found to run";

  /** Name of the process wrapper binary shipped with the embedded tools. */
  private static final String PROCESS_WRAPPER = "process-wrapper";

  // Value of --run_under as of the most recent command invocation. Stashed so that the
  // TargetValidator callback (which has no access to the options) can consult it.
  private RunUnder currentRunUnder;

  private static final FileType RUNFILES_MANIFEST = FileType.of(".runfiles_manifest");

  /**
   * Runs the build phase of the command, validating requested targets as they are loaded.
   */
  @VisibleForTesting  // productionVisibility = Visibility.PRIVATE
  protected BuildResult processRequest(final CommandEnvironment env, BuildRequest request) {
    return new BuildTool(env).processRequest(request, new TargetValidator() {
      @Override
      public void validateTargets(Collection<Target> targets, boolean keepGoing)
          throws LoadingFailedException {
        RunCommand.this.validateTargets(env.getReporter(), targets, keepGoing);
      }
    });
  }

  @Override
  public void editOptions(CommandEnvironment env, OptionsParser optionsParser) { }

  /**
   * Builds the requested target (and the --run_under target, if any), then either executes the
   * resulting binary or, with --script_path, writes a wrapper script instead of running it.
   *
   * @param env the command environment for this invocation
   * @param options parsed options; the residue is ["//target", "arg1", "arg2", ...]
   * @return the exit code to report to the user
   */
  @Override
  public ExitCode exec(CommandEnvironment env, OptionsProvider options) {
    BlazeRuntime runtime = env.getRuntime();
    RunOptions runOptions = options.getOptions(RunOptions.class);
    // This list should look like: ["//executable:target", "arg1", "arg2"]
    List<String> targetAndArgs = options.getResidue();

    // The user must at the least specify an executable target.
    if (targetAndArgs.isEmpty()) {
      env.getReporter().handle(Event.error("Must specify a target to run"));
      return ExitCode.COMMAND_LINE_ERROR;
    }
    String targetString = targetAndArgs.get(0);
    List<String> runTargetArgs = targetAndArgs.subList(1, targetAndArgs.size());
    RunUnder runUnder = options.getOptions(BuildConfiguration.Options.class).runUnder;

    OutErr outErr = env.getReporter().getOutErr();
    // If --run_under names a label, it must be built alongside the main target.
    List<String> targets = (runUnder != null) && (runUnder.getLabel() != null)
        ? ImmutableList.of(targetString, runUnder.getLabel().toString())
        : ImmutableList.of(targetString);
    BuildRequest request = BuildRequest.create(
        this.getClass().getAnnotation(Command.class).name(), options,
        runtime.getStartupOptionsProvider(), targets, outErr,
        env.getCommandId(), env.getCommandStartTime());

    currentRunUnder = runUnder;
    BuildResult result;
    try {
      result = processRequest(env, request);
    } finally {
      currentRunUnder = null;
    }

    if (!result.getSuccess()) {
      env.getReporter().handle(Event.error("Build failed. Not running target"));
      return result.getExitCondition();
    }

    // Make sure that we have exactly 1 built target (excluding --run_under),
    // and that it is executable.
    // These checks should only fail if keepGoing is true, because we already did
    // validation before the build began. See {@link #validateTargets()}.
    Collection<ConfiguredTarget> targetsBuilt = result.getSuccessfulTargets();
    ConfiguredTarget targetToRun = null;
    ConfiguredTarget runUnderTarget = null;
    if (targetsBuilt != null) {
      int maxTargets = runUnder != null && runUnder.getLabel() != null ? 2 : 1;
      if (targetsBuilt.size() > maxTargets) {
        env.getReporter().handle(Event.error(SINGLE_TARGET_MESSAGE));
        return ExitCode.COMMAND_LINE_ERROR;
      }
      for (ConfiguredTarget target : targetsBuilt) {
        ExitCode targetValidation = fullyValidateTarget(env, target);
        if (targetValidation != ExitCode.SUCCESS) {
          return targetValidation;
        }
        if (runUnder != null && target.getLabel().equals(runUnder.getLabel())) {
          if (runUnderTarget != null) {
            env.getReporter().handle(Event.error(
                null, "Can't identify the run_under target from multiple options?"));
            return ExitCode.COMMAND_LINE_ERROR;
          }
          runUnderTarget = target;
        } else if (targetToRun == null) {
          targetToRun = target;
        } else {
          env.getReporter().handle(Event.error(SINGLE_TARGET_MESSAGE));
          return ExitCode.COMMAND_LINE_ERROR;
        }
      }
    }
    // Handle target & run_under referring to the same target.
    if ((targetToRun == null) && (runUnderTarget != null)) {
      targetToRun = runUnderTarget;
    }
    if (targetToRun == null) {
      env.getReporter().handle(Event.error(NO_TARGET_MESSAGE));
      return ExitCode.COMMAND_LINE_ERROR;
    }

    Path executablePath = Preconditions.checkNotNull(
        targetToRun.getProvider(FilesToRunProvider.class).getExecutable().getPath());
    BuildConfiguration configuration = targetToRun.getConfiguration();
    if (configuration == null) {
      // The target may be an input file, which doesn't have a configuration. In that case, we
      // choose any target configuration.
      configuration = result.getBuildConfigurationCollection().getTargetConfigurations().get(0);
    }
    Path workingDir;
    try {
      workingDir = ensureRunfilesBuilt(env, targetToRun);
    } catch (CommandException e) {
      env.getReporter().handle(Event.error("Error creating runfiles: " + e.getMessage()));
      return ExitCode.LOCAL_ENVIRONMENTAL_ERROR;
    }

    // Target-declared arguments (from runfiles support) precede the user-supplied ones.
    List<String> args = runTargetArgs;
    FilesToRunProvider provider = targetToRun.getProvider(FilesToRunProvider.class);
    RunfilesSupport runfilesSupport = provider == null ? null : provider.getRunfilesSupport();
    if (runfilesSupport != null && runfilesSupport.getArgs() != null) {
      List<String> targetArgs = runfilesSupport.getArgs();
      if (!targetArgs.isEmpty()) {
        args = Lists.newArrayListWithCapacity(targetArgs.size() + runTargetArgs.size());
        args.addAll(targetArgs);
        args.addAll(runTargetArgs);
      }
    }

    //
    // We now have a unique executable ready to be run.
    //
    // We build up two different versions of the command to run: one with an absolute path, which
    // we'll actually run, and a prettier one with the long absolute path to the executable
    // replaced with a shorter relative path that uses the symlinks in the workspace.
    PathFragment prettyExecutablePath = OutputDirectoryLinksUtils.getPrettyPath(executablePath,
        runtime.getWorkspaceName(), runtime.getWorkspace(),
        options.getOptions(BuildRequestOptions.class).getSymlinkPrefix());
    List<String> cmdLine = new ArrayList<>();
    if (runOptions.scriptPath == null) {
      // Wrap the binary in the process wrapper so that it is reliably killed (SIGTERM after 1s,
      // SIGKILL after 15s grace) when the server interrupts it.
      PathFragment processWrapperPath = runtime.getBinTools().getExecPath(PROCESS_WRAPPER);
      Preconditions.checkNotNull(
          processWrapperPath, PROCESS_WRAPPER + " not found in embedded tools");
      cmdLine.add(runtime.getDirectories().getExecRoot()
          .getRelative(processWrapperPath).getPathString());
      cmdLine.add("-1");
      cmdLine.add("15");
      cmdLine.add("-");
      cmdLine.add("-");
    }
    List<String> prettyCmdLine = new ArrayList<>();
    // Insert the command prefix specified by the "--run_under=<command-prefix>" option
    // at the start of the command line.
    if (runUnder != null) {
      String runUnderValue = runUnder.getValue();
      if (runUnderTarget != null) {
        // --run_under specifies a target. Get the corresponding executable.
        // This must be an absolute path, because the run_under target is only
        // in the runfiles of test targets.
        runUnderValue = runUnderTarget
            .getProvider(FilesToRunProvider.class).getExecutable().getPath().getPathString();
        // If the run_under command contains any options, make sure to add them
        // to the command line as well.
        List<String> opts = runUnder.getOptions();
        if (!opts.isEmpty()) {
          runUnderValue += " " + ShellEscaper.escapeJoinAll(opts);
        }
      }
      cmdLine.add(configuration.getShExecutable().getPathString());
      cmdLine.add("-c");
      cmdLine.add(runUnderValue + " " + executablePath.getPathString() + " "
          + ShellEscaper.escapeJoinAll(args));
      prettyCmdLine.add(configuration.getShExecutable().getPathString());
      prettyCmdLine.add("-c");
      prettyCmdLine.add(runUnderValue + " " + prettyExecutablePath.getPathString() + " "
          + ShellEscaper.escapeJoinAll(args));
    } else {
      cmdLine.add(executablePath.getPathString());
      cmdLine.addAll(args);
      prettyCmdLine.add(prettyExecutablePath.getPathString());
      prettyCmdLine.addAll(args);
    }

    // Add a newline between the blaze output and the binary's output.
    outErr.printErrLn("");

    if (runOptions.scriptPath != null) {
      String unisolatedCommand = CommandFailureUtils.describeCommand(
          CommandDescriptionForm.COMPLETE_UNISOLATED,
          cmdLine, null, workingDir.getPathString());
      if (writeScript(env, runOptions.scriptPath, unisolatedCommand)) {
        return ExitCode.SUCCESS;
      } else {
        return ExitCode.RUN_FAILURE;
      }
    }

    env.getReporter().handle(Event.info(
        null, "Running command line: " + ShellEscaper.escapeJoinAll(prettyCmdLine)));

    com.google.devtools.build.lib.shell.Command command = new CommandBuilder()
        .addArgs(cmdLine).setEnv(env.getClientEnv()).setWorkingDir(workingDir).build();

    try {
      // Restore a raw EventHandler if it is registered. This allows for blaze run to produce the
      // actual output of the command being run even if --color=no is specified.
      env.getReporter().switchToAnsiAllowingHandler();

      // The command API is a little strange in that the following statement
      // will return normally only if the program exits with exit code 0.
      // If it ends with any other code, we have to catch BadExitStatusException.
      // (The exit code of a normal termination is therefore always 0 and need not be queried.)
      command.execute(com.google.devtools.build.lib.shell.Command.NO_INPUT,
          com.google.devtools.build.lib.shell.Command.NO_OBSERVER,
          outErr.getOutputStream(),
          outErr.getErrorStream(),
          true /* interruptible */);
      return ExitCode.SUCCESS;
    } catch (BadExitStatusException e) {
      String message = "Non-zero return code '"
          + e.getResult().getTerminationStatus().getExitCode()
          + "' from command: " + e.getMessage();
      env.getReporter().handle(Event.error(message));
      return ExitCode.RUN_FAILURE;
    } catch (AbnormalTerminationException e) {
      // The process was likely terminated by a signal in this case.
      return ExitCode.INTERRUPTED;
    } catch (CommandException e) {
      env.getReporter().handle(Event.error("Error running program: " + e.getMessage()));
      return ExitCode.RUN_FAILURE;
    }
  }

  /**
   * Ensures that runfiles are built for the specified target. If they already
   * are, does nothing, otherwise builds them.
   *
   * @param target the target to build runfiles for.
   * @return the path of the runfiles directory.
   * @throws CommandException if the symlink tree cannot be created
   */
  private Path ensureRunfilesBuilt(CommandEnvironment env, ConfiguredTarget target)
      throws CommandException {
    FilesToRunProvider provider = target.getProvider(FilesToRunProvider.class);
    RunfilesSupport runfilesSupport = provider == null ? null : provider.getRunfilesSupport();
    if (runfilesSupport == null) {
      return env.getWorkingDirectory();
    }

    Artifact manifest = runfilesSupport.getRunfilesManifest();
    PathFragment runfilesDir = runfilesSupport.getRunfilesDirectoryExecPath();
    Path workingDir = env.getRuntime().getExecRoot()
        .getRelative(runfilesDir)
        .getRelative(runfilesSupport.getRunfiles().getSuffix());

    // When runfiles are not generated, getManifest() returns the
    // .runfiles_manifest file, otherwise it returns the MANIFEST file. This is
    // a handy way to check whether runfiles were built or not.
    if (!RUNFILES_MANIFEST.matches(manifest.getFilename())) {
      // Runfiles already built, nothing to do.
      return workingDir;
    }

    SymlinkTreeHelper helper = new SymlinkTreeHelper(
        manifest.getExecPath(), runfilesDir, false);
    helper.createSymlinksUsingCommand(env.getRuntime().getExecRoot(),
        target.getConfiguration(), env.getRuntime().getBinTools());
    return workingDir;
  }

  /**
   * Writes an executable shell script that re-runs the given command with the caller's arguments
   * appended.
   *
   * @return true on success, false (after reporting the error) on I/O failure
   */
  private boolean writeScript(CommandEnvironment env, PathFragment scriptPathFrag, String cmd) {
    final String SH_SHEBANG = "#!/bin/sh";
    Path scriptPath = env.getWorkingDirectory().getRelative(scriptPathFrag);
    try {
      FileSystemUtils.writeContent(scriptPath, StandardCharsets.ISO_8859_1,
          SH_SHEBANG + "\n" + cmd + " \"$@\"");
      scriptPath.setExecutable(true);
    } catch (IOException e) {
      env.getReporter().handle(Event.error("Error writing run script: " + e.getMessage()));
      return false;
    }
    return true;
  }

  // Make sure we are building exactly 1 binary target.
  // If keepGoing, we'll build all the targets even if they are non-binary.
  private void validateTargets(Reporter reporter, Collection<Target> targets, boolean keepGoing)
      throws LoadingFailedException {
    Target targetToRun = null;
    Target runUnderTarget = null;

    boolean singleTargetWarningWasOutput = false;
    int maxTargets = currentRunUnder != null && currentRunUnder.getLabel() != null ? 2 : 1;
    if (targets.size() > maxTargets) {
      warningOrException(reporter, SINGLE_TARGET_MESSAGE, keepGoing);
      singleTargetWarningWasOutput = true;
    }
    for (Target target : targets) {
      String targetError = validateTarget(target);
      if (targetError != null) {
        warningOrException(reporter, targetError, keepGoing);
      }

      if (currentRunUnder != null && target.getLabel().equals(currentRunUnder.getLabel())) {
        // It's impossible to have two targets with the same label.
        Preconditions.checkState(runUnderTarget == null);
        runUnderTarget = target;
      } else if (targetToRun == null) {
        targetToRun = target;
      } else {
        if (!singleTargetWarningWasOutput) {
          warningOrException(reporter, SINGLE_TARGET_MESSAGE, keepGoing);
        }
        return;
      }
    }
    // Handle target & run_under referring to the same target.
    if ((targetToRun == null) && (runUnderTarget != null)) {
      targetToRun = runUnderTarget;
    }

    if (targetToRun == null) {
      warningOrException(reporter, NO_TARGET_MESSAGE, keepGoing);
    }
  }

  // If keepGoing, print a warning and return the given collection.
  // Otherwise, throw InvalidTargetException.
  private void warningOrException(Reporter reporter, String message, boolean keepGoing)
      throws LoadingFailedException {
    if (keepGoing) {
      reporter.handle(Event.warn(message + ". Will continue anyway"));
    } else {
      throw new LoadingFailedException(message);
    }
  }

  private static String notExecutableError(Target target) {
    return "Cannot run target " + target.getLabel() + ": Not executable";
  }

  /** Returns null if the target is a runnable rule, or an appropriate error message otherwise. */
  private static String validateTarget(Target target) {
    return isExecutable(target) ? null : notExecutableError(target);
  }

  /**
   * Performs all available validation checks on an individual target.
   *
   * @param target ConfiguredTarget to validate
   * @return ExitCode.SUCCESS if all checks succeeded, otherwise a different error code.
   */
  private ExitCode fullyValidateTarget(CommandEnvironment env, ConfiguredTarget target) {
    String targetError = validateTarget(target.getTarget());
    if (targetError != null) {
      env.getReporter().handle(Event.error(targetError));
      return ExitCode.COMMAND_LINE_ERROR;
    }

    Artifact executable = target.getProvider(FilesToRunProvider.class).getExecutable();
    if (executable == null) {
      env.getReporter().handle(Event.error(notExecutableError(target.getTarget())));
      return ExitCode.COMMAND_LINE_ERROR;
    }

    Path executablePath = executable.getPath();
    try {
      if (!executablePath.exists() || !executablePath.isExecutable()) {
        env.getReporter().handle(Event.error(
            null, "Non-existent or non-executable " + executablePath));
        return ExitCode.BLAZE_INTERNAL_ERROR;
      }
    } catch (IOException e) {
      env.getReporter().handle(Event.error(
          "Error checking " + executablePath.getPathString() + ": " + e.getMessage()));
      return ExitCode.LOCAL_ENVIRONMENTAL_ERROR;
    }
    return ExitCode.SUCCESS;
  }

  /**
   * Return true iff {@code target} is a rule that has an executable file. This includes
   * *_test rules, *_binary rules, and generated outputs.
   */
  private static boolean isExecutable(Target target) {
    return isOutputFile(target) || isExecutableNonTestRule(target)
        || TargetUtils.isTestRule(target);
  }

  /**
   * Return true iff {@code target} is a rule that generates an executable file and is user-executed
   * code.
   */
  private static boolean isExecutableNonTestRule(Target target) {
    if (!(target instanceof Rule)) {
      return false;
    }
    Rule rule = ((Rule) target);
    if (rule.getRuleClassObject().hasAttr("$is_executable", Type.BOOLEAN)) {
      return NonconfigurableAttributeMapper.of(rule).get("$is_executable", Type.BOOLEAN);
    }
    return false;
  }

  private static boolean isOutputFile(Target target) {
    return (target instanceof OutputFile);
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.UUID; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.IgniteSystemProperties; import org.apache.ignite.cache.CacheMode; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.managers.communication.GridMessageListener; import org.apache.ignite.internal.pagemem.store.IgnitePageStoreManager; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionsExchangeFuture; import 
org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition; import org.apache.ignite.internal.processors.cache.persistence.IgniteCacheDatabaseSharedManager; import org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointProgress; import org.apache.ignite.internal.processors.cache.persistence.metastorage.MetastorageLifecycleListener; import org.apache.ignite.internal.processors.cache.persistence.metastorage.ReadOnlyMetastorage; import org.apache.ignite.internal.processors.cache.persistence.metastorage.ReadWriteMetastorage; import org.apache.ignite.internal.util.GridBoundedConcurrentLinkedHashSet; import org.apache.ignite.internal.util.future.GridFinishedFuture; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.T2; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.internal.util.worker.GridWorker; import org.apache.ignite.lang.IgniteFuture; import org.apache.ignite.lang.IgniteInClosure; import org.apache.ignite.lang.IgnitePredicate; import org.apache.ignite.lang.IgniteRunnable; import org.apache.ignite.lang.IgniteUuid; import org.apache.ignite.thread.IgniteThread; import org.apache.ignite.thread.OomExceptionHandler; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.IgniteSystemProperties.IGNITE_DISABLE_WAL_DURING_REBALANCING; import static org.apache.ignite.IgniteSystemProperties.IGNITE_PENDING_TX_TRACKER_ENABLED; import static org.apache.ignite.internal.GridTopic.TOPIC_WAL; import static org.apache.ignite.internal.managers.communication.GridIoPolicy.SYSTEM_POOL; import static org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState.MOVING; import static org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState.RENTING; import static 
org.apache.ignite.internal.processors.cache.persistence.CheckpointState.FINISHED;
import static org.apache.ignite.internal.processors.cache.persistence.CheckpointState.LOCK_RELEASED;

/**
 * Write-ahead log state manager. Manages WAL enable and disable.
 * <p>
 * All mutable state ({@code userFuts}, {@code ress}, {@code procs}, {@code initialRess}, {@code pendingAcks},
 * {@code crdNode}, {@code disconnected}) is guarded by {@code mux}.
 */
public class WalStateManager extends GridCacheSharedManagerAdapter {
    /** Checkpoint-reason prefix used by {@link #reason(long, AffinityTopologyVersion)}. */
    public static final String ENABLE_DURABILITY_AFTER_REBALANCING = "enable-durability-rebalance-finished-";

    /** History size to track stale messages. */
    private static final int HIST_SIZE = 1000;

    /** @see IgniteSystemProperties#IGNITE_DISABLE_WAL_DURING_REBALANCING */
    public static final boolean DFLT_DISABLE_WAL_DURING_REBALANCING = true;

    /** ID history for discovery messages (bounded, so very old duplicates may be forgotten). */
    private final GridBoundedConcurrentLinkedHashSet<T2<UUID, Boolean>> discoMsgIdHist =
        new GridBoundedConcurrentLinkedHashSet<>(HIST_SIZE);

    /** History of already completed operations. */
    private final GridBoundedConcurrentLinkedHashSet<UUID> completedOpIds =
        new GridBoundedConcurrentLinkedHashSet<>(HIST_SIZE);

    /** Client futures (operation ID -> user future completed when the operation finishes). */
    private final Map<UUID, GridFutureAdapter<Boolean>> userFuts = new HashMap<>();

    /** Finished results awaiting discovery finish message. */
    private final Map<UUID, WalStateResult> ress = new HashMap<>();

    /** Active distributed processes (maintained on the coordinator node only). */
    private final Map<UUID, WalStateDistributedProcess> procs = new HashMap<>();

    /** Pending results created on cache processor start based on available discovery data. */
    private final Collection<WalStateResult> initialRess = new LinkedList<>();

    /** Pending acknowledge messages (i.e. received before node completed its local part). */
    private final Collection<WalStateAckMessage> pendingAcks = new HashSet<>();

    /** Whether this is a server node. */
    private final boolean srv;

    /** IO message listener (ack messages from server nodes; {@code null} on non-server nodes). */
    private final GridMessageListener ioLsnr;

    /** Operation mutex. */
    private final Object mux = new Object();

    /** Logger. */
    private final IgniteLogger log;

    /** Current coordinator node; lazily resolved in {@link #coordinator()}, reset on coordinator leave. */
    private ClusterNode crdNode;

    /** Disconnected flag. */
    private boolean disconnected;

    /** WAL disable context used by {@link #runWithOutWAL(IgniteRunnable)}. */
    private volatile WALDisableContext walDisableContext;

    /** Denies or allows WAL disabling. */
    private volatile boolean prohibitDisabling;

    /**
     * Constructor.
     *
     * @param kernalCtx Kernal context ({@code null} is tolerated for tests: node acts as non-server, no logger).
     */
    public WalStateManager(GridKernalContext kernalCtx) {
        if (kernalCtx != null) {
            IgniteConfiguration cfg = kernalCtx.config();

            boolean client = cfg.isClientMode() != null && cfg.isClientMode();

            srv = !client && !cfg.isDaemon();

            log = kernalCtx.log(WalStateManager.class);
        }
        else {
            srv = false;

            log = null;
        }

        if (srv) {
            // Server nodes listen for ack messages sent by other servers once their local part completes.
            ioLsnr = new GridMessageListener() {
                @Override public void onMessage(UUID nodeId, Object msg, byte plc) {
                    if (msg instanceof WalStateAckMessage) {
                        WalStateAckMessage msg0 = (WalStateAckMessage) msg;

                        msg0.senderNodeId(nodeId);

                        onAck(msg0);
                    }
                    else
                        U.warn(log, "Unexpected IO message (will ignore): " + msg);
                }
            };
        }
        else
            ioLsnr = null;
    }

    /** {@inheritDoc} */
    @Override protected void start0() throws IgniteCheckedException {
        if (srv)
            cctx.kernalContext().io().addMessageListener(TOPIC_WAL, ioLsnr);

        walDisableContext = new WALDisableContext(
            cctx.cache().context().database(),
            cctx.pageStore(),
            log
        );

        cctx.kernalContext().internalSubscriptionProcessor().registerMetastorageListener(walDisableContext);
    }

    /** {@inheritDoc} */
    @Override protected void stop0(boolean cancel) {
        if (srv)
            cctx.kernalContext().io().removeMessageListener(TOPIC_WAL, ioLsnr);
    }

    /**
     * Callback invoked when caches info is collected inside cache processor start routine. Discovery is not
     * active at this point.
     */
    public void onCachesInfoCollected() {
        if (!srv)
            return;

        synchronized (mux) {
            // Process top pending requests: apply the head WAL-change request of each group locally and
            // stash the result until onKernalStart() can propagate it.
            for (CacheGroupDescriptor grpDesc : cacheProcessor().cacheGroupDescriptors().values()) {
                WalStateProposeMessage msg = grpDesc.nextWalChangeRequest();

                if (msg != null) {
                    if (log.isDebugEnabled())
                        log.debug("Processing WAL state message on start: " + msg);

                    boolean enabled = grpDesc.walEnabled();

                    WalStateResult res;

                    if (F.eq(enabled, msg.enable()))
                        res = new WalStateResult(msg, false);
                    else {
                        res = new WalStateResult(msg, true);

                        grpDesc.walEnabled(!enabled);
                    }

                    initialRess.add(res);

                    addResult(res);
                }
            }
        }
    }

    /**
     * Handle cache processor kernal start. At this point we already collected discovery data from other nodes
     * (discovery already active), but exchange worker is not active yet. We need to iterate over available group
     * descriptors and perform top operations, taking into account that no cache operations are possible at this
     * point, so checkpoint is not needed.
     */
    public void onKernalStart() {
        if (!srv)
            return;

        synchronized (mux) {
            for (WalStateResult res : initialRess)
                onCompletedLocally(res);

            initialRess.clear();
        }
    }

    /** {@inheritDoc} */
    @Override public void onDisconnected(IgniteFuture reconnectFut) {
        Collection<GridFutureAdapter<Boolean>> userFuts0;

        synchronized (mux) {
            assert !disconnected;

            disconnected = true;

            // Snapshot and clear under the lock; complete outside to avoid running listeners under mux.
            userFuts0 = new ArrayList<>(userFuts.values());

            userFuts.clear();
        }

        for (GridFutureAdapter<Boolean> userFut : userFuts0)
            completeWithError(userFut, "Client node was disconnected from topology (operation result is unknown).");
    }

    /** {@inheritDoc} */
    @Override public void onReconnected(boolean active) {
        synchronized (mux) {
            assert disconnected;

            disconnected = false;
        }
    }

    /**
     * Denies or allows WAL disabling with subsequent {@link #init(Collection, boolean)} call.
     *
     * @param val Denial status.
     */
    public void prohibitWALDisabling(boolean val) {
        prohibitDisabling = val;
    }

    /**
     * Reports whether WAL disabling with subsequent {@link #init(Collection, boolean)} is denied.
     *
     * @return Denial status.
     */
    public boolean prohibitWALDisabling() {
        return prohibitDisabling;
    }

    /**
     * Change WAL mode.
     *
     * @param cacheNames Cache names.
     * @param enabled Enabled flag.
     * @return Future completed when operation finished.
     */
    public IgniteInternalFuture<Boolean> changeWalMode(Collection<String> cacheNames, boolean enabled) {
        // WAL mode change is forbidden inside an active lock or transaction.
        cctx.tm().checkEmptyTransactions(() ->
            String.format("Cache WAL mode cannot be changed within lock or transaction " +
                "[cacheNames=%s, walEnabled=%s]", cacheNames, enabled));

        return init(cacheNames, enabled);
    }

    /**
     * Initiate WAL mode change operation.
     *
     * @param cacheNames Cache names.
     * @param enabled Enabled flag.
     * @return Future completed when operation finished.
     */
    private IgniteInternalFuture<Boolean> init(Collection<String> cacheNames, boolean enabled) {
        if (!enabled && prohibitDisabling)
            return errorFuture("WAL disabling is prohibited.");

        if (F.isEmpty(cacheNames))
            return errorFuture("Cache names cannot be empty.");

        synchronized (mux) {
            if (disconnected)
                return errorFuture("Failed to initiate WAL mode change because client node is disconnected.");

            // Prepare cache and group infos.
            Map<String, IgniteUuid> caches = new HashMap<>(cacheNames.size());

            CacheGroupDescriptor grpDesc = null;

            for (String cacheName : cacheNames) {
                DynamicCacheDescriptor cacheDesc = cacheProcessor().cacheDescriptor(cacheName);

                if (cacheDesc == null)
                    return errorFuture("Cache doesn't exist: " + cacheName);

                caches.put(cacheName, cacheDesc.deploymentId());

                CacheGroupDescriptor curGrpDesc = cacheDesc.groupDescriptor();

                // WAL state is a per-group property, so all caches must come from the same group.
                if (grpDesc == null)
                    grpDesc = curGrpDesc;
                else if (!F.eq(grpDesc.deploymentId(), curGrpDesc.deploymentId())) {
                    return errorFuture("Cannot change WAL mode for caches from different cache groups [" +
                        "cache1=" + cacheNames.iterator().next() + ", grp1=" + grpDesc.groupName() +
                        ", cache2=" + cacheName + ", grp2=" + curGrpDesc.groupName() + ']');
                }
            }

            assert grpDesc != null;

            HashSet<String> grpCaches = new HashSet<>(grpDesc.caches().keySet());

            grpCaches.removeAll(cacheNames);

            if (!grpCaches.isEmpty()) {
                return errorFuture("Cannot change WAL mode because not all cache names belonging to the group are " +
                    "provided [group=" + grpDesc.groupName() + ", missingCaches=" + grpCaches + ']');
            }

            if (grpDesc.config().getCacheMode() == CacheMode.LOCAL)
                return errorFuture("WAL mode cannot be changed for LOCAL cache(s): " + cacheNames);

            // WAL mode change makes sense only for persistent groups.
            if (!grpDesc.persistenceEnabled())
                return errorFuture("Cannot change WAL mode because persistence is not enabled for cache(s) [" +
                    "caches=" + cacheNames + ", dataRegion=" + grpDesc.config().getDataRegionName() + ']');

            // Send request.
            final UUID opId = UUID.randomUUID();

            GridFutureAdapter<Boolean> fut = new GridFutureAdapter<>();

            // Self-cleaning: drop the future from the map once it is completed for whatever reason.
            fut.listen(new IgniteInClosure<IgniteInternalFuture<Boolean>>() {
                @Override public void apply(IgniteInternalFuture<Boolean> fut) {
                    synchronized (mux) {
                        userFuts.remove(opId);
                    }
                }
            });

            WalStateProposeMessage msg = new WalStateProposeMessage(opId, grpDesc.groupId(), grpDesc.deploymentId(),
                cctx.localNodeId(), caches, enabled);

            userFuts.put(opId, fut);

            try {
                cctx.discovery().sendCustomEvent(msg);

                if (log.isDebugEnabled())
                    log.debug("Initiated WAL state change operation: " + msg);
            }
            catch (Exception e) {
                IgniteCheckedException e0 =
                    new IgniteCheckedException("Failed to initiate WAL mode change due to unexpected exception.", e);

                fut.onDone(e0);
            }

            return fut;
        }
    }

    /**
     * Change local WAL state before exchange is done. This method will disable WAL for groups without partitions
     * in OWNING state if such feature is enabled.
     *
     * @param fut Exchange future.
     */
    public void disableGroupDurabilityForPreloading(GridDhtPartitionsExchangeFuture fut) {
        if (fut.changedBaseline() && IgniteSystemProperties.getBoolean(IGNITE_PENDING_TX_TRACKER_ENABLED) ||
            !IgniteSystemProperties.getBoolean(IGNITE_DISABLE_WAL_DURING_REBALANCING,
                DFLT_DISABLE_WAL_DURING_REBALANCING))
            return;

        Collection<CacheGroupContext> grpContexts = cctx.cache().cacheGroups();

        for (CacheGroupContext grp : grpContexts) {
            // Only persistent, affinity, rebalance-enabled groups with WAL currently on are eligible.
            if (grp.isLocal() || !grp.affinityNode() || !grp.persistenceEnabled() || !grp.localWalEnabled()
                || !grp.rebalanceEnabled() || !grp.shared().isRebalanceEnabled())
                continue;

            List<GridDhtLocalPartition> locParts = grp.topology().localPartitions();

            int cnt = 0;

            for (GridDhtLocalPartition locPart : locParts) {
                if (locPart.state() == MOVING || locPart.state() == RENTING)
                    cnt++;
            }

            // Disable local WAL only when every local partition is MOVING or RENTING (none OWNING).
            if (!locParts.isEmpty() && cnt == locParts.size())
                grp.localWalEnabled(false, true);
        }
    }

    /**
     * Handle propose message in discovery thread.
     *
     * @param msg Message.
     */
    public void onProposeDiscovery(WalStateProposeMessage msg) {
        if (isDuplicate(msg))
            return;

        synchronized (mux) {
            if (disconnected)
                return;

            // Validate current caches state before deciding whether to process message further.
            if (validateProposeDiscovery(msg)) {
                if (log.isDebugEnabled())
                    log.debug("WAL state change message is valid (will continue processing): " + msg);

                CacheGroupDescriptor grpDesc = cacheProcessor().cacheGroupDescriptors().get(msg.groupId());

                assert grpDesc != null;

                IgnitePredicate<ClusterNode> nodeFilter = grpDesc.config().getNodeFilter();

                boolean affNode = srv && (nodeFilter == null || nodeFilter.apply(cctx.localNode()));

                msg.affinityNode(affNode);

                if (grpDesc.addWalChangeRequest(msg)) {
                    // Head of the queue: schedule the message itself for the exchange thread.
                    msg.exchangeMessage(msg);

                    if (log.isDebugEnabled())
                        log.debug("WAL state change message will be processed in exchange thread: " + msg);
                }
                else {
                    if (log.isDebugEnabled())
                        log.debug("WAL state change message is added to pending set and will be processed later: " +
                            msg);
                }
            }
            else {
                if (log.isDebugEnabled())
                    log.debug("WAL state change message is invalid (will ignore): " + msg);
            }
        }
    }

    /**
     * Validate propose message.
     *
     * @param msg Message.
     * @return {@code True} if message should be processed further, {@code false} if no further processing is needed.
     */
    private boolean validateProposeDiscovery(WalStateProposeMessage msg) {
        GridFutureAdapter<Boolean> userFut = userFuts.get(msg.operationId());

        String errMsg = validate(msg);

        if (errMsg != null) {
            completeWithError(userFut, errMsg);

            return false;
        }

        return true;
    }

    /**
     * Validate propose message.
     *
     * @param msg Message.
     * @return Error message or {@code null} if everything is OK.
     */
    @Nullable private String validate(WalStateProposeMessage msg) {
        // Is group still there?
        CacheGroupDescriptor grpDesc = cacheProcessor().cacheGroupDescriptors().get(msg.groupId());

        if (grpDesc == null)
            return "Failed to change WAL mode because some caches no longer exist: " + msg.caches().keySet();

        // Are specified caches still there?
        for (Map.Entry<String, IgniteUuid> cache : msg.caches().entrySet()) {
            String cacheName = cache.getKey();

            DynamicCacheDescriptor cacheDesc = cacheProcessor().cacheDescriptor(cacheName);

            // Deployment ID mismatch means the cache was destroyed and re-created concurrently.
            if (cacheDesc == null || !F.eq(cacheDesc.deploymentId(), cache.getValue()))
                return "Cache doesn't exist: " + cacheName;
        }

        // Are there any new caches in the group?
        HashSet<String> grpCacheNames = new HashSet<>(grpDesc.caches().keySet());

        grpCacheNames.removeAll(msg.caches().keySet());

        if (!grpCacheNames.isEmpty()) {
            return "Cannot change WAL mode because not all cache names belonging to the " +
                "group are provided [group=" + grpDesc.groupName() + ", missingCaches=" + grpCacheNames + ']';
        }

        return null;
    }

    /**
     * Handle propose message which is synchronized with other cache state actions through exchange thread.
     * If operation is no-op (i.e. state is not changed), then no additional processing is needed, and coordinator will
     * trigger finish request right away. Otherwise all nodes start asynchronous checkpoint flush, and send responses
     * to coordinator. Once all responses are received, coordinator node will trigger finish message.
     *
     * @param msg Message.
     */
    public void onProposeExchange(WalStateProposeMessage msg) {
        if (!srv)
            return;

        synchronized (mux) {
            WalStateResult res = null;

            if (msg.affinityNode()) {
                // Affinity node, normal processing.
                CacheGroupContext grpCtx = cacheProcessor().cacheGroup(msg.groupId());

                if (grpCtx == null) {
                    // Related caches were destroyed concurrently.
                    res = new WalStateResult(msg, "Failed to change WAL mode because some caches " +
                        "no longer exist: " + msg.caches().keySet());
                }
                else {
                    if (F.eq(msg.enable(), grpCtx.globalWalEnabled()))
                        // Nothing changed -> no-op.
                        res = new WalStateResult(msg, false);
                    else {
                        // Initiate a checkpoint.
                        CheckpointProgress cpFut = triggerCheckpoint("wal-state-change-grp-" + msg.groupId());

                        if (cpFut != null) {
                            try {
                                // Wait for checkpoint mark synchronously before releasing the control.
                                cpFut.futureFor(LOCK_RELEASED).get();

                                if (msg.enable()) {
                                    grpCtx.globalWalEnabled(true);

                                    // Enable: it is enough to release cache operations once mark is finished because
                                    // not-yet-flushed dirty pages have been logged. Remaining wait happens in a
                                    // dedicated worker thread so the exchange thread is not blocked.
                                    WalStateChangeWorker worker = new WalStateChangeWorker(msg, cpFut);

                                    IgniteThread thread = new IgniteThread(worker);

                                    thread.setUncaughtExceptionHandler(new OomExceptionHandler(
                                        cctx.kernalContext()));

                                    thread.start();
                                }
                                else {
                                    // Disable: not-yet-flushed operations are not logged, so wait for them
                                    // synchronously in exchange thread. Otherwise, we cannot define a point in
                                    // when it is safe to continue cache operations.
                                    res = awaitCheckpoint(cpFut, msg);

                                    // WAL state is persisted after checkpoint if finished. Otherwise in case of crash
                                    // and restart we will think that WAL is enabled, but data might be corrupted.
                                    grpCtx.globalWalEnabled(false);
                                }
                            }
                            catch (Exception e) {
                                U.warn(log, "Failed to change WAL mode due to unexpected exception [" +
                                    "msg=" + msg + ']', e);

                                res = new WalStateResult(msg, "Failed to change WAL mode due to unexpected " +
                                    "exception (see server logs for more information): " + e.getMessage());
                            }
                        }
                        else {
                            res = new WalStateResult(msg, "Failed to initiate a checkpoint (checkpoint thread " +
                                "is not available).");
                        }
                    }
                }
            }
            else {
                // We cannot know result on non-affinity server node, so just complete operation with "false" flag,
                // which will be ignored anyway.
                res = new WalStateResult(msg, false);
            }

            if (res != null) {
                addResult(res);

                onCompletedLocally(res);
            }
        }
    }

    /**
     * Handle local operation completion.
     *
     * @param res Result.
     */
    private void onCompletedLocally(WalStateResult res) {
        assert res != null;

        synchronized (mux) {
            ClusterNode crdNode = coordinator();

            UUID opId = res.message().operationId();

            WalStateAckMessage msg = new WalStateAckMessage(opId, res.message().affinityNode(), res.changed(),
                res.errorMessage());

            // Handle distributed completion.
            if (crdNode.isLocal()) {
                // This node is the coordinator: start (or restart) the distributed process locally.
                Collection<ClusterNode> srvNodes = cctx.discovery().aliveServerNodes();

                Collection<UUID> srvNodeIds = new ArrayList<>(srvNodes.size());

                for (ClusterNode srvNode : srvNodes) {
                    if (cctx.discovery().alive(srvNode))
                        srvNodeIds.add(srvNode.id());
                }

                WalStateDistributedProcess proc = new WalStateDistributedProcess(res.message(), srvNodeIds);

                procs.put(res.message().operationId(), proc);

                unwindPendingAcks(proc);

                proc.onNodeFinished(cctx.localNodeId(), msg);

                sendFinishMessageIfNeeded(proc);
            }
            else {
                // Just send message to coordinator.
                try {
                    cctx.kernalContext().io().sendToGridTopic(crdNode, TOPIC_WAL, msg, SYSTEM_POOL);
                }
                catch (IgniteCheckedException e) {
                    // Best effort: if the coordinator left, onNodeLeft() will re-drive the operation.
                    U.warn(log, "Failed to send ack message to coordinator node [opId=" + opId +
                        ", node=" + crdNode.id() + ']');
                }
            }
        }
    }

    /**
     * Unwind pending ack messages for the given distributed process.
     *
     * @param proc Process.
     */
    private void unwindPendingAcks(WalStateDistributedProcess proc) {
        assert Thread.holdsLock(mux);

        Iterator<WalStateAckMessage> iter = pendingAcks.iterator();

        while (iter.hasNext()) {
            WalStateAckMessage ackMsg = iter.next();

            if (F.eq(proc.operationId(), ackMsg.operationId())) {
                proc.onNodeFinished(ackMsg.senderNodeId(), ackMsg);

                iter.remove();
            }
        }
    }

    /**
     * Handle ack message.
     *
     * @param msg Ack message.
     */
    public void onAck(WalStateAckMessage msg) {
        synchronized (mux) {
            if (completedOpIds.contains(msg.operationId()))
                // Skip stale messages.
                return;

            WalStateDistributedProcess proc = procs.get(msg.operationId());

            if (proc == null)
                // If process is not initialized yet, add to pending set.
                pendingAcks.add(msg);
            else {
                // Notify process on node completion.
                proc.onNodeFinished(msg.senderNodeId(), msg);

                sendFinishMessageIfNeeded(proc);
            }
        }
    }

    /**
     * Send finish message for the given distributed process if needed.
     *
     * @param proc Process.
     */
    private void sendFinishMessageIfNeeded(WalStateDistributedProcess proc) {
        if (proc.completed())
            sendFinishMessage(proc.prepareFinishMessage());
    }

    /**
     * Send finish message.
     *
     * @param finishMsg Finish message.
     */
    private void sendFinishMessage(WalStateFinishMessage finishMsg) {
        try {
            cctx.discovery().sendCustomEvent(finishMsg);
        }
        catch (Exception e) {
            U.error(log, "Failed to send WAL mode change finish message due to unexpected exception: " +
                finishMsg, e);
        }
    }

    /**
     * Handle finish message in discovery thread.
     *
     * @param msg Message.
     */
    public void onFinishDiscovery(WalStateFinishMessage msg) {
        if (isDuplicate(msg))
            return;

        synchronized (mux) {
            if (disconnected)
                return;

            // Complete user future, if any.
            GridFutureAdapter<Boolean> userFut = userFuts.get(msg.operationId());

            if (userFut != null) {
                if (msg.errorMessage() != null)
                    completeWithError(userFut, msg.errorMessage());
                else
                    complete(userFut, msg.changed());
            }

            // Clear pending data.
            WalStateResult res = ress.remove(msg.operationId());

            if (res == null && srv)
                U.warn(log, "Received finish message for unknown operation (will ignore): " + msg.operationId());

            procs.remove(msg.operationId());

            CacheGroupDescriptor grpDesc = cacheProcessor().cacheGroupDescriptors().get(msg.groupId());

            if (grpDesc != null && F.eq(grpDesc.deploymentId(), msg.groupDeploymentId())) {
                // Toggle WAL mode in descriptor.
                if (msg.changed())
                    grpDesc.walEnabled(!grpDesc.walEnabled());

                // Remove now-outdated message from the queue.
                WalStateProposeMessage oldProposeMsg = grpDesc.nextWalChangeRequest();

                assert oldProposeMsg != null;
                assert F.eq(oldProposeMsg.operationId(), msg.operationId());

                grpDesc.removeWalChangeRequest();

                // Move next message to exchange thread.
                WalStateProposeMessage nextProposeMsg = grpDesc.nextWalChangeRequest();

                if (nextProposeMsg != null)
                    msg.exchangeMessage(nextProposeMsg);
            }

            if (srv) {
                // Remember operation ID to handle duplicates.
                completedOpIds.add(msg.operationId());

                // Remove possible stale messages.
                Iterator<WalStateAckMessage> ackIter = pendingAcks.iterator();

                while (ackIter.hasNext()) {
                    WalStateAckMessage ackMsg = ackIter.next();

                    if (F.eq(ackMsg.operationId(), msg.operationId()))
                        ackIter.remove();
                }
            }
        }
    }

    /**
     * Handle node leave event.
     *
     * @param nodeId Node ID.
     */
    public void onNodeLeft(UUID nodeId) {
        if (!srv)
            return;

        synchronized (mux) {
            if (crdNode == null) {
                // Coordinator not resolved yet means no operation was started locally.
                assert ress.isEmpty();
                assert procs.isEmpty();

                return;
            }

            if (F.eq(crdNode.id(), nodeId)) {
                // Coordinator exited, re-send to new, or initialize new distributed processes.
                crdNode = null;

                for (WalStateResult res : ress.values())
                    onCompletedLocally(res);
            }
            else if (F.eq(cctx.localNodeId(), crdNode.id())) {
                // Notify distributed processes on node leave.
                for (Map.Entry<UUID, WalStateDistributedProcess> procEntry : procs.entrySet()) {
                    WalStateDistributedProcess proc = procEntry.getValue();

                    proc.onNodeLeft(nodeId);

                    sendFinishMessageIfNeeded(proc);
                }
            }
        }
    }

    /**
     * Create future with error.
     *
     * @param errMsg Error message.
     * @return Future.
     */
    @SuppressWarnings("Convert2Diamond")
    private static IgniteInternalFuture<Boolean> errorFuture(String errMsg) {
        return new GridFinishedFuture<Boolean>(new IgniteCheckedException(errMsg));
    }

    /**
     * Complete user future with normal result.
     *
     * @param userFut User future.
     * @param res Result.
     */
    private static void complete(@Nullable GridFutureAdapter<Boolean> userFut, boolean res) {
        if (userFut != null)
            userFut.onDone(res);
    }

    /**
     * Complete user future with error.
     *
     * @param userFut User future (may be {@code null}, in which case this is a no-op).
     * @param errMsg Error message.
     */
    private static void completeWithError(@Nullable GridFutureAdapter<Boolean> userFut, String errMsg) {
        if (userFut != null)
            userFut.onDone(new IgniteCheckedException(errMsg));
    }

    /**
     * @return Cache processor.
     */
    private GridCacheProcessor cacheProcessor() {
        return cctx.cache();
    }

    /**
     * Get current coordinator node (the alive server node with the lowest order).
     *
     * @return Coordinator node.
     */
    private ClusterNode coordinator() {
        assert Thread.holdsLock(mux);

        if (crdNode != null)
            return crdNode;
        else {
            ClusterNode res = null;

            for (ClusterNode node : cctx.discovery().aliveServerNodes()) {
                if (res == null || res.order() > node.order())
                    res = node;
            }

            assert res != null;

            crdNode = res;

            return res;
        }
    }

    /**
     * Check if discovery message has already been received.
     *
     * @param msg Message.
     * @return {@code True} if this is a duplicate.
     */
    private boolean isDuplicate(WalStateAbstractMessage msg) {
        // Key distinguishes propose (true) from finish (false) messages for the same operation ID.
        T2<UUID, Boolean> key;

        if (msg instanceof WalStateProposeMessage)
            key = new T2<>(msg.operationId(), true);
        else {
            assert msg instanceof WalStateFinishMessage;

            key = new T2<>(msg.operationId(), false);
        }

        if (!discoMsgIdHist.add(key)) {
            U.warn(log, "Received duplicate WAL mode change discovery message (will ignore): " + msg);

            return true;
        }

        return false;
    }

    /**
     * Add locally result to pending map.
     *
     * @param res Result.
     */
    private void addResult(WalStateResult res) {
        ress.put(res.message().operationId(), res);
    }

    /**
     * Force checkpoint.
     *
     * @param msg Message.
     * @return Checkpoint future or {@code null} if failed to get checkpointer.
     */
    @Nullable private CheckpointProgress triggerCheckpoint(String msg) {
        return cctx.database().forceCheckpoint(msg);
    }

    /**
     * Await for the checkpoint to finish.
     *
     * @param cpFut Checkpoint future.
     * @param msg Original message which triggered the process.
     * @return Result.
     */
    private WalStateResult awaitCheckpoint(CheckpointProgress cpFut, WalStateProposeMessage msg) {
        WalStateResult res;

        try {
            assert msg.affinityNode();

            if (cpFut != null)
                cpFut.futureFor(FINISHED).get();

            res = new WalStateResult(msg, true);
        }
        catch (Exception e) {
            U.warn(log, "Failed to change WAL mode due to unexpected exception [msg=" + msg + ']', e);

            res = new WalStateResult(msg, "Failed to change WAL mode due to unexpected exception " +
                "(see server logs for more information): " + e.getMessage());
        }

        return res;
    }

    /**
     * Checks WAL disabled for cache group.
     *
     * @param grpId Group id.
     * @return {@code True} if WAL disabled for group. {@code False} if not.
     */
    public boolean isDisabled(int grpId) {
        CacheGroupContext ctx = cctx.cache().cacheGroup(grpId);

        return ctx != null && !ctx.walEnabled();
    }

    /**
     * @return WAL disable context.
     */
    public WALDisableContext walDisableContext() {
        return walDisableContext;
    }

    /**
     * No record will be logged in closure call.
     *
     * @param cls Closure to execute out of WAL scope.
     * @throws IgniteCheckedException If operation failed.
     */
    public void runWithOutWAL(IgniteRunnable cls) throws IgniteCheckedException {
        WALDisableContext ctx = walDisableContext;

        if (ctx == null)
            throw new IgniteCheckedException("Disable WAL context is not initialized.");

        ctx.execute(cls);
    }

    /**
     * WAL state change worker. Waits for the checkpoint to finish off the exchange thread (WAL enable path),
     * then reports the local result.
     */
    private class WalStateChangeWorker extends GridWorker {
        /** Message. */
        private final WalStateProposeMessage msg;

        /** Checkpoint future. */
        private final CheckpointProgress cpFut;

        /**
         * Constructor.
         *
         * @param msg Propose message.
         * @param cpFut Checkpoint future to await.
         */
        private WalStateChangeWorker(WalStateProposeMessage msg, CheckpointProgress cpFut) {
            super(cctx.igniteInstanceName(), "wal-state-change-worker-" + msg.groupId(), WalStateManager.this.log);

            this.msg = msg;
            this.cpFut = cpFut;
        }

        /** {@inheritDoc} */
        @Override protected void body() throws InterruptedException, IgniteInterruptedCheckedException {
            WalStateResult res = awaitCheckpoint(cpFut, msg);

            addResult(res);

            onCompletedLocally(res);
        }
    }

    /**
     * Temporary storage for disabled WALs of group. Persists a marker in the metastorage while WAL is off so
     * that a crash inside the WAL-disabled window can be detected and cleaned up on restart.
     */
    public static class WALDisableContext implements MetastorageLifecycleListener {
        /** Metastorage key marking that WAL is currently disabled. */
        public static final String WAL_DISABLED = "wal-disabled";

        /** Logger (may be {@code null}). */
        private final IgniteLogger log;

        /** Database manager. */
        private final IgniteCacheDatabaseSharedManager dbMgr;

        /** Read-write metastorage; set when {@link #onReadyForReadWrite(ReadWriteMetastorage)} fires. */
        private volatile ReadWriteMetastorage metaStorage;

        /** Page store manager. */
        private final IgnitePageStoreManager pageStoreMgr;

        /** Set when a previous crash with disabled WAL was detected; marker is removed on next read-write ready. */
        private volatile boolean resetWalFlag;

        /** Current "WAL disabled" flag exposed via {@link #check()}. */
        private volatile boolean disableWal;

        /**
         * @param dbMgr Database manager.
         * @param pageStoreMgr Page store manager.
         * @param log Logger.
         */
        public WALDisableContext(
            IgniteCacheDatabaseSharedManager dbMgr,
            IgnitePageStoreManager pageStoreMgr,
            @Nullable IgniteLogger log
        ) {
            this.dbMgr = dbMgr;
            this.pageStoreMgr = pageStoreMgr;
            this.log = log;
        }

        /**
         * Runs the closure with WAL disabled: persists the disabled marker, checkpoints, disables WAL,
         * runs the closure, then re-enables WAL, checkpoints again and removes the marker.
         *
         * @param cls Closure to execute with disabled WAL.
         * @throws IgniteCheckedException If execution failed.
         */
        public void execute(IgniteRunnable cls) throws IgniteCheckedException {
            if (cls == null)
                throw new IgniteCheckedException("Task to execute is not specified.");

            if (metaStorage == null)
                throw new IgniteCheckedException("Meta storage is not ready.");

            writeMetaStoreDisableWALFlag();

            dbMgr.waitForCheckpoint("Checkpoint before apply updates on recovery.");

            disableWAL(true);

            try {
                cls.run();
            }
            catch (IgniteException e) {
                throw new IgniteCheckedException(e);
            }
            finally {
                disableWAL(false);

                dbMgr.waitForCheckpoint("Checkpoint after apply updates on recovery.");

                removeMetaStoreDisableWALFlag();
            }
        }

        /**
         * @throws IgniteCheckedException If write meta store flag failed.
         */
        protected void writeMetaStoreDisableWALFlag() throws IgniteCheckedException {
            dbMgr.checkpointReadLock();

            try {
                metaStorage.write(WAL_DISABLED, Boolean.TRUE);
            }
            finally {
                dbMgr.checkpointReadUnlock();
            }
        }

        /**
         * @throws IgniteCheckedException If remove meta store flag failed.
         */
        protected void removeMetaStoreDisableWALFlag() throws IgniteCheckedException {
            dbMgr.checkpointReadLock();

            try {
                metaStorage.remove(WAL_DISABLED);
            }
            finally {
                dbMgr.checkpointReadUnlock();
            }
        }

        /**
         * Toggles the in-memory "WAL disabled" flag under checkpoint read lock.
         *
         * @param disable Flag wal disable.
         * @throws IgniteCheckedException If failed.
         */
        protected void disableWAL(boolean disable) throws IgniteCheckedException {
            dbMgr.checkpointReadLock();

            try {
                disableWal = disable;

                if (log != null)
                    log.info("WAL logging " + (disable ? "disabled" : "enabled"));
            }
            finally {
                dbMgr.checkpointReadUnlock();
            }
        }

        /** {@inheritDoc} */
        @Override public void onReadyForRead(ReadOnlyMetastorage ms) throws IgniteCheckedException {
            Boolean disabled = (Boolean)ms.read(WAL_DISABLED);

            // Node crash when WAL was disabled: persisted data may be inconsistent, wipe it.
            if (disabled != null && disabled) {
                resetWalFlag = true;

                pageStoreMgr.cleanupPersistentSpace();

                dbMgr.cleanupTempCheckpointDirectory();

                dbMgr.cleanupCheckpointDirectory();
            }
        }

        /** {@inheritDoc} */
        @Override public void onReadyForReadWrite(ReadWriteMetastorage ms) throws IgniteCheckedException {
            // On new node start WAL always enabled. Remove flag from metastore.
            if (resetWalFlag)
                ms.remove(WAL_DISABLED);

            metaStorage = ms;
        }

        /**
         * @return {@code true} If WAL is disabled.
         */
        public boolean check() {
            return disableWal;
        }
    }

    /**
     * Checkpoint reason for enabling group durability.
     *
     * @param grpId Group id.
     * @param topVer Topology version.
     * @return Human-readable checkpoint reason string.
     */
    public static String reason(long grpId, AffinityTopologyVersion topVer) {
        return ENABLE_DURABILITY_AFTER_REBALANCING + grpId + "-" + topVer;
    }
}
/* * Copyright 2006 Martin B. Smith * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mbs3.jkaraoke; import java.util.BitSet; /* * Created on Jul 8, 2006 TODO Nothing yet. */ /** * @author Martin Smith typedef struct { char command; char instruction; char * parityQ[2]; char data[16]; char parityP[4]; } SubCode; The 16 bytes * of data are each divided like so: Channel# P Q R S T U V W Bit# 0-15 * 7 6 5 4 3 2 1 0 */ public class Packet { // the entire packet private byte[] packetBytes; // instruction and command mask public static final long SC_MASK = 0x3F; public static final long SC_CDG_COMMAND = 0x09; // instruction constants // Set the screen to a particular color public static final int INSTR_MEMORY_PRESET = 1; // Set the border of the screen to a particular color public static final int INSTR_BORDER_PRESET = 2; // Load a 12 x 6, 2 color tile and display it normally. public static final int INSTR_TILE_BLOCK = 6; // Scroll the image, filling in the new area with a color. public static final int INSTR_SCROLL_PRESET = 20; // Scroll the image, rotating the bits back around. public static final int INSTR_SCROLL_COPY = 24; // Define a specific color as being transparent. public static final int INSTR_DEFINE_TRANSPARENT = 28; // Load in the lower 8 entries of the color table (0-7). public static final int INSTR_LOAD_COLOR_TABLE_LOWER = 30; // Load in the upper 8 entries of the color table (8-15). 
public static final int INSTR_LOAD_COLOR_TABLE_UPPER = 31; // Load a 12 x 6, 2 color tile and display it using the XOR method. public static final int INSTR_TILE_BLOCK_XOR = 38; /** * */ public Packet (byte[] packetBytes) { super(); this.packetBytes = new byte[packetBytes.length]; for (int i = 0; i < packetBytes.length; i++) this.packetBytes[i] = packetBytes[i]; } public byte getCommand () { return (byte)(this.packetBytes[0] & SC_MASK); } public byte getInstruction () { return (byte)(this.packetBytes[1] & SC_MASK); } public byte[] getParityQ () { byte[] ret = new byte[2]; ret[0] = this.packetBytes[2]; ret[1] = this.packetBytes[3]; return ret; } public byte[] getData () { byte[] ret = new byte[16]; for (int i = 0; i < 16; i++) ret[i] = (byte)(this.packetBytes[i + 4] & 0x3F); return ret; } public String toString () { StringBuffer sb = new StringBuffer(); sb.append("Command: " + constToString(this.getCommand()) + "\n"); sb.append("Instruction: " + constToString(this.getInstruction()) + "\n"); sb.append("Data: " + byteArrayToBitString(this.getData()) + "\n"); return sb.toString(); } public static String byteArrayToIntString (byte[] bytes) { StringBuffer sb = new StringBuffer(); sb.append("["); for (int i = 0; i < bytes.length; i++) { sb.append(bytes[i]); if (i != bytes.length - 1) sb.append(","); } sb.append("]"); return sb.toString(); } public static String byteArrayToBitString (byte[] bytes) { StringBuffer sb = new StringBuffer(); sb.append("["); for (int i = 0; i < bytes.length; i++) { sb.append(Integer.toBinaryString((int)bytes[i])); if (i != bytes.length - 1) sb.append(","); } sb.append("]"); return sb.toString(); } static String byteArrayToHexString (byte in[]) { byte ch = 0x00; int i = 0; if (in == null || in.length <= 0) return null; String pseudo[] = { "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F" }; StringBuffer out = new StringBuffer(in.length * 2); while (i < in.length) { ch = (byte) (in[i] & 0xF0); // Strip off high nibble 
ch = (byte) (ch >>> 4); // shift the bits down ch = (byte) (ch & 0x0F); // must do this is high order bit is on! out.append(pseudo[(int) ch]); // convert the nibble to a String // Character ch = (byte) (in[i] & 0x0F); // Strip off low nibble out.append(pseudo[(int) ch]); // convert the nibble to a String // Character i++; } String rslt = new String(out); return rslt; } // Returns a bitset containing the values in bytes. // The byte-ordering of bytes must be big-endian which means the most // significant bit is in element 0. public static BitSet toBitSet (byte[] bytes) { BitSet bits = new BitSet(); for (int i = 0; i < bytes.length * 8; i++) { if ((bytes[bytes.length - i / 8 - 1] & (1 << (i % 8))) > 0) { bits.set(i); } } return bits; } public static BitSet toBitSet (byte myByte) { byte[] b = new byte[1]; b[0] = myByte; return toBitSet(b); } // Returns a byte array of at least length 1. // The most significant bit in the result is guaranteed not to be a 1 // (since BitSet does not support sign extension). // The byte-ordering of the result is big-endian which means the most // significant bit is in element 0. // The bit at index 0 of the bit set is assumed to be the least significant // bit. 
public static byte[] toByteArray (BitSet bits) { byte[] bytes = new byte[bits.length() / 8 + 1]; for (int i = 0; i < bits.length(); i++) { if (bits.get(i)) { bytes[bytes.length - i / 8 - 1] |= 1 << (i % 8); } } return bytes; } public String constToString (long constVal) { if(constVal == SC_CDG_COMMAND) return "SC_CDG_COMMAND"; else if (constVal == INSTR_MEMORY_PRESET) return "INSTR_MEMORY_PRESET"; else if (constVal == INSTR_BORDER_PRESET) return "INSTR_BORDER_PRESET"; else if (constVal == INSTR_TILE_BLOCK) return "INSTR_TILE_BLOCK"; else if (constVal == INSTR_SCROLL_PRESET) return "INSTR_SCROLL_PRESET"; else if (constVal == INSTR_SCROLL_COPY) return "INSTR_SCROLL_COPY"; else if (constVal == INSTR_DEFINE_TRANSPARENT) return "INSTR_DEFINE_TRANSPARENT"; else if (constVal == INSTR_LOAD_COLOR_TABLE_LOWER) return "INSTR_LOAD_COLOR_TABLE_LOWER"; else if (constVal == INSTR_LOAD_COLOR_TABLE_UPPER) return "INSTR_LOAD_COLOR_TABLE_UPPER"; else if (constVal == INSTR_TILE_BLOCK_XOR) return "INSTR_TILE_BLOCK_XOR"; else return "UNKNOWN - " + constVal; } public static String bitSetToBinaryString (BitSet bs) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < bs.size(); i++) if (bs.get(i)) sb.append("1"); else sb.append("0"); return sb.toString(); } }
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.util.BigDoubleArrayList;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.GeoPointValues;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;

/**
 * Per-segment geo-point field data backed by two parallel double lists
 * (longitude and latitude). The concrete storage layout is chosen by the
 * nested subclasses: {@link Empty} (no values), {@link WithOrdinals}
 * (multi-valued / ordinal-indexed), {@link SingleFixedSet} (single-valued
 * with missing docs tracked in a bitset) and {@link Single} (dense,
 * single-valued).
 */
public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointFieldData<ScriptDocValues> {

    /** Factory for the no-values layout. */
    public static GeoPointDoubleArrayAtomicFieldData empty(int numDocs) {
        return new Empty(numDocs);
    }

    private final int numDocs;

    // Cached memory estimate; -1 means "not computed yet" (computed lazily in
    // each subclass's getMemorySizeInBytes()).
    protected long size = -1;

    public GeoPointDoubleArrayAtomicFieldData(int numDocs) {
        this.numDocs = numDocs;
    }

    @Override
    public void close() {
        // Nothing to release: the backing arrays are plain heap structures.
    }

    @Override
    public int getNumDocs() {
        return numDocs;
    }

    @Override
    public ScriptDocValues getScriptValues() {
        return new ScriptDocValues.GeoPoints(getGeoPointValues());
    }

    /** Layout for a segment with no geo-point values at all. */
    static class Empty extends GeoPointDoubleArrayAtomicFieldData {

        Empty(int numDocs) {
            super(numDocs);
        }

        @Override
        public boolean isMultiValued() {
            return false;
        }

        @Override
        public boolean isValuesOrdered() {
            return false;
        }

        @Override
        public long getNumberUniqueValues() {
            return 0;
        }

        @Override
        public long getMemorySizeInBytes() {
            return 0;
        }

        @Override
        public BytesValues getBytesValues(boolean needsHashes) {
            return BytesValues.EMPTY;
        }

        @Override
        public GeoPointValues getGeoPointValues() {
            return GeoPointValues.EMPTY;
        }

        @Override
        public ScriptDocValues getScriptValues() {
            return ScriptDocValues.EMPTY;
        }
    }

    /**
     * Layout where lat/lon lists are indexed by ordinal; per-document
     * ordinals come from {@link Ordinals}. Supports multi-valued fields.
     */
    static class WithOrdinals extends GeoPointDoubleArrayAtomicFieldData {

        private final BigDoubleArrayList lon, lat;
        private final Ordinals ordinals;

        public WithOrdinals(BigDoubleArrayList lon, BigDoubleArrayList lat, int numDocs, Ordinals ordinals) {
            super(numDocs);
            this.lon = lon;
            this.lat = lat;
            this.ordinals = ordinals;
        }

        @Override
        public boolean isMultiValued() {
            return ordinals.isMultiValued();
        }

        @Override
        public boolean isValuesOrdered() {
            return true;
        }

        @Override
        public long getNumberUniqueValues() {
            return ordinals.getNumOrds();
        }

        @Override
        public long getMemorySizeInBytes() {
            // Computed lazily and cached in the inherited 'size' field.
            if (size == -1) {
                size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + lon.sizeInBytes() + lat.sizeInBytes();
            }
            return size;
        }

        @Override
        public GeoPointValues getGeoPointValues() {
            return new GeoPointValuesWithOrdinals(lon, lat, ordinals.ordinals());
        }

        public static class GeoPointValuesWithOrdinals extends GeoPointValues {

            private final BigDoubleArrayList lon, lat;
            private final Ordinals.Docs ordinals;

            // Reused for every nextValue() call — callers must not hold on to
            // the returned GeoPoint across calls.
            private final GeoPoint scratch = new GeoPoint();

            GeoPointValuesWithOrdinals(BigDoubleArrayList lon, BigDoubleArrayList lat, Ordinals.Docs ordinals) {
                super(ordinals.isMultiValued());
                this.lon = lon;
                this.lat = lat;
                this.ordinals = ordinals;
            }

            @Override
            public GeoPoint nextValue() {
                final long ord = ordinals.nextOrd();
                // Ordinal 0 is never returned here (the assert documents that
                // callers only invoke nextValue() after setDocument reported
                // at least one value).
                assert ord > 0;
                return scratch.reset(lat.get(ord), lon.get(ord));
            }

            @Override
            public int setDocument(int docId) {
                this.docId = docId;
                // Returns the number of values for this document.
                return ordinals.setDocument(docId);
            }
        }
    }

    /**
     * Assumes unset values are marked in bitset, and docId is used as the index to the value array.
     */
    public static class SingleFixedSet extends GeoPointDoubleArrayAtomicFieldData {

        private final BigDoubleArrayList lon, lat;
        private final FixedBitSet set;
        private final long numOrds;

        public SingleFixedSet(BigDoubleArrayList lon, BigDoubleArrayList lat, int numDocs, FixedBitSet set, long numOrds) {
            super(numDocs);
            this.lon = lon;
            this.lat = lat;
            this.set = set;
            this.numOrds = numOrds;
        }

        @Override
        public boolean isMultiValued() {
            return false;
        }

        @Override
        public boolean isValuesOrdered() {
            return false;
        }

        @Override
        public long getNumberUniqueValues() {
            return numOrds;
        }

        @Override
        public long getMemorySizeInBytes() {
            if (size == -1) {
                // Includes the bitset's backing long[] on top of the lat/lon lists.
                size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + lon.sizeInBytes() + lat.sizeInBytes() + RamUsageEstimator.sizeOf(set.getBits());
            }
            return size;
        }

        @Override
        public GeoPointValues getGeoPointValues() {
            return new GeoPointValuesSingleFixedSet(lon, lat, set);
        }

        static class GeoPointValuesSingleFixedSet extends GeoPointValues {

            private final BigDoubleArrayList lon;
            private final BigDoubleArrayList lat;
            private final FixedBitSet set;
            // Reused scratch point — see note in GeoPointValuesWithOrdinals.
            private final GeoPoint scratch = new GeoPoint();

            GeoPointValuesSingleFixedSet(BigDoubleArrayList lon, BigDoubleArrayList lat, FixedBitSet set) {
                super(false);
                this.lon = lon;
                this.lat = lat;
                this.set = set;
            }

            @Override
            public int setDocument(int docId) {
                this.docId = docId;
                // A document has exactly one value iff its bit is set.
                return set.get(docId) ? 1 : 0;
            }

            @Override
            public GeoPoint nextValue() {
                return scratch.reset(lat.get(docId), lon.get(docId));
            }
        }
    }

    /**
     * Assumes all the values are "set", and docId is used as the index to the value array.
     */
    public static class Single extends GeoPointDoubleArrayAtomicFieldData {

        private final BigDoubleArrayList lon, lat;
        private final long numOrds;

        public Single(BigDoubleArrayList lon, BigDoubleArrayList lat, int numDocs, long numOrds) {
            super(numDocs);
            this.lon = lon;
            this.lat = lat;
            this.numOrds = numOrds;
        }

        @Override
        public boolean isMultiValued() {
            return false;
        }

        @Override
        public boolean isValuesOrdered() {
            return false;
        }

        @Override
        public long getNumberUniqueValues() {
            return numOrds;
        }

        @Override
        public long getMemorySizeInBytes() {
            if (size == -1) {
                size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + (lon.sizeInBytes() + lat.sizeInBytes());
            }
            return size;
        }

        @Override
        public GeoPointValues getGeoPointValues() {
            return new GeoPointValuesSingle(lon, lat);
        }

        static class GeoPointValuesSingle extends GeoPointValues {

            private final BigDoubleArrayList lon;
            private final BigDoubleArrayList lat;
            // Reused scratch point — see note in GeoPointValuesWithOrdinals.
            private final GeoPoint scratch = new GeoPoint();

            GeoPointValuesSingle(BigDoubleArrayList lon, BigDoubleArrayList lat) {
                super(false);
                this.lon = lon;
                this.lat = lat;
            }

            @Override
            public int setDocument(int docId) {
                this.docId = docId;
                // Dense layout: every document has exactly one value.
                return 1;
            }

            @Override
            public GeoPoint nextValue() {
                return scratch.reset(lat.get(docId), lon.get(docId));
            }
        }
    }
}
/* * ARX: Powerful Data Anonymization * Copyright 2012 - 2015 Florian Kohlmayer, Fabian Prasser * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.deidentifier.arx.aggregates; import java.text.ParseException; import java.util.ArrayList; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics; import org.deidentifier.arx.AttributeType; import org.deidentifier.arx.AttributeType.Hierarchy; import org.deidentifier.arx.DataHandleStatistics; import org.deidentifier.arx.DataHandleStatistics.InterruptHandler; import org.deidentifier.arx.DataScale; import org.deidentifier.arx.DataType; import org.deidentifier.arx.DataType.ARXString; import org.deidentifier.arx.DataType.DataTypeWithRatioScale; import org.deidentifier.arx.aggregates.StatisticsContingencyTable.Entry; import org.deidentifier.arx.aggregates.StatisticsSummary.StatisticsSummaryOrdinal; import cern.colt.GenericSorting; import cern.colt.Swapper; import cern.colt.function.IntComparator; /** * A class offering basic descriptive statistics about data handles. * * @author Fabian Prasser */ public class StatisticsBuilder { /** * Local class for interrupts. 
 *
 * @author Fabian Prasser
 */
class ComputationInterruptedException extends RuntimeException {

    /** TODO */
    private static final long serialVersionUID = 5339918851212367422L;

    /**
     * Creates an instance with the given message.
     *
     * @param message
     */
    public ComputationInterruptedException(String message) {
        super(message);
    }

    /**
     * Creates an instance wrapping the given cause.
     *
     * @param cause
     */
    public ComputationInterruptedException(Throwable cause) {
        super(cause);
    }
}

/** The equivalence class statistics. */
private StatisticsEquivalenceClasses ecStatistics;

/** The handle. */
private DataHandleStatistics handle;

/** The stop flag. Set asynchronously; checked cooperatively via checkInterrupt(). */
private volatile boolean interrupt;

/**
 * Creates a new instance.
 *
 * @param handle
 * @param ecStatistics
 */
public StatisticsBuilder(DataHandleStatistics handle, StatisticsEquivalenceClasses ecStatistics) {
    this.ecStatistics = ecStatistics;
    this.handle = handle;
}

/**
 * Returns a contingency table for the given columns.
 *
 * @param column1 The first column
 * @param orderFromDefinition1 Indicates whether the order that should be assumed for string data items
 *                             can (and should) be derived from the hierarchy provided in the data
 *                             definition (if any)
 * @param column2 The second column
 * @param orderFromDefinition2 Indicates whether the order that should be assumed for string data items
 *                             can (and should) be derived from the hierarchy provided in the data
 *                             definition (if any)
 * @return
 */
public StatisticsContingencyTable getContingencyTable(int column1,
                                                      boolean orderFromDefinition1,
                                                      int column2,
                                                      boolean orderFromDefinition2) {
    return getContingencyTable(column1,
                               getHierarchy(column1, orderFromDefinition1),
                               column2,
                               getHierarchy(column2, orderFromDefinition2));
}

/**
 * Returns a contingency table for the given columns. The order for string data items is derived
 * from the provided hierarchies
 *
 * @param column1 The first column
 * @param hierarchy1 The hierarchy for the first column, may be null
 * @param column2 The second column
 * @param hierarchy2 The hierarchy for the second column, may be null
 * @return
 */
public StatisticsContingencyTable getContingencyTable(int column1,
                                                      Hierarchy hierarchy1,
                                                      int column2,
                                                      Hierarchy hierarchy2) {

    // Reset stop flag
    interrupt = false;

    // Init
    String[] values1 = getDistinctValuesOrdered(column1, hierarchy1);
    String[] values2 = getDistinctValuesOrdered(column2, hierarchy2);

    // Create maps of indexes: value -> position in the ordered value array
    Map<String, Integer> indexes1 = new HashMap<String, Integer>();
    for (int i = 0; i < values1.length; i++) {
        checkInterrupt();
        indexes1.put(values1[i], i);
    }
    Map<String, Integer> indexes2 = new HashMap<String, Integer>();
    for (int i = 0; i < values2.length; i++) {
        checkInterrupt();
        indexes2.put(values2[i], i);
    }

    // Create entry set: count co-occurrences of each (index1, index2) pair
    int max = Integer.MIN_VALUE;
    final Map<Entry, Integer> entries = new HashMap<Entry, Integer>();
    for (int row = 0; row < handle.getNumRows(); row++) {
        checkInterrupt();
        int index1 = indexes1.get(handle.getValue(row, column1));
        int index2 = indexes2.get(handle.getValue(row, column2));
        Entry entry = new Entry(index1, index2);
        Integer previous = entries.get(entry);
        int value = previous != null ? previous + 1 : 1;
        max = Math.max(max, value);
        entries.put(entry, value);
    }

    // Create iterator that converts raw counts into relative frequencies
    // on the fly and drops its references once exhausted.
    final int count = handle.getNumRows();
    final Iterator<Entry> internal = entries.keySet().iterator();
    final Iterator<Entry> iterator = new Iterator<Entry>() {

        private Map<Entry, Integer> _entries  = entries;
        private Iterator<Entry>     _internal = internal;

        @Override
        public boolean hasNext() {
            if (_internal == null) return false;
            boolean result = _internal.hasNext();
            // Try to release resources as early as possible
            if (!result) {
                _internal = null;
                _entries = null;
            }
            return result;
        }

        @Override
        public Entry next() {
            // NOTE(review): returns null when exhausted instead of throwing
            // NoSuchElementException — callers appear to rely on hasNext().
            if (_internal == null) return null;
            Entry e = _internal.next();
            e.frequency = (double) _entries.get(e) / (double) count;
            return e;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    };

    // Return result
    return new StatisticsContingencyTable(values1, values2, count, (double) max / (double) count, iterator);
}

/**
 * Returns a contingency table for the given columns. This method assumes that the
 * order of string data items can (and should) be derived from the hierarchies provided
 * in the data definition (if any)
 *
 * @param column1 The first column
 * @param column2 The second column
 * @return
 */
public StatisticsContingencyTable getContingencyTable(int column1, int column2) {
    return getContingencyTable(column1, true, column2, true);
}

/**
 * Returns a contingency table for the given columns.
 *
 * @param column1 The first column
 * @param size1 The maximal size in this dimension
 * @param orderFromDefinition1 Indicates whether the order that should be assumed for string data items
 *                             can (and should) be derived from the hierarchy provided in the data
 *                             definition (if any)
 * @param column2 The second column
 * @param size2 The maximal size in this dimension
 * @param orderFromDefinition2 Indicates whether the order that should be assumed for string data items
 *                             can (and should) be derived from the hierarchy provided in the data
 *                             definition (if any)
 * @return
 */
public StatisticsContingencyTable getContingencyTable(int column1,
                                                      int size1,
                                                      boolean orderFromDefinition1,
                                                      int column2,
                                                      int size2,
                                                      boolean orderFromDefinition2) {
    return getContingencyTable(column1,
                               size1,
                               getHierarchy(column1, orderFromDefinition1),
                               column2,
                               size2,
                               getHierarchy(column2, orderFromDefinition2));
}

/**
 * Returns a contingency table for the given columns. The order for string data items is derived
 * from the provided hierarchies
 *
 * @param column1 The first column
 * @param size1 The maximal size in this dimension
 * @param hierarchy1 The hierarchy for the first column, may be null
 * @param column2 The second column
 * @param size2 The maximal size in this dimension
 * @param hierarchy2 The hierarchy for the second column, may be null
 * @return
 */
public StatisticsContingencyTable getContingencyTable(int column1,
                                                      int size1,
                                                      Hierarchy hierarchy1,
                                                      int column2,
                                                      int size2,
                                                      Hierarchy hierarchy2) {

    // Reset stop flag
    interrupt = false;

    // Check
    if (size1 <= 0 || size2 <= 0) {
        throw new IllegalArgumentException("Size must be > 0");
    }

    // Obtain default table
    StatisticsContingencyTable table = getContingencyTable(column1, hierarchy1, column2, hierarchy2);

    // Check if suitable: if the full table already fits, return it unchanged
    if (table.values1.length <= size1 && table.values2.length <= size2) {
        return table;
    }

    // Init
    String[] values1;
    String[] values2;
    double factor1;
    double factor2;

    // Compute factors and values: downscale each over-sized dimension
    if (table.values1.length > size1) {
        factor1 = (double) size1 / (double) table.values1.length;
        values1 = getScaledValues(table.values1, size1);
    } else {
        factor1 = 1;
        values1 = table.values1;
    }
    if (table.values2.length > size2) {
        factor2 = (double) size2 / (double) table.values2.length;
        values2 = getScaledValues(table.values2, size2);
    } else {
        factor2 = 1;
        values2 = table.values2;
    }

    // Create entry set: remap old cell indices into the scaled grid,
    // accumulating frequencies of cells that collapse onto each other
    final Map<Entry, Double> entries = new HashMap<Entry, Double>();
    Iterator<Entry> iter = table.iterator;
    double max = 0d;
    while (iter.hasNext()) {
        checkInterrupt();
        Entry old = iter.next();
        int index1 = (int) Math.round((double) old.value1 * factor1);
        int index2 = (int) Math.round((double) old.value2 * factor2);
        index1 = index1 < size1 ? index1 : size1 - 1;
        index2 = index2 < size2 ? index2 : size2 - 1;
        Entry entry = new Entry(index1, index2);
        Double previous = entries.get(entry);
        double value = previous != null ? previous + old.frequency : old.frequency;
        max = Math.max(value, max);
        entries.put(entry, value);
    }

    // Create iterator (frequencies were already aggregated above)
    final Iterator<Entry> internal = entries.keySet().iterator();
    final Iterator<Entry> iterator = new Iterator<Entry>() {

        private Map<Entry, Double> _entries  = entries;
        private Iterator<Entry>    _internal = internal;

        @Override
        public boolean hasNext() {
            if (_internal == null) return false;
            boolean result = _internal.hasNext();
            // Try to release resources as early as possible
            if (!result) {
                _internal = null;
                _entries = null;
            }
            return result;
        }

        @Override
        public Entry next() {
            if (_internal == null) return null;
            Entry e = _internal.next();
            e.frequency = _entries.get(e);
            return e;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    };

    // Return result
    return new StatisticsContingencyTable(values1, values2, table.count, max, iterator);
}

/**
 * Returns a contingency table for the given columns.
 This method assumes that the
 * order of string data items can (and should) be derived from the hierarchies provided
 * in the data definition (if any)
 *
 * @param column1 The first column
 * @param size1 The maximal size in this dimension
 * @param column2 The second column
 * @param size2 The maximal size in this dimension
 * @return
 */
public StatisticsContingencyTable getContingencyTable(int column1, int size1, int column2, int size2) {
    return getContingencyTable(column1, size1, true, column2, size2, true);
}

/**
 * Returns the distinct set of data items from the given column.
 *
 * @param column The column
 * @return
 */
public String[] getDistinctValues(int column) {
    // The handle is given a callback so it can abort early when this
    // builder's interrupt flag is raised.
    return this.handle.getDistinctValues(column, new InterruptHandler() {
        @Override
        public void checkInterrupt() {
            StatisticsBuilder.this.checkInterrupt();
        }
    });
}

/**
 * Returns an ordered list of the distinct set of data items from the given column. This method assumes
 * that the order of string data items can (and should) be derived from the hierarchy provided in the
 * data definition (if any)
 *
 * @param column The column
 * @return
 */
public String[] getDistinctValuesOrdered(int column) {
    return this.getDistinctValuesOrdered(column, true);
}

/**
 * Returns an ordered list of the distinct set of data items from the given column.
 *
 * @param column The column
 * @param orderFromDefinition Indicates whether the order that should be assumed for string data
 *                            items can (and should) be derived from the hierarchy provided in the
 *                            data definition (if any)
 * @return
 */
public String[] getDistinctValuesOrdered(int column, boolean orderFromDefinition) {
    return getDistinctValuesOrdered(column, getHierarchy(column, orderFromDefinition));
}

/**
 * Returns an ordered list of the distinct set of data items from the given column. This method assumes
 * that the order of string data items can (and should) be derived from the provided hierarchy
 *
 * @param column The column
 * @param hierarchy The hierarchy, may be null
 * @return
 */
public String[] getDistinctValuesOrdered(int column, Hierarchy hierarchy) {

    // Reset stop flag
    interrupt = false;

    // Obtain list and data type
    final String[] list = getDistinctValues(column);
    final String attribute = handle.getAttributeName(column);
    final DataType<?> datatype = handle.getDataType(attribute);
    final int level = handle.getGeneralization(attribute);
    final String[][] _hierarchy = hierarchy != null ? hierarchy.getHierarchy() : null;

    // Sort by data type
    if (_hierarchy == null || level == 0) {
        sort(list, datatype, handle.getSuppressionString());
    // Sort by hierarchy and data type
    } else {
        // Build order directly from the hierarchy
        final Map<String, Integer> order = new HashMap<String, Integer>();
        int max = 0; // The order to use for the suppression string

        // Create base order: distinct level-0 values, sorted by the base type
        Set<String> baseSet = new HashSet<String>();
        DataType<?> baseType = handle.getBaseDataType(attribute);
        for (int i = 0; i < _hierarchy.length; i++) {
            String element = _hierarchy[i][0];
            checkInterrupt();
            // Make sure that only elements from the hierarchy
            // are added that are included in the data
            // TODO: Calling isValid is only a work-around
            if (baseType.isValid(element)) baseSet.add(element);
        }
        String[] baseArray = baseSet.toArray(new String[baseSet.size()]);
        sort(baseArray, handle.getBaseDataType(attribute), handle.getSuppressionString());
        Map<String, Integer> baseOrder = new HashMap<String, Integer>();
        for (int i = 0; i < baseArray.length; i++) {
            checkInterrupt();
            baseOrder.put(baseArray[i], i);
        }

        // Build higher level order from base order: a generalized value
        // inherits the position of the first base value mapping to it
        for (int i = 0; i < _hierarchy.length; i++) {
            checkInterrupt();
            if (!order.containsKey(_hierarchy[i][level])) {
                Integer position = baseOrder.get(_hierarchy[i][0]);
                if (position != null) {
                    order.put(_hierarchy[i][level], position);
                    max = Math.max(position, max) + 1;
                }
            }
        }

        // Add suppression string, ordered after everything else
        String supp = handle.getSuppressionString();
        if (supp != null) order.put(supp, max);

        // Sort
        sort(list, order);
    }

    // Done
    return list;
}

/**
 * Returns statistics about the equivalence classes.
 *
 * @return
 */
public StatisticsEquivalenceClasses getEquivalenceClassStatistics() {
    return ecStatistics;
}

/**
 * Returns a frequency distribution for the values in the given column. This method assumes that the
 * order of string data items can (and should) be derived from the hierarchy provided in the data
 * definition (if any)
 *
 * @param column The column
 * @return
 */
public StatisticsFrequencyDistribution getFrequencyDistribution(int column) {
    return getFrequencyDistribution(column, true);
}

/**
 * Returns a frequency distribution for the values in the given column.
 *
 * @param column The column
 * @param orderFromDefinition Indicates whether the order that should be assumed for string data items
 *                            can (and should) be derived from the hierarchy provided in the data
 *                            definition (if any)
 * @return
 */
public StatisticsFrequencyDistribution getFrequencyDistribution(int column, boolean orderFromDefinition) {
    return getFrequencyDistribution(column, getHierarchy(column, orderFromDefinition));
}

/**
 * Returns a frequency distribution for the values in the given column. The order for string data items
 * is derived from the provided hierarchy
 *
 * @param column The column
 * @param hierarchy The hierarchy, may be null
 * @return
 */
public StatisticsFrequencyDistribution getFrequencyDistribution(int column, Hierarchy hierarchy) {

    // Reset stop flag
    interrupt = false;

    // Init
    String[] values = getDistinctValuesOrdered(column, hierarchy);
    double[] frequencies = new double[values.length];

    // Create map of indexes
    Map<String, Integer> indexes = new HashMap<String, Integer>();
    for (int i = 0; i < values.length; i++) {
        checkInterrupt();
        indexes.put(values[i], i);
    }

    // Count frequencies
    for (int row = 0; row < handle.getNumRows(); row++) {
        checkInterrupt();
        String value = handle.getValue(row, column);
        frequencies[indexes.get(value)]++;
    }

    // Divide by count to obtain relative frequencies
    int count = handle.getNumRows();
    for (int i = 0; i < frequencies.length; i++) {
        checkInterrupt();
        frequencies[i] /= (double) count;
    }

    // Return
    return new StatisticsFrequencyDistribution(values, frequencies, count);
}

/**
 * Returns an interruptible instance of this object.
 *
 * @return
 */
public StatisticsBuilderInterruptible getInterruptibleInstance() {
    return new StatisticsBuilderInterruptible(handle, ecStatistics);
}

/**
 * Returns summary statistics for all attributes.
 *
 * @param listwiseDeletion A flag enabling list-wise deletion
 * @return
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public <T> Map<String, StatisticsSummary<?>> getSummaryStatistics(boolean listwiseDeletion) {

    Map<String, DescriptiveStatistics> statistics = new HashMap<String, DescriptiveStatistics>();
    Map<String, StatisticsSummaryOrdinal> ordinal = new HashMap<String, StatisticsSummaryOrdinal>();
    Map<String, DataScale> scales = new HashMap<String, DataScale>();

    // Detect scales
    for (int col = 0; col < handle.getNumColumns(); col++) {

        // Meta
        String attribute = handle.getAttributeName(col);
        DataType<?> type = handle.getDataType(attribute);

        // Scale
        DataScale scale = type.getDescription().getScale();

        // Try to replace nominal scale with ordinal scale based on base data type
        if (scale == DataScale.NOMINAL && handle.getGeneralization(attribute) != 0) {
            if (!(handle.getBaseDataType(attribute) instanceof ARXString) &&
                getHierarchy(col, true) != null) {
                scale = DataScale.ORDINAL;
            }
        }

        // Store
        scales.put(attribute, scale);
        statistics.put(attribute, new DescriptiveStatistics());
        ordinal.put(attribute, getSummaryStatisticsOrdinal(handle.getGeneralization(attribute),
                                                          handle.getDataType(attribute),
                                                          handle.getBaseDataType(attribute),
                                                          getHierarchy(col, true)));
    }

    // Compute summary statistics
    for (int row = 0; row < handle.getNumRows(); row++) {

        // Check, if we should include this row
        boolean include = true;
        if (listwiseDeletion) {
            for (int col = 0; col < handle.getNumColumns(); col++) {
                if (handle.isSuppressed(row) || DataType.isNull(handle.getValue(row, col))) {
                    include = false;
                    break;
                }
            }
        }

        // Check
        checkInterrupt();

        // If yes, add
        if (include) {

            // For each column
            for (int col = 0; col < handle.getNumColumns(); col++) {

                // Meta
                String value = handle.getValue(row, col);
                String attribute = handle.getAttributeName(col);
                DataType<?> type = handle.getDataType(attribute);

                // Analyze
                if (!value.equals(handle.getSuppressionString()) && !DataType.isNull(value)) {
                    ordinal.get(attribute).addValue(value);
                    if (type instanceof DataTypeWithRatioScale) {
                        statistics.get(attribute).addValue(((DataTypeWithRatioScale) type).toDouble(type.parse(value)));
                    }
                }
            }
        }
    }

    // Convert: build one StatisticsSummary per attribute, richer with each scale
    Map<String, StatisticsSummary<?>> result = new HashMap<String, StatisticsSummary<?>>();
    for (int col = 0; col < handle.getNumColumns(); col++) {

        // Check
        checkInterrupt();

        // Depending on scale
        String attribute = handle.getAttributeName(col);
        DataScale scale = scales.get(attribute);
        DataType<T> type = (DataType<T>) handle.getDataType(attribute);
        ordinal.get(attribute).analyze();
        if (scale == DataScale.NOMINAL) {
            StatisticsSummaryOrdinal stats = ordinal.get(attribute);
            result.put(attribute,
                       new StatisticsSummary<T>(DataScale.NOMINAL,
                                                stats.getNumberOfMeasures(),
                                                stats.getMode(),
                                                type.parse(stats.getMode())));
        } else if (scale == DataScale.ORDINAL) {
            StatisticsSummaryOrdinal stats = ordinal.get(attribute);
            result.put(attribute,
                       new StatisticsSummary<T>(DataScale.ORDINAL,
                                                stats.getNumberOfMeasures(),
                                                stats.getMode(),
                                                type.parse(stats.getMode()),
                                                stats.getMedian(),
                                                type.parse(stats.getMedian()),
                                                stats.getMin(),
                                                type.parse(stats.getMin()),
                                                stats.getMax(),
                                                type.parse(stats.getMax())));
        } else if (scale == DataScale.INTERVAL) {
            StatisticsSummaryOrdinal stats = ordinal.get(attribute);
            DescriptiveStatistics stats2 = statistics.get(attribute);
            // Date-valued attributes need period-style rendering of spreads
            boolean isPeriod = type.getDescription().getWrappedClass() == Date.class;

            // TODO: Something is wrong with commons math's kurtosis
            double kurtosis = stats2.getKurtosis();
            kurtosis = kurtosis < 0d ? Double.NaN : kurtosis;
            double range = stats2.getMax() - stats2.getMin();
            double stddev = Math.sqrt(stats2.getVariance());

            result.put(attribute,
                       new StatisticsSummary<T>(DataScale.INTERVAL,
                                                stats.getNumberOfMeasures(),
                                                stats.getMode(),
                                                type.parse(stats.getMode()),
                                                stats.getMedian(),
                                                type.parse(stats.getMedian()),
                                                stats.getMin(),
                                                type.parse(stats.getMin()),
                                                stats.getMax(),
                                                type.parse(stats.getMax()),
                                                toString(type, stats2.getMean(), false, false),
                                                toValue(type, stats2.getMean()),
                                                stats2.getMean(),
                                                toString(type, stats2.getVariance(), isPeriod, true),
                                                toValue(type, stats2.getVariance()),
                                                stats2.getVariance(),
                                                toString(type, stats2.getPopulationVariance(), isPeriod, true),
                                                toValue(type, stats2.getPopulationVariance()),
                                                stats2.getPopulationVariance(),
                                                toString(type, stddev, isPeriod, false),
                                                toValue(type, stddev),
                                                stddev,
                                                toString(type, range, isPeriod, false),
                                                toValue(type, range),
                                                stats2.getMax() - stats2.getMin(),
                                                toString(type, kurtosis, isPeriod, false),
                                                toValue(type, kurtosis),
                                                kurtosis));
        } else if (scale == DataScale.RATIO) {
            StatisticsSummaryOrdinal stats = ordinal.get(attribute);
            DescriptiveStatistics stats2 = statistics.get(attribute);

            // TODO: Something is wrong with commons math's kurtosis
            double kurtosis = stats2.getKurtosis();
            kurtosis = kurtosis < 0d ? Double.NaN : kurtosis;
            double range = stats2.getMax() - stats2.getMin();
            double stddev = Math.sqrt(stats2.getVariance());

            result.put(attribute,
                       new StatisticsSummary<T>(DataScale.RATIO,
                                                stats.getNumberOfMeasures(),
                                                stats.getMode(),
                                                type.parse(stats.getMode()),
                                                stats.getMedian(),
                                                type.parse(stats.getMedian()),
                                                stats.getMin(),
                                                type.parse(stats.getMin()),
                                                stats.getMax(),
                                                type.parse(stats.getMax()),
                                                toString(type, stats2.getMean(), false, false),
                                                toValue(type, stats2.getMean()),
                                                stats2.getMean(),
                                                toString(type, stats2.getVariance(), false, false),
                                                toValue(type, stats2.getVariance()),
                                                stats2.getVariance(),
                                                toString(type, stats2.getPopulationVariance(), false, false),
                                                toValue(type, stats2.getPopulationVariance()),
                                                stats2.getPopulationVariance(),
                                                toString(type, stddev, false, false),
                                                toValue(type, stddev),
                                                stddev,
                                                toString(type, range, false, false),
                                                toValue(type, range),
                                                range,
                                                toString(type, kurtosis, false, false),
                                                toValue(type, kurtosis),
                                                kurtosis,
                                                toString(type, stats2.getGeometricMean(), false, false),
                                                toValue(type, stats2.getGeometricMean()),
                                                stats2.getGeometricMean()));
        }
    }

    return result;
}

/**
 * Checks whether an interruption happened.
 */
private void checkInterrupt() {
    if (interrupt) {
        throw new ComputationInterruptedException("Interrupted");
    }
}

/**
 * Returns the appropriate hierarchy, if any.
 *
 * @param column
 * @param orderFromDefinition
 * @return
 */
private Hierarchy getHierarchy(int column, boolean orderFromDefinition) {

    // Init
    final String attribute = handle.getAttributeName(column);
    final AttributeType type = handle.getDefinition().getAttributeType(attribute);
    final DataType<?> datatype = handle.getDataType(attribute);
    final Hierarchy hierarchy;

    // Check if hierarchy available; only string-typed attributes use it for ordering
    if (orderFromDefinition && datatype instanceof ARXString && type instanceof Hierarchy) {
        hierarchy = ((Hierarchy) type);
    } else {
        hierarchy = null;
    }

    return hierarchy;
}

/**
 * Scales the given string array.
 *
 * @param values
 * @param length The resulting length
 * @return
 */
private String[] getScaledValues(String[] values, int length) {

    // Init
    AggregateFunction<String> function = AggregateFunction.forType(DataType.STRING).createSetFunction();
    double factor = (double) length / (double) values.length;
    String[] result = new String[length];

    // Aggregate: consecutive input values mapping to the same output slot
    // are merged via the set aggregate function
    int previous = 0;
    List<String> toAggregate = new ArrayList<String>();
    for (int i = 0; i < values.length; i++) {
        checkInterrupt();
        int index = (int) Math.round((double) i * factor);
        index = index < length ? index : length - 1;
        if (index != previous) {
            result[previous] = function.aggregate(toAggregate.toArray(new String[toAggregate.size()]));
            toAggregate.clear();
            previous = index;
        }
        toAggregate.add(values[i]);
    }
    // Flush the last group
    result[length - 1] = function.aggregate(toAggregate.toArray(new String[toAggregate.size()]));
    return result;
}

/**
 * Returns a summary statistics object for the given attribute
 * @param generalization
 * @param dataType
 * @param baseDataType
 * @param hierarchy
 * @return
 */
private <U, V> StatisticsSummaryOrdinal getSummaryStatisticsOrdinal(final int generalization,
                                                                    final DataType<U> dataType,
                                                                    final DataType<V> baseDataType,
                                                                    final Hierarchy hierarchy) {

    // TODO: It would be cleaner to return an ARXOrderedString for generalized variables
    // TODO: that have a suitable data type directly from the DataHandle
    if (generalization == 0 || !(dataType instanceof ARXString)) {
        return new StatisticsSummaryOrdinal(dataType);
    } else if (baseDataType instanceof ARXString) {
        return new StatisticsSummaryOrdinal(dataType);
    } else if (hierarchy == null) {
        return new StatisticsSummaryOrdinal(dataType);
    } else {
        // Generalized string attribute over a non-string base type: compare
        // generalized values by their level-0 representatives
        final String[][] array = hierarchy.getHierarchy();
        final Map<String, String> map = new HashMap<String, String>();
        for (int i = 0; i < array.length; i++) {
            map.put(array[i][generalization], array[i][0]);
        }
        return new StatisticsSummaryOrdinal(new Comparator<String>() {
            public int compare(String o1, String o2) {
                V _o1 = null;
                try {
                    _o1 = baseDataType.parse(map.get(o1));
                } catch (Exception e) {
                    // Nothing to do
                }
                V _o2 = null;
                try {
                    _o2 = baseDataType.parse(map.get(o2));
                } catch (Exception e) {
                    // Nothing to do
                }
                try {
                    return baseDataType.compare(_o1, _o2);
                } catch (Exception e) {
                    // Unparsable values compare as equal (best effort)
                    return 0;
                }
            }
        });
    }
}

/**
 * Orders the given array by data type.
 *
 * @param array
 * @param type
 * @param suppressionString
 */
private void sort(final String[] array, final DataType<?> type, final String suppressionString) {
    GenericSorting.mergeSort(0, array.length, new IntComparator() {
        @Override
        public int compare(int arg0, int arg1) {
            checkInterrupt();
            try {
                String s1 = array[arg0];
                String s2 = array[arg1];
                // NOTE(review): the suppression string is compared by
                // reference (==), i.e. it sorts last only when it is the
                // exact same String instance — confirm this is intended.
                return (s1 == suppressionString && s2 == suppressionString) ? 0
                        : (s1 == suppressionString ? +1
                        : (s2 == suppressionString ? -1
                        : type.compare(s1, s2)));
            } catch ( IllegalArgumentException | ParseException e) {
                throw new RuntimeException("Some values seem to not conform to the data type", e);
            }
        }
    }, new Swapper() {
        @Override
        public void swap(int arg0, int arg1) {
            String temp = array[arg0];
            array[arg0] = array[arg1];
            array[arg1] = temp;
        }
    });
}

/**
 * Orders the given array by the given sort order.
 *
 * @param array
 * @param order
 */
private void sort(final String[] array, final Map<String, Integer> order) {
    GenericSorting.mergeSort(0, array.length, new IntComparator() {
        @Override
        public int compare(int arg0, int arg1) {
            checkInterrupt();
            Integer order1 = order.get(array[arg0]);
            Integer order2 = order.get(array[arg1]);
            if (order1 == null || order2 == null) {
                throw new RuntimeException("The hierarchy seems to not cover all data values");
            } else {
                return order1.compareTo(order2);
            }
        }
    }, new Swapper() {
        @Override
        public void swap(int arg0, int arg1) {
            String temp = array[arg0];
            array[arg0] = array[arg1];
            array[arg1] = temp;
        }
    });
}

/**
 * Used for building summary statistics
 * @param type
 * @param value
 * @param isPeriod Defines whether the parameter is a time period
 * @param isSquare Defines whether the period is a squared period
 * @return
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private String toString(DataType<?> type, double value, boolean isPeriod, boolean isSquare) {

    // Handle corner cases
    if (Double.isNaN(value)) {
        return "Not available";
    } else if (Double.isInfinite(value)) {
        if (value < 0) {
            return "-Infinity";
        } else {
            return "+Infinity";
        }
    }

    // Handle periods
    if (isPeriod) {

        // Init: millisecond-based unit sizes
        long SECONDS = 1000;
        long MINUTES = 60 * SECONDS;
        long HOURS = 60 * MINUTES;
        long DAYS = 24 * HOURS;
        long WEEKS = 7 * DAYS;

        // Square: squared periods (e.g. variances) use squared unit sizes
        if (isSquare) {
            SECONDS *= SECONDS;
            MINUTES *= MINUTES;
            HOURS *= HOURS;
            DAYS *= DAYS;
            WEEKS *= WEEKS;
        }

        // Compute: decompose the value into week/day/.../millisecond parts
        final int weeks = (int) (value / WEEKS);
        value = value % WEEKS;
        final int days = (int) (value / DAYS);
        value = value % DAYS;
        final int hours = (int) (value / HOURS);
        value = value % HOURS;
        final int minutes = (int) (value / MINUTES);
        value = value % MINUTES;
        final int seconds = (int) (value / SECONDS);
        value = value % SECONDS;
        final int milliseconds = (int) (value);

        // Convert
        StringBuilder builder = new StringBuilder();
        if (weeks != 0) builder.append(weeks).append(isSquare ?
"w^2, " : "w, "); if (days != 0) builder.append(days).append(isSquare ? "d^2, " : "d, "); if (hours != 0) builder.append(hours).append(isSquare ? "h^2, " : "h, "); if (minutes != 0) builder.append(minutes).append(isSquare ? "m^2, " : "m, "); if (seconds != 0) builder.append(seconds).append(isSquare ? "s^2, " : "s, "); builder.append(milliseconds).append(isSquare ? "ms^2" : "ms"); // Return return builder.toString(); } // Handle data types if (type instanceof DataTypeWithRatioScale) { DataTypeWithRatioScale rType = (DataTypeWithRatioScale) type; return rType.format(rType.fromDouble(value)); } else { return String.valueOf(value); } } /** * Used for building summary statistics * @param type * @param value * @return */ @SuppressWarnings("unchecked") private <T> T toValue(DataType<T> type, double value) { // Handle corner cases if (Double.isNaN(value) || Double.isInfinite(value)) { return null; } // Handle data types Class<?> clazz = type.getDescription().getWrappedClass(); if (clazz == Long.class) { return (T) Long.valueOf((long) value); } else if (clazz == Double.class) { return (T) Double.valueOf(value); } else if (clazz == Date.class) { return (T) new Date((long) value); } else { return (T) String.valueOf(value); } } /** * Stops all computations. May lead to exceptions being thrown. Use with care. */ void interrupt() { this.interrupt = true; } }
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.jboss.netty.example.http.upload;

import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.jboss.netty.handler.codec.http.cookie.Cookie;
import org.jboss.netty.handler.codec.http.cookie.ServerCookieDecoder;
import org.jboss.netty.handler.codec.http.cookie.ServerCookieEncoder;
import org.jboss.netty.handler.codec.http.DefaultHttpResponse;
import org.jboss.netty.handler.codec.http.HttpChunk;
import org.jboss.netty.handler.codec.http.HttpHeaders;
import org.jboss.netty.handler.codec.http.HttpRequest;
import org.jboss.netty.handler.codec.http.HttpResponse;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import org.jboss.netty.handler.codec.http.HttpVersion;
import org.jboss.netty.handler.codec.http.QueryStringDecoder;
import org.jboss.netty.handler.codec.http.multipart.Attribute;
import org.jboss.netty.handler.codec.http.multipart.DefaultHttpDataFactory;
import org.jboss.netty.handler.codec.http.multipart.DiskAttribute;
import org.jboss.netty.handler.codec.http.multipart.DiskFileUpload;
import org.jboss.netty.handler.codec.http.multipart.FileUpload;
import org.jboss.netty.handler.codec.http.multipart.HttpDataFactory;
import org.jboss.netty.handler.codec.http.multipart.HttpPostRequestDecoder;
import org.jboss.netty.handler.codec.http.multipart.HttpPostRequestDecoder.EndOfDataDecoderException;
import org.jboss.netty.handler.codec.http.multipart.HttpPostRequestDecoder.ErrorDataDecoderException;
import org.jboss.netty.handler.codec.http.multipart.HttpPostRequestDecoder.NotEnoughDataDecoderException;
import org.jboss.netty.handler.codec.http.multipart.InterfaceHttpData;
import org.jboss.netty.handler.codec.http.multipart.InterfaceHttpData.HttpDataType;
import org.jboss.netty.util.CharsetUtil;

import java.io.IOException;
import java.net.URI;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

/**
 * Example upstream handler demonstrating HTTP upload handling (GET forms,
 * POST, and multipart POST) with {@link HttpPostRequestDecoder}, including
 * chunk-by-chunk decoding of chunked requests.
 *
 * <p>NOTE(review): this handler keeps per-request state in instance fields
 * ({@code decoder}, {@code readingChunks}, {@code responseContent}), so one
 * instance must not be shared across channels/pipelines.
 */
public class HttpUploadServerHandler extends SimpleChannelUpstreamHandler {

    private static final HttpDataFactory factory =
            new DefaultHttpDataFactory(DefaultHttpDataFactory.MINSIZE); // Disk if size exceed MINSIZE

    static {
        //To limit to roughly 5MB each attribute, including fileupload
        //factory.setMaxLimit(5000000);
        DiskFileUpload.deleteOnExitTemporaryFile = true; // should delete file
                                                         // on exit (in normal
                                                         // exit)
        DiskFileUpload.baseDirectory = null; // system temp directory
        DiskAttribute.deleteOnExitTemporaryFile = true; // should delete file on
                                                        // exit (in normal exit)
        DiskAttribute.baseDirectory = null; // system temp directory
    }

    // Accumulates the body of the response currently being built
    private final StringBuilder responseContent = new StringBuilder();

    // Decoder for the request currently being processed; null between requests
    private HttpPostRequestDecoder decoder;

    private HttpRequest request;

    // True while subsequent HttpChunk messages belong to the current request
    private boolean readingChunks;

    @Override
    public void channelClosed(ChannelHandlerContext ctx, ChannelStateEvent e) {
        // Release any temporary files created by the decoder
        if (decoder != null) {
            decoder.cleanFiles();
        }
    }

    @Override
    public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception {
        if (!readingChunks) {
            // clean previous FileUpload if Any
            if (decoder != null) {
                decoder.cleanFiles();
                decoder = null;
            }

            HttpRequest request = this.request = (HttpRequest) e.getMessage();
            URI uri = new URI(request.getUri());
            if (!uri.getPath().startsWith("/form")) {
                // Write Menu
                writeMenu(e);
                return;
            }
            responseContent.setLength(0);
            responseContent.append("WELCOME TO THE WILD WILD WEB SERVER\r\n");
            responseContent.append("===================================\r\n");

            responseContent.append("VERSION: " + request.getProtocolVersion().getText() + "\r\n");

            responseContent.append("REQUEST_URI: " + request.getUri() + "\r\n\r\n");
            responseContent.append("\r\n\r\n");

            // new method: echo all request headers
            for (Entry<String, String> entry: request.headers()) {
                responseContent.append("HEADER: " + entry.getKey() + '=' + entry.getValue() + "\r\n");
            }
            responseContent.append("\r\n\r\n");

            // new method: echo all cookies
            Set<Cookie> cookies;
            String value = request.headers().get(HttpHeaders.Names.COOKIE);
            if (value == null) {
                cookies = Collections.emptySet();
            } else {
                cookies = ServerCookieDecoder.STRICT.decode(value);
            }
            for (Cookie cookie: cookies) {
                responseContent.append("COOKIE: " + cookie + "\r\n");
            }
            responseContent.append("\r\n\r\n");

            // Echo all query-string parameters
            QueryStringDecoder decoderQuery = new QueryStringDecoder(request.getUri());
            Map<String, List<String>> uriAttributes = decoderQuery.getParameters();
            for (Entry<String, List<String>> attr: uriAttributes.entrySet()) {
                for (String attrVal: attr.getValue()) {
                    responseContent.append("URI: " + attr.getKey() + '=' + attrVal + "\r\n");
                }
            }
            responseContent.append("\r\n\r\n");

            // if GET Method: should not try to create a HttpPostRequestDecoder
            try {
                decoder = new HttpPostRequestDecoder(factory, request);
            } catch (ErrorDataDecoderException e1) {
                e1.printStackTrace();
                responseContent.append(e1.getMessage());
                writeResponse(e.getChannel());
                Channels.close(e.getChannel());
                return;
            }

            responseContent.append("Is Chunked: " + request.isChunked() + "\r\n");
            responseContent.append("IsMultipart: " + decoder.isMultipart() + "\r\n");
            if (request.isChunked()) {
                // Chunk version: switch to chunk mode and wait for HttpChunk messages
                responseContent.append("Chunks: ");
                readingChunks = true;
            } else {
                // Not chunk version: the whole body is already available
                readHttpDataAllReceive(e.getChannel());
                responseContent.append("\r\n\r\nEND OF NOT CHUNKED CONTENT\r\n");
                writeResponse(e.getChannel());
            }
        } else {
            // New chunk is received
            HttpChunk chunk = (HttpChunk) e.getMessage();
            try {
                decoder.offer(chunk);
            } catch (ErrorDataDecoderException e1) {
                e1.printStackTrace();
                responseContent.append(e1.getMessage());
                writeResponse(e.getChannel());
                Channels.close(e.getChannel());
                return;
            }
            responseContent.append('o'); // progress marker, one per chunk
            // example of reading chunk by chunk (minimize memory usage due to Factory)
            readHttpDataChunkByChunk();
            // example of reading only if at the end
            if (chunk.isLast()) {
                readHttpDataAllReceive(e.getChannel());
                writeResponse(e.getChannel());
                readingChunks = false;
            }
        }
    }

    /**
     * Example of reading all InterfaceHttpData from finished transfer
     */
    private void readHttpDataAllReceive(Channel channel) {
        List<InterfaceHttpData> datas;
        try {
            datas = decoder.getBodyHttpDatas();
        } catch (NotEnoughDataDecoderException e1) {
            // Should not be!
            e1.printStackTrace();
            responseContent.append(e1.getMessage());
            writeResponse(channel);
            Channels.close(channel);
            return;
        }
        for (InterfaceHttpData data: datas) {
            writeHttpData(data);
        }
        responseContent.append("\r\n\r\nEND OF CONTENT AT FINAL END\r\n");
    }

    /**
     * Example of reading request by chunk and getting values from chunk to
     * chunk
     */
    private void readHttpDataChunkByChunk() {
        try {
            while (decoder.hasNext()) {
                InterfaceHttpData data = decoder.next();
                if (data != null) {
                    // new value
                    writeHttpData(data);
                }
            }
        } catch (EndOfDataDecoderException e1) {
            // end: hasNext() throws when the decoder has been fully consumed
            responseContent.append("\r\n\r\nEND OF CONTENT CHUNK BY CHUNK\r\n\r\n");
        }
    }

    /**
     * Appends a textual description of one decoded body part (attribute or
     * file upload) to {@link #responseContent}.
     */
    private void writeHttpData(InterfaceHttpData data) {
        if (data.getHttpDataType() == HttpDataType.Attribute) {
            Attribute attribute = (Attribute) data;
            String value;
            try {
                value = attribute.getValue();
            } catch (IOException e1) {
                // Error while reading data from File, only print name and error
                e1.printStackTrace();
                responseContent.append("\r\nBODY Attribute: " + attribute.getHttpDataType().name() + ": " +
                        attribute.getName() + " Error while reading value: " + e1.getMessage() + "\r\n");
                return;
            }
            if (value.length() > 100) {
                responseContent.append("\r\nBODY Attribute: " + attribute.getHttpDataType().name() + ": " +
                        attribute.getName() + " data too long\r\n");
            } else {
                responseContent.append(
                        "\r\nBODY Attribute: " + attribute.getHttpDataType().name() + ": " + attribute + "\r\n");
            }
        } else {
            responseContent.append(
                    "\r\nBODY FileUpload: " + data.getHttpDataType().name() + ": " + data + "\r\n");
            if (data.getHttpDataType() == HttpDataType.FileUpload) {
                FileUpload fileUpload = (FileUpload) data;
                if (fileUpload.isCompleted()) {
                    if (fileUpload.length() < 10000) {
                        responseContent.append("\tContent of file\r\n");
                        try {
                            responseContent.append(fileUpload.getString(fileUpload.getCharset()));
                        } catch (IOException e1) {
                            // do nothing for the example
                            e1.printStackTrace();
                        }
                        responseContent.append("\r\n");
                    } else {
                        responseContent.append(
                                "\tFile too long to be printed out:" + fileUpload.length() + "\r\n");
                    }
                    // fileUpload.isInMemory();// tells if the file is in Memory
                    // or on File
                    // fileUpload.renameTo(dest); // enable to move into another
                    // File dest
                    // decoder.removeFileUploadFromClean(fileUpload); //remove
                    // the File of to delete file
                } else {
                    responseContent.append("\tFile to be continued but should not!\r\n");
                }
            }
        }
    }

    /**
     * Writes the accumulated {@link #responseContent} back to the client and
     * resets the buffer, echoing request cookies and honoring keep-alive.
     */
    private void writeResponse(Channel channel) {
        // Convert the response content to a ChannelBuffer.
        ChannelBuffer buf = ChannelBuffers.copiedBuffer(responseContent.toString(), CharsetUtil.UTF_8);
        responseContent.setLength(0);

        // Decide whether to close the connection or not.
        boolean close = HttpHeaders.Values.CLOSE.equalsIgnoreCase(
                request.headers().get(HttpHeaders.Names.CONNECTION)) ||
                request.getProtocolVersion().equals(HttpVersion.HTTP_1_0) &&
                !HttpHeaders.Values.KEEP_ALIVE.equalsIgnoreCase(request.headers().get(HttpHeaders.Names.CONNECTION));

        // Build the response object.
        HttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
        response.setContent(buf);
        response.headers().set(HttpHeaders.Names.CONTENT_TYPE, "text/plain; charset=UTF-8");

        if (!close) {
            // There's no need to add 'Content-Length' header
            // if this is the last response.
            response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, String.valueOf(buf.readableBytes()));
        }

        // Echo the request cookies back to the client
        Set<Cookie> cookies;
        String value = request.headers().get(HttpHeaders.Names.COOKIE);
        if (value == null) {
            cookies = Collections.emptySet();
        } else {
            cookies = ServerCookieDecoder.STRICT.decode(value);
        }
        if (!cookies.isEmpty()) {
            response.headers().add(HttpHeaders.Names.SET_COOKIE, ServerCookieEncoder.STRICT.encode(cookies));
        }
        // Write the response.
        ChannelFuture future = channel.write(response);
        // Close the connection after the write operation is done if necessary.
        if (close) {
            future.addListener(ChannelFutureListener.CLOSE);
        }
    }

    /**
     * Writes the HTML menu page offering one GET form, one url-encoded POST
     * form and one multipart POST form.
     */
    private void writeMenu(MessageEvent e) {
        // print several HTML forms
        // Convert the response content to a ChannelBuffer.
        responseContent.setLength(0);

        // create Pseudo Menu
        responseContent.append("<html>");
        responseContent.append("<head>");
        responseContent.append("<title>Netty Test Form</title>\r\n");
        responseContent.append("</head>\r\n");
        responseContent.append("<body bgcolor=white><style>td{font-size: 12pt;}</style>");
        responseContent.append("<table border=\"0\">");
        responseContent.append("<tr>");
        responseContent.append("<td>");
        responseContent.append("<h1>Netty Test Form</h1>");
        responseContent.append("Choose one FORM");
        responseContent.append("</td>");
        responseContent.append("</tr>");
        responseContent.append("</table>\r\n");

        // GET
        responseContent.append("<CENTER>GET FORM<HR WIDTH=\"75%\" NOSHADE color=\"blue\"></CENTER>");
        responseContent.append("<FORM ACTION=\"/formget\" METHOD=\"GET\">");
        responseContent.append("<input type=hidden name=getform value=\"GET\">");
        responseContent.append("<table border=\"0\">");
        responseContent.append("<tr><td>Fill with value:<br> <input type=text name=\"info\" size=10></td></tr>");
        responseContent.append("<tr><td>Fill with value:<br> <input type=text name=\"secondinfo\" size=20>");
        responseContent.append("<tr><td>Fill with value:<br> <textarea name=\"thirdinfo\" cols=40 rows=10></textarea>");
        responseContent.append("</td></tr>");
        responseContent.append("<tr><td><INPUT TYPE=\"submit\" NAME=\"Send\" VALUE=\"Send\"></INPUT></td>");
        responseContent.append("<td><INPUT TYPE=\"reset\" NAME=\"Clear\" VALUE=\"Clear\" ></INPUT></td></tr>");
        responseContent.append("</table></FORM>\r\n");
        responseContent.append("<CENTER><HR WIDTH=\"75%\" NOSHADE color=\"blue\"></CENTER>");

        // POST
        responseContent.append("<CENTER>POST FORM<HR WIDTH=\"75%\" NOSHADE color=\"blue\"></CENTER>");
        responseContent.append("<FORM ACTION=\"/formpost\" METHOD=\"POST\">");
        responseContent.append("<input type=hidden name=getform value=\"POST\">");
        responseContent.append("<table border=\"0\">");
        responseContent.append("<tr><td>Fill with value:<br> <input type=text name=\"info\" size=10></td></tr>");
        responseContent.append("<tr><td>Fill with value:<br> <input type=text name=\"secondinfo\" size=20>");
        responseContent.append("<tr><td>Fill with value:<br> <textarea name=\"thirdinfo\" cols=40 rows=10></textarea>");
        responseContent.append("<tr><td>Fill with file (only file name will be transmitted): <br> ");
        responseContent.append("<input type=file name=\"myfile\">");
        responseContent.append("</td></tr>");
        responseContent.append("<tr><td><INPUT TYPE=\"submit\" NAME=\"Send\" VALUE=\"Send\"></INPUT></td>");
        responseContent.append("<td><INPUT TYPE=\"reset\" NAME=\"Clear\" VALUE=\"Clear\" ></INPUT></td></tr>");
        responseContent.append("</table></FORM>\r\n");
        responseContent.append("<CENTER><HR WIDTH=\"75%\" NOSHADE color=\"blue\"></CENTER>");

        // POST with enctype="multipart/form-data"
        responseContent.append("<CENTER>POST MULTIPART FORM<HR WIDTH=\"75%\" NOSHADE color=\"blue\"></CENTER>");
        responseContent.append("<FORM ACTION=\"/formpostmultipart\" ENCTYPE=\"multipart/form-data\" METHOD=\"POST\">");
        responseContent.append("<input type=hidden name=getform value=\"POST\">");
        responseContent.append("<table border=\"0\">");
        responseContent.append("<tr><td>Fill with value:<br> <input type=text name=\"info\" size=10></td></tr>");
        responseContent.append("<tr><td>Fill with value:<br> <input type=text name=\"secondinfo\" size=20>");
        responseContent.append("<tr><td>Fill with value:<br> <textarea name=\"thirdinfo\" cols=40 rows=10></textarea>");
        responseContent.append("<tr><td>Fill with file: <br> <input type=file name=\"myfile\">");
        responseContent.append("</td></tr>");
        responseContent.append("<tr><td><INPUT TYPE=\"submit\" NAME=\"Send\" VALUE=\"Send\"></INPUT></td>");
        responseContent.append("<td><INPUT TYPE=\"reset\" NAME=\"Clear\" VALUE=\"Clear\" ></INPUT></td></tr>");
        responseContent.append("</table></FORM>\r\n");
        responseContent.append("<CENTER><HR WIDTH=\"75%\" NOSHADE color=\"blue\"></CENTER>");

        responseContent.append("</body>");
        responseContent.append("</html>");

        ChannelBuffer buf = ChannelBuffers.copiedBuffer(responseContent.toString(), CharsetUtil.UTF_8);
        // Build the response object.
        HttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
        response.setContent(buf);
        response.headers().set(HttpHeaders.Names.CONTENT_TYPE, "text/html; charset=UTF-8");
        response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, String.valueOf(buf.readableBytes()));
        // Write the response.
        e.getChannel().write(response);
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
        e.getCause().printStackTrace();
        e.getChannel().close();
    }
}
/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.afwsamples.testdpc.provision;

import static android.app.admin.DevicePolicyManager.EXTRA_PROVISIONING_ADMIN_EXTRAS_BUNDLE;
import static android.app.admin.DevicePolicyManager.PERMISSION_GRANT_STATE_GRANTED;
import static com.afwsamples.testdpc.DeviceAdminReceiver.getComponentName;

import android.accounts.Account;
import android.accounts.AccountManager;
import android.annotation.TargetApi;
import android.app.admin.DevicePolicyManager;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.PermissionInfo;
import android.os.Build.VERSION_CODES;
import android.os.PersistableBundle;
import android.util.Log;
import com.afwsamples.testdpc.AddAccountActivity;
import com.afwsamples.testdpc.FinalizeActivity;
import com.afwsamples.testdpc.common.LaunchIntentUtil;
import com.afwsamples.testdpc.common.Util;
import com.afwsamples.testdpc.cosu.EnableCosuActivity;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Task executed after provisioning is done indicated by either the
 * {@link DevicePolicyManager#ACTION_PROVISIONING_SUCCESSFUL} activity intent or the
 * {@link android.app.admin.DeviceAdminReceiver#onProfileProvisioningComplete(Context, Intent)}
 * broadcast.
 *
 * <p>Operations performed:
 * <ul>
 *     <li>self-grant all run-time permissions</li>
 *     <li>enable the launcher activity</li>
 *     <li>start waiting for first account ready broadcast</li>
 * </ul>
 */
public class PostProvisioningTask {
    private static final String TAG = "PostProvisioningTask";
    private static final String SETUP_MANAGEMENT_LAUNCH_ACTIVITY =
            "com.afwsamples.testdpc.SetupManagementLaunchActivity";
    // Preferences file and flag guarding against running the task twice
    private static final String POST_PROV_PREFS = "post_prov_prefs";
    private static final String KEY_POST_PROV_DONE = "key_post_prov_done";

    private final Context mContext;
    private final DevicePolicyManager mDevicePolicyManager;
    private final SharedPreferences mSharedPrefs;

    public PostProvisioningTask(Context context) {
        mContext = context;
        mDevicePolicyManager =
                (DevicePolicyManager) context.getSystemService(Context.DEVICE_POLICY_SERVICE);
        mSharedPrefs = context.getSharedPreferences(POST_PROV_PREFS, Context.MODE_PRIVATE);
    }

    /**
     * Performs the one-time post-provisioning operations (permission self-grant,
     * affiliation ids, launcher component toggle).
     *
     * @param intent the provisioning-complete intent carrying the admin extras bundle
     * @return true if the operations were performed, false if they had already been done
     */
    public boolean performPostProvisioningOperations(Intent intent) {
        if (isPostProvisioningDone()) {
            return false;
        }
        // Mark first so a crash mid-way does not re-trigger the whole task
        markPostProvisioningDone();

        // From M onwards, permissions are not auto-granted, so we need to manually grant
        // permissions for TestDPC.
        if (Util.SDK_INT >= VERSION_CODES.M) {
            autoGrantRequestedPermissionsToSelf();
        }

        // Retrieve the admin extras bundle, which we can use to determine the original context for
        // TestDPCs launch.
        PersistableBundle extras = intent.getParcelableExtra(
                EXTRA_PROVISIONING_ADMIN_EXTRAS_BUNDLE);
        if (Util.SDK_INT >= VERSION_CODES.O) {
            maybeSetAffiliationIds(extras);
        }

        // Hide the setup launcher when this app is the admin
        mContext.getPackageManager().setComponentEnabledSetting(
                new ComponentName(mContext, SETUP_MANAGEMENT_LAUNCH_ACTIVITY),
                PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP);

        return true;
    }

    /**
     * Builds the intent to launch after provisioning: the COSU enabler, the
     * finalize screen, or first an add-account screen chaining into one of those.
     *
     * @param intent the provisioning-complete intent carrying the admin extras bundle
     * @return the launch intent, or null if this app is neither profile nor device owner
     */
    public Intent getPostProvisioningLaunchIntent(Intent intent) {
        // Enable the profile after provisioning is complete.
        Intent launch;

        // Retrieve the admin extras bundle, which we can use to determine the original context for
        // TestDPCs launch.
        PersistableBundle extras = intent.getParcelableExtra(
                EXTRA_PROVISIONING_ADMIN_EXTRAS_BUNDLE);

        String packageName = mContext.getPackageName();
        boolean synchronousAuthLaunch = LaunchIntentUtil.isSynchronousAuthLaunch(extras);
        boolean cosuLaunch = LaunchIntentUtil.isCosuLaunch(extras);
        boolean isProfileOwner = mDevicePolicyManager.isProfileOwnerApp(packageName);
        boolean isDeviceOwner = mDevicePolicyManager.isDeviceOwnerApp(packageName);

        // Drop out quickly if we're neither profile or device owner.
        if (!isProfileOwner && !isDeviceOwner) {
            return null;
        }

        if (cosuLaunch) {
            launch = new Intent(mContext, EnableCosuActivity.class);
            launch.putExtra(EXTRA_PROVISIONING_ADMIN_EXTRAS_BUNDLE, extras);
        } else {
            launch = new Intent(mContext, FinalizeActivity.class);
        }

        if (synchronousAuthLaunch) {
            String accountName = LaunchIntentUtil.getAddedAccountName(extras);
            if (accountName != null) {
                launch.putExtra(LaunchIntentUtil.EXTRA_ACCOUNT_NAME, accountName);
            }
        }

        // For synchronous auth cases, we can assume accounts are already setup (or will be shortly,
        // as account migration for Profile Owner is asynchronous). For COSU we don't want to show
        // the account option to the user, as no accounts should be added for now.
        // In other cases, offer to add an account to the newly configured device/profile.
        if (!synchronousAuthLaunch && !cosuLaunch) {
            AccountManager accountManager = AccountManager.get(mContext);
            Account[] accounts = accountManager.getAccounts();
            if (accounts != null && accounts.length == 0) {
                // Add account after provisioning is complete.
                Intent addAccountIntent = new Intent(mContext, AddAccountActivity.class);
                addAccountIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                // Chain: after the account is added, continue with the original launch intent
                addAccountIntent.putExtra(AddAccountActivity.EXTRA_NEXT_ACTIVITY_INTENT, launch);
                return addAccountIntent;
            }
        }

        launch.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        return launch;
    }

    private void markPostProvisioningDone() {
        // commit() (synchronous) is used deliberately: the flag must be persisted
        // before the post-provisioning operations proceed
        mSharedPrefs.edit().putBoolean(KEY_POST_PROV_DONE, true).commit();
    }

    private boolean isPostProvisioningDone() {
        return mSharedPrefs.getBoolean(KEY_POST_PROV_DONE, false);
    }

    @TargetApi(VERSION_CODES.O)
    private void maybeSetAffiliationIds(PersistableBundle extras) {
        if (extras == null) {
            return;
        }
        String affiliationId = extras.getString(LaunchIntentUtil.EXTRA_AFFILIATION_ID);
        if (affiliationId != null) {
            mDevicePolicyManager.setAffiliationIds(
                    getComponentName(mContext),
                    Collections.singleton(affiliationId));
        }
    }

    /**
     * Grants every dangerous (runtime) permission this package requests to itself
     * via {@link DevicePolicyManager#setPermissionGrantState}.
     */
    @TargetApi(VERSION_CODES.M)
    private void autoGrantRequestedPermissionsToSelf() {
        String packageName = mContext.getPackageName();
        ComponentName adminComponentName = getComponentName(mContext);

        List<String> permissions = getRuntimePermissions(mContext.getPackageManager(), packageName);
        for (String permission : permissions) {
            boolean success = mDevicePolicyManager.setPermissionGrantState(adminComponentName,
                    packageName, permission, PERMISSION_GRANT_STATE_GRANTED);
            Log.d(TAG, "Auto-granting " + permission + ", success: " + success);
            if (!success) {
                Log.e(TAG, "Failed to auto grant permission to self: " + permission);
            }
        }
    }

    /**
     * Returns all runtime (dangerous) permissions requested by the given package.
     * Returns an empty list when the package cannot be resolved.
     */
    private List<String> getRuntimePermissions(PackageManager packageManager, String packageName) {
        List<String> permissions = new ArrayList<>();
        PackageInfo packageInfo;
        try {
            packageInfo =
                    packageManager.getPackageInfo(packageName, PackageManager.GET_PERMISSIONS);
        } catch (PackageManager.NameNotFoundException e) {
            Log.e(TAG, "Could not retrieve info about the package: " + packageName, e);
            return permissions;
        }

        if (packageInfo != null && packageInfo.requestedPermissions != null) {
            for (String requestedPerm : packageInfo.requestedPermissions) {
                if (isRuntimePermission(packageManager, requestedPerm)) {
                    permissions.add(requestedPerm);
                }
            }
        }
        return permissions;
    }

    /**
     * Returns true if the given permission's base protection level is "dangerous",
     * i.e. it must be granted at runtime. Unknown permissions return false.
     */
    private boolean isRuntimePermission(PackageManager packageManager, String permission) {
        try {
            PermissionInfo pInfo = packageManager.getPermissionInfo(permission, 0);
            if (pInfo != null) {
                if ((pInfo.protectionLevel & PermissionInfo.PROTECTION_MASK_BASE)
                        == PermissionInfo.PROTECTION_DANGEROUS) {
                    return true;
                }
            }
        } catch (PackageManager.NameNotFoundException e) {
            Log.i(TAG, "Could not retrieve info about the permission: " + permission);
        }
        return false;
    }
}
/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *
 */
package org.apache.polygene.api.activation;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Arrays;
import org.apache.polygene.api.injection.scope.Structure;
import org.apache.polygene.api.mixin.Mixins;
import org.apache.polygene.api.service.ServiceReference;
import org.apache.polygene.api.structure.Application;
import org.apache.polygene.api.structure.Layer;
import org.apache.polygene.api.structure.Module;
import org.apache.polygene.bootstrap.AssemblyException;
import org.apache.polygene.bootstrap.builder.ApplicationBuilder;
import org.junit.Test;

import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.hamcrest.core.StringContains.containsString;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

/**
 * Tests that {@link PassivationException} aggregates all passivation failures
 * as suppressed exceptions and reports their count in its message — both for
 * directly constructed instances and for failures raised across a full
 * application structure (application, layers, modules, services).
 */
public class PassivationExceptionTest
{
    // Renders an exception's stack trace (incl. suppressed exceptions) as text
    private static String stack( Exception ex )
    {
        StringWriter writer = new StringWriter();
        ex.printStackTrace( new PrintWriter( writer ) );
        return writer.toString();
    }

    @Test
    public void testEmptyPassivationException()
    {
        PassivationException empty = new PassivationException( emptyList() );
        assertThat( empty.getMessage(), containsString( "has 0 cause" ) );
    }

    @Test
    public void testSinglePassivationException()
    {
        PassivationException single = new PassivationException( singletonList( new Exception( "single" ) ) );
        String stack = stack( single );
        assertThat( single.getMessage(), containsString( "has 1 cause" ) );
        assertThat( stack, containsString( "Suppressed: java.lang.Exception: single" ) );
    }

    @Test
    public void testMultiplePassivationException()
    {
        PassivationException multi = new PassivationException( Arrays.asList( new Exception( "one" ),
                                                                              new Exception( "two" ),
                                                                              new Exception( "three" ) ) );
        String stack = stack( multi );
        assertThat( multi.getMessage(), containsString( "has 3 cause(s)" ) );
        assertThat( stack, containsString( "Suppressed: java.lang.Exception: one" ) );
        assertThat( stack, containsString( "Suppressed: java.lang.Exception: two" ) );
        assertThat( stack, containsString( "Suppressed: java.lang.Exception: three" ) );
    }

    @Test
    public void testPassivationExceptionsAccrossStructure()
        throws AssemblyException, ActivationException
    {
        // Two layers, each with one module and one eagerly-activated service;
        // every structural element and both services are rigged to fail during
        // passivation (see the activator/listener classes below).
        ApplicationBuilder appBuilder = new ApplicationBuilder( "TestApplication" );
        appBuilder.withLayer( "Layer 1" ).withModule( "Module A" ).withAssembler(
            module -> module.services( TestService.class )
                            .identifiedBy( "TestService_Module.A" )
                            .withActivators( FailBeforePassivationServiceActivator.class )
                            .instantiateOnStartup() );
        appBuilder.withLayer( "Layer 2" ).withModule( "Module B" ).withAssembler(
            module -> module.services( TestService.class )
                            .identifiedBy( "TestService_Module.B" )
                            .withActivators( FailAfterPassivationServiceActivator.class )
                            .instantiateOnStartup() );
        appBuilder.registerActivationEventListener( new TestActivationEventListener() );

        Application app = appBuilder.newApplication();

        try
        {
            Module moduleA = app.findModule( "Layer 1", "Module A" );
            TestService service = moduleA.findService( TestService.class ).get();
            assertThat( service.hello(), equalTo( "Hello Polygene!" ) );
        }
        finally
        {
            try
            {
                app.passivate();
                fail( "No PassivationException" );
            }
            catch( PassivationException ex )
            {
                ex.printStackTrace();
                String stack = stack( ex );
                // 12 failures: before/after events for application, two layers and two
                // modules, plus one activator failure per service
                assertThat( ex.getMessage(), containsString( "has 12 cause(s)" ) );
                // Passivation happens in reverse activation order: Layer 2 before Layer 1
                assertThat( stack, containsString( "EVENT: FAIL BEFORE PASSIVATION for TestApplication" ) );
                assertThat( stack, containsString( "EVENT: FAIL BEFORE PASSIVATION for Layer 2" ) );
                assertThat( stack, containsString( "EVENT: FAIL BEFORE PASSIVATION for Module B" ) );
                assertThat( stack, containsString( "ACTIVATOR: FAIL AFTER PASSIVATION for TestService_Module.B(active=false,module='Module B')" ) );
                assertThat( stack, containsString( "EVENT: FAIL AFTER PASSIVATION for Module B" ) );
                assertThat( stack, containsString( "EVENT: FAIL AFTER PASSIVATION for Layer 2" ) );
                assertThat( stack, containsString( "EVENT: FAIL BEFORE PASSIVATION for Layer 1" ) );
                assertThat( stack, containsString( "EVENT: FAIL BEFORE PASSIVATION for Module A" ) );
                assertThat( stack, containsString( "ACTIVATOR: FAIL BEFORE PASSIVATION for TestService_Module.A(active=true,module='Module A')" ) );
                assertThat( stack, containsString( "EVENT: FAIL AFTER PASSIVATION for Module A" ) );
                assertThat( stack, containsString( "EVENT: FAIL AFTER PASSIVATION for Layer 1" ) );
                assertThat( stack, containsString( "EVENT: FAIL AFTER PASSIVATION for TestApplication" ) );
            }
        }
    }

    @Mixins( TestService.Mixin.class )
    public interface TestService
    {
        String hello();

        class Mixin
            implements TestService
        {
            @Structure
            private Module module;

            @Override
            public String hello()
            {
                module.name(); // touch the injected structure to prove injection worked
                return "Hello Polygene!";
            }
        }
    }

    // Activator that fails in beforePassivation (service still active at that point)
    public static class FailBeforePassivationServiceActivator
        extends ActivatorAdapter<ServiceReference<TestService>>
    {
        @Override
        public void beforePassivation( ServiceReference<TestService> passivated )
            throws Exception
        {
            throw new Exception( "ACTIVATOR: FAIL BEFORE PASSIVATION for " + passivated );
        }
    }

    // Activator that fails in afterPassivation (service already passivated)
    public static class FailAfterPassivationServiceActivator
        extends ActivatorAdapter<ServiceReference<TestService>>
    {
        @Override
        public void afterPassivation( ServiceReference<TestService> passivated )
            throws Exception
        {
            throw new Exception( "ACTIVATOR: FAIL AFTER PASSIVATION for " + passivated );
        }
    }

    // Listener that fails on PASSIVATING/PASSIVATED events of structural elements;
    // all other event sources and types are ignored
    public static class TestActivationEventListener
        implements ActivationEventListener
    {
        @Override
        public void onEvent( ActivationEvent event )
            throws Exception
        {
            if( !( event.source() instanceof Application )
                && !( event.source() instanceof Layer )
                && !( event.source() instanceof Module ) )
            {
                return;
            }
            switch( event.type() )
            {
                case PASSIVATING:
                    throw new Exception( "EVENT: FAIL BEFORE PASSIVATION for " + event.source() );
                case PASSIVATED:
                    throw new Exception( "EVENT: FAIL AFTER PASSIVATION for " + event.source() );
            }
        }
    }
}
/* * Copyright (c) 2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.sso.saml.dto; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.wso2.carbon.identity.application.common.util.IdentityApplicationManagementUtil; import java.io.Serializable; public class SAMLSSOServiceProviderDTO implements Serializable { private static final long serialVersionUID = -7633935958583257097L; private String issuer; private String[] assertionConsumerUrls; private String defaultAssertionConsumerUrl; private String assertionConsumerUrl; private String certAlias; private String sloResponseURL; private String sloRequestURL; private String loginPageURL; private String attributeConsumingServiceIndex; private boolean doSingleLogout; private boolean doSignAssertions; private boolean doSignResponse; private String[] requestedClaims; private String[] requestedAudiences; private String[] requestedRecipients; private boolean enableAttributeProfile; private boolean enableAttributesByDefault; private String nameIdClaimUri; private String nameIDFormat; private boolean idPInitSSOEnabled; private boolean idPInitSLOEnabled; private String[] idpInitSLOReturnToURLs; private boolean doEnableEncryptedAssertion; private boolean doValidateSignatureInRequests; private String signingAlgorithmURI; private String digestAlgorithmURI; public 
SAMLSSOServiceProviderDTO() { signingAlgorithmURI = IdentityApplicationManagementUtil.getSigningAlgoURIByConfig(); digestAlgorithmURI = IdentityApplicationManagementUtil.getDigestAlgoURIByConfig(); } public String getSigningAlgorithmURI() { return signingAlgorithmURI; } public void setSigningAlgorithmURI(String signingAlgorithmURI) { if (StringUtils.isNotBlank(signingAlgorithmURI)) { this.signingAlgorithmURI = signingAlgorithmURI; } } public String getDigestAlgorithmURI() { return digestAlgorithmURI; } public void setDigestAlgorithmURI(String digestAlgorithmURI) { if (StringUtils.isNotBlank(digestAlgorithmURI)) { this.digestAlgorithmURI = digestAlgorithmURI; } } public String getNameIDFormat() { return nameIDFormat; } public void setNameIDFormat(String nameIDFormat) { this.nameIDFormat = nameIDFormat; } public String getNameIdClaimUri() { return nameIdClaimUri; } public void setNameIdClaimUri(String nameIdClaimUri) { this.nameIdClaimUri = nameIdClaimUri; } public boolean isEnableAttributeProfile() { return enableAttributeProfile; } public void setEnableAttributeProfile(boolean enableAttributeProfile) { this.enableAttributeProfile = enableAttributeProfile; } public boolean isEnableAttributesByDefault() { return enableAttributesByDefault; } public void setEnableAttributesByDefault(boolean enableAttributesByDefault) { this.enableAttributesByDefault = enableAttributesByDefault; } public String getIssuer() { return issuer; } public void setIssuer(String issuer) { this.issuer = issuer; } public String getAssertionConsumerUrl() { return assertionConsumerUrl; } public void setAssertionConsumerUrl(String assertionConsumerUrl) { this.assertionConsumerUrl = assertionConsumerUrl; } public String getCertAlias() { return certAlias; } public void setCertAlias(String certAlias) { this.certAlias = certAlias; } public boolean isDoSingleLogout() { return doSingleLogout; } public void setDoSingleLogout(boolean doSingleLogout) { this.doSingleLogout = doSingleLogout; } public String 
getSloResponseURL() { return sloResponseURL; } public void setSloResponseURL(String logoutURL) { this.sloResponseURL = logoutURL; } public String getLoginPageURL() { return loginPageURL; } public void setLoginPageURL(String loginPageURL) { this.loginPageURL = loginPageURL; } /** * @return */ public boolean isDoSignAssertions() { return doSignAssertions; } /** * @param doSignAssertions */ public void setDoSignAssertions(boolean doSignAssertions) { this.doSignAssertions = doSignAssertions; } public String getAttributeConsumingServiceIndex() { return attributeConsumingServiceIndex; } public void setAttributeConsumingServiceIndex(String attributeConsumingServiceIndex) { this.attributeConsumingServiceIndex = attributeConsumingServiceIndex; } /** * @return the requestedClaims */ public String[] getRequestedClaims() { if (requestedClaims == null) { return new String[0]; } return requestedClaims.clone(); } /** * @param requestedClaims the requestedClaims to set */ public void setRequestedClaims(String[] requestedClaims) { if (requestedClaims == null) { requestedClaims = new String[0]; } this.requestedClaims = requestedClaims.clone(); } /** * @return the requestedAudiences */ public String[] getRequestedAudiences() { if (requestedAudiences == null) { return new String[0]; } return requestedAudiences.clone(); } /** * @param requestedAudiences the requestedAudiences to set */ public void setRequestedAudiences(String[] requestedAudiences) { if (requestedAudiences == null) { requestedAudiences = new String[0]; } this.requestedAudiences = requestedAudiences.clone(); } /** * @return the requestedRecipients */ public String[] getRequestedRecipients() { if (requestedRecipients == null) { return new String[0]; } return requestedRecipients.clone(); } /** * @param requestedRecipients the requestedRecipients to set */ public void setRequestedRecipients(String[] requestedRecipients) { if (requestedRecipients == null) { requestedRecipients = new String[0]; } this.requestedRecipients = 
requestedRecipients; } /** * @return the doSignResponse */ public boolean isDoSignResponse() { return doSignResponse; } /** * @param doSignResponse the doSignResponse to set */ public void setDoSignResponse(boolean doSignResponse) { this.doSignResponse = doSignResponse; } public boolean isIdPInitSSOEnabled() { return idPInitSSOEnabled; } public void setIdPInitSSOEnabled(boolean idPInitSSOEnabled) { this.idPInitSSOEnabled = idPInitSSOEnabled; } public boolean isDoEnableEncryptedAssertion() { return doEnableEncryptedAssertion; } public void setDoEnableEncryptedAssertion(boolean doEnableEncryptedAssertion) { this.doEnableEncryptedAssertion = doEnableEncryptedAssertion; } public boolean isDoValidateSignatureInRequests() { return doValidateSignatureInRequests; } public void setDoValidateSignatureInRequests(boolean doValidateSignatureInRequests) { this.doValidateSignatureInRequests = doValidateSignatureInRequests; } public String[] getAssertionConsumerUrls() { if (assertionConsumerUrls == null) { return new String[0]; } return assertionConsumerUrls.clone(); } public void setAssertionConsumerUrls(String[] assertionConsumerUrls) { this.assertionConsumerUrls = assertionConsumerUrls; } public String getDefaultAssertionConsumerUrl() { return defaultAssertionConsumerUrl; } public void setDefaultAssertionConsumerUrl(String defaultAssertionConsumerUrl) { this.defaultAssertionConsumerUrl = defaultAssertionConsumerUrl; } public String getSloRequestURL() { return sloRequestURL; } public void setSloRequestURL(String sloRequestURL) { this.sloRequestURL = sloRequestURL; } public boolean isIdPInitSLOEnabled() { return idPInitSLOEnabled; } public void setIdPInitSLOEnabled(boolean idPInitSLOEnabled) { this.idPInitSLOEnabled = idPInitSLOEnabled; } public String[] getIdpInitSLOReturnToURLs() { if (idpInitSLOReturnToURLs == null) { return ArrayUtils.EMPTY_STRING_ARRAY; } return idpInitSLOReturnToURLs.clone(); } public void setIdpInitSLOReturnToURLs(String[] idpInitSLOReturnToURLs) { 
if(idpInitSLOReturnToURLs != null) { this.idpInitSLOReturnToURLs = idpInitSLOReturnToURLs.clone(); } else { this.idpInitSLOReturnToURLs = null; } } }
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.server.lookup.namespace.cache; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Function; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.Execs; import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.query.lookup.namespace.CacheGenerator; import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.UriExtractionNamespace; import io.druid.query.lookup.namespace.UriExtractionNamespaceTest; import io.druid.server.lookup.namespace.NamespaceExtractionConfig; import io.druid.server.metrics.NoopServiceEmitter; import org.joda.time.Period; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import 
javax.annotation.Nullable; import java.io.File; import java.io.FileOutputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Map; import java.util.concurrent.CancellationException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; /** * */ @RunWith(Parameterized.class) public class CacheSchedulerTest { public static final Function<Lifecycle, NamespaceExtractionCacheManager> CREATE_ON_HEAP_CACHE_MANAGER = new Function<Lifecycle, NamespaceExtractionCacheManager>() { @Nullable @Override public NamespaceExtractionCacheManager apply(@Nullable Lifecycle lifecycle) { return new OnHeapNamespaceExtractionCacheManager( lifecycle, new NoopServiceEmitter(), new NamespaceExtractionConfig() ); } }; public static final Function<Lifecycle, NamespaceExtractionCacheManager> CREATE_OFF_HEAP_CACHE_MANAGER = new Function<Lifecycle, NamespaceExtractionCacheManager>() { @Nullable @Override public NamespaceExtractionCacheManager apply(@Nullable Lifecycle lifecycle) { return new OffHeapNamespaceExtractionCacheManager( lifecycle, new NoopServiceEmitter(), new NamespaceExtractionConfig() ); } }; @Parameterized.Parameters public static Collection<Object[]> data() { return Arrays.asList(new Object[][]{{CREATE_ON_HEAP_CACHE_MANAGER}}); } public static void waitFor(CacheScheduler.Entry entry) throws InterruptedException { entry.awaitTotalUpdates(1); } private static final String KEY = "foo"; private static final String VALUE = "bar"; @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder(); private final Function<Lifecycle, NamespaceExtractionCacheManager> createCacheManager; private Lifecycle lifecycle; 
private NamespaceExtractionCacheManager cacheManager; private CacheScheduler scheduler; private File tmpFile; public CacheSchedulerTest( Function<Lifecycle, NamespaceExtractionCacheManager> createCacheManager ) { this.createCacheManager = createCacheManager; } @Before public void setUp() throws Exception { lifecycle = new Lifecycle(); lifecycle.start(); cacheManager = createCacheManager.apply(lifecycle); final Path tmpDir = temporaryFolder.newFolder().toPath(); final CacheGenerator<UriExtractionNamespace> cacheGenerator = new CacheGenerator<UriExtractionNamespace>() { @Override public CacheScheduler.VersionedCache generateCache( final UriExtractionNamespace extractionNamespace, final CacheScheduler.EntryImpl<UriExtractionNamespace> id, final String lastVersion, final CacheScheduler scheduler ) throws InterruptedException { Thread.sleep(2); // To make absolutely sure there is a unique currentTimeMillis String version = Long.toString(System.currentTimeMillis()); CacheScheduler.VersionedCache versionedCache = scheduler.createVersionedCache(id, version); // Don't actually read off disk because TravisCI doesn't like that versionedCache.getCache().put(KEY, VALUE); return versionedCache; } }; scheduler = new CacheScheduler( new NoopServiceEmitter(), ImmutableMap.<Class<? extends ExtractionNamespace>, CacheGenerator<?>>of( UriExtractionNamespace.class, cacheGenerator ), cacheManager ); tmpFile = Files.createTempFile(tmpDir, "druidTestURIExtractionNS", ".dat").toFile(); try (OutputStream ostream = new FileOutputStream(tmpFile)) { try (OutputStreamWriter out = new OutputStreamWriter(ostream, StandardCharsets.UTF_8)) { // Since Travis sucks with disk related stuff, we override the disk reading part above. // This is safe and should shake out any problem areas that accidentally read the file. 
out.write("SHOULDN'T TRY TO PARSE"); out.flush(); } } } @After public void tearDown() { lifecycle.stop(); } @Test(timeout = 10_000) public void testSimpleSubmission() throws InterruptedException { UriExtractionNamespace namespace = new UriExtractionNamespace( tmpFile.toURI(), null, null, new UriExtractionNamespace.ObjectMapperFlatDataParser( UriExtractionNamespaceTest.registerTypes(new ObjectMapper()) ), new Period(0), null ); CacheScheduler.Entry entry = scheduler.schedule(namespace); waitFor(entry); Map<String, String> cache = entry.getCache(); Assert.assertNull(cache.put("key", "val")); Assert.assertEquals("val", cache.get("key")); } @Test(timeout = 10_000) public void testPeriodicUpdatesScheduled() throws InterruptedException { final int repeatCount = 5; final long delay = 5; try { final UriExtractionNamespace namespace = getUriExtractionNamespace(delay); final long start = System.currentTimeMillis(); try (CacheScheduler.Entry entry = scheduler.schedule(namespace)) { Assert.assertFalse(entry.getUpdaterFuture().isDone()); Assert.assertFalse(entry.getUpdaterFuture().isCancelled()); entry.awaitTotalUpdates(repeatCount); long minEnd = start + ((repeatCount - 1) * delay); long end = System.currentTimeMillis(); Assert.assertTrue( StringUtils.format( "Didn't wait long enough between runs. Expected more than %d was %d", minEnd - start, end - start ), minEnd <= end ); } } finally { lifecycle.stop(); cacheManager.waitForServiceToEnd(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } checkNoMoreRunning(); } @Test(timeout = 10_000) // This is very fast when run locally. Speed on Travis completely depends on noisy neighbors. 
public void testConcurrentAddDelete() throws InterruptedException { final int threads = 10; final int deletesPerThread = 5; ListeningExecutorService executorService = MoreExecutors.listeningDecorator( Execs.multiThreaded( threads, "concurrentTestingPool-%s" ) ); final CountDownLatch latch = new CountDownLatch(threads); Collection<ListenableFuture<?>> futures = new ArrayList<>(); for (int i = 0; i < threads; ++i) { futures.add( executorService.submit( new Runnable() { @Override public void run() { try { latch.countDown(); if (!latch.await(5, TimeUnit.SECONDS)) { throw new RuntimeException(new TimeoutException("Took too long to wait for more tasks")); } for (int j = 0; j < deletesPerThread; ++j) { try { testDelete(); } catch (Exception e) { throw Throwables.propagate(e); } } } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw Throwables.propagate(e); } } } ) ); } // Create an all-encompassing exception if any of them failed final Collection<Exception> exceptions = new ArrayList<>(); try { for (ListenableFuture<?> future : futures) { try { future.get(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw e; } catch (Exception e) { exceptions.add(e); } } if (!exceptions.isEmpty()) { final RuntimeException e = new RuntimeException("Futures failed"); for (Exception ex : exceptions) { e.addSuppressed(ex); } } } finally { executorService.shutdown(); executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } checkNoMoreRunning(); } @Test(timeout = 10_000L) public void testSimpleDelete() throws InterruptedException { testDelete(); } public void testDelete() throws InterruptedException { final long period = 1_000L; // Give it some time between attempts to update final UriExtractionNamespace namespace = getUriExtractionNamespace(period); CacheScheduler.Entry entry = scheduler.scheduleAndWait(namespace, 10_000); Assert.assertNotNull(entry); final Future<?> future = entry.getUpdaterFuture(); 
Assert.assertFalse(future.isCancelled()); Assert.assertFalse(future.isDone()); entry.awaitTotalUpdates(1); Assert.assertEquals(VALUE, entry.getCache().get(KEY)); entry.close(); try { Assert.assertNull(future.get()); } catch (CancellationException e) { // Ignore } catch (ExecutionException e) { if (!future.isCancelled()) { throw Throwables.propagate(e); } } Assert.assertTrue(future.isCancelled()); Assert.assertTrue(future.isDone()); } private UriExtractionNamespace getUriExtractionNamespace(long period) { return new UriExtractionNamespace( tmpFile.toURI(), null, null, new UriExtractionNamespace.ObjectMapperFlatDataParser( UriExtractionNamespaceTest.registerTypes(new ObjectMapper()) ), new Period(period), null ); } @Test(timeout = 10_000) public void testShutdown() throws InterruptedException { final long period = 5L; try { final UriExtractionNamespace namespace = getUriExtractionNamespace(period); try (CacheScheduler.Entry entry = scheduler.schedule(namespace)) { final Future<?> future = entry.getUpdaterFuture(); entry.awaitNextUpdates(1); Assert.assertFalse(future.isCancelled()); Assert.assertFalse(future.isDone()); final long prior = scheduler.updatesStarted(); entry.awaitNextUpdates(1); Assert.assertTrue(scheduler.updatesStarted() > prior); } } finally { lifecycle.stop(); } while (!cacheManager.waitForServiceToEnd(1_000, TimeUnit.MILLISECONDS)) { // keep waiting } checkNoMoreRunning(); Assert.assertTrue(cacheManager.scheduledExecutorService().isShutdown()); Assert.assertTrue(cacheManager.scheduledExecutorService().isTerminated()); } @Test(timeout = 10_000) public void testRunCount() throws InterruptedException { final int numWaits = 5; try { final UriExtractionNamespace namespace = getUriExtractionNamespace((long) 5); try (CacheScheduler.Entry entry = scheduler.schedule(namespace)) { final Future<?> future = entry.getUpdaterFuture(); entry.awaitNextUpdates(numWaits); Assert.assertFalse(future.isDone()); } } finally { lifecycle.stop(); } while 
(!cacheManager.waitForServiceToEnd(1_000, TimeUnit.MILLISECONDS)) { // keep waiting } Assert.assertTrue(scheduler.updatesStarted() >= numWaits); checkNoMoreRunning(); } /** * Tests that even if entry.close() wasn't called, the scheduled task is cancelled when the entry becomes * unreachable. */ @Test(timeout = 60_000) public void testEntryCloseForgotten() throws InterruptedException { scheduleDanglingEntry(); Assert.assertEquals(1, scheduler.getActiveEntries()); while (scheduler.getActiveEntries() > 0) { System.gc(); Thread.sleep(1000); } Assert.assertEquals(0, scheduler.getActiveEntries()); } private void scheduleDanglingEntry() throws InterruptedException { CacheScheduler.Entry entry = scheduler.schedule(getUriExtractionNamespace(5)); entry.awaitTotalUpdates(1); } private void checkNoMoreRunning() throws InterruptedException { Assert.assertEquals(0, scheduler.getActiveEntries()); final long pre = scheduler.updatesStarted(); Thread.sleep(100L); Assert.assertEquals(pre, scheduler.updatesStarted()); } }
/** * Copyright Microsoft Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.microsoft.windowsazure.management.compute; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.security.InvalidKeyException; import java.util.ArrayList; import java.util.Random; import java.util.concurrent.Callable; import org.apache.http.impl.client.DefaultHttpRequestRetryHandler; import org.junit.Assert; import com.microsoft.windowsazure.Configuration; import com.microsoft.windowsazure.MockIntegrationTestBase; import com.microsoft.windowsazure.core.OperationResponse; import com.microsoft.windowsazure.core.ServiceClient; import com.microsoft.windowsazure.core.pipeline.apache.ApacheConfigurationProperties; import com.microsoft.windowsazure.core.utils.KeyStoreType; import com.microsoft.windowsazure.exception.ServiceException; import com.microsoft.windowsazure.management.ManagementClient; import com.microsoft.windowsazure.management.ManagementService; import com.microsoft.windowsazure.management.compute.models.MockCloudBlobClient; import com.microsoft.windowsazure.management.compute.models.MockCloudBlobContainer; import com.microsoft.windowsazure.management.compute.models.MockCloudPageBlob; import com.microsoft.windowsazure.management.compute.models.MockListBlobItem; import com.microsoft.windowsazure.management.configuration.ManagementConfiguration; import 
com.microsoft.windowsazure.management.models.LocationAvailableServiceNames; import com.microsoft.windowsazure.management.models.LocationsListResponse; import com.microsoft.windowsazure.management.storage.StorageManagementClient; import com.microsoft.windowsazure.management.storage.StorageManagementService; import com.microsoft.windowsazure.management.storage.models.StorageAccountCreateParameters; import com.microsoft.windowsazure.management.storage.models.StorageAccountGetKeysResponse; import com.microsoft.azure.storage.CloudStorageAccount; import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.blob.*; public abstract class ComputeManagementIntegrationTestBase extends MockIntegrationTestBase{ protected static String testVMPrefix = "aztst"; protected static String testStoragePrefix = "aztst"; protected static String testHostedServicePrefix = "azhst"; protected static String storageAccountKey = ""; protected static String vmLocation = null; protected static String blobhost = ""; protected static ComputeManagementClient computeManagementClient; protected static StorageManagementClient storageManagementClient; protected static ManagementClient managementClient; protected static void createComputeManagementClient() throws Exception { Configuration config = createConfiguration(); computeManagementClient = ComputeManagementService.create(config); addClient((ServiceClient<?>) computeManagementClient, new Callable<Void>() { @Override public Void call() throws Exception { createComputeManagementClient(); return null; } }); addRegexRule("hostedservices/azhst[a-z]{10}"); } protected static void createStorageManagementClient() throws Exception { Configuration config = createConfiguration(); storageManagementClient = StorageManagementService.create(config); addClient((ServiceClient<?>) storageManagementClient, new Callable<Void>() { @Override public Void call() throws Exception { createStorageManagementClient(); return null; } }); 
addRegexRule("storageservices/aztst[a-z]{10}"); } protected static void createManagementClient() throws Exception { Configuration config = createConfiguration(); config.setProperty(ApacheConfigurationProperties.PROPERTY_RETRY_HANDLER, new DefaultHttpRequestRetryHandler()); managementClient = ManagementService.create(config); addClient((ServiceClient<?>) managementClient, new Callable<Void>() { @Override public Void call() throws Exception { createManagementClient(); return null; } }); } protected static Configuration createConfiguration() throws Exception { String baseUri = System.getenv(ManagementConfiguration.URI); if (IS_MOCKED) { return ManagementConfiguration.configure( new URI(MOCK_URI), MOCK_SUBSCRIPTION, null, null, null ); } else { return ManagementConfiguration.configure( baseUri != null ? new URI(baseUri) : null, System.getenv(ManagementConfiguration.SUBSCRIPTION_ID), System.getenv(ManagementConfiguration.KEYSTORE_PATH), System.getenv(ManagementConfiguration.KEYSTORE_PASSWORD), KeyStoreType.fromString(System.getenv(ManagementConfiguration.KEYSTORE_TYPE)) ); } } protected static String randomString(int length) { Random random = new Random(); StringBuilder stringBuilder = new StringBuilder(length); for (int i=0; i<length; i++) { stringBuilder.append((char)('a' + random.nextInt(26))); } return stringBuilder.toString(); } protected static void createStorageAccount(String storageAccountName, String storageContainer) throws Exception { //String storageAccountCreateName = testStoragePrefix + randomString(10); String storageAccountLabel = storageAccountName + "Label1"; //Arrange StorageAccountCreateParameters createParameters = new StorageAccountCreateParameters(); //required createParameters.setName(storageAccountName); //required createParameters.setLabel(storageAccountLabel); //required if no affinity group has set createParameters.setLocation(vmLocation); createParameters.setAccountType("Standard_LRS"); //act OperationResponse operationResponse = 
storageManagementClient.getStorageAccountsOperations().create(createParameters);
        // NOTE(review): this span begins mid-method — the statement above is the tail of a
        // storage-account-creation helper whose opening lines are earlier in the file.
        //Assert
        Assert.assertEquals(200, operationResponse.getStatusCode());
        //use container inside storage account, needed for os image storage.
        StorageAccountGetKeysResponse storageAccountGetKeysResponse = storageManagementClient.getStorageAccountsOperations().getKeys(storageAccountName);
        storageAccountKey = storageAccountGetKeysResponse.getPrimaryKey();
        createStorageContainer(storageAccountName, storageContainer);
    }

    /**
     * Creates the named blob container in the given storage account and blocks until it is
     * visible via listContainers, so later VM deployments don't race a still-creating container.
     * Side effect: records the container URI host in the static {@code blobhost} field.
     *
     * @param storageAccountName storage account to create the container in
     * @param storageContainer   name of the container to create
     * @throws Exception on any storage-client failure
     */
    protected static void createStorageContainer(String storageAccountName, String storageContainer) throws Exception {
        MockCloudBlobClient blobClient = createBlobClient(storageAccountName, storageAccountKey);
        MockCloudBlobContainer container = blobClient.getContainerReference(storageContainer);
        container.createIfNotExists();
        //make sure it created and available, otherwise vm deployment will fail with storage/container still creating
        // NOTE(review): busy-wait poll loop; sleeps 30s between listContainers calls until the
        // container shows up, then (live runs only) an extra 60s settle delay before returning.
        boolean found = false;
        while(found == false) {
            Iterable<MockCloudBlobContainer> listContainerResult = blobClient.listContainers(storageContainer);
            for (MockCloudBlobContainer item : listContainerResult) {
                blobhost = item.getUri().getHost();
                if (item.getName().contains(storageContainer) == true) {
                    blobhost = item.getUri().getHost();
                    found = true;
                }
            }
            if (found == false) {
                Thread.sleep(1000 * 30);
            } else if (!IS_MOCKED) {
                // extra settle time when hitting the real service (skipped under mocks)
                Thread.sleep(1000 * 60);
            }
        }
    }

    /**
     * Builds a blob client for the given account from an http connection string.
     * The returned client is wrapped in MockCloudBlobClient so recorded/mocked runs
     * (IS_MOCKED) and live runs share the same call sites.
     *
     * @param storageAccountName account name used in the connection string
     * @param storageAccountKey  account key used in the connection string
     * @return a mock-aware blob client for the account
     * @throws InvalidKeyException if the account key is malformed
     * @throws URISyntaxException  if the derived endpoint URI is invalid
     */
    protected static MockCloudBlobClient createBlobClient(String storageAccountName, String storageAccountKey) throws InvalidKeyException, URISyntaxException {
        String storageconnectionstring = "DefaultEndpointsProtocol=http;AccountName="+ storageAccountName +";AccountKey=" + storageAccountKey;
        CloudStorageAccount storageAccount = CloudStorageAccount.parse(storageconnectionstring);
        // Create the blob client
        CloudBlobClient blobClient = storageAccount.createCloudBlobClient();
        return new MockCloudBlobClient(blobClient, IS_MOCKED);
    }

    /**
     * Uploads a local file as a page blob into the given container and polls until the
     * blob appears in a container listing (VM deployment fails if the blob is still creating).
     *
     * @param storageAccountName account that owns the container
     * @param storageContainer   target container name
     * @param fileName           blob name; also appended to filePath to locate the local file
     * @param filePath           local directory prefix of the file to upload
     */
    protected static void uploadFileToBlob(String storageAccountName, String storageContainer, String fileName, String filePath) throws InvalidKeyException, URISyntaxException, StorageException, InterruptedException, IOException {
        MockCloudBlobClient blobClient = createBlobClient(storageAccountName, storageAccountKey);
        MockCloudBlobContainer container = blobClient.getContainerReference(storageContainer);
        MockCloudPageBlob pageblob = container.getPageBlobReference(fileName);
        File source = new File(filePath + fileName);
        // NOTE(review): the FileInputStream is not explicitly closed here; presumably the
        // upload call consumes/closes it — confirm against the storage client's contract.
        pageblob.upload(new FileInputStream(source), source.length());
        //make sure it created and available, otherwise vm deployment will fail with storage/container still creating
        boolean found = false;
        while(found == false) {
            // Loop over blobs within the container and output the URI to each of them
            for (MockListBlobItem item : container.listBlobs()) {
                if (item.getUri().getPath().contains(fileName) == true) {
                    found = true;
                }
            }
            if (found == false) {
                Thread.sleep(1000 * 10);
            } else if (!IS_MOCKED) {
                // extra settle time for live runs only
                Thread.sleep(1000 * 20);
            }
        }
    }

    /**
     * Picks a deployment location that supports compute, persistent VM roles and storage,
     * preferring "West US", and stores it in the static {@code vmLocation} field.
     * Falls back to the first qualifying location when "West US" is not available.
     *
     * @throws Exception if listing locations fails
     */
    protected static void getLocation() throws Exception {
        //has to be a location that support compute, storage, vm, some of the locations are not, need to find out the right one
        ArrayList<String> serviceName = new ArrayList<String>();
        serviceName.add(LocationAvailableServiceNames.COMPUTE);
        serviceName.add(LocationAvailableServiceNames.PERSISTENTVMROLE);
        serviceName.add(LocationAvailableServiceNames.STORAGE);
        LocationsListResponse locationsListResponse = managementClient.getLocationsOperations().list();
        for (LocationsListResponse.Location location : locationsListResponse) {
            ArrayList<String> availableServicelist = location.getAvailableServices();
            String locationName = location.getName();
            if (availableServicelist.containsAll(serviceName)== true) {
                // prefer West US; otherwise keep the first qualifying location seen
                if (locationName.contains("West US") == true) {
                    vmLocation = locationName;
                }
                if (vmLocation==null) {
                    vmLocation = locationName;
                }
            }
        }
    }

    /**
     * Best-effort teardown: breaks any lease on the container, deletes it, and waits for the
     * deletion to become visible. All storage errors are deliberately swallowed so cleanup
     * never fails the test run.
     *
     * NOTE(review): if getContainerReference throws, {@code container} stays null and the
     * following container.breakLease(0) call throws NullPointerException (which is NOT caught
     * here, only StorageException is) — verify whether that path can occur in practice.
     *
     * @param storageAccountName account that owns the container
     * @param storageContainer   container to delete
     */
    protected static void cleanBlob(String storageAccountName, String storageContainer) {
        // Create the blob client
        MockCloudBlobClient blobClient = null;
        try {
            blobClient = createBlobClient(storageAccountName, storageAccountKey);
        } catch (InvalidKeyException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
        if (blobClient != null) {
            MockCloudBlobContainer container = null;
            try {
                container = blobClient.getContainerReference(storageContainer);
            } catch (URISyntaxException e) {
            } catch (StorageException e) {
            }
            // break any outstanding lease so the delete below is not rejected
            try {
                container.breakLease(0);
            } catch (StorageException e) {
            }
            try {
                container.delete();
            } catch (StorageException e) {
            }
            // poll until the deletion is actually visible
            try {
                while (container.exists()) {
                    Thread.sleep(1000);
                }
            } catch (StorageException e) {
                e.printStackTrace();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Deletes the storage account and asserts the service returned HTTP 200.
     * IO/service errors are logged and tolerated (operationResponse stays null, so the
     * assertion is skipped rather than failing cleanup).
     *
     * @param storageAccountName account to delete
     */
    protected static void cleanStorageAccount(String storageAccountName) {
        OperationResponse operationResponse = null;
        try {
            operationResponse = storageManagementClient.getStorageAccountsOperations().delete(storageAccountName);
        } catch (IOException e) {
            e.printStackTrace();
        } catch (ServiceException e) {
            e.printStackTrace();
        }
        if (operationResponse != null) {
            Assert.assertEquals(200, operationResponse.getStatusCode());
        }
    }
}
/* * Copyright (c) 2015-2016, Mostafa Ali (engabdomostafa@gmail.com) * All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. Redistributions * in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or * other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH * DAMAGE. 
*/
package ca.mali.fomparser.datatype;

import ca.mali.customcontrol.ArrayPane;
import ca.mali.fomparser.DataTypeEnum;
import hla.rti1516e.encoding.*;
import javafx.geometry.HPos;
import javafx.geometry.Insets;
import javafx.scene.control.Label;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.TextField;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.Region;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.StringJoiner;

import static ca.mali.hlalistener.PublicVariables.encoderFactory;

/**
 * Represents an HLA/FOM array data type: it can encode/decode values to and from the
 * RTI byte representation and render a JavaFX control for editing them. String-like
 * predefined arrays (HLAASCIIstring, HLAunicodeString, HLAopaqueData) are handled as
 * scalars via {@code value}; every other array is edited element-by-element through
 * {@code arrayElements} inside an {@link ArrayPane}.
 *
 * @author Mostafa Ali <engabdomostafa@gmail.com>
 */
public class ArrayFDD extends AbstractDataType {

    //Logger
    private static final org.apache.logging.log4j.Logger logger = LogManager.getLogger();

    // data type of a single array element; cloned once per displayed row
    private AbstractDataType elementType;
    private String encoding;
    private String cardinality;
    private String semantics;
    // scalar value for the string-like predefined arrays; null otherwise
    private Object value;
    TextField textField;
    private ArrayPane arrayPane = new ArrayPane();
    List<AbstractDataType> arrayElements = new ArrayList<>();
    private int lowerLimit = 0; //we will just support 1 dimensional array
    private int upperLimit = 0; //we will just support 1 dimensional array
    // true when cardinality is "Dynamic" or a "lower..upper" range (variable-size array)
    private boolean isDynamic;

    public ArrayFDD(String name) {
        super(name, DataTypeEnum.ARRAY);
        // NOTE(review): calling the overridable getControl(true) from the constructor is
        // fragile if ArrayFDD is ever subclassed; kept as-is for compatibility.
        getControl(true);
    }

    /**
     * Deep-clones this data type: the element prototype is cloned and a fresh control
     * (and element list population) is built for the copy.
     */
    @Override
    public Object clone() throws CloneNotSupportedException {
        ArrayFDD cloned = (ArrayFDD) super.clone();
        cloned.setElementType((AbstractDataType) cloned.getElementType().clone());
        cloned.getControl(true);
        return cloned;
    }

    /**
     * Encodes the current value to its RTI byte representation.
     * String-like predefined arrays encode the scalar {@code value}; all other arrays
     * delegate to {@link #getDataElement()}.
     *
     * @return encoded bytes for the current value
     */
    @Override
    public byte[] EncodeValue() {
        byte[] encodedValue;
        switch (getName()) {
            case "HLAASCIIstring": {
                HLAASCIIstring encoder = encoderFactory.createHLAASCIIstring();
                if (value != null) encoder.setValue((String) value);
                encodedValue = encoder.toByteArray();
                break;
            }
            case "HLAunicodeString": {
                HLAunicodeString encoder = encoderFactory.createHLAunicodeString();
                if (value != null) encoder.setValue((String) value);
                encodedValue = encoder.toByteArray();
                break;
            }
            case "HLAopaqueData": {
                HLAopaqueData encoder = encoderFactory.createHLAopaqueData();
                if (value != null) encoder.setValue((byte[]) value);
                encodedValue = encoder.toByteArray();
                break;
            }
            default: {
                encodedValue = getDataElement().toByteArray();
                break;
            }
        }
        return encodedValue;
    }

    /**
     * Decodes bytes produced by the RTI into a display string.
     * Generic arrays are rendered as "[e1, e2, ...]" ("[]" when empty); each element is
     * decoded recursively through the element type.
     *
     * @param encodedValue bytes to decode
     * @return decoded value as a display string ("" on decode failure)
     */
    @Override
    public String DecodeValue(byte[] encodedValue) {
        String value = "";
        try {
            switch (getName()) {
                case "HLAASCIIstring": {
                    HLAASCIIstring encoder = encoderFactory.createHLAASCIIstring();
                    encoder.decode(encodedValue);
                    value = encoder.getValue();
                    break;
                }
                case "HLAunicodeString": {
                    HLAunicodeString encoder = encoderFactory.createHLAunicodeString();
                    encoder.decode(encodedValue);
                    value = encoder.getValue();
                    break;
                }
                case "HLAopaqueData": {
                    HLAopaqueData encoder = encoderFactory.createHLAopaqueData();
                    encoder.decode(encodedValue);
                    value = Arrays.toString(encoder.getValue());
                    break;
                }
                default: {
                    DataElementFactory<DataElement> dataElementFactory = i -> getElementType().getDataElement();
                    // FIX: build with StringJoiner instead of manual substring-trimming —
                    // the old code threw StringIndexOutOfBoundsException on an empty array.
                    StringJoiner joiner = new StringJoiner(", ", "[", "]");
                    if (isDynamic) {
                        HLAvariableArray<DataElement> hlAvariableArray =
                                encoderFactory.createHLAvariableArray(dataElementFactory);
                        hlAvariableArray.decode(encodedValue);
                        for (DataElement dataElement : hlAvariableArray) {
                            joiner.add(getElementType().DecodeValue(dataElement.toByteArray()));
                        }
                    } else {
                        HLAfixedArray<DataElement> hlAfixedArray =
                                encoderFactory.createHLAfixedArray(dataElementFactory, arrayElements.size());
                        hlAfixedArray.decode(encodedValue);
                        for (DataElement dataElement : hlAfixedArray) {
                            joiner.add(getElementType().DecodeValue(dataElement.toByteArray()));
                        }
                    }
                    value = joiner.toString();
                }
            }
        } catch (DecoderException ex) {
            logger.log(Level.ERROR, "Error in decoding value", ex);
        }
        return value;
    }

    /**
     * Builds the RTI DataElement holding the current value: a string/opaque encoder for
     * the predefined arrays, an HLAvariableArray for dynamic arrays, otherwise an
     * HLAfixedArray over the current elements.
     */
    @Override
    public DataElement getDataElement() {
        switch (getName()) {
            case "HLAASCIIstring": {
                HLAASCIIstring encoder = encoderFactory.createHLAASCIIstring();
                if (value != null) encoder.setValue((String) value);
                return encoder;
            }
            case "HLAunicodeString": {
                HLAunicodeString encoder = encoderFactory.createHLAunicodeString();
                if (value != null) encoder.setValue((String) value);
                return encoder;
            }
            case "HLAopaqueData": {
                HLAopaqueData encoder = encoderFactory.createHLAopaqueData();
                if (value != null) encoder.setValue((byte[]) value);
                return encoder;
            }
            default: {
                DataElementFactory<DataElement> dataElementFactory = i -> getElementType().getDataElement();
                if (isDynamic) {
                    HLAvariableArray<DataElement> hlAvariableArray =
                            encoderFactory.createHLAvariableArray(dataElementFactory);
                    for (AbstractDataType arrayElement : arrayElements) {
                        hlAvariableArray.addElement(arrayElement.getDataElement());
                    }
                    return hlAvariableArray;
                }
                DataElement[] elements = new DataElement[arrayElements.size()];
                for (int i = 0; i < arrayElements.size(); i++) {
                    elements[i] = arrayElements.get(i).getDataElement();
                }
                return encoderFactory.createHLAfixedArray(elements);
            }
        }
    }

    /**
     * Returns (and, when {@code reset} is true, rebuilds) the editing control:
     * a TextField for string arrays, otherwise an ArrayPane with add/remove row buttons.
     * Rows are inserted/removed relative to the focused element.
     *
     * @param reset rebuild the control and clear the current value when true
     * @return the editing control, or null for HLAtoken (no value to edit)
     */
    @Override
    public Region getControl(boolean reset) {
        if ("HLAtoken".equalsIgnoreCase(getName())) return null;
        // TODO: 1/30/2016 I don't understand this attribute "HLAprivilegeToDeleteObject" very well
        if ("HLAASCIIstring".equalsIgnoreCase(getName()) || "HLAunicodeString".equalsIgnoreCase(getName())) {
            if (reset) {
                this.value = null;
                textField = new TextField();
                textField.textProperty().addListener((observable, oldValue, newValue) -> this.value = newValue);
            }
            return textField;
        } else {
            if (reset) {
                this.value = null;
                arrayPane = new ArrayPane();
                arrayPane.getAddRow().setOnAction(event -> {
                    try {
                        // insert at the focused row, or append when nothing is focused
                        int i = arrayElements.size();
                        for (int i1 = 0; i1 < arrayElements.size(); i1++) {
                            if (arrayElements.get(i1).getControl(false).isFocused()) {
                                i = i1;
                                break;
                            }
                        }
                        arrayElements.add(i, (AbstractDataType) getElementType().clone());
                        arrayPane.setAddRowDisable(arrayElements.size() >= upperLimit);
                        arrayPane.setRemoveRowDisable(arrayElements.size() <= lowerLimit);
                        populateArrayPane(false);
                    } catch (CloneNotSupportedException ex) {
                        logger.log(Level.ERROR, "Error in adding new row", ex);
                    }
                });
                arrayPane.getRemoveRow().setOnAction(event -> {
                    int i = arrayElements.size();
                    for (int i1 = 0; i1 < arrayElements.size(); i1++) {
                        if (arrayElements.get(i1).getControl(false).isFocused()) {
                            i = i1;
                            break;
                        }
                    }
                    // NOTE(review): remove(i - 1) removes the row BEFORE the focused one and
                    // throws IndexOutOfBoundsException when row 0 is focused — looks like it
                    // should be the focused row itself; confirm intended UX before changing.
                    arrayElements.remove(i-1);
                    arrayPane.setAddRowDisable(arrayElements.size() >= upperLimit);
                    arrayPane.setRemoveRowDisable(arrayElements.size() <= lowerLimit);
                    populateArrayPane(false);
                });
                if (getCardinality() != null && !getCardinality().isEmpty()) {
                    addArrayElements();
                }
            }
            return arrayPane;
        }
    }

    /**
     * @return true when a scalar string value is set, or any element reports a value
     */
    @Override
    public boolean isValueExist() {
//        return "Employees".equalsIgnoreCase(getName());
//        return value != null && (value.getClass().isArray() || value instanceof String);
        // instanceof already rejects null, so no separate null check is needed
        if (value instanceof String) return true;
        if (arrayElements.size() > 0) {
            if (arrayElements.stream().anyMatch(AbstractDataType::isValueExist)) {
                return true;
            }
        }
        return false;
    }

    /**
     * @return the encoder class that represents this array type at runtime
     */
    @Override
    public Class getObjectClass() {
        switch (getName()) {
            case "HLAASCIIstring": {
                return HLAASCIIstring.class;
            }
            case "HLAunicodeString": {
                return HLAunicodeString.class;
            }
            case "HLAopaqueData": {
                return HLAopaqueData.class;
            }
            default: {
                if ("Dynamic".equalsIgnoreCase(getCardinality())) {
                    return HLAvariableArray.class;
                }
                return HLAfixedArray.class;
            }
        }
    }

    /**
     * Renders the current value for logging: the scalar value plus its encoding for
     * string-like arrays, otherwise "[e1, e2, ...]" ("[]" when there are no elements).
     */
    @Override
    public String valueAsString() {
        if (value != null) return value.toString() + "<" + Arrays.toString(EncodeValue()) + ">";
        // FIX: the old substring-based trimming was a no-op inside the loop and threw
        // StringIndexOutOfBoundsException when arrayElements was empty.
        StringJoiner joiner = new StringJoiner(", ", "[", "]");
        for (AbstractDataType element : arrayElements) {
            joiner.add(element.valueAsString());
        }
        return joiner.toString();
    }

    public AbstractDataType getElementType() {
        return elementType;
    }

    /**
     * Sets the element prototype and (for non-string arrays) repopulates the element rows.
     */
    public void setElementType(AbstractDataType elementType) {
        this.elementType = elementType;
        if (this.elementType != null && !"HLAASCIIstring".equalsIgnoreCase(getName())
                && !"HLAunicodeString".equalsIgnoreCase(getName())) {
            addArrayElements();
        }
    }

    public String getEncoding() {
        return encoding;
    }

    public void setEncoding(String encoding) {
        this.encoding = encoding;
    }

    public String getCardinality() {
        return cardinality;
    }

    /**
     * Parses the OMT cardinality string and derives the size limits:
     * "Dynamic" means 1..unbounded; "lower..upper" is a bounded dynamic range; a plain
     * number is a fixed-size array. Only the first dimension is honored.
     *
     * @param cardinality OMT cardinality notation (e.g. "Dynamic", "0..10", "3")
     */
    public void setCardinality(String cardinality) {
        this.cardinality = cardinality;
        if (!"HLAASCIIstring".equalsIgnoreCase(getName()) && !"HLAunicodeString".equalsIgnoreCase(getName())) {
            if (this.cardinality.contains("Dynamic")) {
                upperLimit = Integer.MAX_VALUE;
                lowerLimit = 1;
                isDynamic = true;
            } else {
                String[] parts = this.cardinality.split(",");
                if (parts.length > 1) {
                    logger.log(Level.WARN, "Array: {} has {} dimensions, only one dimension will be displayed",
                            getName(), parts.length);
                }
                if (parts[0].contains("..")) {
                    isDynamic = true;
                    String s = parts[0].replace("..", ",").replace("[", "").replace("]", "").replace(" ", "");
                    String[] upperLowerLimit = s.split(",");
                    // FIX: OMT ranges are written "lower..upper"; the bounds were assigned
                    // swapped here (upperLimit took the lower bound), which disabled the
                    // add-row button and forced the maximum as the minimum row count.
                    lowerLimit = Integer.parseInt(upperLowerLimit[0]);
                    upperLimit = Integer.parseInt(upperLowerLimit[1]);
                } else {
                    isDynamic = false;
                    int v = Integer.parseInt(parts[0]);
                    upperLimit = v;
                    lowerLimit = v;
                }
            }
        }
    }

    public String getSemantics() {
        return semantics;
    }

    public void setSemantics(String semantics) {
        this.semantics = semantics;
    }

    /**
     * Rebuilds the element list from the element prototype: three starter rows for an
     * unbounded dynamic array, otherwise the minimum (lowerLimit) number of rows; then
     * refreshes the button states and the pane.
     */
    private void addArrayElements() {
        try {
            arrayElements.clear();
            if (isDynamic) {
                if (upperLimit == Integer.MAX_VALUE) {
                    // unbounded: seed the editor with three empty rows
                    for (int i = 0; i < 3; i++) {
                        arrayElements.add((AbstractDataType) getElementType().clone());
                    }
                } else {
                    for (int i = 0; i < lowerLimit; i++) {
                        arrayElements.add((AbstractDataType) getElementType().clone());
                    }
                }
            } else {
                for (int i = 0; i < lowerLimit; i++) {
                    arrayElements.add((AbstractDataType) getElementType().clone());
                }
            }
            arrayPane.setRemoveRowDisable(arrayElements.size() <= lowerLimit);
            arrayPane.setAddRowDisable(arrayElements.size() >= upperLimit);
            populateArrayPane(true);
        } catch (CloneNotSupportedException ex) {
            logger.log(Level.ERROR, "Error in adding array elements", ex);
        }
    }

    /**
     * Lays the element controls out in a numbered grid inside a scroll pane and installs
     * it as the ArrayPane's center.
     *
     * @param reset passed through to each element's getControl (rebuild vs reuse)
     */
    private void populateArrayPane(boolean reset) {
        GridPane gridPane = new GridPane();
        gridPane.setHgap(4);
        gridPane.setVgap(4);
        gridPane.setPadding(new Insets(4));
        ScrollPane scrollPane = new ScrollPane(gridPane);
        arrayPane.setCenter(scrollPane);
        if (!arrayElements.isEmpty()) {
            for (int i = 0; i < arrayElements.size(); i++) {
                Label rowNum = new Label(String.valueOf(i + 1));
                gridPane.add(rowNum, 0, i);
                gridPane.add(arrayElements.get(i).getControl(reset), 1, i);
            }
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.blockmanagement; import java.util.*; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.net.NetworkTopology; import org.apache.hadoop.net.NetworkTopologyWithNodeGroup; import org.apache.hadoop.net.Node; import org.apache.hadoop.net.NodeBase; /** The class is responsible for choosing the desired number of targets * for placing block replicas on environment with node-group layer. * The replica placement strategy is adjusted to: * If the writer is on a datanode, the 1st replica is placed on the local * node(or local node-group or on local rack), otherwise a random datanode. * The 2nd replica is placed on a datanode that is on a different rack with 1st * replica node. * The 3rd replica is placed on a datanode which is on a different node-group * but the same rack as the second replica node. 
*/
public class BlockPlacementPolicyWithNodeGroup extends BlockPlacementPolicyDefault {

  protected BlockPlacementPolicyWithNodeGroup() {
  }

  // NOTE(review): overrides BlockPlacementPolicyDefault.initialize but is missing
  // @Override — worth adding so the compiler guards against signature drift.
  public void initialize(Configuration conf,  FSClusterStats stats,
          NetworkTopology clusterMap, 
          Host2NodesMap host2datanodeMap) {
    super.initialize(conf, stats, clusterMap, host2datanodeMap);
  }

  /**
   * choose all good favored nodes as target.
   * If no enough targets, then choose one replica from
   * each bad favored node's node group.
   * Any node chosen from a favored node's node group is also added to
   * favoriteAndExcludedNodes so it cannot be picked twice.
   * @throws NotEnoughReplicasException
   */
  @Override
  protected void chooseFavouredNodes(String src, int numOfReplicas,
      List<DatanodeDescriptor> favoredNodes,
      Set<Node> favoriteAndExcludedNodes, long blocksize, int maxNodesPerRack,
      List<DatanodeStorageInfo> results, boolean avoidStaleNodes,
      EnumMap<StorageType, Integer> storageTypes)
      throws NotEnoughReplicasException {
    super.chooseFavouredNodes(src, numOfReplicas, favoredNodes,
        favoriteAndExcludedNodes, blocksize, maxNodesPerRack, results,
        avoidStaleNodes, storageTypes);
    if (results.size() < numOfReplicas) {
      // Not enough replicas, choose from unselected Favorednode's Nodegroup
      for (int i = 0;
          i < favoredNodes.size() && results.size() < numOfReplicas; i++) {
        DatanodeDescriptor favoredNode = favoredNodes.get(i);
        boolean chosenNode = isNodeChosen(results, favoredNode);
        if (chosenNode) {
          // this favored node already holds a replica; nothing to do
          continue;
        }
        NetworkTopologyWithNodeGroup clusterMapNodeGroup =
            (NetworkTopologyWithNodeGroup) clusterMap;
        // try a node on FavouredNode's node group
        DatanodeStorageInfo target = null;
        String scope =
            clusterMapNodeGroup.getNodeGroup(favoredNode.getNetworkLocation());
        try {
          target =
              chooseRandom(scope, favoriteAndExcludedNodes, blocksize,
                maxNodesPerRack, results, avoidStaleNodes, storageTypes);
        } catch (NotEnoughReplicasException e) {
          // catch Exception and continue with other favored nodes
          continue;
        }
        if (target == null) {
          LOG.warn("Could not find a target for file " + src
              + " within nodegroup of favored node " + favoredNode);
          continue;
        }
        favoriteAndExcludedNodes.add(target.getDatanodeDescriptor());
      }
    }
  }

  /** @return true when favoredNode already appears in the chosen results list. */
  private boolean isNodeChosen(
      List<DatanodeStorageInfo> results, DatanodeDescriptor favoredNode) {
    boolean chosenNode = false;
    for (int j = 0; j < results.size(); j++) {
      if (results.get(j).getDatanodeDescriptor().equals(favoredNode)) {
        chosenNode = true;
        break;
      }
    }
    return chosenNode;
  }

  /** choose local node of <i>localMachine</i> as the target.
   * If localMachine is not available, will fallback to nodegroup/rack
   * when flag <i>fallbackToNodeGroupAndLocalRack</i> is set.
   * Fallback order: local node, then local node group, then local rack.
   * @return the chosen node
   */
  @Override
  protected DatanodeStorageInfo chooseLocalStorage(Node localMachine,
      Set<Node> excludedNodes, long blocksize, int maxNodesPerRack,
      List<DatanodeStorageInfo> results, boolean avoidStaleNodes,
      EnumMap<StorageType, Integer> storageTypes,
      boolean fallbackToNodeGroupAndLocalRack)
      throws NotEnoughReplicasException {
    DatanodeStorageInfo localStorage = chooseLocalStorage(localMachine,
        excludedNodes, blocksize, maxNodesPerRack, results,
        avoidStaleNodes, storageTypes);
    if (localStorage != null) {
      return localStorage;
    }

    if (!fallbackToNodeGroupAndLocalRack) {
      return null;
    }
    // try a node on local node group
    DatanodeStorageInfo chosenStorage = chooseLocalNodeGroup(
        (NetworkTopologyWithNodeGroup)clusterMap, localMachine, excludedNodes, 
        blocksize, maxNodesPerRack, results, avoidStaleNodes, storageTypes);
    if (chosenStorage != null) {
      return chosenStorage;
    }
    // try a node on local rack
    return chooseLocalRack(localMachine, excludedNodes, 
        blocksize, maxNodesPerRack, results, avoidStaleNodes, storageTypes);
  }

  /** @return the node of the second replica, i.e. the first node in results
   * that is not localMachine, or null when results holds no such node. */
  private static DatanodeDescriptor secondNode(Node localMachine,
      List<DatanodeStorageInfo> results) {
    // find the second replica
    for(DatanodeStorageInfo nextStorage : results) {
      DatanodeDescriptor nextNode = nextStorage.getDatanodeDescriptor();
      if (nextNode != localMachine) {
        return nextNode;
      }
    }
    return null;
  }

  /**
   * Choose a node on the same rack as localMachine (rack = first half of the
   * node-group network location). On failure, retry on the rack of the second
   * replica; if that also fails (or there is no second replica), fall back to
   * a random node anywhere in the cluster.
   */
  @Override
  protected DatanodeStorageInfo chooseLocalRack(Node localMachine,
      Set<Node> excludedNodes, long blocksize, int maxNodesPerRack,
      List<DatanodeStorageInfo> results, boolean avoidStaleNodes,
      EnumMap<StorageType, Integer> storageTypes) throws
      NotEnoughReplicasException {
    // no local machine, so choose a random machine
    if (localMachine == null) {
      return chooseRandom(NodeBase.ROOT, excludedNodes, 
          blocksize, maxNodesPerRack, results, 
          avoidStaleNodes, storageTypes);
    }

    // choose one from the local rack, but off-nodegroup
    try {
      final String scope = NetworkTopology.getFirstHalf(localMachine.getNetworkLocation());
      return chooseRandom(scope, excludedNodes, blocksize, maxNodesPerRack,
          results, avoidStaleNodes, storageTypes);
    } catch (NotEnoughReplicasException e1) {
      // find the second replica
      final DatanodeDescriptor newLocal = secondNode(localMachine, results);
      if (newLocal != null) {
        try {
          return chooseRandom(
              clusterMap.getRack(newLocal.getNetworkLocation()), excludedNodes,
              blocksize, maxNodesPerRack, results, avoidStaleNodes,
              storageTypes);
        } catch(NotEnoughReplicasException e2) {
          //otherwise randomly choose one from the network
          return chooseRandom(NodeBase.ROOT, excludedNodes, blocksize,
              maxNodesPerRack, results, avoidStaleNodes, storageTypes);
        }
      } else {
        //otherwise randomly choose one from the network
        return chooseRandom(NodeBase.ROOT, excludedNodes, blocksize,
            maxNodesPerRack, results, avoidStaleNodes, storageTypes);
      }
    }
  }

  /**
   * {@inheritDoc}
   * Tries remote racks first (scope "~rack"); if the cluster cannot supply
   * enough remote targets, fills the remainder from the local rack.
   */
  @Override
  protected void chooseRemoteRack(int numOfReplicas,
      DatanodeDescriptor localMachine, Set<Node> excludedNodes,
      long blocksize, int maxReplicasPerRack, List<DatanodeStorageInfo> results,
      boolean avoidStaleNodes, EnumMap<StorageType, Integer> storageTypes)
      throws NotEnoughReplicasException {
    int oldNumOfReplicas = results.size();

    final String rackLocation = NetworkTopology.getFirstHalf(
        localMachine.getNetworkLocation());
    try {
      // randomly choose from remote racks
      chooseRandom(numOfReplicas, "~" + rackLocation, excludedNodes, blocksize,
          maxReplicasPerRack, results, avoidStaleNodes, storageTypes);
    } catch (NotEnoughReplicasException e) {
      // fall back to the local rack
      chooseRandom(numOfReplicas - (results.size() - oldNumOfReplicas),
          rackLocation, excludedNodes, blocksize,
          maxReplicasPerRack, results, avoidStaleNodes, storageTypes);
    }
  }

  /* choose one node from the nodegroup that <i>localMachine</i> is on.
   * if no such node is available, choose one node from the nodegroup where
   * a second replica is on.
   * if still no such node is available, return null.
   * @return the chosen node
   */
  private DatanodeStorageInfo chooseLocalNodeGroup(
      NetworkTopologyWithNodeGroup clusterMap, Node localMachine,
      Set<Node> excludedNodes, long blocksize, int maxNodesPerRack,
      List<DatanodeStorageInfo> results, boolean avoidStaleNodes,
      EnumMap<StorageType, Integer> storageTypes) throws
      NotEnoughReplicasException {
    // no local machine, so choose a random machine
    if (localMachine == null) {
      return chooseRandom(NodeBase.ROOT, excludedNodes, blocksize,
          maxNodesPerRack, results, avoidStaleNodes, storageTypes);
    }

    // choose one from the local node group
    try {
      return chooseRandom(
          clusterMap.getNodeGroup(localMachine.getNetworkLocation()),
          excludedNodes, blocksize, maxNodesPerRack, results, avoidStaleNodes,
          storageTypes);
    } catch (NotEnoughReplicasException e1) {
      final DatanodeDescriptor newLocal = secondNode(localMachine, results);
      if (newLocal != null) {
        try {
          return chooseRandom(
              clusterMap.getNodeGroup(newLocal.getNetworkLocation()),
              excludedNodes, blocksize, maxNodesPerRack, results,
              avoidStaleNodes, storageTypes);
        } catch(NotEnoughReplicasException e2) {
          //otherwise return null
          return null;
        }
      } else {
        //otherwise return null
        return null;
      }
    }
  }

  /** With node-group topology the network location is "/rack/nodegroup";
   * the rack is the first half of that string. */
  @Override
  protected String getRack(final DatanodeInfo cur) {
    String nodeGroupString = cur.getNetworkLocation();
    return NetworkTopology.getFirstHalf(nodeGroupString);
  }

  /**
   * Find other nodes in the same nodegroup of <i>localMachine</i> and add them
   * into <i>excludeNodes</i> as replica should not be duplicated for nodes
   * within the same nodegroup
   * Dependent nodes (e.g. VMs sharing a hypervisor) are excluded as well.
   * @return number of new excluded nodes
   */
  @Override
  protected int addToExcludedNodes(DatanodeDescriptor chosenNode,
      Set<Node> excludedNodes) {
    int countOfExcludedNodes = 0;
    String nodeGroupScope = chosenNode.getNetworkLocation();
    List<Node> leafNodes = clusterMap.getLeaves(nodeGroupScope);
    for (Node leafNode : leafNodes) {
      if (excludedNodes.add(leafNode)) {
        // not a existing node in excludedNodes
        countOfExcludedNodes++;
      }
    }

    countOfExcludedNodes += addDependentNodesToExcludedNodes(
        chosenNode, excludedNodes);
    return countOfExcludedNodes;
  }

  /**
   * Add all nodes from a dependent nodes list to excludedNodes.
   * Hostnames that cannot be resolved to a datanode are logged and skipped.
   * @return number of new excluded nodes
   */
  private int addDependentNodesToExcludedNodes(DatanodeDescriptor chosenNode,
      Set<Node> excludedNodes) {
    if (this.host2datanodeMap == null) {
      return 0;
    }
    int countOfExcludedNodes = 0;
    for(String hostname : chosenNode.getDependentHostNames()) {
      DatanodeDescriptor node =
          this.host2datanodeMap.getDataNodeByHostName(hostname);
      if(node!=null) {
        if (excludedNodes.add(node)) {
          countOfExcludedNodes++;
        }
      } else {
        LOG.warn("Not able to find datanode " + hostname
            + " which has dependency with datanode "
            + chosenNode.getHostName());
      }
    }

    return countOfExcludedNodes;
  }

  /**
   * Pick up replica node set for deleting replica as over-replicated.
   * First set contains replica nodes on rack with more than one
   * replica while second set contains remaining replica nodes.
   * If first is not empty, divide first set into two subsets:
   *   moreThanOne contains nodes on nodegroup with more than one replica
   *   exactlyOne contains the remaining nodes in first set
   * then pickup priSet if not empty.
   * If first is empty, then pick second.
   */
  @Override
  public Collection<DatanodeStorageInfo> pickupReplicaSet(
      Collection<DatanodeStorageInfo> first,
      Collection<DatanodeStorageInfo> second) {
    // If no replica within same rack, return directly.
    if (first.isEmpty()) {
      return second;
    }
    // Split data nodes in the first set into two sets, 
    // moreThanOne contains nodes on nodegroup with more than one replica
    // exactlyOne contains the remaining nodes
    // Group the candidates by node group (last half of the network location).
    Map<String, List<DatanodeStorageInfo>> nodeGroupMap = new HashMap<>();

    for(DatanodeStorageInfo storage : first) {
      final String nodeGroupName = NetworkTopology.getLastHalf(
          storage.getDatanodeDescriptor().getNetworkLocation());
      List<DatanodeStorageInfo> storageList = nodeGroupMap.get(nodeGroupName);
      if (storageList == null) {
        storageList = new ArrayList<>();
        nodeGroupMap.put(nodeGroupName, storageList);
      }
      storageList.add(storage);
    }

    final List<DatanodeStorageInfo> moreThanOne = new ArrayList<>();
    final List<DatanodeStorageInfo> exactlyOne = new ArrayList<>();

    // split nodes into two sets
    for(List<DatanodeStorageInfo> datanodeList : nodeGroupMap.values()) {
      if (datanodeList.size() == 1 ) {
        // exactlyOne contains nodes on nodegroup with exactly one replica
        exactlyOne.add(datanodeList.get(0));
      } else {
        // moreThanOne contains nodes on nodegroup with more than one replica
        moreThanOne.addAll(datanodeList);
      }
    }

    // prefer deleting from node groups that hold duplicates
    return moreThanOne.isEmpty()? exactlyOne : moreThanOne;
  }

  /**
   * Check if there are any replica (other than source) on the same node group
   * with target. If true, then target is not a good candidate for placing
   * specific replica as we don't want 2 replicas under the same nodegroup.
   *
   * @return true if there are any replica (other than source) on the same node
   *         group with target
   */
  @Override
  public boolean isMovable(Collection<DatanodeInfo> locs,
      DatanodeInfo source, DatanodeInfo target) {
    for (DatanodeInfo dn : locs) {
      if (dn != source && dn != target
          && clusterMap.isOnSameNodeGroup(dn, target)) {
        return false;
      }
    }
    return true;
  }
}
/* * Central Repository * * Copyright 2017-2020 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.centralrepository.eventlisteners; import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.EnumSet; import java.util.List; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.logging.Level; import java.util.stream.Collectors; import org.apache.commons.lang.StringUtils; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent; import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent; import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; import org.sleuthkit.autopsy.casemodule.events.DataSourceAddedEvent; import org.sleuthkit.autopsy.casemodule.events.DataSourceNameChangedEvent; import org.sleuthkit.autopsy.casemodule.services.TagsManager; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; import 
org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
import org.sleuthkit.autopsy.coreutils.ThreadUtils;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;

/**
 * Listen for case events and update entries in the Central Repository database
 * accordingly.
 *
 * All handling runs off the event-dispatch path on a dedicated
 * single-threaded executor, so tasks execute sequentially in submission order.
 */
@Messages({"caseeventlistener.evidencetag=Evidence"})
final class CaseEventListener implements PropertyChangeListener {

    private static final Logger LOGGER = Logger.getLogger(CaseEventListener.class.getName());
    private final ExecutorService jobProcessingExecutor;
    private static final String CASE_EVENT_THREAD_NAME = "Case-Event-Listener-%d";
    // If any changes are made to which event types are handled here, the
    // switch in propertyChange() must be updated to match.
    // (Fixed: CONTENT_TAG_ADDED and CONTENT_TAG_DELETED were previously
    // listed twice; EnumSet.of() tolerates duplicates but they were noise.)
    private static final Set<Case.Events> CASE_EVENTS_OF_INTEREST = EnumSet.of(
            Case.Events.CONTENT_TAG_ADDED,
            Case.Events.CONTENT_TAG_DELETED,
            Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED,
            Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED,
            Case.Events.DATA_SOURCE_ADDED,
            Case.Events.TAG_DEFINITION_CHANGED,
            Case.Events.CURRENT_CASE,
            Case.Events.DATA_SOURCE_NAME_CHANGED);

    CaseEventListener() {
        jobProcessingExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(CASE_EVENT_THREAD_NAME).build());
    }

    /**
     * Shuts down the event-processing executor, waiting for queued tasks.
     */
    void shutdown() {
        ThreadUtils.shutDownTaskExecutor(jobProcessingExecutor);
    }

    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        CentralRepository dbManager;
        try {
            dbManager = CentralRepository.getInstance();
        } catch (CentralRepoException ex) {
            LOGGER.log(Level.SEVERE, "Failed to get instance of db manager.", ex);
            return;
        }

        // If any changes are made to which event types are handled the change
        // must also be made to CASE_EVENTS_OF_INTEREST.
        switch (Case.Events.valueOf(evt.getPropertyName())) {
            case CONTENT_TAG_ADDED:
            case CONTENT_TAG_DELETED: {
                jobProcessingExecutor.submit(new ContentTagTask(dbManager, evt));
            }
            break;

            case BLACKBOARD_ARTIFACT_TAG_DELETED:
            case BLACKBOARD_ARTIFACT_TAG_ADDED: {
                jobProcessingExecutor.submit(new BlackboardTagTask(dbManager, evt));
            }
            break;

            case DATA_SOURCE_ADDED: {
                jobProcessingExecutor.submit(new DataSourceAddedTask(dbManager, evt));
            }
            break;

            case TAG_DEFINITION_CHANGED: {
                jobProcessingExecutor.submit(new TagDefinitionChangeTask(evt));
            }
            break;

            case CURRENT_CASE: {
                jobProcessingExecutor.submit(new CurrentCaseTask(dbManager, evt));
            }
            break;

            case DATA_SOURCE_NAME_CHANGED: {
                jobProcessingExecutor.submit(new DataSourceNameChangedTask(dbManager, evt));
            }
            break;

            default:
                // Events not listed in CASE_EVENTS_OF_INTEREST are ignored.
                break;
        }
    }

    /*
     * Add all of our Case Event Listeners to the case.
     */
    void installListeners() {
        Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
    }

    /*
     * Remove all of our Case Event Listeners from the case.
     */
    void uninstallListeners() {
        Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
    }

    /**
     * Handles CONTENT_TAG_ADDED / CONTENT_TAG_DELETED: adjusts the known
     * status of the tagged file in the Central Repository.
     */
    private final class ContentTagTask implements Runnable {

        private final CentralRepository dbManager;
        private final PropertyChangeEvent event;

        private ContentTagTask(CentralRepository db, PropertyChangeEvent evt) {
            dbManager = db;
            event = evt;
        }

        @Override
        public void run() {
            if (!CentralRepository.isEnabled()) {
                return;
            }

            AbstractFile af;
            TskData.FileKnown knownStatus;
            // NOTE(review): the original also captured the tag comment here but
            // never wrote it to the correlation attribute; the unused local was
            // removed. If the comment should be persisted (as BlackboardTagTask
            // does via setComment), that is a separate behavior change.
            if (Case.Events.valueOf(event.getPropertyName()) == Case.Events.CONTENT_TAG_ADDED) {
                // For added tags, we want to change the known status to BAD if the
                // tag that was just added is in the list of central repo tags.
                final ContentTagAddedEvent tagAddedEvent = (ContentTagAddedEvent) event;
                final ContentTag tagAdded = tagAddedEvent.getAddedTag();

                if (TagsManager.getNotableTagDisplayNames().contains(tagAdded.getName().getDisplayName())) {
                    if (tagAdded.getContent() instanceof AbstractFile) {
                        af = (AbstractFile) tagAdded.getContent();
                        knownStatus = TskData.FileKnown.BAD;
                    } else {
                        LOGGER.log(Level.WARNING, "Error updating non-file object");
                        return;
                    }
                } else {
                    // The added tag isn't flagged as bad in central repo, so do nothing
                    return;
                }
            } else { // CONTENT_TAG_DELETED
                // For deleted tags, we want to set the file status to UNKNOWN if:
                //   - The tag that was just removed is notable in central repo
                //   - There are no remaining tags that are notable
                final ContentTagDeletedEvent tagDeletedEvent = (ContentTagDeletedEvent) event;

                long contentID = tagDeletedEvent.getDeletedTagInfo().getContentID();
                String tagName = tagDeletedEvent.getDeletedTagInfo().getName().getDisplayName();

                if (!TagsManager.getNotableTagDisplayNames().contains(tagName)) {
                    // If the tag that got removed isn't on the list of central repo tags, do nothing
                    return;
                }

                try {
                    // Get the remaining tags on the content object
                    Content content = Case.getCurrentCaseThrows().getSleuthkitCase().getContentById(contentID);
                    TagsManager tagsManager = Case.getCurrentCaseThrows().getServices().getTagsManager();
                    List<ContentTag> tags = tagsManager.getContentTagsByContent(content);

                    // noneMatch short-circuits; the original collected a full
                    // list just to call isEmpty() on it.
                    if (tags.stream()
                            .map(tag -> tag.getName().getDisplayName())
                            .noneMatch(TagsManager.getNotableTagDisplayNames()::contains)) {
                        // There are no more bad tags on the object
                        if (content instanceof AbstractFile) {
                            af = (AbstractFile) content;
                            knownStatus = TskData.FileKnown.UNKNOWN;
                        } else {
                            LOGGER.log(Level.WARNING, "Error updating non-file object");
                            return;
                        }
                    } else {
                        // There's still at least one bad tag, so leave the known status as is
                        return;
                    }
                } catch (TskCoreException | NoCurrentCaseException ex) {
                    LOGGER.log(Level.SEVERE, "Failed to find content", ex);
                    return;
                }
            }

            final CorrelationAttributeInstance eamArtifact = CorrelationAttributeUtil.makeCorrAttrFromFile(af);
            if (eamArtifact != null) {
                // send update to Central Repository db
                try {
                    dbManager.setAttributeInstanceKnownStatus(eamArtifact, knownStatus);
                } catch (CentralRepoException ex) {
                    LOGGER.log(Level.SEVERE, "Error connecting to Central Repository database while setting artifact known status.", ex); //NON-NLS
                }
            }
        } // CONTENT_TAG_ADDED, CONTENT_TAG_DELETED
    }

    /**
     * Handles BLACKBOARD_ARTIFACT_TAG_ADDED / BLACKBOARD_ARTIFACT_TAG_DELETED:
     * adjusts the known status of the tagged artifact's correlation
     * attributes in the Central Repository.
     */
    private final class BlackboardTagTask implements Runnable {

        private final CentralRepository dbManager;
        private final PropertyChangeEvent event;

        private BlackboardTagTask(CentralRepository db, PropertyChangeEvent evt) {
            dbManager = db;
            event = evt;
        }

        @Override
        public void run() {
            if (!CentralRepository.isEnabled()) {
                return;
            }

            Content content;
            BlackboardArtifact bbArtifact;
            TskData.FileKnown knownStatus;
            String comment;
            if (Case.Events.valueOf(event.getPropertyName()) == Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED) {
                // For added tags, we want to change the known status to BAD if the
                // tag that was just added is in the list of central repo tags.
                final BlackBoardArtifactTagAddedEvent tagAddedEvent = (BlackBoardArtifactTagAddedEvent) event;
                final BlackboardArtifactTag tagAdded = tagAddedEvent.getAddedTag();

                if (TagsManager.getNotableTagDisplayNames().contains(tagAdded.getName().getDisplayName())) {
                    content = tagAdded.getContent();
                    bbArtifact = tagAdded.getArtifact();
                    knownStatus = TskData.FileKnown.BAD;
                    comment = tagAdded.getComment();
                } else {
                    // The added tag isn't flagged as bad in central repo, so do nothing
                    return;
                }
            } else { //BLACKBOARD_ARTIFACT_TAG_DELETED
                Case openCase;
                try {
                    openCase = Case.getCurrentCaseThrows();
                } catch (NoCurrentCaseException ex) {
                    LOGGER.log(Level.SEVERE, "Exception while getting open case.", ex);
                    return;
                }
                // For deleted tags, we want to set the file status to UNKNOWN if:
                //   - The tag that was just removed is notable in central repo
                //   - There are no remaining tags that are notable
                final BlackBoardArtifactTagDeletedEvent tagDeletedEvent = (BlackBoardArtifactTagDeletedEvent) event;

                long contentID = tagDeletedEvent.getDeletedTagInfo().getContentID();
                long artifactID = tagDeletedEvent.getDeletedTagInfo().getArtifactID();
                String tagName = tagDeletedEvent.getDeletedTagInfo().getName().getDisplayName();

                if (!TagsManager.getNotableTagDisplayNames().contains(tagName)) {
                    // If the tag that got removed isn't on the list of central repo tags, do nothing
                    return;
                }

                try {
                    // Get the remaining tags on the artifact
                    content = openCase.getSleuthkitCase().getContentById(contentID);
                    bbArtifact = openCase.getSleuthkitCase().getBlackboardArtifact(artifactID);
                    TagsManager tagsManager = openCase.getServices().getTagsManager();
                    List<BlackboardArtifactTag> tags = tagsManager.getBlackboardArtifactTagsByArtifact(bbArtifact);

                    // noneMatch short-circuits; the original collected a full
                    // list just to call isEmpty() on it.
                    if (tags.stream()
                            .map(tag -> tag.getName().getDisplayName())
                            .noneMatch(TagsManager.getNotableTagDisplayNames()::contains)) {
                        // There are no more bad tags on the object
                        knownStatus = TskData.FileKnown.UNKNOWN;
                        comment = "";
                    } else {
                        // There's still at least one bad tag, so leave the known status as is
                        return;
                    }
                } catch (TskCoreException ex) {
                    LOGGER.log(Level.SEVERE, "Failed to find content", ex);
                    return;
                }
            }

            // Files that hash-matched as KNOWN (good) are never marked notable.
            if ((content instanceof AbstractFile) && (((AbstractFile) content).getKnown() == TskData.FileKnown.KNOWN)) {
                return;
            }

            List<CorrelationAttributeInstance> convertedArtifacts = CorrelationAttributeUtil.makeCorrAttrsForCorrelation(bbArtifact);
            for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) {
                eamArtifact.setComment(comment);
                try {
                    dbManager.setAttributeInstanceKnownStatus(eamArtifact, knownStatus);
                } catch (CentralRepoException ex) {
                    LOGGER.log(Level.SEVERE, "Error connecting to Central Repository database while setting artifact known status.", ex); //NON-NLS
                }
            }
        } // BLACKBOARD_ARTIFACT_TAG_ADDED, BLACKBOARD_ARTIFACT_TAG_DELETED
    }

    /**
     * Handles TAG_DEFINITION_CHANGED: re-evaluates the known status of every
     * file and artifact carrying the modified tag, honoring other notable
     * tags that would conflict with a status change.
     */
    private final class TagDefinitionChangeTask implements Runnable {

        private final PropertyChangeEvent event;

        private TagDefinitionChangeTask(PropertyChangeEvent evt) {
            event = evt;
        }

        @Override
        public void run() {
            if (!CentralRepository.isEnabled()) {
                return;
            }
            //get the display name of the tag that has had it's definition modified
            String modifiedTagName = (String) event.getOldValue();

            /*
             * Set knownBad status for all files/artifacts in the given case
             * that are tagged with the given tag name.
             */
            try {
                // Hoisted: the current case is loop-invariant; the original
                // re-resolved it on every lookup.
                Case openCase = Case.getCurrentCaseThrows();
                TagName tagName = openCase.getServices().getTagsManager().getDisplayNamesToTagNamesMap().get(modifiedTagName);
                //First update the artifacts
                //Get all BlackboardArtifactTags with this tag name
                List<BlackboardArtifactTag> artifactTags = openCase.getSleuthkitCase().getBlackboardArtifactTagsByTagName(tagName);
                for (BlackboardArtifactTag bbTag : artifactTags) {
                    //start with assumption that none of the other tags applied to this Correlation Attribute will prevent it's status from being changed
                    boolean hasTagWithConflictingKnownStatus = false;
                    // if the status of the tag has been changed to TskData.FileKnown.UNKNOWN
                    // we need to check the status of all other tags on this correlation attribute before changing
                    // the status of the correlation attribute in the central repository
                    if (tagName.getKnownStatus() == TskData.FileKnown.UNKNOWN) {
                        Content content = bbTag.getContent();
                        // If the content which this Blackboard Artifact Tag is linked to is an AbstractFile with KNOWN status then
                        // it's status in the central reporsitory should not be changed to UNKNOWN
                        if ((content instanceof AbstractFile) && (((AbstractFile) content).getKnown() == TskData.FileKnown.KNOWN)) {
                            continue;
                        }
                        //Get the BlackboardArtifact which this BlackboardArtifactTag has been applied to.
                        BlackboardArtifact bbArtifact = bbTag.getArtifact();
                        TagsManager tagsManager = openCase.getServices().getTagsManager();
                        List<BlackboardArtifactTag> tags = tagsManager.getBlackboardArtifactTagsByArtifact(bbArtifact);
                        //get all tags which are on this blackboard artifact
                        for (BlackboardArtifactTag t : tags) {
                            //All instances of the modified tag name will be changed, they can not conflict with each other
                            if (t.getName().equals(tagName)) {
                                continue;
                            }
                            //if any other tags on this artifact are Notable in status then this artifact can not have its status changed
                            if (TskData.FileKnown.BAD == t.getName().getKnownStatus()) {
                                //a tag with a conflicting status has been found, the status of this correlation attribute can not be modified
                                hasTagWithConflictingKnownStatus = true;
                                break;
                            }
                        }
                    }
                    //if the Correlation Attribute will have no tags with a status which would prevent the current status from being changed
                    if (!hasTagWithConflictingKnownStatus) {
                        //Get the correlation atttributes that correspond to the current BlackboardArtifactTag if their status should be changed
                        //with the initial set of correlation attributes this should be a single correlation attribute
                        List<CorrelationAttributeInstance> convertedArtifacts = CorrelationAttributeUtil.makeCorrAttrsForCorrelation(bbTag.getArtifact());
                        for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) {
                            CentralRepository.getInstance().setAttributeInstanceKnownStatus(eamArtifact, tagName.getKnownStatus());
                        }
                    }
                }

                // Next update the files
                List<ContentTag> fileTags = openCase.getSleuthkitCase().getContentTagsByTagName(tagName);
                //Get all ContentTags with this tag name
                for (ContentTag contentTag : fileTags) {
                    //start with assumption that none of the other tags applied to this ContentTag will prevent it's status from being changed
                    boolean hasTagWithConflictingKnownStatus = false;
                    // if the status of the tag has been changed to TskData.FileKnown.UNKNOWN
                    // we need to check the status of all other tags on this file before changing
                    // the status of the file in the central repository
                    if (tagName.getKnownStatus() == TskData.FileKnown.UNKNOWN) {
                        Content content = contentTag.getContent();
                        TagsManager tagsManager = openCase.getServices().getTagsManager();
                        List<ContentTag> tags = tagsManager.getContentTagsByContent(content);
                        //get all tags which are on this file
                        for (ContentTag t : tags) {
                            //All instances of the modified tag name will be changed, they can not conflict with each other
                            if (t.getName().equals(tagName)) {
                                continue;
                            }
                            //if any other tags on this file are Notable in status then this file can not have its status changed
                            if (TskData.FileKnown.BAD == t.getName().getKnownStatus()) {
                                //a tag with a conflicting status has been found, the status of this file can not be modified
                                hasTagWithConflictingKnownStatus = true;
                                break;
                            }
                        }
                    }
                    //if the file will have no tags with a status which would prevent the current status from being changed
                    if (!hasTagWithConflictingKnownStatus) {
                        Content taggedContent = contentTag.getContent();
                        if (taggedContent instanceof AbstractFile) {
                            final CorrelationAttributeInstance eamArtifact = CorrelationAttributeUtil.makeCorrAttrFromFile((AbstractFile) taggedContent);
                            if (eamArtifact != null) {
                                CentralRepository.getInstance().setAttributeInstanceKnownStatus(eamArtifact, tagName.getKnownStatus());
                            }
                        }
                    }
                }
            } catch (TskCoreException ex) {
                LOGGER.log(Level.SEVERE, "Cannot update known status in central repository for tag: " + modifiedTagName, ex); //NON-NLS
            } catch (CentralRepoException ex) {
                LOGGER.log(Level.SEVERE, "Cannot get central repository for tag: " + modifiedTagName, ex); //NON-NLS
            } catch (NoCurrentCaseException ex) {
                LOGGER.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
            }
        } //TAG_STATUS_CHANGED
    }

    /**
     * Handles DATA_SOURCE_ADDED: registers the new data source with the
     * Central Repository if it is not already known.
     */
    private final class DataSourceAddedTask implements Runnable {

        private final CentralRepository dbManager;
        private final PropertyChangeEvent event;

        private DataSourceAddedTask(CentralRepository db, PropertyChangeEvent evt) {
            dbManager = db;
            event = evt;
        }

        @Override
        public void run() {
            if (!CentralRepository.isEnabled()) {
                return;
            }
            Case openCase;
            try {
                openCase = Case.getCurrentCaseThrows();
            } catch (NoCurrentCaseException ex) {
                LOGGER.log(Level.SEVERE, "Exception while getting open case.", ex);
                return;
            }

            final DataSourceAddedEvent dataSourceAddedEvent = (DataSourceAddedEvent) event;
            Content newDataSource = dataSourceAddedEvent.getDataSource();

            try {
                CorrelationCase correlationCase = dbManager.getCase(openCase);
                if (null == dbManager.getDataSource(correlationCase, newDataSource.getId())) {
                    CorrelationDataSource.fromTSKDataSource(correlationCase, newDataSource);
                }
            } catch (CentralRepoException ex) {
                LOGGER.log(Level.SEVERE, "Error adding new data source to the central repository", ex); //NON-NLS
            }
        } // DATA_SOURCE_ADDED
    }

    /**
     * Handles CURRENT_CASE: on case open, ensures the case exists in the
     * Central Repository (a reopened case may already be present).
     */
    private final class CurrentCaseTask implements Runnable {

        private final CentralRepository dbManager;
        private final PropertyChangeEvent event;

        private CurrentCaseTask(CentralRepository db, PropertyChangeEvent evt) {
            dbManager = db;
            event = evt;
        }

        @Override
        public void run() {
            /*
             * A case has been opened if evt.getOldValue() is null and
             * evt.getNewValue() is a valid Case.
             */
            if ((null == event.getOldValue()) && (event.getNewValue() instanceof Case)) {
                Case curCase = (Case) event.getNewValue();
                IngestEventsListener.resetCeModuleInstanceCount();

                if (!CentralRepository.isEnabled()) {
                    return;
                }

                try {
                    // NOTE: Cannot determine if the opened case is a new case or a reopened case,
                    // so check for existing name in DB and insert if missing.
                    if (dbManager.getCase(curCase) == null) {
                        dbManager.newCase(curCase);
                    }
                } catch (CentralRepoException ex) {
                    LOGGER.log(Level.SEVERE, "Error connecting to Central Repository database.", ex); //NON-NLS
                }
            }
        } // CURRENT_CASE
    }

    /**
     * Handles DATA_SOURCE_NAME_CHANGED: propagates the data source's new
     * (non-empty) name to the Central Repository.
     */
    private final class DataSourceNameChangedTask implements Runnable {

        private final CentralRepository dbManager;
        private final PropertyChangeEvent event;

        private DataSourceNameChangedTask(CentralRepository db, PropertyChangeEvent evt) {
            dbManager = db;
            event = evt;
        }

        @Override
        public void run() {
            final DataSourceNameChangedEvent dataSourceNameChangedEvent = (DataSourceNameChangedEvent) event;
            Content dataSource = dataSourceNameChangedEvent.getDataSource();
            String newName = (String) event.getNewValue();

            if (!StringUtils.isEmpty(newName)) {
                if (!CentralRepository.isEnabled()) {
                    return;
                }

                try {
                    CorrelationCase correlationCase = dbManager.getCase(Case.getCurrentCaseThrows());
                    CorrelationDataSource existingEamDataSource = dbManager.getDataSource(correlationCase, dataSource.getId());
                    dbManager.updateDataSourceName(existingEamDataSource, newName);
                } catch (CentralRepoException ex) {
                    LOGGER.log(Level.SEVERE, "Error updating data source with ID " + dataSource.getId() + " to " + newName, ex); //NON-NLS
                } catch (NoCurrentCaseException ex) {
                    LOGGER.log(Level.SEVERE, "No open case", ex);
                }
            }
        } // DATA_SOURCE_NAME_CHANGED
    }
}
package org.uibk.iis.robotprojectapp; import android.content.Context; import android.content.SharedPreferences; import android.os.Bundle; import android.os.Handler; import android.preference.PreferenceManager; import android.support.v4.app.Fragment; import android.view.KeyEvent; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.EditText; import android.widget.Spinner; import android.widget.TextView; import android.view.View.OnClickListener; import android.view.inputmethod.EditorInfo; public class QuadDriverFragment extends Fragment implements QuadDriverListener { /** * The fragment argument representing the section number for this fragment. */ private static final String ARG_SECTION_NUMBER = "section_number"; private Context context; private TextView textLog; private Thread quadDriverJob; private double distancePerEdge; private short robotSpeed; private float robotSpeedCmL; private float robotSpeedCmR; /** * Returns a new instance of this fragment for the given section number. 
*/ public static QuadDriverFragment newInstance(int sectionNumber) { QuadDriverFragment fragment = new QuadDriverFragment(); Bundle args = new Bundle(); args.putInt(ARG_SECTION_NUMBER, sectionNumber); fragment.setArguments(args); return fragment; } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { final View rootView = inflater.inflate(R.layout.fragment_robot_quad_drive, container, false); textLog = (TextView) rootView.findViewById(R.id.robot_quad_drive_textLog); context = rootView.getContext(); // get Robot speed from the preferences robotSpeed = (short) PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getInt( rootView.getContext().getString(R.string.prefRobotSlowVelocity), 18); robotSpeedCmL = PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getFloat( rootView.getContext().getString(R.string.prefRobotLeftWheelSlow), 8.2f); robotSpeedCmR = PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getFloat( rootView.getContext().getString(R.string.prefRobotRightWheelSlow), 8.2f); distancePerEdge = PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getFloat( rootView.getContext().getString(R.string.prefRobotQuadDriveDistance), 20.0f); Spinner spinner = (Spinner) rootView.findViewById(R.id.robot_quad_drive_speed_spinner); // Create an ArrayAdapter using the string array and a default spinner // layout ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(rootView.getContext(), R.array.robot_quad_drive_speed_array, android.R.layout.simple_spinner_item); // Specify the layout to use when the list of choices appears adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); // Apply the adapter to the spinner spinner.setAdapter(adapter); spinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { @Override public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) { if 
(pos == 0) { // Slow... robotSpeed = (short) PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getInt( rootView.getContext().getString(R.string.prefRobotSlowVelocity), 18); robotSpeedCmL = PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getFloat( rootView.getContext().getString(R.string.prefRobotLeftWheelSlow), 8.2f); robotSpeedCmR = PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getFloat( rootView.getContext().getString(R.string.prefRobotRightWheelSlow), 8.2f); } else if (pos == 1) { // Medium... robotSpeed = (short) PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getInt( rootView.getContext().getString(R.string.prefRobotMedmVelocity), 32); robotSpeedCmL = PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getFloat( rootView.getContext().getString(R.string.prefRobotLeftWheelMedm), 14.6f); robotSpeedCmR = PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getFloat( rootView.getContext().getString(R.string.prefRobotRightWheelMedm), 14.6f); } else if (pos == 2) { // Fast... 
robotSpeed = (short) PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getInt( rootView.getContext().getString(R.string.prefRobotFastVelocity), 55); robotSpeedCmL = PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getFloat( rootView.getContext().getString(R.string.prefRobotLeftWheelFast), 25.5f); robotSpeedCmR = PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getFloat( rootView.getContext().getString(R.string.prefRobotRightWheelFast), 25.5f); } logText("Robot speed was set to: " + robotSpeed + ", left Wheel: " + robotSpeedCmL + "cm/s, right Wheel: " + robotSpeedCmR + "cm/s"); } @Override public void onNothingSelected(AdapterView<?> arg0) { } }); // setup of the textfield to edit the distance to drive each edge final EditText editText = (EditText) rootView.findViewById(R.id.robot_quad_drive_distance_to_drive); editText.setText("" + PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).getFloat( rootView.getContext().getString(R.string.prefRobotQuadDriveDistance), 20.0f)); editText.setOnEditorActionListener(new TextView.OnEditorActionListener() { @Override public boolean onEditorAction(TextView v, int actionId, KeyEvent event) { boolean handled = false; if (actionId == EditorInfo.IME_ACTION_DONE) { handled = true; distancePerEdge = Double.parseDouble(editText.getText().toString()); SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(rootView.getContext()).edit(); editor.putFloat(rootView.getContext().getString(R.string.prefRobotQuadDriveDistance), (float) distancePerEdge); editor.commit(); logText("set the distance to drive each edge to: " + distancePerEdge + "cm"); } return handled; } }); // set all the button onClick methods ((Button) rootView.findViewById(R.id.robot_quad_drive_buttonStart)).setOnClickListener(new OnClickListener() { public void onClick(View v) { buttonStartQuadDrive_onClick(v); } }); return rootView; } public void logText(String text) { if 
(text.length() > 0) { textLog.append("[" + text.length() + "] " + text + "\n"); } } // main loop public void buttonStartQuadDrive_onClick(View v) { if (ComDriver.getInstance().isConnected()) { logText("starting the Quad Drive..."); quadDriverJob = new Thread(new QuadDriveRunnable(distancePerEdge, robotSpeed, robotSpeedCmL, robotSpeedCmR, this)); quadDriverJob.start(); } } private static class QuadDriveRunnable implements Runnable { private short robotSpeed; private float robotSpeedCmL; private float robotSpeedCmR; private double distancePerEdge; private QuadDriverListener listener; public QuadDriveRunnable(double distancePerEdge, short robotSpeed, float robotSpeedCmL, float robotSpeedCmR, QuadDriverListener listener) { this.robotSpeed = robotSpeed; this.robotSpeedCmL = robotSpeedCmL; this.robotSpeedCmR = robotSpeedCmR; this.listener = listener; this.distancePerEdge = distancePerEdge; } private void driveForward() throws InterruptedException { ComDriver cm = ComDriver.getInstance(); float timeL = (float) (distancePerEdge / (double) robotSpeedCmL); float timeR = (float) (distancePerEdge / (double) robotSpeedCmR); cm.comReadWrite(new byte[]{'i', (byte) robotSpeed, (byte) ((float) robotSpeed * timeR / timeL), '\r', '\n'}); Thread.sleep((long) (timeL * 1000.0f)); cm.comReadWrite(new byte[]{'i', (byte) 0, (byte) 0, '\r', '\n'}); } private void turnRight90Degrees() throws InterruptedException { double wheelDistance = Math.PI / 2.0 * CalibrationTask.ROBOT_AXLE_LENGTH; float time = (float) wheelDistance / robotSpeedCmL; ComDriver.getInstance().comReadWrite(new byte[]{'i', (byte) robotSpeed, (byte) 0, '\r', '\n'}); Thread.sleep((long) (time * 1000.0f)); ComDriver.getInstance().comReadWrite(new byte[]{'i', (byte) 0, (byte) 0, '\r', '\n'}); } @SuppressWarnings("unused") private void turnLeft90Degrees() throws InterruptedException { double wheelDistance = Math.PI / 2.0 * CalibrationTask.ROBOT_AXLE_LENGTH; float time = (float) wheelDistance / robotSpeedCmR; 
ComDriver.getInstance().comReadWrite(new byte[]{'i', (byte) 0, (byte) robotSpeed, '\r', '\n'}); Thread.sleep((long) (time * 1000.0f)); ComDriver.getInstance().comReadWrite(new byte[]{'i', (byte) 0, (byte) 0, '\r', '\n'}); } @Override public void run() { ComDriver cm = ComDriver.getInstance(); if (cm.isConnected()) { try { // ************* The interesting Stuff is here ;) // ************* listener.onUpdate("driving first edge"); driveForward(); listener.onUpdate("turning the first time"); turnRight90Degrees(); listener.onUpdate("driving second edge"); driveForward(); listener.onUpdate("turning the second time"); turnRight90Degrees(); listener.onUpdate("driving third edge"); driveForward(); listener.onUpdate("turning the third time"); turnRight90Degrees(); listener.onUpdate("driving last edge"); driveForward(); listener.onUpdate("turning the last time"); turnRight90Degrees(); } catch (InterruptedException e) { e.printStackTrace(); listener.onFinished(); } } listener.onFinished(); } } @Override public void onUpdate(final String message) { Handler mainHandler = new Handler(context.getMainLooper()); mainHandler.post(new Runnable() { @Override public void run() { logText(message); } }); } @Override public void onFinished() { Handler mainHandler = new Handler(context.getMainLooper()); mainHandler.post(new Runnable() { @Override public void run() { try { quadDriverJob.join(); } catch (InterruptedException e) { e.printStackTrace(); } } }); } }
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.network.v2019_04_01.implementation;

import com.microsoft.azure.arm.collection.InnerSupportsGet;
import com.microsoft.azure.arm.collection.InnerSupportsDelete;
import com.microsoft.azure.arm.collection.InnerSupportsListing;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.Validator;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.HTTP;
import retrofit2.http.Path;
import retrofit2.http.PUT;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;

/**
 * An instance of this class provides access to all the operations defined
 * in AzureFirewalls.
 */
// NOTE(review): this file is produced by AutoRest (see header) — manual edits
// will be overwritten the next time the client is regenerated.
public class AzureFirewallsInner implements InnerSupportsGet<AzureFirewallInner>, InnerSupportsDelete<Void>, InnerSupportsListing<AzureFirewallInner> {
    /** The Retrofit service to perform REST calls. */
    private AzureFirewallsService service;
    /** The service client containing this operation class. */
    private NetworkManagementClientImpl client;

    /**
     * Initializes an instance of AzureFirewallsInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public AzureFirewallsInner(Retrofit retrofit, NetworkManagementClientImpl client) {
        this.service = retrofit.create(AzureFirewallsService.class);
        this.client = client;
    }

    /**
     * The interface defining all the services for AzureFirewalls to be
     * used by Retrofit to perform actually REST calls.
     */
    interface AzureFirewallsService {
        // DELETE endpoints are declared with @HTTP(..., hasBody = true) rather
        // than @DELETE so Retrofit permits a request body on the call.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2019_04_01.AzureFirewalls delete" })
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> delete(@Path("resourceGroupName") String resourceGroupName, @Path("azureFirewallName") String azureFirewallName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2019_04_01.AzureFirewalls beginDelete" })
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> beginDelete(@Path("resourceGroupName") String resourceGroupName, @Path("azureFirewallName") String azureFirewallName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2019_04_01.AzureFirewalls getByResourceGroup" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}")
        Observable<Response<ResponseBody>> getByResourceGroup(@Path("resourceGroupName") String resourceGroupName, @Path("azureFirewallName") String azureFirewallName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2019_04_01.AzureFirewalls createOrUpdate" })
        @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}")
        Observable<Response<ResponseBody>> createOrUpdate(@Path("resourceGroupName") String resourceGroupName, @Path("azureFirewallName") String azureFirewallName, @Path("subscriptionId") String subscriptionId, @Body AzureFirewallInner parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2019_04_01.AzureFirewalls beginCreateOrUpdate" })
        @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}")
        Observable<Response<ResponseBody>> beginCreateOrUpdate(@Path("resourceGroupName") String resourceGroupName, @Path("azureFirewallName") String azureFirewallName, @Path("subscriptionId") String subscriptionId, @Body AzureFirewallInner parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2019_04_01.AzureFirewalls listByResourceGroup" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls")
        Observable<Response<ResponseBody>> listByResourceGroup(@Path("resourceGroupName") String resourceGroupName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2019_04_01.AzureFirewalls list" })
        @GET("subscriptions/{subscriptionId}/providers/Microsoft.Network/azureFirewalls")
        Observable<Response<ResponseBody>> list(@Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        // Next-page endpoints take the server-supplied nextLink verbatim via @Url.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2019_04_01.AzureFirewalls listByResourceGroupNext" })
        @GET
        Observable<Response<ResponseBody>> listByResourceGroupNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2019_04_01.AzureFirewalls listNext" })
        @GET
        Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    }

    /**
     * Deletes the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void delete(String resourceGroupName, String azureFirewallName) {
        // Blocks until the long-running delete reaches a terminal state;
        // last() waits for the final polling response of the LRO.
        deleteWithServiceResponseAsync(resourceGroupName, azureFirewallName).toBlocking().last().body();
    }

    /**
     * Deletes the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> deleteAsync(String resourceGroupName, String azureFirewallName, final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(deleteWithServiceResponseAsync(resourceGroupName, azureFirewallName), serviceCallback);
    }

    /**
     * Deletes the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<Void> deleteAsync(String resourceGroupName, String azureFirewallName) {
        return deleteWithServiceResponseAsync(resourceGroupName, azureFirewallName).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * Deletes the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String resourceGroupName, String azureFirewallName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (azureFirewallName == null) {
            throw new IllegalArgumentException("Parameter azureFirewallName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2019-04-01";
        Observable<Response<ResponseBody>> observable = service.delete(resourceGroupName, azureFirewallName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent());
        // Hands the initial response to the Azure long-running-operation helper,
        // which polls until the delete completes.
        return client.getAzureClient().getPostOrDeleteResultAsync(observable, new TypeToken<Void>() { }.getType());
    }

    /**
     * Deletes the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginDelete(String resourceGroupName, String azureFirewallName) {
        // begin* issues exactly one request with no polling, hence single()
        // rather than last().
        beginDeleteWithServiceResponseAsync(resourceGroupName, azureFirewallName).toBlocking().single().body();
    }

    /**
     * Deletes the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> beginDeleteAsync(String resourceGroupName, String azureFirewallName, final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(beginDeleteWithServiceResponseAsync(resourceGroupName, azureFirewallName), serviceCallback);
    }

    /**
     * Deletes the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<Void> beginDeleteAsync(String resourceGroupName, String azureFirewallName) {
        return beginDeleteWithServiceResponseAsync(resourceGroupName, azureFirewallName).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * Deletes the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the {@link ServiceResponse} object
     */
    public Observable<ServiceResponse<Void>> beginDeleteWithServiceResponseAsync(String resourceGroupName, String azureFirewallName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (azureFirewallName == null) {
            throw new IllegalArgumentException("Parameter azureFirewallName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2019-04-01";
        return service.beginDelete(resourceGroupName, azureFirewallName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<Void> clientResponse = beginDeleteDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps the raw HTTP response to a typed ServiceResponse: 200/202/204 are
    // accepted as success; any other status is deserialized as a CloudException.
    private ServiceResponse<Void> beginDeleteDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<Void>() { }.getType())
                .register(202, new TypeToken<Void>() { }.getType())
                .register(204, new TypeToken<Void>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Gets the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the AzureFirewallInner object if successful.
     */
    public AzureFirewallInner getByResourceGroup(String resourceGroupName, String azureFirewallName) {
        return getByResourceGroupWithServiceResponseAsync(resourceGroupName, azureFirewallName).toBlocking().single().body();
    }

    /**
     * Gets the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<AzureFirewallInner> getByResourceGroupAsync(String resourceGroupName, String azureFirewallName, final ServiceCallback<AzureFirewallInner> serviceCallback) {
        return ServiceFuture.fromResponse(getByResourceGroupWithServiceResponseAsync(resourceGroupName, azureFirewallName), serviceCallback);
    }

    /**
     * Gets the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the AzureFirewallInner object
     */
    public Observable<AzureFirewallInner> getByResourceGroupAsync(String resourceGroupName, String azureFirewallName) {
        return getByResourceGroupWithServiceResponseAsync(resourceGroupName, azureFirewallName).map(new Func1<ServiceResponse<AzureFirewallInner>, AzureFirewallInner>() {
            @Override
            public AzureFirewallInner call(ServiceResponse<AzureFirewallInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Gets the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the AzureFirewallInner object
     */
    public Observable<ServiceResponse<AzureFirewallInner>> getByResourceGroupWithServiceResponseAsync(String resourceGroupName, String azureFirewallName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (azureFirewallName == null) {
            throw new IllegalArgumentException("Parameter azureFirewallName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2019-04-01";
        return service.getByResourceGroup(resourceGroupName, azureFirewallName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<AzureFirewallInner>>>() {
                @Override
                public Observable<ServiceResponse<AzureFirewallInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<AzureFirewallInner> clientResponse = getByResourceGroupDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // 200 is the only success status for GET; everything else becomes a CloudException.
    private ServiceResponse<AzureFirewallInner> getByResourceGroupDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<AzureFirewallInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<AzureFirewallInner>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Creates or updates the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @param parameters Parameters supplied to the create or update Azure Firewall operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the AzureFirewallInner object if successful.
     */
    public AzureFirewallInner createOrUpdate(String resourceGroupName, String azureFirewallName, AzureFirewallInner parameters) {
        // last(): blocks for the final polling result of the long-running PUT.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, azureFirewallName, parameters).toBlocking().last().body();
    }

    /**
     * Creates or updates the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @param parameters Parameters supplied to the create or update Azure Firewall operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<AzureFirewallInner> createOrUpdateAsync(String resourceGroupName, String azureFirewallName, AzureFirewallInner parameters, final ServiceCallback<AzureFirewallInner> serviceCallback) {
        return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, azureFirewallName, parameters), serviceCallback);
    }

    /**
     * Creates or updates the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @param parameters Parameters supplied to the create or update Azure Firewall operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<AzureFirewallInner> createOrUpdateAsync(String resourceGroupName, String azureFirewallName, AzureFirewallInner parameters) {
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, azureFirewallName, parameters).map(new Func1<ServiceResponse<AzureFirewallInner>, AzureFirewallInner>() {
            @Override
            public AzureFirewallInner call(ServiceResponse<AzureFirewallInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Creates or updates the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @param parameters Parameters supplied to the create or update Azure Firewall operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponse<AzureFirewallInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String azureFirewallName, AzureFirewallInner parameters) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (azureFirewallName == null) {
            throw new IllegalArgumentException("Parameter azureFirewallName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        // Client-side validation of the request payload before it is sent.
        Validator.validate(parameters);
        final String apiVersion = "2019-04-01";
        Observable<Response<ResponseBody>> observable = service.createOrUpdate(resourceGroupName, azureFirewallName, this.client.subscriptionId(), parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent());
        // PUT is a long-running operation: the Azure client polls until it completes.
        return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<AzureFirewallInner>() { }.getType());
    }

    /**
     * Creates or updates the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @param parameters Parameters supplied to the create or update Azure Firewall operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the AzureFirewallInner object if successful.
     */
    public AzureFirewallInner beginCreateOrUpdate(String resourceGroupName, String azureFirewallName, AzureFirewallInner parameters) {
        // begin* performs a single request without polling, hence single().
        return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, azureFirewallName, parameters).toBlocking().single().body();
    }

    /**
     * Creates or updates the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @param parameters Parameters supplied to the create or update Azure Firewall operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<AzureFirewallInner> beginCreateOrUpdateAsync(String resourceGroupName, String azureFirewallName, AzureFirewallInner parameters, final ServiceCallback<AzureFirewallInner> serviceCallback) {
        return ServiceFuture.fromResponse(beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, azureFirewallName, parameters), serviceCallback);
    }

    /**
     * Creates or updates the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @param parameters Parameters supplied to the create or update Azure Firewall operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the AzureFirewallInner object
     */
    public Observable<AzureFirewallInner> beginCreateOrUpdateAsync(String resourceGroupName, String azureFirewallName, AzureFirewallInner parameters) {
        return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, azureFirewallName, parameters).map(new Func1<ServiceResponse<AzureFirewallInner>, AzureFirewallInner>() {
            @Override
            public AzureFirewallInner call(ServiceResponse<AzureFirewallInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Creates or updates the specified Azure Firewall.
     *
     * @param resourceGroupName The name of the resource group.
     * @param azureFirewallName The name of the Azure Firewall.
     * @param parameters Parameters supplied to the create or update Azure Firewall operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the AzureFirewallInner object
     */
    public Observable<ServiceResponse<AzureFirewallInner>> beginCreateOrUpdateWithServiceResponseAsync(String resourceGroupName, String azureFirewallName, AzureFirewallInner parameters) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (azureFirewallName == null) {
            throw new IllegalArgumentException("Parameter azureFirewallName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        // Client-side validation of the request payload before it is sent.
        Validator.validate(parameters);
        final String apiVersion = "2019-04-01";
        return service.beginCreateOrUpdate(resourceGroupName, azureFirewallName, this.client.subscriptionId(), parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<AzureFirewallInner>>>() {
                @Override
                public Observable<ServiceResponse<AzureFirewallInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<AzureFirewallInner> clientResponse = beginCreateOrUpdateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // 200 (updated) and 201 (created) are success for PUT; anything else is a CloudException.
    private ServiceResponse<AzureFirewallInner> beginCreateOrUpdateDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<AzureFirewallInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<AzureFirewallInner>() { }.getType())
                .register(201, new TypeToken<AzureFirewallInner>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Lists all Azure Firewalls in a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;AzureFirewallInner&gt; object if successful.
     */
    public PagedList<AzureFirewallInner> listByResourceGroup(final String resourceGroupName) {
        ServiceResponse<Page<AzureFirewallInner>> response = listByResourceGroupSinglePageAsync(resourceGroupName).toBlocking().single();
        // PagedList lazily fetches subsequent pages via the nextPageLink as the
        // caller iterates past the first page.
        return new PagedList<AzureFirewallInner>(response.body()) {
            @Override
            public Page<AzureFirewallInner> nextPage(String nextPageLink) {
                return listByResourceGroupNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Lists all Azure Firewalls in a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<AzureFirewallInner>> listByResourceGroupAsync(final String resourceGroupName, final ListOperationCallback<AzureFirewallInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listByResourceGroupSinglePageAsync(resourceGroupName),
            new Func1<String, Observable<ServiceResponse<Page<AzureFirewallInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<AzureFirewallInner>>> call(String nextPageLink) {
                    return listByResourceGroupNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }

    /**
     * Lists all Azure Firewalls in a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;AzureFirewallInner&gt; object
     */
    public Observable<Page<AzureFirewallInner>> listByResourceGroupAsync(final String resourceGroupName) {
        return listByResourceGroupWithServiceResponseAsync(resourceGroupName)
            .map(new Func1<ServiceResponse<Page<AzureFirewallInner>>, Page<AzureFirewallInner>>() {
                @Override
                public Page<AzureFirewallInner> call(ServiceResponse<Page<AzureFirewallInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * Lists all Azure Firewalls in a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;AzureFirewallInner&gt; object
     */
    public Observable<ServiceResponse<Page<AzureFirewallInner>>> listByResourceGroupWithServiceResponseAsync(final String resourceGroupName) {
        return listByResourceGroupSinglePageAsync(resourceGroupName)
            .concatMap(new Func1<ServiceResponse<Page<AzureFirewallInner>>, Observable<ServiceResponse<Page<AzureFirewallInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<AzureFirewallInner>>> call(ServiceResponse<Page<AzureFirewallInner>> page) {
                    // Recursively chase nextPageLink until the server stops
                    // returning one, emitting each page in order.
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listByResourceGroupNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }

    /**
     * Lists all Azure Firewalls in a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;AzureFirewallInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<AzureFirewallInner>>> listByResourceGroupSinglePageAsync(final String resourceGroupName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2019-04-01";
        return service.listByResourceGroup(resourceGroupName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<AzureFirewallInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<AzureFirewallInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<AzureFirewallInner>> result = listByResourceGroupDelegate(response);
                        return Observable.just(new ServiceResponse<Page<AzureFirewallInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes one page of results; 200 is success, otherwise CloudException.
    private ServiceResponse<PageImpl<AzureFirewallInner>> listByResourceGroupDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<AzureFirewallInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<AzureFirewallInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Gets all the Azure Firewalls in a subscription.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;AzureFirewallInner&gt; object if successful.
     */
    public PagedList<AzureFirewallInner> list() {
        ServiceResponse<Page<AzureFirewallInner>> response = listSinglePageAsync().toBlocking().single();
        // PagedList lazily fetches subsequent pages as the caller iterates.
        return new PagedList<AzureFirewallInner>(response.body()) {
            @Override
            public Page<AzureFirewallInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Gets all the Azure Firewalls in a subscription.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<AzureFirewallInner>> listAsync(final ListOperationCallback<AzureFirewallInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listSinglePageAsync(),
            new Func1<String, Observable<ServiceResponse<Page<AzureFirewallInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<AzureFirewallInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }

    /**
     * Gets all the Azure Firewalls in a subscription.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;AzureFirewallInner&gt; object
     */
    public Observable<Page<AzureFirewallInner>> listAsync() {
        return listWithServiceResponseAsync()
            .map(new Func1<ServiceResponse<Page<AzureFirewallInner>>, Page<AzureFirewallInner>>() {
                @Override
                public Page<AzureFirewallInner> call(ServiceResponse<Page<AzureFirewallInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * Gets all the Azure Firewalls in a subscription.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;AzureFirewallInner&gt; object
     */
    public Observable<ServiceResponse<Page<AzureFirewallInner>>> listWithServiceResponseAsync() {
        return listSinglePageAsync()
            .concatMap(new Func1<ServiceResponse<Page<AzureFirewallInner>>, Observable<ServiceResponse<Page<AzureFirewallInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<AzureFirewallInner>>> call(ServiceResponse<Page<AzureFirewallInner>> page) {
                    // Chase nextPageLink until exhausted, emitting pages in order.
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }

    /**
     * Gets all the Azure Firewalls in a subscription.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;AzureFirewallInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<AzureFirewallInner>>> listSinglePageAsync() {
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2019-04-01";
        return service.list(this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<AzureFirewallInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<AzureFirewallInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<AzureFirewallInner>> result = listDelegate(response);
                        return Observable.just(new ServiceResponse<Page<AzureFirewallInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes one page of results; 200 is success, otherwise CloudException.
    private ServiceResponse<PageImpl<AzureFirewallInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<AzureFirewallInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<AzureFirewallInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Lists all Azure Firewalls in a resource group.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;AzureFirewallInner&gt; object if successful.
     */
    public PagedList<AzureFirewallInner> listByResourceGroupNext(final String nextPageLink) {
        ServiceResponse<Page<AzureFirewallInner>> response = listByResourceGroupNextSinglePageAsync(nextPageLink).toBlocking().single();
        return new PagedList<AzureFirewallInner>(response.body()) {
            @Override
            public Page<AzureFirewallInner> nextPage(String nextPageLink) {
                return listByResourceGroupNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Lists all Azure Firewalls in a resource group.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<List<AzureFirewallInner>> listByResourceGroupNextAsync(final String nextPageLink, final ServiceFuture<List<AzureFirewallInner>> serviceFuture, final ListOperationCallback<AzureFirewallInner> serviceCallback) { return AzureServiceFuture.fromPageResponse( listByResourceGroupNextSinglePageAsync(nextPageLink), new Func1<String, Observable<ServiceResponse<Page<AzureFirewallInner>>>>() { @Override public Observable<ServiceResponse<Page<AzureFirewallInner>>> call(String nextPageLink) { return listByResourceGroupNextSinglePageAsync(nextPageLink); } }, serviceCallback); } /** * Lists all Azure Firewalls in a resource group. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;AzureFirewallInner&gt; object */ public Observable<Page<AzureFirewallInner>> listByResourceGroupNextAsync(final String nextPageLink) { return listByResourceGroupNextWithServiceResponseAsync(nextPageLink) .map(new Func1<ServiceResponse<Page<AzureFirewallInner>>, Page<AzureFirewallInner>>() { @Override public Page<AzureFirewallInner> call(ServiceResponse<Page<AzureFirewallInner>> response) { return response.body(); } }); } /** * Lists all Azure Firewalls in a resource group. * * @param nextPageLink The NextLink from the previous successful call to List operation. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;AzureFirewallInner&gt; object */ public Observable<ServiceResponse<Page<AzureFirewallInner>>> listByResourceGroupNextWithServiceResponseAsync(final String nextPageLink) { return listByResourceGroupNextSinglePageAsync(nextPageLink) .concatMap(new Func1<ServiceResponse<Page<AzureFirewallInner>>, Observable<ServiceResponse<Page<AzureFirewallInner>>>>() { @Override public Observable<ServiceResponse<Page<AzureFirewallInner>>> call(ServiceResponse<Page<AzureFirewallInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listByResourceGroupNextWithServiceResponseAsync(nextPageLink)); } }); } /** * Lists all Azure Firewalls in a resource group. * ServiceResponse<PageImpl<AzureFirewallInner>> * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;AzureFirewallInner&gt; object wrapped in {@link ServiceResponse} if successful. 
*/ public Observable<ServiceResponse<Page<AzureFirewallInner>>> listByResourceGroupNextSinglePageAsync(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null."); } String nextUrl = String.format("%s", nextPageLink); return service.listByResourceGroupNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<AzureFirewallInner>>>>() { @Override public Observable<ServiceResponse<Page<AzureFirewallInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<AzureFirewallInner>> result = listByResourceGroupNextDelegate(response); return Observable.just(new ServiceResponse<Page<AzureFirewallInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<PageImpl<AzureFirewallInner>> listByResourceGroupNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<PageImpl<AzureFirewallInner>, CloudException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<PageImpl<AzureFirewallInner>>() { }.getType()) .registerError(CloudException.class) .build(response); } /** * Gets all the Azure Firewalls in a subscription. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the PagedList&lt;AzureFirewallInner&gt; object if successful. 
*/ public PagedList<AzureFirewallInner> listNext(final String nextPageLink) { ServiceResponse<Page<AzureFirewallInner>> response = listNextSinglePageAsync(nextPageLink).toBlocking().single(); return new PagedList<AzureFirewallInner>(response.body()) { @Override public Page<AzureFirewallInner> nextPage(String nextPageLink) { return listNextSinglePageAsync(nextPageLink).toBlocking().single().body(); } }; } /** * Gets all the Azure Firewalls in a subscription. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @param serviceFuture the ServiceFuture object tracking the Retrofit calls * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<List<AzureFirewallInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<AzureFirewallInner>> serviceFuture, final ListOperationCallback<AzureFirewallInner> serviceCallback) { return AzureServiceFuture.fromPageResponse( listNextSinglePageAsync(nextPageLink), new Func1<String, Observable<ServiceResponse<Page<AzureFirewallInner>>>>() { @Override public Observable<ServiceResponse<Page<AzureFirewallInner>>> call(String nextPageLink) { return listNextSinglePageAsync(nextPageLink); } }, serviceCallback); } /** * Gets all the Azure Firewalls in a subscription. * * @param nextPageLink The NextLink from the previous successful call to List operation. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;AzureFirewallInner&gt; object */ public Observable<Page<AzureFirewallInner>> listNextAsync(final String nextPageLink) { return listNextWithServiceResponseAsync(nextPageLink) .map(new Func1<ServiceResponse<Page<AzureFirewallInner>>, Page<AzureFirewallInner>>() { @Override public Page<AzureFirewallInner> call(ServiceResponse<Page<AzureFirewallInner>> response) { return response.body(); } }); } /** * Gets all the Azure Firewalls in a subscription. * * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;AzureFirewallInner&gt; object */ public Observable<ServiceResponse<Page<AzureFirewallInner>>> listNextWithServiceResponseAsync(final String nextPageLink) { return listNextSinglePageAsync(nextPageLink) .concatMap(new Func1<ServiceResponse<Page<AzureFirewallInner>>, Observable<ServiceResponse<Page<AzureFirewallInner>>>>() { @Override public Observable<ServiceResponse<Page<AzureFirewallInner>>> call(ServiceResponse<Page<AzureFirewallInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink)); } }); } /** * Gets all the Azure Firewalls in a subscription. * ServiceResponse<PageImpl<AzureFirewallInner>> * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;AzureFirewallInner&gt; object wrapped in {@link ServiceResponse} if successful. 
*/ public Observable<ServiceResponse<Page<AzureFirewallInner>>> listNextSinglePageAsync(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null."); } String nextUrl = String.format("%s", nextPageLink); return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<AzureFirewallInner>>>>() { @Override public Observable<ServiceResponse<Page<AzureFirewallInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<AzureFirewallInner>> result = listNextDelegate(response); return Observable.just(new ServiceResponse<Page<AzureFirewallInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<PageImpl<AzureFirewallInner>> listNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<PageImpl<AzureFirewallInner>, CloudException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<PageImpl<AzureFirewallInner>>() { }.getType()) .registerError(CloudException.class) .build(response); } }
package br.odb.derelict2d; import android.app.Activity; import android.media.MediaPlayer; import android.os.Bundle; import android.view.View; import android.view.View.OnClickListener; import android.widget.ArrayAdapter; import android.widget.Spinner; import android.widget.Toast; import br.odb.derelict.core.DerelictGame; import br.odb.derelict.core.commands.PickCommand; import br.odb.derelict.core.commands.ToggleCommand; import br.odb.derelict.core.commands.UseCommand; import br.odb.derelict.core.commands.UseWithCommand; import br.odb.gameapp.ApplicationClient; import br.odb.gameapp.ConsoleApplication; import br.odb.gameapp.GameUpdateDelegate; import br.odb.gameapp.UserCommandLineAction; import br.odb.gamerendering.rendering.AssetManager; import br.odb.gameworld.Item; import br.odb.utils.FileServerDelegate; public class ManageInventoryActivity extends Activity implements ApplicationClient, GameUpdateDelegate, OnClickListener { private DerelictGame game; private Spinner spnCollectedItems; private Spinner spnLocationItems; private Spinner spnActions; private AssetManager resManager; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_manage_inventory); resManager = (( Derelict2DApplication)getApplication()).getAssetManager(); spnCollectedItems = (Spinner) findViewById( R.id.spnCollected ); spnLocationItems = (Spinner) findViewById( R.id.spnLocationItems ); spnActions = (Spinner) findViewById( R.id.spnActions ); findViewById( R.id.btnDo ).setOnClickListener( this ); game = (( Derelict2DApplication)getApplication()).game; game.setApplicationClient(this); game.printPreamble().setGameUpdateDelegate(this).showUI(); update(); } @Override public void update() { spnLocationItems.setAdapter( new ArrayAdapter<Item>( this, android.R.layout.simple_spinner_item, game.getCollectableItems() ) ); spnCollectedItems.setAdapter( new ArrayAdapter<Item>( this, android.R.layout.simple_spinner_item, 
game.getCollectedItems() ) ); spnActions.setAdapter( new ArrayAdapter<UserCommandLineAction>( this, android.R.layout.simple_spinner_item, game.getAvailableCommands() ) ); } @Override public void setClientId(String id) { // TODO Auto-generated method stub } @Override public void printWarning(String msg) { // TODO Auto-generated method stub } @Override public void printError(String msg) { // TODO Auto-generated method stub } @Override public void printVerbose(String msg) { // TODO Auto-generated method stub } @Override public String requestFilenameForSave() { // TODO Auto-generated method stub return null; } @Override public String requestFilenameForOpen() { // TODO Auto-generated method stub return null; } @Override public String getInput(String msg) { // TODO Auto-generated method stub return null; } @Override public int chooseOption(String question, String[] options) { // TODO Auto-generated method stub return 0; } @Override public FileServerDelegate getFileServer() { // TODO Auto-generated method stub return null; } @Override public void printNormal(String string) { // TODO Auto-generated method stub } @Override public void alert(String string) { // TODO Auto-generated method stub } @Override public void onClick(View v) { int index; Item locationItem = null; Item collectedItem = null; try { UserCommandLineAction cmd = ( ( UserCommandLineAction ) spnActions.getSelectedItem() ); index = spnLocationItems.getSelectedItemPosition(); if ( index != -1 ) { locationItem = game.getCollectableItems()[ index ]; } index = spnCollectedItems.getSelectedItemPosition(); if ( index != -1 ) { collectedItem = game.getCollectedItems()[ index ]; } String data = ""; if ( cmd instanceof PickCommand ) { data = cmd.toString() + " " + locationItem.getName(); } else if ( cmd instanceof ToggleCommand ) { data = cmd.toString() + " " + collectedItem.getName(); } else if ( cmd instanceof UseCommand ) { data = cmd.toString() + " " + collectedItem.getName(); } else if ( cmd instanceof 
UseWithCommand ) { data = cmd.toString() + " " + locationItem.getName() + " " + collectedItem.getName(); } else { Toast.makeText( this, "Action not supported", Toast.LENGTH_SHORT ).show(); } game.sendData( data ); Toast.makeText( this, data, Toast.LENGTH_SHORT ).show(); } catch (Exception e) { Toast.makeText( this, "Action not supported", Toast.LENGTH_SHORT ).show(); } } @Override public void playMedia(String uri, String alt) { MediaPlayer.create( this, resManager.getResIdForUri( uri ) ).start(); } @Override public void clear() { // TODO Auto-generated method stub } @Override public void sendQuit() { // TODO Auto-generated method stub } @Override public boolean isConnected() { // TODO Auto-generated method stub return true; } @Override public String openHTTP(String url) { return ConsoleApplication.defaultJavaHTTPGet( url, this ); } @Override public void shortPause() { // TODO Auto-generated method stub } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.profile; import org.apache.lucene.search.Query; import java.util.*; import java.util.concurrent.LinkedBlockingDeque; /** * This class tracks the dependency tree for queries (scoring and rewriting) and * generates {@link ProfileBreakdown} for each node in the tree. It also finalizes the tree * and returns a list of {@link ProfileResult} that can be serialized back to the client */ final class InternalProfileTree { private ArrayList<ProfileBreakdown> timings; /** Maps the Query to it's list of children. This is basically the dependency tree */ private ArrayList<ArrayList<Integer>> tree; /** A list of the original queries, keyed by index position */ private ArrayList<Query> queries; /** A list of top-level "roots". Each root can have its own tree of profiles */ private ArrayList<Integer> roots; /** Rewrite time */ private long rewriteTime; private long rewriteScratch; /** A temporary stack used to record where we are in the dependency tree. 
Only used by scoring queries */ private Deque<Integer> stack; private int currentToken = 0; public InternalProfileTree() { timings = new ArrayList<>(10); stack = new LinkedBlockingDeque<>(10); tree = new ArrayList<>(10); queries = new ArrayList<>(10); roots = new ArrayList<>(10); } /** * Returns a {@link ProfileBreakdown} for a scoring query. Scoring queries (e.g. those * that are past the rewrite phase and are now being wrapped by createWeight() ) follow * a recursive progression. We can track the dependency tree by a simple stack * * The only hiccup is that the first scoring query will be identical to the last rewritten * query, so we need to take special care to fix that * * @param query The scoring query we wish to profile * @return A ProfileBreakdown for this query */ public ProfileBreakdown getQueryBreakdown(Query query) { int token = currentToken; boolean stackEmpty = stack.isEmpty(); // If the stack is empty, we are a new root query if (stackEmpty) { // We couldn't find a rewritten query to attach to, so just add it as a // top-level root. This is just a precaution: it really shouldn't happen. // We would only get here if a top-level query that never rewrites for some reason. roots.add(token); // Increment the token since we are adding a new node, but notably, do not // updateParent() because this was added as a root currentToken += 1; stack.add(token); return addDependencyNode(query, token); } updateParent(token); // Increment the token since we are adding a new node currentToken += 1; stack.add(token); return addDependencyNode(query, token); } /** * Begin timing a query for a specific Timing context */ public void startRewriteTime() { assert rewriteScratch == 0; rewriteScratch = System.nanoTime(); } /** * Halt the timing process and add the elapsed rewriting time. 
* startRewriteTime() must be called for a particular context prior to calling * stopAndAddRewriteTime(), otherwise the elapsed time will be negative and * nonsensical * * @return The elapsed time */ public long stopAndAddRewriteTime() { long time = Math.max(1, System.nanoTime() - rewriteScratch); rewriteTime += time; rewriteScratch = 0; return time; } /** * Helper method to add a new node to the dependency tree. * * Initializes a new list in the dependency tree, saves the query and * generates a new {@link ProfileBreakdown} to track the timings * of this query * * @param query The query to profile * @param token The assigned token for this query * @return A ProfileBreakdown to profile this query */ private ProfileBreakdown addDependencyNode(Query query, int token) { // Add a new slot in the dependency tree tree.add(new ArrayList<>(5)); // Save our query for lookup later queries.add(query); ProfileBreakdown queryTimings = new ProfileBreakdown(); timings.add(token, queryTimings); return queryTimings; } /** * Removes the last (e.g. 
most recent) value on the stack */ public void pollLast() { stack.pollLast(); } /** * After the query has been run and profiled, we need to merge the flat timing map * with the dependency graph to build a data structure that mirrors the original * query tree * * @return a hierarchical representation of the profiled query tree */ public List<ProfileResult> getQueryTree() { ArrayList<ProfileResult> results = new ArrayList<>(5); for (Integer root : roots) { results.add(doGetQueryTree(root)); } return results; } /** * Recursive helper to finalize a node in the dependency tree * @param token The node we are currently finalizing * @return A hierarchical representation of the tree inclusive of children at this level */ private ProfileResult doGetQueryTree(int token) { Query query = queries.get(token); ProfileBreakdown breakdown = timings.get(token); Map<String, Long> timings = breakdown.toTimingMap(); List<Integer> children = tree.get(token); List<ProfileResult> childrenProfileResults = Collections.emptyList(); if (children != null) { childrenProfileResults = new ArrayList<>(children.size()); for (Integer child : children) { ProfileResult childNode = doGetQueryTree(child); childrenProfileResults.add(childNode); } } // TODO this would be better done bottom-up instead of top-down to avoid // calculating the same times over and over...but worth the effort? 
long nodeTime = getNodeTime(timings, childrenProfileResults); String queryDescription = query.getClass().getSimpleName(); String luceneName = query.toString(); return new ProfileResult(queryDescription, luceneName, timings, childrenProfileResults, nodeTime); } public long getRewriteTime() { return rewriteTime; } /** * Internal helper to add a child to the current parent node * * @param childToken The child to add to the current parent */ private void updateParent(int childToken) { Integer parent = stack.peekLast(); ArrayList<Integer> parentNode = tree.get(parent); parentNode.add(childToken); tree.set(parent, parentNode); } /** * Internal helper to calculate the time of a node, inclusive of children * * @param timings A map of breakdown timing for the node * @param children All children profile results at this node * @return The total time at this node, inclusive of children */ private static long getNodeTime(Map<String, Long> timings, List<ProfileResult> children) { long nodeTime = 0; for (long time : timings.values()) { nodeTime += time; } // Then add up our children for (ProfileResult child : children) { nodeTime += getNodeTime(child.getTimeBreakdown(), child.getProfiledChildren()); } return nodeTime; } }
/*
 * (c) Copyright 2020 EntIT Software LLC, a Micro Focus company, L.P.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License v2.0 which accompany this distribution.
 *
 * The Apache License is available at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.cloudslang.content.abbyy.http;

import io.cloudslang.content.abbyy.constants.Headers;
import io.cloudslang.content.abbyy.entities.others.ExportFormat;
import io.cloudslang.content.abbyy.entities.others.LocationId;
import io.cloudslang.content.abbyy.entities.requests.HttpClientRequest;
import io.cloudslang.content.abbyy.entities.responses.HttpClientResponse;
import io.cloudslang.content.abbyy.exceptions.AbbyySdkException;
import io.cloudslang.content.abbyy.exceptions.ClientSideException;
import io.cloudslang.content.abbyy.entities.inputs.AbbyyInput;
import io.cloudslang.content.constants.ReturnCodes;
import org.apache.commons.lang3.StringUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

import javax.xml.parsers.ParserConfigurationException;
import java.io.File;
import java.nio.file.Path;
import java.util.Properties;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@code AbbyyApi}. The static {@code HttpClient.execute(...)}
 * call is stubbed via PowerMock, so no real HTTP traffic occurs.
 */
@RunWith(PowerMockRunner.class)
@PrepareForTest({AbbyyApi.class, HttpClient.class})
public class AbbyyApiTest {

    // Class under test.
    private AbbyyApi sut;
    @Rule
    public ExpectedException exception = ExpectedException.none();
    @Mock
    private AbbyyResponseParser responseParserMock;

    @Before
    public void setUp() throws ParserConfigurationException {
        this.sut = new AbbyyApi(responseParserMock);
    }

    // HTTP 401 from the service must surface as a ClientSideException.
    @Test
    public void postRequest_httpClientCallReturns401_ClientSideException() throws Exception {
        //Arrange
        final String statusCode = "401";

        final AbbyyInput abbyyInput = mock(AbbyyInput.class);
        when(abbyyInput.getSourceFile()).thenReturn(mock(Path.class));

        HttpClientRequest.Builder builderSpy = new HttpClientRequest.Builder();
        PowerMockito.spy(builderSpy);

        HttpClientResponse responseMock = mock(HttpClientResponse.class);
        when(responseMock.getReturnCode()).thenReturn(ReturnCodes.SUCCESS);
        when(responseMock.getStatusCode()).thenReturn(Short.parseShort(statusCode));

        PowerMockito.mockStatic(HttpClient.class);
        PowerMockito.when(HttpClient.class, "execute", any(HttpClientRequest.class)).thenReturn(responseMock);

        //Assert
        this.exception.expect(ClientSideException.class);

        //Act
        this.sut.request(abbyyInput);
        // NOTE(review): unreachable once the expected exception is thrown above.
        assertEquals(statusCode, this.sut.getLastStatusCode());
    }

    // Successful request must delegate the response to the parser.
    @Test
    public void postRequest_httpClientCallReturnsSuccess_Success() throws Exception {
        //Arrange
        final String statusCode = "200";
        final String url = "url";

        final AbbyyInput abbyyInput = mock(AbbyyInput.class);
        when(abbyyInput.getSourceFile()).thenReturn(mock(Path.class));

        HttpClientRequest.Builder builderSpy = new HttpClientRequest.Builder();
        PowerMockito.spy(builderSpy);

        HttpClientResponse responseMock = mock(HttpClientResponse.class);
        when(responseMock.getReturnCode()).thenReturn(ReturnCodes.SUCCESS);
        when(responseMock.getStatusCode()).thenReturn(Short.parseShort(statusCode));

        PowerMockito.mockStatic(HttpClient.class);
        PowerMockito.when(HttpClient.class, "execute", any(HttpClientRequest.class)).thenReturn(responseMock);

        //Act
        this.sut.request(abbyyInput);

        //Assert
        verify(this.responseParserMock).parseResponse(responseMock);
        assertEquals(statusCode, this.sut.getLastStatusCode());
    }

    // getTaskStatus must parse the response and record the HTTP status code.
    @Test
    public void getTaskStatus_httpClientCallSucceeds_Success() throws Exception {
        //Arrange
        final String statusCode = "203";
        final String taskId = "taskid";

        final AbbyyInput abbyyInput = mock(AbbyyInput.class);
        when(abbyyInput.getLocationId()).thenReturn(LocationId.EU);
        when(abbyyInput.getSourceFile()).thenReturn(mock(Path.class));

        HttpClientRequest.Builder builderSpy = new HttpClientRequest.Builder();
        PowerMockito.spy(builderSpy);

        HttpClientResponse responseMock = mock(HttpClientResponse.class);
        when(responseMock.getReturnCode()).thenReturn(ReturnCodes.SUCCESS);
        when(responseMock.getStatusCode()).thenReturn(Short.parseShort(statusCode));

        PowerMockito.mockStatic(HttpClient.class);
        PowerMockito.when(HttpClient.class, "execute", any(HttpClientRequest.class)).thenReturn(responseMock);

        //Act
        this.sut.getTaskStatus(abbyyInput, taskId);

        //Assert
        verify(responseParserMock).parseResponse(responseMock);
        assertEquals(statusCode, this.sut.getLastStatusCode());
    }

    // A non-2xx status (0 here) while fetching the result must raise AbbyySdkException.
    @Test
    public void getResult_httpClientCallNotOk_AbbyySdkException() throws Exception {
        //Arrange
        final String resultUrl = "resultUrl";
        final ExportFormat exportFormat = ExportFormat.XML;
        final String downloadPath = null;
        final boolean useSpecificCharSet = false;

        final AbbyyInput abbyyInput = mock(AbbyyInput.class);
        when(abbyyInput.getLocationId()).thenReturn(LocationId.EU);
        when(abbyyInput.getSourceFile()).thenReturn(mock(Path.class));

        HttpClientRequest.Builder builderSpy = new HttpClientRequest.Builder();
        PowerMockito.spy(builderSpy);

        HttpClientResponse responseMock = mock(HttpClientResponse.class);
        when(responseMock.getReturnCode()).thenReturn(ReturnCodes.SUCCESS);
        when(responseMock.getStatusCode()).thenReturn((short) 0);

        PowerMockito.mockStatic(HttpClient.class);
        PowerMockito.when(HttpClient.class, "execute", any(HttpClientRequest.class)).thenReturn(responseMock);

        //Assert
        this.exception.expect(AbbyySdkException.class);

        //Act
        this.sut.getResult(abbyyInput, resultUrl, exportFormat, downloadPath, useSpecificCharSet);
    }

    // Successful result fetch must return the response body unchanged.
    @Test
    public void getResult_httpClientCallSucceeds_Success() throws Exception {
        //Arrange
        final String statusCode = "200";
        final String expectedReturnResult = "expected";
        final String resultUrl = "resultUrl";
        final ExportFormat exportFormat = ExportFormat.XML;
        final String downloadPath = null;
        final boolean useSpecificCharSet = false;

        final AbbyyInput abbyyInput = mock(AbbyyInput.class);
        when(abbyyInput.getLocationId()).thenReturn(LocationId.EU);
        when(abbyyInput.getSourceFile()).thenReturn(mock(Path.class));

        HttpClientRequest.Builder builderSpy = new HttpClientRequest.Builder();
        PowerMockito.spy(builderSpy);

        HttpClientResponse responseMock = mock(HttpClientResponse.class);
        when(responseMock.getReturnCode()).thenReturn(ReturnCodes.SUCCESS);
        when(responseMock.getStatusCode()).thenReturn(Short.parseShort(statusCode));
        when(responseMock.getReturnResult()).thenReturn(expectedReturnResult);

        PowerMockito.mockStatic(HttpClient.class);
        PowerMockito.when(HttpClient.class, "execute", any(HttpClientRequest.class)).thenReturn(responseMock);

        //Act
        String returnResult = this.sut.getResult(abbyyInput, resultUrl, exportFormat, downloadPath, useSpecificCharSet);

        //Assert
        assertEquals(expectedReturnResult, returnResult);
        assertEquals(statusCode, this.sut.getLastStatusCode());
    }

    // Non-OK status while probing the result size must raise AbbyySdkException,
    // and the last status code must still be recorded.
    @Test
    public void getResultSize_httpClientCallIsNotOk_AbbyySdkException() throws Exception {
        //Arrange
        final String statusCode = "0";
        final String resultUrl = "resultUrl";
        final ExportFormat exportFormat = ExportFormat.XML;

        final AbbyyInput abbyyInput = mock(AbbyyInput.class);
        when(abbyyInput.getLocationId()).thenReturn(LocationId.EU);
        when(abbyyInput.getSourceFile()).thenReturn(mock(Path.class));

        HttpClientRequest.Builder builderSpy = new HttpClientRequest.Builder();
        PowerMockito.spy(builderSpy);

        HttpClientResponse responseMock = mock(HttpClientResponse.class);
        when(responseMock.getReturnCode()).thenReturn(ReturnCodes.SUCCESS);
        when(responseMock.getStatusCode()).thenReturn(Short.parseShort(statusCode));

        PowerMockito.mockStatic(HttpClient.class);
        PowerMockito.when(HttpClient.class, "execute", any(HttpClientRequest.class)).thenReturn(responseMock);

        try {
            //Act
            this.sut.getResultSize(abbyyInput, resultUrl, exportFormat);
            //Assert
            fail();
        } catch (AbbyySdkException ex) {
            assertEquals(statusCode, this.sut.getLastStatusCode());
        }
    }

    // An empty Content-Length header must be treated as an error.
    @Test
    public void getResultSize_contentLengthHeaderIsMissing_AbbyySdkException() throws Exception {
        //Arrange
        final String statusCode = "200";
        final String resultUrl = "resultUrl";
        final ExportFormat exportFormat = ExportFormat.XML;

        final AbbyyInput abbyyInput = mock(AbbyyInput.class);
        when(abbyyInput.getLocationId()).thenReturn(LocationId.EU);
        when(abbyyInput.getSourceFile()).thenReturn(mock(Path.class));

        HttpClientRequest.Builder builderSpy = new HttpClientRequest.Builder();
        PowerMockito.spy(builderSpy);

        HttpClientResponse responseMock = mock(HttpClientResponse.class);
        when(responseMock.getReturnCode()).thenReturn(ReturnCodes.SUCCESS);
        when(responseMock.getStatusCode()).thenReturn(Short.parseShort(statusCode));
        Properties responseHeaders = mock(Properties.class);
        when(responseHeaders.getProperty(eq(Headers.CONTENT_LENGTH))).thenReturn(StringUtils.EMPTY);
        when(responseMock.getResponseHeaders()).thenReturn(responseHeaders);

        PowerMockito.mockStatic(HttpClient.class);
        PowerMockito.when(HttpClient.class, "execute", any(HttpClientRequest.class)).thenReturn(responseMock);

        try {
            //Act
            this.sut.getResultSize(abbyyInput, resultUrl, exportFormat);
            //Assert
            fail();
        } catch (AbbyySdkException ex) {
            assertEquals(statusCode, this.sut.getLastStatusCode());
        }
    }

    // A numeric Content-Length must be returned as the result size.
    @Test
    public void getResultSize_httpClientCallSucceeds_Success() throws Exception {
        //Arrange
        final String statusCode = "200";
        final String expectedSize = "123";
        final String resultUrl = "resultUrl";
        final ExportFormat exportFormat = ExportFormat.XML;

        final AbbyyInput abbyyInput = mock(AbbyyInput.class);
        when(abbyyInput.getLocationId()).thenReturn(LocationId.EU);
        when(abbyyInput.getSourceFile()).thenReturn(mock(Path.class));

        HttpClientRequest.Builder builderSpy = new HttpClientRequest.Builder();
        PowerMockito.spy(builderSpy);

        HttpClientResponse responseMock = mock(HttpClientResponse.class);
        when(responseMock.getReturnCode()).thenReturn(ReturnCodes.SUCCESS);
        when(responseMock.getStatusCode()).thenReturn(Short.parseShort(statusCode));
        Properties responseHeaders = mock(Properties.class);
        when(responseHeaders.getProperty(eq(Headers.CONTENT_LENGTH))).thenReturn(expectedSize);
        when(responseMock.getResponseHeaders()).thenReturn(responseHeaders);

        PowerMockito.mockStatic(HttpClient.class);
        PowerMockito.when(HttpClient.class, "execute", any(HttpClientRequest.class)).thenReturn(responseMock);

        //Act
        long size = this.sut.getResultSize(abbyyInput, resultUrl, exportFormat);

        //Assert
        assertEquals(expectedSize, String.valueOf(size));
        assertEquals(statusCode, this.sut.getLastStatusCode());
    }

    // Negative start index must be rejected before any HTTP call is made.
    @Test
    public void getResultChunk_startByteIndexIsNegative_IllegalArgumentException() throws Exception {
        //Arrange
        final AbbyyInput abbyyInput = mock(AbbyyInput.class);
        final String resultUrl = "resultUrl";
        final ExportFormat exportFormat = ExportFormat.XML;
        final int startByteIndex = -1;
        final int endByteIndex = startByteIndex + 1;

        //Assert
        this.exception.expect(IllegalArgumentException.class);

        //Act
        this.sut.getResultChunk(abbyyInput, resultUrl, exportFormat, startByteIndex, endByteIndex);
    }

    // Negative end index must be rejected before any HTTP call is made.
    @Test
    public void getResultChunk_endByteIndexIsNegative_IllegalArgumentException() throws Exception {
        //Arrange
        final AbbyyInput abbyyInput = mock(AbbyyInput.class);
        final String resultUrl = "resultUrl";
        final ExportFormat exportFormat = ExportFormat.XML;
        final int startByteIndex = 0;
        final int endByteIndex = -1;

        //Assert
        this.exception.expect(IllegalArgumentException.class);

        //Act
        this.sut.getResultChunk(abbyyInput, resultUrl, exportFormat, startByteIndex, endByteIndex);
    }

    // end < start must be rejected before any HTTP call is made.
    @Test
    public void getResultChunk_illegalInterval_IllegalArgumentException() throws Exception {
        //Arrange
        final AbbyyInput abbyyInput = mock(AbbyyInput.class);
        final String resultUrl = "resultUrl";
        final ExportFormat exportFormat = ExportFormat.XML;
        final int startByteIndex = 2;
        final int endByteIndex = startByteIndex - 1;

        //Assert
        this.exception.expect(IllegalArgumentException.class);

        //Act
        this.sut.getResultChunk(abbyyInput, resultUrl, exportFormat, startByteIndex, endByteIndex);
    }

    // Non-OK status while fetching a chunk must raise AbbyySdkException.
    @Test
    public void getResultChunk_httpClientCallIsNotOk_AbbyySdkException() throws Exception {
        //Arrange
        final String statusCode = "0";
        final String resultUrl = "resultUrl";
        final ExportFormat exportFormat = ExportFormat.XML;
        final int startByteIndex = 2;
        final int endByteIndex = startByteIndex + 1;

        final AbbyyInput abbyyInput = mock(AbbyyInput.class);
        when(abbyyInput.getLocationId()).thenReturn(LocationId.EU);
        when(abbyyInput.getSourceFile()).thenReturn(mock(Path.class));

        HttpClientRequest.Builder builderSpy = new HttpClientRequest.Builder();
        PowerMockito.spy(builderSpy);

        HttpClientResponse responseMock = mock(HttpClientResponse.class);
        when(responseMock.getReturnCode()).thenReturn(ReturnCodes.SUCCESS);
        when(responseMock.getStatusCode()).thenReturn(Short.parseShort(statusCode));

        PowerMockito.mockStatic(HttpClient.class);
        PowerMockito.when(HttpClient.class, "execute", any(HttpClientRequest.class)).thenReturn(responseMock);

        try {
            //Act
            this.sut.getResultChunk(abbyyInput, resultUrl, exportFormat, startByteIndex, endByteIndex);
            //Assert
            fail();
        } catch (AbbyySdkException ex) {
            assertEquals(statusCode, this.sut.getLastStatusCode());
        }
    }

    // 206 Partial Content chunk fetch (method continues beyond this chunk of the file).
    @Test
    public void getResultChunk_httpClientCallSucceeds_Success() throws Exception {
        //Arrange
        final String statusCode = "206";
        final String expectedReturnResult = "expected";
        final String resultUrl = "resultUrl";
        final ExportFormat exportFormat = ExportFormat.XML;
        final int startByteIndex = 2;
        final int endByteIndex = startByteIndex + 1;
final AbbyyInput abbyyInput = mock(AbbyyInput.class); when(abbyyInput.getLocationId()).thenReturn(LocationId.EU); when(abbyyInput.getSourceFile()).thenReturn(mock(Path.class)); HttpClientRequest.Builder builderSpy = new HttpClientRequest.Builder(); PowerMockito.spy(builderSpy); HttpClientResponse responseMock = mock(HttpClientResponse.class); when(responseMock.getReturnCode()).thenReturn(ReturnCodes.SUCCESS); when(responseMock.getStatusCode()).thenReturn(Short.parseShort(statusCode)); when(responseMock.getReturnResult()).thenReturn(expectedReturnResult); PowerMockito.mockStatic(HttpClient.class); PowerMockito.when(HttpClient.class, "execute", any(HttpClientRequest.class)).thenReturn(responseMock); //Act String returnResult = this.sut.getResultChunk(abbyyInput, resultUrl, exportFormat, startByteIndex, endByteIndex); //Assert assertEquals(expectedReturnResult, returnResult); assertEquals(statusCode, this.sut.getLastStatusCode()); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.iterate; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkPositionIndex; import java.io.IOException; import java.sql.SQLException; import java.util.Comparator; import java.util.List; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.phoenix.execute.DescVarLengthFastByteComparisons; import org.apache.phoenix.expression.Expression; import org.apache.phoenix.expression.OrderByExpression; import org.apache.phoenix.schema.SortOrder; import org.apache.phoenix.schema.tuple.Tuple; import org.apache.phoenix.util.ServerUtil; import org.apache.phoenix.util.SizedUtil; import com.google.common.base.Function; import com.google.common.collect.Collections2; import com.google.common.collect.Lists; import com.google.common.collect.Ordering; /** * Result scanner that sorts aggregated rows by columns specified in the ORDER BY clause. * <p> * Note that currently the sort is entirely done in memory. 
* * * @since 0.1 */ public class OrderedResultIterator implements PeekingResultIterator { /** A container that holds pointers to a {@link Result} and its sort keys. */ protected static class ResultEntry { protected final ImmutableBytesWritable[] sortKeys; protected final Tuple result; ResultEntry(ImmutableBytesWritable[] sortKeys, Tuple result) { this.sortKeys = sortKeys; this.result = result; } ImmutableBytesWritable getSortKey(int index) { checkPositionIndex(index, sortKeys.length); return sortKeys[index]; } Tuple getResult() { return result; } } /** A function that returns Nth key for a given {@link ResultEntry}. */ private static class NthKey implements Function<ResultEntry, ImmutableBytesWritable> { private final int index; NthKey(int index) { this.index = index; } @Override public ImmutableBytesWritable apply(ResultEntry entry) { return entry.getSortKey(index); } } /** Returns the expression of a given {@link OrderByExpression}. */ private static final Function<OrderByExpression, Expression> TO_EXPRESSION = new Function<OrderByExpression, Expression>() { @Override public Expression apply(OrderByExpression column) { return column.getExpression(); } }; private final int thresholdBytes; private final Integer limit; private final Integer offset; private final ResultIterator delegate; private final List<OrderByExpression> orderByExpressions; private final long estimatedByteSize; private PeekingResultIterator resultIterator; private long byteSize; protected ResultIterator getDelegate() { return delegate; } public OrderedResultIterator(ResultIterator delegate, List<OrderByExpression> orderByExpressions, int thresholdBytes, Integer limit, Integer offset) { this(delegate, orderByExpressions, thresholdBytes, limit, offset, 0); } public OrderedResultIterator(ResultIterator delegate, List<OrderByExpression> orderByExpressions, int thresholdBytes) throws SQLException { this(delegate, orderByExpressions, thresholdBytes, null, null); } public 
OrderedResultIterator(ResultIterator delegate, List<OrderByExpression> orderByExpressions, int thresholdBytes, Integer limit, Integer offset,int estimatedRowSize) { checkArgument(!orderByExpressions.isEmpty()); this.delegate = delegate; this.orderByExpressions = orderByExpressions; this.thresholdBytes = thresholdBytes; this.offset = offset == null ? 0 : offset; if (limit != null) { this.limit = limit + this.offset; } else { this.limit = null; } long estimatedEntrySize = // ResultEntry SizedUtil.OBJECT_SIZE + // ImmutableBytesWritable[] SizedUtil.ARRAY_SIZE + orderByExpressions.size() * SizedUtil.IMMUTABLE_BYTES_WRITABLE_SIZE + // Tuple SizedUtil.OBJECT_SIZE + estimatedRowSize; // Make sure we don't overflow Long, though this is really unlikely to happen. assert(limit == null || Long.MAX_VALUE / estimatedEntrySize >= limit + this.offset); this.estimatedByteSize = limit == null ? 0 : (limit + this.offset) * estimatedEntrySize; } public Integer getLimit() { return limit; } public long getEstimatedByteSize() { return estimatedByteSize; } public long getByteSize() { return byteSize; } /** * Builds a comparator from the list of columns in ORDER BY clause. * @param orderByExpressions the columns in ORDER BY clause. * @return the comparator built from the list of columns in ORDER BY clause. */ // ImmutableBytesWritable.Comparator doesn't implement generics @SuppressWarnings("unchecked") private static Comparator<ResultEntry> buildComparator(List<OrderByExpression> orderByExpressions) { Ordering<ResultEntry> ordering = null; int pos = 0; for (OrderByExpression col : orderByExpressions) { Expression e = col.getExpression(); Comparator<ImmutableBytesWritable> comparator = e.getSortOrder() == SortOrder.DESC && !e.getDataType().isFixedWidth() ? buildDescVarLengthComparator() : new ImmutableBytesWritable.Comparator(); Ordering<ImmutableBytesWritable> o = Ordering.from(comparator); if(!col.isAscending()) o = o.reverse(); o = col.isNullsLast() ? 
o.nullsLast() : o.nullsFirst(); Ordering<ResultEntry> entryOrdering = o.onResultOf(new NthKey(pos++)); ordering = ordering == null ? entryOrdering : ordering.compound(entryOrdering); } return ordering; } /* * Same as regular comparator, but if all the bytes match and the length is * different, returns the longer length as bigger. */ private static Comparator<ImmutableBytesWritable> buildDescVarLengthComparator() { return new Comparator<ImmutableBytesWritable>() { @Override public int compare(ImmutableBytesWritable o1, ImmutableBytesWritable o2) { return DescVarLengthFastByteComparisons.compareTo( o1.get(), o1.getOffset(), o1.getLength(), o2.get(), o2.getOffset(), o2.getLength()); } }; } @Override public Tuple next() throws SQLException { return getResultIterator().next(); } private PeekingResultIterator getResultIterator() throws SQLException { if (resultIterator != null) { return resultIterator; } final int numSortKeys = orderByExpressions.size(); List<Expression> expressions = Lists.newArrayList(Collections2.transform(orderByExpressions, TO_EXPRESSION)); final Comparator<ResultEntry> comparator = buildComparator(orderByExpressions); try{ final MappedByteBufferSortedQueue queueEntries = new MappedByteBufferSortedQueue(comparator, limit, thresholdBytes); resultIterator = new PeekingResultIterator() { int count = 0; @Override public Tuple next() throws SQLException { ResultEntry entry = queueEntries.poll(); while (entry != null && offset != null && count < offset) { count++; if (entry.getResult() == null) { return null; } entry = queueEntries.poll(); } if (entry == null || (limit != null && count++ > limit)) { resultIterator.close(); resultIterator = PeekingResultIterator.EMPTY_ITERATOR; return null; } return entry.getResult(); } @Override public Tuple peek() throws SQLException { ResultEntry entry = queueEntries.peek(); while (entry != null && offset != null && count < offset) { entry = queueEntries.poll(); count++; if (entry == null) { return null; } } if (limit 
!= null && count > limit) { return null; } entry = queueEntries.peek(); if (entry == null) { return null; } return entry.getResult(); } @Override public void explain(List<String> planSteps) { } @Override public void close() throws SQLException { queueEntries.close(); } }; for (Tuple result = delegate.next(); result != null; result = delegate.next()) { int pos = 0; ImmutableBytesWritable[] sortKeys = new ImmutableBytesWritable[numSortKeys]; for (Expression expression : expressions) { final ImmutableBytesWritable sortKey = new ImmutableBytesWritable(); boolean evaluated = expression.evaluate(result, sortKey); // set the sort key that failed to get evaluated with null sortKeys[pos++] = evaluated && sortKey.getLength() > 0 ? sortKey : null; } queueEntries.add(new ResultEntry(sortKeys, result)); } this.byteSize = queueEntries.getByteSize(); } catch (IOException e) { ServerUtil.createIOException(e.getMessage(), e); } finally { delegate.close(); } return resultIterator; } @Override public Tuple peek() throws SQLException { return getResultIterator().peek(); } @Override public void close() throws SQLException { // Guard against resultIterator being null if (null != resultIterator) { resultIterator.close(); } resultIterator = PeekingResultIterator.EMPTY_ITERATOR; } @Override public void explain(List<String> planSteps) { delegate.explain(planSteps); planSteps.add("CLIENT" + (offset == null || offset == 0 ? "" : " OFFSET " + offset) + (limit == null ? "" : " TOP " + limit + " ROW" + (limit == 1 ? "" : "S")) + " SORTED BY " + orderByExpressions.toString()); } @Override public String toString() { return "OrderedResultIterator [thresholdBytes=" + thresholdBytes + ", limit=" + limit + ", offset=" + offset + ", delegate=" + delegate + ", orderByExpressions=" + orderByExpressions + ", estimatedByteSize=" + estimatedByteSize + ", resultIterator=" + resultIterator + ", byteSize=" + byteSize + "]"; } }