gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package org.arrah.framework.profile; /*********************************************** * Copyright to Vivek Kumar Singh * * * * Any part of code or file can be changed, * * redistributed, modified with the copyright * * information intact * * * * Author$ : Vivek Singh * * * ***********************************************/ /* * This is Table which shows profiler data * for all tables and columns in Table structure * */ import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.SQLException; import java.util.Enumeration; import java.util.Vector; import org.arrah.framework.ndtable.ReportTableModel; import org.arrah.framework.rdbms.QueryBuilder; import org.arrah.framework.rdbms.Rdbms_conn; public class AllTableProfile { private ReportTableModel __rt = new ReportTableModel(new String[] { "Table", "Column", "Record", "Unique", "Pattern", "Null", "Zero", "Empty","Max","Min" }); // it is extended from Panel public AllTableProfile() { try { createRows(); } catch (SQLException e) { // Handle exception System.out.println("\n SQL exception in Data Info"); System.out.println(e.getMessage()); } } private void createRows() throws SQLException { Vector<String> vc = new Vector<String>(); // For table // Open the connection Rdbms_conn.openConn(); DatabaseMetaData dbmd = Rdbms_conn.getMetaData(); // Get the metaDataSet String s_pattern = Rdbms_conn.getHValue("Database_SchemaPattern"); String t_pattern = Rdbms_conn.getHValue("Database_TablePattern"); String t_type = Rdbms_conn.getHValue("Database_TableType"); String n_catalog = Rdbms_conn.getHValue("Database_Catalog"); n_catalog = ""; // Oracle ODBC does not support String d_dsn = Rdbms_conn.getHValue("Database_DSN"); // Add table names here // TODO - need to make it multi threaded // Get tables ResultSet rs = dbmd.getTables( n_catalog.compareTo("") == 0 ? n_catalog = null : n_catalog, s_pattern.compareTo("") == 0 ? s_pattern = null : s_pattern, t_pattern.compareTo("") == 0 ? 
t_pattern = null : t_pattern, t_type.split(",")); String tbl = ""; while (rs.next()) { tbl = rs.getString(3); vc.add(tbl); } // MS SQL Does not support multiple query rs.close(); // Add count to top String dbType = Rdbms_conn.getDBType(); ResultSet rs_col; Vector<String> vc_c; QueryBuilder c_prof; String all_c, dist_c, null_c, zero_c, empty_c, pattern_c,top_sel_query_c,bot_sel_query_c; String all_v, dist_v, null_v, zero_v, empty_v, pattern_v,top_sel_query_v,bot_sel_query_v; Enumeration<String> e = vc.elements(); while (e.hasMoreElements()) { tbl = (String) e.nextElement(); vc_c = new Vector<String>(); // For columns rs_col = dbmd.getColumns(n_catalog, s_pattern, tbl, null); while (rs_col.next()) { String tmp = rs_col.getString(4); vc_c.add(tmp); } rs_col.close(); // In loop for all the children of table node Enumeration<String> et = vc_c.elements(); while (et.hasMoreElements()) { all_v = "0"; dist_v = "0"; null_v = "0"; zero_v = "0"; empty_v = "0"; pattern_v = "0"; top_sel_query_v = ""; bot_sel_query_v = ""; String col = (String) et.nextElement(); c_prof = new QueryBuilder(d_dsn, tbl, col, dbType); all_c = c_prof.count_query_w(false, "row_count"); dist_c = c_prof.count_query_w(true, "row_count"); null_c = c_prof.get_nullCount_query_w("Null"); zero_c = c_prof.get_zeroCount_query_w("0"); empty_c = c_prof.get_zeroCount_query_w("''"); pattern_c = c_prof.get_pattern_query(); top_sel_query_c = c_prof.top_query(false,"top_count", "1"); bot_sel_query_c = c_prof.bottom_query(false,"bot_count", "1"); try { rs_col = Rdbms_conn.runQuery(all_c); while (rs_col.next()) { all_v = rs_col.getString("row_count"); } rs_col.close(); } catch (SQLException s_exp) { all_v = "N/A"; } try { rs_col = Rdbms_conn.runQuery(dist_c); while (rs_col.next()) { dist_v = rs_col.getString("row_count"); } rs_col.close(); } catch (SQLException s_exp) { dist_v = "N/A"; } try { rs_col = Rdbms_conn.runQuery(null_c); while (rs_col.next()) { null_v = rs_col.getString("equal_count"); } rs_col.close(); } catch 
(SQLException s_exp) { null_v = "N/A"; } try { rs_col = Rdbms_conn.runQuery(zero_c); while (rs_col.next()) { zero_v = rs_col.getString("equal_count"); } rs_col.close(); } catch (SQLException s_exp) { zero_v = "N/A"; } try { rs_col = Rdbms_conn.runQuery(empty_c); while (rs_col.next()) { empty_v = rs_col.getString("equal_count"); } rs_col.close(); } catch (SQLException s_exp) { empty_v = "N/A"; } try { rs_col = Rdbms_conn.runQuery(pattern_c); while (rs_col.next()) { pattern_v = rs_col.getString("row_count"); } rs_col.close(); } catch (SQLException s_exp) { pattern_v = "N/A"; } try { rs_col = Rdbms_conn.runQuery(top_sel_query_c); while (rs_col.next()) { top_sel_query_v = rs_col.getString("top_count"); } rs_col.close(); } catch (SQLException s_exp) { top_sel_query_v = "N/A"; } try { rs_col = Rdbms_conn.runQuery(bot_sel_query_c); while (rs_col.next()) { bot_sel_query_v = rs_col.getString("bot_count"); } rs_col.close(); } catch (SQLException s_exp) { bot_sel_query_v = "N/A"; } String[] row = { tbl, col, all_v, dist_v, pattern_v, null_v, zero_v, empty_v,top_sel_query_v,bot_sel_query_v}; __rt.addFillRow(row); } // Close column } // Close table Rdbms_conn.closeConn(); } public ReportTableModel getTable() { return __rt; } }
package facebook.models; import java.io.IOException; import java.util.Collection; import org.apache.http.ParseException; import facebook.utilities.FriendUtilities; import json.JSONException; import json.JSONObject; /** * * @author Ben Holland */ public class FacebookAccount { private long id = ((long)-1); private String name = ""; private String firstName = ""; private String lastName = ""; private String username = ""; private String link = ""; private String gender = ""; private String locale = ""; // lazy loading private boolean loaded = false; public FacebookAccount(long id) throws IOException { this.id = id; loadByID(id); loaded = true; } public FacebookAccount(long id, boolean lazyLoad) throws IOException { this.id = id; if(!lazyLoad){ loadByID(id); loaded = true; } } public FacebookAccount(String username) throws IOException { this.username = username; loadByUsername(username); loaded = true; } public FacebookAccount(String username, boolean lazyLoad) throws IOException { this.username = username; if(!lazyLoad){ loadByUsername(username); loaded = true; } } private void loadByID(long id) throws IOException{ String name = ""; String firstName = ""; String lastName = ""; String username = ""; String link = ""; String gender = ""; String locale = ""; String response = http.HttpInterface.get("https://graph.facebook.com/" + id); try { JSONObject json = new JSONObject(response); name = ((String)json.get("name")); firstName = ((String)json.get("first_name")); lastName = ((String)json.get("last_name")); gender = ((String)json.get("gender")); locale = ((String)json.get("locale")); // at this point we don't know if this account has a vanity name, // so make the assumption that it does, and if we break something // then default to the assumption that it doesn't try { // try to get the account vanity name username = ((String)json.get("username")); } catch (Exception ex){ // no vanity name, get account link username = ""; link = ((String)json.get("link")); } } 
catch(JSONException e){ throw new IllegalArgumentException("Could not retrieve account details, check identifier."); } this.id = id; this.name = name; this.firstName = firstName; this.lastName = lastName; this.gender = gender; this.locale = locale; this.username = username; this.link = link; } public void loadByUsername(String username) throws IOException { long id = 0; String name = ""; String firstName = ""; String lastName = ""; String gender = ""; String locale = ""; String link = ""; String response = http.HttpInterface.get("https://graph.facebook.com/" + username); try { JSONObject json = new JSONObject(response); id = Long.parseLong(((String)json.get("id"))); name = ((String)json.get("name")); firstName = ((String)json.get("first_name")); lastName = ((String)json.get("last_name")); gender = ((String)json.get("gender")); locale = ((String)json.get("locale")); username = ((String)json.get("username")); } catch(JSONException e){ throw new IllegalArgumentException("Could not retrieve account details, check identifier."); } this.username = username; this.id = id; this.name = name; this.firstName = firstName; this.lastName = lastName; this.gender = gender; this.locale = locale; this.link = link; } public void load() throws IOException { if(!loaded){ if(id == ((long)-1)){ loadByUsername(username); } else { loadByID(id); } } } public long getID() throws IOException { if(!loaded) load(); return id; } public String getName() throws IOException { if(!loaded) load(); return name; } public String getFirstName() throws IOException { if(!loaded) load(); return firstName; } public String getLastName() throws IOException { if(!loaded) load(); return lastName; } public String getUsername() throws IOException { if(!loaded) load(); return username; } public boolean hasVanityUsername() throws IOException { if(!loaded) load(); return username.equals(""); } public String getLink() throws IOException { if(!loaded) load(); return link.equals("") ? 
("http://www.facebook.com/" + username) : link; } public String getMobileLink() throws IOException { if(!loaded) load(); return link.equals("") ? ("http://m.facebook.com/" + username) : link.replace("http://www.facebook.com", "http://m.facebook.com"); } public String getGender() throws IOException { if(!loaded) load(); return gender; } public String getLocale() throws IOException { if(!loaded) load(); return locale; } public String getCurrentProfilePicture() throws IOException { if(!loaded) load(); return "http://graph.facebook.com/" + id + "/picture?type=normal"; } public String getCurrentProfilePicture(String type) throws IllegalArgumentException, IOException { if(!loaded) load(); // Supported types: small, normal, large, square if(type.equalsIgnoreCase("small") || type.equalsIgnoreCase("normal") || type.equalsIgnoreCase("large") || type.equalsIgnoreCase("square")){ return "http://graph.facebook.com/" + id + "/picture?type=" + type; } else { throw new IllegalArgumentException("Supported types: small, normal, large, square"); } } @Override public String toString(){ if(loaded){ try { return "[" + name + ", " + id + ", " + gender + ", " + getLink() + "]"; } catch (Exception e){ return "[" + name + ", " + id + ", " + gender + "]"; } } else { try { load(); try { return "[" + name + ", " + id + ", " + gender + ", " + getLink() + "]"; } catch (Exception e){ return "[" + name + ", " + id + ", " + gender + "]"; } } catch (Exception e){ if(id == ((long)-1)){ return "[" + username + "]"; } else { return "[" + id + "]"; } } } } public Collection<FacebookAccount> getFriends(AuthenticatedFacebookAccount account) throws ParseException, IOException { return FriendUtilities.getFriendsAccounts(getID(), account); } public Collection<FacebookAccount> getFriends(AuthenticatedFacebookAccount account, boolean logging) throws ParseException, IOException { System.out.println("Getting friends of " + getID()); return FriendUtilities.getFriendsAccounts(getID(), account, logging); } }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.enrich;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.OriginSettingClient;
import org.elasticsearch.cluster.LocalNodeMasterListener;
import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.LifecycleListener;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.xcontent.ObjectPath;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.threadpool.Scheduler;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Semaphore;

import static org.elasticsearch.xpack.core.ClientHelper.ENRICH_ORIGIN;

/**
 * Master-node-only background task that periodically deletes stale enrich
 * indices (indices under EnrichPolicy.ENRICH_INDEX_NAME_BASE + "*") whose
 * policy no longer exists or which are no longer published under the policy's
 * alias. Runs only while this node is master, and never concurrently with
 * itself (guarded by {@code maintenanceLock}) or with an in-flight policy
 * execution (checked via {@code enrichPolicyLocks}).
 */
public class EnrichPolicyMaintenanceService implements LocalNodeMasterListener {
    private static final Logger logger = LogManager.getLogger(EnrichPolicyMaintenanceService.class);

    // Dotted path into the index mapping's _meta where the owning policy name is recorded.
    private static final String MAPPING_POLICY_FIELD_PATH = "_meta." + EnrichPolicyRunner.ENRICH_POLICY_NAME_FIELD_NAME;
    private static final IndicesOptions IGNORE_UNAVAILABLE = IndicesOptions.fromOptions(true, false, false, false);

    private final Settings settings;
    private final Client client;
    private final ClusterService clusterService;
    private final ThreadPool threadPool;
    private final EnrichPolicyLocks enrichPolicyLocks;

    private volatile boolean isMaster = false;
    private volatile Scheduler.Cancellable cancellable;
    // One permit: at most one maintenance run in flight; released via concludeMaintenance().
    private final Semaphore maintenanceLock = new Semaphore(1);

    EnrichPolicyMaintenanceService(
        Settings settings,
        Client client,
        ClusterService clusterService,
        ThreadPool threadPool,
        EnrichPolicyLocks enrichPolicyLocks
    ) {
        this.settings = settings;
        // All requests issued under the enrich origin so security treats them as internal.
        this.client = new OriginSettingClient(client, ENRICH_ORIGIN);
        this.clusterService = clusterService;
        this.threadPool = threadPool;
        this.enrichPolicyLocks = enrichPolicyLocks;
    }

    /** Registers this service for master election callbacks. */
    void initialize() {
        clusterService.addLocalNodeMasterListener(this);
    }

    @Override
    public void onMaster() {
        // Only start a new schedule if one is not already running.
        if (cancellable == null || cancellable.isCancelled()) {
            isMaster = true;
            scheduleNext();
            // Ensure the scheduled task is cancelled when the node shuts down.
            clusterService.addLifecycleListener(new LifecycleListener() {
                @Override
                public void beforeStop() {
                    offMaster();
                }
            });
        }
    }

    @Override
    public void offMaster() {
        if (cancellable != null && cancellable.isCancelled() == false) {
            isMaster = false;
            cancellable.cancel();
        }
    }

    /** Schedules the next maintenance run after the configured cleanup period. */
    private void scheduleNext() {
        if (isMaster) {
            try {
                TimeValue waitTime = EnrichPlugin.ENRICH_CLEANUP_PERIOD.get(settings);
                cancellable = threadPool.schedule(this::execute, waitTime, ThreadPool.Names.GENERIC);
            } catch (EsRejectedExecutionException e) {
                // Benign during shutdown; anything else is unexpected and rethrown.
                if (e.isExecutorShutdown()) {
                    logger.debug("Failed to schedule next [enrich] maintenance task; Shutting down", e);
                } else {
                    throw e;
                }
            }
        } else {
            logger.debug("No longer master; Skipping next scheduled [enrich] maintenance task");
        }
    }

    private void execute() {
        logger.debug("Triggering scheduled [enrich] maintenance task");
        if (isMaster) {
            maybeCleanUpEnrichIndices();
            scheduleNext();
        } else {
            logger.debug("No longer master; Skipping next scheduled [enrich] maintenance task");
        }
    }

    /** Runs cleanup only when the previous run has finished (non-blocking acquire). */
    private void maybeCleanUpEnrichIndices() {
        if (maintenanceLock.tryAcquire()) {
            cleanUpEnrichIndices();
        } else {
            logger.debug("Previous [enrich] maintenance task still in progress; Skipping this execution");
        }
    }

    /** Releases the maintenance permit; must be called exactly once per acquired run. */
    void concludeMaintenance() {
        maintenanceLock.release();
    }

    /**
     * Collects all enrich indices and deletes those that are stale. Every code
     * path (success, failure, or skip) must end in concludeMaintenance().
     */
    void cleanUpEnrichIndices() {
        final Map<String, EnrichPolicy> policies = EnrichStore.getPolicies(clusterService.state());
        GetIndexRequest indices = new GetIndexRequest().indices(EnrichPolicy.ENRICH_INDEX_NAME_BASE + "*")
            .indicesOptions(IndicesOptions.lenientExpand());
        // Check that no enrich policies are being executed
        final EnrichPolicyLocks.EnrichPolicyExecutionState executionState = enrichPolicyLocks.captureExecutionState();
        if (executionState.isAnyPolicyInFlight() == false) {
            client.admin().indices().getIndex(indices, new ActionListener<>() {
                @Override
                public void onResponse(GetIndexResponse getIndexResponse) {
                    // Ensure that no enrich policy executions started while we were retrieving the snapshot of index data
                    // If executions were kicked off, we can't be sure that the indices we are about to process are a
                    // stable state of the system (they could be new indices created by a policy that hasn't been published yet).
                    if (enrichPolicyLocks.isSameState(executionState)) {
                        String[] removeIndices = Arrays.stream(getIndexResponse.getIndices())
                            .filter(indexName -> shouldRemoveIndex(getIndexResponse, policies, indexName))
                            .toArray(String[]::new);
                        // deleteIndices() takes over responsibility for concludeMaintenance().
                        deleteIndices(removeIndices);
                    } else {
                        logger.debug("Skipping enrich index cleanup since enrich policy was executed while gathering indices");
                        concludeMaintenance();
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    logger.error("Failed to get indices during enrich index maintenance task", e);
                    concludeMaintenance();
                }
            });
        } else {
            concludeMaintenance();
        }
    }

    /**
     * @return true when the index is stale: it names no existing policy, or it
     *         is not (or no longer) published under that policy's alias.
     */
    private boolean shouldRemoveIndex(GetIndexResponse getIndexResponse, Map<String, EnrichPolicy> policies, String indexName) {
        // Find the policy on the index
        logger.debug("Checking if should remove enrich index [{}]", indexName);
        MappingMetadata mappingMetadata = getIndexResponse.getMappings().get(indexName);
        Map<String, Object> mapping = mappingMetadata.getSourceAsMap();
        String policyName = ObjectPath.eval(MAPPING_POLICY_FIELD_PATH, mapping);
        // Check if index has a corresponding policy
        if (policyName == null || policies.containsKey(policyName) == false) {
            // No corresponding policy. Index should be marked for removal.
            logger.debug("Enrich index [{}] does not correspond to any existing policy. Found policy name [{}]", indexName, policyName);
            return true;
        }
        // Check if index is currently linked to an alias
        final String aliasName = EnrichPolicy.getBaseName(policyName);
        List<AliasMetadata> aliasMetadata = getIndexResponse.aliases().get(indexName);
        if (aliasMetadata == null) {
            logger.debug("Enrich index [{}] is not marked as a live index since it has no alias information", indexName);
            return true;
        }
        boolean hasAlias = aliasMetadata.stream().anyMatch((am -> am.getAlias().equals(aliasName)));
        // Index is not currently published to the enrich alias. Should be marked for removal.
        if (hasAlias == false) {
            logger.debug("Enrich index [{}] is not marked as a live index since it lacks the alias [{}]", indexName, aliasName);
            return true;
        }
        logger.debug(
            "Enrich index [{}] was spared since it is associated with the valid policy [{}] and references alias [{}]",
            indexName,
            policyName,
            aliasName
        );
        return false;
    }

    /**
     * Deletes the stale indices (ignoring ones already gone) and concludes the
     * maintenance run on both success and failure.
     */
    private void deleteIndices(String[] removeIndices) {
        if (removeIndices.length != 0) {
            DeleteIndexRequest deleteIndices = new DeleteIndexRequest().indices(removeIndices).indicesOptions(IGNORE_UNAVAILABLE);
            client.admin().indices().delete(deleteIndices, new ActionListener<>() {
                @Override
                public void onResponse(AcknowledgedResponse acknowledgedResponse) {
                    logger.debug("Completed deletion of stale enrich indices [{}]", () -> Arrays.toString(removeIndices));
                    concludeMaintenance();
                }

                @Override
                public void onFailure(Exception e) {
                    logger.error(
                        () -> "Enrich maintenance task could not delete abandoned enrich indices [" + Arrays.toString(removeIndices) + "]",
                        e
                    );
                    concludeMaintenance();
                }
            });
        } else {
            concludeMaintenance();
        }
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.idea.maven.utils;

import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.encoding.EncodingRegistry;
import com.intellij.psi.impl.source.parsing.xml.XmlBuilder;
import com.intellij.psi.impl.source.parsing.xml.XmlBuilderDriver;
import consulo.localize.LocalizeValue;
import org.jdom.Element;
import org.jdom.IllegalNameException;

import javax.annotation.Nullable;
import java.io.IOException;
import java.util.*;

/**
 * Lenient JDOM reading utilities for Maven pom-like XML: parses text into a
 * JDOM {@link Element} tree tolerating malformed input, plus dotted-path
 * helpers for navigating the result (e.g. "build.plugins").
 */
public class MavenJDOMUtil {
  /**
   * Loads and parses an XML virtual file.
   *
   * @param handler notified of read/syntax errors; may be null
   * @return the root element, or null when the file is invalid or unreadable
   */
  @Nullable
  public static Element read(final VirtualFile file, @Nullable final ErrorHandler handler) {
    String text;
    if(!file.isValid()) {
      return null;
    }

    try {
      // Load the file text inside a read action, as required by the platform.
      text = ReadAction.compute(() -> VfsUtil.loadText(file));
    }
    catch(IOException e) {
      if(handler != null) {
        handler.onReadError(e);
      }
      return null;
    }

    return doRead(text, handler);
  }

  /** Parses raw bytes decoded with the IDE's default charset. */
  @Nullable
  public static Element read(byte[] bytes, @Nullable ErrorHandler handler) {
    return doRead(CharsetToolkit.bytesToString(bytes, EncodingRegistry.getInstance().getDefaultCharset()), handler);
  }

  /**
   * Drives a SAX-like {@link XmlBuilder} over the text and builds a JDOM tree.
   * Maintains a stack of currently-open elements; the first element opened
   * becomes the root. Invalid tag names are replaced with "invalidName" rather
   * than failing the whole parse.
   */
  @Nullable
  private static Element doRead(String text, final ErrorHandler handler) {
    final LinkedList<Element> stack = new LinkedList<Element>();

    final Element[] result = {null};
    XmlBuilderDriver driver = new XmlBuilderDriver(text);
    XmlBuilder builder = new XmlBuilder() {
      public void doctype(@Nullable CharSequence publicId, @Nullable CharSequence systemId, int startOffset, int endOffset) {
      }

      public ProcessingOrder startTag(CharSequence localName, String namespace, int startoffset, int endoffset, int headerEndOffset) {
        String name = localName.toString();
        if(StringUtil.isEmptyOrSpaces(name)) {
          // Skip the contents of nameless tags, but keep scanning for tags.
          return ProcessingOrder.TAGS;
        }

        Element newElement;
        try {
          newElement = new Element(name);
        }
        catch(IllegalNameException e) {
          // Tolerate illegal XML names instead of aborting the parse.
          newElement = new Element("invalidName");
        }

        Element parent = stack.isEmpty() ? null : stack.getLast();
        if(parent == null) {
          result[0] = newElement;
        }
        else {
          parent.addContent(newElement);
        }
        stack.addLast(newElement);

        return ProcessingOrder.TAGS_AND_TEXTS;
      }

      public void endTag(CharSequence localName, String namespace, int startoffset, int endoffset) {
        String name = localName.toString();
        if(StringUtil.isEmptyOrSpaces(name)) {
          return;
        }

        // Pop back to the nearest open element with this name; this recovers
        // from unclosed intermediate tags in malformed documents.
        for(Iterator<Element> itr = stack.descendingIterator(); itr.hasNext(); ) {
          Element element = itr.next();
          if(element.getName().equals(name)) {
            while(stack.removeLast() != element) {
            }
            break;
          }
        }
      }

      public void textElement(CharSequence text, CharSequence physical, int startoffset, int endoffset) {
        // NOTE(review): assumes the driver only reports text inside an open
        // tag; getLast() would throw on text before the root — confirm driver contract.
        stack.getLast().addContent(JDOMUtil.legalizeText(text.toString()));
      }

      public void attribute(CharSequence name, CharSequence value, int startoffset, int endoffset) {
      }

      public void entityRef(CharSequence ref, int startOffset, int endOffset) {
      }

      public void error(LocalizeValue message, int startOffset, int endOffset) {
        // The message/offsets are deliberately dropped; the handler only
        // learns that a syntax error occurred.
        if(handler != null) {
          handler.onSyntaxError();
        }
      }
    };

    driver.build(builder);
    return result[0];
  }

  /**
   * Walks a dot-separated path of child names from element.
   *
   * @return the child at the end of the path, or null when any segment is missing
   */
  @Nullable
  public static Element findChildByPath(@Nullable Element element, String path) {
    int i = 0;
    while(element != null) {
      int dot = path.indexOf('.', i);
      if(dot == -1) {
        return element.getChild(path.substring(i));
      }

      element = element.getChild(path.substring(i, dot));
      i = dot + 1;
    }

    return null;
  }

  /** @return the trimmed text of the child at path, or defaultValue when absent/empty. */
  public static String findChildValueByPath(@Nullable Element element, String path, String defaultValue) {
    Element child = findChildByPath(element, path);
    if(child == null) {
      return defaultValue;
    }
    String childValue = child.getTextTrim();
    return childValue.isEmpty() ? defaultValue : childValue;
  }

  /** @return the trimmed text of the child at path, or null when absent/empty. */
  public static String findChildValueByPath(@Nullable Element element, String path) {
    return findChildValueByPath(element, path, null);
  }

  /** @return true when a child exists at the dotted path. */
  public static boolean hasChildByPath(@Nullable Element element, String path) {
    return findChildByPath(element, path) != null;
  }

  /** @return the children reached by subPath under the element found at path. */
  public static List<Element> findChildrenByPath(@Nullable Element element, String path, String subPath) {
    return collectChildren(findChildByPath(element, path), subPath);
  }

  /** @return the non-empty trimmed text values of the matching children. */
  public static List<String> findChildrenValuesByPath(@Nullable Element element, String path, String childrenName) {
    List<String> result = new ArrayList<String>();
    for(Element each : findChildrenByPath(element, path, childrenName)) {
      String value = each.getTextTrim();
      if(!value.isEmpty()) {
        result.add(value);
      }
    }
    return result;
  }

  /**
   * Resolves subPath under container: the first segment selects (possibly
   * many) children, the remainder is followed within each of them.
   */
  private static List<Element> collectChildren(@Nullable Element container, String subPath) {
    if(container == null) {
      return Collections.emptyList();
    }

    int firstDot = subPath.indexOf('.');

    if(firstDot == -1) {
      //noinspection unchecked
      return (List<Element>) container.getChildren(subPath);
    }

    String childName = subPath.substring(0, firstDot);
    String pathInChild = subPath.substring(firstDot + 1);

    List<Element> result = new ArrayList<Element>();

    //noinspection unchecked
    for(Element each : (Iterable<? extends Element>) container.getChildren(childName)) {
      Element child = findChildByPath(each, pathInChild);
      if(child != null) {
        result.add(child);
      }
    }
    return result;
  }

  /** Callback for read and parse failures; implementations decide how to report them. */
  public interface ErrorHandler {
    void onReadError(IOException e);

    void onSyntaxError();
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.cli;

import com.google.common.collect.ImmutableList;
import io.trino.client.ClientTypeSignature;
import io.trino.client.Column;
import org.testng.annotations.Test;

import java.io.StringWriter;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import static io.trino.client.ClientStandardTypes.ARRAY;
import static io.trino.client.ClientStandardTypes.BIGINT;
import static io.trino.client.ClientStandardTypes.MAP;
import static io.trino.client.ClientStandardTypes.VARBINARY;
import static io.trino.client.ClientStandardTypes.VARCHAR;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Arrays.asList;
import static java.util.stream.Collectors.toMap;
import static org.testng.Assert.assertEquals;

/**
 * Tests for {@code AlignedTablePrinter}: column alignment, NULL rendering,
 * multi-line cell wrapping (continuation marked with '+'), hex rendering of
 * varbinary values, and wide (CJK) character width handling.
 *
 * NOTE(review): runs of spaces inside the expected-output string literals
 * appear to have been collapsed by whitespace mangling in this copy of the
 * file (header widths no longer match their separator lines) — verify the
 * literals against the printer's actual output before relying on them.
 */
public class TestAlignedTablePrinter {
    /** Basic alignment, NULLs, and multi-line cell wrapping across several rows. */
    @Test
    public void testAlignedPrinting()
            throws Exception
    {
        List<Column> columns = ImmutableList.<Column>builder()
                .add(column("first", VARCHAR))
                .add(column("last", VARCHAR))
                .add(column("quantity", BIGINT))
                .build();
        StringWriter writer = new StringWriter();
        OutputPrinter printer = new AlignedTablePrinter(columns, writer);

        printer.printRows(rows(
                row("hello", "world", 123),
                row("a", null, 4.5),
                row("b", null, null),
                row("some long\ntext that\ndoes not\nfit on\none line", "more\ntext", 4567),
                row("bye", "done", -15)), true);
        printer.finish();

        String expected = "" +
                " first | last | quantity \n" +
                "-----------+-------+----------\n" +
                " hello | world | 123 \n" +
                " a | NULL | 4.5 \n" +
                " b | NULL | NULL \n" +
                " some long+| more +| 4567 \n" +
                " text that+| text | \n" +
                " does not +| | \n" +
                " fit on +| | \n" +
                " one line | | \n" +
                " bye | done | -15 \n" +
                "(5 rows)\n";

        assertEquals(writer.getBuffer().toString(), expected);
    }

    /** Varbinary inside an array is rendered as space-separated hex. */
    @Test
    public void testHexPrintingInLists()
            throws Exception
    {
        List<Column> columns = ImmutableList.<Column>builder()
                .add(column("list", ARRAY))
                .build();
        StringWriter writer = new StringWriter();
        OutputPrinter printer = new AlignedTablePrinter(columns, writer);

        byte[] value = "hello".getBytes(UTF_8);

        printer.printRows(rows(row(list(value))), true);
        printer.finish();

        String expected = "" +
                " list \n" +
                "------------------\n" +
                " [68 65 6c 6c 6f] \n" +
                "(1 row)\n";

        assertEquals(writer.getBuffer().toString(), expected);
    }

    /** Varbinary as map values is rendered as hex. */
    @Test
    public void testHexPrintingInMaps()
            throws Exception
    {
        List<Column> columns = ImmutableList.<Column>builder()
                .add(column("map", MAP))
                .build();
        StringWriter writer = new StringWriter();
        OutputPrinter printer = new AlignedTablePrinter(columns, writer);

        byte[] value = "hello".getBytes(UTF_8);

        printer.printRows(rows(row(map(item("key", value), item("key2", value)))), true);
        printer.finish();

        String expected = "" +
                " map \n" +
                "-------------------------------------------\n" +
                " {key2=68 65 6c 6c 6f, key=68 65 6c 6c 6f} \n" +
                "(1 row)\n";

        assertEquals(writer.getBuffer().toString(), expected);
    }

    /** Varbinary as a map key is rendered as hex. */
    @Test
    public void testHexPrintingInMapKeys()
            throws Exception
    {
        List<Column> columns = ImmutableList.<Column>builder()
                .add(column("map", MAP))
                .build();
        StringWriter writer = new StringWriter();
        OutputPrinter printer = new AlignedTablePrinter(columns, writer);

        byte[] value = "hello".getBytes(UTF_8);

        printer.printRows(rows(row(map(item(value, "world")))), true);
        printer.finish();

        String expected = "" +
                " map \n" +
                "------------------------\n" +
                " {68 65 6c 6c 6f=world} \n" +
                "(1 row)\n";

        assertEquals(writer.getBuffer().toString(), expected);
    }

    /** Hex rendering inside nested maps/lists, including NULL list elements. */
    @Test
    public void testHexPrintingInNestedStructures()
            throws Exception
    {
        List<Column> columns = ImmutableList.<Column>builder()
                .add(column("map", MAP))
                .build();
        StringWriter writer = new StringWriter();
        OutputPrinter printer = new AlignedTablePrinter(columns, writer);

        byte[] value = "hello".getBytes(UTF_8);

        printer.printRows(rows(row(map(item("key", list(value, null)), item("key2", map(item("nested", value)))))), true);
        printer.finish();

        String expected = "" +
                " map \n" +
                "------------------------------------------------------------\n" +
                " {key2={nested=68 65 6c 6c 6f}, key=[68 65 6c 6c 6f, NULL]} \n" +
                "(1 row)\n";

        assertEquals(writer.getBuffer().toString(), expected);
    }

    /** A single row with an embedded newline wraps without a '+' continuation marker. */
    @Test
    public void testAlignedPrintingOneRow()
            throws Exception
    {
        List<Column> columns = ImmutableList.<Column>builder()
                .add(column("first", VARCHAR))
                .add(column("last", VARCHAR))
                .build();
        StringWriter writer = new StringWriter();
        OutputPrinter printer = new AlignedTablePrinter(columns, writer);

        printer.printRows(rows(row("a long line\nwithout wrapping", "text")), true);
        printer.finish();

        String expected = "" +
                " first | last \n" +
                "------------------+------\n" +
                " a long line | text \n" +
                " without wrapping | \n" +
                "(1 row)\n";

        assertEquals(writer.getBuffer().toString(), expected);
    }

    /** With no rows only the header and "(0 rows)" trailer are printed. */
    @Test
    public void testAlignedPrintingNoRows()
            throws Exception
    {
        List<Column> columns = ImmutableList.<Column>builder()
                .add(column("first", VARCHAR))
                .add(column("last", VARCHAR))
                .build();
        StringWriter writer = new StringWriter();
        OutputPrinter printer = new AlignedTablePrinter(columns, writer);

        printer.finish();

        String expected = "" +
                " first | last \n" +
                "-------+------\n" +
                "(0 rows)\n";

        assertEquals(writer.getBuffer().toString(), expected);
    }

    /** Top-level varbinary columns: long hex wraps with '+', empty bytes print blank. */
    @Test
    public void testAlignedPrintingHex()
            throws Exception
    {
        List<Column> columns = ImmutableList.<Column>builder()
                .add(column("first", VARCHAR))
                .add(column("binary", VARBINARY))
                .add(column("last", VARCHAR))
                .build();
        StringWriter writer = new StringWriter();
        OutputPrinter printer = new AlignedTablePrinter(columns, writer);

        printer.printRows(rows(
                row("hello", bytes("hello"), "world"),
                row("a", bytes("some long text that is more than 16 bytes"), "b"),
                row("cat", bytes(""), "dog")), true);
        printer.finish();

        String expected = "" +
                " first | binary | last \n" +
                "-------+-------------------------------------------------+-------\n" +
                " hello | 68 65 6c 6c 6f | world \n" +
                " a | 73 6f 6d 65 20 6c 6f 6e 67 20 74 65 78 74 20 74+| b \n" +
                " | 68 61 74 20 69 73 20 6d 6f 72 65 20 74 68 61 6e+| \n" +
                " | 20 31 36 20 62 79 74 65 73 | \n" +
                " cat | | dog \n" +
                "(3 rows)\n";

        assertEquals(writer.getBuffer().toString(), expected);
    }

    /** CJK (double-width) characters in headers and cells keep columns aligned. */
    @Test
    public void testAlignedPrintingWideCharacters()
            throws Exception
    {
        List<Column> columns = ImmutableList.<Column>builder()
                .add(column("go\u7f51", VARCHAR))
                .add(column("last", VARCHAR))
                .add(column("quantity\u7f51", BIGINT))
                .build();
        StringWriter writer = new StringWriter();
        OutputPrinter printer = new AlignedTablePrinter(columns, writer);

        printer.printRows(rows(
                row("hello", "wide\u7f51", 123),
                row("some long\ntext \u7f51\ndoes not\u7f51\nfit", "more\ntext", 4567),
                row("bye", "done", -15)), true);
        printer.finish();

        String expected = "" +
                " go\u7f51 | last | quantity\u7f51 \n" +
                "------------+--------+------------\n" +
                " hello | wide\u7f51 | 123 \n" +
                " some long +| more +| 4567 \n" +
                " text \u7f51 +| text | \n" +
                " does not\u7f51+| | \n" +
                " fit | | \n" +
                " bye | done | -15 \n" +
                "(3 rows)\n";

        assertEquals(writer.getBuffer().toString(), expected);
    }

    // Builds a Column whose type signature matches its type name.
    static Column column(String name, String type)
    {
        return new Column(name, type, new ClientTypeSignature(type));
    }

    // A row is simply a list of cell values (nulls allowed).
    static List<?> row(Object... values)
    {
        return asList(values);
    }

    // Builds a map from key/value pairs; relies on toMap's iteration order for output.
    static Map<?, ?> map(KeyValue... values)
    {
        return Arrays.stream(values).collect(toMap(KeyValue::getKey, KeyValue::getValue));
    }

    static KeyValue item(Object key, Object value)
    {
        return new KeyValue(key, value);
    }

    static List<List<?>> rows(List<?>... rows)
    {
        return asList(rows);
    }

    static List<?> list(Object... objects)
    {
        return asList(objects);
    }

    static byte[] bytes(String s)
    {
        return s.getBytes(UTF_8);
    }

    // Simple key/value pair used to build test maps with arbitrary key types.
    static class KeyValue
    {
        private final Object key;
        private final Object value;

        KeyValue(Object key, Object value)
        {
            this.key = key;
            this.value = value;
        }

        public Object getKey()
        {
            return key;
        }

        public Object getValue()
        {
            return value;
        }
    }
}
package org.spongycastle.math.ec.custom.sec;

import java.math.BigInteger;

import org.spongycastle.math.raw.Nat;
import org.spongycastle.math.raw.Nat128;
import org.spongycastle.math.raw.Nat256;

/**
 * Field arithmetic modulo the SECG secp128r1 prime P = 2^128 - 2^97 - 1.
 *
 * <p>Field elements are little-endian arrays of four 32-bit words. "Ext"
 * values are the eight-word (256-bit) double-precision intermediates produced
 * by {@link Nat128#createExt()} / multiplication, reduced modulo P^2 (PExt).
 * All results are kept in canonical (fully reduced) form.
 */
public class SecP128R1Field
{
    private static final long M = 0xFFFFFFFFL;

    // 2^128 - 2^97 - 1
    static final int[] P = new int[]{ 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFD };
    // P^2: the modulus used for double-precision (ext) values
    static final int[] PExt = new int[]{ 0x00000001, 0x00000000, 0x00000000, 0x00000004,
        0xFFFFFFFE, 0xFFFFFFFF, 0x00000003, 0xFFFFFFFC };
    // 2^256 - PExt: added/subtracted in place of a full PExt reduction
    private static final int[] PExtInv = new int[]{ 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFB,
        0x00000001, 0x00000000, 0xFFFFFFFC, 0x00000003 };
    // Top words used as a cheap pre-test before the full gte() comparison
    private static final int P3 = 0xFFFFFFFD;
    private static final int PExt7 = 0xFFFFFFFC;

    /** z = (x + y) mod P. */
    public static void add(int[] x, int[] y, int[] z)
    {
        int c = Nat128.add(x, y, z);
        if (c != 0 || (z[3] == P3 && Nat128.gte(z, P)))
        {
            // Subtract P by adding its 2^128-complement (2^97 + 1)
            addPInvTo(z);
        }
    }

    /** zz = (xx + yy) mod PExt, on 8-word ext values. */
    public static void addExt(int[] xx, int[] yy, int[] zz)
    {
        int c = Nat256.add(xx, yy, zz);
        if (c != 0 || (zz[7] == PExt7 && Nat256.gte(zz, PExt)))
        {
            Nat.addTo(PExtInv.length, PExtInv, zz);
        }
    }

    /** z = (x + 1) mod P. */
    public static void addOne(int[] x, int[] z)
    {
        int c = Nat.inc(4, x, z);
        if (c != 0 || (z[3] == P3 && Nat128.gte(z, P)))
        {
            addPInvTo(z);
        }
    }

    /**
     * Converts x (assumed in [0, 2^128)) to a canonical 4-word field element,
     * subtracting P once if needed.
     */
    public static int[] fromBigInteger(BigInteger x)
    {
        int[] z = Nat128.fromBigInteger(x);
        if (z[3] == P3 && Nat128.gte(z, P))
        {
            Nat128.subFrom(P, z);
        }
        return z;
    }

    /** z = x / 2 mod P (adds P first when x is odd so the shift is exact). */
    public static void half(int[] x, int[] z)
    {
        if ((x[0] & 1) == 0)
        {
            Nat.shiftDownBit(4, x, 0, z);
        }
        else
        {
            int c = Nat128.add(x, P, z);
            Nat.shiftDownBit(4, z, c);
        }
    }

    /** z = (x * y) mod P. */
    public static void multiply(int[] x, int[] y, int[] z)
    {
        int[] tt = Nat128.createExt();
        Nat128.mul(x, y, tt);
        reduce(tt, z);
    }

    /** zz = (zz + x * y) mod PExt, accumulating into an 8-word ext value. */
    public static void multiplyAddToExt(int[] x, int[] y, int[] zz)
    {
        int c = Nat128.mulAddTo(x, y, zz);
        if (c != 0 || (zz[7] == PExt7 && Nat256.gte(zz, PExt)))
        {
            Nat.addTo(PExtInv.length, PExtInv, zz);
        }
    }

    /** z = -x mod P (zero maps to zero). */
    public static void negate(int[] x, int[] z)
    {
        if (Nat128.isZero(x))
        {
            Nat128.zero(z);
        }
        else
        {
            Nat128.sub(P, x, z);
        }
    }

    /**
     * Reduces the 8-word ext value xx modulo P into the 4-word z.
     *
     * <p>Uses 2^128 == 2^97 + 1 (mod P): each high word x[i+4] (weight 2^(128+32i))
     * folds in as x[i+4] at word i plus 2*x[i+4] at word i+3 (2^97 = 2 * 2^96).
     * The folds are applied high-to-low so carries accumulate in the longs,
     * then a single carry propagation and a final reduce32 finish the job.
     */
    public static void reduce(int[] xx, int[] z)
    {
        long x0 = xx[0] & M, x1 = xx[1] & M, x2 = xx[2] & M, x3 = xx[3] & M;
        long x4 = xx[4] & M, x5 = xx[5] & M, x6 = xx[6] & M, x7 = xx[7] & M;

        x3 += x7; x6 += (x7 << 1);
        x2 += x6; x5 += (x6 << 1);
        x1 += x5; x4 += (x5 << 1);
        x0 += x4; x3 += (x4 << 1);

        z[0] = (int)x0; x1 += (x0 >>> 32);
        z[1] = (int)x1; x2 += (x1 >>> 32);
        z[2] = (int)x2; x3 += (x2 >>> 32);
        z[3] = (int)x3;

        reduce32((int)(x3 >>> 32), z);
    }

    /**
     * Folds a single overflow word x (weight 2^128) into z modulo P:
     * adds x at word 0 and 2x at word 3, looping in case the add itself
     * overflows 2^128 again.
     */
    public static void reduce32(int x, int[] z)
    {
        while (x != 0)
        {
            long c, x4 = x & M;

            c = (z[0] & M) + x4;
            z[0] = (int)c; c >>= 32;
            if (c != 0)
            {
                c += (z[1] & M);
                z[1] = (int)c; c >>= 32;
                c += (z[2] & M);
                z[2] = (int)c; c >>= 32;
            }
            c += (z[3] & M) + (x4 << 1);
            z[3] = (int)c; c >>= 32;

            // assert c >= 0 && c <= 2;
            x = (int)c;
        }
    }

    /** z = x^2 mod P. */
    public static void square(int[] x, int[] z)
    {
        int[] tt = Nat128.createExt();
        Nat128.square(x, tt);
        reduce(tt, z);
    }

    /** z = x^(2^n) mod P, by n repeated squarings (n > 0). */
    public static void squareN(int[] x, int n, int[] z)
    {
        // assert n > 0;
        int[] tt = Nat128.createExt();
        Nat128.square(x, tt);
        reduce(tt, z);

        while (--n > 0)
        {
            Nat128.square(z, tt);
            reduce(tt, z);
        }
    }

    /** z = (x - y) mod P. */
    public static void subtract(int[] x, int[] y, int[] z)
    {
        int c = Nat128.sub(x, y, z);
        if (c != 0)
        {
            // Underflow: add back P by subtracting its complement (2^97 + 1)
            subPInvFrom(z);
        }
    }

    /** zz = (xx - yy) mod PExt, on 8-word ext values. */
    public static void subtractExt(int[] xx, int[] yy, int[] zz)
    {
        // BUGFIX: was Nat.sub(10, xx, yy, zz), which indexes words 8-9 of the
        // 8-word ext arrays produced by Nat128.createExt() and throws
        // ArrayIndexOutOfBoundsException. Ext values here are 256 bits; use
        // the 8-word subtraction, matching addExt's use of Nat256.add.
        int c = Nat256.sub(xx, yy, zz);
        if (c != 0)
        {
            Nat.subFrom(PExtInv.length, PExtInv, zz);
        }
    }

    /** z = 2x mod P. */
    public static void twice(int[] x, int[] z)
    {
        int c = Nat.shiftUpBit(4, x, 0, z);
        if (c != 0 || (z[3] == P3 && Nat128.gte(z, P)))
        {
            addPInvTo(z);
        }
    }

    /**
     * Adds 2^128 - P = 2^97 + 1 to z in place: +1 at word 0, +2 at word 3
     * (2 * 2^96 = 2^97), with carry propagation through the middle words
     * only when needed.
     */
    private static void addPInvTo(int[] z)
    {
        long c = (z[0] & M) + 1;
        z[0] = (int)c; c >>= 32;
        if (c != 0)
        {
            c += (z[1] & M);
            z[1] = (int)c; c >>= 32;
            c += (z[2] & M);
            z[2] = (int)c; c >>= 32;
        }
        c += (z[3] & M) + 2;
        z[3] = (int)c;
    }

    /** Subtracts 2^128 - P = 2^97 + 1 from z in place (mirror of addPInvTo). */
    private static void subPInvFrom(int[] z)
    {
        long c = (z[0] & M) - 1;
        z[0] = (int)c; c >>= 32;
        if (c != 0)
        {
            c += (z[1] & M);
            z[1] = (int)c; c >>= 32;
            c += (z[2] & M);
            z[2] = (int)c; c >>= 32;
        }
        c += (z[3] & M) - 2;
        z[3] = (int)c;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.process.traversal.step.map; import org.apache.tinkerpop.gremlin.LoadGraphWith; import org.apache.tinkerpop.gremlin.process.AbstractGremlinProcessTest; import org.apache.tinkerpop.gremlin.process.GremlinProcessRunner; import org.apache.tinkerpop.gremlin.process.traversal.P; import org.apache.tinkerpop.gremlin.process.traversal.Traversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; import org.apache.tinkerpop.gremlin.process.traversal.step.util.MapHelper; import org.apache.tinkerpop.gremlin.process.traversal.strategy.finalization.MatchAlgorithmStrategy; import org.apache.tinkerpop.gremlin.structure.T; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import static org.apache.tinkerpop.gremlin.LoadGraphWith.GraphData.GRATEFUL; import static org.apache.tinkerpop.gremlin.LoadGraphWith.GraphData.MODERN; import static org.apache.tinkerpop.gremlin.process.traversal.Order.decr; import 
static org.apache.tinkerpop.gremlin.process.traversal.P.eq; import static org.apache.tinkerpop.gremlin.process.traversal.P.neq; import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.and; import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.as; import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.inE; import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.match; import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.not; import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.or; import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.out; import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.repeat; import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.values; import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.where; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * @author Joshua Shinavier (http://fortytwo.net) * @author Marko A. 
Rodriguez (http://markorodriguez.com) */ @RunWith(GremlinProcessRunner.class) public abstract class MatchTest extends AbstractGremlinProcessTest { public abstract Traversal<Vertex, Map<String, Object>> get_g_V_valueMap_matchXa_selectXnameX_bX(); // very basic query public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_out_bX(); // query with selection public abstract Traversal<Vertex, Object> get_g_V_matchXa_out_bX_selectXb_idX(); // linked traversals public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_knows_b__b_created_cX(); // a basic tree with two leaves public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_knows_b__a_created_cX(); // a tree with three leaves public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXd_0knows_a__d_hasXname_vadasX__a_knows_b__b_created_cX(); public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_created_b__a_repeatXoutX_timesX2XX_selectXa_bX(); // illustrates early deduplication in "predicate" traversals public abstract Traversal<Vertex, Map<String, String>> get_g_V_matchXa_created_lop_b__b_0created_29_c__c_whereXrepeatXoutX_timesX2XXX(); public abstract Traversal<Vertex, Map<String, String>> get_g_V_matchXa_created_lop_b__b_0created_29_cX_whereXc_repeatXoutX_timesX2XX_selectXa_b_cX(); public abstract Traversal<Vertex, String> get_g_V_out_out_matchXa_0created_b__b_0knows_cX_selectXcX_outXcreatedX_name(); //TODO: with Traversal.reverse() public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_created_b__b_0created_aX(); // contains an unreachable label public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_knows_b__c_knows_bX(); // nested match() public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_knows_b__b_created_lop__b_matchXb_created_d__d_0created_cX_selectXcX_cX_selectXa_b_cX(); // contains a pair of traversals which connect the same labels, together with a predicate traversal public abstract 
Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_hasXname_GarciaX__a_0writtenBy_b__a_0sungBy_bX(); // contains an identical pair of sets of traversals, up to variable names and has() conditions public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_0sungBy_b__a_0sungBy_c__b_writtenBy_d__c_writtenBy_e__d_hasXname_George_HarisonX__e_hasXname_Bob_MarleyXX(); // forms a non-trivial DAG public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_0sungBy_b__a_0writtenBy_c__b_writtenBy_d__c_sungBy_d__d_hasXname_GarciaXX(); public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_0sungBy_b__a_0writtenBy_c__b_writtenBy_dX_whereXc_sungBy_dX_whereXd_hasXname_GarciaXX(); // inclusion of where public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_created_b__b_0created_cX_whereXa_neq_cX_selectXa_cX(); //TODO: with Traversal.reverse() public abstract Traversal<Vertex, Map<String, String>> get_g_V_matchXa_created_b__c_created_bX_selectXa_b_cX_byXnameX(); public abstract Traversal<Vertex, String> get_g_V_out_asXcX_matchXb_knows_a__c_created_eX_selectXcX(); // nested or/and with patterns in order that won't execute serially public abstract Traversal<Vertex, Map<String, Object>> get_g_V_matchXa_whereXa_neqXcXX__a_created_b__orXa_knows_vadas__a_0knows_and_a_hasXlabel_personXX__b_0created_c__b_0created_count_isXgtX1XXX_selectXa_b_cX_byXidX(); // uses local barrier count() and no start key public abstract Traversal<Vertex, Map<String, Object>> get_g_V_asXaX_out_asXbX_matchXa_out_count_c__b_in_count_cX(); // pulls out has container for index lookup and uses an where() with startKey and predicate public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_hasXname_GarciaX__a_0writtenBy_b__b_followedBy_c__c_writtenBy_d__whereXd_neqXaXXX(); // nested and with oddly dependent end steps public abstract Traversal<Vertex, Map<String, Object>> 
get_g_V_matchXa_knows_b__andXa_created_c__b_created_c__andXb_created_count_d__a_knows_count_dXXX(); // nested or with infix and and variable dependencies at different depths public abstract Traversal<Vertex, Map<String, Object>> get_g_V_asXaX_out_asXbX_matchXa_out_count_c__orXa_knows_b__b_in_count_c__and__c_isXgtX2XXXX(); // uses a not traversal pattern public abstract Traversal<Vertex, Map<String, Object>> get_g_V_matchXa__a_out_b__notXa_created_bXX(); // uses 'out of order' conjunction nested where() public abstract Traversal<Vertex, Map<String, Object>> get_g_V_matchXwhereXandXa_created_b__b_0created_count_isXeqX3XXXX__a_both_b__whereXb_inXX(); // distinct values public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa__a_both_b__b_both_cX_dedupXa_bX(); // distinct values with by()-modulation public abstract Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_both_b__b_both_cX_dedupXa_bX_byXlabelX(); public abstract Traversal<Vertex, Long> get_g_V_hasLabelXsongsX_matchXa_name_b__a_performances_cX_selectXb_cX_count(); // reducing barrier on lazy standard shouldn't yield an empty barrier public abstract Traversal<Vertex, Long> get_g_V_matchXa_knows_count_bX_selectXbX(); // verifying keep labels and dedup labels interactions public abstract Traversal<Vertex, String> get_g_V_matchXa_knows_b__b_created_c__a_created_cX_dedupXa_b_cX_selectXaX_byXnameX(); // test not(match) public abstract Traversal<Vertex, String> get_g_V_notXmatchXa_age_b__a_name_cX_whereXb_eqXcXX_selectXaXX_name(); // test inline counts public abstract Traversal<Vertex, Long> get_g_V_matchXa_followedBy_count_isXgtX10XX_b__a_0followedBy_count_isXgtX10XX_bX_count(); // test mid-clause variables public abstract Traversal<Vertex, String> get_g_V_matchXa_hasXsong_name_sunshineX__a_mapX0followedBy_weight_meanX_b__a_0followedBy_c__c_filterXweight_whereXgteXbXXX_outV_dX_selectXdX_byXnameX(); // test order barriers public abstract Traversal<Vertex, Map<String, String>> 
get_g_V_matchXa_outEXcreatedX_order_byXweight_decrX_limitX1X_inV_b__b_hasXlang_javaXX_selectXa_bX_byXnameX(); @Test @LoadGraphWith(MODERN) public void g_V_valueMap_matchXa_selectXnameX_bX() { final Traversal<Vertex, Map<String, Object>> traversal = get_g_V_valueMap_matchXa_selectXnameX_bX(); printTraversalForm(traversal); int counter = 0; while (traversal.hasNext()) { counter++; final Map<String, Object> map = traversal.next(); assertTrue(Map.class.isAssignableFrom(map.get("a").getClass())); final String name = ((Map<String, List<String>>) map.get("a")).get("name").get(0); assertEquals(name, ((List<String>) map.get("b")).get(0)); } assertEquals(6, counter); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_out_bX() throws Exception { final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXa_out_bX(); printTraversalForm(traversal); checkResults(makeMapList(2, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "lop"), "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "josh"), "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "vadas"), "a", convertToVertex(graph, "josh"), "b", convertToVertex(graph, "ripple"), "a", convertToVertex(graph, "josh"), "b", convertToVertex(graph, "lop"), "a", convertToVertex(graph, "peter"), "b", convertToVertex(graph, "lop")), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_out_bX_selectXb_idX() throws Exception { final Traversal<Vertex, Object> traversal = get_g_V_matchXa_out_bX_selectXb_idX(); printTraversalForm(traversal); int counter = 0; final Object vadasId = convertToVertexId("vadas"); final Object joshId = convertToVertexId("josh"); final Object lopId = convertToVertexId("lop"); final Object rippleId = convertToVertexId("ripple"); Map<Object, Long> idCounts = new HashMap<>(); while (traversal.hasNext()) { counter++; MapHelper.incr(idCounts, traversal.next(), 1l); } assertFalse(traversal.hasNext()); assertEquals(idCounts.get(vadasId), 
Long.valueOf(1l)); assertEquals(idCounts.get(lopId), Long.valueOf(3l)); assertEquals(idCounts.get(joshId), Long.valueOf(1l)); assertEquals(idCounts.get(rippleId), Long.valueOf(1l)); assertEquals(6, counter); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_knows_b__b_created_cX() throws Exception { final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXa_knows_b__b_created_cX(); printTraversalForm(traversal); checkResults(makeMapList(3, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "josh"), "c", convertToVertex(graph, "lop"), "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "josh"), "c", convertToVertex(graph, "ripple")), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_knows_b__a_created_cX() throws Exception { final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXa_knows_b__a_created_cX(); printTraversalForm(traversal); checkResults(makeMapList(3, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "vadas"), "c", convertToVertex(graph, "lop"), "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "josh"), "c", convertToVertex(graph, "lop")), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_matchXd_0knows_a__d_hasXname_vadasX__a_knows_b__b_created_cX() throws Exception { final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXd_0knows_a__d_hasXname_vadasX__a_knows_b__b_created_cX(); printTraversalForm(traversal); checkResults(makeMapList(4, "d", convertToVertex(graph, "vadas"), "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "josh"), "c", convertToVertex(graph, "lop"), "d", convertToVertex(graph, "vadas"), "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "josh"), "c", convertToVertex(graph, "ripple")), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_created_b__a_repeatXoutX_timesX2XX_selectXa_bX() throws Exception { final Traversal<Vertex, Map<String, Vertex>> 
traversal = get_g_V_matchXa_created_b__a_repeatXoutX_timesX2XX_selectXa_bX(); printTraversalForm(traversal); assertTrue(traversal.hasNext()); checkResults(makeMapList(2, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "lop")), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_created_lop_b__b_0created_29_cX_whereXc_repeatXoutX_timesX2XX_selectXa_b_cX() throws Exception { final Traversal<Vertex, Map<String, String>> traversal = get_g_V_matchXa_created_lop_b__b_0created_29_cX_whereXc_repeatXoutX_timesX2XX_selectXa_b_cX(); printTraversalForm(traversal); checkResults(makeMapList(3, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "lop"), "c", convertToVertex(graph, "marko"), "a", convertToVertex(graph, "josh"), "b", convertToVertex(graph, "lop"), "c", convertToVertex(graph, "marko"), "a", convertToVertex(graph, "peter"), "b", convertToVertex(graph, "lop"), "c", convertToVertex(graph, "marko")), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_created_lop_b__b_0created_29_c__c_whereXrepeatXoutX_timesX2XXX() throws Exception { final Traversal<Vertex, Map<String, String>> traversal = get_g_V_matchXa_created_lop_b__b_0created_29_c__c_whereXrepeatXoutX_timesX2XXX(); printTraversalForm(traversal); checkResults(makeMapList(3, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "lop"), "c", convertToVertex(graph, "marko"), "a", convertToVertex(graph, "josh"), "b", convertToVertex(graph, "lop"), "c", convertToVertex(graph, "marko"), "a", convertToVertex(graph, "peter"), "b", convertToVertex(graph, "lop"), "c", convertToVertex(graph, "marko")), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_out_out_matchXa_0created_b__b_0knows_cX_selectXcX_outXcreatedX_name() throws Exception { final Traversal<Vertex, String> traversal = get_g_V_out_out_matchXa_0created_b__b_0knows_cX_selectXcX_outXcreatedX_name(); printTraversalForm(traversal); assertEquals("lop", traversal.next()); 
assertEquals("lop", traversal.next()); assertFalse(traversal.hasNext()); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_created_b__b_0created_aX() { final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXa_created_b__b_0created_aX(); printTraversalForm(traversal); checkResults(makeMapList(2, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "lop"), "a", convertToVertex(graph, "josh"), "b", convertToVertex(graph, "lop"), "a", convertToVertex(graph, "peter"), "b", convertToVertex(graph, "lop"), "a", convertToVertex(graph, "josh"), "b", convertToVertex(graph, "ripple")), traversal); } // TODO: this test requires Traversal.reverse() @LoadGraphWith(MODERN) public void g_V_matchXa_knows_b__c_knows_bX() { final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXa_knows_b__c_knows_bX(); try { printTraversalForm(traversal); traversal.iterate(); fail("Should have tossed an exception because match pattern is not solvable"); } catch (Exception ex) { //final Throwable root = ExceptionUtils.getRootCause(ex); //assertThat(root.getMessage(), startsWith("The provided match pattern is unsolvable:")); } } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_knows_b__b_created_lop__b_matchXb_created_d__d_0created_cX_selectXcX_cX_selectXa_b_cX() throws Exception { final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXa_knows_b__b_created_lop__b_matchXb_created_d__d_0created_cX_selectXcX_cX_selectXa_b_cX(); printTraversalForm(traversal); checkResults(makeMapList(3, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "josh"), "c", convertToVertex(graph, "josh"), "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "josh"), "c", convertToVertex(graph, "josh"), // expected duplicate: two paths to this solution "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "josh"), "c", convertToVertex(graph, "marko"), "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, 
"josh"), "c", convertToVertex(graph, "peter")), traversal); } // TODO: this test requires Traversal.reverse() @LoadGraphWith(MODERN) public void g_V_matchXa_created_b__c_created_bX_selectXa_b_cX_byXnameX() throws Exception { final Traversal<Vertex, Map<String, String>> traversal = get_g_V_matchXa_created_b__c_created_bX_selectXa_b_cX_byXnameX(); try { printTraversalForm(traversal); traversal.iterate(); fail("Should have tossed an exception because match pattern is not solvable"); } catch (Exception ex) { //final Throwable root = ExceptionUtils.getRootCause(ex); //assertThat(root.getMessage(), startsWith("The provided match pattern is unsolvable:")); } } @LoadGraphWith(MODERN) public void g_V_out_asXcX_matchXb_knows_a__c_created_eX_selectXcX() throws Exception { final Traversal<Vertex, String> traversal = get_g_V_out_asXcX_matchXb_knows_a__c_created_eX_selectXcX(); try { printTraversalForm(traversal); traversal.iterate(); fail("Should have tossed an exception because match pattern is not solvable"); } catch (Exception ex) { //final Throwable root = ExceptionUtils.getRootCause(ex); //assertThat(root.getMessage(), startsWith("The provided match pattern is unsolvable:")); } } @Test @LoadGraphWith(GRATEFUL) public void g_V_matchXa_hasXname_GarciaX__a_0writtenBy_b__a_0sungBy_bX() throws Exception { final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXa_hasXname_GarciaX__a_0writtenBy_b__a_0sungBy_bX(); printTraversalForm(traversal); checkResults(makeMapList(2, "a", convertToVertex(graph, "Garcia"), "b", convertToVertex(graph, "CREAM PUFF WAR"), "a", convertToVertex(graph, "Garcia"), "b", convertToVertex(graph, "CRYPTICAL ENVELOPMENT")), traversal); } @Test @LoadGraphWith(GRATEFUL) public void g_V_matchXa_0sungBy_b__a_0sungBy_c__b_writtenBy_d__c_writtenBy_e__d_hasXname_George_HarisonX__e_hasXname_Bob_MarleyXX() throws Exception { final Traversal<Vertex, Map<String, Vertex>> traversal = 
get_g_V_matchXa_0sungBy_b__a_0sungBy_c__b_writtenBy_d__c_writtenBy_e__d_hasXname_George_HarisonX__e_hasXname_Bob_MarleyXX(); printTraversalForm(traversal); checkResults(makeMapList(5, "a", convertToVertex(graph, "Garcia"), "b", convertToVertex(graph, "I WANT TO TELL YOU"), "c", convertToVertex(graph, "STIR IT UP"), "d", convertToVertex(graph, "George_Harrison"), "e", convertToVertex(graph, "Bob_Marley")), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_created_b__b_0created_cX_whereXa_neq_cX_selectXa_cX() throws Exception { final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXa_created_b__b_0created_cX_whereXa_neq_cX_selectXa_cX(); checkResults(makeMapList(2, "a", convertToVertex(graph, "marko"), "c", convertToVertex(graph, "josh"), "a", convertToVertex(graph, "marko"), "c", convertToVertex(graph, "peter"), "a", convertToVertex(graph, "josh"), "c", convertToVertex(graph, "marko"), "a", convertToVertex(graph, "josh"), "c", convertToVertex(graph, "peter"), "a", convertToVertex(graph, "peter"), "c", convertToVertex(graph, "marko"), "a", convertToVertex(graph, "peter"), "c", convertToVertex(graph, "josh")), traversal); } @Test @LoadGraphWith(GRATEFUL) public void g_V_matchXa_0sungBy_b__a_0writtenBy_c__b_writtenBy_d__c_sungBy_d__d_hasXname_GarciaXX() throws Exception { final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXa_0sungBy_b__a_0writtenBy_c__b_writtenBy_d__c_sungBy_d__d_hasXname_GarciaXX(); printTraversalForm(traversal); assertCommon(traversal); } @Test @LoadGraphWith(GRATEFUL) public void g_V_matchXa_0sungBy_b__a_0writtenBy_c__b_writtenBy_dX_whereXc_sungBy_dX_whereXd_hasXname_GarciaXX() throws Exception { // TODO: the where() is trying to get Garcia's name. Why is ComputerVerificationStrategy allowing this? 
final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXa_0sungBy_b__a_0writtenBy_c__b_writtenBy_dX_whereXc_sungBy_dX_whereXd_hasXname_GarciaXX(); printTraversalForm(traversal); assertCommon(traversal); } private void assertCommon(Traversal<Vertex, Map<String, Vertex>> traversal) { checkResults(makeMapList(4, "a", convertToVertex(graph, "Garcia"), "b", convertToVertex(graph, "CREAM PUFF WAR"), "c", convertToVertex(graph, "CREAM PUFF WAR"), "d", convertToVertex(graph, "Garcia"), "a", convertToVertex(graph, "Garcia"), "b", convertToVertex(graph, "CREAM PUFF WAR"), "c", convertToVertex(graph, "CRYPTICAL ENVELOPMENT"), "d", convertToVertex(graph, "Garcia"), "a", convertToVertex(graph, "Garcia"), "b", convertToVertex(graph, "CRYPTICAL ENVELOPMENT"), "c", convertToVertex(graph, "CREAM PUFF WAR"), "d", convertToVertex(graph, "Garcia"), "a", convertToVertex(graph, "Garcia"), "b", convertToVertex(graph, "CRYPTICAL ENVELOPMENT"), "c", convertToVertex(graph, "CRYPTICAL ENVELOPMENT"), "d", convertToVertex(graph, "Garcia"), "a", convertToVertex(graph, "Grateful_Dead"), "b", convertToVertex(graph, "CANT COME DOWN"), "c", convertToVertex(graph, "DOWN SO LONG"), "d", convertToVertex(graph, "Garcia"), "a", convertToVertex(graph, "Grateful_Dead"), "b", convertToVertex(graph, "THE ONLY TIME IS NOW"), "c", convertToVertex(graph, "DOWN SO LONG"), "d", convertToVertex(graph, "Garcia")), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_whereXa_neqXcXX__a_created_b__orXa_knows_vadas__a_0knows_and_a_hasXlabel_personXX__b_0created_c__b_0created_count_isXgtX1XXX_selectXa_b_cX_byXidX() { final Traversal<Vertex, Map<String, Object>> traversal = get_g_V_matchXa_whereXa_neqXcXX__a_created_b__orXa_knows_vadas__a_0knows_and_a_hasXlabel_personXX__b_0created_c__b_0created_count_isXgtX1XXX_selectXa_b_cX_byXidX(); printTraversalForm(traversal); checkResults(makeMapList(3, "a", convertToVertexId("marko"), "b", convertToVertexId("lop"), "c", convertToVertexId("josh"), 
"a", convertToVertexId("marko"), "b", convertToVertexId("lop"), "c", convertToVertexId("peter"), "a", convertToVertexId("josh"), "b", convertToVertexId("lop"), "c", convertToVertexId("marko"), "a", convertToVertexId("josh"), "b", convertToVertexId("lop"), "c", convertToVertexId("peter")), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_asXaX_out_asXbX_matchXa_out_count_c__b_in_count_cX() { final Traversal<Vertex, Map<String, Object>> traversal = get_g_V_asXaX_out_asXbX_matchXa_out_count_c__b_in_count_cX(); printTraversalForm(traversal); checkResults(makeMapList(3, "a", convertToVertex(graph, "marko"), "c", 3l, "b", convertToVertex(graph, "lop")), traversal); } @Test @LoadGraphWith(GRATEFUL) public void g_V_matchXa_hasXname_GarciaX__a_0writtenBy_b__b_followedBy_c__c_writtenBy_d__whereXd_neqXaXXX() { final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXa_hasXname_GarciaX__a_0writtenBy_b__b_followedBy_c__c_writtenBy_d__whereXd_neqXaXXX(); printTraversalForm(traversal); checkResults(makeMapList(4, "a", convertToVertex(graph, "Garcia"), "b", convertToVertex(graph, "CRYPTICAL ENVELOPMENT"), "c", convertToVertex(graph, "WHARF RAT"), "d", convertToVertex(graph, "Hunter"), "a", convertToVertex(graph, "Garcia"), "b", convertToVertex(graph, "CRYPTICAL ENVELOPMENT"), "c", convertToVertex(graph, "THE OTHER ONE"), "d", convertToVertex(graph, "Weir"), "a", convertToVertex(graph, "Garcia"), "b", convertToVertex(graph, "CRYPTICAL ENVELOPMENT"), "c", convertToVertex(graph, "DRUMS"), "d", convertToVertex(graph, "Grateful_Dead")), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_knows_b__andXa_created_c__b_created_c__andXb_created_count_d__a_knows_count_dXXX() { final Traversal<Vertex, Map<String, Object>> traversal = get_g_V_matchXa_knows_b__andXa_created_c__b_created_c__andXb_created_count_d__a_knows_count_dXXX(); printTraversalForm(traversal); checkResults(makeMapList(4, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, 
"josh"), "c", convertToVertex(graph, "lop"), "d", 2l), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_asXaX_out_asXbX_matchXa_out_count_c__orXa_knows_b__b_in_count_c__and__c_isXgtX2XXXX() { final Traversal<Vertex, Map<String, Object>> traversal = get_g_V_asXaX_out_asXbX_matchXa_out_count_c__orXa_knows_b__b_in_count_c__and__c_isXgtX2XXXX(); printTraversalForm(traversal); checkResults(makeMapList(3, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "josh"), "c", 3l, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "vadas"), "c", 3l, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "lop"), "c", 3l), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa__a_out_b__notXa_created_bXX() { final Traversal<Vertex, Map<String, Object>> traversal = get_g_V_matchXa__a_out_b__notXa_created_bXX(); printTraversalForm(traversal); checkResults(makeMapList(2, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "josh"), "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "vadas")), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_matchXwhereXandXa_created_b__b_0created_count_isXeqX3XXXX__a_both_b__whereXb_inXX() { final Traversal<Vertex, Map<String, Object>> traversal = get_g_V_matchXwhereXandXa_created_b__b_0created_count_isXeqX3XXXX__a_both_b__whereXb_inXX(); printTraversalForm(traversal); checkResults(makeMapList(2, "a", convertToVertex(graph, "marko"), "b", convertToVertex(graph, "lop"), "a", convertToVertex(graph, "josh"), "b", convertToVertex(graph, "lop"), "a", convertToVertex(graph, "peter"), "b", convertToVertex(graph, "lop")), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa__a_both_b__b_both_cX_dedupXa_bX() { final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXa__a_both_b__b_both_cX_dedupXa_bX(); printTraversalForm(traversal); int counter = 0; final Set<List<Vertex>> results = new HashSet<>(); while (traversal.hasNext()) { 
final Map<String, Vertex> map = traversal.next(); assertEquals(3, map.size()); assertTrue(results.add(Arrays.asList(map.get("a"), map.get("b")))); counter++; } assertEquals(12, counter); assertEquals(results.size(), counter); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_both_b__b_both_cX_dedupXa_bX_byXlabelX() { final Traversal<Vertex, Map<String, Vertex>> traversal = get_g_V_matchXa_both_b__b_both_cX_dedupXa_bX_byXlabelX(); printTraversalForm(traversal); int counter = 0; final Set<List<String>> results = new HashSet<>(); while (traversal.hasNext()) { final Map<String, Vertex> map = traversal.next(); assertEquals(3, map.size()); assertTrue(results.add(Arrays.asList(map.get("a").label(), map.get("b").label()))); counter++; } assertEquals(3, counter); assertEquals(results.size(), counter); } @Test @LoadGraphWith(GRATEFUL) public void g_V_hasLabelXsongsX_matchXa_name_b__a_performances_cX_selectXb_cX_count() { final Traversal<Vertex, Long> traversal = get_g_V_hasLabelXsongsX_matchXa_name_b__a_performances_cX_selectXb_cX_count(); printTraversalForm(traversal); assertEquals(584, traversal.next().longValue()); assertFalse(traversal.hasNext()); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_knows_count_bX_selectXbX() { final Traversal<Vertex, Long> traversal = get_g_V_matchXa_knows_count_bX_selectXbX(); printTraversalForm(traversal); checkResults(Arrays.asList(0L, 0L, 0L, 0L, 0L, 2L), traversal); assertFalse(traversal.hasNext()); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_knows_b__b_created_c__a_created_cX_dedupXa_b_cX_selectXaX_byXnameX() { final Traversal<Vertex, String> traversal = get_g_V_matchXa_knows_b__b_created_c__a_created_cX_dedupXa_b_cX_selectXaX_byXnameX(); printTraversalForm(traversal); assertEquals("marko", traversal.next()); assertFalse(traversal.hasNext()); } @Test @LoadGraphWith(MODERN) public void g_V_notXmatchXa_age_b__a_name_cX_whereXb_eqXcXX_selectXaXX_name() { final Traversal<Vertex, String> traversal = 
get_g_V_notXmatchXa_age_b__a_name_cX_whereXb_eqXcXX_selectXaXX_name(); printTraversalForm(traversal); checkResults(Arrays.asList("marko", "peter", "josh", "vadas", "lop", "ripple"), traversal); } @Test @LoadGraphWith(GRATEFUL) public void g_V_matchXa_followedBy_count_isXgtX10XX_b__a_0followedBy_count_isXgtX10XX_bX_count() { final Traversal<Vertex, Long> traversal = get_g_V_matchXa_followedBy_count_isXgtX10XX_b__a_0followedBy_count_isXgtX10XX_bX_count(); printTraversalForm(traversal); checkResults(Collections.singletonList(6L), traversal); } @Test @LoadGraphWith(GRATEFUL) public void g_V_matchXa_hasXsong_name_sunshineX__a_mapX0followedBy_weight_meanX_b__a_0followedBy_c__c_filterXweight_whereXgteXbXXX_outV_dX_selectXdX_byXnameX() { final Traversal<Vertex, String> traversal = get_g_V_matchXa_hasXsong_name_sunshineX__a_mapX0followedBy_weight_meanX_b__a_0followedBy_c__c_filterXweight_whereXgteXbXXX_outV_dX_selectXdX_byXnameX(); printTraversalForm(traversal); checkResults(Arrays.asList("THE MUSIC NEVER STOPPED", "PROMISED LAND", "PLAYING IN THE BAND", "CASEY JONES", "BIG RIVER", "EL PASO", "LIBERTY", "LOOKS LIKE RAIN"), traversal); } @Test @LoadGraphWith(MODERN) public void g_V_matchXa_outEXcreatedX_order_byXweight_decrX_limitX1X_inV_b__b_hasXlang_javaXX_selectXa_bX_byXnameX() { final Traversal<Vertex, Map<String, String>> traversal = get_g_V_matchXa_outEXcreatedX_order_byXweight_decrX_limitX1X_inV_b__b_hasXlang_javaXX_selectXa_bX_byXnameX(); printTraversalForm(traversal); checkResults(makeMapList(2, "a", "marko", "b", "lop", "a", "peter", "b", "lop", "a", "josh", "b", "ripple"), traversal); } public static class GreedyMatchTraversals extends Traversals { @Before public void setupTest() { super.setupTest(); g = g.withStrategies(MatchAlgorithmStrategy.build().algorithm(MatchStep.GreedyMatchAlgorithm.class).create()); } } public static class CountMatchTraversals extends Traversals { } public abstract static class Traversals extends MatchTest { @Override public 
Traversal<Vertex, Map<String, Object>> get_g_V_valueMap_matchXa_selectXnameX_bX() { return g.V().valueMap().match(as("a").select("name").as("b")); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_out_bX() { return g.V().match(as("a").out().as("b")); } @Override public Traversal<Vertex, Object> get_g_V_matchXa_out_bX_selectXb_idX() { return g.V().match(as("a").out().as("b")).select("b").by(T.id); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_knows_b__b_created_cX() { return g.V().match( as("a").out("knows").as("b"), as("b").out("created").as("c")); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_knows_b__a_created_cX() { return g.V().match( as("a").out("knows").as("b"), as("a").out("created").as("c")); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXd_0knows_a__d_hasXname_vadasX__a_knows_b__b_created_cX() { return g.V().match( as("d").in("knows").as("a"), as("d").has("name", "vadas"), as("a").out("knows").as("b"), as("b").out("created").as("c")); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_created_b__a_repeatXoutX_timesX2XX_selectXa_bX() { return g.V().match( as("a").out("created").as("b"), __.<Vertex>as("a").repeat(out()).times(2).as("b")).<Vertex>select("a", "b"); } @Override public Traversal<Vertex, Map<String, String>> get_g_V_matchXa_created_lop_b__b_0created_29_c__c_whereXrepeatXoutX_timesX2XXX() { return g.V().match( as("a").out("created").has("name", "lop").as("b"), as("b").in("created").has("age", 29).as("c"), as("c").where(repeat(out()).times(2))); } @Override public Traversal<Vertex, Map<String, String>> get_g_V_matchXa_created_lop_b__b_0created_29_cX_whereXc_repeatXoutX_timesX2XX_selectXa_b_cX() { return g.V().match( as("a").out("created").has("name", "lop").as("b"), as("b").in("created").has("age", 29).as("c")) .where(__.<Vertex>as("c").repeat(out()).times(2)).select("a", "b", "c"); } @Override public Traversal<Vertex, 
String> get_g_V_out_out_matchXa_0created_b__b_0knows_cX_selectXcX_outXcreatedX_name() { return g.V().out().out().match( as("a").in("created").as("b"), as("b").in("knows").as("c")).select("c").out("created").values("name"); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_created_b__b_0created_aX() { return g.V().match( as("a").out("created").as("b"), as("b").in("created").as("a")); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_knows_b__c_knows_bX() { return g.V().match(as("a").out("knows").as("b"), as("c").out("knows").as("b")); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_knows_b__b_created_lop__b_matchXb_created_d__d_0created_cX_selectXcX_cX_selectXa_b_cX() { return g.V().match( as("a").out("knows").as("b"), as("b").out("created").has("name", "lop"), as("b").match( as("b").out("created").as("d"), as("d").in("created").as("c")).select("c").as("c")).<Vertex>select("a", "b", "c"); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_hasXname_GarciaX__a_0writtenBy_b__a_0sungBy_bX() { return g.V().match( as("a").has("name", "Garcia"), as("a").in("writtenBy").as("b"), as("a").in("sungBy").as("b")); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_0sungBy_b__a_0sungBy_c__b_writtenBy_d__c_writtenBy_e__d_hasXname_George_HarisonX__e_hasXname_Bob_MarleyXX() { return g.V().match( as("a").in("sungBy").as("b"), as("a").in("sungBy").as("c"), as("b").out("writtenBy").as("d"), as("c").out("writtenBy").as("e"), as("d").has("name", "George_Harrison"), as("e").has("name", "Bob_Marley")); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_0sungBy_b__a_0writtenBy_c__b_writtenBy_d__c_sungBy_d__d_hasXname_GarciaXX() { return g.V().match( as("a").in("sungBy").as("b"), as("a").in("writtenBy").as("c"), as("b").out("writtenBy").as("d"), as("c").out("sungBy").as("d"), as("d").has("name", "Garcia")); } @Override public Traversal<Vertex, 
Map<String, Vertex>> get_g_V_matchXa_0sungBy_b__a_0writtenBy_c__b_writtenBy_dX_whereXc_sungBy_dX_whereXd_hasXname_GarciaXX() { return g.V().<Vertex>match( as("a").in("sungBy").as("b"), as("a").in("writtenBy").as("c"), as("b").out("writtenBy").as("d")) .where(as("c").out("sungBy").as("d")) .where(as("d").has("name", "Garcia")); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_created_b__b_0created_cX_whereXa_neq_cX_selectXa_cX() { return g.V().match( as("a").out("created").as("b"), as("b").in("created").as("c")) .where("a", neq("c")) .<Vertex>select("a", "c"); } @Override public Traversal<Vertex, Map<String, String>> get_g_V_matchXa_created_b__c_created_bX_selectXa_b_cX_byXnameX() { return g.V().match( as("a").out("created").as("b"), as("c").out("created").as("b")).<String>select("a", "b", "c").by("name"); } @Override public Traversal<Vertex, String> get_g_V_out_asXcX_matchXb_knows_a__c_created_eX_selectXcX() { return g.V().out().as("c").match( as("b").out("knows").as("a"), as("c").out("created").as("e")).select("c"); } @Override public Traversal<Vertex, Map<String, Object>> get_g_V_matchXa_whereXa_neqXcXX__a_created_b__orXa_knows_vadas__a_0knows_and_a_hasXlabel_personXX__b_0created_c__b_0created_count_isXgtX1XXX_selectXa_b_cX_byXidX() { return g.V().match( where("a", P.neq("c")), as("a").out("created").as("b"), or( as("a").out("knows").has("name", "vadas"), as("a").in("knows").and().as("a").has(T.label, "person") ), as("b").in("created").as("c"), as("b").in("created").count().is(P.gt(1))) .select("a", "b", "c").by(T.id); } @Override public Traversal<Vertex, Map<String, Object>> get_g_V_asXaX_out_asXbX_matchXa_out_count_c__b_in_count_cX() { return g.V().as("a").out().as("b").match(as("a").out().count().as("c"), as("b").in().count().as("c")); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_hasXname_GarciaX__a_0writtenBy_b__b_followedBy_c__c_writtenBy_d__whereXd_neqXaXXX() { return g.V().match( as("a").has("name", 
"Garcia"), as("a").in("writtenBy").as("b"), as("b").out("followedBy").as("c"), as("c").out("writtenBy").as("d"), where("d", P.neq("a"))); } @Override public Traversal<Vertex, Map<String, Object>> get_g_V_matchXa_knows_b__andXa_created_c__b_created_c__andXb_created_count_d__a_knows_count_dXXX() { return g.V().match( as("a").out("knows").as("b"), and( as("a").out("created").as("c"), as("b").out("created").as("c"), and( as("b").out("created").count().as("d"), as("a").out("knows").count().as("d") ) )); } @Override public Traversal<Vertex, Map<String, Object>> get_g_V_asXaX_out_asXbX_matchXa_out_count_c__orXa_knows_b__b_in_count_c__and__c_isXgtX2XXXX() { return g.V().as("a").out().as("b"). match( as("a").out().count().as("c"), or( as("a").out("knows").as("b"), as("b").in().count().as("c").and().as("c").is(P.gt(2)) ) ); } @Override public Traversal<Vertex, Map<String, Object>> get_g_V_matchXa__a_out_b__notXa_created_bXX() { return g.V().match( as("a").out().as("b"), not(as("a").out("created").as("b"))); } @Override public Traversal<Vertex, Map<String, Object>> get_g_V_matchXwhereXandXa_created_b__b_0created_count_isXeqX3XXXX__a_both_b__whereXb_inXX() { return g.V().match( where(and( as("a").out("created").as("b"), as("b").in("created").count().is(eq(3)))), as("a").both().as("b"), where(as("b").in())); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa__a_both_b__b_both_cX_dedupXa_bX() { return g.V().<Vertex>match( as("a").both().as("b"), as("b").both().as("c")).dedup("a", "b"); } @Override public Traversal<Vertex, Map<String, Vertex>> get_g_V_matchXa_both_b__b_both_cX_dedupXa_bX_byXlabelX() { return g.V().<Vertex>match( as("a").both().as("b"), as("b").both().as("c")).dedup("a", "b").by(T.label); } @Override public Traversal<Vertex, Long> get_g_V_hasLabelXsongsX_matchXa_name_b__a_performances_cX_selectXb_cX_count() { return g.V().hasLabel("song").match( __.as("a").values("name").as("b"), __.as("a").values("performances").as("c") ).select("b", 
"c").count(); } @Override public Traversal<Vertex, Long> get_g_V_matchXa_knows_count_bX_selectXbX() { return g.V().match(as("a").out("knows").count().as("b")).select("b"); } @Override public Traversal<Vertex, String> get_g_V_matchXa_knows_b__b_created_c__a_created_cX_dedupXa_b_cX_selectXaX_byXnameX() { return g.V().match( as("a").out("knows").as("b"), as("b").out("created").as("c"), as("a").out("created").as("c")).dedup("a", "b", "c").<String>select("a").by("name"); } @Override public Traversal<Vertex, String> get_g_V_notXmatchXa_age_b__a_name_cX_whereXb_eqXcXX_selectXaXX_name() { return g.V().not(match(__.as("a").values("age").as("b"), __.as("a").values("name").as("c")).where("b", eq("c")).select("a")).values("name"); } @Override public Traversal<Vertex, Long> get_g_V_matchXa_followedBy_count_isXgtX10XX_b__a_0followedBy_count_isXgtX10XX_bX_count() { return g.V().match( as("a").out("followedBy").count().is(P.gt(10)).as("b"), as("a").in("followedBy").count().is(P.gt(10)).as("b")).count(); } @Override public Traversal<Vertex, String> get_g_V_matchXa_hasXsong_name_sunshineX__a_mapX0followedBy_weight_meanX_b__a_0followedBy_c__c_filterXweight_whereXgteXbXXX_outV_dX_selectXdX_byXnameX() { return g.V().match( as("a").has("song", "name", "HERE COMES SUNSHINE"), as("a").map(inE("followedBy").values("weight").mean()).as("b"), as("a").inE("followedBy").as("c"), as("c").filter(values("weight").where(P.gte("b"))).outV().as("d")). <String>select("d").by("name"); } @Override public Traversal<Vertex, Map<String, String>> get_g_V_matchXa_outEXcreatedX_order_byXweight_decrX_limitX1X_inV_b__b_hasXlang_javaXX_selectXa_bX_byXnameX() { return g.V().match( as("a").outE("created").order().by("weight", decr).limit(1).inV().as("b"), as("b").has("lang", "java")). <String>select("a", "b").by("name"); } } }
/* * Copyright 2019 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.handler.codec.http2; import io.netty.buffer.ByteBufAllocator; import io.netty.channel.Channel; import io.netty.channel.ChannelConfig; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelId; import io.netty.channel.ChannelMetadata; import io.netty.channel.ChannelOutboundBuffer; import io.netty.channel.ChannelPipeline; import io.netty.channel.ChannelProgressivePromise; import io.netty.channel.ChannelPromise; import io.netty.channel.DefaultChannelConfig; import io.netty.channel.DefaultChannelPipeline; import io.netty.channel.EventLoop; import io.netty.channel.MessageSizeEstimator; import io.netty.channel.RecvByteBufAllocator; import io.netty.channel.VoidChannelPromise; import io.netty.channel.WriteBufferWaterMark; import io.netty.handler.codec.http2.Http2FrameCodec.DefaultHttp2FrameStream; import io.netty.util.DefaultAttributeMap; import io.netty.util.ReferenceCountUtil; import io.netty.util.internal.StringUtil; import io.netty.util.internal.logging.InternalLogger; import io.netty.util.internal.logging.InternalLoggerFactory; import java.io.IOException; import java.net.SocketAddress; import java.nio.channels.ClosedChannelException; import java.util.ArrayDeque; import java.util.Queue; import 
java.util.concurrent.RejectedExecutionException; import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import java.util.concurrent.atomic.AtomicLongFieldUpdater; import static io.netty.handler.codec.http2.Http2CodecUtil.isStreamIdValid; import static java.lang.Math.min; abstract class AbstractHttp2StreamChannel extends DefaultAttributeMap implements Http2StreamChannel { static final Http2FrameStreamVisitor WRITABLE_VISITOR = new Http2FrameStreamVisitor() { @Override public boolean visit(Http2FrameStream stream) { final AbstractHttp2StreamChannel childChannel = (AbstractHttp2StreamChannel) ((DefaultHttp2FrameStream) stream).attachment; childChannel.trySetWritable(); return true; } }; private static final InternalLogger logger = InternalLoggerFactory.getInstance(AbstractHttp2StreamChannel.class); private static final ChannelMetadata METADATA = new ChannelMetadata(false, 16); /** * Number of bytes to consider non-payload messages. 9 is arbitrary, but also the minimum size of an HTTP/2 frame. * What matters is that it is non-zero. */ private static final int MIN_HTTP2_FRAME_SIZE = 9; /** * An estimator whose handle returns the flow-control size for DATA frames, and {@value MIN_HTTP2_FRAME_SIZE} for all other frames. */ private static final class FlowControlledFrameSizeEstimator implements MessageSizeEstimator { static final FlowControlledFrameSizeEstimator INSTANCE = new FlowControlledFrameSizeEstimator(); private static final Handle HANDLE_INSTANCE = new Handle() { @Override public int size(Object msg) { return msg instanceof Http2DataFrame ? // Guard against overflow. 
(int) min(Integer.MAX_VALUE, ((Http2DataFrame) msg).initialFlowControlledBytes() + (long) MIN_HTTP2_FRAME_SIZE) : MIN_HTTP2_FRAME_SIZE; } }; @Override public Handle newHandle() { return HANDLE_INSTANCE; } } private static final AtomicLongFieldUpdater<AbstractHttp2StreamChannel> TOTAL_PENDING_SIZE_UPDATER = AtomicLongFieldUpdater.newUpdater(AbstractHttp2StreamChannel.class, "totalPendingSize"); private static final AtomicIntegerFieldUpdater<AbstractHttp2StreamChannel> UNWRITABLE_UPDATER = AtomicIntegerFieldUpdater.newUpdater(AbstractHttp2StreamChannel.class, "unwritable"); /** * The current status of the read-processing for a {@link AbstractHttp2StreamChannel}. */ private enum ReadStatus { /** * No read in progress and no read was requested (yet) */ IDLE, /** * Reading in progress */ IN_PROGRESS, /** * A read operation was requested. */ REQUESTED } private final AbstractHttp2StreamChannel.Http2StreamChannelConfig config = new Http2StreamChannelConfig(this); private final AbstractHttp2StreamChannel.Http2ChannelUnsafe unsafe = new Http2ChannelUnsafe(); private final ChannelId channelId; private final ChannelPipeline pipeline; private final DefaultHttp2FrameStream stream; private final ChannelPromise closePromise; private volatile boolean registered; private volatile long totalPendingSize; private volatile int unwritable; // Cached to reduce GC private Runnable fireChannelWritabilityChangedTask; private boolean outboundClosed; private int flowControlledBytes; /** * This variable represents if a read is in progress for the current channel or was requested. * Note that depending upon the {@link RecvByteBufAllocator} behavior a read may extend beyond the * {@link Http2ChannelUnsafe#beginRead()} method scope. The {@link Http2ChannelUnsafe#beginRead()} loop may * drain all pending data, and then if the parent channel is reading this channel may still accept frames. 
*/ private ReadStatus readStatus = ReadStatus.IDLE; private Queue<Object> inboundBuffer; /** {@code true} after the first HEADERS frame has been written **/ private boolean firstFrameWritten; private boolean readCompletePending; AbstractHttp2StreamChannel(DefaultHttp2FrameStream stream, int id, ChannelHandler inboundHandler) { this.stream = stream; stream.attachment = this; pipeline = new DefaultChannelPipeline(this) { @Override protected void incrementPendingOutboundBytes(long size) { AbstractHttp2StreamChannel.this.incrementPendingOutboundBytes(size, true); } @Override protected void decrementPendingOutboundBytes(long size) { AbstractHttp2StreamChannel.this.decrementPendingOutboundBytes(size, true); } }; closePromise = pipeline.newPromise(); channelId = new Http2StreamChannelId(parent().id(), id); if (inboundHandler != null) { // Add the handler to the pipeline now that we are registered. pipeline.addLast(inboundHandler); } } private void incrementPendingOutboundBytes(long size, boolean invokeLater) { if (size == 0) { return; } long newWriteBufferSize = TOTAL_PENDING_SIZE_UPDATER.addAndGet(this, size); if (newWriteBufferSize > config().getWriteBufferHighWaterMark()) { setUnwritable(invokeLater); } } private void decrementPendingOutboundBytes(long size, boolean invokeLater) { if (size == 0) { return; } long newWriteBufferSize = TOTAL_PENDING_SIZE_UPDATER.addAndGet(this, -size); // Once the totalPendingSize dropped below the low water-mark we can mark the child channel // as writable again. Before doing so we also need to ensure the parent channel is writable to // prevent excessive buffering in the parent outbound buffer. If the parent is not writable // we will mark the child channel as writable once the parent becomes writable by calling // trySetWritable() later. 
if (newWriteBufferSize < config().getWriteBufferLowWaterMark() && parent().isWritable()) { setWritable(invokeLater); } } final void trySetWritable() { // The parent is writable again but the child channel itself may still not be writable. // Let's try to set the child channel writable to match the state of the parent channel // if (and only if) the totalPendingSize is smaller than the low water-mark. // If this is not the case we will try again later once we drop under it. if (totalPendingSize < config().getWriteBufferLowWaterMark()) { setWritable(false); } } private void setWritable(boolean invokeLater) { for (;;) { final int oldValue = unwritable; final int newValue = oldValue & ~1; if (UNWRITABLE_UPDATER.compareAndSet(this, oldValue, newValue)) { if (oldValue != 0 && newValue == 0) { fireChannelWritabilityChanged(invokeLater); } break; } } } private void setUnwritable(boolean invokeLater) { for (;;) { final int oldValue = unwritable; final int newValue = oldValue | 1; if (UNWRITABLE_UPDATER.compareAndSet(this, oldValue, newValue)) { if (oldValue == 0 && newValue != 0) { fireChannelWritabilityChanged(invokeLater); } break; } } } private void fireChannelWritabilityChanged(boolean invokeLater) { final ChannelPipeline pipeline = pipeline(); if (invokeLater) { Runnable task = fireChannelWritabilityChangedTask; if (task == null) { fireChannelWritabilityChangedTask = task = new Runnable() { @Override public void run() { pipeline.fireChannelWritabilityChanged(); } }; } eventLoop().execute(task); } else { pipeline.fireChannelWritabilityChanged(); } } @Override public Http2FrameStream stream() { return stream; } void closeOutbound() { outboundClosed = true; } void streamClosed() { unsafe.readEOS(); // Attempt to drain any queued data from the queue and deliver it to the application before closing this // channel. 
unsafe.doBeginRead(); } @Override public ChannelMetadata metadata() { return METADATA; } @Override public ChannelConfig config() { return config; } @Override public boolean isOpen() { return !closePromise.isDone(); } @Override public boolean isActive() { return isOpen(); } @Override public boolean isWritable() { return unwritable == 0; } @Override public ChannelId id() { return channelId; } @Override public EventLoop eventLoop() { return parent().eventLoop(); } @Override public Channel parent() { return parentContext().channel(); } @Override public boolean isRegistered() { return registered; } @Override public SocketAddress localAddress() { return parent().localAddress(); } @Override public SocketAddress remoteAddress() { return parent().remoteAddress(); } @Override public ChannelFuture closeFuture() { return closePromise; } @Override public long bytesBeforeUnwritable() { long bytes = config().getWriteBufferHighWaterMark() - totalPendingSize; // If bytes is negative we know we are not writable, but if bytes is non-negative we have to check // writability. Note that totalPendingSize and isWritable() use different volatile variables that are not // synchronized together. totalPendingSize will be updated before isWritable(). if (bytes > 0) { return isWritable() ? bytes : 0; } return 0; } @Override public long bytesBeforeWritable() { long bytes = totalPendingSize - config().getWriteBufferLowWaterMark(); // If bytes is negative we know we are writable, but if bytes is non-negative we have to check writability. // Note that totalPendingSize and isWritable() use different volatile variables that are not synchronized // together. totalPendingSize will be updated before isWritable(). if (bytes > 0) { return isWritable() ? 
0 : bytes; } return 0; } @Override public Unsafe unsafe() { return unsafe; } @Override public ChannelPipeline pipeline() { return pipeline; } @Override public ByteBufAllocator alloc() { return config().getAllocator(); } @Override public Channel read() { pipeline().read(); return this; } @Override public Channel flush() { pipeline().flush(); return this; } @Override public ChannelFuture bind(SocketAddress localAddress) { return pipeline().bind(localAddress); } @Override public ChannelFuture connect(SocketAddress remoteAddress) { return pipeline().connect(remoteAddress); } @Override public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress localAddress) { return pipeline().connect(remoteAddress, localAddress); } @Override public ChannelFuture disconnect() { return pipeline().disconnect(); } @Override public ChannelFuture close() { return pipeline().close(); } @Override public ChannelFuture deregister() { return pipeline().deregister(); } @Override public ChannelFuture bind(SocketAddress localAddress, ChannelPromise promise) { return pipeline().bind(localAddress, promise); } @Override public ChannelFuture connect(SocketAddress remoteAddress, ChannelPromise promise) { return pipeline().connect(remoteAddress, promise); } @Override public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) { return pipeline().connect(remoteAddress, localAddress, promise); } @Override public ChannelFuture disconnect(ChannelPromise promise) { return pipeline().disconnect(promise); } @Override public ChannelFuture close(ChannelPromise promise) { return pipeline().close(promise); } @Override public ChannelFuture deregister(ChannelPromise promise) { return pipeline().deregister(promise); } @Override public ChannelFuture write(Object msg) { return pipeline().write(msg); } @Override public ChannelFuture write(Object msg, ChannelPromise promise) { return pipeline().write(msg, promise); } @Override public ChannelFuture 
writeAndFlush(Object msg, ChannelPromise promise) { return pipeline().writeAndFlush(msg, promise); } @Override public ChannelFuture writeAndFlush(Object msg) { return pipeline().writeAndFlush(msg); } @Override public ChannelPromise newPromise() { return pipeline().newPromise(); } @Override public ChannelProgressivePromise newProgressivePromise() { return pipeline().newProgressivePromise(); } @Override public ChannelFuture newSucceededFuture() { return pipeline().newSucceededFuture(); } @Override public ChannelFuture newFailedFuture(Throwable cause) { return pipeline().newFailedFuture(cause); } @Override public ChannelPromise voidPromise() { return pipeline().voidPromise(); } @Override public int hashCode() { return id().hashCode(); } @Override public boolean equals(Object o) { return this == o; } @Override public int compareTo(Channel o) { if (this == o) { return 0; } return id().compareTo(o.id()); } @Override public String toString() { return parent().toString() + "(H2 - " + stream + ')'; } /** * Receive a read message. This does not notify handlers unless a read is in progress on the * channel. */ void fireChildRead(Http2Frame frame) { assert eventLoop().inEventLoop(); if (!isActive()) { ReferenceCountUtil.release(frame); } else if (readStatus != ReadStatus.IDLE) { // If a read is in progress or has been requested, there cannot be anything in the queue, // otherwise we would have drained it from the queue and processed it during the read cycle. assert inboundBuffer == null || inboundBuffer.isEmpty(); final RecvByteBufAllocator.Handle allocHandle = unsafe.recvBufAllocHandle(); flowControlledBytes += unsafe.doRead0(frame, allocHandle); // We currently don't need to check for readEOS because the parent channel and child channel are limited // to the same EventLoop thread. 
There are a limited number of frame types that may come after EOS is // read (unknown, reset) and the trade off is less conditionals for the hot path (headers/data) at the // cost of additional readComplete notifications on the rare path. if (allocHandle.continueReading()) { if (!readCompletePending) { readCompletePending = true; addChannelToReadCompletePendingQueue(); } } else { unsafe.notifyReadComplete(allocHandle, true); } } else { if (inboundBuffer == null) { inboundBuffer = new ArrayDeque<Object>(4); } inboundBuffer.add(frame); } } void fireChildReadComplete() { assert eventLoop().inEventLoop(); assert readStatus != ReadStatus.IDLE || !readCompletePending; unsafe.notifyReadComplete(unsafe.recvBufAllocHandle(), false); } private final class Http2ChannelUnsafe implements Unsafe { private final VoidChannelPromise unsafeVoidPromise = new VoidChannelPromise(AbstractHttp2StreamChannel.this, false); @SuppressWarnings("deprecation") private RecvByteBufAllocator.Handle recvHandle; private boolean writeDoneAndNoFlush; private boolean closeInitiated; private boolean readEOS; @Override public void connect(final SocketAddress remoteAddress, SocketAddress localAddress, final ChannelPromise promise) { if (!promise.setUncancellable()) { return; } promise.setFailure(new UnsupportedOperationException()); } @Override public RecvByteBufAllocator.Handle recvBufAllocHandle() { if (recvHandle == null) { recvHandle = config().getRecvByteBufAllocator().newHandle(); recvHandle.reset(config()); } return recvHandle; } @Override public SocketAddress localAddress() { return parent().unsafe().localAddress(); } @Override public SocketAddress remoteAddress() { return parent().unsafe().remoteAddress(); } @Override public void register(EventLoop eventLoop, ChannelPromise promise) { if (!promise.setUncancellable()) { return; } if (registered) { promise.setFailure(new UnsupportedOperationException("Re-register is not supported")); return; } registered = true; promise.setSuccess(); 
// NOTE(review): this span begins inside the Channel.Unsafe implementation of an HTTP/2
// stream channel; the start of the register operation (and the enclosing class header)
// is outside this chunk.
pipeline().fireChannelRegistered();
if (isActive()) {
    pipeline().fireChannelActive();
}
}

// Binding a local address is not supported on a child stream channel: fail the promise.
@Override
public void bind(SocketAddress localAddress, ChannelPromise promise) {
    if (!promise.setUncancellable()) {
        return;
    }
    promise.setFailure(new UnsupportedOperationException());
}

// Disconnect has the same semantics as close for a stream channel.
@Override
public void disconnect(ChannelPromise promise) {
    close(promise);
}

// Closes the stream exactly once: optionally sends RST_STREAM, releases any buffered
// inbound messages, then fires inactive/deregister events.
@Override
public void close(final ChannelPromise promise) {
    if (!promise.setUncancellable()) {
        return;
    }
    if (closeInitiated) {
        if (closePromise.isDone()) {
            // Closed already.
            promise.setSuccess();
        } else if (!(promise instanceof VoidChannelPromise)) { // Only needed if no VoidChannelPromise.
            // This means close() was called before so we just register a listener and return
            closePromise.addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(ChannelFuture future) {
                    promise.setSuccess();
                }
            });
        }
        return;
    }
    closeInitiated = true;
    // Just set to false as removing from an underlying queue would even be more expensive.
    readCompletePending = false;
    final boolean wasActive = isActive();
    updateLocalWindowIfNeeded();
    // Only ever send a reset frame if the connection is still alive and if the stream was created before
    // as otherwise we may send a RST on a stream in an invalid state and cause a connection error.
    if (parent().isActive() && !readEOS && Http2CodecUtil.isStreamIdValid(stream.id())) {
        Http2StreamFrame resetFrame = new DefaultHttp2ResetFrame(Http2Error.CANCEL).stream(stream());
        write(resetFrame, unsafe().voidPromise());
        flush();
    }
    // Drop and release anything still queued for the inbound side.
    if (inboundBuffer != null) {
        for (;;) {
            Object msg = inboundBuffer.poll();
            if (msg == null) {
                break;
            }
            ReferenceCountUtil.release(msg);
        }
        inboundBuffer = null;
    }
    // The promise should be notified before we call fireChannelInactive().
    outboundClosed = true;
    closePromise.setSuccess();
    promise.setSuccess();
    fireChannelInactiveAndDeregister(voidPromise(), wasActive);
}

@Override
public void closeForcibly() {
    close(unsafe().voidPromise());
}

@Override
public void deregister(ChannelPromise promise) {
    fireChannelInactiveAndDeregister(promise, false);
}

// Fires channelInactive (optionally) and channelUnregistered on a later event-loop tick
// so that handler methods currently executing finish first.
private void fireChannelInactiveAndDeregister(final ChannelPromise promise,
                                              final boolean fireChannelInactive) {
    if (!promise.setUncancellable()) {
        return;
    }
    if (!registered) {
        promise.setSuccess();
        return;
    }
    // As a user may call deregister() from within any method while doing processing in the ChannelPipeline,
    // we need to ensure we do the actual deregister operation later. This is necessary to preserve the
    // behavior of the AbstractChannel, which always invokes channelUnregistered and channelInactive
    // events 'later' to ensure the current events in the handler are completed before these events.
    //
    // See:
    // https://github.com/netty/netty/issues/4435
    invokeLater(new Runnable() {
        @Override
        public void run() {
            if (fireChannelInactive) {
                pipeline.fireChannelInactive();
            }
            // The user can fire `deregister` events multiple times but we only want to fire the pipeline
            // event if the channel was actually registered.
            if (registered) {
                registered = false;
                pipeline.fireChannelUnregistered();
            }
            safeSetSuccess(promise);
        }
    });
}

// Marks the promise successful unless it is a void promise or already completed.
private void safeSetSuccess(ChannelPromise promise) {
    if (!(promise instanceof VoidChannelPromise) && !promise.trySuccess()) {
        logger.warn("Failed to mark a promise as success because it is done already: {}", promise);
    }
}

private void invokeLater(Runnable task) {
    try {
        // This method is used by outbound operation implementations to trigger an inbound event later.
        // They do not trigger an inbound event immediately because an outbound operation might have been
        // triggered by another inbound event handler method. If fired immediately, the call stack
        // will look like this for example:
        //
        //   handlerA.inboundBufferUpdated() - (1) an inbound handler method closes a connection.
        //   -> handlerA.ctx.close()
        //   -> channel.unsafe.close()
        //   -> handlerA.channelInactive() - (2) another inbound handler method called while in (1) yet
        //
        // which means the execution of two inbound handler methods of the same handler overlap undesirably.
        eventLoop().execute(task);
    } catch (RejectedExecutionException e) {
        logger.warn("Can't invoke task later as EventLoop rejected it", e);
    }
}

// Starts a read pass, or records that another read was requested while one is in progress.
@Override
public void beginRead() {
    if (!isActive()) {
        return;
    }
    updateLocalWindowIfNeeded();
    switch (readStatus) {
        case IDLE:
            readStatus = ReadStatus.IN_PROGRESS;
            doBeginRead();
            break;
        case IN_PROGRESS:
            readStatus = ReadStatus.REQUESTED;
            break;
        default:
            break;
    }
}

private Object pollQueuedMessage() {
    return inboundBuffer == null ? null : inboundBuffer.poll();
}

void doBeginRead() {
    // Process messages until there are none left (or the user stopped requesting) and also handle EOS.
    while (readStatus != ReadStatus.IDLE) {
        Object message = pollQueuedMessage();
        if (message == null) {
            if (readEOS) {
                unsafe.closeForcibly();
            }
            break;
        }
        final RecvByteBufAllocator.Handle allocHandle = recvBufAllocHandle();
        allocHandle.reset(config());
        boolean continueReading = false;
        do {
            flowControlledBytes += doRead0((Http2Frame) message, allocHandle);
        } while ((readEOS || (continueReading = allocHandle.continueReading()))
                && (message = pollQueuedMessage()) != null);
        if (continueReading && isParentReadInProgress() && !readEOS) {
            // Currently the parent and child channel are on the same EventLoop thread. If the parent is
            // currently reading it is possible that more frames will be delivered to this child channel. In
            // the case that this child channel still wants to read we delay the channelReadComplete on this
            // child channel until the parent is done reading.
            if (!readCompletePending) {
                readCompletePending = true;
                addChannelToReadCompletePendingQueue();
            }
        } else {
            notifyReadComplete(allocHandle, true);
        }
    }
}

// Records that the remote peer finished sending on this stream.
void readEOS() {
    readEOS = true;
}

// Returns accumulated flow-controlled bytes to the remote peer via a WINDOW_UPDATE frame.
private void updateLocalWindowIfNeeded() {
    if (flowControlledBytes != 0) {
        int bytes = flowControlledBytes;
        flowControlledBytes = 0;
        write0(parentContext(), new DefaultHttp2WindowUpdateFrame(bytes).stream(stream));
        writeDoneAndNoFlush = true;
    }
}

void notifyReadComplete(RecvByteBufAllocator.Handle allocHandle, boolean forceReadComplete) {
    if (!readCompletePending && !forceReadComplete) {
        return;
    }
    // Set to false just in case we added the channel multiple times before.
    readCompletePending = false;
    if (readStatus == ReadStatus.REQUESTED) {
        readStatus = ReadStatus.IN_PROGRESS;
    } else {
        readStatus = ReadStatus.IDLE;
    }
    allocHandle.readComplete();
    pipeline().fireChannelReadComplete();
    // Reading data may result in frames being written (e.g. WINDOW_UPDATE, RST, etc..). If the parent
    // channel is not currently reading we need to force a flush at the child channel, because we cannot
    // rely upon flush occurring in channelReadComplete on the parent channel.
    flush();
    if (readEOS) {
        unsafe.closeForcibly();
    }
}

// Delivers one frame to this channel's pipeline and returns the number of flow-controlled
// bytes it carried (non-zero only for DATA frames).
@SuppressWarnings("deprecation")
int doRead0(Http2Frame frame, RecvByteBufAllocator.Handle allocHandle) {
    pipeline().fireChannelRead(frame);
    allocHandle.incMessagesRead(1);
    if (frame instanceof Http2DataFrame) {
        final int numBytesToBeConsumed = ((Http2DataFrame) frame).initialFlowControlledBytes();
        allocHandle.attemptedBytesRead(numBytesToBeConsumed);
        allocHandle.lastBytesRead(numBytesToBeConsumed);
        return numBytesToBeConsumed;
    } else {
        allocHandle.attemptedBytesRead(MIN_HTTP2_FRAME_SIZE);
        allocHandle.lastBytesRead(MIN_HTTP2_FRAME_SIZE);
    }
    return 0;
}

// Validates and forwards a stream frame to the parent channel. The first frame written on a
// stream whose id is not yet valid must be a headers frame.
@Override
public void write(Object msg, final ChannelPromise promise) {
    // After this point its not possible to cancel a write anymore.
    if (!promise.setUncancellable()) {
        ReferenceCountUtil.release(msg);
        return;
    }
    if (!isActive() ||
            // Once the outbound side was closed we should not allow header / data frames
            outboundClosed && (msg instanceof Http2HeadersFrame || msg instanceof Http2DataFrame)) {
        ReferenceCountUtil.release(msg);
        promise.setFailure(new ClosedChannelException());
        return;
    }
    try {
        if (msg instanceof Http2StreamFrame) {
            Http2StreamFrame frame = validateStreamFrame((Http2StreamFrame) msg).stream(stream());
            if (!firstFrameWritten && !isStreamIdValid(stream().id())) {
                if (!(frame instanceof Http2HeadersFrame)) {
                    ReferenceCountUtil.release(frame);
                    promise.setFailure(
                            new IllegalArgumentException("The first frame must be a headers frame. Was: "
                                    + frame.name()));
                    return;
                }
                firstFrameWritten = true;
                ChannelFuture f = write0(parentContext(), frame);
                if (f.isDone()) {
                    firstWriteComplete(f, promise);
                } else {
                    // Account for the pending bytes until the parent write completes.
                    final long bytes = FlowControlledFrameSizeEstimator.HANDLE_INSTANCE.size(msg);
                    incrementPendingOutboundBytes(bytes, false);
                    f.addListener(new ChannelFutureListener() {
                        @Override
                        public void operationComplete(ChannelFuture future) {
                            firstWriteComplete(future, promise);
                            decrementPendingOutboundBytes(bytes, false);
                        }
                    });
                    writeDoneAndNoFlush = true;
                }
                return;
            }
        } else {
            String msgStr = msg.toString();
            ReferenceCountUtil.release(msg);
            promise.setFailure(new IllegalArgumentException(
                    "Message must be an " + StringUtil.simpleClassName(Http2StreamFrame.class) +
                            ": " + msgStr));
            return;
        }
        ChannelFuture f = write0(parentContext(), msg);
        if (f.isDone()) {
            writeComplete(f, promise);
        } else {
            final long bytes = FlowControlledFrameSizeEstimator.HANDLE_INSTANCE.size(msg);
            incrementPendingOutboundBytes(bytes, false);
            f.addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(ChannelFuture future) {
                    writeComplete(future, promise);
                    decrementPendingOutboundBytes(bytes, false);
                }
            });
            writeDoneAndNoFlush = true;
        }
    } catch (Throwable t) {
        promise.tryFailure(t);
    }
}

private void firstWriteComplete(ChannelFuture future, ChannelPromise promise) {
    Throwable cause = future.cause();
    if (cause == null) {
        promise.setSuccess();
    } else {
        // If the first write fails there is not much we can do, just close
        closeForcibly();
        promise.setFailure(wrapStreamClosedError(cause));
    }
}

private void writeComplete(ChannelFuture future, ChannelPromise promise) {
    Throwable cause = future.cause();
    if (cause == null) {
        promise.setSuccess();
    } else {
        Throwable error = wrapStreamClosedError(cause);
        // To make it more consistent with AbstractChannel we handle all IOExceptions here.
        if (error instanceof IOException) {
            if (config.isAutoClose()) {
                // Close channel if needed.
                closeForcibly();
            } else {
                // TODO: Once Http2StreamChannel extends DuplexChannel we should call shutdownOutput(...)
                outboundClosed = true;
            }
        }
        promise.setFailure(error);
    }
}

private Throwable wrapStreamClosedError(Throwable cause) {
    // If the error was caused by STREAM_CLOSED we should use a ClosedChannelException to better
    // mimic other transports and make it easier to reason about what exceptions to expect.
    if (cause instanceof Http2Exception && ((Http2Exception) cause).error() == Http2Error.STREAM_CLOSED) {
        return new ClosedChannelException().initCause(cause);
    }
    return cause;
}

// Rejects frames that are already bound to a different stream; releases the frame on failure.
private Http2StreamFrame validateStreamFrame(Http2StreamFrame frame) {
    if (frame.stream() != null && frame.stream() != stream) {
        String msgString = frame.toString();
        ReferenceCountUtil.release(frame);
        throw new IllegalArgumentException(
                "Stream " + frame.stream() + " must not be set on the frame: " + msgString);
    }
    return frame;
}

@Override
public void flush() {
    // If we are currently in the parent channel's read loop we should just ignore the flush.
    // We will ensure we trigger ctx.flush() after we processed all Channels later on and
    // so aggregate the flushes. This is done as ctx.flush() is expensive when as it may trigger an
    // write(...) or writev(...) operation on the socket.
    if (!writeDoneAndNoFlush || isParentReadInProgress()) {
        // There is nothing to flush so this is a NOOP.
        return;
    }
    try {
        flush0(parentContext());
    } finally {
        writeDoneAndNoFlush = false;
    }
}

@Override
public ChannelPromise voidPromise() {
    return unsafeVoidPromise;
}

@Override
public ChannelOutboundBuffer outboundBuffer() {
    // Always return null as we not use the ChannelOutboundBuffer and not even support it.
    return null;
}
}

/**
 * {@link ChannelConfig} so that the high and low writebuffer watermarks can reflect the outbound flow control
 * window, without having to create a new {@link WriteBufferWaterMark} object whenever the flow control window
 * changes.
 */
private final class Http2StreamChannelConfig extends DefaultChannelConfig {

    Http2StreamChannelConfig(Channel channel) {
        super(channel);
    }

    @Override
    public MessageSizeEstimator getMessageSizeEstimator() {
        return FlowControlledFrameSizeEstimator.INSTANCE;
    }

    @Override
    public ChannelConfig setMessageSizeEstimator(MessageSizeEstimator estimator) {
        throw new UnsupportedOperationException();
    }

    @Override
    public ChannelConfig setRecvByteBufAllocator(RecvByteBufAllocator allocator) {
        // Only extended handles are supported because reads are driven manually here.
        if (!(allocator.newHandle() instanceof RecvByteBufAllocator.ExtendedHandle)) {
            throw new IllegalArgumentException("allocator.newHandle() must return an object of type: " +
                    RecvByteBufAllocator.ExtendedHandle.class);
        }
        super.setRecvByteBufAllocator(allocator);
        return this;
    }
}

// Flushes the parent channel context; overridable by subclasses.
protected void flush0(ChannelHandlerContext ctx) {
    ctx.flush();
}

// Writes a message to the parent channel context and returns the write future.
protected ChannelFuture write0(ChannelHandlerContext ctx, Object msg) {
    ChannelPromise promise = ctx.newPromise();
    ctx.write(msg, promise);
    return promise;
}

// True while the parent channel is inside its read loop.
protected abstract boolean isParentReadInProgress();

// Queues this child channel for a deferred channelReadComplete once the parent finishes reading.
protected abstract void addChannelToReadCompletePendingQueue();

// The parent channel's handler context used for all outbound writes.
protected abstract ChannelHandlerContext parentContext();
}
package com.greenpepper.confluence.actions.server;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

import com.greenpepper.confluence.velocity.ConfluenceGreenPepper;
import com.greenpepper.server.GreenPepperServerException;
import com.greenpepper.server.domain.Project;
import com.greenpepper.server.domain.Repository;
import com.greenpepper.server.domain.RepositoryType;
import com.greenpepper.server.domain.component.ContentType;

/**
 * Confluence action backing the file-system repository registration screen:
 * lists, adds and removes FILE-type specification repositories.
 */
@SuppressWarnings("serial")
public class FileSystemRegistration extends GreenPepperServerAction {

    /** Repository type tag for file-system based specification repositories. */
    private static final RepositoryType FILE = RepositoryType.newInstance("FILE");

    private List<Repository> fileRepositories;
    private Repository newRepository;
    private String repositoryUid;
    private String newName;
    private String newBaseTestUrl;
    private String newProjectName;
    private boolean editMode;
    private LinkedList<Project> projects;

    /**
     * Loads all FILE-type specification repositories for display.
     *
     * @return always {@code SUCCESS}; failures are reported via action errors
     */
    public String doGetFileSystemRegistration() {
        if (!isServerReady()) {
            addActionError(ConfluenceGreenPepper.SERVER_NOCONFIGURATION);
            return SUCCESS;
        }
        try {
            setFileRepositories(getService().getAllSpecificationRepositories());
        } catch (GreenPepperServerException e) {
            addActionError(e.getId());
        }
        return SUCCESS;
    }

    /**
     * Registers a new file-system repository unless one with the same base test URL
     * already exists.
     */
    public String doAddFileSystem() {
        try {
            setFileRepositories(getService().getAllSpecificationRepositories());
            if (!pathAlreadyExists()) {
                getNewRepository().setProject(Project.newInstance(newProjectName));
                newRepository.setType(FILE);
                newRepository.setName(newName);
                newRepository.setContentType(ContentType.TEST);
                newRepository.setBaseUrl(newBaseTestUrl);
                newRepository.setBaseRepositoryUrl(newBaseTestUrl);
                newRepository.setBaseTestUrl(newBaseTestUrl);
                getService().registerRepository(newRepository);
                newRepository = null;
            }
        } catch (GreenPepperServerException e) {
            addActionError(e.getId());
        }
        return doGetFileSystemRegistration();
    }

    /** Removes the repository identified by {@link #repositoryUid}. */
    public String doRemoveFileSystem() {
        try {
            getService().removeRepository(repositoryUid);
        } catch (GreenPepperServerException e) {
            addActionError(e.getId());
        }
        return doGetFileSystemRegistration();
    }

    /**
     * Returns the FILE repositories, lazily loading them on first access.
     * <p>
     * FIX: the original condition was {@code fileRepositories != null}, which returned
     * {@code null} on first access and re-fetched on every later call — inverted with
     * respect to the lazy-init pattern used by {@link #pathAlreadyExists()} and
     * {@link #getProjects()}.
     */
    public List<Repository> getFileRepositories() {
        if (fileRepositories == null) {
            try {
                setFileRepositories(getService().getAllSpecificationRepositories());
            } catch (GreenPepperServerException e) {
                addActionError(e.getId());
            }
        }
        return fileRepositories;
    }

    /** Keeps only FILE-type repositories from the given list. */
    public void setFileRepositories(List<Repository> repositories) {
        fileRepositories = new ArrayList<Repository>();
        for (Repository repository : repositories) {
            if (repository.getType().equals(FILE)) {
                fileRepositories.add(repository);
            }
        }
    }

    public String getRepositoryUid() {
        return repositoryUid;
    }

    public void setRepositoryUid(String repositoryUid) {
        this.repositoryUid = repositoryUid;
    }

    public boolean isEditMode() {
        return editMode;
    }

    public void setEditMode(boolean editMode) {
        this.editMode = editMode;
    }

    public String getNewBaseTestUrl() {
        return newBaseTestUrl;
    }

    /** Normalizes the URL: trims whitespace and guarantees a trailing slash. */
    public void setNewBaseTestUrl(String newBaseTestUrl) {
        newBaseTestUrl = newBaseTestUrl.trim();
        if (!newBaseTestUrl.endsWith("/")) {
            newBaseTestUrl += "/";
        }
        this.newBaseTestUrl = newBaseTestUrl;
    }

    public String getNewName() {
        return newName;
    }

    public void setNewName(String newName) {
        this.newName = newName.trim();
    }

    public String getProjectName() {
        return newProjectName;
    }

    public void setProjectName(String projectName) {
        this.newProjectName = projectName;
    }

    /**
     * Lazily creates the repository being registered, with a generated UID of the form
     * {@code <siteTitle>-<projectName>-F<n>}.
     */
    public Repository getNewRepository() {
        if (newRepository != null) {
            return newRepository;
        }
        // NOTE(review): relies on fileRepositories having been populated first
        // (doAddFileSystem loads it before calling this); otherwise this NPEs.
        String uid = gpUtil.getSettingsManager().getGlobalSettings().getSiteTitle()
                + "-" + getProjectName() + "-F" + fileRepositories.size();
        newRepository = Repository.newInstance(uid);
        return newRepository;
    }

    /** True when a FILE repository with the same base test URL is already registered. */
    private boolean pathAlreadyExists() throws GreenPepperServerException {
        if (fileRepositories == null) {
            setFileRepositories(getService().getAllSpecificationRepositories());
        }
        for (Repository repo : fileRepositories) {
            if (repo.getBaseTestUrl().equals(newBaseTestUrl)) {
                return true;
            }
        }
        return false;
    }

    /** Returns all projects, cached after the first successful fetch. */
    @SuppressWarnings("unchecked")
    public LinkedList<Project> getProjects() {
        if (projects != null) {
            return projects;
        }
        try {
            projects = new LinkedList<Project>(getService().getAllProjects());
        } catch (GreenPepperServerException e) {
            addActionError(e.getId());
        }
        return projects;
    }
}
/*
 * Copyright (c) 2017 chibchasoft.com
 * ------------------------------------------------------
 * All rights reserved. This program and the accompanying materials are made
 * available under the terms of the Apache License v2.0 which accompanies
 * this distribution.
 *
 * The Apache License v2.0 is available at
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Author <a href="mailto:jvelez@chibchasoft.com">Juan Velez</a>
 */
package com.chibchasoft.vertx.verticle.deployment;

import java.util.concurrent.atomic.AtomicInteger;

import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;

import org.junit.Test;

import io.vertx.core.DeploymentOptions;
import io.vertx.test.core.VertxTestBase;

/**
 * Test DependentVerticleDeployer.
 *
 * <p>Each test builds a {@code DependentsDeployment} tree, deploys the
 * {@code DependentVerticleDeployer} verticle and asserts, inside the async deploy
 * callback, the success/failure state recorded on each configuration.</p>
 *
 * @author <a href="mailto:jvelez@chibchasoft.com">Juan Velez</a>
 */
public class DependentVerticleDeployerTest extends VertxTestBase {

    /**
     * In this test, the DependentVerticleDeployer is deployed but itself has nothing to deploy
     */
    @Test
    public void testNothingToDeploy() {
        DependentVerticleDeployer dependentVerticle = new DependentVerticleDeployer();
        assertNull(dependentVerticle.getDependentsDeployment());
        vertx.deployVerticle(dependentVerticle, ar -> {
            // Deploying with no dependents configured must still succeed.
            assertTrue(ar.succeeded());
            assertNotNull(ar.result());
            assertNull(dependentVerticle.getDependentsDeployment());
            testComplete();
        });
        await();
    }

    /**
     * In this test, the DependentVerticleDeployer deploys a single verticle that has no dependents.
     * Configuration via Json
     */
    @Test
    public void testOneVerticleWithNoDependentsUsingJson() {
        // Build the DependentsDeployment from its JSON representation.
        JsonObject config = new JsonObject();
        config.put("dependents", new JsonArray());
        config.put("name", TestVerticle.class.getCanonicalName());
        JsonArray configurations = new JsonArray();
        configurations.add(config);
        JsonObject depDeploymentAsJson = new JsonObject();
        depDeploymentAsJson.put("configurations", configurations);
        DependentsDeployment depDeployment = new DependentsDeployment(depDeploymentAsJson);

        DependentVerticleDeployer dependentVerticle = new DependentVerticleDeployer();
        dependentVerticle.setDependentsDeployment(depDeployment);
        vertx.deployVerticle(dependentVerticle, ar -> {
            assertTrue(ar.succeeded());
            assertNotNull(ar.result());
            DeploymentConfiguration testVerticleCfg = depDeployment.getConfigurations().get(0);
            assertTrue(testVerticleCfg.succeeded()
                    && testVerticleCfg.failCause() == null
                    && testVerticleCfg.getDeploymentID() != null);
            testComplete();
        });
        await();
    }

    /**
     * In this test, the DependentVerticleDeployer deploys a single verticle that has no dependents
     */
    @Test
    public void testOneVerticleWithNoDependents() {
        DeploymentConfiguration testVerticleCfg = new DeploymentConfiguration();
        testVerticleCfg.setName(TestVerticle.class.getCanonicalName());
        assertNull(testVerticleCfg.getDeploymentID());

        DependentsDeployment depDeployment = new DependentsDeployment();
        depDeployment.getConfigurations().add(testVerticleCfg);

        DependentVerticleDeployer dependentVerticle = new DependentVerticleDeployer();
        dependentVerticle.setDependentsDeployment(depDeployment);
        vertx.deployVerticle(dependentVerticle, ar -> {
            assertTrue(ar.succeeded());
            assertNotNull(ar.result());
            // The configuration must record success and the deployment id.
            assertTrue(testVerticleCfg.succeeded()
                    && testVerticleCfg.failCause() == null
                    && testVerticleCfg.getDeploymentID() != null);
            testComplete();
        });
        await();
    }

    /**
     * In this test, the DependentVerticleDeployer deploys a single non-existent verticle that
     * has no dependents
     */
    @Test
    public void testOneNonExistentVerticleWithNoDependents() {
        DeploymentConfiguration iDontExistVerticleCfg = new DeploymentConfiguration();
        iDontExistVerticleCfg.setName("IDon'tExist");
        assertNull(iDontExistVerticleCfg.getDeploymentID());

        DependentsDeployment depDeployment = new DependentsDeployment();
        depDeployment.getConfigurations().add(iDontExistVerticleCfg);

        DependentVerticleDeployer dependentVerticle = new DependentVerticleDeployer();
        dependentVerticle.setDependentsDeployment(depDeployment);
        vertx.deployVerticle(dependentVerticle, ar -> {
            // A failed dependent must fail the whole deployment.
            assertFalse(ar.succeeded());
            assertTrue(iDontExistVerticleCfg.failed()
                    && iDontExistVerticleCfg.failCause() != null
                    && iDontExistVerticleCfg.getDeploymentID() == null);
            testComplete();
        });
        await();
    }

    /**
     * In this test, the DependentVerticleDeployer deploys a single verticle with several instances
     * that has no dependents
     */
    @Test
    public void testOneVerticleSeveralInstancesWithNoDependents() {
        DeploymentConfiguration testVerticleCfg = new DeploymentConfiguration();
        testVerticleCfg.setName(TestVerticle.class.getCanonicalName());
        testVerticleCfg.setDeploymentOptions(new DeploymentOptions().setInstances(3));
        assertNull(testVerticleCfg.getDeploymentID());

        DependentsDeployment depDeployment = new DependentsDeployment();
        depDeployment.getConfigurations().add(testVerticleCfg);

        DependentVerticleDeployer dependentVerticle = new DependentVerticleDeployer();
        dependentVerticle.setDependentsDeployment(depDeployment);
        // Each TestVerticle instance announces itself on this event-bus address.
        AtomicInteger deployCount = new AtomicInteger();
        vertx.eventBus().consumer("TestVerticleStarted", m -> deployCount.incrementAndGet());
        vertx.deployVerticle(dependentVerticle, ar -> {
            assertTrue(ar.succeeded());
            assertNotNull(ar.result());
            assertTrue(testVerticleCfg.succeeded()
                    && testVerticleCfg.failCause() == null
                    && testVerticleCfg.getDeploymentID() != null);
            // The DependentVerticleDeployer and TestVerticle
            assertEquals(2, vertx.deploymentIDs().size());
            waitUntil(() -> deployCount.get() == 3);
            testComplete();
        });
        await();
    }

    /**
     * In this test, the DependentVerticleDeployer deploys the same verticle twice but none
     * of the verticles has dependents
     */
    @Test
    public void testSameVerticleTwiceWithNoDependents() {
        DeploymentConfiguration testVerticleCfg1 = new DeploymentConfiguration();
        testVerticleCfg1.setName(TestVerticle.class.getCanonicalName());
        assertNull(testVerticleCfg1.getDeploymentID());

        DeploymentConfiguration testVerticleCfg2 = new DeploymentConfiguration();
        testVerticleCfg2.setName(TestVerticle.class.getCanonicalName());
        assertNull(testVerticleCfg2.getDeploymentID());

        DependentsDeployment depDeployment = new DependentsDeployment();
        depDeployment.getConfigurations().add(testVerticleCfg1);
        depDeployment.getConfigurations().add(testVerticleCfg2);

        DependentVerticleDeployer dependentVerticle = new DependentVerticleDeployer();
        dependentVerticle.setDependentsDeployment(depDeployment);
        AtomicInteger deployCount = new AtomicInteger();
        vertx.eventBus().consumer("TestVerticleStarted", m -> deployCount.incrementAndGet());
        vertx.deployVerticle(dependentVerticle, ar -> {
            assertTrue(ar.succeeded());
            assertNotNull(ar.result());
            assertTrue(testVerticleCfg1.succeeded()
                    && testVerticleCfg1.failCause() == null
                    && testVerticleCfg1.getDeploymentID() != null);
            assertTrue(testVerticleCfg2.succeeded()
                    && testVerticleCfg2.failCause() == null
                    && testVerticleCfg2.getDeploymentID() != null);
            // The DependentVerticleDeployer and TestVerticle twice
            assertEquals(3, vertx.deploymentIDs().size());
            waitUntil(() -> deployCount.get() == 2);
            testComplete();
        });
        await();
    }

    /**
     * In this test, the DependentVerticleDeployer deploys two verticles (each with no dependents)
     * but first one fails
     */
    @Test
    public void testTwoVerticlesWithNoDependentsDeployedInSequenceButFirstOneFails() {
        DeploymentConfiguration iDontExistVerticleCfg = new DeploymentConfiguration();
        iDontExistVerticleCfg.setName("IDon'tExist");
        assertNull(iDontExistVerticleCfg.getDeploymentID());

        DeploymentConfiguration testVerticleCfg = new DeploymentConfiguration();
        testVerticleCfg.setName(TestVerticle.class.getCanonicalName());
        assertNull(testVerticleCfg.getDeploymentID());

        DependentsDeployment depDeployment = new DependentsDeployment();
        depDeployment.getConfigurations().add(iDontExistVerticleCfg);
        depDeployment.getConfigurations().add(testVerticleCfg);

        DependentVerticleDeployer dependentVerticle = new DependentVerticleDeployer();
        dependentVerticle.setDependentsDeployment(depDeployment);
        vertx.deployVerticle(dependentVerticle, ar -> {
            assertFalse(ar.succeeded());
            assertNull(ar.result());
            assertTrue(iDontExistVerticleCfg.failed()
                    && iDontExistVerticleCfg.failCause() != null
                    && iDontExistVerticleCfg.failCause().toString().toLowerCase().contains("classnotfoundexception")
                    && iDontExistVerticleCfg.getDeploymentID() == null);
            // The second verticle may still complete its own deployment asynchronously.
            waitUntil(() -> testVerticleCfg.succeeded()
                    && testVerticleCfg.failCause() == null
                    && testVerticleCfg.getDeploymentID() != null);
            testComplete();
        });
        await();
    }

    /**
     * In this test, the DependentVerticleDeployer deploys two verticles (each with no dependents)
     * in sequence but last one fails
     */
    @Test
    public void testTwoVerticlesWithNoDependentsDeployedInSequenceButLastOneFails() {
        DeploymentConfiguration iDontExistVerticleCfg = new DeploymentConfiguration();
        iDontExistVerticleCfg.setName("IDon'tExist");
        assertNull(iDontExistVerticleCfg.getDeploymentID());

        DeploymentConfiguration testVerticleCfg = new DeploymentConfiguration();
        testVerticleCfg.setName(TestVerticle.class.getCanonicalName());
        assertNull(testVerticleCfg.getDeploymentID());

        DependentsDeployment depDeployment = new DependentsDeployment();
        depDeployment.getConfigurations().add(testVerticleCfg);
        depDeployment.getConfigurations().add(iDontExistVerticleCfg);

        DependentVerticleDeployer dependentVerticle = new DependentVerticleDeployer();
        dependentVerticle.setDependentsDeployment(depDeployment);
        vertx.deployVerticle(dependentVerticle, ar -> {
            assertFalse(ar.succeeded());
            assertNull(ar.result());
            assertTrue(iDontExistVerticleCfg.failed()
                    && iDontExistVerticleCfg.failCause() != null
                    && iDontExistVerticleCfg.failCause().toString().toLowerCase().contains("classnotfoundexception")
                    && iDontExistVerticleCfg.getDeploymentID() == null);
            waitUntil(() -> testVerticleCfg.succeeded()
                    && testVerticleCfg.failCause() == null
                    && testVerticleCfg.getDeploymentID() != null);
            testComplete();
        });
        await();
    }

    /**
     * In this test, the DependentVerticleDeployer deploys a single verticle that has one dependent
     */
    @Test
    public void testOneVerticleWithOneDependent() {
        // Inner deployment: the dependent verticle deployed after its parent.
        DeploymentConfiguration dependentTestVerticleCfg = new DeploymentConfiguration();
        dependentTestVerticleCfg.setName(DependentTestVerticle.class.getCanonicalName());
        assertNull(dependentTestVerticleCfg.getDeploymentID());

        DependentsDeployment innerDepDeployment = new DependentsDeployment();
        innerDepDeployment.getConfigurations().add(dependentTestVerticleCfg);

        DeploymentConfiguration testVerticleCfg = new DeploymentConfiguration();
        testVerticleCfg.setName(TestVerticle.class.getCanonicalName());
        testVerticleCfg.getDependents().add(innerDepDeployment);
        assertNull(testVerticleCfg.getDeploymentID());

        DependentsDeployment depDeployment = new DependentsDeployment();
        depDeployment.getConfigurations().add(testVerticleCfg);

        DependentVerticleDeployer dependentVerticle = new DependentVerticleDeployer();
        dependentVerticle.setDependentsDeployment(depDeployment);
        vertx.deployVerticle(dependentVerticle, ar -> {
            assertTrue(ar.succeeded());
            assertNotNull(ar.result());
            assertTrue(dependentTestVerticleCfg.succeeded()
                    && dependentTestVerticleCfg.failCause() == null
                    && dependentTestVerticleCfg.getDeploymentID() != null);
            assertTrue(testVerticleCfg.succeeded()
                    && testVerticleCfg.failCause() == null
                    && testVerticleCfg.getDeploymentID() != null);
            testComplete();
        });
        await();
    }

    /**
     * In this test, the DependentVerticleDeployer deploys a single verticle that has one dependent
     * but such dependent verticle does not exist
     */
    @Test
    public void testOneVerticleWithNonExistentDependent() {
        DeploymentConfiguration iDontExistVerticleCfg = new DeploymentConfiguration();
        iDontExistVerticleCfg.setName("IDon'tExist");
        assertNull(iDontExistVerticleCfg.getDeploymentID());

        DependentsDeployment innerDepDeployment = new DependentsDeployment();
        innerDepDeployment.getConfigurations().add(iDontExistVerticleCfg);

        DeploymentConfiguration testVerticleCfg = new DeploymentConfiguration();
        testVerticleCfg.setName(TestVerticle.class.getCanonicalName());
        testVerticleCfg.getDependents().add(innerDepDeployment);
        assertNull(testVerticleCfg.getDeploymentID());

        DependentsDeployment depDeployment = new DependentsDeployment();
        depDeployment.getConfigurations().add(testVerticleCfg);

        DependentVerticleDeployer dependentVerticle = new DependentVerticleDeployer();
        dependentVerticle.setDependentsDeployment(depDeployment);
        vertx.deployVerticle(dependentVerticle, ar -> {
            // Parent deploys fine, but the failing dependent fails the overall deployment.
            assertFalse(ar.succeeded());
            assertNull(ar.result());
            assertTrue(testVerticleCfg.succeeded()
                    && testVerticleCfg.failCause() == null
                    && testVerticleCfg.getDeploymentID() != null);
            assertTrue(iDontExistVerticleCfg.failed()
                    && iDontExistVerticleCfg.failCause() != null
                    && iDontExistVerticleCfg.failCause().toString().toLowerCase().contains("classnotfoundexception")
                    && iDontExistVerticleCfg.getDeploymentID() == null);
            testComplete();
        });
        await();
    }
}
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2014 Red Hat, Inc., and individual contributors
 * as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.undertow.servlet.core;

import java.io.File;
import java.util.List;

import javax.servlet.MultipartConfigElement;
import javax.servlet.Servlet;
import javax.servlet.ServletException;
import javax.servlet.SingleThreadModel;
import javax.servlet.UnavailableException;

import io.undertow.server.handlers.form.FormEncodedDataDefinition;
import io.undertow.server.handlers.form.FormParserFactory;
import io.undertow.server.handlers.form.MultiPartParserDefinition;
import io.undertow.server.handlers.resource.ResourceChangeListener;
import io.undertow.server.handlers.resource.ResourceManager;
import io.undertow.servlet.UndertowServletMessages;
import io.undertow.servlet.api.DeploymentManager;
import io.undertow.servlet.api.InstanceFactory;
import io.undertow.servlet.api.InstanceHandle;
import io.undertow.servlet.api.LifecycleInterceptor;
import io.undertow.servlet.api.ServletInfo;
import io.undertow.servlet.spec.ServletConfigImpl;
import io.undertow.servlet.spec.ServletContextImpl;

/**
 * Manager for a servlet's lifecycle: lazy instantiation, multipart/form parsing
 * configuration, and start/stop handling. Instantiation is delegated to an
 * {@link InstanceStrategy}, chosen per servlet depending on whether it implements
 * {@link SingleThreadModel}.
 *
 * @author Stuart Douglas
 */
public class ManagedServlet implements Lifecycle {

    private final ServletInfo servletInfo;
    private final ServletContextImpl servletContext;

    // volatile: read without a lock in getServlet() (double-checked locking below).
    private volatile boolean started = false;
    private final InstanceStrategy instanceStrategy;
    private volatile boolean permanentlyUnavailable = false;

    // -1 means "no limit configured"; set from the multipart config in setupMultipart().
    private long maxRequestSize;
    private FormParserFactory formParserFactory;
    private MultipartConfigElement multipartConfig;

    public ManagedServlet(final ServletInfo servletInfo, final ServletContextImpl servletContext) {
        this.servletInfo = servletInfo;
        this.servletContext = servletContext;
        // SingleThreadModel servlets need one instance per request; all others share one.
        if (SingleThreadModel.class.isAssignableFrom(servletInfo.getServletClass())) {
            instanceStrategy = new SingleThreadModelPoolStrategy(servletInfo.getInstanceFactory(), servletInfo, servletContext);
        } else {
            instanceStrategy = new DefaultInstanceStrategy(servletInfo.getInstanceFactory(), servletInfo, servletContext);
        }
        // NOTE(review): overridable public method invoked from the constructor —
        // subclasses overriding setupMultipart() run before their own fields are set.
        setupMultipart(servletContext);
    }

    /**
     * Builds the {@link FormParserFactory} for this servlet. Multipart parsing is only
     * enabled when a multipart config exists (either on the servlet or as the
     * deployment default); otherwise only form-encoded data is parsed.
     */
    public void setupMultipart(ServletContextImpl servletContext) {
        FormEncodedDataDefinition formDataParser = new FormEncodedDataDefinition()
                .setDefaultEncoding(servletContext.getDeployment().getDeploymentInfo().getDefaultEncoding());
        MultipartConfigElement multipartConfig = servletInfo.getMultipartConfig();
        if (multipartConfig == null) {
            multipartConfig = servletContext.getDeployment().getDeploymentInfo().getDefaultMultipartConfig();
        }
        this.multipartConfig = multipartConfig;
        if (multipartConfig != null) {
            //todo: fileSizeThreshold
            MultipartConfigElement config = multipartConfig;
            if (config.getMaxRequestSize() != -1) {
                maxRequestSize = config.getMaxRequestSize();
            } else {
                maxRequestSize = -1;
            }
            // Resolve the upload temp directory: absolute location, location relative to
            // the deployment temp dir, or the deployment temp dir itself.
            final File tempDir;
            if (config.getLocation() == null || config.getLocation().isEmpty()) {
                tempDir = servletContext.getDeployment().getDeploymentInfo().getTempDir();
            } else {
                String location = config.getLocation();
                File locFile = new File(location);
                if (locFile.isAbsolute()) {
                    tempDir = locFile;
                } else {
                    tempDir = new File(servletContext.getDeployment().getDeploymentInfo().getTempDir(), location);
                }
            }
            MultiPartParserDefinition multiPartParserDefinition = new MultiPartParserDefinition(tempDir);
            if (config.getMaxFileSize() > 0) {
                multiPartParserDefinition.setMaxIndividualFileSize(config.getMaxFileSize());
            }
            multiPartParserDefinition.setDefaultEncoding(servletContext.getDeployment().getDeploymentInfo().getDefaultEncoding());

            formParserFactory = FormParserFactory.builder(false)
                    .addParser(formDataParser)
                    .addParser(multiPartParserDefinition)
                    .build();
        } else {
            //no multipart config we don't allow multipart requests
            formParserFactory = FormParserFactory.builder(false).addParser(formDataParser).build();
            maxRequestSize = -1;
        }
    }

    public synchronized void start() throws ServletException {

    }

    /**
     * Eagerly instantiates the servlet when it is marked load-on-startup. A permanent
     * {@link UnavailableException} marks the servlet unavailable for good.
     */
    public void createServlet() throws ServletException {
        if (permanentlyUnavailable) {
            return;
        }
        try {
            if (!started && servletInfo.getLoadOnStartup() != null && servletInfo.getLoadOnStartup() >= 0) {
                instanceStrategy.start();
                started = true;
            }
        } catch (UnavailableException e) {
            if (e.isPermanent()) {
                permanentlyUnavailable = true;
                stop();
            }
        }
    }

    public synchronized void stop() {
        if (started) {
            instanceStrategy.stop();
        }
        started = false;
    }

    @Override
    public boolean isStarted() {
        return started;
    }

    public boolean isPermanentlyUnavailable() {
        return permanentlyUnavailable;
    }

    public void setPermanentlyUnavailable(final boolean permanentlyUnavailable) {
        this.permanentlyUnavailable = permanentlyUnavailable;
    }

    /**
     * Returns a handle to the servlet instance, lazily starting it on first use
     * (double-checked locking on the volatile {@code started} flag).
     *
     * @throws ServletException if the deployment is not in STARTED state or the
     *                          instance cannot be created
     */
    public InstanceHandle<? extends Servlet> getServlet() throws ServletException {
        if (servletContext.getDeployment().getDeploymentState() != DeploymentManager.State.STARTED) {
            throw UndertowServletMessages.MESSAGES.deploymentStopped(servletContext.getDeployment().getDeploymentInfo().getDeploymentName());
        }
        if (!started) {
            synchronized (this) {
                if (!started) {
                    instanceStrategy.start();
                    started = true;
                }
            }
        }
        return instanceStrategy.getServlet();
    }

    public ServletInfo getServletInfo() {
        return servletInfo;
    }

    public long getMaxRequestSize() {
        return maxRequestSize;
    }

    public FormParserFactory getFormParserFactory() {
        return formParserFactory;
    }

    public MultipartConfigElement getMultipartConfig() {
        return multipartConfig;
    }

    /**
     * interface used to abstract the difference between single thread model servlets and normal servlets
     */
    interface InstanceStrategy {
        void start() throws ServletException;

        void stop();

        InstanceHandle<? extends Servlet> getServlet() throws ServletException;
    }

    /**
     * The default servlet pooling strategy that just uses a single instance for all requests
     */
    private static class DefaultInstanceStrategy implements InstanceStrategy {

        private final InstanceFactory<? extends Servlet> factory;
        private final ServletInfo servletInfo;
        private final ServletContextImpl servletContext;
        private volatile InstanceHandle<? extends Servlet> handle;
        private volatile Servlet instance;
        // Set only when the servlet itself wants resource-change notifications.
        private ResourceChangeListener changeListener;

        DefaultInstanceStrategy(final InstanceFactory<? extends Servlet> factory, final ServletInfo servletInfo, final ServletContextImpl servletContext) {
            this.factory = factory;
            this.servletInfo = servletInfo;
            this.servletContext = servletContext;
        }

        public synchronized void start() throws ServletException {
            try {
                handle = factory.createInstance();
            } catch (Exception e) {
                throw UndertowServletMessages.MESSAGES.couldNotInstantiateComponent(servletInfo.getName(), e);
            }
            instance = handle.getInstance();
            // Run lifecycle interceptors around init(ServletConfig).
            new LifecyleInterceptorInvocation(servletContext.getDeployment().getDeploymentInfo().getLifecycleInterceptors(), servletInfo, instance, new ServletConfigImpl(servletInfo, servletContext)).proceed();

            //if a servlet implements FileChangeCallback it will be notified of file change events
            final ResourceManager resourceManager = servletContext.getDeployment().getDeploymentInfo().getResourceManager();
            if (instance instanceof ResourceChangeListener && resourceManager.isResourceChangeListenerSupported()) {
                resourceManager.registerResourceChangeListener(changeListener = (ResourceChangeListener) instance);
            }
        }

        public synchronized void stop() {
            if (handle != null) {
                final ResourceManager resourceManager = servletContext.getDeployment().getDeploymentInfo().getResourceManager();
                if (changeListener != null) {
                    resourceManager.removeResourceChangeListener(changeListener);
                }
                invokeDestroy();
                handle.release();
            }
        }

        // Runs lifecycle interceptors around destroy(); a ServletException here is
        // rethrown unchecked because stop() cannot declare it.
        private void invokeDestroy() {
            List<LifecycleInterceptor> interceptors = servletContext.getDeployment().getDeploymentInfo().getLifecycleInterceptors();
            try {
                new LifecyleInterceptorInvocation(interceptors, servletInfo, instance).proceed();
            } catch (ServletException e) {
                throw new RuntimeException(e);
            }
        }

        public InstanceHandle<? extends Servlet> getServlet() {
            // Shared instance: release() is a no-op, destroy happens in stop().
            return new InstanceHandle<Servlet>() {
                @Override
                public Servlet getInstance() {
                    return instance;
                }

                @Override
                public void release() {

                }
            };
        }
    }

    /**
     * pooling strategy for single thread model servlet
     */
    private static class SingleThreadModelPoolStrategy implements InstanceStrategy {

        private final InstanceFactory<? extends Servlet> factory;
        private final ServletInfo servletInfo;
        private final ServletContextImpl servletContext;

        private SingleThreadModelPoolStrategy(final InstanceFactory<? extends Servlet> factory, final ServletInfo servletInfo, final ServletContextImpl servletContext) {
            this.factory = factory;
            this.servletInfo = servletInfo;
            this.servletContext = servletContext;
        }

        @Override
        public void start() {

        }

        @Override
        public void stop() {

        }

        @Override
        public InstanceHandle<? extends Servlet> getServlet() throws ServletException {
            // A fresh instance per request; init/destroy run per handle.
            final InstanceHandle<? extends Servlet> instanceHandle;
            final Servlet instance;
            //TODO: pooling
            try {
                instanceHandle = factory.createInstance();
            } catch (Exception e) {
                throw UndertowServletMessages.MESSAGES.couldNotInstantiateComponent(servletInfo.getName(), e);
            }
            instance = instanceHandle.getInstance();
            new LifecyleInterceptorInvocation(servletContext.getDeployment().getDeploymentInfo().getLifecycleInterceptors(), servletInfo, instance, new ServletConfigImpl(servletInfo, servletContext)).proceed();

            return new InstanceHandle<Servlet>() {
                @Override
                public Servlet getInstance() {
                    return instance;
                }

                @Override
                public void release() {
                    instance.destroy();
                    instanceHandle.release();
                }
            };
        }
    }
}
package alien4cloud.rest.topology;

import java.util.Map;

import javax.annotation.Resource;

import lombok.extern.slf4j.Slf4j;

import org.hibernate.validator.constraints.NotBlank;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;

import alien4cloud.audit.annotation.Audit;
import alien4cloud.component.ICSARRepositorySearchService;
import alien4cloud.csar.services.CsarService;
import alien4cloud.dao.IGenericSearchDAO;
import alien4cloud.dao.model.FacetedSearchResult;
import alien4cloud.exception.AlreadyExistException;
import alien4cloud.exception.DeleteReferencedObjectException;
import alien4cloud.exception.InvalidArgumentException;
import alien4cloud.exception.NotFoundException;
import alien4cloud.model.components.CSARDependency;
import alien4cloud.model.components.Csar;
import alien4cloud.model.components.IncompatiblePropertyDefinitionException;
import alien4cloud.model.components.IndexedNodeType;
import alien4cloud.model.templates.TopologyTemplate;
import alien4cloud.model.topology.NodeTemplate;
import alien4cloud.model.topology.SubstitutionMapping;
import alien4cloud.model.topology.SubstitutionTarget;
import alien4cloud.model.topology.Topology;
import alien4cloud.rest.model.RestResponse;
import alien4cloud.rest.model.RestResponseBuilder;
import alien4cloud.topology.TopologyDTO;
import alien4cloud.topology.TopologyService;
import alien4cloud.topology.TopologyServiceCore;
import alien4cloud.topology.TopologyTemplateVersionService;

import com.google.common.collect.Maps;

import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;

/**
 * REST endpoints that manage the substitution mapping of a topology template:
 * the substitution type itself, and the exposure of node capabilities and
 * requirements as capabilities/requirements of that type.
 */
@Slf4j
@RestController
@RequestMapping({"/rest/topologies", "/rest/v1/topologies", "/rest/latest/topologies"})
public class TopologySubstitutionsController {
    @Resource(name = "alien-es-dao")
    private IGenericSearchDAO alienDAO;
    @Resource
    private TopologyService topologyService;
    @Resource
    private TopologyServiceCore topologyServiceCore;
    @Resource
    private ICSARRepositorySearchService csarRepoSearchService;
    @Resource
    private TopologyTemplateVersionService topologyTemplateVersionService;
    @Resource
    private CsarService csarService;

    /**
     * Loads the topology and enforces the common preconditions shared by every
     * endpoint of this controller: edition authorization and not-released state.
     *
     * @param topologyId id of the topology to load
     * @return the topology, ready for edition
     */
    private Topology getTopologyForEdition(String topologyId) {
        Topology topology = topologyServiceCore.getOrFail(topologyId);
        topologyService.checkEditionAuthorizations(topology);
        topologyService.throwsErrorIfReleased(topology);
        return topology;
    }

    /**
     * Returns the topology's substitution mapping, failing with {@link NotFoundException}
     * when no substitution type has been defined yet.
     */
    private SubstitutionMapping getSubstitutionMappingOrFail(Topology topology) {
        if (topology.getSubstitutionMapping() == null || topology.getSubstitutionMapping().getSubstitutionType() == null) {
            throw new NotFoundException("No substitution type has been found");
        }
        return topology.getSubstitutionMapping();
    }

    /** Persists the topology and propagates the change to the generated substitution type. */
    private RestResponse<TopologyDTO> saveAndUpdateType(Topology topology) {
        topologyServiceCore.save(topology);
        // update the type
        topologyServiceCore.updateSubstitutionType(topology);
        return RestResponseBuilder.<TopologyDTO> builder().data(topologyService.buildTopologyDTO(topology)).build();
    }

    @ApiOperation(value = "Define the type this topology can substitute. When this method is called, a new type is created : it is derived from this one.", notes = "Returns a topology with it's details. Role required [ ARCHITECT ]")
    @RequestMapping(value = "/{topologyId}/substitutions/type", method = RequestMethod.PUT, produces = MediaType.APPLICATION_JSON_VALUE)
    @Audit
    @ResponseStatus(value = HttpStatus.CREATED)
    public RestResponse<TopologyDTO> setSubstitutionType(@PathVariable String topologyId, @NotBlank @RequestParam("elementId") String elementId) {
        Topology topology = getTopologyForEdition(topologyId);
        if (!topology.getDelegateType().equals(TopologyTemplate.class.getSimpleName().toLowerCase())) {
            throw new InvalidArgumentException("This operation is only allowed for topology templates");
        }
        if (topology.getSubstitutionMapping() == null) {
            topology.setSubstitutionMapping(new SubstitutionMapping());
        }
        // If the type already exists in the topology dependencies there is no choice for its version.
        IndexedNodeType nodeType = csarRepoSearchService.getElementInDependencies(IndexedNodeType.class, elementId, topology.getDependencies());
        if (nodeType == null) {
            // The node type doesn't exist in this topology's dependencies:
            // find the latest version of this component and use it as default.
            Map<String, String[]> filters = Maps.newHashMap();
            filters.put("elementId", new String[] { elementId });
            FacetedSearchResult result = csarRepoSearchService.search(IndexedNodeType.class, null, 0, Integer.MAX_VALUE, filters, false);
            if (result.getTotalResults() > 0) {
                nodeType = (IndexedNodeType) result.getData()[0];
            } else {
                // FIX: the previous code fell through with a null nodeType and failed below
                // with a NullPointerException; report a proper 404 instead.
                throw new NotFoundException("The node type <" + elementId + "> can not be found in the repository");
            }
            // add the type's archive in dependencies
            topology.getDependencies().add(new CSARDependency(nodeType.getArchiveName(), nodeType.getArchiveVersion()));
        }
        topology.getSubstitutionMapping().setSubstitutionType(nodeType);
        return saveAndUpdateType(topology);
    }

    @ApiOperation(value = "Remove the substitution type, delete the corresponding type (if not already used)", notes = "Returns a topology with it's details. Role required [ ARCHITECT ]")
    @RequestMapping(value = "/{topologyId}/substitutions/type", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
    @Audit
    public RestResponse<TopologyDTO> removeSubstitution(@PathVariable String topologyId) {
        Topology topology = getTopologyForEdition(topologyId);
        SubstitutionMapping mapping = getSubstitutionMappingOrFail(topology);
        IndexedNodeType substitutionType = mapping.getSubstitutionType();
        // The generated type lives in its own CSAR: it can only be deleted when nothing else uses it.
        Csar csar = csarService.getTopologySubstitutionCsar(topologyId);
        Topology[] topologies = csarService.getDependantTopologies(csar.getName(), csar.getVersion());
        if (topologies != null) {
            for (Topology topologyThatUseCsar : topologies) {
                if (!topologyThatUseCsar.getId().equals(topologyId)) {
                    throw new DeleteReferencedObjectException(
                            "The substitution can not be removed since it's type is already used in at least another topology");
                }
            }
        }
        Csar[] dependantCsars = csarService.getDependantCsars(csar.getName(), csar.getVersion());
        if (dependantCsars != null && dependantCsars.length > 0) {
            // FIX: typo in user-facing message ("deendency" -> "dependency")
            throw new DeleteReferencedObjectException("The substitution can not be removed since it's a dependency for another csar");
        }
        topologyService.unloadType(topology, new String[] { substitutionType.getElementId() });
        topology.setSubstitutionMapping(null);
        topologyServiceCore.save(topology);
        // unset the substitution topologyId on the csar
        csar.setSubstitutionTopologyId(null);
        alienDAO.save(csar);
        // delete the CSAR and the type
        csarService.deleteCsar(csar.getId());
        return RestResponseBuilder.<TopologyDTO> builder().data(topologyService.buildTopologyDTO(topology)).build();
    }

    @ApiOperation(value = "Expose the given capability as a capability for the substitution type associated with this topology.", notes = "Role required [ ARCHITECT ]")
    @RequestMapping(value = "/{topologyId:.+}/substitutions/capabilities/{substitutionCapabilityId}", method = RequestMethod.PUT, produces = MediaType.APPLICATION_JSON_VALUE)
    @ResponseStatus(value = HttpStatus.CREATED)
    @Audit
    public RestResponse<TopologyDTO> exposeCapability(
            @ApiParam(value = "The topology id.", required = true) @NotBlank @PathVariable final String topologyId,
            @ApiParam(value = "The substitution capability name.", required = true) @NotBlank @PathVariable final String substitutionCapabilityId,
            @ApiParam(value = "The node template id.", required = true) @NotBlank @RequestParam("nodeTemplateName") final String nodeTemplateName,
            @ApiParam(value = "The source node capability id.", required = true) @NotBlank @RequestParam("capabilityId") final String capabilityId)
            throws IncompatiblePropertyDefinitionException {
        Topology topology = getTopologyForEdition(topologyId);
        if (topology.getNodeTemplates() == null || !topology.getNodeTemplates().containsKey(nodeTemplateName)) {
            throw new NotFoundException("Node " + nodeTemplateName + " do not exist");
        }
        NodeTemplate nodeTemplate = topology.getNodeTemplates().get(nodeTemplateName);
        if (nodeTemplate.getCapabilities() == null || !nodeTemplate.getCapabilities().containsKey(capabilityId)) {
            throw new NotFoundException("Capability " + capabilityId + " do not exist for node " + nodeTemplateName);
        }
        SubstitutionMapping mapping = getSubstitutionMappingOrFail(topology);
        Map<String, SubstitutionTarget> substitutionCapabilities = mapping.getCapabilities();
        if (substitutionCapabilities == null) {
            substitutionCapabilities = Maps.newHashMap();
            mapping.setCapabilities(substitutionCapabilities);
        } else if (substitutionCapabilities.containsKey(substitutionCapabilityId)) {
            // ensure name unicity
            throw new AlreadyExistException(String.format("The substitution capability <%s> already exists", substitutionCapabilityId));
        }
        substitutionCapabilities.put(substitutionCapabilityId, new SubstitutionTarget(nodeTemplateName, capabilityId));
        return saveAndUpdateType(topology);
    }

    @ApiOperation(value = "Update the substitution capability (typically change it's name).", notes = "Role required [ ARCHITECT ]")
    @RequestMapping(value = "/{topologyId:.+}/substitutions/capabilities/{substitutionCapabilityId}", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
    @Audit
    public RestResponse<TopologyDTO> updateSubstitutionCapability(
            @ApiParam(value = "The topology id.", required = true) @NotBlank @PathVariable final String topologyId,
            @ApiParam(value = "The substitution capability name.", required = true) @NotBlank @PathVariable final String substitutionCapabilityId,
            @ApiParam(value = "The new capability name.", required = true) @NotBlank @RequestParam("newCapabilityId") final String newCapabilityId)
            throws IncompatiblePropertyDefinitionException {
        Topology topology = getTopologyForEdition(topologyId);
        Map<String, SubstitutionTarget> substitutionCapabilities = getSubstitutionMappingOrFail(topology).getCapabilities();
        return updateSubstitutionKey(topology, substitutionCapabilities, substitutionCapabilityId, newCapabilityId);
    }

    /**
     * Renames an entry of a substitution target map (capabilities or requirements).
     *
     * @throws NotFoundException when the map or the old key does not exist
     * @throws AlreadyExistException when the new key is already taken
     */
    private RestResponse<TopologyDTO> updateSubstitutionKey(Topology topology, Map<String, SubstitutionTarget> targetMap, String oldKey, String newKey) {
        if (targetMap == null) {
            throw new NotFoundException("No substitution capabilities or requirements has been found");
        }
        SubstitutionTarget target = targetMap.remove(oldKey);
        if (target == null) {
            throw new NotFoundException("No substitution capability or requirement has been found for key " + oldKey);
        }
        if (targetMap.containsKey(newKey)) {
            throw new AlreadyExistException(
                    String.format("Can not rename from <%s> to <%s> since capability or requirement <%s> already exists", oldKey, newKey, newKey));
        }
        targetMap.put(newKey, target);
        return saveAndUpdateType(topology);
    }

    @ApiOperation(value = "Remove the substitution capability from the substitution type.", notes = "Role required [ ARCHITECT ]")
    @RequestMapping(value = "/{topologyId:.+}/substitutions/capabilities/{substitutionCapabilityId}", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
    @Audit
    public RestResponse<TopologyDTO> removeSubstitutionCapability(
            @ApiParam(value = "The topology id.", required = true) @NotBlank @PathVariable final String topologyId,
            @ApiParam(value = "The substitution capability name.", required = true) @NotBlank @PathVariable final String substitutionCapabilityId)
            throws IncompatiblePropertyDefinitionException {
        Topology topology = getTopologyForEdition(topologyId);
        Map<String, SubstitutionTarget> substitutionCapabilities = getSubstitutionMappingOrFail(topology).getCapabilities();
        return removeSubstitutionKey(topology, substitutionCapabilities, substitutionCapabilityId);
    }

    /**
     * Removes an entry of a substitution target map (capabilities or requirements).
     *
     * @throws NotFoundException when the map or the key does not exist
     */
    private RestResponse<TopologyDTO> removeSubstitutionKey(Topology topology, Map<String, SubstitutionTarget> targetMap, String key) {
        if (targetMap == null) {
            throw new NotFoundException("No substitution capabilities or requirements has been found");
        }
        SubstitutionTarget target = targetMap.remove(key);
        if (target == null) {
            throw new NotFoundException("No substitution capability or requirement has been found for key " + key);
        }
        return saveAndUpdateType(topology);
    }

    @ApiOperation(value = "Expose the given requirement as a requirement for the substitution type associated with this topology.", notes = "Role required [ ARCHITECT ]")
    @RequestMapping(value = "/{topologyId:.+}/substitutions/requirements/{substitutionRequirementId}", method = RequestMethod.PUT, produces = MediaType.APPLICATION_JSON_VALUE)
    @ResponseStatus(value = HttpStatus.CREATED)
    @Audit
    public RestResponse<TopologyDTO> exposeRequirement(
            @ApiParam(value = "The topology id.", required = true) @NotBlank @PathVariable final String topologyId,
            @ApiParam(value = "The substitution requirement name.", required = true) @NotBlank @PathVariable final String substitutionRequirementId,
            @ApiParam(value = "The node template id.", required = true) @NotBlank @RequestParam("nodeTemplateName") final String nodeTemplateName,
            @ApiParam(value = "The source node requirement id.", required = true) @NotBlank @RequestParam("requirementId") final String requirementId)
            throws IncompatiblePropertyDefinitionException {
        Topology topology = getTopologyForEdition(topologyId);
        if (topology.getNodeTemplates() == null || !topology.getNodeTemplates().containsKey(nodeTemplateName)) {
            throw new NotFoundException("Node " + nodeTemplateName + " do not exist");
        }
        NodeTemplate nodeTemplate = topology.getNodeTemplates().get(nodeTemplateName);
        if (nodeTemplate.getRequirements() == null || !nodeTemplate.getRequirements().containsKey(requirementId)) {
            throw new NotFoundException("Requirement " + requirementId + " do not exist for node " + nodeTemplateName);
        }
        SubstitutionMapping mapping = getSubstitutionMappingOrFail(topology);
        Map<String, SubstitutionTarget> substitutionRequirements = mapping.getRequirements();
        if (substitutionRequirements == null) {
            substitutionRequirements = Maps.newHashMap();
            mapping.setRequirements(substitutionRequirements);
        } else if (substitutionRequirements.containsKey(substitutionRequirementId)) {
            // ensure name unicity
            throw new AlreadyExistException(String.format("The substitution requirement <%s> already exists", substitutionRequirementId));
        }
        substitutionRequirements.put(substitutionRequirementId, new SubstitutionTarget(nodeTemplateName, requirementId));
        return saveAndUpdateType(topology);
    }

    @ApiOperation(value = "Update the substitution requirement (typically change it's name).", notes = "Role required [ ARCHITECT ]")
    @RequestMapping(value = "/{topologyId:.+}/substitutions/requirements/{substitutionRequirementId}", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE)
    @Audit
    public RestResponse<TopologyDTO> updateSubstitutionRequirement(
            @ApiParam(value = "The topology id.", required = true) @NotBlank @PathVariable final String topologyId,
            @ApiParam(value = "The substitution requirement name.", required = true) @NotBlank @PathVariable final String substitutionRequirementId,
            @ApiParam(value = "The new substution requirement name.", required = true) @NotBlank @RequestParam("newRequirementId") final String newRequirementId)
            throws IncompatiblePropertyDefinitionException {
        Topology topology = getTopologyForEdition(topologyId);
        Map<String, SubstitutionTarget> substitutionRequirements = getSubstitutionMappingOrFail(topology).getRequirements();
        return updateSubstitutionKey(topology, substitutionRequirements, substitutionRequirementId, newRequirementId);
    }

    @ApiOperation(value = "Remove the requirement from the substitution type associated to this topology.", notes = "Role required [ ARCHITECT ]")
    @RequestMapping(value = "/{topologyId:.+}/substitutions/requirements/{substitutionRequirementId}", method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE)
    @Audit
    public RestResponse<TopologyDTO> removeSubstitutionRequirement(
            @ApiParam(value = "The topology id.", required = true) @NotBlank @PathVariable final String topologyId,
            @ApiParam(value = "The substitution requirement name.", required = true) @NotBlank @PathVariable final String substitutionRequirementId)
            throws IncompatiblePropertyDefinitionException {
        Topology topology = getTopologyForEdition(topologyId);
        Map<String, SubstitutionTarget> substitutionRequirements = getSubstitutionMappingOrFail(topology).getRequirements();
        return removeSubstitutionKey(topology, substitutionRequirements, substitutionRequirementId);
    }
}
/*
 * Copyright 2014 - 2017 Cognizant Technology Solutions
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cognizant.cognizantits.ide.main.scheduler;

import com.cognizant.cognizantits.datalib.component.Project;
import com.cognizant.cognizantits.datalib.component.Release;
import com.cognizant.cognizantits.ide.main.help.Help;
import com.cognizant.cognizantits.ide.main.utils.Utils;
import java.awt.Color;
import java.awt.event.ActionEvent;
import java.awt.event.ItemEvent;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JSpinner;
import javax.swing.SpinnerDateModel;
import javax.swing.SwingUtilities;
import javax.swing.Timer;

/**
 * Modal dialog used to schedule a test-set run as a Windows scheduled task
 * (via {@code WinScheduler}). NOTE(review): this chunk ends mid-way through the
 * generated variable-declaration list — the remainder of the class is outside
 * the visible span.
 */
public class SchedulerUI extends javax.swing.JDialog {

    // Command-line template for the scheduled task; placeholders are replaced in createTask().
    private static final String SCHEDULER_ARG
            = " -run -project_location \\\"<PATH>\\\" "
            + "-release \\\"<RELEASE>\\\" "
            + "-testset \\\"<TESTSET>\\\" ";

    Timer createdTimer;        // hides the "Task Created Successfully" label after 3 seconds
    Project sProject;          // project currently being scheduled
    WinScheduler winScheduler; // wrapper around the OS task scheduler

    public SchedulerUI() {
        initComponents();
        init();
    }

    private void init() {
        winScheduler = new WinScheduler();
        createdTimer = new Timer(3000, (ActionEvent event) -> {
            created.setVisible(false);
        });
        setIconImage(Utils.getFavIcon());
        // the status panel doubles as a translucent overlay for messages/confirmation
        setGlassPane(statusPanel);
        initDateModel();
        created.setVisible(false);
    }

    // Configures the date and time spinners with their display formats.
    private void initDateModel() {
        SpinnerDateModel dmodel = new SpinnerDateModel();
        dateSpinner.setModel(dmodel);
        dateSpinner.setEditor(new JSpinner.DateEditor(dateSpinner, "MMM - dd - yyyy"));
        SpinnerDateModel tmodel = new SpinnerDateModel();
        timeSpinner.setModel(tmodel);
        timeSpinner.setEditor(new JSpinner.DateEditor(timeSpinner, "HH:mm"));
    }

    /**
     * @return the FULL_DATE_FORMAT
     */
    // New instance per call: SimpleDateFormat is not thread-safe, so no shared static.
    public static SimpleDateFormat getFULL_DATE_FORMAT() {
        return new SimpleDateFormat("MM/dd/yyyy HH:mm");
    }

    /**
     * @return the DATE_FORMAT
     */
    public static SimpleDateFormat getDATE_FORMAT() {
        return new SimpleDateFormat("MM/dd/yyyy");
    }

    /**
     * @return the TIME_FORMAT
     */
    public static SimpleDateFormat getTIME_FORMAT() {
        return new SimpleDateFormat("HH:mm");
    }

    // Entry point: shows the (modal) dialog for the given project with spinners preset to "now".
    public void showScheduler(Project sProject) {
        this.sProject = sProject;
        loadReleases();
        setLocationRelativeTo(null);
        setVisible(true);
        dateSpinner.setValue(new Date());
        timeSpinner.setValue(new Date());
    }

    // Populates the release combo from the project; selecting index 0 triggers
    // releaseComboItemStateChanged which in turn fills the test-set combo.
    private void loadReleases() {
        releaseCombo.removeAllItems();
        testSetCombo.removeAllItems();
        releaseCombo.setModel(new DefaultComboBoxModel(sProject.getReleases().toArray()));
        releaseCombo.setSelectedIndex(-1);
        if (releaseCombo.getModel().getSize() > 0) {
            releaseCombo.setSelectedIndex(0);
        }
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        statusPanel = new javax.swing.JPanel();
        filler5 = new javax.swing.Box.Filler(new java.awt.Dimension(50, 0), new java.awt.Dimension(50, 0), new java.awt.Dimension(50, 32767));
        filler6 = new javax.swing.Box.Filler(new java.awt.Dimension(50, 0), new java.awt.Dimension(50, 0), new java.awt.Dimension(50, 32767));
        filler7 = new javax.swing.Box.Filler(new java.awt.Dimension(0, 50), new java.awt.Dimension(0, 50), new java.awt.Dimension(32767, 50));
        filler8 = new javax.swing.Box.Filler(new java.awt.Dimension(0, 35), new java.awt.Dimension(0, 35), new java.awt.Dimension(32767, 35));
        jPanel1 = new javax.swing.JPanel();
        jScrollPane2 = new javax.swing.JScrollPane();
        statusArea = new javax.swing.JTextArea();
        optionPanel = new javax.swing.JPanel();
        yesButton = new javax.swing.JButton();
        noButton = new javax.swing.JButton();
        dateSpinner = new javax.swing.JSpinner();
        taskName = new javax.swing.JTextField();
        jLabel1 = new javax.swing.JLabel();
        jLabel2 = new javax.swing.JLabel();
        error = new javax.swing.JLabel();
        createTask = new javax.swing.JButton();
        jLabel3 = new javax.swing.JLabel();
        timeSpinner = new javax.swing.JSpinner();
        releaseCombo = new javax.swing.JComboBox<>();
        testSetCombo = new javax.swing.JComboBox<>();
        jLabel4 = new javax.swing.JLabel();
        jLabel5 = new javax.swing.JLabel();
        created = new javax.swing.JLabel();
        jScrollPane1 = new javax.swing.JScrollPane();
        addnlArguments = new javax.swing.JTextArea();
        jLabel6 = new javax.swing.JLabel();

        statusPanel.setBackground(new Color(0,0,0,.5f));
        statusPanel.setLayout(new java.awt.BorderLayout());
        filler5.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                filler5MouseClicked(evt);
            }
        });
        statusPanel.add(filler5, java.awt.BorderLayout.LINE_START);
        filler6.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                filler6MouseClicked(evt);
            }
        });
        statusPanel.add(filler6, java.awt.BorderLayout.LINE_END);
        filler7.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                filler7MouseClicked(evt);
            }
        });
        statusPanel.add(filler7, java.awt.BorderLayout.PAGE_START);
        filler8.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                filler8MouseClicked(evt);
            }
        });
        statusPanel.add(filler8, java.awt.BorderLayout.PAGE_END);

        jPanel1.setLayout(new java.awt.BorderLayout());

        statusArea.setEditable(false);
        statusArea.setColumns(20);
        statusArea.setFont(new java.awt.Font("sansserif", 0, 14)); // NOI18N
        statusArea.setForeground(new java.awt.Color(153, 0, 0));
        statusArea.setLineWrap(true);
        statusArea.setRows(5);
        statusArea.setWrapStyleWord(true);
        jScrollPane2.setViewportView(statusArea);

        jPanel1.add(jScrollPane2, java.awt.BorderLayout.CENTER);

        yesButton.setText("Yes");
        yesButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                yesButtonActionPerformed(evt);
            }
        });
        optionPanel.add(yesButton);

        noButton.setText("No");
        noButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                noButtonActionPerformed(evt);
            }
        });
        optionPanel.add(noButton);

        jPanel1.add(optionPanel, java.awt.BorderLayout.SOUTH);

        statusPanel.add(jPanel1, java.awt.BorderLayout.CENTER);

        setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
        setTitle("Scheduler");
        setModalityType(java.awt.Dialog.ModalityType.APPLICATION_MODAL);

        taskName.setText("New Task");

        jLabel1.setText("TaskName");

        jLabel2.setText("Date");

        error.setText(" ");

        createTask.setText("Create Task");
        createTask.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                createTaskActionPerformed(evt);
            }
        });

        jLabel3.setText("Time");

        releaseCombo.addItemListener(new java.awt.event.ItemListener() {
            public void itemStateChanged(java.awt.event.ItemEvent evt) {
                releaseComboItemStateChanged(evt);
            }
        });

        jLabel4.setText("Release");

        jLabel5.setText("TestSet");

        created.setForeground(java.awt.Color.blue);
        created.setText("Task Created Successfully");

        addnlArguments.setColumns(20);
        addnlArguments.setRows(5);
        jScrollPane1.setViewportView(addnlArguments);

        jLabel6.setText("Arguments");

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addGap(150, 150, 150)
                        .addComponent(createTask))
                    .addGroup(layout.createSequentialGroup()
                        .addGap(23, 23, 23)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addGroup(layout.createSequentialGroup()
                                .addGap(47, 47, 47)
                                .addComponent(error)
                                .addGap(57, 57, 57)
                                .addComponent(created))
                            .addGroup(layout.createSequentialGroup()
                                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                    .addComponent(jLabel1)
                                    .addComponent(jLabel2)
                                    .addComponent(jLabel3)
                                    .addComponent(jLabel4)
                                    .addComponent(jLabel6)
                                    .addComponent(jLabel5))
                                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                                    .addComponent(testSetCombo, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                                    .addComponent(releaseCombo, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                                    .addComponent(taskName)
                                    .addComponent(dateSpinner)
                                    .addComponent(timeSpinner, javax.swing.GroupLayout.DEFAULT_SIZE, 258, Short.MAX_VALUE)
                                    .addComponent(jScrollPane1))))))
                .addContainerGap(30, Short.MAX_VALUE))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGap(12, 12, 12)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(error)
                    .addComponent(created))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(releaseCombo, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jLabel4))
                .addGap(26, 26, 26)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(testSetCombo, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jLabel5))
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addGap(47, 47, 47)
                        .addComponent(jLabel6))
                    .addGroup(layout.createSequentialGroup()
                        .addGap(26, 26, 26)
                        .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 112, javax.swing.GroupLayout.PREFERRED_SIZE)))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 27, Short.MAX_VALUE)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(taskName, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jLabel1))
                .addGap(24, 24, 24)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(dateSpinner, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jLabel2))
                .addGap(26, 26, 26)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel3)
                    .addComponent(timeSpinner, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addGap(35, 35, 35)
                .addComponent(createTask)
                .addContainerGap())
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents

    // Validates the form before delegating to the actual task-creation flow.
    private void createTaskActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_createTaskActionPerformed
        if (isConditionMet()) {
            if (isDateValid()) {
                checkForAndTask();
            }
        } else {
            showStatus("Task could not be created.\nInsufficient Data to create a task");
        }
    }//GEN-LAST:event_createTaskActionPerformed

    // Reloads the test-set combo whenever a release is selected.
    private void releaseComboItemStateChanged(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_releaseComboItemStateChanged
        if (evt.getStateChange() == ItemEvent.SELECTED) {
            testSetCombo.setModel(new DefaultComboBoxModel(
                    ((Release) releaseCombo.getSelectedItem())
                    .getTestSets().toArray()));
        }
    }//GEN-LAST:event_releaseComboItemStateChanged

    // Clicking anywhere on the overlay fillers dismisses the status overlay.
    private void filler7MouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_filler7MouseClicked
        hideStatus();
    }//GEN-LAST:event_filler7MouseClicked

    private void filler5MouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_filler5MouseClicked
        hideStatus();
    }//GEN-LAST:event_filler5MouseClicked

    private void filler6MouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_filler6MouseClicked
        hideStatus();
    }//GEN-LAST:event_filler6MouseClicked

    private void filler8MouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_filler8MouseClicked
        hideStatus();
    }//GEN-LAST:event_filler8MouseClicked

    // "Yes" on the overwrite confirmation: proceed with task creation.
    private void yesButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_yesButtonActionPerformed
        hideStatus();
        createTask();
    }//GEN-LAST:event_yesButtonActionPerformed

    private void noButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_noButtonActionPerformed
        hideStatus();
    }//GEN-LAST:event_noButtonActionPerformed

    // True when a task name, a release and a test set have all been chosen.
    private Boolean isConditionMet() {
        return !taskName.getText().trim().isEmpty()
                && releaseCombo.getSelectedIndex() != -1
                && testSetCombo.getSelectedIndex() != -1;
    }

    // True when the scheduled date/time is strictly in the future; otherwise shows an error.
    // NOTE(review): a ParseException is logged at Level.OFF, i.e. effectively suppressed.
    private Boolean isDateValid() {
        String date = getDATE_FORMAT().format(dateSpinner.getValue()) + " "
                + getTIME_FORMAT().format(timeSpinner.getValue());
        try {
            if (new Date().before(getFULL_DATE_FORMAT().parse(date))) {
                return true;
            } else {
                showStatus(String.format(
                        "Invalid Schedule Date - [%s].\n"
                        + "Date should be atleast "
                        + "1 min greater than the current date and time", date
                ));
                return false;
            }
        } catch (ParseException ex) {
            Logger.getLogger(SchedulerUI.class.getName()).log(Level.OFF, null, ex);
        }
        return false;
    }

    // Shows a message on the glass-pane overlay (without the Yes/No buttons).
    private void showStatus(String text) {
        statusArea.setText(text);
        getGlassPane().setVisible(true);
        optionPanel.setVisible(false);
    }

    // Shows a message with the Yes/No confirmation buttons visible.
    private void showStatusWithButton(String text) {
        showStatus(text);
        optionPanel.setVisible(true);
    }

    private void hideStatus() {
        getGlassPane().setVisible(false);
    }

    // Asks for confirmation when a task with the same name already exists.
    private void checkForAndTask() {
        if (winScheduler.isTask(taskName.getText())) {
            showStatusWithButton(String.format("A Task with name [%s] already exist.\nDo you want to overwrite?", taskName.getText()));
        } else {
            createTask();
        }
    }

    // Builds the command-line from SCHEDULER_ARG plus any additional arguments and
    // registers the task, then verifies it was actually created.
    private void createTask() {
        SwingUtilities.invokeLater(() -> {
            String argument = SCHEDULER_ARG.replace("<PATH>", sProject.getLocation())
                    .replace("<RELEASE>", releaseCombo.getSelectedItem().toString())
                    .replace("<TESTSET>", testSetCombo.getSelectedItem().toString())
                    .concat(addnlArguments.getText());
            winScheduler.createTask(taskName.getText(), argument,
                    getDATE_FORMAT().format(dateSpinner.getValue()),
                    getTIME_FORMAT().format(timeSpinner.getValue()));
            if (winScheduler.isTask(taskName.getText())) {
                created.setVisible(true);
                createdTimer.start();
            } else {
                showStatus("Could not create task - [" + taskName.getText() + "]");
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    // NOTE(review): the declaration list is truncated here — the rest of the class
    // lies beyond the visible chunk.
    private javax.swing.JTextArea addnlArguments;
    private javax.swing.JButton createTask;
    private javax.swing.JLabel created;
    private javax.swing.JSpinner dateSpinner;
    private javax.swing.JLabel error;
    private javax.swing.Box.Filler filler5;
    private
javax.swing.Box.Filler filler6; private javax.swing.Box.Filler filler7; private javax.swing.Box.Filler filler8; private javax.swing.JLabel jLabel1; private javax.swing.JLabel jLabel2; private javax.swing.JLabel jLabel3; private javax.swing.JLabel jLabel4; private javax.swing.JLabel jLabel5; private javax.swing.JLabel jLabel6; private javax.swing.JPanel jPanel1; private javax.swing.JScrollPane jScrollPane1; private javax.swing.JScrollPane jScrollPane2; private javax.swing.JButton noButton; private javax.swing.JPanel optionPanel; private javax.swing.JComboBox<String> releaseCombo; private javax.swing.JTextArea statusArea; private javax.swing.JPanel statusPanel; private javax.swing.JTextField taskName; private javax.swing.JComboBox<String> testSetCombo; private javax.swing.JSpinner timeSpinner; private javax.swing.JButton yesButton; // End of variables declaration//GEN-END:variables }
/* * #%L * ACS AEM Commons Bundle * %% * Copyright (C) 2013 - 2018 Adobe * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package com.adobe.acs.commons.adobeio.service.impl; import static com.adobe.acs.commons.adobeio.service.impl.AdobeioConstants.*; import static io.jsonwebtoken.SignatureAlgorithm.RS256; import java.math.BigInteger; import java.security.KeyFactory; import java.security.NoSuchAlgorithmException; import java.security.PrivateKey; import java.security.spec.InvalidKeySpecException; import java.security.spec.PKCS8EncodedKeySpec; import java.util.Base64; import java.util.Calendar; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpResponse; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.message.BasicNameValuePair; import org.osgi.service.component.annotations.Activate; import org.osgi.service.component.annotations.Component; import org.osgi.service.component.annotations.ConfigurationPolicy; import org.osgi.service.component.annotations.Modified; import org.osgi.service.component.annotations.Reference; import org.osgi.service.metatype.annotations.Designate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
com.adobe.acs.commons.adobeio.service.IntegrationService; import com.google.common.collect.Lists; import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import io.jsonwebtoken.Jwts; //scheduler is set to once per hour //you can use cronmaker.com for generating cron expressions @Component(service = {IntegrationService.class, Runnable.class}, configurationPolicy = ConfigurationPolicy.REQUIRE, property = "scheduler.expression=0 0 0/1 1/1 * ? *") @Designate(ocd = IntegrationConfiguration.class) public class IntegrationServiceImpl implements IntegrationService, Runnable { private static final Logger LOGGER = LoggerFactory.getLogger(IntegrationServiceImpl.class); private static final Base64.Decoder DECODER = Base64.getMimeDecoder(); @Reference private AdobeioHelper helper; private String accessToken = null; protected IntegrationConfiguration jwtServiceConfig; @Activate @Modified protected void activate(IntegrationConfiguration config) { this.jwtServiceConfig = config; } @Override public void run() { // fetch access token from adobe.io // this method is invoked via the scheduler accessToken = fetchAccessToken(); LOGGER.info("access token in run()-method {}", accessToken); } @Override public String getAccessToken() { if (StringUtils.isEmpty(accessToken)) { accessToken = fetchAccessToken(); } return accessToken; } @Override public String getApiKey() { return jwtServiceConfig.clientId(); } @Override public int getTimeoutinMilliSeconds() { return jwtServiceConfig.timeoutInMilliSeocnds(); } // -------- PRIVATE METHODS ---------- private String fetchAccessToken() { String token = StringUtils.EMPTY; try(CloseableHttpClient client = helper.getHttpClient(getTimeoutinMilliSeconds())) { HttpPost post = new HttpPost(jwtServiceConfig.endpoint()); post.addHeader(CACHE_CONTRL, NO_CACHE); post.addHeader(CONTENT_TYPE, CONTENT_TYPE_URL_ENCODED); List<BasicNameValuePair> params = Lists.newArrayList(); params.add(new BasicNameValuePair(CLIENT_ID, 
jwtServiceConfig.clientId())); params.add(new BasicNameValuePair(CLIENT_SECRET, jwtServiceConfig.clientSecret())); params.add(new BasicNameValuePair(JWT_TOKEN, getJwtToken())); post.setEntity(new UrlEncodedFormEntity(params)); HttpResponse response = client.execute(post); if (response.getStatusLine().getStatusCode() != 200) { LOGGER.info("response code {} ", response.getStatusLine().getStatusCode()); } String result = IOUtils.toString(response.getEntity().getContent(), "UTF-8"); LOGGER.info("JSON Response : {}", result); JsonParser parser = new JsonParser(); JsonObject json = parser.parse(result).getAsJsonObject(); if (json.has(JSON_ACCESS_TOKEN)) { token = json.get(JSON_ACCESS_TOKEN).getAsString(); } else { LOGGER.error("JSON does not contain an access_token"); } } catch (Exception e) { LOGGER.error(e.getMessage()); } LOGGER.info("JWT Access Token : {}", token); return token; } protected String getJwtToken() { String jwtToken = StringUtils.EMPTY; try { jwtToken = Jwts .builder() .setClaims(getJwtClaims()) .signWith(RS256, getPrivateKey()) .compact(); } catch (Exception e) { LOGGER.error("JWT claims {}", getJwtClaims()); LOGGER.error(e.getMessage()); } LOGGER.info("JWT Token : \n {}", jwtToken); return jwtToken; } private PrivateKey getPrivateKey() throws NoSuchAlgorithmException, InvalidKeySpecException { PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(buildPkcs8Key(jwtServiceConfig.privateKey())); KeyFactory kf = KeyFactory.getInstance("RSA"); return kf.generatePrivate(keySpec); } protected static byte[] buildPkcs8Key(String privateKey) { if (privateKey.contains("--BEGIN PRIVATE KEY--")) { return DECODER.decode(privateKey.replaceAll("-----\\w+ PRIVATE KEY-----", "")); } if (!privateKey.contains("--BEGIN RSA PRIVATE KEY--")) { LOGGER.error("Invalid cert format: {}", privateKey); return StringUtils.EMPTY.getBytes(); } final byte[] innerKey = DECODER.decode(privateKey.replaceAll("-----\\w+ RSA PRIVATE KEY-----", "")); final byte[] result = new 
byte[innerKey.length + 26]; System.arraycopy(DECODER.decode("MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKY="), 0, result, 0, 26); System.arraycopy(BigInteger.valueOf(result.length - 4).toByteArray(), 0, result, 2, 2); System.arraycopy(BigInteger.valueOf(innerKey.length).toByteArray(), 0, result, 24, 2); System.arraycopy(innerKey, 0, result, 26, innerKey.length); return result; } private Map getJwtClaims() { Map jwtClaims = new HashMap<>(); jwtClaims.put("iss", jwtServiceConfig.amcOrgId()); jwtClaims.put("sub", jwtServiceConfig.techAccountId()); jwtClaims.put("exp", getExpirationDate()); jwtClaims.put("aud", String.format("%s%s", jwtServiceConfig.loginEndpoint(), jwtServiceConfig.clientId())); String [] claims = jwtServiceConfig.adobeLoginClaimKey(); if (claims != null && claims.length > 0) { for( int i=0; i < claims.length; i++) { jwtClaims.put(claims[i], Boolean.TRUE); } } if (LOGGER.isDebugEnabled()) { Gson gson = new Gson(); LOGGER.debug(gson.toJson(jwtClaims)); } return jwtClaims; } private Date getExpirationDate() { Calendar cal = Calendar.getInstance(); cal.setTime(new Date()); cal.add(Calendar.SECOND, jwtServiceConfig.expirationTimeInSeconds()); return cal.getTime(); } }
/* Copyright (c) 2003-2009, Dennis M. Sosnoski. All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

 * Redistributions of source code must retain the above copyright notice, this
   list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of JiBX nor the names of its contributors may be used
   to endorse or promote products derived from this software without specific
   prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package org.jibx.binding.def;

import java.util.ArrayList;

import org.jibx.binding.classes.*;
import org.jibx.runtime.JiBXException;

/**
 * Structure binding definition. This handles one or more child components,
 * which may be ordered or unordered. Code generation emits JVM bytecode
 * through {@link ContextMethodBuilder} rather than executing the binding
 * directly.
 *
 * @author Dennis M. Sosnoski
 */
public class NestedStructure extends NestedBase
{
    //
    // Method definitions used in code generation
    // (fully-qualified names plus JVM descriptors of the unmarshalling
    // context methods invoked from the generated bytecode)

    private static final String CHECK_ISSTART_NAME =
        "org.jibx.runtime.impl.UnmarshallingContext.isStart";
    private static final String CHECK_ISSTART_SIGNATURE = "()Z";
    private static final String SKIP_ELEMENT_NAME =
        "org.jibx.runtime.impl.UnmarshallingContext.skipElement";
    private static final String SKIP_ELEMENT_SIGNATURE = "()V";
    private static final String THROW_EXCEPTION_NAME =
        "org.jibx.runtime.impl.UnmarshallingContext.throwNameException";
    private static final String THROW_EXCEPTION_SIGNATURE =
        "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V";

    //
    // Instance data

    /** Child supplying ID for bound class. */
    private IComponent m_idChild;

    /** Flag for choice of child content (used by subclasses). */
    protected final boolean m_isChoice;

    /** Flag for duplicate values allowed when unmarshalling unordered group. */
    private final boolean m_allowDuplicates;

    /** Flag for structure has associated object. */
    private boolean m_hasObject;

    /** Flag for already linked (to avoid multiple passes). */
    private boolean m_isLinked;

    /**
     * Constructor.
     *
     * @param parent containing binding definition context
     * @param objc current object context
     * @param ord ordered content flag
     * @param choice choice content flag
     * @param flex flexible element handling flag
     * @param ctx define context for structure flag
     * @param hasobj has associated object flag
     * @param dupl allow duplicates in unordered group flag
     */
    public NestedStructure(IContainer parent, IContextObj objc,
        boolean ord, boolean choice, boolean flex, boolean ctx,
        boolean hasobj, boolean dupl) {
        super(parent, objc, ord, flex, ctx);
        m_isChoice = choice;
        m_hasObject = hasobj;
        m_allowDuplicates = dupl;
    }

    /**
     * Set the object context.
     *
     * @param objc object context
     */
    public void setObjectContext(IContextObj objc) {
        // NOTE(review): the objc parameter is unused here; the call only
        // clears the has-object flag - confirm this is intentional.
        m_hasObject = false;
    }

    /**
     * Check if the structure is just a mapping reference. This is used to
     * short-circuit the code generation to avoid multiple layers of binding
     * methods. As written, the result is only valid prior to a call to
     * {@link #setLinkages()}.
     *
     * @return <code>true</code> if a mapping reference, <code>false</code> if
     * not
     */
    public boolean isMappingReference() {
        return m_contents.size() == 1 &&
            m_contents.get(0) instanceof MappingReference;
    }

    //
    // IComponent interface method definitions

    // A bare mapping reference never creates an instance itself, so reaching
    // this method in that state indicates a code-generation logic error.
    public void genNewInstance(ContextMethodBuilder mb) {
        if (isMappingReference()) {
            throw new IllegalStateException
                ("Internal error - no instance creation");
        }
    }

    // Only meaningful after setLinkages() has sorted children into the
    // attribute list.
    public boolean hasAttribute() {
        return m_attributes != null && m_attributes.size() > 0;
    }

    // Generates code leaving a boolean on the stack: true if any attribute
    // of this structure is present in the document.
    public void genAttrPresentTest(ContextMethodBuilder mb)
        throws JiBXException {
        if (m_attributes != null && m_attributes.size() > 0) {

            // if single possibility just test it directly
            int count = m_attributes.size();
            if (count == 1) {
                ((IComponent)m_attributes.get(0)).genAttrPresentTest(mb);
            } else {

                // generate code for chained test with branches to found exit
                BranchWrapper[] tofound = new BranchWrapper[count];
                for (int i = 0; i < count; i++) {
                    IComponent comp = (IComponent)m_attributes.get(i);
                    comp.genAttrPresentTest(mb);
                    tofound[i] = mb.appendIFNE(this);
                }

                // fall off end of loop to push "false" on stack and jump to end
                mb.appendICONST_0();
                BranchWrapper toend = mb.appendUnconditionalBranch(this);

                // generate found target to push "true" on stack and continue
                for (int i = 0; i < count; i++) {
                    mb.targetNext(tofound[i]);
                }
                mb.appendICONST_1();
                mb.targetNext(toend);
            }
        } else {
            throw new IllegalStateException
                ("Internal error - no attributes present");
        }
    }

    // Emits attribute unmarshal code for every attribute child, in order.
    public void genAttributeUnmarshal(ContextMethodBuilder mb)
        throws JiBXException {
        if (m_attributes != null && m_attributes.size() > 0) {
            for (int i = 0; i < m_attributes.size(); i++) {
                IComponent attr = (IComponent)m_attributes.get(i);
                attr.genAttributeUnmarshal(mb);
            }
        } else {
            throw new IllegalStateException
                ("Internal error - no attributes present");
        }
    }

    // Emits attribute marshal code for every attribute child, in order.
    public void genAttributeMarshal(ContextMethodBuilder mb)
        throws JiBXException {
        if (m_attributes != null && m_attributes.size() > 0) {
            for (int i = 0; i < m_attributes.size(); i++) {
                IComponent attr = (IComponent)m_attributes.get(i);
                attr.genAttributeMarshal(mb);
            }
        } else {
            throw new IllegalStateException
                ("Internal error - no attributes present");
        }
    }

    public boolean hasContent() {
        return m_contents.size() > 0;
    }

    // Emits element-content unmarshal code. Ordered content is handled by a
    // simple sequence; unordered content by a generated matching loop that
    // tracks seen elements in a boolean array (for duplicate detection and
    // required-element checks).
    public void genContentUnmarshal(ContextMethodBuilder mb)
        throws JiBXException {
        if (m_contents.size() > 0) {

            // check for ordered or unordered content
            if (m_isOrdered) {

                // should never get here on choice, but just in case
                if (m_isChoice) {
                    throw new IllegalStateException("Internal error - generation model uses ordered choice");
                }

                // just generate unmarshal code for each component in order
                for (int i = 0; i < m_contents.size(); i++) {
                    IComponent child = (IComponent)m_contents.get(i);
                    child.genContentUnmarshal(mb);
                }
            } else {

                // start by finding the number of required elements
                int count = m_contents.size();
                int nreq = 0;
                boolean dupl = m_allowDuplicates;
                if (m_isChoice) {
                    // a choice allows at most one branch, so per-element
                    // duplicate tracking is unnecessary
                    dupl = true;
                } else {
                    for (int i = 0; i < count; i++) {
                        if (!((IComponent)m_contents.get(i)).isOptional()) {
                            nreq++;
                        }
                    }
                }

                // create array for tracking elements seen
                boolean useflag = nreq > 0 || !dupl;
                if (useflag) {
                    mb.appendLoadConstant(count);
                    mb.appendCreateArray("boolean");
                    mb.defineSlot(this, ClassItem.typeFromName("boolean[]"));
                }

                // generate unmarshal loop code that checks for each component,
                // branching to the next component until one is found and
                // exiting the loop only when no component is matched (or in
                // the case of flexible unmarshalling, only exiting the loop
                // when the enclosing end tag is seen). this uses the array(s)
                // of booleans to track elements seen and detect duplicates.
                BranchWrapper link = null;
                // TODO: initialize default values
                BranchTarget first = mb.appendTargetNOP();
                BranchWrapper[] toends;
                if (m_isChoice) {
                    // one exit branch per alternative, plus the no-match exit
                    toends = new BranchWrapper[count+1];
                } else {
                    toends = new BranchWrapper[1];
                }
                for (int i = 0; i < count; i++) {

                    // start with basic test code
                    if (link != null) {
                        mb.targetNext(link);
                    }
                    IComponent child = (IComponent)m_contents.get(i);
                    child.genContentPresentTest(mb);
                    link = mb.appendIFEQ(this);

                    // check for duplicate (if enforced)
                    if (!dupl) {
                        genFlagTest(true, i, "Duplicate element ",
                            child.getWrapperName(), mb);
                    }

                    // set flag for element seen
                    if (useflag && !(child.isOptional() && dupl)) {
                        mb.appendLoadLocal(mb.getSlot(this));
                        mb.appendLoadConstant(i);
                        mb.appendLoadConstant(1);
                        mb.appendASTORE("boolean");
                    }

                    // generate actual unmarshalling code
                    child.genContentUnmarshal(mb);
                    BranchWrapper next = mb.appendUnconditionalBranch(this);
                    if (m_isChoice) {
                        // choice exits the loop after one branch matched
                        toends[i+1] = next;
                    } else {
                        // unordered group loops back to try all matchers again
                        next.setTarget(first, mb);
                    }
                }

                // handle comparison fall through depending on flexible flag
                if (m_isFlexible) {
                    if (link != null) {

                        // exit loop if not positioned at element start
                        mb.targetNext(link);
                        mb.loadContext();
                        mb.appendCallVirtual(CHECK_ISSTART_NAME,
                            CHECK_ISSTART_SIGNATURE);
                        toends[0] = mb.appendIFEQ(this);

                        // ignore unknown element and loop back to start
                        mb.loadContext();
                        mb.appendCallVirtual(SKIP_ELEMENT_NAME,
                            SKIP_ELEMENT_SIGNATURE);
                        mb.appendUnconditionalBranch(this).setTarget(first, mb);
                    }
                } else {

                    // set final test failure branch to fall through loop
                    toends[0] = link;
                }

                // patch all branches that exit loop
                mb.targetNext(toends);

                // handle required element present tests
                if (nreq > 0) {
                    for (int i = 0; i < count; i++) {
                        IComponent child = (IComponent)m_contents.get(i);
                        if (!child.isOptional()) {
                            genFlagTest(false, i, "Missing required element ",
                                child.getWrapperName(), mb);
                        }
                    }
                }
                mb.freeSlot(this);
            }
        } else {
            throw new IllegalStateException
                ("Internal error - no content present");
        }
    }

    /**
     * Helper method to generate test code for value in boolean array. If the
     * test fails, the generated code throws an exception with the appropriate
     * error message.
     *
     * @param cond flag setting resulting in exception (<code>true</code> if
     * the exception is thrown when the flag is set - duplicate check - or
     * <code>false</code> when it is clear - missing-required check)
     * @param pos position of element in list of child components
     * @param msg basic error message when test fails
     * @param name element name definition used in the error message (may be
     * <code>null</code>)
     * @param mb method builder receiving the generated code
     */
    private void genFlagTest(boolean cond, int pos, String msg,
        NameDefinition name, ContextMethodBuilder mb) {

        // generate code to load array item value
        mb.appendLoadLocal(mb.getSlot(this));
        mb.appendLoadConstant(pos);
        mb.appendALOAD("boolean");

        // append branch for case where test is passed
        BranchWrapper ifgood;
        if (cond) {
            ifgood = mb.appendIFEQ(this);
        } else {
            ifgood = mb.appendIFNE(this);
        }

        // generate exception for test failed
        mb.loadContext();
        mb.appendLoadConstant(msg);
        if (name == null) {
            mb.appendACONST_NULL();
            mb.appendLoadConstant("(unknown name, position " + pos +
                " in binding structure)");
        } else {
            name.genPushUriPair(mb);
        }
        mb.appendCallVirtual(THROW_EXCEPTION_NAME,
            THROW_EXCEPTION_SIGNATURE);

        // set target for success branch on test
        mb.targetNext(ifgood);
    }

    // Emits marshal code for every content child, in order.
    public void genContentMarshal(ContextMethodBuilder mb)
        throws JiBXException {
        if (m_contents.size() > 0) {
            for (int i = 0; i < m_contents.size(); i++) {
                IComponent content = (IComponent)m_contents.get(i);
                content.genContentMarshal(mb);
            }
        } else {
            throw new IllegalStateException
                ("Internal error - no content present");
        }
    }

    // Without an associated object the structure's type is borrowed from its
    // first attribute or first content child.
    public String getType() {
        if (m_hasObject) {
            return super.getType();
        } else if (m_attributes != null && m_attributes.size() > 0) {
            return ((IComponent)m_attributes.get(0)).getType();
        } else if (m_contents.size() > 0) {
            return ((IComponent)m_contents.get(0)).getType();
        } else {
            throw new IllegalStateException("Internal error - " +
                "no type defined for structure");
        }
    }

    public boolean hasId() {
        return m_idChild != null;
    }

    public void genLoadId(ContextMethodBuilder mb) throws JiBXException {
        if (m_idChild == null) {
            throw new IllegalStateException("No ID child defined");
        } else {
            m_idChild.genLoadId(mb);
        }
    }

    // Links child components and sorts them into attribute/content lists;
    // guarded so recursive references only run the pass once. Children with
    // attributes are (also) added to the attribute list; children without
    // content are removed from the content list.
    public void setLinkages() throws JiBXException {
        if (!m_isLinked) {

            // set flag first in case of recursive reference
            m_isLinked = true;

            // process all child components to link and sort by type
            int i = 0;
            while (i < m_contents.size()) {
                IComponent comp = (IComponent)m_contents.get(i);
                comp.setLinkages();
                if (comp.hasAttribute()) {
                    if (m_attributes == null) {
                        m_attributes = new ArrayList();
                    }
                    m_attributes.add(comp);
                }
                if (!comp.hasContent()) {
                    m_contents.remove(i);
                } else {
                    i++;
                }
            }
        }
    }

    // DEBUG
    public void print(int depth) {
        BindingDefinition.indent(depth);
        System.out.print("structure " +
            (m_isChoice ? "choice" : (m_isOrdered ? "ordered" : "unordered")));
        if (m_allowDuplicates) {
            System.out.print(", duplicates allowed");
        }
        if (isFlexible()) {
            System.out.print(", flexible");
        }
        if (m_idChild != null) {
            System.out.print(" (ID)");
        }
        System.out.println();
        for (int i = 0; i < m_contents.size(); i++) {
            IComponent comp = (IComponent)m_contents.get(i);
            comp.print(depth+1);
        }
        if (m_attributes != null) {
            for (int i = 0; i < m_attributes.size(); i++) {
                IComponent comp = (IComponent)m_attributes.get(i);
                comp.print(depth+1);
            }
        }
    }
}
/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.squareup.okhttp.internal.http; import com.squareup.okhttp.Connection; import com.squareup.okhttp.ConnectionPool; import com.squareup.okhttp.Headers; import com.squareup.okhttp.Response; import com.squareup.okhttp.internal.Internal; import com.squareup.okhttp.internal.Util; import java.io.IOException; import java.net.ProtocolException; import java.net.Socket; import java.net.SocketTimeoutException; import okio.Buffer; import okio.BufferedSink; import okio.BufferedSource; import okio.Okio; import okio.Sink; import okio.Source; import okio.Timeout; import static com.squareup.okhttp.internal.Util.checkOffsetAndCount; import static com.squareup.okhttp.internal.http.StatusLine.HTTP_CONTINUE; import static com.squareup.okhttp.internal.http.Transport.DISCARD_STREAM_TIMEOUT_MILLIS; import static java.util.concurrent.TimeUnit.MILLISECONDS; /** * A socket connection that can be used to send HTTP/1.1 messages. This class * strictly enforces the following lifecycle: * <ol> * <li>{@link #writeRequest Send request headers}. * <li>Open a sink to write the request body. Either {@link * #newFixedLengthSink fixed-length} or {@link #newChunkedSink chunked}. * <li>Write to and then close that sink. * <li>{@link #readResponse Read response headers}. * <li>Open a source to read the response body. 
Either {@link * #newFixedLengthSource fixed-length}, {@link #newChunkedSource chunked} * or {@link #newUnknownLengthSource unknown length}. * <li>Read from and close that source. * </ol> * <p>Exchanges that do not have a request body may skip creating and closing * the request body. Exchanges that do not have a response body can call {@link * #newFixedLengthSource(long) newFixedLengthSource(0)} and may skip reading and * closing that source. */ public final class HttpConnection { private static final int STATE_IDLE = 0; // Idle connections are ready to write request headers. private static final int STATE_OPEN_REQUEST_BODY = 1; private static final int STATE_WRITING_REQUEST_BODY = 2; private static final int STATE_READ_RESPONSE_HEADERS = 3; private static final int STATE_OPEN_RESPONSE_BODY = 4; private static final int STATE_READING_RESPONSE_BODY = 5; private static final int STATE_CLOSED = 6; private static final int ON_IDLE_HOLD = 0; private static final int ON_IDLE_POOL = 1; private static final int ON_IDLE_CLOSE = 2; private final ConnectionPool pool; private final Connection connection; private final Socket socket; private final BufferedSource source; private final BufferedSink sink; private int state = STATE_IDLE; private int onIdle = ON_IDLE_HOLD; public HttpConnection(ConnectionPool pool, Connection connection, Socket socket) throws IOException { this.pool = pool; this.connection = connection; this.socket = socket; this.source = Okio.buffer(Okio.source(socket)); this.sink = Okio.buffer(Okio.sink(socket)); } public void setTimeouts(int readTimeoutMillis, int writeTimeoutMillis) { if (readTimeoutMillis != 0) { source.timeout().timeout(readTimeoutMillis, MILLISECONDS); } if (writeTimeoutMillis != 0) { sink.timeout().timeout(writeTimeoutMillis, MILLISECONDS); } } /** * Configure this connection to put itself back into the connection pool when * the HTTP response body is exhausted. 
*/ public void poolOnIdle() { onIdle = ON_IDLE_POOL; // If we're already idle, go to the pool immediately. if (state == STATE_IDLE) { onIdle = ON_IDLE_HOLD; // Set the on idle policy back to the default. Internal.instance.recycle(pool, connection); } } /** * Configure this connection to close itself when the HTTP response body is * exhausted. */ public void closeOnIdle() throws IOException { onIdle = ON_IDLE_CLOSE; // If we're already idle, close immediately. if (state == STATE_IDLE) { state = STATE_CLOSED; connection.getSocket().close(); } } /** Returns true if this connection is closed. */ public boolean isClosed() { return state == STATE_CLOSED; } public void closeIfOwnedBy(Object owner) throws IOException { Internal.instance.closeIfOwnedBy(connection, owner); } public void flush() throws IOException { sink.flush(); } /** Returns the number of buffered bytes immediately readable. */ public long bufferSize() { return source.buffer().size(); } /** Test for a stale socket. */ public boolean isReadable() { try { int readTimeout = socket.getSoTimeout(); try { socket.setSoTimeout(1); if (source.exhausted()) { return false; // Stream is exhausted; socket is closed. } return true; } finally { socket.setSoTimeout(readTimeout); } } catch (SocketTimeoutException ignored) { return true; // Read timed out; socket is good. } catch (IOException e) { return false; // Couldn't read; socket is closed. } } /** Returns bytes of a request header for sending on an HTTP transport. */ public void writeRequest(Headers headers, String requestLine) throws IOException { if (state != STATE_IDLE) throw new IllegalStateException("state: " + state); sink.writeUtf8(requestLine).writeUtf8("\r\n"); for (int i = 0, size = headers.size(); i < size; i ++) { sink.writeUtf8(headers.name(i)) .writeUtf8(": ") .writeUtf8(headers.value(i)) .writeUtf8("\r\n"); } sink.writeUtf8("\r\n"); state = STATE_OPEN_REQUEST_BODY; } /** Parses bytes of a response header from an HTTP transport. 
*/
public Response.Builder readResponse() throws IOException {
  if (state != STATE_OPEN_REQUEST_BODY && state != STATE_READ_RESPONSE_HEADERS) {
    throw new IllegalStateException("state: " + state);
  }

  // Loop to skip interim "100 Continue" responses; the first status line
  // whose code is not HTTP_CONTINUE is the real response.
  while (true) {
    StatusLine statusLine = StatusLine.parse(source.readUtf8LineStrict());

    Response.Builder responseBuilder = new Response.Builder()
        .protocol(statusLine.protocol)
        .code(statusLine.code)
        .message(statusLine.message);

    Headers.Builder headersBuilder = new Headers.Builder();
    readHeaders(headersBuilder);
    headersBuilder.add(OkHeaders.SELECTED_PROTOCOL, statusLine.protocol.toString());
    responseBuilder.headers(headersBuilder.build());

    if (statusLine.code != HTTP_CONTINUE) {
      state = STATE_OPEN_RESPONSE_BODY;
      return responseBuilder;
    }
  }
}

/** Reads headers or trailers into {@code builder}. */
public void readHeaders(Headers.Builder builder) throws IOException {
  // parse the result headers until the first blank line
  for (String line; (line = source.readUtf8LineStrict()).length() != 0; ) {
    Internal.instance.addLenient(builder, line);
  }
}

/** Returns a sink that streams the request body using chunked encoding. */
public Sink newChunkedSink() {
  if (state != STATE_OPEN_REQUEST_BODY) throw new IllegalStateException("state: " + state);
  state = STATE_WRITING_REQUEST_BODY;
  return new ChunkedSink();
}

/** Returns a sink for a request body whose length is known in advance. */
public Sink newFixedLengthSink(long contentLength) {
  if (state != STATE_OPEN_REQUEST_BODY) throw new IllegalStateException("state: " + state);
  state = STATE_WRITING_REQUEST_BODY;
  return new FixedLengthSink(contentLength);
}

/** Writes an already-buffered, retryable request body directly to the socket sink. */
public void writeRequestBody(RetryableSink requestBody) throws IOException {
  if (state != STATE_OPEN_REQUEST_BODY) throw new IllegalStateException("state: " + state);
  state = STATE_READ_RESPONSE_HEADERS;
  requestBody.writeToSocket(sink);
}

/** Returns a source for a response body of exactly {@code length} bytes. */
public Source newFixedLengthSource(long length) throws IOException {
  if (state != STATE_OPEN_RESPONSE_BODY) throw new IllegalStateException("state: " + state);
  state = STATE_READING_RESPONSE_BODY;
  return new FixedLengthSource(length);
}

/** Returns a source for a chunk-encoded response body. */
public Source newChunkedSource(HttpEngine httpEngine) throws IOException {
  if (state != STATE_OPEN_RESPONSE_BODY) throw new IllegalStateException("state: " + state);
  state = STATE_READING_RESPONSE_BODY;
  return new ChunkedSource(httpEngine);
}

/** Returns a source for a response body terminated by the end of the stream. */
public Source newUnknownLengthSource() throws IOException {
  if (state != STATE_OPEN_RESPONSE_BODY) throw new IllegalStateException("state: " + state);
  state = STATE_READING_RESPONSE_BODY;
  return new UnknownLengthSource();
}

/** An HTTP body with a fixed length known in advance. */
private final class FixedLengthSink implements Sink {
  private boolean closed;
  private long bytesRemaining; // Bytes promised by the caller but not yet written.

  private FixedLengthSink(long bytesRemaining) {
    this.bytesRemaining = bytesRemaining;
  }

  @Override public Timeout timeout() {
    return sink.timeout();
  }

  @Override public void write(Buffer source, long byteCount) throws IOException {
    if (closed) throw new IllegalStateException("closed");
    checkOffsetAndCount(source.size(), 0, byteCount);
    if (byteCount > bytesRemaining) {
      throw new ProtocolException("expected " + bytesRemaining
          + " bytes but received " + byteCount);
    }
    sink.write(source, byteCount);
    bytesRemaining -= byteCount;
  }

  @Override public void flush() throws IOException {
    if (closed) return; // Don't throw; this stream might have been closed on the caller's behalf.
    sink.flush();
  }

  @Override public void close() throws IOException {
    if (closed) return;
    closed = true;
    // Closing before the promised length was written is a protocol violation.
    if (bytesRemaining > 0) throw new ProtocolException("unexpected end of stream");
    state = STATE_READ_RESPONSE_HEADERS;
  }
}

private static final byte[] CRLF = { '\r', '\n' };
private static final byte[] HEX_DIGITS = {
    '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'
};
private static final byte[] FINAL_CHUNK = { '0', '\r', '\n', '\r', '\n' };

/**
 * An HTTP body with alternating chunk sizes and chunk bodies. It is the
 * caller's responsibility to buffer chunks; typically by using a buffered
 * sink with this sink.
*/
private final class ChunkedSink implements Sink {
  /** Scratch space for up to 16 hex digits, and then a constant CRLF. */
  private final byte[] hex = {
      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '\r', '\n'
  };

  private boolean closed;

  @Override public Timeout timeout() {
    return sink.timeout();
  }

  @Override public void write(Buffer source, long byteCount) throws IOException {
    if (closed) throw new IllegalStateException("closed");
    // A zero-length chunk would read as the final chunk, so skip it.
    if (byteCount == 0) return;

    writeHex(byteCount);
    sink.write(source, byteCount);
    sink.write(CRLF);
  }

  @Override public synchronized void flush() throws IOException {
    if (closed) return; // Don't throw; this stream might have been closed on the caller's behalf.
    sink.flush();
  }

  @Override public synchronized void close() throws IOException {
    if (closed) return;
    closed = true;
    sink.write(FINAL_CHUNK); // "0\r\n\r\n" terminates the chunked body.
    state = STATE_READ_RESPONSE_HEADERS;
  }

  /**
   * Equivalent to, but cheaper than writing Long.toHexString().getBytes()
   * followed by CRLF.
   */
  private void writeHex(long i) throws IOException {
    int cursor = 16;
    do {
      hex[--cursor] = HEX_DIGITS[((int) (i & 0xf))];
    } while ((i >>>= 4) != 0);
    sink.write(hex, cursor, hex.length - cursor);
  }
}

/** Base class for response-body sources; handles end-of-body bookkeeping. */
private abstract class AbstractSource implements Source {
  protected boolean closed;

  @Override public Timeout timeout() {
    return source.timeout();
  }

  /**
   * Closes the cache entry and makes the socket available for reuse. This
   * should be invoked when the end of the body has been reached.
   */
  protected final void endOfInput(boolean recyclable) throws IOException {
    if (state != STATE_READING_RESPONSE_BODY) throw new IllegalStateException("state: " + state);

    state = STATE_IDLE;
    if (recyclable && onIdle == ON_IDLE_POOL) {
      onIdle = ON_IDLE_HOLD; // Set the on idle policy back to the default.
      Internal.instance.recycle(pool, connection);
    } else if (onIdle == ON_IDLE_CLOSE) {
      state = STATE_CLOSED;
      connection.getSocket().close();
    }
  }

  /**
   * Calls abort on the cache entry and disconnects the socket. This
   * should be invoked when the connection is closed unexpectedly to
   * invalidate the cache entry and to prevent the HTTP connection from
   * being reused. HTTP messages are sent in serial so whenever a message
   * cannot be read to completion, subsequent messages cannot be read
   * either and the connection must be discarded.
   *
   * <p>An earlier implementation skipped the remaining bytes, but this
   * requires that the entire transfer be completed. If the intention was
   * to cancel the transfer, closing the connection is the only solution.
   */
  protected final void unexpectedEndOfInput() {
    Util.closeQuietly(connection.getSocket());
    state = STATE_CLOSED;
  }
}

/** An HTTP body with a fixed length specified in advance. */
private class FixedLengthSource extends AbstractSource {
  private long bytesRemaining;

  public FixedLengthSource(long length) throws IOException {
    bytesRemaining = length;
    if (bytesRemaining == 0) {
      endOfInput(true); // An empty body is already complete; release the connection now.
    }
  }

  @Override public long read(Buffer sink, long byteCount) throws IOException {
    if (byteCount < 0) throw new IllegalArgumentException("byteCount < 0: " + byteCount);
    if (closed) throw new IllegalStateException("closed");
    if (bytesRemaining == 0) return -1;

    long read = source.read(sink, Math.min(bytesRemaining, byteCount));
    if (read == -1) {
      unexpectedEndOfInput(); // The server didn't supply the promised content length.
      throw new ProtocolException("unexpected end of stream");
    }

    bytesRemaining -= read;
    if (bytesRemaining == 0) {
      endOfInput(true);
    }
    return read;
  }

  @Override public void close() throws IOException {
    if (closed) return;

    // Try to drain the unread remainder so the connection can be reused;
    // if that takes too long, discard the connection instead.
    if (bytesRemaining != 0
        && !Util.discard(this, DISCARD_STREAM_TIMEOUT_MILLIS, MILLISECONDS)) {
      unexpectedEndOfInput();
    }

    closed = true;
  }
}

/** An HTTP body with alternating chunk sizes and chunk bodies.
*/
private class ChunkedSource extends AbstractSource {
  /** Sentinel meaning no chunk-size header has been read yet. */
  private static final int NO_CHUNK_YET = -1;

  private int bytesRemainingInChunk = NO_CHUNK_YET;
  private boolean hasMoreChunks = true;
  private final HttpEngine httpEngine; // Receives any trailers read after the final chunk.

  ChunkedSource(HttpEngine httpEngine) throws IOException {
    this.httpEngine = httpEngine;
  }

  @Override public long read(Buffer sink, long byteCount) throws IOException {
    if (byteCount < 0) throw new IllegalArgumentException("byteCount < 0: " + byteCount);
    if (closed) throw new IllegalStateException("closed");
    if (!hasMoreChunks) return -1;

    // At a chunk boundary (or before the first chunk): read the next size line.
    if (bytesRemainingInChunk == 0 || bytesRemainingInChunk == NO_CHUNK_YET) {
      readChunkSize();
      if (!hasMoreChunks) return -1;
    }

    long read = source.read(sink, Math.min(byteCount, bytesRemainingInChunk));
    if (read == -1) {
      unexpectedEndOfInput(); // The server didn't supply the promised chunk length.
      throw new IOException("unexpected end of stream");
    }
    bytesRemainingInChunk -= read;
    return read;
  }

  private void readChunkSize() throws IOException {
    // Read the suffix of the previous chunk.
    if (bytesRemainingInChunk != NO_CHUNK_YET) {
      source.readUtf8LineStrict();
    }

    String chunkSizeString = source.readUtf8LineStrict();
    int index = chunkSizeString.indexOf(";");
    if (index != -1) {
      chunkSizeString = chunkSizeString.substring(0, index); // Drop any chunk extension.
    }
    try {
      bytesRemainingInChunk = Integer.parseInt(chunkSizeString.trim(), 16);
    } catch (NumberFormatException e) {
      throw new ProtocolException("Expected a hex chunk size but was " + chunkSizeString);
    }
    if (bytesRemainingInChunk == 0) {
      // Final (zero-length) chunk: read trailers, hand them to the engine, finish.
      hasMoreChunks = false;
      Headers.Builder trailersBuilder = new Headers.Builder();
      readHeaders(trailersBuilder);
      httpEngine.receiveHeaders(trailersBuilder.build());
      endOfInput(true);
    }
  }

  @Override public void close() throws IOException {
    if (closed) return;
    // Try to drain the remaining chunks so the connection can be reused;
    // if that takes too long, discard the connection instead.
    if (hasMoreChunks
        && !Util.discard(this, DISCARD_STREAM_TIMEOUT_MILLIS, MILLISECONDS)) {
      unexpectedEndOfInput();
    }
    closed = true;
  }
}

/** An HTTP message body terminated by the end of the underlying stream.
*/
private class UnknownLengthSource extends AbstractSource {
  private boolean inputExhausted;

  @Override public long read(Buffer sink, long byteCount) throws IOException {
    if (byteCount < 0) throw new IllegalArgumentException("byteCount < 0: " + byteCount);
    if (closed) throw new IllegalStateException("closed");
    if (inputExhausted) return -1;

    long read = source.read(sink, byteCount);
    if (read == -1) {
      inputExhausted = true;
      // Not recyclable: an end-of-stream-delimited body consumes the connection.
      endOfInput(false);
      return -1;
    }
    return read;
  }

  @Override public void close() throws IOException {
    if (closed) return;
    if (!inputExhausted) {
      unexpectedEndOfInput(); // Body not fully read; the connection can't be reused.
    }
    closed = true;
  }
}
}
/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.server.display; import android.graphics.Rect; import android.view.Display; import android.view.DisplayInfo; import android.view.Surface; import java.io.PrintWriter; import java.util.List; import libcore.util.Objects; /** * Describes how a logical display is configured. * <p> * At this time, we only support logical displays that are coupled to a particular * primary display device from which the logical display derives its basic properties * such as its size, density and refresh rate. * </p><p> * A logical display may be mirrored onto multiple display devices in addition to its * primary display device. Note that the contents of a logical display may not * always be visible, even on its primary display device, such as in the case where * the primary display device is currently mirroring content from a different * logical display. * </p><p> * This object is designed to encapsulate as much of the policy of logical * displays as possible. The idea is to make it easy to implement new kinds of * logical displays mostly by making local changes to this class. * </p><p> * Note: The display manager architecture does not actually require logical displays * to be associated with any individual display device. Logical displays and * display devices are orthogonal concepts. 
Some mapping will exist between
 * logical displays and display devices but it can be many-to-many and
 * some might have no relation at all.
 * </p><p>
 * Logical displays are guarded by the {@link DisplayManagerService.SyncRoot} lock.
 * </p>
 */
final class LogicalDisplay {
    // Base display info derived from the primary display device; any window
    // manager override is layered on top of it in getDisplayInfoLocked().
    private final DisplayInfo mBaseDisplayInfo = new DisplayInfo();

    // The layer stack we use when the display has been blanked to prevent any
    // of its content from appearing.
    private static final int BLANK_LAYER_STACK = -1;

    private final int mDisplayId;
    private final int mLayerStack;
    private DisplayInfo mOverrideDisplayInfo; // set by the window manager
    private DisplayInfo mInfo; // cached merged base + override; null means "recompute"

    // The display device that this logical display is based on and which
    // determines the base metrics that it uses.
    private DisplayDevice mPrimaryDisplayDevice;
    private DisplayDeviceInfo mPrimaryDisplayDeviceInfo;

    // True if the logical display has unique content.
    private boolean mHasContent;

    // Temporary rectangles used when needed, to avoid per-call allocation.
    private final Rect mTempLayerStackRect = new Rect();
    private final Rect mTempDisplayRect = new Rect();

    public LogicalDisplay(int displayId, int layerStack, DisplayDevice primaryDisplayDevice) {
        mDisplayId = displayId;
        mLayerStack = layerStack;
        mPrimaryDisplayDevice = primaryDisplayDevice;
    }

    /**
     * Gets the logical display id of this logical display.
     *
     * @return The logical display id.
     */
    public int getDisplayIdLocked() {
        return mDisplayId;
    }

    /**
     * Gets the primary display device associated with this logical display.
     *
     * @return The primary display device.
     */
    public DisplayDevice getPrimaryDisplayDeviceLocked() {
        return mPrimaryDisplayDevice;
    }

    /**
     * Gets information about the logical display.
     *
     * @return The device info, which should be treated as immutable by the caller.
     * The logical display should allocate a new display info object whenever
     * the data changes.
*/
public DisplayInfo getDisplayInfoLocked() {
    if (mInfo == null) {
        mInfo = new DisplayInfo();
        if (mOverrideDisplayInfo != null) {
            mInfo.copyFrom(mOverrideDisplayInfo);
            // Layer stack and name always come from the base info, even when
            // the window manager has overridden everything else.
            mInfo.layerStack = mBaseDisplayInfo.layerStack;
            mInfo.name = mBaseDisplayInfo.name;
        } else {
            mInfo.copyFrom(mBaseDisplayInfo);
        }
    }
    return mInfo;
}

/**
 * Sets overridden logical display information from the window manager.
 * This method can be used to adjust application insets, rotation, and other
 * properties that the window manager takes care of.
 *
 * @param info The logical display information, may be null.
 */
public void setDisplayInfoOverrideFromWindowManagerLocked(DisplayInfo info) {
    if (info != null) {
        if (mOverrideDisplayInfo == null) {
            mOverrideDisplayInfo = new DisplayInfo(info);
            mInfo = null; // Invalidate the cached merged info.
        } else if (!mOverrideDisplayInfo.equals(info)) {
            mOverrideDisplayInfo.copyFrom(info);
            mInfo = null;
        }
    } else if (mOverrideDisplayInfo != null) {
        mOverrideDisplayInfo = null;
        mInfo = null;
    }
}

/**
 * Returns true if the logical display is in a valid state.
 * This method should be checked after calling {@link #updateLocked} to handle the
 * case where a logical display should be removed because all of its associated
 * display devices are gone or if it is otherwise no longer needed.
 *
 * @return True if the logical display is still valid.
 */
public boolean isValidLocked() {
    return mPrimaryDisplayDevice != null;
}

/**
 * Updates the state of the logical display based on the available display devices.
 * The logical display might become invalid if it is attached to a display device
 * that no longer exists.
 *
 * @param devices The list of all connected display devices.
 */
public void updateLocked(List<DisplayDevice> devices) {
    // Nothing to update if already invalid.
    if (mPrimaryDisplayDevice == null) {
        return;
    }

    // Check whether logical display has become invalid.
    if (!devices.contains(mPrimaryDisplayDevice)) {
        mPrimaryDisplayDevice = null;
        return;
    }

    // Bootstrap the logical display using its associated primary physical display.
    // We might use more elaborate configurations later.  It's possible that the
    // configuration of several physical displays might be used to determine the
    // logical display that they are sharing.  (eg. Adjust size for pixel-perfect
    // mirroring over HDMI.)
    DisplayDeviceInfo deviceInfo = mPrimaryDisplayDevice.getDisplayDeviceInfoLocked();
    if (!Objects.equal(mPrimaryDisplayDeviceInfo, deviceInfo)) {
        // Rebuild the base display info only when the device info changed.
        mBaseDisplayInfo.layerStack = mLayerStack;
        mBaseDisplayInfo.flags = 0;
        if ((deviceInfo.flags & DisplayDeviceInfo.FLAG_SUPPORTS_PROTECTED_BUFFERS) != 0) {
            mBaseDisplayInfo.flags |= Display.FLAG_SUPPORTS_PROTECTED_BUFFERS;
        }
        if ((deviceInfo.flags & DisplayDeviceInfo.FLAG_SECURE) != 0) {
            mBaseDisplayInfo.flags |= Display.FLAG_SECURE;
        }
        mBaseDisplayInfo.type = deviceInfo.type;
        mBaseDisplayInfo.address = deviceInfo.address;
        mBaseDisplayInfo.name = deviceInfo.name;
        mBaseDisplayInfo.appWidth = deviceInfo.width;
        mBaseDisplayInfo.appHeight = deviceInfo.height;
        mBaseDisplayInfo.logicalWidth = deviceInfo.width;
        mBaseDisplayInfo.logicalHeight = deviceInfo.height;
        mBaseDisplayInfo.rotation = Surface.ROTATION_0;
        mBaseDisplayInfo.refreshRate = deviceInfo.refreshRate;
        mBaseDisplayInfo.logicalDensityDpi = deviceInfo.densityDpi;
        mBaseDisplayInfo.physicalXDpi = deviceInfo.xDpi;
        mBaseDisplayInfo.physicalYDpi = deviceInfo.yDpi;
        mBaseDisplayInfo.smallestNominalAppWidth = deviceInfo.width;
        mBaseDisplayInfo.smallestNominalAppHeight = deviceInfo.height;
        mBaseDisplayInfo.largestNominalAppWidth = deviceInfo.width;
        mBaseDisplayInfo.largestNominalAppHeight = deviceInfo.height;

        mPrimaryDisplayDeviceInfo = deviceInfo;
        mInfo = null; // Invalidate the cached merged info.
    }
}

/**
 * Applies the layer stack and transformation to the given display device
 * so that it shows the contents of this logical display.
*
 * We know that the given display device is only ever showing the contents of
 * a single logical display, so this method is expected to blow away all of its
 * transformation properties to make it happen regardless of what the
 * display device was previously showing.
 *
 * The caller must have an open Surface transaction.
 *
 * The display device may not be the primary display device, in the case
 * where the display is being mirrored.
 *
 * @param device The display device to modify.
 * @param isBlanked True if the device is being blanked.
 */
public void configureDisplayInTransactionLocked(DisplayDevice device, boolean isBlanked) {
    final DisplayInfo displayInfo = getDisplayInfoLocked();
    final DisplayDeviceInfo displayDeviceInfo = device.getDisplayDeviceInfoLocked();

    // Set the layer stack.  A blanked device is parked on a layer stack with
    // no content so nothing shows.
    device.setLayerStackInTransactionLocked(isBlanked ? BLANK_LAYER_STACK : mLayerStack);

    // Set the viewport.
    // This is the area of the logical display that we intend to show on the
    // display device.  For now, it is always the full size of the logical display.
    mTempLayerStackRect.set(0, 0, displayInfo.logicalWidth, displayInfo.logicalHeight);

    // Set the orientation.
    // The orientation specifies how the physical coordinate system of the display
    // is rotated when the contents of the logical display are rendered.
    int orientation = Surface.ROTATION_0;
    if (device == mPrimaryDisplayDevice
            && (displayDeviceInfo.flags & DisplayDeviceInfo.FLAG_ROTATES_WITH_CONTENT) != 0) {
        orientation = displayInfo.rotation;
    }

    // Apply the physical rotation of the display device itself.
    orientation = (orientation + displayDeviceInfo.rotation) % 4;

    // Set the frame.
    // The frame specifies the rotated physical coordinates into which the viewport
    // is mapped.  We need to take care to preserve the aspect ratio of the viewport.
    // Currently we maximize the area to fill the display, but we could try to be
    // more clever and match resolutions.
    boolean rotated = (orientation == Surface.ROTATION_90
            || orientation == Surface.ROTATION_270);
    int physWidth = rotated ? displayDeviceInfo.height : displayDeviceInfo.width;
    int physHeight = rotated ? displayDeviceInfo.width : displayDeviceInfo.height;

    // Determine whether the width or height is more constrained to be scaled.
    //    physWidth / displayInfo.logicalWidth    => letter box
    // or physHeight / displayInfo.logicalHeight  => pillar box
    //
    // We avoid a division (and possible floating point imprecision) here by
    // multiplying the fractions by the product of their denominators before
    // comparing them.
    int displayRectWidth, displayRectHeight;
    if (physWidth * displayInfo.logicalHeight
            < physHeight * displayInfo.logicalWidth) {
        // Letter box.
        displayRectWidth = physWidth;
        displayRectHeight = displayInfo.logicalHeight * physWidth / displayInfo.logicalWidth;
    } else {
        // Pillar box.
        displayRectWidth = displayInfo.logicalWidth * physHeight / displayInfo.logicalHeight;
        displayRectHeight = physHeight;
    }
    // Center the projected rectangle on the physical display.
    int displayRectTop = (physHeight - displayRectHeight) / 2;
    int displayRectLeft = (physWidth - displayRectWidth) / 2;
    mTempDisplayRect.set(displayRectLeft, displayRectTop,
            displayRectLeft + displayRectWidth, displayRectTop + displayRectHeight);

    device.setProjectionInTransactionLocked(orientation, mTempLayerStackRect, mTempDisplayRect);
}

/**
 * Returns true if the logical display has unique content.
 * <p>
 * If the display has unique content then we will try to ensure that it is
 * visible on at least its primary display device.  Otherwise we will ignore the
 * logical display and perhaps show mirrored content on the primary display device.
 * </p>
 *
 * @return True if the display has unique content.
 */
public boolean hasContentLocked() {
    return mHasContent;
}

/**
 * Sets whether the logical display has unique content.
 *
 * @param hasContent True if the display has unique content.
*/
public void setHasContentLocked(boolean hasContent) {
    mHasContent = hasContent;
}

/** Dumps this display's state for debugging. */
public void dumpLocked(PrintWriter pw) {
    pw.println("mDisplayId=" + mDisplayId);
    pw.println("mLayerStack=" + mLayerStack);
    pw.println("mHasContent=" + mHasContent);
    pw.println("mPrimaryDisplayDevice=" + (mPrimaryDisplayDevice != null ?
            mPrimaryDisplayDevice.getNameLocked() : "null"));
    pw.println("mBaseDisplayInfo=" + mBaseDisplayInfo);
    pw.println("mOverrideDisplayInfo=" + mOverrideDisplayInfo);
}
}
/**
 * <copyright>
 * </copyright>
 *
 * $Id$
 */
package org.oasis.xAL.impl;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;

import org.eclipse.emf.ecore.util.BasicFeatureMap;
import org.eclipse.emf.ecore.util.FeatureMap;
import org.eclipse.emf.ecore.util.InternalEList;

import org.oasis.xAL.ThoroughfareNameType;
import org.oasis.xAL.XALPackage;

// NOTE(review): EMF-generated class. The @generated markers must be preserved;
// hand edits to @generated members will be overwritten on model regeneration.
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Thoroughfare Name Type</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link org.oasis.xAL.impl.ThoroughfareNameTypeImpl#getMixed <em>Mixed</em>}</li>
 *   <li>{@link org.oasis.xAL.impl.ThoroughfareNameTypeImpl#getCode <em>Code</em>}</li>
 *   <li>{@link org.oasis.xAL.impl.ThoroughfareNameTypeImpl#getType <em>Type</em>}</li>
 *   <li>{@link org.oasis.xAL.impl.ThoroughfareNameTypeImpl#getAnyAttribute <em>Any Attribute</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public class ThoroughfareNameTypeImpl extends EObjectImpl implements ThoroughfareNameType {
    /**
     * The cached value of the '{@link #getMixed() <em>Mixed</em>}' attribute list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getMixed()
     * @generated
     * @ordered
     */
    protected FeatureMap mixed;

    /**
     * The default value of the '{@link #getCode() <em>Code</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getCode()
     * @generated
     * @ordered
     */
    protected static final Object CODE_EDEFAULT = null;

    /**
     * The cached value of the '{@link #getCode() <em>Code</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getCode()
     * @generated
     * @ordered
     */
    protected Object code = CODE_EDEFAULT;

    /**
     * The default value of the '{@link #getType() <em>Type</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getType()
     * @generated
     * @ordered
     */
    protected static final Object TYPE_EDEFAULT = null;

    /**
     * The cached value of the '{@link #getType() <em>Type</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getType()
     * @generated
     * @ordered
     */
    protected Object type = TYPE_EDEFAULT;

    /**
     * The cached value of the '{@link #getAnyAttribute() <em>Any Attribute</em>}' attribute list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getAnyAttribute()
     * @generated
     * @ordered
     */
    protected FeatureMap anyAttribute;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected ThoroughfareNameTypeImpl() {
        super();
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return XALPackage.eINSTANCE.getThoroughfareNameType();
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public FeatureMap getMixed() {
        if (mixed == null) {
            mixed = new BasicFeatureMap(this, XALPackage.THOROUGHFARE_NAME_TYPE__MIXED);
        }
        return mixed;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public Object getCode() {
        return code;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setCode(Object newCode) {
        Object oldCode = code;
        code = newCode;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, XALPackage.THOROUGHFARE_NAME_TYPE__CODE, oldCode, code));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public Object getType() {
        return type;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setType(Object newType) {
        Object oldType = type;
        type = newType;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, XALPackage.THOROUGHFARE_NAME_TYPE__TYPE, oldType, type));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public FeatureMap getAnyAttribute() {
        if (anyAttribute == null) {
            anyAttribute = new BasicFeatureMap(this, XALPackage.THOROUGHFARE_NAME_TYPE__ANY_ATTRIBUTE);
        }
        return anyAttribute;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case XALPackage.THOROUGHFARE_NAME_TYPE__MIXED:
                return ((InternalEList<?>)getMixed()).basicRemove(otherEnd, msgs);
            case XALPackage.THOROUGHFARE_NAME_TYPE__ANY_ATTRIBUTE:
                return ((InternalEList<?>)getAnyAttribute()).basicRemove(otherEnd, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case XALPackage.THOROUGHFARE_NAME_TYPE__MIXED:
                if (coreType) return getMixed();
                return ((FeatureMap.Internal)getMixed()).getWrapper();
            case XALPackage.THOROUGHFARE_NAME_TYPE__CODE:
                return getCode();
            case XALPackage.THOROUGHFARE_NAME_TYPE__TYPE:
                return getType();
            case XALPackage.THOROUGHFARE_NAME_TYPE__ANY_ATTRIBUTE:
                if (coreType) return getAnyAttribute();
                return ((FeatureMap.Internal)getAnyAttribute()).getWrapper();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case XALPackage.THOROUGHFARE_NAME_TYPE__MIXED:
                ((FeatureMap.Internal)getMixed()).set(newValue);
                return;
            case XALPackage.THOROUGHFARE_NAME_TYPE__CODE:
                setCode(newValue);
                return;
            case XALPackage.THOROUGHFARE_NAME_TYPE__TYPE:
                setType(newValue);
                return;
            case XALPackage.THOROUGHFARE_NAME_TYPE__ANY_ATTRIBUTE:
                ((FeatureMap.Internal)getAnyAttribute()).set(newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case XALPackage.THOROUGHFARE_NAME_TYPE__MIXED:
                getMixed().clear();
                return;
            case XALPackage.THOROUGHFARE_NAME_TYPE__CODE:
                setCode(CODE_EDEFAULT);
                return;
            case XALPackage.THOROUGHFARE_NAME_TYPE__TYPE:
                setType(TYPE_EDEFAULT);
                return;
            case XALPackage.THOROUGHFARE_NAME_TYPE__ANY_ATTRIBUTE:
                getAnyAttribute().clear();
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case XALPackage.THOROUGHFARE_NAME_TYPE__MIXED:
                return mixed != null && !mixed.isEmpty();
            case XALPackage.THOROUGHFARE_NAME_TYPE__CODE:
                return CODE_EDEFAULT == null ? code != null : !CODE_EDEFAULT.equals(code);
            case XALPackage.THOROUGHFARE_NAME_TYPE__TYPE:
                return TYPE_EDEFAULT == null ? type != null : !TYPE_EDEFAULT.equals(type);
            case XALPackage.THOROUGHFARE_NAME_TYPE__ANY_ATTRIBUTE:
                return anyAttribute != null && !anyAttribute.isEmpty();
        }
        return super.eIsSet(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();

        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (mixed: ");
        result.append(mixed);
        result.append(", code: ");
        result.append(code);
        result.append(", type: ");
        result.append(type);
        result.append(", anyAttribute: ");
        result.append(anyAttribute);
        result.append(')');
        return result.toString();
    }

} //ThoroughfareNameTypeImpl
package liuyang.nlp.lda.com; /* * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. */ /* * Stopwords.java * Copyright (C) 2001 Eibe Frank */ //package weka.core; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.util.Collections; import java.util.Date; import java.util.Enumeration; import java.util.HashSet; import java.util.Iterator; import java.util.Vector; /** * Class that can test whether a given string is a stop word. * Lowercases all words before the test. <p/> * The format for reading and writing is one word per line, lines starting * with '#' are interpreted as comments and therefore skipped. <p/> * The default stopwords are based on <a href="http://www.cs.cmu.edu/~mccallum/bow/rainbow/" target="_blank">Rainbow</a>. <p/> * * Accepts the following parameter: <p/> * * -i file <br/> * loads the stopwords from the given file <p/> * * -o file <br/> * saves the stopwords to the given file <p/> * * -p <br/> * outputs the current stopwords on stdout <p/> * * Any additional parameters are interpreted as words to test as stopwords. * * @author Eibe Frank (eibe@cs.waikato.ac.nz) * @author Ashraf M. 
Kibriya (amk14@cs.waikato.ac.nz)
 * @author FracPete (fracpete at waikato dot ac dot nz)
 * @version $Revision: 1.4 $
 */
public class Stopwords {

    /**
     * The stopwords. Every entry is stored trimmed and lower-cased, so
     * membership tests are case-insensitive.
     */
    protected HashSet<String> m_Words = null;

    /** The default stopwords object (stoplist based on Rainbow). */
    protected static Stopwords m_Stopwords;

    static {
        if (m_Stopwords == null) {
            m_Stopwords = new Stopwords();
        }
    }

    /**
     * The default stoplist: the Rainbow list plus common English contractions
     * and a few extras ("rt", "w/", "w/o"). Duplicates that were present in
     * the original hand-written add() sequence have been removed (a set makes
     * them harmless anyway).
     */
    private static final String[] DEFAULT_WORDS = {
        "a", "able", "about", "above", "according", "accordingly", "across", "actually", "after", "afterwards",
        "again", "all", "alone", "along", "already", "also", "always", "am", "among", "amongst",
        "an", "and", "another", "any", "anybody", "anyhow", "anyone", "anything", "anyway", "anyways",
        "anywhere", "apart", "appear", "appropriate", "are", "around", "as", "aside", "ask", "asking",
        "associated", "at", "available", "away", "b", "be", "became", "because", "become", "becomes",
        "becoming", "been", "before", "beforehand", "behind", "being", "believe", "below", "beside", "besides",
        "between", "beyond", "both", "but", "brief", "by", "c", "came", "can", "certain",
        "certainly", "clearly", "co", "com", "come", "comes", "contain", "containing", "contains", "corresponding",
        "could", "course", "currently", "d", "definitely", "described", "despite", "did", "different", "do",
        "does", "doing", "done", "down", "downwards", "during", "e", "each", "edu", "eg",
        "eight", "either", "else", "elsewhere", "enough", "entirely", "especially", "et", "etc", "even",
        "ever", "every", "everybody", "everyone", "everything", "everywhere", "ex", "exactly", "example", "except",
        "f", "far", "few", "fifth", "first", "five", "followed", "following", "follows", "for",
        "former", "formerly", "forth", "four", "from", "further", "furthermore", "g", "get", "gets",
        "getting", "given", "gives", "go", "goes", "going", "gone", "got", "gotten", "h",
        "had", "happens", "has", "have", "having", "he", "hello", "help", "hence", "her",
        "here", "hereafter", "hereby", "herein", "hereupon", "hers", "herself", "hi", "him", "himself",
        "his", "hither", "how", "howbeit", "however", "i", "ie", "if", "immediate", "in",
        "inasmuch", "inc", "indeed", "indicate", "indicated", "indicates", "inner", "insofar", "instead", "into",
        "inward", "is", "it", "its", "itself", "j", "just", "k", "keep", "keeps",
        "kept", "l", "last", "lately", "later", "latter", "latterly", "least", "less", "lest",
        "let", "like", "liked", "likely", "little",
        "ll", // added to avoid words like you'll, I'll etc.
        "look", "looking", "looks", "ltd",
        "m", "mainly", "many", "may", "maybe", "me", "meanwhile", "might", "more", "moreover",
        "most", "mostly", "much", "must", "my", "myself", "n", "name", "namely", "nd",
        "near", "nearly", "necessary", "need", "needs", "new", "next", "nine", "normally", "no",
        "nobody", "non", "none", "noone", "nor", "not", "n't", "nothing", "novel", "now",
        "nowhere", "o", "obviously", "of", "off", "often", "oh", "ok", "okay", "on",
        "once", "one", "ones", "only", "onto", "or", "other", "others", "otherwise", "ought",
        "our", "ours", "ourselves", "out", "outside", "over", "overall", "own", "p", "particular",
        "particularly", "per", "perhaps", "placed", "please", "plus", "possible", "presumably", "probably", "provides",
        "q", "que", "quite", "qv", "r", "rather", "rd", "re", "really", "reasonably",
        "regarding", "regardless", "regards", "relatively", "respectively", "right", "s", "said", "same", "saw",
        "say", "saying", "says", "second", "secondly", "see", "seeing", "seen", "self", "selves",
        "sensible", "sent", "seven", "several", "shall", "she", "should", "since", "six", "so",
        "some", "somebody", "somehow", "someone", "something", "sometime", "sometimes", "somewhat", "somewhere", "soon",
        "sorry", "specified", "specify", "specifying", "still", "sub", "such", "sup", "sure", "t",
        "take", "taken", "tell", "tends", "th", "than", "that", "thats", "the", "their",
        "theirs", "them", "themselves", "then", "thence", "there", "thereafter", "thereby", "therefore", "therein",
        "theres", "thereupon", "these", "they", "think", "third", "this", "thorough", "thoroughly", "those",
        "though", "three", "through", "throughout", "thru", "thus", "to", "together", "too", "took",
        "toward", "towards", "tried", "tries", "truly", "try", "trying", "twice", "two", "u",
        "un", "under", "until", "unto", "up", "upon", "us", "use", "used", "uses",
        "using", "usually", "uucp", "v", "value", "various",
        "ve", // added to avoid words like I've, you've etc.
        "very", "via", "viz", "vs", "w", "want", "wants", "was", "we", "went",
        "were", "what", "when", "whence", "whenever", "where", "whereafter", "whereas", "whereby", "wherein",
        "whereupon", "wherever", "whether", "which", "while", "whither", "who", "whoever", "whole", "whom",
        "whose", "why", "will", "willing", "wish", "with", "within", "without", "wonder", "would",
        "x", "y", "yet", "you", "your", "yours", "yourself", "yourselves", "z", "zero",
        // contractions and twitter-isms
        "i'm", "he's", "she's", "you're", "i'll", "you'll", "she'll", "he'll", "it's", "don't",
        "can't", "didn't", "i've", "that's", "there's", "isn't", "what's", "rt", "doesn't", "w/", "w/o"
    };

    /**
     * initializes the stopwords (based on
     * <a href="http://www.cs.cmu.edu/~mccallum/bow/rainbow/" target="_blank">Rainbow</a>).
     */
    public Stopwords() {
        m_Words = new HashSet<String>();
        for (String word : DEFAULT_WORDS) {
            add(word);
        }
    }

    /**
     * removes all stopwords
     */
    public void clear() {
        m_Words.clear();
    }

    /**
     * adds the given word to the stopword list (is automatically converted to
     * lower case and trimmed)
     *
     * @param word the word to add
     */
    public void add(String word) {
        if (word.trim().length() > 0)
            m_Words.add(word.trim().toLowerCase());
    }

    /**
     * removes the word from the stopword list
     *
     * @param word the word to remove (trimmed/lower-cased before lookup)
     * @return true if the word was found in the list and then removed
     */
    public boolean remove(String word) {
        // Bug fix: normalize exactly like add(). The set only ever contains
        // trimmed, lower-cased entries, so the previous un-normalized lookup
        // made remove("The") or remove(" the ") silently fail.
        return m_Words.remove(word.trim().toLowerCase());
    }

    /**
     * Returns a sorted enumeration over all stored stopwords
     *
     * @return the enumeration over all stopwords
     */
    public Enumeration<String> elements() {
        Vector<String> list = new Vector<String>(m_Words);
        Collections.sort(list);
        return list.elements();
    }

    /**
     * Generates a new Stopwords object from the given file
     *
     * @param filename the file to read the stopwords from
     * @throws Exception if reading fails
     */
    public void read(String filename) throws Exception {
        read(new File(filename));
    }

    /**
     * Generates a new Stopwords object from the given file
     *
     * @param file the file to read the stopwords from
     * @throws Exception if reading fails
     */
    public void read(File file) throws Exception {
        read(new BufferedReader(new FileReader(file)));
    }

    /**
     * Generates a new Stopwords object from the reader. The reader is
     * closed automatically.
     *
     * @param reader the reader to get the stopwords from
     * @throws Exception if reading fails
     */
    public void read(BufferedReader reader) throws Exception {
        String line;

        clear();
        while ((line = reader.readLine()) != null) {
            line = line.trim();
            // lines starting with '#' are comments
            if (line.startsWith("#"))
                continue;
            add(line);
        }
        reader.close();
    }

    /**
     * Writes the current stopwords to the given file
     *
     * @param filename the file to write the stopwords to
     * @throws Exception if writing fails
     */
    public void write(String filename) throws Exception {
        write(new File(filename));
    }

    /**
     * Writes the current stopwords to the given file
     *
     * @param file the file to write the stopwords to
     * @throws Exception if writing fails
     */
    public void write(File file) throws Exception {
        write(new BufferedWriter(new FileWriter(file)));
    }

    /**
     * Writes the current stopwords to the given writer. The writer is closed
     * automatically.
     *
     * @param writer the writer to get the stopwords from
     * @throws Exception if writing fails
     */
    public void write(BufferedWriter writer) throws Exception {
        // header
        writer.write("# generated " + new Date());
        writer.newLine();

        Enumeration<String> enm = elements();
        while (enm.hasMoreElements()) {
            writer.write(enm.nextElement());
            writer.newLine();
        }
        writer.flush();
        writer.close();
    }

    /**
     * returns the current stopwords in a string (comma-separated, sorted)
     *
     * @return the current stopwords
     */
    public String toString() {
        StringBuilder result = new StringBuilder();
        Enumeration<String> enm = elements();
        while (enm.hasMoreElements()) {
            result.append(enm.nextElement());
            if (enm.hasMoreElements())
                result.append(",");
        }
        return result.toString();
    }

    /**
     * Returns true if the given string is a stop word.
     *
     * @param word the word to test
     * @return true if the word is a stopword
     */
    public boolean is(String word) {
        return m_Words.contains(word.toLowerCase());
    }

    /**
     * Returns true if the given string is a stop word (checked against the
     * shared default stoplist).
     *
     * @param str the word to test
     * @return true if the word is a stopword
     */
    public static boolean isStopword(String str) {
        return m_Stopwords.is(str.toLowerCase());
    }
}
//Licensed to the Apache Software Foundation (ASF) under one //or more contributor license agreements. See the NOTICE file //distributed with this work for additional information //regarding copyright ownership. The ASF licenses this file //to you under the Apache License, Version 2.0 (the //"License"); you may not use this file except in compliance //with the License. You may obtain a copy of the License at // //http://www.apache.org/licenses/LICENSE-2.0 // //Unless required by applicable law or agreed to in writing, //software distributed under the License is distributed on an //"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY //KIND, either express or implied. See the License for the //specific language governing permissions and limitations //under the License. package org.apache.cloudstack.quota; import java.io.UnsupportedEncodingException; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.concurrent.TimeUnit; import javax.inject.Inject; import javax.mail.Authenticator; import javax.mail.Message; import javax.mail.MessagingException; import javax.mail.PasswordAuthentication; import javax.mail.Session; import javax.mail.URLName; import javax.mail.internet.InternetAddress; import javax.naming.ConfigurationException; import org.apache.cloudstack.framework.config.dao.ConfigurationDao; import org.apache.cloudstack.quota.constant.QuotaConfig; import org.apache.cloudstack.quota.constant.QuotaConfig.QuotaEmailTemplateTypes; import org.apache.cloudstack.quota.dao.QuotaAccountDao; import org.apache.cloudstack.quota.dao.QuotaEmailTemplatesDao; import org.apache.cloudstack.quota.vo.QuotaAccountVO; import org.apache.cloudstack.quota.vo.QuotaEmailTemplatesVO; import org.apache.commons.lang3.text.StrSubstitutor; import org.apache.log4j.Logger; import org.springframework.stereotype.Component; import com.cloud.domain.DomainVO; 
import com.cloud.domain.dao.DomainDao; import com.cloud.user.Account; import com.cloud.user.Account.State; import com.cloud.user.AccountVO; import com.cloud.user.UserVO; import com.cloud.user.dao.AccountDao; import com.cloud.user.dao.UserDao; import com.cloud.utils.NumbersUtil; import com.cloud.utils.component.ManagerBase; import com.cloud.utils.db.TransactionLegacy; import com.google.common.base.Strings; import com.sun.mail.smtp.SMTPMessage; import com.sun.mail.smtp.SMTPSSLTransport; import com.sun.mail.smtp.SMTPTransport; @Component public class QuotaAlertManagerImpl extends ManagerBase implements QuotaAlertManager { private static final Logger s_logger = Logger.getLogger(QuotaAlertManagerImpl.class); @Inject private AccountDao _accountDao; @Inject private QuotaAccountDao _quotaAcc; @Inject private UserDao _userDao; @Inject private DomainDao _domainDao; @Inject private QuotaEmailTemplatesDao _quotaEmailTemplateDao; @Inject private ConfigurationDao _configDao; @Inject private QuotaManager _quotaManager; private EmailQuotaAlert _emailQuotaAlert; private boolean _lockAccountEnforcement = false; boolean _smtpDebug = false; public QuotaAlertManagerImpl() { super(); } private void mergeConfigs(Map<String, String> dbParams, Map<String, Object> xmlParams) { for (Map.Entry<String, Object> param : xmlParams.entrySet()) { dbParams.put(param.getKey(), (String)param.getValue()); } } @Override public boolean configure(String name, Map<String, Object> params) throws ConfigurationException { super.configure(name, params); Map<String, String> configs = _configDao.getConfiguration(params); if (params != null) { mergeConfigs(configs, params); } final String smtpHost = configs.get(QuotaConfig.QuotaSmtpHost.key()); int smtpPort = NumbersUtil.parseInt(configs.get(QuotaConfig.QuotaSmtpPort.key()), 25); String useAuthStr = configs.get(QuotaConfig.QuotaSmtpAuthType.key()); boolean useAuth = ((useAuthStr != null) && Boolean.parseBoolean(useAuthStr)); String smtpUsername = 
configs.get(QuotaConfig.QuotaSmtpUser.key()); String smtpPassword = configs.get(QuotaConfig.QuotaSmtpPassword.key()); String emailSender = configs.get(QuotaConfig.QuotaSmtpSender.key()); _lockAccountEnforcement = "true".equalsIgnoreCase(configs.get(QuotaConfig.QuotaEnableEnforcement.key())); _emailQuotaAlert = new EmailQuotaAlert(smtpHost, smtpPort, useAuth, smtpUsername, smtpPassword, emailSender, _smtpDebug); return true; } @Override public boolean start() { if (s_logger.isInfoEnabled()) { s_logger.info("Starting Alert Manager"); } return true; } @Override public boolean stop() { if (s_logger.isInfoEnabled()) { s_logger.info("Stopping Alert Manager"); } return true; } @Override public void checkAndSendQuotaAlertEmails() { List<DeferredQuotaEmail> deferredQuotaEmailList = new ArrayList<DeferredQuotaEmail>(); final BigDecimal zeroBalance = new BigDecimal(0); for (final QuotaAccountVO quotaAccount : _quotaAcc.listAllQuotaAccount()) { if (s_logger.isDebugEnabled()) { s_logger.debug("checkAndSendQuotaAlertEmails accId=" + quotaAccount.getId()); } BigDecimal accountBalance = quotaAccount.getQuotaBalance(); Date balanceDate = quotaAccount.getQuotaBalanceDate(); Date alertDate = quotaAccount.getQuotaAlertDate(); int lockable = quotaAccount.getQuotaEnforce(); BigDecimal thresholdBalance = quotaAccount.getQuotaMinBalance(); if (accountBalance != null) { AccountVO account = _accountDao.findById(quotaAccount.getId()); if (account == null) { continue; // the account is removed } if (s_logger.isDebugEnabled()) { s_logger.debug("checkAndSendQuotaAlertEmails: Check id=" + account.getId() + " bal=" + accountBalance + ", alertDate=" + alertDate + ", lockable=" + lockable); } if (accountBalance.compareTo(zeroBalance) < 0) { if (_lockAccountEnforcement && (lockable == 1)) { if (_quotaManager.isLockable(account)) { s_logger.info("Locking account " + account.getAccountName() + " due to quota < 0."); lockAccount(account.getId()); } } if (alertDate == null || 
(balanceDate.after(alertDate) && getDifferenceDays(alertDate, new Date()) > 1)) { s_logger.info("Sending alert " + account.getAccountName() + " due to quota < 0."); deferredQuotaEmailList.add(new DeferredQuotaEmail(account, quotaAccount, QuotaConfig.QuotaEmailTemplateTypes.QUOTA_EMPTY)); } } else if (accountBalance.compareTo(thresholdBalance) < 0) { if (alertDate == null || (balanceDate.after(alertDate) && getDifferenceDays(alertDate, new Date()) > 1)) { s_logger.info("Sending alert " + account.getAccountName() + " due to quota below threshold."); deferredQuotaEmailList.add(new DeferredQuotaEmail(account, quotaAccount, QuotaConfig.QuotaEmailTemplateTypes.QUOTA_LOW)); } } } } for (DeferredQuotaEmail emailToBeSent : deferredQuotaEmailList) { if (s_logger.isDebugEnabled()) { s_logger.debug("checkAndSendQuotaAlertEmails: Attempting to send quota alert email to users of account: " + emailToBeSent.getAccount().getAccountName()); } sendQuotaAlert(emailToBeSent); } } @Override public void sendQuotaAlert(DeferredQuotaEmail emailToBeSent) { final AccountVO account = emailToBeSent.getAccount(); final BigDecimal balance = emailToBeSent.getQuotaBalance(); final BigDecimal usage = emailToBeSent.getQuotaUsage(); final QuotaConfig.QuotaEmailTemplateTypes emailType = emailToBeSent.getEmailTemplateType(); final List<QuotaEmailTemplatesVO> emailTemplates = _quotaEmailTemplateDao.listAllQuotaEmailTemplates(emailType.toString()); if (emailTemplates != null && emailTemplates.get(0) != null) { final QuotaEmailTemplatesVO emailTemplate = emailTemplates.get(0); final DomainVO accountDomain = _domainDao.findByIdIncludingRemoved(account.getDomainId()); final List<UserVO> usersInAccount = _userDao.listByAccount(account.getId()); String userNames = ""; final List<String> emailRecipients = new ArrayList<String>(); for (UserVO user : usersInAccount) { userNames += String.format("%s <%s>,", user.getUsername(), user.getEmail()); emailRecipients.add(user.getEmail()); } if (userNames.endsWith(",")) 
{ userNames = userNames.substring(0, userNames.length() - 1); } final Map<String, String> optionMap = new HashMap<String, String>(); optionMap.put("accountName", account.getAccountName()); optionMap.put("accountID", account.getUuid()); optionMap.put("accountUsers", userNames); optionMap.put("domainName", accountDomain.getName()); optionMap.put("domainID", accountDomain.getUuid()); optionMap.put("quotaBalance", QuotaConfig.QuotaCurrencySymbol.value() + " " + balance.toString()); if (emailType == QuotaEmailTemplateTypes.QUOTA_STATEMENT) { optionMap.put("quotaUsage", QuotaConfig.QuotaCurrencySymbol.value() + " " + usage.toString()); } if (s_logger.isDebugEnabled()) { s_logger.debug("accountName" + account.getAccountName() + "accountID" + account.getUuid() + "accountUsers" + userNames + "domainName" + accountDomain.getName() + "domainID" + accountDomain.getUuid()); } final StrSubstitutor templateEngine = new StrSubstitutor(optionMap); final String subject = templateEngine.replace(emailTemplate.getTemplateSubject()); final String body = templateEngine.replace(emailTemplate.getTemplateBody()); try { _emailQuotaAlert.sendQuotaAlert(emailRecipients, subject, body); emailToBeSent.sentSuccessfully(_quotaAcc); } catch (Exception e) { s_logger.error(String.format("Unable to send quota alert email (subject=%s; body=%s) to account %s (%s) recipients (%s) due to error (%s)", subject, body, account.getAccountName(), account.getUuid(), emailRecipients, e)); if (s_logger.isDebugEnabled()) { s_logger.debug("Exception", e); } } } else { s_logger.error(String.format("No quota email template found for type %s, cannot send quota alert email to account %s(%s)", emailType, account.getAccountName(), account.getUuid())); } } public static long getDifferenceDays(Date d1, Date d2) { long diff = d2.getTime() - d1.getTime(); return TimeUnit.DAYS.convert(diff, TimeUnit.MILLISECONDS); } protected boolean lockAccount(long accountId) { final short opendb = 
TransactionLegacy.currentTxn().getDatabaseId(); boolean success = false; try (TransactionLegacy txn = TransactionLegacy.open(TransactionLegacy.CLOUD_DB)) { Account account = _accountDao.findById(accountId); if (account != null) { if (account.getState() == State.locked) { return true; // already locked, no-op } else if (account.getState() == State.enabled) { AccountVO acctForUpdate = _accountDao.createForUpdate(); acctForUpdate.setState(State.locked); success = _accountDao.update(Long.valueOf(accountId), acctForUpdate); } else { if (s_logger.isInfoEnabled()) { s_logger.info("Attempting to lock a non-enabled account, current state is " + account.getState() + " (accountId: " + accountId + "), locking failed."); } } } else { s_logger.warn("Failed to lock account " + accountId + ", account not found."); } } catch (Exception e) { s_logger.error("Exception occured while locking account by Quota Alert Manager", e); throw e; } finally { TransactionLegacy.open(opendb).close(); } return success; } public static class DeferredQuotaEmail { private AccountVO account; private QuotaAccountVO quotaAccount; private QuotaConfig.QuotaEmailTemplateTypes emailTemplateType; private BigDecimal quotaUsage; public DeferredQuotaEmail(AccountVO account, QuotaAccountVO quotaAccount, BigDecimal quotaUsage, QuotaConfig.QuotaEmailTemplateTypes emailTemplateType) { this.account = account; this.quotaAccount = quotaAccount; this.emailTemplateType = emailTemplateType; this.quotaUsage = quotaUsage; } public DeferredQuotaEmail(AccountVO account, QuotaAccountVO quotaAccount, QuotaConfig.QuotaEmailTemplateTypes emailTemplateType) { this.account = account; this.quotaAccount = quotaAccount; this.emailTemplateType = emailTemplateType; this.quotaUsage = new BigDecimal(-1); } public AccountVO getAccount() { return account; } public BigDecimal getQuotaBalance() { return quotaAccount.getQuotaBalance(); } public BigDecimal getQuotaUsage() { return quotaUsage; } public Date getSendDate() { if (emailTemplateType 
== QuotaEmailTemplateTypes.QUOTA_STATEMENT) { return quotaAccount.getLastStatementDate(); } else { return quotaAccount.getQuotaAlertDate(); } } public QuotaConfig.QuotaEmailTemplateTypes getEmailTemplateType() { return emailTemplateType; } public void sentSuccessfully(final QuotaAccountDao quotaAccountDao) { if (emailTemplateType == QuotaEmailTemplateTypes.QUOTA_STATEMENT) { quotaAccount.setLastStatementDate(new Date()); } else { quotaAccount.setQuotaAlertDate(new Date()); quotaAccount.setQuotaAlertType(emailTemplateType.ordinal()); } quotaAccountDao.updateQuotaAccount(quotaAccount.getAccountId(), quotaAccount); } }; static class EmailQuotaAlert { private final Session _smtpSession; private final String _smtpHost; private final int _smtpPort; private final boolean _smtpUseAuth; private final String _smtpUsername; private final String _smtpPassword; private final String _emailSender; public EmailQuotaAlert(String smtpHost, int smtpPort, boolean smtpUseAuth, final String smtpUsername, final String smtpPassword, String emailSender, boolean smtpDebug) { _smtpHost = smtpHost; _smtpPort = smtpPort; _smtpUseAuth = smtpUseAuth; _smtpUsername = smtpUsername; _smtpPassword = smtpPassword; _emailSender = emailSender; if (!Strings.isNullOrEmpty(_smtpHost)) { Properties smtpProps = new Properties(); smtpProps.put("mail.smtp.host", smtpHost); smtpProps.put("mail.smtp.port", smtpPort); smtpProps.put("mail.smtp.auth", "" + smtpUseAuth); if (smtpUsername != null) { smtpProps.put("mail.smtp.user", smtpUsername); } smtpProps.put("mail.smtps.host", smtpHost); smtpProps.put("mail.smtps.port", smtpPort); smtpProps.put("mail.smtps.auth", "" + smtpUseAuth); if (!Strings.isNullOrEmpty(smtpUsername)) { smtpProps.put("mail.smtps.user", smtpUsername); } if (!Strings.isNullOrEmpty(smtpUsername) && !Strings.isNullOrEmpty(smtpPassword)) { _smtpSession = Session.getInstance(smtpProps, new Authenticator() { @Override protected PasswordAuthentication getPasswordAuthentication() { return new 
PasswordAuthentication(smtpUsername, smtpPassword); } }); } else { _smtpSession = Session.getInstance(smtpProps); } _smtpSession.setDebug(smtpDebug); } else { _smtpSession = null; } } public void sendQuotaAlert(List<String> emails, String subject, String body) throws MessagingException, UnsupportedEncodingException { if (_smtpSession == null) { s_logger.error("Unable to create smtp session."); return; } SMTPMessage msg = new SMTPMessage(_smtpSession); msg.setSender(new InternetAddress(_emailSender, _emailSender)); msg.setFrom(new InternetAddress(_emailSender, _emailSender)); for (String email : emails) { if (email != null && !email.isEmpty()) { try { InternetAddress address = new InternetAddress(email, email); msg.addRecipient(Message.RecipientType.TO, address); } catch (Exception pokemon) { s_logger.error("Exception in creating address for:" + email, pokemon); } } } msg.setSubject(subject); msg.setSentDate(new Date()); msg.setContent(body, "text/html; charset=utf-8"); msg.saveChanges(); SMTPTransport smtpTrans = null; if (_smtpUseAuth) { smtpTrans = new SMTPSSLTransport(_smtpSession, new URLName("smtp", _smtpHost, _smtpPort, null, _smtpUsername, _smtpPassword)); } else { smtpTrans = new SMTPTransport(_smtpSession, new URLName("smtp", _smtpHost, _smtpPort, null, _smtpUsername, _smtpPassword)); } smtpTrans.connect(); smtpTrans.sendMessage(msg, msg.getAllRecipients()); smtpTrans.close(); } } }
// Copyright 2016 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.runtime; import static org.junit.Assert.assertEquals; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ConfigurationCollectionFactory; import com.google.devtools.build.lib.analysis.config.ConfigurationFactory; import com.google.devtools.build.lib.runtime.BlazeCommandDispatcher.LockingMode; import com.google.devtools.build.lib.runtime.BlazeCommandDispatcher.ShutdownBlazeServerException; import com.google.devtools.build.lib.testutil.Scratch; import com.google.devtools.build.lib.util.ExitCode; import com.google.devtools.build.lib.util.io.RecordingOutErr; import com.google.devtools.common.options.Option; import com.google.devtools.common.options.OptionsBase; import com.google.devtools.common.options.OptionsParser; import com.google.devtools.common.options.OptionsProvider; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.mockito.Mockito; import java.util.List; /** * Tests the handling of rc-options in {@link BlazeCommandDispatcher}. 
 */
@RunWith(JUnit4.class)
public class BlazeCommandDispatcherRcoptionsTest {

  /**
   * Example options to be used by the tests.
   */
  public static class FooOptions extends OptionsBase {
    @Option(name = "numoption", defaultValue = "0")
    public int numOption;

    @Option(name = "stringoption", defaultValue = "[unspecified]")
    public String stringOption;
  }

  // Test command that echoes only --numoption to stdout.
  @Command(
    name = "reportnum",
    options = {FooOptions.class},
    shortDescription = "",
    help = ""
  )
  private static class ReportNumCommand implements BlazeCommand {
    @Override
    public ExitCode exec(CommandEnvironment env, OptionsProvider options)
        throws ShutdownBlazeServerException {
      FooOptions fooOptions = options.getOptions(FooOptions.class);
      env.getReporter().getOutErr().printOut("" + fooOptions.numOption);
      return ExitCode.SUCCESS;
    }

    @Override
    public void editOptions(CommandEnvironment env, OptionsParser optionsParser) {}
  }

  // Test command that echoes both --numoption and --stringoption.
  @Command(
    name = "reportall",
    options = {FooOptions.class},
    shortDescription = "",
    help = ""
  )
  private static class ReportAllCommand implements BlazeCommand {
    @Override
    public ExitCode exec(CommandEnvironment env, OptionsProvider options)
        throws ShutdownBlazeServerException {
      FooOptions fooOptions = options.getOptions(FooOptions.class);
      env.getReporter()
          .getOutErr()
          .printOut("" + fooOptions.numOption + " " + fooOptions.stringOption);
      return ExitCode.SUCCESS;
    }

    @Override
    public void editOptions(CommandEnvironment env, OptionsParser optionsParser) {}
  }

  // Same as reportall, but declared via the "inherits" mechanism.
  @Command(
    name = "reportallinherited",
    options = {FooOptions.class},
    shortDescription = "",
    help = "",
    inherits = ReportAllCommand.class
  )
  private static class ReportAllInheritedCommand extends ReportAllCommand {
  }

  private final Scratch scratch = new Scratch();
  private final RecordingOutErr outErr = new RecordingOutErr();
  private final ReportNumCommand reportNum = new ReportNumCommand();
  private final ReportAllCommand reportAll = new ReportAllCommand();
  private final ReportAllCommand reportAllInherited = new ReportAllInheritedCommand();
  private BlazeRuntime runtime;

  // Builds a minimal runtime with scratch directories and a mocked
  // configuration collection factory.
  @Before
  public final void initializeRuntime() throws Exception {
    BlazeDirectories directories =
        new BlazeDirectories(
            scratch.dir("install_base"), scratch.dir("output_base"), scratch.dir("pkg"));
    this.runtime =
        new BlazeRuntime.Builder()
            .setDirectories(directories)
            .setStartupOptionsProvider(
                OptionsParser.newOptionsParser(BlazeServerStartupOptions.class))
            .setConfigurationFactory(
                new ConfigurationFactory(Mockito.mock(ConfigurationCollectionFactory.class)))
            .build();
  }

  // An rc option in the "common" section applies to every command.
  @Test
  public void testCommonUsed() throws Exception {
    List<String> blazercOpts =
        ImmutableList.of(
            "--rc_source=/home/jrluser/.blazerc", "--default_override=0:common=--numoption=99");
    BlazeCommandDispatcher dispatch = new BlazeCommandDispatcher(runtime, reportNum);
    List<String> cmdLine = Lists.newArrayList("reportnum");
    cmdLine.addAll(blazercOpts);
    dispatch.exec(cmdLine, LockingMode.ERROR_OUT, "test", outErr);
    String out = outErr.outAsLatin1();
    assertEquals("Common options should be used", "99", out);
  }

  // A command-specific rc option beats a "common" one...
  @Test
  public void testSpecificOptionsWin() throws Exception {
    List<String> blazercOpts =
        ImmutableList.of(
            "--rc_source=/home/jrluser/.blazerc",
            "--default_override=0:reportnum=--numoption=42",
            "--default_override=0:common=--numoption=99");
    BlazeCommandDispatcher dispatch = new BlazeCommandDispatcher(runtime, reportNum);
    List<String> cmdLine = Lists.newArrayList("reportnum");
    cmdLine.addAll(blazercOpts);
    dispatch.exec(cmdLine, LockingMode.ERROR_OUT, "test", outErr);
    String out = outErr.outAsLatin1();
    assertEquals("Specific options should dominate common options", "42", out);
  }

  // ...regardless of the order in which the overrides appear.
  @Test
  public void testSpecificOptionsWinOtherOrder() throws Exception {
    List<String> blazercOpts =
        ImmutableList.of(
            "--rc_source=/home/jrluser/.blazerc",
            "--default_override=0:common=--numoption=99",
            "--default_override=0:reportnum=--numoption=42");
    BlazeCommandDispatcher dispatch = new BlazeCommandDispatcher(runtime, reportNum);
    List<String> cmdLine = Lists.newArrayList("reportnum");
    cmdLine.addAll(blazercOpts);
    dispatch.exec(cmdLine, LockingMode.ERROR_OUT, "test", outErr);
    String out = outErr.outAsLatin1();
    assertEquals("Specific options should dominate common options", "42", out);
  }

  // Options from different rc files accumulate.
  @Test
  public void testOptionsCombined() throws Exception {
    List<String> blazercOpts =
        ImmutableList.of(
            "--rc_source=/etc/bazelrc",
            "--default_override=0:common=--stringoption=foo",
            "--rc_source=/home/jrluser/.blazerc",
            "--default_override=1:common=--numoption=99");
    BlazeCommandDispatcher dispatch = new BlazeCommandDispatcher(runtime, reportNum, reportAll);
    List<String> cmdLine = Lists.newArrayList("reportall");
    cmdLine.addAll(blazercOpts);
    dispatch.exec(cmdLine, LockingMode.ERROR_OUT, "test", outErr);
    String out = outErr.outAsLatin1();
    assertEquals("Options should get accumulated over different rc files", "99 foo", out);
  }

  // The later (more specific) rc file wins when both set the same option.
  @Test
  public void testOptionsCombinedWithOverride() throws Exception {
    List<String> blazercOpts =
        ImmutableList.of(
            "--rc_source=/etc/bazelrc",
            "--default_override=0:common=--stringoption=foo",
            "--default_override=0:common=--numoption=42",
            "--rc_source=/home/jrluser/.blazerc",
            "--default_override=1:common=--numoption=99");
    BlazeCommandDispatcher dispatch = new BlazeCommandDispatcher(runtime, reportNum, reportAll);
    List<String> cmdLine = Lists.newArrayList("reportall");
    cmdLine.addAll(blazercOpts);
    dispatch.exec(cmdLine, LockingMode.ERROR_OUT, "test", outErr);
    String out = outErr.outAsLatin1();
    assertEquals("The more specific rc-file should override", "99 foo", out);
  }

  // Same as above with the rc-file names swapped.
  @Test
  public void testOptionsCombinedWithOverrideOtherName() throws Exception {
    List<String> blazercOpts =
        ImmutableList.of(
            "--rc_source=/home/jrluser/.blazerc",
            "--default_override=0:common=--stringoption=foo",
            "--default_override=0:common=--numoption=42",
            "--rc_source=/etc/bazelrc",
            "--default_override=1:common=--numoption=99");
    BlazeCommandDispatcher dispatch = new BlazeCommandDispatcher(runtime, reportNum, reportAll);
    List<String> cmdLine = Lists.newArrayList("reportall");
    cmdLine.addAll(blazercOpts);
dispatch.exec(cmdLine, LockingMode.ERROR_OUT, "test", outErr); String out = outErr.outAsLatin1(); assertEquals("The more specific rc-file should override irrespective of name", "99 foo", out); } @Test public void testInheritedOptionsWithSpecificOverride() throws Exception { ImmutableList<ImmutableList<String>> blazercOpts = ImmutableList.of( ImmutableList.of( "--rc_source=/doesnt/matter/0/bazelrc", "--default_override=0:common=--stringoption=common", "--default_override=0:common=--numoption=42"), ImmutableList.of( "--rc_source=/doesnt/matter/1/bazelrc", "--default_override=0:reportall=--stringoption=reportall"), ImmutableList.of( "--rc_source=/doesnt/matter/2/bazelrc", "--default_override=0:reportallinherited=--stringoption=reportallinherited")); for (List<ImmutableList<String>> e : Collections2.permutations(blazercOpts)) { outErr.reset(); BlazeCommandDispatcher dispatch = new BlazeCommandDispatcher(runtime, reportNum, reportAll, reportAllInherited); List<String> cmdLine = Lists.newArrayList("reportallinherited"); List<String> orderedOpts = ImmutableList.copyOf(Iterables.concat(e)); cmdLine.addAll(orderedOpts); dispatch.exec(cmdLine, LockingMode.ERROR_OUT, "test", outErr); String out = outErr.outAsLatin1(); assertEquals( String.format( "The more specific option should override, irrespective of source file or order. %s", orderedOpts), "42 reportallinherited", out); } } }
package com.wenyu.Data;

import java.io.Serializable;
import java.util.List;

/**
 * Serializable data holder combining name/value detail lists with contact and
 * location information. The many constructors reflect the different subsets of
 * data callers have available; any field not covered by the chosen constructor
 * is left {@code null}.
 *
 * <p>All constructors now delegate (directly or transitively) to the canonical
 * 13-argument constructor, so each field is assigned in exactly one place —
 * the previous telescoping constructors duplicated the same assignments ten
 * times over.
 */
public class IntroData implements Serializable {

    private static final long serialVersionUID = 1L;

    private List<IntronameData> listname;   // detail field names
    private List<IntrovalueData> listvalue; // detail field values
    private String contact;
    private String telephone;
    private String address;
    private String regional;
    private String tempid;
    private String cdname;
    private String id;
    private String x;          // coordinate, kept as String as received
    private String y;          // coordinate, kept as String as received
    private String send_to_id;
    private String type;
    private String count;

    /**
     * Canonical constructor: sets every field except {@code count}.
     * All other constructors delegate here.
     */
    public IntroData(List<IntronameData> listname, List<IntrovalueData> listvalue,
                     String contact, String telephone, String address, String regional,
                     String tempid, String cdname, String id, String x, String y,
                     String send_to_id, String type) {
        super();
        this.listname = listname;
        this.listvalue = listvalue;
        this.contact = contact;
        this.telephone = telephone;
        this.address = address;
        this.regional = regional;
        this.tempid = tempid;
        this.cdname = cdname;
        this.id = id;
        this.x = x;
        this.y = y;
        this.send_to_id = send_to_id;
        this.type = type;
    }

    /** As the canonical constructor, without {@code type}. */
    public IntroData(List<IntronameData> listname, List<IntrovalueData> listvalue,
                     String contact, String telephone, String address, String regional,
                     String tempid, String cdname, String id, String x, String y,
                     String send_to_id) {
        this(listname, listvalue, contact, telephone, address, regional, tempid, cdname,
             id, x, y, send_to_id, null);
    }

    /** As the canonical constructor, without {@code send_to_id} and {@code type}. */
    public IntroData(List<IntronameData> listname, List<IntrovalueData> listvalue,
                     String contact, String telephone, String address, String regional,
                     String tempid, String cdname, String id, String x, String y) {
        this(listname, listvalue, contact, telephone, address, regional, tempid, cdname,
             id, x, y, null, null);
    }

    /** Contact/location data plus {@code id}; no coordinates or routing fields. */
    public IntroData(List<IntronameData> listname, List<IntrovalueData> listvalue,
                     String contact, String telephone, String address, String regional,
                     String tempid, String cdname, String id) {
        this(listname, listvalue, contact, telephone, address, regional, tempid, cdname,
             id, null, null, null, null);
    }

    /** Contact/location data without {@code id}. */
    public IntroData(List<IntronameData> listname, List<IntrovalueData> listvalue,
                     String contact, String telephone, String address, String regional,
                     String tempid, String cdname) {
        this(listname, listvalue, contact, telephone, address, regional, tempid, cdname,
             null, null, null, null, null);
    }

    /** Detail lists plus template id, name and coordinates only. */
    public IntroData(List<IntronameData> listname, List<IntrovalueData> listvalue,
                     String tempid, String cdname, String x, String y) {
        this(listname, listvalue, null, null, null, null, tempid, cdname,
             null, x, y, null, null);
    }

    /** Detail lists plus template id and name only. */
    public IntroData(List<IntronameData> listname, List<IntrovalueData> listvalue,
                     String tempid, String cdname) {
        this(listname, listvalue, tempid, cdname, null, null);
    }

    /** Detail lists plus template id, name and {@code count}. */
    public IntroData(List<IntronameData> listname, List<IntrovalueData> listvalue,
                     String tempid, String cdname, String count) {
        this(listname, listvalue, tempid, cdname);
        // count is not part of the canonical constructor, so set it afterwards.
        this.count = count;
    }

    /** Values only; {@code listname} stays {@code null}. */
    public IntroData(List<IntrovalueData> listvalue) {
        this(null, listvalue);
    }

    /** Names and values only. */
    public IntroData(List<IntronameData> listname, List<IntrovalueData> listvalue) {
        super();
        this.listname = listname;
        this.listvalue = listvalue;
    }

    public String getSend_to_id() {
        return send_to_id;
    }

    public void setSend_to_id(String send_to_id) {
        this.send_to_id = send_to_id;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getCount() {
        return count;
    }

    public void setCount(String count) {
        this.count = count;
    }

    public String getX() {
        return x;
    }

    public void setX(String x) {
        this.x = x;
    }

    public String getY() {
        return y;
    }

    public void setY(String y) {
        this.y = y;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public List<IntronameData> getListname() {
        return listname;
    }

    public void setListname(List<IntronameData> listname) {
        this.listname = listname;
    }

    public List<IntrovalueData> getListvalue() {
        return listvalue;
    }

    public void setListvalue(List<IntrovalueData> listvalue) {
        this.listvalue = listvalue;
    }

    public String getContact() {
        return contact;
    }

    public void setContact(String contact) {
        this.contact = contact;
    }

    public String getTelephone() {
        return telephone;
    }

    public void setTelephone(String telephone) {
        this.telephone = telephone;
    }

    public String getAddress() {
        return address;
    }

    public void setAddress(String address) {
        this.address = address;
    }

    public String getRegional() {
        return regional;
    }

    public void setRegional(String regional) {
        this.regional = regional;
    }

    public String getTempid() {
        return tempid;
    }

    public void setTempid(String tempid) {
        this.tempid = tempid;
    }

    public String getCdname() {
        return cdname;
    }

    public void setCdname(String cdname) {
        this.cdname = cdname;
    }

    @Override
    public String toString() {
        return "IntroData [listname=" + listname + ", listvalue=" + listvalue
                + ", contact=" + contact + ", telephone=" + telephone + ", address="
                + address + ", regional=" + regional + ", tempid=" + tempid
                + ", cdname=" + cdname + ", id=" + id + ", x=" + x + ", y=" + y
                + ", send_to_id=" + send_to_id + ", type=" + type + ", count="
                + count + "]";
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.testIntegration.createTest;

import com.intellij.CommonBundle;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.daemon.impl.quickfix.OrderEntryFix;
import com.intellij.icons.AllIcons;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.ide.util.TreeClassChooser;
import com.intellij.ide.util.TreeClassChooserFactory;
import com.intellij.java.JavaBundle;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CustomShortcutSet;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.JavaProjectRootsUtil;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleSettings;
import com.intellij.refactoring.PackageWrapper;
import com.intellij.refactoring.move.moveClassesOrPackages.MoveClassesOrPackagesUtil;
import com.intellij.refactoring.ui.MemberSelectionTable;
import com.intellij.refactoring.ui.PackageNameReferenceEditorCombo;
import com.intellij.refactoring.util.RefactoringMessageUtil;
import com.intellij.refactoring.util.RefactoringUtil;
import com.intellij.refactoring.util.classMembers.MemberInfo;
import com.intellij.testIntegration.JavaTestFramework;
import com.intellij.testIntegration.TestFramework;
import com.intellij.testIntegration.TestIntegrationUtils;
import com.intellij.ui.*;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ObjectUtils;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.model.java.JavaModuleSourceRootTypes;
import org.jetbrains.jps.model.java.JavaSourceRootType;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.io.IOException;
import java.util.List;
import java.util.*;

/**
 * Dialog for the "Create Test" action: lets the user pick a test framework,
 * test class name, superclass, destination package, setUp/tearDown generation
 * and the source methods to create test stubs for. Selections (framework,
 * superclass, "show inherited") are persisted per-project via
 * {@link PropertiesComponent} and restored on the next invocation.
 */
public class CreateTestDialog extends DialogWrapper {
  // Recent-entry keys for the package and superclass combo boxes.
  private static final String RECENTS_KEY = "CreateTestDialog.RecentsKey";
  private static final String RECENT_SUPERS_KEY = "CreateTestDialog.Recents.Supers";
  // Per-project property keys for remembering the last-used framework/superclass.
  private static final String DEFAULT_LIBRARY_NAME_PROPERTY = CreateTestDialog.class.getName() + ".defaultLibrary";
  private static final String DEFAULT_LIBRARY_SUPERCLASS_NAME_PROPERTY = CreateTestDialog.class.getName() + ".defaultLibrarySuperClass";
  private static final String SHOW_INHERITED_MEMBERS_PROPERTY = CreateTestDialog.class.getName() + ".includeInheritedMembers";

  private final Project myProject;
  private final PsiClass myTargetClass;       // class the test is created for
  private final PsiPackage myTargetPackage;   // initial destination package (may be null)
  private final Module myTargetModule;        // module whose test roots are searched

  // Set in doOKAction(); read by callers via getTargetDirectory().
  protected PsiDirectory myTargetDirectory;
  private TestFramework mySelectedFramework;

  private final ComboBox<TestFramework> myLibrariesCombo = new ComboBox<>(new DefaultComboBoxModel<>());
  // Created lazily in createCenterPanel().
  private EditorTextField myTargetClassNameField;
  private ReferenceEditorComboWithBrowseButton mySuperClassField;
  private ReferenceEditorComboWithBrowseButton myTargetPackageField;
  private final JCheckBox myGenerateBeforeBox = new JCheckBox(JavaBundle.message("intention.create.test.dialog.setUp"));
  private final JCheckBox myGenerateAfterBox = new JCheckBox(JavaBundle.message("intention.create.test.dialog.tearDown"));
  private final JCheckBox myShowInheritedMethodsBox = new JCheckBox(JavaBundle.message("intention.create.test.dialog.show.inherited"));
  private final MemberSelectionTable myMethodsTable = new MemberSelectionTable(Collections.emptyList(), null);
  private final JButton myFixLibraryButton = new JButton(JavaBundle.message("intention.create.test.dialog.fix.library"));
  private JPanel myFixLibraryPanel;
  private JLabel myFixLibraryLabel;

  public CreateTestDialog(@NotNull Project project,
                          @NotNull @NlsContexts.DialogTitle String title,
                          PsiClass targetClass,
                          PsiPackage targetPackage,
                          Module targetModule) {
    super(project, true);
    myProject = project;

    myTargetClass = targetClass;
    myTargetPackage = targetPackage;
    myTargetModule = targetModule;

    setTitle(title);
    init();
  }

  /**
   * Builds the default test-class name from the target class name plus the
   * code-style TEST_NAME_PREFIX/TEST_NAME_SUFFIX settings.
   */
  protected String suggestTestClassName(PsiClass targetClass) {
    JavaCodeStyleSettings customSettings = JavaCodeStyleSettings.getInstance(targetClass.getContainingFile());
    String prefix = customSettings.TEST_NAME_PREFIX;
    String suffix = customSettings.TEST_NAME_SUFFIX;
    return prefix + targetClass.getName() + suffix;
  }

  /**
   * Returns true if the current superclass text was typed by the user, i.e. it
   * matches neither any framework's default superclass nor any remembered
   * last-selected superclass.
   */
  private boolean isSuperclassSelectedManually() {
    String superClass = mySuperClassField.getText();
    if (StringUtil.isEmptyOrSpaces(superClass)) {
      return false;
    }

    for (TestFramework framework : TestFramework.EXTENSION_NAME.getExtensions()) {
      if (superClass.equals(framework.getDefaultSuperClass())) {
        return false;
      }
      if (superClass.equals(getLastSelectedSuperClassName(framework))) {
        return false;
      }
    }

    return true;
  }

  /**
   * Reacts to a framework selection: toggles the "library missing" panel and
   * updates the superclass combo (preserving a manually typed superclass).
   */
  private void onLibrarySelected(TestFramework descriptor) {
    if (descriptor.isLibraryAttached(myTargetModule)) {
      myFixLibraryPanel.setVisible(false);
    }
    else {
      myFixLibraryPanel.setVisible(true);
      String text = JavaBundle.message("intention.create.test.dialog.library.not.found", descriptor.getName());
      myFixLibraryLabel.setText(text);
      // Show the fix button only when we actually know how to attach the
      // library: via a framework library descriptor or a plain jar path.
      myFixLibraryButton.setVisible(descriptor instanceof JavaTestFramework &&
                                    ((JavaTestFramework)descriptor).getFrameworkLibraryDescriptor() != null ||
                                    descriptor.getLibraryPath() != null);
    }

    // Prefer the superclass the user last chose for this framework over the
    // framework's built-in default.
    @NlsSafe String libraryDefaultSuperClass = descriptor.getDefaultSuperClass();
    @NlsSafe String lastSelectedSuperClass = getLastSelectedSuperClassName(descriptor);
    @NlsSafe String superClass = lastSelectedSuperClass != null ? lastSelectedSuperClass : libraryDefaultSuperClass;

    if (isSuperclassSelectedManually()) {
      // Keep the user's text; just add the framework suggestion to the history.
      if (superClass != null) {
        String currentSuperClass = mySuperClassField.getText();
        mySuperClassField.appendItem(superClass);
        mySuperClassField.setText(currentSuperClass);
      }
    }
    else {
      mySuperClassField.appendItem(StringUtil.notNullize(superClass));
      mySuperClassField.getChildComponent().setSelectedItem(superClass == null ? "" : superClass);
    }

    mySelectedFramework = descriptor;
  }

  /**
   * Re-populates the methods table (optionally including inherited methods)
   * while preserving the current check-box selection.
   */
  private void updateMethodsTable() {
    List<MemberInfo> methods = TestIntegrationUtils.extractClassMethods(
      myTargetClass, myShowInheritedMethodsBox.isSelected());

    Set<PsiMember> selectedMethods = new HashSet<>();
    for (MemberInfo each : myMethodsTable.getSelectedMemberInfos()) {
      selectedMethods.add(each.getMember());
    }
    for (MemberInfo each : methods) {
      each.setChecked(selectedMethods.contains(each.getMember()));
    }

    myMethodsTable.setMemberInfos(methods);
  }

  // Falls back to "JUnit5" when no framework has been remembered yet.
  private String getDefaultLibraryName() {
    return getProperties().getValue(DEFAULT_LIBRARY_NAME_PROPERTY, "JUnit5");
  }

  private String getLastSelectedSuperClassName(TestFramework framework) {
    return getProperties().getValue(getDefaultSuperClassPropertyName(framework));
  }

  private void saveDefaultLibraryNameAndSuperClass() {
    getProperties().setValue(DEFAULT_LIBRARY_NAME_PROPERTY, mySelectedFramework.getName());
    getProperties().setValue(getDefaultSuperClassPropertyName(mySelectedFramework), mySuperClassField.getText());
  }

  // Superclass property key is namespaced per framework name.
  private static String getDefaultSuperClassPropertyName(TestFramework framework) {
    return DEFAULT_LIBRARY_SUPERCLASS_NAME_PROPERTY + "." + framework.getName();
  }

  private void restoreShowInheritedMembersStatus() {
    myShowInheritedMethodsBox.setSelected(getProperties().getBoolean(SHOW_INHERITED_MEMBERS_PROPERTY));
  }

  private void saveShowInheritedMembersStatus() {
    getProperties().setValue(SHOW_INHERITED_MEMBERS_PROPERTY, myShowInheritedMethodsBox.isSelected());
  }

  private PropertiesComponent getProperties() {
    return PropertiesComponent.getInstance(myProject);
  }

  @Override
  protected String getDimensionServiceKey() {
    return getClass().getName();
  }

  @Override
  protected String getHelpId() {
    return "reference.dialogs.createTest";
  }

  @Override
  public JComponent getPreferredFocusedComponent() {
    return myTargetClassNameField;
  }

  /**
   * Builds the dialog UI: framework combo (+ optional "fix library" row),
   * class name, superclass, destination package, setUp/tearDown check boxes
   * and the method-selection table. Also pre-selects the best framework:
   * the remembered default if applicable, otherwise a framework matching the
   * target class's language among the attached (or all) frameworks.
   */
  @Override
  protected JComponent createCenterPanel() {
    JPanel panel = new JPanel(new GridBagLayout());

    GridBagConstraints constr = new GridBagConstraints();

    constr.fill = GridBagConstraints.HORIZONTAL;
    constr.anchor = GridBagConstraints.WEST;

    int gridy = 1;

    constr.insets = insets(4);
    constr.gridy = gridy++;
    constr.gridx = 0;
    constr.weightx = 0;
    final JLabel libLabel = new JLabel(JavaBundle.message("intention.create.test.dialog.testing.library"));
    libLabel.setLabelFor(myLibrariesCombo);
    panel.add(libLabel, constr);

    constr.gridx = 1;
    constr.weightx = 1;
    constr.gridwidth = GridBagConstraints.REMAINDER;
    panel.add(myLibrariesCombo, constr);

    myFixLibraryPanel = new JPanel(new BorderLayout());
    myFixLibraryLabel = new JLabel();
    myFixLibraryLabel.setIcon(AllIcons.Actions.IntentionBulb);
    myFixLibraryPanel.add(myFixLibraryLabel, BorderLayout.CENTER);
    myFixLibraryPanel.add(myFixLibraryButton, BorderLayout.EAST);

    constr.insets = insets(1);
    constr.gridy = gridy++;
    constr.gridx = 0;
    panel.add(myFixLibraryPanel, constr);

    constr.gridheight = 1;

    constr.insets = insets(6);
    constr.gridy = gridy++;
    constr.gridx = 0;
    constr.weightx = 0;
    constr.gridwidth = 1;
    panel.add(new JLabel(JavaBundle.message("intention.create.test.dialog.class.name")), constr);

    myTargetClassNameField = new EditorTextField(suggestTestClassName(myTargetClass));
    myTargetClassNameField.getDocument().addDocumentListener(new DocumentListener() {
      @Override
      public void documentChanged(@NotNull DocumentEvent e) {
        // OK is enabled only while the typed class name is a valid identifier.
        getOKAction().setEnabled(PsiNameHelper.getInstance(myProject).isIdentifier(getClassName()));
      }
    });

    constr.gridx = 1;
    constr.weightx = 1;
    panel.add(myTargetClassNameField, constr);

    constr.insets = insets(1);
    constr.gridy = gridy++;
    constr.gridx = 0;
    constr.weightx = 0;
    panel.add(new JLabel(JavaBundle.message("intention.create.test.dialog.super.class")), constr);

    mySuperClassField = new ReferenceEditorComboWithBrowseButton(new MyChooseSuperClassAction(), null, myProject, true,
                                                                 JavaCodeFragment.VisibilityChecker.EVERYTHING_VISIBLE, RECENT_SUPERS_KEY);
    mySuperClassField.setMinimumSize(mySuperClassField.getPreferredSize());
    constr.gridx = 1;
    constr.weightx = 1;
    panel.add(mySuperClassField, constr);

    constr.insets = insets(1);
    constr.gridy = gridy++;
    constr.gridx = 0;
    constr.weightx = 0;
    panel.add(new JLabel(JavaBundle.message("dialog.create.class.destination.package.label")), constr);

    constr.gridx = 1;
    constr.weightx = 1;

    String targetPackageName = myTargetPackage != null ? myTargetPackage.getQualifiedName() : "";
    myTargetPackageField = new PackageNameReferenceEditorCombo(targetPackageName, myProject, RECENTS_KEY, JavaBundle.message("dialog.create.class.package.chooser.title"));

    // Shift+Enter inside the package field opens the package chooser.
    new AnAction() {
      @Override
      public void actionPerformed(@NotNull AnActionEvent e) {
        myTargetPackageField.getButton().doClick();
      }
    }.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, InputEvent.SHIFT_DOWN_MASK)),
                                myTargetPackageField.getChildComponent());
    JPanel targetPackagePanel = new JPanel(new BorderLayout());
    targetPackagePanel.add(myTargetPackageField, BorderLayout.CENTER);
    panel.add(targetPackagePanel, constr);

    constr.insets = insets(6);
    constr.gridy = gridy++;
    constr.gridx = 0;
    constr.weightx = 0;
    panel.add(new JLabel(JavaBundle.message("intention.create.test.dialog.generate")), constr);

    constr.gridx = 1;
    constr.weightx = 1;
    panel.add(myGenerateBeforeBox, constr);

    constr.insets = insets(1);
    constr.gridy = gridy++;
    panel.add(myGenerateAfterBox, constr);

    constr.insets = insets(6);
    constr.gridy = gridy++;
    constr.gridx = 0;
    constr.weightx = 0;
    final JLabel membersLabel = new JLabel(JavaBundle.message("intention.create.test.dialog.select.methods"));
    membersLabel.setLabelFor(myMethodsTable);
    panel.add(membersLabel, constr);

    constr.gridx = 1;
    constr.weightx = 1;
    panel.add(myShowInheritedMethodsBox, constr);

    constr.insets = insets(1, 8);
    constr.gridy = gridy++;
    constr.gridx = 0;
    constr.gridwidth = GridBagConstraints.REMAINDER;
    constr.fill = GridBagConstraints.BOTH;
    constr.weighty = 1;
    panel.add(ScrollPaneFactory.createScrollPane(myMethodsTable), constr);

    myLibrariesCombo.setRenderer(SimpleListCellRenderer.create((label, value, index) -> {
      if (value != null) {
        label.setText(value.getName());
        label.setIcon(value.getIcon());
      }
    }));
    final boolean hasTestRoots = !ModuleRootManager.getInstance(myTargetModule).getSourceRoots(JavaModuleSourceRootTypes.TESTS).isEmpty();
    final List<TestFramework> attachedLibraries = new ArrayList<>();
    final String defaultLibrary = getDefaultLibraryName();
    TestFramework defaultDescriptor = null;

    final DefaultComboBoxModel<TestFramework> model = (DefaultComboBoxModel<TestFramework>)myLibrariesCombo.getModel();

    // Populate the combo with all registered frameworks, sorted by name.
    final List<TestFramework> descriptors = new SmartList<>(TestFramework.EXTENSION_NAME.getExtensionList());
    descriptors.sort((d1, d2) -> Comparing.compare(d1.getName(), d2.getName()));
    for (final TestFramework descriptor : descriptors) {
      model.addElement(descriptor);
      if (hasTestRoots && descriptor.isLibraryAttached(myTargetModule)) {
        attachedLibraries.add(descriptor);
      }
      if (Objects.equals(defaultLibrary, descriptor.getName())) {
        defaultDescriptor = descriptor;
      }
    }

    myLibrariesCombo.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        final Object selectedItem = myLibrariesCombo.getSelectedItem();
        if (selectedItem != null) {
          // Alternative resolve lets superclass lookup work during indexing.
          final DumbService dumbService = DumbService.getInstance(myProject);
          dumbService.runWithAlternativeResolveEnabled(() -> onLibrarySelected((TestFramework)selectedItem));
        }
      }
    });

    if (defaultDescriptor != null && (attachedLibraries.contains(defaultDescriptor) || attachedLibraries.isEmpty())) {
      myLibrariesCombo.setSelectedItem(defaultDescriptor);
    }
    else if (!descriptors.isEmpty()) {
      // No usable remembered default: prefer a framework whose language
      // matches the target class, else just the first applicable one.
      List<TestFramework> applicableFrameworks = attachedLibraries.isEmpty() ? descriptors : attachedLibraries;
      TestFramework preferredFramework =
        ObjectUtils.notNull(ContainerUtil.find(applicableFrameworks, d -> d.getLanguage().equals(myTargetClass.getLanguage())),
                            applicableFrameworks.get(0));
      myLibrariesCombo.setSelectedItem(preferredFramework);
    }

    myFixLibraryButton.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        if (mySelectedFramework instanceof JavaTestFramework) {
          ((JavaTestFramework)mySelectedFramework).setupLibrary(myTargetModule)
            .onSuccess(__ -> myFixLibraryPanel.setVisible(false));
        }
        else {
          OrderEntryFix.addJarToRoots(mySelectedFramework.getLibraryPath(), myTargetModule, null);
          myFixLibraryPanel.setVisible(false);
        }
      }
    });

    myShowInheritedMethodsBox.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        updateMethodsTable();
      }
    });
    restoreShowInheritedMembersStatus();
    updateMethodsTable();
    return panel;
  }

  private static Insets insets(int top) {
    return insets(top, 0);
  }

  // Uniform horizontal padding of 8px; vertical padding varies per row.
  private static Insets insets(int top, int bottom) {
    return JBUI.insets(top, 8, bottom, 8);
  }

  public String getClassName() {
    return myTargetClassNameField.getText();
  }

  public PsiClass getTargetClass() {
    return myTargetClass;
  }

  /** Returns the trimmed superclass name, or null when the field is empty. */
  @Nullable
  public String getSuperClassName() {
    String result = mySuperClassField.getText().trim();
    if (result.length() == 0) return null;
    return result;
  }

  public PsiDirectory getTargetDirectory() {
    return myTargetDirectory;
  }

  public Collection<MemberInfo> getSelectedMethods() {
    return myMethodsTable.getSelectedMemberInfos();
  }

  public boolean shouldGeneratedAfter() {
    return myGenerateAfterBox.isSelected();
  }

  public boolean shouldGeneratedBefore() {
    return myGenerateBeforeBox.isSelected();
  }

  public TestFramework getSelectedTestFrameworkDescriptor() {
    return mySelectedFramework;
  }

  /**
   * Resolves the target directory, warns if the class cannot be created there
   * (the user may still choose to update the existing class), then persists
   * the dialog selections and closes.
   */
  @Override
  protected void doOKAction() {
    RecentsManager.getInstance(myProject).registerRecentEntry(RECENTS_KEY, myTargetPackageField.getText());
    RecentsManager.getInstance(myProject).registerRecentEntry(RECENT_SUPERS_KEY, mySuperClassField.getText());

    String errorMessage = null;
    try {
      myTargetDirectory = selectTargetDirectory();
      if (myTargetDirectory == null) return;
    }
    catch (IncorrectOperationException e) {
      errorMessage = e.getMessage();
    }

    if (errorMessage == null) {
      try {
        errorMessage = checkCanCreateClass();
      }
      catch (IncorrectOperationException e) {
        errorMessage = e.getMessage();
      }
    }

    if (errorMessage != null) {
      final int result = Messages
        .showOkCancelDialog(myProject, JavaBundle.message("dialog.message.0.update.existing.class", errorMessage), CommonBundle.getErrorTitle(),
                            Messages.getErrorIcon());
      if (result == Messages.CANCEL) {
        return;
      }
    }

    saveDefaultLibraryNameAndSuperClass();
    saveShowInheritedMembersStatus();
    super.doOKAction();
  }

  protected String checkCanCreateClass() {
    return RefactoringMessageUtil.checkCanCreateClass(myTargetDirectory, getClassName());
  }

  /**
   * Picks (or creates) the source root and package directory the test goes
   * into. Preference order: existing test roots of the module; otherwise test
   * roots created from suitable root URLs; otherwise any suitable source root.
   * Returns null when no root can be determined.
   */
  @Nullable
  private PsiDirectory selectTargetDirectory() throws IncorrectOperationException {
    final String packageName = getPackageName();
    final PackageWrapper targetPackage = new PackageWrapper(PsiManager.getInstance(myProject), packageName);

    final VirtualFile selectedRoot = ReadAction.compute(() -> {
      final List<VirtualFile> testFolders = CreateTestAction.computeTestRoots(myTargetModule);
      List<VirtualFile> roots;
      if (testFolders.isEmpty()) {
        roots = new ArrayList<>();
        List<String> urls = CreateTestAction.computeSuitableTestRootUrls(myTargetModule);
        for (String url : urls) {
          try {
            ContainerUtil.addIfNotNull(roots, VfsUtil.createDirectories(VfsUtilCore.urlToPath(url)));
          }
          catch (IOException e) {
            throw new RuntimeException(e);
          }
        }
        if (roots.isEmpty()) {
          JavaProjectRootsUtil.collectSuitableDestinationSourceRoots(myTargetModule, roots);
        }
        if (roots.isEmpty()) return null;
      } else {
        roots = new ArrayList<>(testFolders);
      }

      if (roots.size() == 1) {
        return roots.get(0);
      }
      else {
        PsiDirectory defaultDir = chooseDefaultDirectory(targetPackage.getDirectories(), roots);
        return MoveClassesOrPackagesUtil.chooseSourceRoot(targetPackage, roots, defaultDir);
      }
    });

    if (selectedRoot == null) return null;

    return WriteCommandAction.writeCommandAction(myProject).withName(CodeInsightBundle.message("create.directory.command"))
                             .compute(() -> RefactoringUtil.createPackageDirectoryInSourceRoot(targetPackage, selectedRoot));
  }

  /**
   * Heuristic default root: first a test-source directory of the module (one
   * whose path does not contain "generated", if possible), then a package
   * directory under one of the candidate roots, then a test-source root of a
   * dependent module.
   */
  @Nullable
  private PsiDirectory chooseDefaultDirectory(PsiDirectory[] directories, List<VirtualFile> roots) {
    List<PsiDirectory> dirs = new ArrayList<>();
    PsiManager psiManager = PsiManager.getInstance(myProject);
    for (VirtualFile file : ModuleRootManager.getInstance(myTargetModule).getSourceRoots(JavaSourceRootType.TEST_SOURCE)) {
      final PsiDirectory dir = psiManager.findDirectory(file);
      if (dir != null) {
        dirs.add(dir);
      }
    }
    if (!dirs.isEmpty()) {
      for (PsiDirectory dir : dirs) {
        final String dirName = dir.getVirtualFile().getPath();
        if (dirName.contains("generated")) continue;
        return dir;
      }
      return dirs.get(0);
    }
    for (PsiDirectory dir : directories) {
      final VirtualFile file = dir.getVirtualFile();
      for (VirtualFile root : roots) {
        if (VfsUtilCore.isAncestor(root, file, false)) {
          final PsiDirectory rootDir = psiManager.findDirectory(root);
          if (rootDir != null) {
            return rootDir;
          }
        }
      }
    }
    return ModuleManager.getInstance(myProject)
                        .getModuleDependentModules(myTargetModule)
                        .stream().flatMap(module -> ModuleRootManager.getInstance(module).getSourceRoots(JavaSourceRootType.TEST_SOURCE).stream())
                        .map(root -> psiManager.findDirectory(root)).findFirst().orElse(null);
  }

  private String getPackageName() {
    String name = myTargetPackageField.getText();
    return name != null ? name.trim() : "";
  }

  /** Browse action for the superclass field: opens a project-wide class chooser. */
  private class MyChooseSuperClassAction implements ActionListener {
    @Override
    public void actionPerformed(ActionEvent e) {
      TreeClassChooserFactory f = TreeClassChooserFactory.getInstance(myProject);
      TreeClassChooser dialog =
        f.createAllProjectScopeChooser(JavaBundle.message("intention.create.test.dialog.choose.super.class"));
      dialog.showDialog();
      PsiClass aClass = dialog.getSelected();
      if (aClass != null) {
        String superClass = aClass.getQualifiedName();

        mySuperClassField.appendItem(superClass);
        mySuperClassField.getChildComponent().setSelectedItem(superClass);
      }
    }
  }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.idea.devkit.util;

import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.MultiLineLabelUI;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.xml.XmlFile;
import com.intellij.ui.ColoredListCellRenderer;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.components.JBList;
import com.intellij.ui.table.JBTable;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.idea.devkit.DevKitBundle;
import org.jetbrains.idea.devkit.module.PluginModuleType;

import javax.swing.*;
import javax.swing.event.TableModelEvent;
import javax.swing.event.TableModelListener;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableCellRenderer;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

/**
 * Modal dialog that lets the user tick which of the candidate plugin modules
 * should be patched. Shows a two-column table: a checkbox column and a module
 * column rendered with the module icon, name, and plugin.xml location.
 *
 * @author swr
 */
public class ChooseModulesDialog extends DialogWrapper {
  private final Icon myIcon;                    // question icon shown in the north panel
  private final String myMessage;               // prompt text above the table
  private final JTable myView;                  // checkbox + module table
  private final List<Module> myCandidateModules;
  private final boolean[] myStates;             // checkbox state per candidate module (parallel to myCandidateModules)

  public ChooseModulesDialog(final Project project, List<Module> candidateModules, @NonNls String title) {
    // Delegates to the main constructor with the default "select modules to patch" prompt.
    this(project, candidateModules, title, DevKitBundle.message("select.plugin.modules.to.patch"));
  }

  public ChooseModulesDialog(final Project project, List<Module> candidateModules, @NonNls String title, final String message) {
    super(project, false);
    setTitle(title);
    myCandidateModules = candidateModules;
    myIcon = Messages.getQuestionIcon();
    myMessage = message;
    // Column 0: editable Boolean checkbox backed by myStates.
    // Column 1: read-only Module rendered by MyTableCellRenderer.
    // Note: myStates is assigned only after this block, but the model's callbacks
    // run lazily, after the constructor has completed, so the reference is safe.
    myView = new JBTable(new AbstractTableModel() {
      public int getRowCount() {
        return myCandidateModules.size();
      }

      public int getColumnCount() {
        return 2;
      }

      public boolean isCellEditable(int rowIndex, int columnIndex) {
        return columnIndex == 0;
      }

      public void setValueAt(Object aValue, int rowIndex, int columnIndex) {
        myStates[rowIndex] = (Boolean)aValue;
        fireTableCellUpdated(rowIndex, columnIndex);
      }

      public Class<?> getColumnClass(int columnIndex) {
        return columnIndex == 0 ? Boolean.class : Module.class;
      }

      public Object getValueAt(int rowIndex, int columnIndex) {
        return columnIndex == 0 ? myStates[rowIndex] : myCandidateModules.get(rowIndex);
      }
    });
    myView.setShowGrid(false);
    myView.setTableHeader(null);
    myView.setIntercellSpacing(new Dimension(0, 0));
    // Keep the checkbox column just wide enough for a checkbox.
    myView.getColumnModel().getColumn(0).setMaxWidth(new JCheckBox().getPreferredSize().width);
    // Disable OK when the user has unticked every module.
    myView.getModel().addTableModelListener(new TableModelListener() {
      public void tableChanged(TableModelEvent e) {
        getOKAction().setEnabled(getSelectedModules().size() > 0);
      }
    });
    myView.addKeyListener(new KeyAdapter() {
      public void keyTyped(KeyEvent e) {
        // NOTE(review): KEY_TYPED events carry no key code (getKeyCode() is
        // VK_UNDEFINED), so the first operand presumably never matches; the
        // getKeyChar() == '\n' check is what actually triggers OK on Enter.
        if (e.getKeyCode() == KeyEvent.VK_ENTER || e.getKeyChar() == '\n') {
          doOKAction();
        }
      }
    });
    myView.setDefaultRenderer(Module.class, new MyTableCellRenderer(project));
    // All modules start selected.
    myStates = new boolean[candidateModules.size()];
    Arrays.fill(myStates, true);
    init();
  }

  /** Builds the north panel: question icon on the left, message on top, module table below. */
  protected JComponent createNorthPanel() {
    JPanel panel = new JPanel(new BorderLayout(15, 10));
    if (myIcon != null) {
      JLabel iconLabel = new JLabel(myIcon);
      Container container = new Container();
      container.setLayout(new BorderLayout());
      container.add(iconLabel, BorderLayout.NORTH);
      panel.add(container, BorderLayout.WEST);
    }

    JPanel messagePanel = new JPanel(new BorderLayout());
    if (myMessage != null) {
      JLabel textLabel = new JLabel(myMessage);
      textLabel.setBorder(BorderFactory.createEmptyBorder(0, 0, 5, 0));
      textLabel.setUI(new MultiLineLabelUI());
      messagePanel.add(textLabel, BorderLayout.NORTH);
    }
    panel.add(messagePanel, BorderLayout.CENTER);

    final JScrollPane jScrollPane = ScrollPaneFactory.createScrollPane();
    jScrollPane.setViewportView(myView);
    jScrollPane.setPreferredSize(new Dimension(300, 80));
    panel.add(jScrollPane, BorderLayout.SOUTH);
    return panel;
  }

  public JComponent getPreferredFocusedComponent() {
    return myView;
  }

  protected JComponent createCenterPanel() {
    // All content lives in the north panel; no center component.
    return null;
  }

  /**
   * Returns the modules whose checkbox is currently ticked,
   * preserving the order of the candidate list.
   */
  public List<Module> getSelectedModules() {
    final ArrayList<Module> list = new ArrayList<Module>(myCandidateModules);
    final Iterator<Module> modules = list.iterator();
    // myStates is parallel to myCandidateModules; drop each unticked entry.
    for (boolean b : myStates) {
      modules.next();
      if (!b) {
        modules.remove();
      }
    }
    return list;
  }

  /**
   * Renders a Module cell as: [module icon] name (path of its plugin.xml,
   * relative to the project base dir when possible). Delegates to a
   * ColoredListCellRenderer through a throwaway JList.
   */
  private static class MyTableCellRenderer implements TableCellRenderer {
    private final JList myList;
    private final Project myProject;
    private final ColoredListCellRenderer myCellRenderer;

    public MyTableCellRenderer(Project project) {
      myProject = project;
      myList = new JBList();
      myCellRenderer = new ColoredListCellRenderer() {
        protected void customizeCellRenderer(JList list, Object value, int index, boolean selected, boolean hasFocus) {
          final Module module = ((Module)value);
          setIcon(module.getModuleType().getNodeIcon(false));
          append(module.getName(), SimpleTextAttributes.REGULAR_ATTRIBUTES);

          // Every candidate module is a plugin module, so plugin.xml must exist.
          final XmlFile pluginXml = PluginModuleType.getPluginXml(module);
          assert pluginXml != null;

          final VirtualFile virtualFile = pluginXml.getVirtualFile();
          assert virtualFile != null;
          final VirtualFile projectPath = myProject.getBaseDir();
          if (VfsUtil.isAncestor(projectPath, virtualFile, false)) {
            // Inside the project: show a project-relative path.
            append(" (" + VfsUtil.getRelativePath(virtualFile, projectPath, File.separatorChar) + ")",
                   SimpleTextAttributes.GRAYED_ATTRIBUTES);
          }
          else {
            append(" (" + virtualFile.getPresentableUrl() + ")", SimpleTextAttributes.GRAYED_ATTRIBUTES);
          }
        }
      };
    }

    public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus,
                                                   int row, int column) {
      return myCellRenderer.getListCellRendererComponent(myList, value, row, isSelected, hasFocus);
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package java.nio;

import com.google.gwt.typedarrays.shared.ArrayBufferView;
import com.google.gwt.typedarrays.shared.Int16Array;
import com.google.gwt.typedarrays.shared.TypedArrays;

/** A buffer of shorts.
 * <p> A short buffer can be created in either of the following ways: </p>
 * <ul>
 * <li>{@link #allocate(int) Allocate} a new short array and create a buffer based on it;</li>
 * <li>{@link #wrap(short[]) Wrap} an existing short array to create a new buffer;</li>
 * <li>Use {@link java.nio.ByteBuffer#asShortBuffer() ByteBuffer.asShortBuffer} to create a short
 * buffer based on a byte buffer.</li>
 * </ul>
 * <p> This is a GWT emulation: the contents live in an {@link Int16Array} view over the
 * backing {@link ByteBuffer}'s typed array, so all instances are direct. </p>
 */
public final class ShortBuffer extends Buffer implements Comparable<ShortBuffer>, loon.jni.HasArrayBufferView {

	// Backing byte buffer; every short occupies two bytes of it.
	private final ByteBuffer byteBuffer;
	// Int16 view over the same underlying ArrayBuffer; all get/put go through it.
	private final Int16Array shortArray;

	// Package-private: wraps the remaining content of byteBuffer as a short buffer.
	static ShortBuffer wrap (ByteBuffer byteBuffer) {
		return new ShortBuffer(byteBuffer.slice());
	}

	/** Creates a short buffer based on a newly allocated short array.
	 *
	 * @param capacity the capacity of the new buffer.
	 * @return the created short buffer.
	 * @throws IllegalArgumentException if {@code capacity} is less than zero. */
	public static ShortBuffer allocate (int capacity) {
		if (capacity < 0) {
			throw new IllegalArgumentException();
		}
		// 2 bytes per short; native order so the Int16Array view matches.
		ByteBuffer bb = ByteBuffer.allocateDirect(capacity * 2);
		bb.order(ByteOrder.nativeOrder());
		return bb.asShortBuffer();
	}

	// Capacity is the byte capacity halved; the byte buffer is reset (clear)
	// so the Int16Array view covers its whole extent.
	ShortBuffer(ByteBuffer byteBuffer) {
		super((byteBuffer.capacity() >> 1));
		this.byteBuffer = byteBuffer;
		this.byteBuffer.clear();
		this.shortArray = TypedArrays.createInt16Array(
				byteBuffer.byteArray.buffer(),
				byteBuffer.byteArray.byteOffset(), capacity);
	}

	/** Compacts this short buffer.
	 * <p> The remaining shorts will be moved to the head of the buffer, starting from position
	 * zero. Then the position is set to {@code remaining()}; the limit is set to capacity; the
	 * mark is cleared. </p>
	 *
	 * @return this buffer.
	 * @exception ReadOnlyBufferException if no changes may be made to the contents of this buffer. */
	public ShortBuffer compact () {
		// Mirror our position/limit onto the byte buffer (<< 1 converts shorts to bytes),
		// let it do the move, then restore its full range.
		byteBuffer.limit(limit << 1);
		byteBuffer.position(position << 1);
		byteBuffer.compact();
		byteBuffer.clear();
		position = limit - position;
		limit = capacity;
		mark = UNSET_MARK;
		return this;
	}

	/** Compare the remaining shorts of this buffer to another short buffer's remaining shorts.
	 *
	 * @param otherBuffer another short buffer.
	 * @return a negative value if this is less than {@code otherBuffer}; 0 if this equals to
	 * {@code otherBuffer}; a positive value if this is greater than {@code otherBuffer}.
	 * @exception ClassCastException if {@code otherBuffer} is not a short buffer. */
	public int compareTo (ShortBuffer otherBuffer) {
		int compareRemaining = (remaining() < otherBuffer.remaining()) ? remaining()
				: otherBuffer.remaining();
		int thisPos = position;
		int otherPos = otherBuffer.position;
		short thisByte, otherByte;
		while (compareRemaining > 0) {
			thisByte = get(thisPos);
			otherByte = otherBuffer.get(otherPos);
			if (thisByte != otherByte) {
				return thisByte < otherByte ? -1 : 1;
			}
			thisPos++;
			otherPos++;
			compareRemaining--;
		}
		// All compared shorts equal: shorter buffer sorts first.
		return remaining() - otherBuffer.remaining();
	}

	/** Returns a duplicated buffer that shares its content with this buffer.
	 * <p> The duplicated buffer's position, limit, capacity and mark are the same as this buffer.
	 * The duplicated buffer's read-only property and byte order are the same as this buffer's.
	 * </p>
	 * <p> The new buffer shares its content with this buffer, which means either buffer's change
	 * of content will be visible to the other. The two buffer's position, limit and mark are
	 * independent. </p>
	 *
	 * @return a duplicated buffer that shares its content with this buffer. */
	public ShortBuffer duplicate () {
		ShortBuffer buf = new ShortBuffer(
				byteBuffer.duplicate());
		buf.limit = limit;
		buf.position = position;
		buf.mark = mark;
		return buf;
	}

	/** Checks whether this short buffer is equal to another object.
	 * <p> If {@code other} is not a short buffer then {@code false} is returned. Two short buffers
	 * are equal if and only if their remaining shorts are exactly the same. Position, limit,
	 * capacity and mark are not considered. </p>
	 *
	 * @param other the object to compare with this short buffer.
	 * @return {@code true} if this short buffer is equal to {@code other}, {@code false} otherwise. */
	public boolean equals (Object other) {
		if (!(other instanceof ShortBuffer)) {
			return false;
		}
		ShortBuffer otherBuffer = (ShortBuffer)other;

		if (remaining() != otherBuffer.remaining()) {
			return false;
		}

		int myPosition = position;
		int otherPosition = otherBuffer.position;
		boolean equalSoFar = true;
		while (equalSoFar && (myPosition < limit)) {
			equalSoFar = get(myPosition++) == otherBuffer.get(otherPosition++);
		}

		return equalSoFar;
	}

	/** Returns the short at the current position and increases the position by 1.
	 *
	 * @return the short at the current position.
	 * @exception BufferUnderflowException if the position is equal or greater than limit. */
	public short get () {
		// Bounds check is disabled in this emulation, presumably for performance;
		// callers must keep position < limit themselves.
// if (position == limit) {
// throw new BufferUnderflowException();
// }
		return shortArray.get(position++);
	}

	/** Reads shorts from the current position into the specified short array and increases the
	 * position by the number of shorts read.
	 * <p> Calling this method has the same effect as {@code get(dest, 0, dest.length)}. </p>
	 *
	 * @param dest the destination short array.
	 * @return this buffer.
	 * @exception BufferUnderflowException if {@code dest.length} is greater than {@code remaining()}. */
	public ShortBuffer get (short[] dest) {
		return get(dest, 0, dest.length);
	}

	/** Reads shorts from the current position into the specified short array, starting from the
	 * specified offset, and increases the position by the number of shorts read.
	 *
	 * @param dest the target short array.
	 * @param off the offset of the short array, must not be negative and not greater than {@code
	 *           dest.length}.
	 * @param len the number of shorts to read, must be no less than zero and not greater than
	 *           {@code dest.length - off}.
	 * @return this buffer.
	 * @exception IndexOutOfBoundsException if either {@code off} or {@code len} is invalid.
	 * @exception BufferUnderflowException if {@code len} is greater than {@code remaining()}. */
	public ShortBuffer get (short[] dest, int off, int len) {
		int length = dest.length;
		// long arithmetic guards against int overflow of off + len.
		if (off < 0 || len < 0 || (long)off + (long)len > length) {
			throw new IndexOutOfBoundsException();
		}

		if (len > remaining()) {
			throw new BufferUnderflowException();
		}
		for (int i = off; i < off + len; i++) {
			dest[i] = get();
		}
		return this;
	}

	/** Returns the short at the specified index; the position is not changed.
	 *
	 * @param index the index, must not be negative and less than limit.
	 * @return a short at the specified index.
	 * @exception IndexOutOfBoundsException if index is invalid. */
	public short get (int index) {
		// Bounds check disabled in this emulation; see get().
// if (index < 0 || index >= limit) {
// throw new IndexOutOfBoundsException();
// }
		return shortArray.get(index);
	}

	/** Indicates whether this buffer is based on a short array and is read/write.
	 *
	 * @return {@code true} if this buffer is based on a short array and provides read/write
	 * access, {@code false} otherwise. */
	public final boolean hasArray () {
		// Backed by a typed array, not a Java short[].
		return false;
	}

	/** Calculates this buffer's hash code from the remaining shorts. The position, limit, capacity
	 * and mark don't affect the hash code.
	 *
	 * @return the hash code calculated from the remaining shorts. */
	public int hashCode () {
		int myPosition = position;
		int hash = 0;
		while (myPosition < limit) {
			hash = hash + get(myPosition++);
		}
		return hash;
	}

	/** Indicates whether this buffer is direct. A direct buffer will try its best to take
	 * advantage of native memory APIs and it may not stay in the Java heap, so it is not affected
	 * by garbage collection.
	 * <p> A short buffer is direct if it is based on a byte buffer and the byte buffer is direct.
	 * </p>
	 *
	 * @return {@code true} if this buffer is direct, {@code false} otherwise. */
	public boolean isDirect () {
		// Always direct: the data lives in a typed array, never in a Java heap array.
		return true;
	}

	/** Returns the byte order used by this buffer when converting shorts from/to bytes.
	 * <p> If this buffer is not based on a byte buffer, then always return the platform's native
	 * byte order. </p>
	 *
	 * @return the byte order used by this buffer when converting shorts from/to bytes. */
	public ByteOrder order () {
		return ByteOrder.nativeOrder();
	}

	/** Writes the given short to the current position and increases the position by 1.
	 *
	 * @param c the short to write.
	 * @return this buffer.
	 * @exception BufferOverflowException if position is equal or greater than limit.
	 * @exception ReadOnlyBufferException if no changes may be made to the contents of this buffer. */
	public ShortBuffer put (short c) {
		// Bounds check disabled in this emulation; see get().
// if (position == limit) {
// throw new BufferOverflowException();
// }
		shortArray.set(position++, c);
		return this;
	}

	/** Writes shorts from the given short array to the current position and increases the position
	 * by the number of shorts written. <p> Calling this method has the same effect as {@code
	 * put(src, 0, src.length)}. </p>
	 *
	 * @param src the source short array.
	 * @return this buffer.
	 * @exception BufferOverflowException if {@code remaining()} is less than {@code src.length}.
	 * @exception ReadOnlyBufferException if no changes may be made to the contents of this buffer. */
	public final ShortBuffer put (short[] src) {
		return put(src, 0, src.length);
	}

	/** Writes shorts from the given short array, starting from the specified offset, to the
	 * current position and increases the position by the number of shorts written.
	 *
	 * @param src the source short array.
	 * @param off the offset of short array, must not be negative and not greater than {@code src.length}.
	 * @param len the number of shorts to write, must be no less than zero and not greater than
	 *           {@code src.length - off}.
	 * @return this buffer.
	 * @exception BufferOverflowException if {@code remaining()} is less than {@code len}.
	 * @exception IndexOutOfBoundsException if either {@code off} or {@code len} is invalid.
	 * @exception ReadOnlyBufferException if no changes may be made to the contents of this buffer. */
	public ShortBuffer put (short[] src, int off, int len) {
		int length = src.length;
		// long arithmetic guards against int overflow of off + len.
		if (off < 0 || len < 0 || (long)off + (long)len > length) {
			throw new IndexOutOfBoundsException();
		}

		if (len > remaining()) {
			throw new BufferOverflowException();
		}
		for (int i = off; i < off + len; i++) {
			put(src[i]);
		}
		return this;
	}

	/** Writes all the remaining shorts of the {@code src} short buffer to this buffer's current
	 * position, and increases both buffers' position by the number of shorts copied.
	 *
	 * @param src the source short buffer.
	 * @return this buffer.
	 * @exception BufferOverflowException if {@code src.remaining()} is greater than this buffer's
	 *               {@code remaining()}.
	 * @exception IllegalArgumentException if {@code src} is this buffer.
	 * @exception ReadOnlyBufferException if no changes may be made to the contents of this buffer. */
	public ShortBuffer put (ShortBuffer src) {
		if (src == this) {
			throw new IllegalArgumentException();
		}
		if (src.remaining() > remaining()) {
			throw new BufferOverflowException();
		}
		// Stage through a temporary array; also advances src's position via get().
		short[] contents = new short[src.remaining()];
		src.get(contents);
		put(contents);
		return this;
	}

	/** Writes a short to the specified index of this buffer; the position is not changed.
	 *
	 * @param index the index, must not be negative and less than the limit.
	 * @param c the short to write.
	 * @return this buffer.
	 * @exception IndexOutOfBoundsException if index is invalid.
	 * @exception ReadOnlyBufferException if no changes may be made to the contents of this buffer. */
	public ShortBuffer put (int index, short c) {
		// Bounds check disabled in this emulation; see get().
// if (index < 0 || index >= limit) {
// throw new IndexOutOfBoundsException();
// }
		shortArray.set(index, c);
		return this;
	}

	/** Returns a sliced buffer that shares its content with this buffer.
	 * <p> The sliced buffer's capacity will be this buffer's {@code remaining()}, and its zero
	 * position will correspond to this buffer's current position. The new buffer's position will
	 * be 0, limit will be its capacity, and its mark is cleared. The new buffer's read-only
	 * property and byte order are same as this buffer's. </p>
	 * <p> The new buffer shares its content with this buffer, which means either buffer's change
	 * of content will be visible to the other. The two buffer's position, limit and mark are
	 * independent. </p>
	 *
	 * @return a sliced buffer that shares its content with this buffer. */
	public ShortBuffer slice () {
		// Temporarily narrow the byte buffer to our remaining range so its slice()
		// covers exactly that window, then restore its full range.
		byteBuffer.limit(limit << 1);
		byteBuffer.position(position << 1);
		ShortBuffer result = new ShortBuffer(byteBuffer.slice());
		byteBuffer.clear();
		return result;
	}

	/** Returns a string representing the state of this short buffer.
	 *
	 * @return a string representing the state of this short buffer. */
	public String toString () {
		StringBuffer buf = new StringBuffer();
		buf.append(getClass().getName());
		buf.append(", status: capacity="); //$NON-NLS-1$
		buf.append(capacity());
		buf.append(" position="); //$NON-NLS-1$
		buf.append(position());
		buf.append(" limit="); //$NON-NLS-1$
		buf.append(limit());
		return buf.toString();
	}

	// Exposes the raw Int16Array view (HasArrayBufferView contract).
	public ArrayBufferView getTypedArray () {
		return shortArray;
	}

	// Element size in bytes (2 for a short).
	public int getElementSize () {
		return 2;
	}

	public int getElementType() {
		return 0x1402; // GL_SHORT
	}

	public boolean isReadOnly() {
		return false;
	}

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sling.commons.osgi; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; /** * This is a helper class to parse manifest header entries. */ public class ManifestHeader { /** * A header can have several entries separated by comma. */ public interface Entry { /** * The value of the entry. */ String getValue(); /** * The attributes specified for this entry. */ NameValuePair[] getAttributes(); /** * The directives for this entry. */ NameValuePair[] getDirectives(); String getAttributeValue(String name); String getDirectiveValue(String name); } /** The entries for this header. */ private Entry[] entries = new Entry[0]; /** * Add new entries from parsing. */ private void add(Entry[] paths) { if ( paths != null && paths.length > 0 ) { final Entry[] copy = new Entry[this.entries.length + paths.length]; System.arraycopy(this.entries, 0, copy, 0, this.entries.length); System.arraycopy(paths, 0, copy, this.entries.length, paths.length); this.entries = copy; } } /** * Return the entries for this header. */ public Entry[] getEntries() { return this.entries; } /** * Directives and attributes are simple name/value pairs. 
*/ public final static class NameValuePair { private final String name; private final String value; public NameValuePair(String name, String value) { this.name = name; this.value = value; } public String getName() { return name; } public String getValue() { return value; } } private static final String CLASS_PATH_SEPARATOR = ","; private static final String PACKAGE_SEPARATOR = ";"; private static final String DIRECTIVE_SEPARATOR = ":="; private static final String ATTRIBUTE_SEPARATOR = "="; /** * Parse headers * Like this: path; path; dir1:=dirval1; dir2:=dirval2; attr1=attrval1; attr2=attrval2, * path; path; dir1:=dirval1; dir2:=dirval2; attr1=attrval1; attr2=attrval2 * The returned object maintains the order of entries (paths), directives and attributes. */ public static ManifestHeader parse(String header) { final ManifestHeader entry = new ManifestHeader(); if (header != null) { if (header.length() == 0) { throw new IllegalArgumentException("A header cannot be an empty string."); } final String[] clauseStrings = parseDelimitedString(header, CLASS_PATH_SEPARATOR); if ( clauseStrings != null ) { for(final String clause : clauseStrings) { entry.add(parseStandardHeaderClause(clause)); } } } return (entry.getEntries().length == 0) ? null : entry; } /** * Parse a clause * Like this: path; path; dir1:=dirval1; dir2:=dirval2; attr1=attrval1; attr2=attrval2 */ private static ManifestHeader.Entry[] parseStandardHeaderClause(String clauseString) throws IllegalArgumentException { // Break string into semi-colon delimited pieces. String[] pieces = parseDelimitedString(clauseString, PACKAGE_SEPARATOR); // Count the number of different paths; paths // will not have an '=' in their string. This assumes // that paths come first, before directives and // attributes. int pathCount = 0; for (int pieceIdx = 0; pieceIdx < pieces.length; pieceIdx++) { if (pieces[pieceIdx].indexOf('=') >= 0) { break; } pathCount++; } // Error if no paths were specified. 
if (pathCount == 0) { throw new IllegalArgumentException( "No paths specified in header: " + clauseString); } // Create an array of paths. PathImpl[] paths = new PathImpl[pathCount]; for(int i=0;i<pathCount;i++) { paths[i] = new PathImpl(pieces[i]); } // Parse the directives/attributes // and keep the order // for simpliefied checking if a directive/attribute is used twice, we keep // two collections: one for the values and one for the names final List<ManifestHeader.NameValuePair> dirsList = new ArrayList<ManifestHeader.NameValuePair>(); final Set<String> dirsNames = new HashSet<String>(); final List<ManifestHeader.NameValuePair> attrsList = new ArrayList<ManifestHeader.NameValuePair>(); final Set<String> attrsNames = new HashSet<String>(); int idx = -1; String sep = null; for (int pieceIdx = pathCount; pieceIdx < pieces.length; pieceIdx++) { if ((idx = pieces[pieceIdx].indexOf(DIRECTIVE_SEPARATOR)) >= 0) { sep = DIRECTIVE_SEPARATOR; } else if ((idx = pieces[pieceIdx].indexOf(ATTRIBUTE_SEPARATOR)) >= 0) { sep = ATTRIBUTE_SEPARATOR; } else { throw new IllegalArgumentException("Not a directive/attribute: " + clauseString); } final String key = pieces[pieceIdx].substring(0, idx).trim(); String value = pieces[pieceIdx].substring(idx + sep.length()).trim(); // Remove quotes, if value is quoted. if (value.startsWith("\"") && value.endsWith("\"")) { value = value.substring(1, value.length() - 1); } // Save the directive/attribute in the appropriate array. if (sep.equals(DIRECTIVE_SEPARATOR)) { // Check for duplicates. if (dirsNames.contains(key)) { throw new IllegalArgumentException("Duplicate directive: " + key); } dirsList.add(new ManifestHeader.NameValuePair(key, value)); dirsNames.add(key); } else { // Check for duplicates. if (attrsNames.contains(key)) { throw new IllegalArgumentException("Duplicate attribute: " + key); } attrsList.add(new ManifestHeader.NameValuePair(key, value)); attrsNames.add(key); } } // Create directive array. 
ManifestHeader.NameValuePair[] dirs = dirsList.toArray(new ManifestHeader.NameValuePair[dirsList.size()]); // Create attribute array. ManifestHeader.NameValuePair[] attrs = attrsList.toArray(new ManifestHeader.NameValuePair[attrsList.size()]); // now set attributes and directives for each path for(int i=0;i<pathCount;i++) { paths[i].init(dirs, attrs); } return paths; } private static final int CHAR = 1; private static final int DELIMITER = 2; private static final int STARTQUOTE = 4; private static final int ENDQUOTE = 8; /** * Parses delimited string and returns an array containing the tokens. This * parser obeys quotes, so the delimiter character will be ignored if it is * inside of a quote. This method assumes that the quote character is not * included in the set of delimiter characters. * @param value the delimited string to parse. * @param delim the characters delimiting the tokens. * @return an array of string tokens or null if there were no tokens. **/ private static String[] parseDelimitedString(String value, String delim) { if (value == null) { value = ""; } final List<String> list = new ArrayList<String>(); final StringBuffer sb = new StringBuffer(); int expecting = (CHAR | DELIMITER | STARTQUOTE); for (int i = 0; i < value.length(); i++) { char c = value.charAt(i); boolean isDelimiter = (delim.indexOf(c) >= 0); boolean isQuote = (c == '"'); if (isDelimiter && ((expecting & DELIMITER) > 0)) { list.add(sb.toString().trim()); sb.delete(0, sb.length()); expecting = (CHAR | DELIMITER | STARTQUOTE); } else if (isQuote && ((expecting & STARTQUOTE) > 0)) { sb.append(c); expecting = CHAR | ENDQUOTE; } else if (isQuote && ((expecting & ENDQUOTE) > 0)) { sb.append(c); expecting = (CHAR | STARTQUOTE | DELIMITER); } else if ((expecting & CHAR) > 0) { sb.append(c); } else { throw new IllegalArgumentException("Invalid delimited string: " + value); } } if (sb.length() > 0) { list.add(sb.toString().trim()); } if ( list.size() == 0 ) { return null; } return 
list.toArray(new String[list.size()]); } protected static final class PathImpl implements ManifestHeader.Entry { private final String value; private NameValuePair[] attributes; private NameValuePair[] directives; public PathImpl(final String path) { this.value = path; } public void init(NameValuePair[] dirs, NameValuePair[] attrs) { this.directives = dirs; this.attributes = attrs; } /** * @see org.apache.sling.commons.osgi.ManifestHeader.Entry#getAttributes() */ public NameValuePair[] getAttributes() { return this.attributes; } /** * @see org.apache.sling.commons.osgi.ManifestHeader.Entry#getDirectives() */ public NameValuePair[] getDirectives() { return this.directives; } /** * @see org.apache.sling.commons.osgi.ManifestHeader.Entry#getValue() */ public String getValue() { return this.value; } public String getAttributeValue(String name) { String v = null; int index = 0; while ( v == null && index < attributes.length ) { if ( attributes[index].getName().equals(name) ) { v = attributes[index].getValue(); } index++; } return v; } public String getDirectiveValue(String name) { String v = null; int index = 0; while ( v == null && index < directives.length ) { if ( directives[index].getName().equals(name) ) { v = directives[index].getValue(); } index++; } return v; } } }
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.ui.spoon.trans; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.ResourceBundle; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.atomic.AtomicBoolean; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.events.DisposeEvent; import org.eclipse.swt.events.DisposeListener; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.ToolBar; import org.pentaho.di.core.Const; import org.pentaho.di.core.Props; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.i18n.GlobalMessages; import org.pentaho.di.trans.step.BaseStepData.StepExecutionStatus; import org.pentaho.di.trans.step.StepInterface; 
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepStatus;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.widget.ColumnInfo;
import org.pentaho.di.ui.core.widget.TableView;
import org.pentaho.di.ui.spoon.Spoon;
import org.pentaho.di.ui.spoon.XulSpoonSettingsManager;
import org.pentaho.di.ui.spoon.delegates.SpoonDelegate;
import org.pentaho.di.ui.xul.KettleXulLoader;
import org.pentaho.ui.xul.XulDomContainer;
import org.pentaho.ui.xul.XulLoader;
import org.pentaho.ui.xul.containers.XulToolbar;
import org.pentaho.ui.xul.impl.XulEventHandler;
import org.pentaho.ui.xul.swt.tags.SwtToolbarbutton;

/**
 * Spoon delegate that owns the "execution grid" tab of a transformation graph:
 * a table with one row per step showing per-step execution metrics
 * (read/written/input/output/updated/rejected/errors/active/time/speed/...).
 */
public class TransGridDelegate extends SpoonDelegate implements XulEventHandler {
  private static Class<?> PKG = Spoon.class; // for i18n purposes, needed by Translator2!!

  // XUL definition of the tab's toolbar.
  private static final String XUL_FILE_TRANS_GRID_TOOLBAR = "ui/trans-grid-toolbar.xul";

  private static final int STEP_NUMBER_COLUMN = 0;
  private static final int STEP_NAME_COLUMN = 1;

  // Refresh cadence in milliseconds (grid refresh vs. full view update).
  public static final long REFRESH_TIME = 100L;
  public static final long UPDATE_TIME_VIEW = 1000L;

  private TransGraph transGraph;

  private CTabItem transGridTab;     // the tab inside the graph's extra-view folder
  private TableView transGridView;   // the metrics table itself

  private boolean refresh_busy;      // guards against overlapping refreshes

  private long lastUpdateView;       // timestamp of the last full view update

  private XulToolbar toolbar;

  private Composite transGridComposite;

  private boolean hideInactiveSteps; // toolbar toggle: hide finished/inactive steps

  private boolean showSelectedSteps; // toolbar toggle: restrict grid to selected steps

  /**
   * @param spoon
   * @param transGraph
   */
  public TransGridDelegate( Spoon spoon, TransGraph transGraph ) {
    super( spoon );
    this.transGraph = transGraph;

    hideInactiveSteps = false;
  }

  /**
   * Toggles the grid tab: creates it when absent/disposed, otherwise disposes
   * it and lets the graph collapse the extra view if it became empty.
   */
  public void showGridView() {

    if ( transGridTab == null || transGridTab.isDisposed() ) {
      addTransGrid();
    } else {
      transGridTab.dispose();

      transGraph.checkEmptyExtraView();
    }
  }

  /**
   * Add a grid with the execution metrics per step in a table view
   *
   */
  public void addTransGrid() {

    // First, see if we need to add the extra view...
    //
    if ( transGraph.extraViewComposite == null || transGraph.extraViewComposite.isDisposed() ) {
      transGraph.addExtraView();
    } else {
      if ( transGridTab != null && !transGridTab.isDisposed() ) {
        // just set this one active and get out...
        //
        transGraph.extraViewTabFolder.setSelection( transGridTab );
        return;
      }
    }

    transGridTab = new CTabItem( transGraph.extraViewTabFolder, SWT.NONE );
    transGridTab.setImage( GUIResource.getInstance().getImageShowGrid() );
    transGridTab.setText( BaseMessages.getString( PKG, "Spoon.TransGraph.GridTab.Name" ) );

    transGridComposite = new Composite( transGraph.extraViewTabFolder, SWT.NONE );
    transGridComposite.setLayout( new FormLayout() );

    addToolBar();

    Control toolbarControl = (Control) toolbar.getManagedObject();

    toolbarControl.setLayoutData( new FormData() );
    FormData fd = new FormData();
    fd.left = new FormAttachment( 0, 0 ); // First one in the left top corner
    fd.top = new FormAttachment( 0, 0 );
    fd.right = new FormAttachment( 100, 0 );
    toolbarControl.setLayoutData( fd );

    toolbarControl.setParent( transGridComposite );

    //ignore whitespace for stepname column valueMeta, causing sorting to ignore whitespace
    String stepNameColumnName = BaseMessages.getString( PKG, "TransLog.Column.Stepname" );
    ValueMetaInterface valueMeta = new ValueMetaString( stepNameColumnName );
    valueMeta.setIgnoreWhitespace( true );
    ColumnInfo stepNameColumnInfo =
      new ColumnInfo( stepNameColumnName, ColumnInfo.COLUMN_TYPE_TEXT, false, true );
    stepNameColumnInfo.setValueMeta( valueMeta );

    // One read-only text column per metric; order must match the rows built
    // elsewhere in this delegate.
    ColumnInfo[] colinf =
      new ColumnInfo[] {
        stepNameColumnInfo,
        new ColumnInfo(
          BaseMessages.getString( PKG, "TransLog.Column.Copynr" ), ColumnInfo.COLUMN_TYPE_TEXT, false, true ),
        new ColumnInfo(
          BaseMessages.getString( PKG, "TransLog.Column.Read" ), ColumnInfo.COLUMN_TYPE_TEXT, false, true ),
        new ColumnInfo(
          BaseMessages.getString( PKG, "TransLog.Column.Written" ), ColumnInfo.COLUMN_TYPE_TEXT, false, true ),
        new ColumnInfo(
          BaseMessages.getString( PKG, "TransLog.Column.Input" ), ColumnInfo.COLUMN_TYPE_TEXT, false, true ),
        new ColumnInfo(
          BaseMessages.getString( PKG, "TransLog.Column.Output" ), ColumnInfo.COLUMN_TYPE_TEXT, false, true ),
        new ColumnInfo(
          BaseMessages.getString( PKG, "TransLog.Column.Updated" ), ColumnInfo.COLUMN_TYPE_TEXT, false, true ),
        new ColumnInfo(
          BaseMessages.getString( PKG, "TransLog.Column.Rejected" ), ColumnInfo.COLUMN_TYPE_TEXT, false, true ),
        new ColumnInfo(
          BaseMessages.getString( PKG, "TransLog.Column.Errors" ), ColumnInfo.COLUMN_TYPE_TEXT, false, true ),
        new ColumnInfo(
          BaseMessages.getString( PKG, "TransLog.Column.Active" ), ColumnInfo.COLUMN_TYPE_TEXT, false, true ),
        new ColumnInfo(
          BaseMessages.getString( PKG, "TransLog.Column.Time" ), ColumnInfo.COLUMN_TYPE_TEXT, false, true ),
        new ColumnInfo(
          BaseMessages.getString( PKG, "TransLog.Column.Speed" ), ColumnInfo.COLUMN_TYPE_TEXT, false, true ),
        new ColumnInfo(
          BaseMessages.getString( PKG, "TransLog.Column.PriorityBufferSizes" ), ColumnInfo.COLUMN_TYPE_TEXT,
          false, true ), };

    // Right-align the numeric columns; "Active" (9) stays left-aligned.
    // "setAllignement" (sic) is the actual ColumnInfo API name.
    colinf[1].setAllignement( SWT.RIGHT );
    colinf[2].setAllignement( SWT.RIGHT );
    colinf[3].setAllignement( SWT.RIGHT );
    colinf[4].setAllignement( SWT.RIGHT );
    colinf[5].setAllignement( SWT.RIGHT );
    colinf[6].setAllignement( SWT.RIGHT );
    colinf[7].setAllignement( SWT.RIGHT );
    colinf[8].setAllignement( SWT.RIGHT );
    colinf[9].setAllignement( SWT.LEFT );
    colinf[10].setAllignement( SWT.RIGHT );
    colinf[11].setAllignement( SWT.RIGHT );
    colinf[12].setAllignement( SWT.RIGHT );

    transGridView = new TableView( transGraph.getManagedObject(), transGridComposite, SWT.BORDER
      | SWT.FULL_SELECTION | SWT.MULTI, colinf, 1,
      true, // readonly!
null, // Listener spoon.props ); FormData fdView = new FormData(); fdView.left = new FormAttachment( 0, 0 ); fdView.right = new FormAttachment( 100, 0 ); fdView.top = new FormAttachment( (Control) toolbar.getManagedObject(), 0 ); fdView.bottom = new FormAttachment( 100, 0 ); transGridView.setLayoutData( fdView ); ColumnInfo numberColumn = transGridView.getNumberColumn(); ValueMetaInterface numberColumnValueMeta = new ValueMetaString( "#", TransGridDelegate::subStepCompare ); numberColumn.setValueMeta( numberColumnValueMeta ); // Add a timer to update this view every couple of seconds... // final Timer tim = new Timer( "TransGraph: " + transGraph.getMeta().getName() ); final AtomicBoolean busy = new AtomicBoolean( false ); TimerTask timtask = new TimerTask() { public void run() { if ( !spoon.getDisplay().isDisposed() ) { spoon.getDisplay().asyncExec( new Runnable() { public void run() { if ( !busy.get() ) { busy.set( true ); refreshView(); busy.set( false ); } } } ); } } }; tim.schedule( timtask, 0L, REFRESH_TIME ); // schedule to repeat a couple of times per second to get fast feedback transGridTab.addDisposeListener( new DisposeListener() { public void widgetDisposed( DisposeEvent disposeEvent ) { tim.cancel(); } } ); transGridTab.setControl( transGridComposite ); transGraph.extraViewTabFolder.setSelection( transGridTab ); } private void addToolBar() { try { XulLoader loader = new KettleXulLoader(); loader.setSettingsManager( XulSpoonSettingsManager.getInstance() ); ResourceBundle bundle = GlobalMessages.getBundle( "org/pentaho/di/ui/spoon/messages/messages" ); XulDomContainer xulDomContainer = loader.loadXul( XUL_FILE_TRANS_GRID_TOOLBAR, bundle ); xulDomContainer.addEventHandler( this ); toolbar = (XulToolbar) xulDomContainer.getDocumentRoot().getElementById( "nav-toolbar" ); ToolBar swtToolBar = (ToolBar) toolbar.getManagedObject(); spoon.props.setLook( swtToolBar, Props.WIDGET_STYLE_TOOLBAR ); swtToolBar.layout( true, true ); } catch ( Throwable t ) { 
log.logError( toString(), Const.getStackTracker( t ) ); new ErrorDialog( transGridComposite.getShell(), BaseMessages.getString( PKG, "Spoon.Exception.ErrorReadingXULFile.Title" ), BaseMessages.getString( PKG, "Spoon.Exception.ErrorReadingXULFile.Message", XUL_FILE_TRANS_GRID_TOOLBAR ), new Exception( t ) ); } } public void showHideInactive() { hideInactiveSteps = !hideInactiveSteps; SwtToolbarbutton onlyActiveButton = (SwtToolbarbutton) toolbar.getElementById( "show-inactive" ); if ( onlyActiveButton != null ) { onlyActiveButton.setSelected( hideInactiveSteps ); if ( hideInactiveSteps ) { onlyActiveButton.setImage( GUIResource.getInstance().getImageHideInactive() ); } else { onlyActiveButton.setImage( GUIResource.getInstance().getImageShowInactive() ); } } } public void showHideSelected() { showSelectedSteps = !showSelectedSteps; SwtToolbarbutton onlySelectedButton = (SwtToolbarbutton) toolbar.getElementById( "show-selected" ); if ( onlySelectedButton != null ) { onlySelectedButton.setSelected( showSelectedSteps ); if ( showSelectedSteps ) { onlySelectedButton.setImage( GUIResource.getInstance().getImageShowSelected() ); } else { onlySelectedButton.setImage( GUIResource.getInstance().getImageShowAll() ); } } } private void refreshView() { boolean tableCleared = false; int numberStepsToDisplay = -1; int baseStepCount = -1; if ( transGridView == null || transGridView.isDisposed() ) { return; } if ( refresh_busy ) { return; } List<StepMeta> selectedSteps = new ArrayList<StepMeta>(); if ( showSelectedSteps ) { selectedSteps = transGraph.trans.getTransMeta().getSelectedSteps(); } int topIdx = transGridView.getTable().getTopIndex(); refresh_busy = true; Table table = transGridView.table; long time = new Date().getTime(); long msSinceLastUpdate = time - lastUpdateView; if ( transGraph.trans != null && !transGraph.trans.isPreparing() && msSinceLastUpdate > UPDATE_TIME_VIEW ) { lastUpdateView = time; baseStepCount = transGraph.trans.nrSteps(); if ( hideInactiveSteps ) { 
numberStepsToDisplay = transGraph.trans.nrActiveSteps(); } else { numberStepsToDisplay = baseStepCount; } StepExecutionStatus[] stepStatusLookup = transGraph.trans.getTransStepExecutionStatusLookup(); boolean[] isRunningLookup = transGraph.trans.getTransStepIsRunningLookup(); // Count sub steps for ( int i = 0; i < baseStepCount; i++ ) { // if inactive steps are hidden, only count sub steps of active base steps if ( !hideInactiveSteps || ( isRunningLookup[ i ] || stepStatusLookup[ i ] != StepExecutionStatus.STATUS_FINISHED ) ) { StepInterface baseStep = transGraph.trans.getRunThread( i ); numberStepsToDisplay += baseStep.subStatuses().size(); } } if ( table.getItemCount() != numberStepsToDisplay ) { table.removeAll(); tableCleared = true; } if ( numberStepsToDisplay == 0 && table.getItemCount() == 0 ) { new TableItem( table, SWT.NONE ); refresh_busy = false; return; } if ( tableCleared ) { // iterate over the base steps and add into table for ( int i = 0; i < baseStepCount; i++ ) { StepInterface baseStep = transGraph.trans.getRunThread( i ); // if the step should be displayed if ( showSelected( selectedSteps, baseStep ) && ( hideInactiveSteps && ( isRunningLookup[ i ] || stepStatusLookup[ i ] != StepExecutionStatus.STATUS_FINISHED ) ) || ( !hideInactiveSteps && stepStatusLookup[ i ] != StepExecutionStatus.STATUS_EMPTY ) ) { // write base step to table TableItem ti = new TableItem( table, SWT.NONE ); String baseStepNumber = "" + ( i + 1 ); ti.setText( STEP_NUMBER_COLUMN, baseStepNumber ); updateRowFromBaseStep( baseStep, ti ); // write sub steps to table int subStepIndex = 1; for ( StepStatus subStepStatus : baseStep.subStatuses() ) { String[] subFields = subStepStatus.getTransLogFields( baseStep.getStatus().getDescription() ); subFields[ STEP_NAME_COLUMN ] = " " + subFields[ STEP_NAME_COLUMN ]; TableItem subItem = new TableItem( table, SWT.NONE ); subItem.setText( STEP_NUMBER_COLUMN, baseStepNumber + "." 
+ subStepIndex++ ); for ( int f = 1; f < subFields.length; f++ ) { subItem.setText( f, subFields[ f ] ); } } } } } else { // iterate over and update the existing rows in the table for ( int rowIndex = 0; rowIndex < numberStepsToDisplay; rowIndex++ ) { TableItem ti = table.getItem( rowIndex ); if ( ti == null ) { continue; } String tableStepNumber = ti.getText( STEP_NUMBER_COLUMN ); String[] tableStepNumberSplit = tableStepNumber.split( "\\." ); String tableBaseStepNumber = tableStepNumberSplit[ 0 ]; boolean isBaseStep = true; if ( tableStepNumberSplit.length > 1 ) { isBaseStep = false; } // step numbers displayed on table start at 1 and step number indexes begin at 0 int baseStepNumber = Integer.parseInt( tableBaseStepNumber ) - 1; StepInterface baseStep = transGraph.trans.getRunThread( baseStepNumber ); // if the step should be displayed if ( showSelected( selectedSteps, baseStep ) && ( hideInactiveSteps && ( isRunningLookup[ baseStepNumber ] || stepStatusLookup[ baseStepNumber ] != StepExecutionStatus.STATUS_FINISHED ) ) || ( !hideInactiveSteps && stepStatusLookup[ baseStepNumber ] != StepExecutionStatus.STATUS_EMPTY ) ) { if ( isBaseStep ) { updateRowFromBaseStep( baseStep, ti ); } else { // loop through sub steps and update the one that matches the sub step name from the table String tableSubStepName = ti.getText( STEP_NAME_COLUMN ); for ( StepStatus subStepStatus : baseStep.subStatuses() ) { String[] subFields = subStepStatus.getTransLogFields( baseStep.getStatus().getDescription() ); subFields[ STEP_NAME_COLUMN ] = " " + subFields[ STEP_NAME_COLUMN ]; if ( (subFields[ STEP_NAME_COLUMN ]).equals( tableSubStepName ) ) { updateCellsIfChanged( subFields, ti ); } } } } } } int sortColumn = transGridView.getSortField(); boolean sortDescending = transGridView.isSortingDescending(); // Only need to re-sort if the output has been sorted differently to the default if ( table.getItemCount() > 0 && ( sortColumn != 0 || sortDescending ) ) { transGridView.sortTable( 
transGridView.getSortField(), sortDescending ); } // Alternate row background color for ( int i = 0; i < table.getItems().length; i++ ) { TableItem item = table.getItem( i ); item.setForeground( GUIResource.getInstance().getColorBlack() ); if ( !item.getBackground().equals( GUIResource.getInstance().getColorRed() ) ) { item.setBackground( i % 2 == 0 ? GUIResource.getInstance().getColorWhite() : GUIResource.getInstance().getColorBlueCustomGrid() ); } } // if (updateRowNumbers) { transGridView.setRowNums(); } transGridView.optWidth( true ); int[] selectedItems = transGridView.getSelectionIndices(); if ( selectedItems != null && selectedItems.length > 0 ) { transGridView.setSelection( selectedItems ); } // transGridView.getTable().setTopIndex(topIdx); if ( transGridView.getTable().getTopIndex() != topIdx ) { transGridView.getTable().setTopIndex( topIdx ); } } else { // We need at least one table-item in a table! if ( table.getItemCount() == 0 ) { new TableItem( table, SWT.NONE ); } } refresh_busy = false; } private void updateRowFromBaseStep( StepInterface baseStep, TableItem row ) { StepStatus stepStatus = new StepStatus( baseStep ); String[] fields = stepStatus.getTransLogFields(); updateCellsIfChanged( fields, row ); // Error lines should appear in red: if ( baseStep.getErrors() > 0 ) { row.setBackground( GUIResource.getInstance().getColorRed() ); } else { row.setBackground( GUIResource.getInstance().getColorWhite() ); } } private boolean showSelected( List<StepMeta> selectedSteps, StepInterface baseStep ) { // See if the step is selected & in need of display boolean showSelected; if ( showSelectedSteps ) { if ( selectedSteps.size() == 0 ) { showSelected = true; } else { showSelected = false; for ( StepMeta stepMeta : selectedSteps ) { if ( baseStep.getStepMeta().equals( stepMeta ) ) { showSelected = true; break; } } } } else { showSelected = true; } return showSelected; } /** * Anti-flicker: if nothing has changed, don't change it on the screen! 
* * @param fields * @param row */ private void updateCellsIfChanged( String[] fields, TableItem row ) { for ( int f = 1; f < fields.length; f++ ) { if ( !fields[ f ].equalsIgnoreCase( row.getText( f ) ) ) { row.setText( f, fields[ f ] ); } } } public CTabItem getTransGridTab() { return transGridTab; } /* * (non-Javadoc) * * @see org.pentaho.ui.xul.impl.XulEventHandler#getData() */ public Object getData() { // TODO Auto-generated method stub return null; } /* * (non-Javadoc) * * @see org.pentaho.ui.xul.impl.XulEventHandler#getName() */ public String getName() { return "transgrid"; } /** * Sub Step Compare * * Note - nulls must be handled outside of this method * * @param o1 - First object to compare * @param o2 - Second object to compare * @return 0 if equal, integer greater than 0 if o1 > o2, integer less than 0 if o2 > o1 */ static int subStepCompare( Object o1, Object o2 ) { final String[] string1 = o1.toString().split( "\\." ); final String[] string2 = o2.toString().split( "\\." ); //Compare the base step first int cmp = Integer.compare( Integer.parseInt( string1[ 0 ] ), Integer.parseInt( string2[ 0 ] ) ); //if the base step numbers are equal, then we need to compare the sub step numbers if ( cmp == 0 ) { if ( string1.length == 2 && string2.length == 2 ) { //compare the sub step numbers cmp = Integer.compare( Integer.parseInt( string1[ 1 ] ), Integer.parseInt( string2[ 1 ] ) ); } else if ( string1.length < string2.length ) { cmp = -1; } else if ( string2.length < string1.length ) { cmp = 1; } } return cmp; } /* * (non-Javadoc) * * @see org.pentaho.ui.xul.impl.XulEventHandler#getXulDomContainer() */ public XulDomContainer getXulDomContainer() { // TODO Auto-generated method stub return null; } /* * (non-Javadoc) * * @see org.pentaho.ui.xul.impl.XulEventHandler#setData(java.lang.Object) */ public void setData( Object data ) { // TODO Auto-generated method stub } /* * (non-Javadoc) * * @see org.pentaho.ui.xul.impl.XulEventHandler#setName(java.lang.String) */ public 
void setName( String name ) { // TODO Auto-generated method stub } /* * (non-Javadoc) * * @see org.pentaho.ui.xul.impl.XulEventHandler#setXulDomContainer(org.pentaho.ui.xul.XulDomContainer) */ public void setXulDomContainer( XulDomContainer xulDomContainer ) { // TODO Auto-generated method stub } }
/*
 * Copyright 2012-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.android;

import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.MoreStrings;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.google.common.collect.Table;

import java.nio.file.Path;
import java.util.Collection;
import java.util.Map;
import java.util.Set;

/**
 * Static helpers for filtering Android resources by screen density: given the
 * set of available resource files and a set of target densities, decides which
 * files can be dropped from the APK.
 */
public class ResourceFilters {

  /** Utility class: do not instantiate. */
  private ResourceFilters() {}

  /**
   * The set of supported directories in resource folders. This is defined in
   * http://developer.android.com/guide/topics/resources/providing-resources.html#table1
   */
  @VisibleForTesting
  public static final ImmutableSet<String> SUPPORTED_RESOURCE_DIRECTORIES = ImmutableSet.of(
      "animator",
      "anim",
      "color",
      "drawable",
      "mipmap",
      "layout",
      "menu",
      "raw",
      "values",
      "xml");

  /**
   * Represents the names and values of valid densities for resources as defined in
   * http://developer.android.com/guide/topics/resources/providing-resources.html#DensityQualifier
   */
  public enum Density {
    // Note: ordering here matters and must be increasing by number!
    LDPI("ldpi", 120.0),
    NO_QUALIFIER("", 160.0),
    MDPI("mdpi", 160.0),
    TVDPI("tvdpi", 213.0),
    HDPI("hdpi", 240.0),
    XHDPI("xhdpi", 320.0),
    XXHDPI("xxhdpi", 480.0),
    XXXHDPI("xxxhdpi", 640.0);

    // The qualifier string as it appears in a directory name, e.g. "xhdpi"
    // (empty string for the unqualified default).
    private final String qualifier;
    // Dots-per-inch value for the density bucket.
    private final double value;

    // Natural (declaration) order, i.e. increasing density.
    public static final Ordering<Density> ORDERING = Ordering.natural();

    Density(String qualifier, double value) {
      this.qualifier = qualifier;
      this.value = value;
    }

    public double value() {
      return value;
    }

    @Override
    public String toString() {
      return qualifier;
    }

    /** Parses a qualifier string; the empty string maps to {@link #NO_QUALIFIER}. */
    public static Density from(String s) {
      return s.isEmpty() ? NO_QUALIFIER : valueOf(s.toUpperCase());
    }

    /** Returns true iff {@code s} is one of the known density qualifier strings. */
    public static boolean isDensity(String s) {
      for (Density choice : values()) {
        if (choice.toString().equals(s)) {
          return true;
        }
      }
      return false;
    }
  }

  /**
   * Decomposition of a resource folder name into its density qualifier and the
   * remaining (non-density) qualifiers.
   */
  public static class Qualifiers {
    /** e.g. "xhdpi" */
    public final ResourceFilters.Density density;
    /** e.g. "de-v11" */
    public final String others;

    /**
     * Creates a Qualfiers given the Path to a resource folder, pulls out the density filters and
     * leaves the rest.
     */
    public static Qualifiers from(Path path) {
      ResourceFilters.Density density = Density.NO_QUALIFIER;
      StringBuilder othersBuilder = new StringBuilder();
      // Folder names look like "<type>-<qual1>-<qual2>...", e.g. "drawable-fr-hdpi".
      String parts[] = path.getFileName().toString().split("-");
      Preconditions.checkState(parts.length > 0);
      Preconditions.checkState(SUPPORTED_RESOURCE_DIRECTORIES.contains(parts[0]));
      for (int i = 1; i < parts.length; i++) {
        String qualifier = parts[i];
        if (ResourceFilters.Density.isDensity(qualifier)) {
          density = Density.from(qualifier);
        } else {
          othersBuilder.append((MoreStrings.isEmpty(othersBuilder) ? "" : "-") + qualifier);
        }
      }
      return new Qualifiers(density, othersBuilder.toString());
    }

    private Qualifiers(Density density, String others) {
      this.density = density;
      this.others = others;
    }
  }

  /**
   * Takes a list of image files (as paths), and a target density (mdpi, hdpi, xhdpi), and
   * returns a list of files which can be safely left out when building an APK for phones with that
   * screen density. That APK will run on other screens as well but look worse due to scaling.
   * <p>
   * Each combination of non-density qualifiers is processed separately. For example, if we have
   * {@code drawable-hdpi, drawable-mdpi, drawable-xhdpi, drawable-hdpi-ro}, for a target of {@code
   * mdpi}, we'll be keeping {@code drawable-mdpi, drawable-hdpi-ro}.
   * @param candidates list of paths to image files
   * @param targetDensities densities we want to keep
   * @param canDownscale do we have access to an image scaler
   * @return set of files to remove
   */
  @VisibleForTesting
  static ImmutableSet<Path> filterByDensity(
      Collection<Path> candidates,
      Set<ResourceFilters.Density> targetDensities,
      boolean canDownscale) {
    ImmutableSet.Builder<Path> removals = ImmutableSet.builder();

    Table<String, Density, Path> imageValues = HashBasedTable.create();

    // Create mappings for drawables. If candidate == "<base>/drawable-<dpi>-<other>/<filename>",
    // then we'll record a mapping of the form ("<base>/<filename>/<other>", "<dpi>") -> candidate.
    // For example:
    //                                    mdpi                               hdpi
    //                       --------------------------------------------------------------------
    // key: res/some.png/    |  res/drawable-mdpi/some.png          res/drawable-hdpi/some.png
    // key: res/some.png/fr  |  res/drawable-fr-hdpi/some.png
    for (Path candidate : candidates) {
      Qualifiers qualifiers = Qualifiers.from(candidate.getParent());

      String filename = candidate.getFileName().toString();
      Density density = qualifiers.density;
      String resDirectory = candidate.getParent().getParent().toString();
      String key = String.format("%s/%s/%s", resDirectory, filename, qualifiers.others);
      imageValues.put(key, density, candidate);
    }

    for (String key : imageValues.rowKeySet()) {
      Map<Density, Path> options = imageValues.row(key);
      Set<Density> available = options.keySet();

      // This is to make sure we preserve the existing structure of drawable/ files.
      // (An unqualified drawable/ dir acts as mdpi; if mdpi itself is absent,
      // retarget MDPI requests at NO_QUALIFIER.)
      Set<Density> targets = targetDensities;
      if (available.contains(Density.NO_QUALIFIER) && !available.contains(Density.MDPI)) {
        targets = Sets.newHashSet(Iterables.transform(targetDensities,
            input -> (input == Density.MDPI) ? Density.NO_QUALIFIER : input));
      }

      // We intend to keep all available targeted densities.
      Set<Density> toKeep = Sets.newHashSet(Sets.intersection(available, targets));

      // Make sure we have a decent fit for the largest target density.
      Density largestTarget = Density.ORDERING.max(targets);
      if (!available.contains(largestTarget)) {
        Density fallback = null;
        // Downscaling nine-patch drawables would require extra logic, not doing that yet.
        if (canDownscale && !options.values().iterator().next().toString().endsWith(".9.png")) {
          // Highest possible quality, because we'll downscale it.
          fallback = Density.ORDERING.max(available);
        } else {
          // We want to minimize size, so we'll go for the smallest available density that's
          // still larger than the missing one and, missing that, for the largest available.
          // (Iterating in descending order: the first pass seeds the largest, then each
          // smaller candidate that still exceeds the target replaces it.)
          for (Density candidate : Density.ORDERING.reverse().sortedCopy(available)) {
            if (fallback == null || Density.ORDERING.compare(candidate, largestTarget) > 0) {
              fallback = candidate;
            }
          }
        }
        toKeep.add(fallback);
      }

      // Mark remaining densities for removal.
      for (Density density : Sets.difference(available, toKeep)) {
        removals.add(options.get(density));
      }
    }

    return removals.build();
  }

  /**
   * Given a list of paths of available drawables, and a target screen density, returns a
   * {@link com.google.common.base.Predicate} that fails for drawables of a different
   * density, whenever they can be safely removed.
   * @param candidates list of available drawables
   * @param targetDensities set of e.g. {@code "mdpi"}, {@code "ldpi"} etc.
   * @param canDownscale if no exact match is available, retain the highest quality
   * @return a predicate as above
   */
  public static Predicate<Path> createImageDensityFilter(
      Collection<Path> candidates,
      Set<ResourceFilters.Density> targetDensities,
      boolean canDownscale) {
    final Set<Path> pathsToRemove = filterByDensity(candidates, targetDensities, canDownscale);
    return path -> !pathsToRemove.contains(path);
  }

  /** Returns the base resource type of a folder name, e.g. "drawable" for "drawable-hdpi". */
  private static String getResourceType(Path resourceFolder) {
    String parts[] = resourceFolder.getFileName().toString().split("-");
    return parts[0];
  }

  /**
   * Walks the path components looking for the first one that is a supported
   * resource folder; throws a human-readable error if none is found.
   */
  private static Path getResourceFolder(Path resourceFile) {
    for (int i = 0; i < resourceFile.getNameCount(); i++) {
      Path part = resourceFile.getName(i);
      if (SUPPORTED_RESOURCE_DIRECTORIES.contains(getResourceType(part))) {
        return resourceFile.subpath(0, i + 1);
      }
    }
    throw new HumanReadableException(
        "Resource file at %s is not in a valid resource folder.  See " +
            "http://developer.android.com/guide/topics/resources/providing-resources.html#table1 " +
            "for a list of valid resource folders.",
        resourceFile);
  }

  /**
   * Given a set of target densities, returns a {@link Predicate} that fails for any non-drawable
   * resource of a different density. Special consideration exists for the default density
   * ({@link Density#NO_QUALIFIER} when the target does not exists.
   */
  public static Predicate<Path> createDensityFilter(
      final ProjectFilesystem filesystem,
      final Set<Density> targetDensities) {
    return resourceFile -> {
      final Path resourceFolder = getResourceFolder(resourceFile);
      if (resourceFolder.getFileName().toString().startsWith("drawable")) {
        // Drawables are handled independently, so do not do anything with them.
        return true;
      }
      Density density = Qualifiers.from(resourceFolder).density;

      // We should include the resource in these situations:
      // * it is one of the target densities
      // * this is a "values" resource, which we include the fallback and any targets so we do not
      //   have to parse the XML to determine if there are differences.
      // * there is no resource at any one of the target densities, and this is the fallback.
      if (targetDensities.contains(density)) {
        return true;
      }

      if (density.equals(Density.NO_QUALIFIER)) {
        final String resourceType = getResourceType(resourceFolder);
        return resourceType.equals("values") ||
            FluentIterable.from(targetDensities).anyMatch(target -> {
              Path targetResourceFile = resourceFolder
                  .resolveSibling(String.format("%s-%s", resourceType, target))
                  .resolve(resourceFolder.relativize(resourceFile));
              return !filesystem.exists(targetResourceFile);
            });
      }

      return false;
    };
  }
}
/*
 * Copyright 2011 LMAX Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.lmax.disruptor;

import com.lmax.disruptor.support.*;
import org.junit.Test;

import java.util.concurrent.*;

/**
 * <pre>
 *
 * Sequence a series of events from multiple publishers going to one event processor.
 *
 * +----+
 * | P1 |------+
 * +----+      |
 *             v
 * +----+    +-----+
 * | P1 |--->| EP1 |
 * +----+    +-----+
 *             ^
 * +----+      |
 * | P3 |------+
 * +----+
 *
 *
 * Queue Based:
 * ============
 *
 * +----+  put
 * | P1 |------+
 * +----+      |
 *             v   take
 * +----+    +====+    +-----+
 * | P2 |--->| Q1 |<---| EP1 |
 * +----+    +====+    +-----+
 *             ^
 * +----+      |
 * | P3 |------+
 * +----+
 *
 * P1  - Publisher 1
 * P2  - Publisher 2
 * P3  - Publisher 3
 * Q1  - Queue 1
 * EP1 - EventProcessor 1
 *
 *
 * Disruptor:
 * ==========
 *             track to prevent wrap
 *             +--------------------+
 *             |                    |
 *             |                    v
 * +----+    +====+    +====+    +-----+
 * | P1 |--->| RB |<---| SB |    | EP1 |
 * +----+    +====+    +====+    +-----+
 *             ^   get    ^         |
 * +----+      |          |         |
 * | P2 |------+          +---------+
 * +----+      |            waitFor
 *             |
 * +----+      |
 * | P3 |------+
 * +----+
 *
 * P1  - Publisher 1
 * P2  - Publisher 2
 * P3  - Publisher 3
 * RB  - RingBuffer
 * SB  - SequenceBarrier
 * EP1 - EventProcessor 1
 *
 * </pre>
 *
 * Throughput benchmark comparing a BlockingQueue against the Disruptor for the
 * 3-publishers / 1-processor topology pictured above. Each pass starts the
 * publishers and the processor, releases them together via a CyclicBarrier,
 * waits for completion via a CountDownLatch, and reports ops/sec.
 */
public final class ThreePublisherToOneProcessorSequencedThroughputTest extends AbstractPerfTestQueueVsDisruptor {
    private static final int NUM_PUBLISHERS = 3;
    private static final int BUFFER_SIZE = 1024 * 8;
    // Total events across all publishers; each publisher sends ITERATIONS / NUM_PUBLISHERS.
    private static final long ITERATIONS = 1000L * 1000L * 100L;
    // One thread per publisher plus one for the processor.
    private final ExecutorService EXECUTOR = Executors.newFixedThreadPool(NUM_PUBLISHERS + 1);
    // Parties: the publishers plus the test thread, so all start simultaneously.
    private final CyclicBarrier cyclicBarrier = new CyclicBarrier(NUM_PUBLISHERS + 1);

    ///////////////////////////////////////////////////////////////////////////////////////////////
    // Queue-based fixture.

    private final BlockingQueue<Long> blockingQueue = new LinkedBlockingQueue<Long>(BUFFER_SIZE);
    // Expected final count: ITERATIONS rounded down to a multiple of NUM_PUBLISHERS, minus one
    // (counts are zero-based).
    private final ValueAdditionQueueProcessor queueProcessor =
        new ValueAdditionQueueProcessor(blockingQueue, ((ITERATIONS / NUM_PUBLISHERS) * NUM_PUBLISHERS) - 1L);
    private final ValueQueuePublisher[] valueQueuePublishers = new ValueQueuePublisher[NUM_PUBLISHERS];
    {
        for (int i = 0; i < NUM_PUBLISHERS; i++)
        {
            valueQueuePublishers[i] =
                new ValueQueuePublisher(cyclicBarrier, blockingQueue, ITERATIONS / NUM_PUBLISHERS);
        }
    }

    ///////////////////////////////////////////////////////////////////////////////////////////////
    // Disruptor-based fixture.

    private final RingBuffer<ValueEvent> ringBuffer =
        new RingBuffer<ValueEvent>(ValueEvent.EVENT_FACTORY,
                                   new MultiThreadedClaimStrategy(BUFFER_SIZE),
                                   new YieldingWaitStrategy());

    private final SequenceBarrier sequenceBarrier = ringBuffer.newBarrier();
    private final ValueAdditionEventHandler handler = new ValueAdditionEventHandler();
    private final BatchEventProcessor<ValueEvent> batchEventProcessor =
        new BatchEventProcessor<ValueEvent>(ringBuffer, sequenceBarrier, handler);
    private final ValuePublisher[] valuePublishers = new ValuePublisher[NUM_PUBLISHERS];
    {
        for (int i = 0; i < NUM_PUBLISHERS; i++)
        {
            valuePublishers[i] = new ValuePublisher(cyclicBarrier, ringBuffer, ITERATIONS / NUM_PUBLISHERS);
        }

        // Gate the ring buffer on the processor's sequence to prevent wrapping
        // past unconsumed events.
        ringBuffer.setGatingSequences(batchEventProcessor.getSequence());
    }

    ///////////////////////////////////////////////////////////////////////////////////////////////

    @Override
    protected int getRequiredProcessorCount()
    {
        return 4;
    }

    @Test
    @Override
    public void shouldCompareDisruptorVsQueues()
        throws Exception
    {
        testImplementations();
    }

    /**
     * One timed pass over the BlockingQueue implementation.
     *
     * @return operations per second for this pass
     */
    @Override
    protected long runQueuePass() throws Exception
    {
        final CountDownLatch latch = new CountDownLatch(1);
        queueProcessor.reset(latch);

        Future[] futures = new Future[NUM_PUBLISHERS];
        for (int i = 0; i < NUM_PUBLISHERS; i++)
        {
            futures[i] = EXECUTOR.submit(valueQueuePublishers[i]);
        }
        Future processorFuture = EXECUTOR.submit(queueProcessor);

        long start = System.currentTimeMillis();
        // Release all publishers at once; timing starts just before the barrier trips.
        cyclicBarrier.await();

        for (int i = 0; i < NUM_PUBLISHERS; i++)
        {
            futures[i].get();
        }

        latch.await();

        long opsPerSecond = (ITERATIONS * 1000L) / (System.currentTimeMillis() - start);
        queueProcessor.halt();
        processorFuture.cancel(true);

        return opsPerSecond;
    }

    /**
     * One timed pass over the Disruptor implementation.
     *
     * @return operations per second for this pass
     */
    @Override
    protected long runDisruptorPass() throws Exception
    {
        final CountDownLatch latch = new CountDownLatch(1);
        // Expected end sequence: current sequence plus the events all publishers will claim.
        handler.reset(latch, batchEventProcessor.getSequence().get()
            + ((ITERATIONS / NUM_PUBLISHERS) * NUM_PUBLISHERS));

        Future[] futures = new Future[NUM_PUBLISHERS];
        for (int i = 0; i < NUM_PUBLISHERS; i++)
        {
            futures[i] = EXECUTOR.submit(valuePublishers[i]);
        }
        EXECUTOR.submit(batchEventProcessor);

        long start = System.currentTimeMillis();
        // Release all publishers at once; timing starts just before the barrier trips.
        cyclicBarrier.await();

        for (int i = 0; i < NUM_PUBLISHERS; i++)
        {
            futures[i].get();
        }

        latch.await();

        long opsPerSecond = (ITERATIONS * 1000L) / (System.currentTimeMillis() - start);
        batchEventProcessor.halt();

        return opsPerSecond;
    }
}
package com.frc2013.rmr662.climber;

import com.frc2013.rmr662.main.TeleopMode;
import com.frc2013.rmr662.system.generic.Component;
import com.frc2013.rmr662.wrappers.Button;
import com.frc2013.rmr662.wrappers.RMRDigitalInput;
import com.frc2013.rmr662.wrappers.RMRJaguar;

import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.Servo;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;

/**
 * Climber component for the 2013 robot. Drives a carriage motor up and down
 * between limit switches, counts hook engagements (with debouncing) to know
 * how far the robot has climbed, and supports an operator-controlled manual
 * mode plus a servo lock. An abort button stops the motor and ends the
 * component.
 *
 * @author jackson
 */
public class NewClimber extends Component {
    // Debounce window (ms): a hook-release within this time of the engage is
    // treated as switch bounce and ignored.
    private static final int TIME_AT_WHICH_WE_CAN_BE_SATISFIED_THAT_THE_HOOKS_HAVE_STOPPED_BOUNCING = 1000;
    // Number of hook engagements after which climbing stops automatically.
    private static final int STOP_COUNT = 6;

    // Motor constants
    private final int MOTOR_CHANNEL = 3;
    private final double MOTOR_MULTIPLIER = 0.5;
    // NOTE(review): MOTOR_UP and MOTOR_DOWN are both +1.0 -- presumably the
    // direction/scaling lives in RMRJaguar via MOTOR_MULTIPLIER, but the same
    // sign for both directions looks suspicious; confirm against the wrapper.
    private final double MOTOR_UP = 1.0;
    private final double MOTOR_DOWN = 1.0;

    // Servo constants
    private static final int SERVO_CHANNEL = 4;
    private static final double SERVO_UNLOCK = 1.0;
    private static final double SERVO_LOCK = 0.0;

    // Hook input channels
    private final int LEFT_CHANNEL = 1;
    private final int RIGHT_CHANNEL = 2;
    private final int TOP_CHANNEL = 14;
    private final int BOTTOM_CHANNEL = 12;

    // button indices
    private final int MODE_BUTTON = 1;
    private final int ABORT_BUTTON = 2;
    private final int SERVO_BUTTON = 3;
    private final int OPERATOR_CONTROL_AXIS = 3;

    // Hardware stuff
    private final RMRDigitalInput left;
    private final RMRDigitalInput right;
    private final RMRDigitalInput top;
    private final RMRDigitalInput bottom;
    private final RMRJaguar motor;
    private final Servo servo;
    private final Joystick xbox;
    private final Button modeButton;
    private final Button servoButton;

    // Last hook states
    private boolean leftOnLast = false;
    private boolean rightOnLast = false;
    private boolean hooksOnLast = false;

    // Motor state
    private boolean motorGoingUp = false;

    // Servo state
    private boolean servoLocked = false;

    // Code states
    private boolean climbing = false;
    private boolean stopped = false;
    private boolean isInAutoMode = true;

    // Hook press counter
    private int counter = 0;
    // Timestamp (ms) of the most recent hook engagement, for debouncing.
    private long lastHookEngageTime = 0;

    public NewClimber() {
        motor = new RMRJaguar(MOTOR_CHANNEL, MOTOR_MULTIPLIER);
        left = new RMRDigitalInput(LEFT_CHANNEL, false);
        right = new RMRDigitalInput(RIGHT_CHANNEL, false);
        top = new RMRDigitalInput(TOP_CHANNEL, false);
        bottom = new RMRDigitalInput(BOTTOM_CHANNEL, false);
        xbox = new Joystick(TeleopMode.XBOX_JOYSTICK_PORT);
        modeButton = new Button(xbox, MODE_BUTTON);
        servo = new Servo(SERVO_CHANNEL);
        servoButton = new Button(xbox, SERVO_BUTTON);
    }

    // ========================== UTILITY METHODS ========================

    // Toggle between auto and manual mode
    private void toggleMode() {
        isInAutoMode = !isInAutoMode;
        SmartDashboard.putBoolean("climb_mode_is_auto", isInAutoMode);
    }

    private void toggleServoState() {
        setServoState(!servoLocked);
    }

    /**
     * Moves the lock servo only when the requested state differs from the
     * current one, and mirrors the state to the dashboard/console.
     */
    private void setServoState(boolean locked) {
        if (locked && !servoLocked) {
            SmartDashboard.putBoolean("servo_is_locked", true);
            System.out.println("servo_locked = true");
            servo.set(SERVO_LOCK);
            servoLocked = true;
        } else if (!locked && servoLocked) {
            SmartDashboard.putBoolean("servo_is_locked", false);
            System.out.println("servo_locked = false");
            servo.set(SERVO_UNLOCK);
            servoLocked = false;
        }
    }

    // ========================== MOTOR STUFF =======================

    /**
     * Reverses the carriage at the travel limits: at the top switch, drive
     * down and toggle the servo lock; at the bottom switch, drive up.
     */
    private void updateMotor() {
        if (motorGoingUp && top.get()) {
            // Motor is at top
            motor.set(MOTOR_DOWN);
            motorGoingUp = false;
            toggleServoState();
        } else if (!motorGoingUp && bottom.get()) {
            // Motor is at bottom
            motor.set(MOTOR_UP);
            motorGoingUp = true;
        }
    }

    // ======================== HOOK COUNTER STUFF ================

    // Check if both sides are on; also logs left/right state changes.
    private boolean areBothHooksOn() {
        final boolean leftOn = left.get();
        final boolean rightOn = right.get();
        if (leftOn != leftOnLast) {
            SmartDashboard.putBoolean("left_hook_state", leftOn);
            System.out.println("left = " + leftOn);
            leftOnLast = leftOn;
        }
        if (rightOn != rightOnLast) {
            SmartDashboard.putBoolean("right_hook_state", rightOn);
            System.out.println("right = " + rightOn);
            rightOnLast = rightOn;
        }
        return leftOn && rightOn;
    }

    /**
     * Counts rising edges of "both hooks engaged", ignoring releases that occur
     * within the debounce window, and stops the climb after STOP_COUNT
     * engagements.
     */
    private void updateHookCounter() {
        final boolean hooksOn = areBothHooksOn();
        if (!hooksOnLast && hooksOn) {
            counter++;
            hooksOnLast = true;
            lastHookEngageTime = System.currentTimeMillis();
        } else if (hooksOnLast && !hooksOn) {
            // Only accept a release as real once the bounce window has passed.
            if (System.currentTimeMillis() - lastHookEngageTime
                    > TIME_AT_WHICH_WE_CAN_BE_SATISFIED_THAT_THE_HOOKS_HAVE_STOPPED_BOUNCING) {
                hooksOnLast = false;
            }
        }
        if (counter == STOP_COUNT) {
            stopped = true;
            motor.set(0.0);
        }
    }

    // =========================== OPERATOR CONTROL ========================

    // Manual mode: servo toggle on button press, motor follows the joystick axis.
    private void operator() {
        if (servoButton.wasPressed()) {
            toggleServoState();
        }
        motor.set(xbox.getRawAxis(OPERATOR_CONTROL_AXIS));
    }

    // ======================== COMPONENT METHODS =====================

    /**
     * Periodic update: handles abort and mode toggling, starts the climb when
     * both hooks first engage, then runs either the automatic state machine or
     * operator control.
     */
    protected void update() {
        if (xbox.getRawButton(ABORT_BUTTON)) {
            System.out.println("ABORT!");
            motor.set(0.0);
            end();
            return;
        }

        if (modeButton.wasPressed()) {
            toggleMode();
            System.out.println("Toggle mode. isAutoMode = " + isInAutoMode);
        }

        if (!climbing && areBothHooksOn()) {
            servo.set(SERVO_UNLOCK); // make sure hooks won't break when carriage goes up at very beginning
            motor.set(MOTOR_UP);
            motorGoingUp = true;
            climbing = true;
        }

        if (climbing) {
            if (isInAutoMode && !stopped) {
                updateMotor();
                updateHookCounter();
            } else if (!isInAutoMode) {
                operator(); // Operator control
            }
        }
    }

    /** Shutdown: stop the motor and free all hardware resources. */
    protected void onEnd() {
        motor.set(0.0);
        motor.free();
        servo.free();
        top.free();
        bottom.free();
        left.free();
        right.free();
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.java.codeInspection;

import com.intellij.JavaTestUtil;
import com.intellij.codeInsight.NullableNotNullManager;
import com.intellij.codeInspection.nullable.NullableStuffInspection;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.util.Disposer;
import com.intellij.testFramework.IdeaTestUtil;
import com.intellij.testFramework.LightProjectDescriptor;
import com.intellij.testFramework.PsiTestUtil;
import com.intellij.testFramework.fixtures.DefaultLightProjectDescriptor;
import com.intellij.testFramework.fixtures.LightCodeInsightFixtureTestCase;
import org.jetbrains.annotations.NotNull;

/**
 * Highlighting tests for {@link NullableStuffInspection}.
 *
 * <p>Each {@code testXxx} method loads {@code Xxx.java} from
 * {@code /inspection/nullableProblems/} (the test-data file name is derived
 * from the test method name via {@code getTestName(false)}) and checks that
 * the inspection produces exactly the warnings marked up in that file.
 * Several tests additionally pre-configure inspection flags or register
 * extra nullability annotations before running.
 */
public class NullableStuffInspectionTest extends LightCodeInsightFixtureTestCase {
  // Project descriptor using a mock JDK 1.8 with external JDK annotations
  // attached, so external @NotNull/@Nullable on JDK methods are visible.
  private static final DefaultLightProjectDescriptor PROJECT_DESCRIPTOR = new DefaultLightProjectDescriptor() {
    @Override
    public Sdk getSdk() {
      return PsiTestUtil.addJdkAnnotations(IdeaTestUtil.getMockJdk18());
    }
  };

  // Fresh inspection instance per test; individual tests tweak its flags.
  private NullableStuffInspection myInspection = new NullableStuffInspection();

  @NotNull
  @Override
  protected LightProjectDescriptor getProjectDescriptor() {
    return PROJECT_DESCRIPTOR;
  }

  @Override
  protected String getTestDataPath() {
    return JavaTestUtil.getJavaTestDataPath() + "/inspection/nullableProblems/";
  }

  /** Runs the inspection over the test-data file named after the current test. */
  private void doTest() {
    myFixture.enableInspections(myInspection);
    myFixture.testHighlighting(true, false, true, getTestName(false) + ".java");
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();
    // Most tests don't exercise this reporting mode; the ones that do
    // re-enable it explicitly.
    myInspection.REPORT_ANNOTATION_NOT_PROPAGATED_TO_OVERRIDERS = false;
  }

  @Override
  protected void tearDown() throws Exception {
    // Drop the inspection reference so it can't leak between tests.
    myInspection = null;
    super.tearDown();
  }

  public void testProblems() { doTest();}

  public void testAnnotatingPrimitivesTypeUse() {
    DataFlowInspection8Test.setupTypeUseAnnotations("typeUse", myFixture);
    doTest();
  }

  public void testAnnotatingPrimitivesAmbiguous() {
    DataFlowInspection8Test.setupAmbiguousAnnotations("withTypeUse", myFixture);
    doTest();
  }

  public void testProblems2() { doTest(); }
  public void testNullableFieldNotnullParam() { doTest(); }
  public void testNotNullFieldNullableParam() { doTest(); }
  public void testNotNullCustomException() { doTest(); }
  public void testNotNullFieldNotInitialized() { doTest(); }
  public void testNotNullFieldInitializedInLambda() { doTest(); }
  public void testNotNullFieldNotInitializedInOneConstructor() { doTest(); }

  public void testNotNullFieldNotInitializedSetting() {
    myInspection.REQUIRE_NOTNULL_FIELDS_INITIALIZED = false;
    doTest();
  }

  public void testNotNullByDefaultFieldNotInitialized() {
    DataFlowInspectionTest.addJavaxNullabilityAnnotations(myFixture);
    doTest();
  }

  public void testNotNullAnnotationChecksInChildClassMethods() { doTest(); }
  public void testGetterSetterProblems() { doTest(); }
  public void testNonTrivialGettersSetters() { doTest(); }

  public void testOverriddenMethods() {
    myInspection.REPORT_ANNOTATION_NOT_PROPAGATED_TO_OVERRIDERS = true;
    doTest();
  }

  public void testOverriddenViaMethodReference() { doTest(); }
  public void testOverridingExternalNotNull() { doTest(); }

  public void testIgnoreExternalNotNull() {
    myInspection.IGNORE_EXTERNAL_SUPER_NOTNULL = true;
    doTest();
  }

  public void testNotNullParameterOverridesNotAnnotated() {
    myInspection.REPORT_NOTNULL_PARAMETERS_OVERRIDES_NOT_ANNOTATED = true;
    doTest();
  }

  // A @ParametersAreNonnullByDefault package default on the super interface
  // must make un-annotated overriding parameters effectively @NotNull.
  public void testHonorSuperParameterDefault() {
    DataFlowInspectionTest.addJavaxNullabilityAnnotations(myFixture);
    DataFlowInspectionTest.addJavaxDefaultNullabilityAnnotations(myFixture);
    myFixture.addFileToProject("foo/package-info.java", "@javax.annotation.ParametersAreNonnullByDefault package foo;");
    myFixture.addClass("package foo; import javax.annotation.*; public interface NullableFunction { void fun(@Nullable Object o); }");
    myFixture.addClass("package foo; public interface AnyFunction { void fun(Object o); }");
    doTest();
  }

  public void testHonorThisParameterDefault() {
    DataFlowInspectionTest.addJavaxNullabilityAnnotations(myFixture);
    DataFlowInspectionTest.addJavaxDefaultNullabilityAnnotations(myFixture);
    myFixture.addFileToProject("foo/package-info.java", "@javax.annotation.ParametersAreNonnullByDefault package foo;");
    // The file under test must live inside package foo for the default to apply.
    myFixture.configureFromExistingVirtualFile(myFixture.copyFileToProject(getTestName(false) + ".java", "foo/Classes.java"));
    myFixture.enableInspections(myInspection);
    myFixture.checkHighlighting(true, false, true);
  }

  // User-defined @TypeQualifierDefault annotation must be honored like the
  // standard javax defaults.
  public void testHonorCustomDefault() {
    DataFlowInspectionTest.addJavaxNullabilityAnnotations(myFixture);
    myFixture.addClass("package foo;" +
                       "import static java.lang.annotation.ElementType.*;" +
                       "@javax.annotation.meta.TypeQualifierDefault({PARAMETER, FIELD, METHOD, LOCAL_VARIABLE}) " +
                       "@javax.annotation.Nonnull " +
                       "public @interface NotNullByDefault {}");
    myFixture.addFileToProject("foo/package-info.java", "@NotNullByDefault package foo;");
    myFixture.configureFromExistingVirtualFile(myFixture.copyFileToProject(getTestName(false) + ".java", "foo/Classes.java"));
    myFixture.enableInspections(myInspection);
    myFixture.checkHighlighting(true, false, true);
  }

  // A custom nullable annotation registered at runtime must override a
  // package-level non-null default. The Disposer registration restores the
  // manager's nullable list after the test.
  public void testOverrideCustomDefault() {
    DataFlowInspectionTest.addJavaxNullabilityAnnotations(myFixture);
    myFixture.addClass("package custom;" +
                       "public @interface CheckForNull {}");
    final NullableNotNullManager nnnManager = NullableNotNullManager.getInstance(getProject());
    nnnManager.setNullables("custom.CheckForNull");
    Disposer.register(myFixture.getTestRootDisposable(), nnnManager::setNullables);
    myFixture.addClass("package foo;" +
                       "import static java.lang.annotation.ElementType.*;" +
                       "@javax.annotation.meta.TypeQualifierDefault(METHOD) " +
                       "@javax.annotation.Nonnull " +
                       "public @interface ReturnValuesAreNonnullByDefault {}");
    myFixture.addFileToProject("foo/package-info.java", "@ReturnValuesAreNonnullByDefault package foo;");
    myFixture.configureFromExistingVirtualFile(myFixture.copyFileToProject(getTestName(false) + ".java", "foo/Classes.java"));
    myFixture.enableInspections(myInspection);
    myFixture.checkHighlighting(true, false, true);
  }

  public void testNullPassedToNotNullParameter() { doTest(); }
  public void testNullPassedToNotNullConstructorParameter() { doTest(); }
  public void testNullPassedAsPartNotNullAnnotatedOfVarArg() { doTest(); }

  public void testHonorParameterDefaultInSetters() {
    DataFlowInspectionTest.addJavaxNullabilityAnnotations(myFixture);
    DataFlowInspectionTest.addJavaxDefaultNullabilityAnnotations(myFixture);
    myFixture.addFileToProject("foo/package-info.java", "@javax.annotation.ParametersAreNonnullByDefault package foo;");
    myFixture.configureFromExistingVirtualFile(myFixture.copyFileToProject(getTestName(false) + ".java", "foo/Classes.java"));
    myFixture.enableInspections(myInspection);
    myFixture.checkHighlighting(true, false, true);
  }

  // Bean-validation style @NotNull (javax.annotation.constraints) registered
  // as a custom nullability annotation pair.
  public void testBeanValidationNotNull() {
    myFixture.addClass("package javax.annotation.constraints; public @interface NotNull{}");
    DataFlowInspection8Test.setCustomAnnotations(getProject(), getTestRootDisposable(), "javax.annotation.constraints.NotNull", "javax.annotation.constraints.Nullable");
    myInspection.REPORT_ANNOTATION_NOT_PROPAGATED_TO_OVERRIDERS = true;
    doTest();
  }

  public void testForeachParameterNullability() {
    DataFlowInspection8Test.setupTypeUseAnnotations("typeUse", myFixture);
    doTest();
  }

  public void testPassingNullableCollectionWhereNotNullIsExpected() {
    DataFlowInspection8Test.setupTypeUseAnnotations("typeUse", myFixture);
    doTest();
  }

  public void testPassingNullableMapWhereNotNullIsExpected() {
    DataFlowInspection8Test.setupTypeUseAnnotations("typeUse", myFixture);
    doTest();
  }

  public void testPassingNullableMapValueWhereNotNullIsExpected() {
    DataFlowInspection8Test.setupTypeUseAnnotations("typeUse", myFixture);
    doTest();
  }

  public void testNotNullCollectionItemWithNullableSuperType() {
    DataFlowInspection8Test.setupTypeUseAnnotations("typeUse", myFixture);
    doTest();
  }

  public void testNotNullTypeArgumentWithNullableSuperType() {
    DataFlowInspection8Test.setupTypeUseAnnotations("typeUse", myFixture);
    doTest();
  }

  // Also verifies the "Annotate" quick fix result against the _after file.
  public void testAnnotateQuickFixOnMethodReference() {
    doTest();
    myFixture.launchAction(myFixture.findSingleIntention("Annotate"));
    myFixture.checkResultByFile(getTestName(false) + "_after.java");
  }
}
/*
 * Copyright 2012 Sebastian Annies, Hamburg
 *
 * Licensed under the Apache License, Version 2.0 (the License);
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.googlecode.mp4parser.authoring.builder;

import com.coremedia.iso.BoxParser;
import com.coremedia.iso.IsoFile;
import com.coremedia.iso.IsoTypeWriter;
import com.coremedia.iso.boxes.*;
import com.googlecode.mp4parser.BasicContainer;
import com.googlecode.mp4parser.DataSource;
import com.googlecode.mp4parser.authoring.Movie;
import com.googlecode.mp4parser.authoring.Sample;
import com.googlecode.mp4parser.authoring.Track;
import com.googlecode.mp4parser.authoring.tracks.CencEncyprtedTrack;
import com.googlecode.mp4parser.boxes.cenc.CencSampleAuxiliaryDataFormat;
import com.googlecode.mp4parser.boxes.dece.SampleEncryptionBox;
import com.googlecode.mp4parser.util.Path;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;

import static com.googlecode.mp4parser.util.CastUtils.l2i;

/**
 * Creates a plain MP4 file from a video. Plain as plain can be.
 *
 * <p>Builds a non-fragmented ISO-BMFF file: an 'ftyp' box, a 'moov' box with
 * one 'trak' per input track (complete sample tables), and a single
 * interleaved 'mdat'. Samples of all tracks are interleaved chunk-wise; the
 * chunk boundaries come from a {@link FragmentIntersectionFinder} (defaults
 * to roughly two-second chunks).
 *
 * <p>Not thread-safe: per-build state lives in instance fields, so each
 * builder instance should be used for a single {@link #build(Movie)} call.
 */
public class DefaultMp4Builder implements Mp4Builder {

    // 'stco' boxes created during createStco(); they initially hold offsets
    // relative to the start of the mdat payload and are patched to absolute
    // file offsets at the end of build().
    Set<StaticChunkOffsetBox> chunkOffsetBoxes = new HashSet<StaticChunkOffsetBox>();
    // 'saio' boxes (CENC auxiliary-info offsets) that likewise need their
    // offsets rebased to absolute file positions at the end of build().
    Set<SampleAuxiliaryInformationOffsetsBox> sampleAuxiliaryInformationOffsetsBoxes = new HashSet<SampleAuxiliaryInformationOffsetsBox>();
    private static Logger LOG = Logger.getLogger(DefaultMp4Builder.class.getName());

    // Per-build caches: samples and sample sizes for each track.
    HashMap<Track, List<Sample>> track2Sample = new HashMap<Track, List<Sample>>();
    HashMap<Track, long[]> track2SampleSizes = new HashMap<Track, long[]>();
    private FragmentIntersectionFinder intersectionFinder;

    /** Overrides the strategy that decides where chunk boundaries fall. */
    public void setIntersectionFinder(FragmentIntersectionFinder intersectionFinder) {
        this.intersectionFinder = intersectionFinder;
    }

    /**
     * {@inheritDoc}
     */
    public Container build(Movie movie) {
        if (intersectionFinder == null) {
            intersectionFinder = new TwoSecondIntersectionFinder(movie, 2);
        }
        LOG.fine("Creating movie " + movie);
        // Cache samples and their sizes for every track up front.
        for (Track track : movie.getTracks()) {
            // getting the samples may be a time consuming activity
            List<Sample> samples = track.getSamples();
            putSamples(track, samples);
            long[] sizes = new long[samples.size()];
            for (int i = 0; i < sizes.length; i++) {
                Sample b = samples.get(i);
                sizes[i] = b.getSize();
            }
            track2SampleSizes.put(track, sizes);
        }

        BasicContainer isoFile = new BasicContainer();
        isoFile.addBox(createFileTypeBox(movie));

        // Chunk layout (number of samples per chunk) for every track.
        Map<Track, int[]> chunks = new HashMap<Track, int[]>();
        for (Track track : movie.getTracks()) {
            chunks.put(track, getChunkSizes(track, movie));
        }

        Box moov = createMovieBox(movie, chunks);
        isoFile.addBox(moov);

        // Total payload size = sum of all sample sizes over all 'stsz' boxes.
        List<Box> stszs = Path.getPaths(moov, "trak/mdia/minf/stbl/stsz");
        long contentSize = 0;
        for (Box stsz : stszs) {
            contentSize += sum(((SampleSizeBox) stsz).getSampleSizes());
        }

        InterleaveChunkMdat mdat = new InterleaveChunkMdat(movie, chunks, contentSize);
        isoFile.addBox(mdat);

        /*
         dataOffset is where the first sample starts. In this special mdat the samples always start at offset 16
         so that we can use the same offset for large boxes and small boxes
          */
        long dataOffset = mdat.getDataOffset();
        // Rebase every chunk offset from mdat-payload-relative to absolute.
        for (StaticChunkOffsetBox chunkOffsetBox : chunkOffsetBoxes) {
            long[] offsets = chunkOffsetBox.getChunkOffsets();
            for (int i = 0; i < offsets.length; i++) {
                offsets[i] += dataOffset;
            }
        }
        // Rebase CENC auxiliary-info offsets: walk up the box tree adding the
        // sizes of all preceding siblings on each level.
        for (SampleAuxiliaryInformationOffsetsBox saio : sampleAuxiliaryInformationOffsetsBoxes) {
            long offset = saio.getSize(); // the calculation is systematically wrong by 4, I don't want to debug why. Just a quick correction --san 14.May.13
            offset += 4 + 4 + 4 + 4 + 4 + 24;
            // size of all header we were missing otherwise (moov, trak, mdia, minf, stbl)
            Object b = saio;
            do {
                Object current = b;
                b = ((Box) b).getParent();
                // Add sizes of all siblings that precede the box we came from.
                for (Box box : ((Container) b).getBoxes()) {
                    if (box == current) {
                        break;
                    }
                    offset += box.getSize();
                }
            } while (b instanceof Box);

            long[] saioOffsets = saio.getOffsets();
            for (int i = 0; i < saioOffsets.length; i++) {
                saioOffsets[i] = saioOffsets[i] + offset;
            }
            saio.setOffsets(saioOffsets);
        }

        return isoFile;
    }

    /** Caches a track's samples; returns the previous mapping, if any. */
    protected List<Sample> putSamples(Track track, List<Sample> samples) {
        return track2Sample.put(track, samples);
    }

    /** Creates the 'ftyp' box (major brand "isom", compatible isom/iso2/avc1). */
    protected FileTypeBox createFileTypeBox(Movie movie) {
        List<String> minorBrands = new LinkedList<String>();
        minorBrands.add("isom");
        minorBrands.add("iso2");
        minorBrands.add("avc1");
        return new FileTypeBox("isom", 0, minorBrands);
    }

    /**
     * Creates the 'moov' box: movie header ('mvhd' with overall duration in
     * the common movie timescale and the next free track id) plus one 'trak'
     * box per track and an optional 'udta'.
     */
    protected MovieBox createMovieBox(Movie movie, Map<Track, int[]> chunks) {
        MovieBox movieBox = new MovieBox();
        MovieHeaderBox mvhd = new MovieHeaderBox();

        mvhd.setCreationTime(new Date());
        mvhd.setModificationTime(new Date());
        mvhd.setMatrix(movie.getMatrix());
        long movieTimeScale = getTimescale(movie);
        long duration = 0;

        // Movie duration = longest track duration, rescaled to movie timescale.
        for (Track track : movie.getTracks()) {
            long tracksDuration = track.getDuration() * movieTimeScale / track.getTrackMetaData().getTimescale();
            if (tracksDuration > duration) {
                duration = tracksDuration;
            }
        }

        mvhd.setDuration(duration);
        mvhd.setTimescale(movieTimeScale);
        // find the next available trackId
        long nextTrackId = 0;
        for (Track track : movie.getTracks()) {
            nextTrackId = nextTrackId < track.getTrackMetaData().getTrackId() ? track.getTrackMetaData().getTrackId() : nextTrackId;
        }
        mvhd.setNextTrackId(++nextTrackId);

        movieBox.addBox(mvhd);
        for (Track track : movie.getTracks()) {
            movieBox.addBox(createTrackBox(track, movie, chunks));
        }
        // metadata here
        Box udta = createUdta(movie);
        if (udta != null) {
            movieBox.addBox(udta);
        }
        return movieBox;
    }

    /**
     * Override to create a user data box that may contain metadata.
     *
     * @param movie source movie
     * @return a 'udta' box or <code>null</code> if none provided
     */
    protected Box createUdta(Movie movie) {
        return null;
    }

    /**
     * Creates one 'trak' box: track header ('tkhd'), media box ('mdia') with
     * media header, handler, handler-specific media header, data reference
     * (samples are in this file) and the full sample table ('stbl').
     */
    protected TrackBox createTrackBox(Track track, Movie movie, Map<Track, int[]> chunks) {
        TrackBox trackBox = new TrackBox();
        TrackHeaderBox tkhd = new TrackHeaderBox();

        tkhd.setEnabled(true);
        tkhd.setInMovie(true);
        tkhd.setInPreview(true);
        tkhd.setInPoster(true);
        tkhd.setMatrix(track.getTrackMetaData().getMatrix());

        tkhd.setAlternateGroup(track.getTrackMetaData().getGroup());
        tkhd.setCreationTime(track.getTrackMetaData().getCreationTime());
        // We need to take edit list box into account in trackheader duration
        // but as long as I don't support edit list boxes it is sufficient to
        // just translate media duration to movie timescale
        tkhd.setDuration(track.getDuration() * getTimescale(movie) / track.getTrackMetaData().getTimescale());
        tkhd.setHeight(track.getTrackMetaData().getHeight());
        tkhd.setWidth(track.getTrackMetaData().getWidth());
        tkhd.setLayer(track.getTrackMetaData().getLayer());
        tkhd.setModificationTime(new Date());
        tkhd.setTrackId(track.getTrackMetaData().getTrackId());
        tkhd.setVolume(track.getTrackMetaData().getVolume());

        trackBox.addBox(tkhd);

        /*
        EditBox edit = new EditBox();
        EditListBox editListBox = new EditListBox();
        editListBox.setEntries(Collections.singletonList(
                new EditListBox.Entry(editListBox, (long) (track.getTrackMetaData().getStartTime() * getTimescale(movie)), -1, 1)));
        edit.addBox(editListBox);
        trackBox.addBox(edit);
        */

        MediaBox mdia = new MediaBox();
        trackBox.addBox(mdia);
        MediaHeaderBox mdhd = new MediaHeaderBox();
        mdhd.setCreationTime(track.getTrackMetaData().getCreationTime());
        mdhd.setDuration(track.getDuration());
        mdhd.setTimescale(track.getTrackMetaData().getTimescale());
        mdhd.setLanguage(track.getTrackMetaData().getLanguage());
        mdia.addBox(mdhd);
        HandlerBox hdlr = new HandlerBox();
        mdia.addBox(hdlr);

        hdlr.setHandlerType(track.getHandler());

        MediaInformationBox minf = new MediaInformationBox();
        // Pick the media header that matches the handler type.
        if (track.getHandler().equals("vide")) {
            minf.addBox(new VideoMediaHeaderBox());
        } else if (track.getHandler().equals("soun")) {
            minf.addBox(new SoundMediaHeaderBox());
        } else if (track.getHandler().equals("text")) {
            minf.addBox(new NullMediaHeaderBox());
        } else if (track.getHandler().equals("subt")) {
            minf.addBox(new SubtitleMediaHeaderBox());
        } else if (track.getHandler().equals("hint")) {
            minf.addBox(new HintMediaHeaderBox());
        } else if (track.getHandler().equals("sbtl")) {
            minf.addBox(new NullMediaHeaderBox());
        }

        // dinf: all these three boxes tell us is that the actual
        // data is in the current file and not somewhere external
        DataInformationBox dinf = new DataInformationBox();
        DataReferenceBox dref = new DataReferenceBox();
        dinf.addBox(dref);
        DataEntryUrlBox url = new DataEntryUrlBox();
        url.setFlags(1);
        dref.addBox(url);
        minf.addBox(dinf);

        Box stbl = createStbl(track, movie, chunks);
        minf.addBox(stbl);
        mdia.addBox(minf);
        return trackBox;
    }

    /**
     * Assembles the sample table ('stbl') from its sub-boxes, plus CENC
     * boxes when the track is encrypted.
     */
    protected Box createStbl(Track track, Movie movie, Map<Track, int[]> chunks) {
        SampleTableBox stbl = new SampleTableBox();

        createStsd(track, stbl);
        createStts(track, stbl);
        createCtts(track, stbl);
        createStss(track, stbl);
        createSdtp(track, stbl);
        createStsc(track, chunks, stbl);
        createStsz(track, stbl);
        createStco(track, movie, chunks, stbl);

        if (track instanceof CencEncyprtedTrack) {
            createCencBoxes((CencEncyprtedTrack) track, stbl, chunks.get(track));
        }

        return stbl;
    }

    /**
     * Creates the CENC boxes ('saiz', 'saio', 'senc') for an encrypted
     * track. The 'saio' offsets written here are relative and are rebased to
     * absolute file offsets at the end of build().
     */
    protected void createCencBoxes(CencEncyprtedTrack track, SampleTableBox stbl, int[] chunkSizes) {
        SampleAuxiliaryInformationSizesBox saiz = new SampleAuxiliaryInformationSizesBox();
        saiz.setAuxInfoType("cenc");
        saiz.setFlags(1);
        List<CencSampleAuxiliaryDataFormat> sampleEncryptionEntries = track.getSampleEncryptionEntries();
        if (track.hasSubSampleEncryption()) {
            // Variable per-sample auxiliary-info sizes.
            short[] sizes = new short[sampleEncryptionEntries.size()];
            for (int i = 0; i < sizes.length; i++) {
                sizes[i] = (short) sampleEncryptionEntries.get(i).getSize();
            }
            saiz.setSampleInfoSizes(sizes);
        } else {
            saiz.setDefaultSampleInfoSize(8); // 8 bytes iv
            saiz.setSampleCount(track.getSamples().size());
        }

        SampleAuxiliaryInformationOffsetsBox saio = new SampleAuxiliaryInformationOffsetsBox();
        SampleEncryptionBox senc = new SampleEncryptionBox();
        senc.setSubSampleEncryption(track.hasSubSampleEncryption());
        senc.setEntries(sampleEncryptionEntries);
        // One offset per chunk, pointing at that chunk's first entry inside 'senc'.
        // NOTE(review): the offsets array is sized by sample count but only the
        // first chunkSizes.length elements are filled — verify this trailing
        // padding of zeros is intended by SampleAuxiliaryInformationOffsetsBox.
        long offset = senc.getOffsetToFirstIV();
        int index = 0;
        long[] offsets = new long[track.getSamples().size()];

        for (int i = 0; i < chunkSizes.length; i++) {
            offsets[i] = offset;
            for (int j = 0; j < chunkSizes[i]; j++) {
                offset += sampleEncryptionEntries.get(index++).getSize();
            }
        }
        saio.setOffsets(offsets);
        stbl.addBox(saiz);
        stbl.addBox(saio);
        stbl.addBox(senc);
        sampleAuxiliaryInformationOffsetsBoxes.add(saio);
    }

    /** Adds the track's existing sample description box ('stsd'). */
    protected void createStsd(Track track, SampleTableBox stbl) {
        stbl.addBox(track.getSampleDescriptionBox());
    }

    /**
     * Creates the chunk offset box ('stco'). Offsets computed here are
     * relative to the start of the mdat payload; build() adds the absolute
     * payload start afterwards.
     */
    protected void createStco(Track track, Movie movie, Map<Track, int[]> chunks, SampleTableBox stbl) {
        int[] tracksChunkSizes = chunks.get(track);

        // The ChunkOffsetBox we create here is just a stub
        // since we haven't created the whole structure we can't tell where the
        // first chunk starts (mdat box). So I just let the chunk offset
        // start at zero and I will add the mdat offset later.
        StaticChunkOffsetBox stco = new StaticChunkOffsetBox();
        this.chunkOffsetBoxes.add(stco);
        long offset = 0;
        long[] chunkOffset = new long[tracksChunkSizes.length];
        // all tracks have the same number of chunks
        if (LOG.isLoggable(Level.FINE)) {
            LOG.fine("Calculating chunk offsets for track_" + track.getTrackMetaData().getTrackId());
        }

        for (int i = 0; i < tracksChunkSizes.length; i++) {
            // The filelayout will be:
            // chunk_1_track_1,... ,chunk_1_track_n, chunk_2_track_1,... ,chunk_2_track_n, ... , chunk_m_track_1,... ,chunk_m_track_n
            // calculating the offsets
            if (LOG.isLoggable(Level.FINER)) {
                LOG.finer("Calculating chunk offsets for track_" + track.getTrackMetaData().getTrackId() + " chunk " + i);
            }
            for (Track current : movie.getTracks()) {
                if (LOG.isLoggable(Level.FINEST)) {
                    LOG.finest("Adding offsets of track_" + current.getTrackMetaData().getTrackId());
                }
                int[] chunkSizes = chunks.get(current);
                // Index of the first sample in chunk i of the current track.
                long firstSampleOfChunk = 0;
                for (int j = 0; j < i; j++) {
                    firstSampleOfChunk += chunkSizes[j];
                }
                if (current == track) {
                    chunkOffset[i] = offset;
                }
                // Advance past all samples of chunk i of the current track.
                for (int j = l2i(firstSampleOfChunk); j < firstSampleOfChunk + chunkSizes[i]; j++) {
                    offset += track2SampleSizes.get(current)[j];
                }
            }
        }
        stco.setChunkOffsets(chunkOffset);
        stbl.addBox(stco);
    }

    /** Creates the sample size box ('stsz') from the cached sample sizes. */
    protected void createStsz(Track track, SampleTableBox stbl) {
        SampleSizeBox stsz = new SampleSizeBox();
        stsz.setSampleSizes(track2SampleSizes.get(track));

        stbl.addBox(stsz);
    }

    /**
     * Creates the sample-to-chunk box ('stsc'). Run-length encodes the chunk
     * sizes: a new entry is only added when the chunk size changes.
     */
    protected void createStsc(Track track, Map<Track, int[]> chunks, SampleTableBox stbl) {
        int[] tracksChunkSizes = chunks.get(track);

        SampleToChunkBox stsc = new SampleToChunkBox();
        stsc.setEntries(new LinkedList<SampleToChunkBox.Entry>());
        long lastChunkSize = Integer.MIN_VALUE; // to be sure the first chunks hasn't got the same size
        for (int i = 0; i < tracksChunkSizes.length; i++) {
            // The sample description index references the sample description box
            // that describes the samples of this chunk. My Tracks cannot have more
            // than one sample description box. Therefore 1 is always right
            // the first chunk has the number '1'
            if (lastChunkSize != tracksChunkSizes[i]) {
                stsc.getEntries().add(new SampleToChunkBox.Entry(i + 1, tracksChunkSizes[i], 1));
                lastChunkSize = tracksChunkSizes[i];
            }
        }
        stbl.addBox(stsc);
    }

    /** Creates the sample dependency box ('sdtp') if the track has dependencies. */
    protected void createSdtp(Track track, SampleTableBox stbl) {
        if (track.getSampleDependencies() != null && !track.getSampleDependencies().isEmpty()) {
            SampleDependencyTypeBox sdtp = new SampleDependencyTypeBox();
            sdtp.setEntries(track.getSampleDependencies());
            stbl.addBox(sdtp);
        }
    }

    /** Creates the sync sample box ('stss') if the track declares sync samples. */
    protected void createStss(Track track, SampleTableBox stbl) {
        long[] syncSamples = track.getSyncSamples();
        if (syncSamples != null && syncSamples.length > 0) {
            SyncSampleBox stss = new SyncSampleBox();
            stss.setSampleNumber(syncSamples);
            stbl.addBox(stss);
        }
    }

    /** Creates the composition time offset box ('ctts') if the track has CT offsets. */
    protected void createCtts(Track track, SampleTableBox stbl) {
        List<CompositionTimeToSample.Entry> compositionTimeToSampleEntries = track.getCompositionTimeEntries();
        if (compositionTimeToSampleEntries != null && !compositionTimeToSampleEntries.isEmpty()) {
            CompositionTimeToSample ctts = new CompositionTimeToSample();
            ctts.setEntries(compositionTimeToSampleEntries);
            stbl.addBox(ctts);
        }
    }

    /**
     * Creates the decoding time-to-sample box ('stts'). Run-length encodes
     * the per-sample durations: consecutive equal deltas share one entry.
     */
    protected void createStts(Track track, SampleTableBox stbl) {
        TimeToSampleBox.Entry lastEntry = null;
        List<TimeToSampleBox.Entry> entries = new ArrayList<TimeToSampleBox.Entry>();

        for (long delta : track.getSampleDurations()) {
            if (lastEntry != null && lastEntry.getDelta() == delta) {
                lastEntry.setCount(lastEntry.getCount() + 1);
            } else {
                lastEntry = new TimeToSampleBox.Entry(1, delta);
                entries.add(lastEntry);
            }

        }
        TimeToSampleBox stts = new TimeToSampleBox();
        stts.setEntries(entries);
        stbl.addBox(stts);
    }

    /**
     * The 'mdat' box that interleaves the chunks of all tracks in file order
     * (chunk i of every track, then chunk i+1 of every track, ...). Always
     * writes a 16-byte header — either 32-bit size + 'mdat' + 8 padding
     * bytes, or size=1 + 'mdat' + 64-bit largesize — so the payload start is
     * the same for small and large boxes.
     */
    private class InterleaveChunkMdat implements Box {
        List<Track> tracks;
        // Chunks in the exact order they will be written to the file.
        List<List<Sample>> chunkList = new ArrayList<List<Sample>>();
        Container parent;

        long contentSize;

        public Container getParent() {
            return parent;
        }

        public long getOffset() {
            throw new RuntimeException("Doesn't have any meaning for programmatically created boxes");
        }

        public void setParent(Container parent) {
            this.parent = parent;
        }

        public void parse(DataSource dataSource, ByteBuffer header, long contentSize, BoxParser boxParser) throws IOException {
        }

        private InterleaveChunkMdat(Movie movie, Map<Track, int[]> chunks, long contentSize) {
            this.contentSize = contentSize;
            this.tracks = movie.getTracks();
            // Build the write order: for each chunk index, one sub-list of
            // samples per track.
            for (int i = 0; i < chunks.values().iterator().next().length; i++) {
                for (Track track : tracks) {
                    int[] chunkSizes = chunks.get(track);
                    long firstSampleOfChunk = 0;
                    for (int j = 0; j < i; j++) {
                        firstSampleOfChunk += chunkSizes[j];
                    }
                    List<Sample> chunk = DefaultMp4Builder.this.track2Sample.get(track).subList(l2i(firstSampleOfChunk), l2i(firstSampleOfChunk + chunkSizes[i]));
                    chunkList.add(chunk);
                }
            }
        }

        /**
         * Absolute file offset of the first payload byte: 16 header bytes
         * plus the sizes of all boxes preceding this one at every level of
         * the container hierarchy.
         */
        public long getDataOffset() {
            Object b = this;
            long offset = 16;
            while (b instanceof Box) {
                for (Box box : ((Box) b).getParent().getBoxes()) {
                    if (b == box) {
                        break;
                    }
                    offset += box.getSize();
                }
                b = ((Box) b).getParent();
            }
            return offset;
        }

        public String getType() {
            return "mdat";
        }

        public long getSize() {
            return 16 + contentSize;
        }

        // A box with 32-bit size must fit size+8 into an unsigned int.
        private boolean isSmallBox(long contentSize) {
            return (contentSize + 8) < 4294967296L;
        }

        public void getBox(WritableByteChannel writableByteChannel) throws IOException {
            ByteBuffer bb = ByteBuffer.allocate(16);
            long size = getSize();
            if (isSmallBox(size)) {
                IsoTypeWriter.writeUInt32(bb, size);
            } else {
                IsoTypeWriter.writeUInt32(bb, 1); // size==1 signals 64-bit largesize
            }
            bb.put(IsoFile.fourCCtoBytes("mdat"));
            if (isSmallBox(size)) {
                bb.put(new byte[8]); // pad so payload always starts at offset 16
            } else {
                IsoTypeWriter.writeUInt64(bb, size);
            }
            bb.rewind();
            writableByteChannel.write(bb);

            // Stream all samples in interleaved chunk order.
            for (List<Sample> samples : chunkList) {
                for (Sample sample : samples) {
                    sample.writeTo(writableByteChannel);
                }
            }
        }

    }

    /**
     * Gets the chunk sizes for the given track.
     *
     * @param track the track whose chunk layout is requested
     * @param movie the movie the track belongs to
     * @return number of samples in each consecutive chunk of the track
     */
    int[] getChunkSizes(Track track, Movie movie) {

        long[] referenceChunkStarts = intersectionFinder.sampleNumbers(track);
        int[] chunkSizes = new int[referenceChunkStarts.length];

        for (int i = 0; i < referenceChunkStarts.length; i++) {
            long start = referenceChunkStarts[i] - 1; // sample numbers are 1-based
            long end;
            if (referenceChunkStarts.length == i + 1) {
                // last chunk runs to the end of the track
                end = track.getSamples().size();
            } else {
                end = referenceChunkStarts[i + 1] - 1;
            }

            chunkSizes[i] = l2i(end - start);
            // The Stretch makes sure that there are as much audio and video chunks!
        }
        assert DefaultMp4Builder.this.track2Sample.get(track).size() == sum(chunkSizes) : "The number of samples and the sum of all chunk lengths must be equal";
        return chunkSizes;

    }

    /** Sums an int array into a long (overflow-safe for large sample counts). */
    private static long sum(int[] ls) {
        long rc = 0;
        for (long l : ls) {
            rc += l;
        }
        return rc;
    }

    /** Sums a long array. */
    private static long sum(long[] ls) {
        long rc = 0;
        for (long l : ls) {
            rc += l;
        }
        return rc;
    }

    /**
     * Common movie timescale: the greatest common divisor of all track
     * timescales, so every track's durations are representable in it.
     */
    public long getTimescale(Movie movie) {
        long timescale = movie.getTracks().iterator().next().getTrackMetaData().getTimescale();
        for (Track track : movie.getTracks()) {
            timescale = gcd(track.getTrackMetaData().getTimescale(), timescale);
        }
        return timescale;
    }

    /** Euclidean greatest common divisor. */
    public static long gcd(long a, long b) {
        if (b == 0) {
            return a;
        }
        return gcd(b, a % b);
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.actionSystem.impl;

import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.ActionManagerEx;
import com.intellij.openapi.actionSystem.ex.ActionUtil;
import com.intellij.openapi.actionSystem.impl.actionholder.ActionRef;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.TransactionGuard;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.SizedIcon;
import com.intellij.ui.components.JBCheckBoxMenuItem;
import com.intellij.ui.plaf.beg.BegMenuItemUI;
import com.intellij.ui.plaf.gtk.GtkMenuItemUI;
import com.intellij.util.PlatformIcons;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import javax.swing.plaf.MenuItemUI;
import javax.swing.plaf.synth.SynthMenuItemUI;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.HashSet;
import java.util.Set;

import static com.intellij.openapi.keymap.KeymapUtil.getActiveKeymapShortcuts;

/**
 * A Swing menu item that presents and executes an {@link AnAction}.
 * It mirrors the action's {@link Presentation} (text, mnemonic, icon,
 * enabled/visible state) and keeps itself in sync while it is in the
 * component tree via an internal {@link MenuItemSynchronizer}.
 */
public class ActionMenuItem extends JBCheckBoxMenuItem {
  // Icons used for toggleable actions when the platform L&F does not render
  // the checkbox state itself (see updateIcon()).
  private static final Icon ourCheckedIcon = JBUI.scale(new SizedIcon(PlatformIcons.CHECK_ICON, 18, 18));
  private static final Icon ourUncheckedIcon = EmptyIcon.ICON_18;

  private final ActionRef<AnAction> myAction;          // indirect reference to the action being presented
  private final Presentation myPresentation;           // presentation this item mirrors
  private final String myPlace;                        // action place id (e.g. ActionPlaces.MAIN_MENU)
  private final boolean myInsideCheckedGroup;
  private final boolean myEnableMnemonics;
  private final boolean myToggleable;                  // true when the action implements Toggleable
  private DataContext myContext;
  private AnActionEvent myEvent;
  private MenuItemSynchronizer myMenuItemSynchronizer; // non-null while listening to presentation changes
  private boolean myToggled;                           // cached toggle state, see isSelected()

  /**
   * @param prepareNow when {@code true} the item is fully initialized immediately;
   *                   otherwise it shows a placeholder until {@link #prepare()} or
   *                   {@link #addNotify()} runs.
   */
  public ActionMenuItem(final AnAction action,
                        final Presentation presentation,
                        @NotNull final String place,
                        @NotNull DataContext context,
                        final boolean enableMnemonics,
                        final boolean prepareNow,
                        final boolean insideCheckedGroup) {
    myAction = ActionRef.fromAction(action);
    myPresentation = presentation;
    myPlace = place;
    myContext = context;
    myEnableMnemonics = enableMnemonics;
    myToggleable = action instanceof Toggleable;
    myInsideCheckedGroup = insideCheckedGroup;

    myEvent = new AnActionEvent(null, context, place, myPresentation, ActionManager.getInstance(), 0, true, false);
    addActionListener(new ActionTransmitter());
    setBorderPainted(false);

    updateUI();

    if (prepareNow) {
      init();
    }
    else {
      setText("loading...");
    }
  }

  // True for a plain Enter keystroke with no modifiers.
  private static boolean isEnterKeyStroke(KeyStroke keyStroke) {
    return keyStroke.getKeyCode() == KeyEvent.VK_ENTER && keyStroke.getModifiers() == 0;
  }

  /** Finishes deferred initialization and starts tracking presentation changes. */
  public void prepare() {
    init();
    installSynchronizer();
  }

  /**
   * We have to make this method public to allow BegMenuItemUI to invoke it.
   */
  @Override
  public void fireActionPerformed(ActionEvent event) {
    // Actions must run inside a transaction; delegate the actual firing there.
    TransactionGuard.submitTransaction(ApplicationManager.getApplication(), () -> super.fireActionPerformed(event));
  }

  @Override
  public void addNotify() {
    super.addNotify();
    installSynchronizer();
    init();
  }

  @Override
  public void removeNotify() {
    uninstallSynchronizer();
    super.removeNotify();
  }

  // Lazily creates the presentation listener; idempotent.
  private void installSynchronizer() {
    if (myMenuItemSynchronizer == null) {
      myMenuItemSynchronizer = new MenuItemSynchronizer();
    }
  }

  private void uninstallSynchronizer() {
    if (myMenuItemSynchronizer != null) {
      Disposer.dispose(myMenuItemSynchronizer);
      myMenuItemSynchronizer = null;
    }
  }

  // Copies all state from the presentation into this Swing component.
  private void init() {
    setVisible(myPresentation.isVisible());
    setEnabled(myPresentation.isEnabled());
    setMnemonic(myEnableMnemonics ? myPresentation.getMnemonic() : 0);
    setText(myPresentation.getText());
    final int mnemonicIndex = myEnableMnemonics ? myPresentation.getDisplayedMnemonicIndex() : -1;

    if (getText() != null && mnemonicIndex >= 0 && mnemonicIndex < getText().length()) {
      setDisplayedMnemonicIndex(mnemonicIndex);
    }

    AnAction action = myAction.getAction();
    updateIcon(action);
    String id = ActionManager.getInstance().getId(action);
    if (id != null) {
      // Registered action: show the shortcut from the active keymap.
      setAcceleratorFromShortcuts(getActiveKeymapShortcuts(id).getShortcuts());
    }
    else {
      // Unregistered action: fall back to the action's own shortcut set.
      final ShortcutSet shortcutSet = action.getShortcutSet();
      if (shortcutSet != null) {
        setAcceleratorFromShortcuts(shortcutSet.getShortcuts());
      }
    }
  }

  // Uses the first keyboard shortcut (if any) as this item's accelerator.
  private void setAcceleratorFromShortcuts(@NotNull Shortcut[] shortcuts) {
    for (Shortcut shortcut : shortcuts) {
      if (shortcut instanceof KeyboardShortcut) {
        final KeyStroke firstKeyStroke = ((KeyboardShortcut)shortcut).getFirstKeyStroke();
        //If action has Enter shortcut, do not add it. Otherwise, user won't be able to chose any ActionMenuItem other than that
        if (!isEnterKeyStroke(firstKeyStroke)) {
          setAccelerator(firstKeyStroke);
        }
        break;
      }
    }
  }

  @Override
  public void updateUI() {
    if (UIUtil.isStandardMenuLAF()) {
      super.updateUI();
    }
    else {
      setUI(BegMenuItemUI.createUI(this));
    }
  }

  @Override
  public void setUI(MenuItemUI ui) {
    // Wrap Synth-based UIs on GTK so menu items render correctly there.
    MenuItemUI newUi = UIUtil.isUnderGTKLookAndFeel() && ui instanceof SynthMenuItemUI ? new GtkMenuItemUI((SynthMenuItemUI)ui) : ui;
    super.setUI(newUi);
  }

  /**
   * Updates long description of action at the status bar.
   */
  @Override
  public void menuSelectionChanged(boolean isIncluded) {
    super.menuSelectionChanged(isIncluded);
    ActionMenu.showDescriptionInStatusBar(isIncluded, this, myPresentation.getDescription());
  }

  /** @return user-readable text of the action's first keyboard shortcut (may be empty). */
  public String getFirstShortcutText() {
    return KeymapUtil.getFirstKeyboardShortcutText(myAction.getAction());
  }

  /** Replaces the data context and rebuilds the cached action event with it. */
  public void updateContext(@NotNull DataContext context) {
    myContext = context;
    myEvent = new AnActionEvent(null, context, myPlace, myPresentation, ActionManager.getInstance(), 0, true, false);
  }

  // Chooses the icon / checkbox state depending on whether the action is
  // toggleable and which look-and-feel is active.
  private void updateIcon(AnAction action) {
    if (isToggleable() && (myPresentation.getIcon() == null || myInsideCheckedGroup || !UISettings.getInstance().getShowIconsInMenus())) {
      action.update(myEvent);
      myToggled = Boolean.TRUE.equals(myEvent.getPresentation().getClientProperty(Toggleable.SELECTED_PROPERTY));
      if (ActionPlaces.MAIN_MENU.equals(myPlace) && SystemInfo.isMacSystemMenu ||
          UIUtil.isUnderNimbusLookAndFeel() ||
          UIUtil.isUnderWindowsLookAndFeel() && SystemInfo.isWin7OrNewer) {
        // These L&Fs draw the checkbox themselves.
        setState(myToggled);
      }
      else if (!(getUI() instanceof GtkMenuItemUI)) {
        // Emulate the checkbox with a check / empty icon pair.
        if (myToggled) {
          setIcon(ourCheckedIcon);
          setDisabledIcon(IconLoader.getDisabledIcon(ourCheckedIcon));
        }
        else {
          setIcon(ourUncheckedIcon);
          setDisabledIcon(IconLoader.getDisabledIcon(ourUncheckedIcon));
        }
      }
    }
    else {
      if (UISettings.getInstance().getShowIconsInMenus()) {
        Icon icon = myPresentation.getIcon();
        if (action instanceof ToggleAction && ((ToggleAction)action).isSelected(myEvent)) {
          icon = new PoppedIcon(icon, 16, 16);
        }
        setIcon(icon);
        if (myPresentation.getDisabledIcon() != null) {
          setDisabledIcon(myPresentation.getDisabledIcon());
        }
        else {
          setDisabledIcon(IconLoader.getDisabledIcon(icon));
        }
      }
    }
  }

  @Override
  public void setIcon(Icon icon) {
    if (SystemInfo.isMacSystemMenu && ActionPlaces.MAIN_MENU.equals(myPlace)) {
      if (icon instanceof IconLoader.LazyIcon) {
        // [tav] JDK can't paint correctly our HiDPI icons at the system menu bar
        icon = ((IconLoader.LazyIcon)icon).inOriginalScale();
      }
    }
    super.setIcon(icon);
  }

  public boolean isToggleable() {
    return myToggleable;
  }

  @Override
  public boolean isSelected() {
    return myToggled;
  }

  /**
   * Translates a Swing ActionEvent into the platform action-execution
   * pipeline (update check, before/after notifications, type-ahead).
   */
  private final class ActionTransmitter implements ActionListener {
    /**
     * @param component component
     * @return whether the component in Swing tree or not. This method is more
     *         weak then {@link Component#isShowing() }
     */
    private boolean isInTree(final Component component) {
      if (component instanceof Window) {
        return component.isShowing();
      }
      else {
        Window windowAncestor = SwingUtilities.getWindowAncestor(component);
        return windowAncestor != null && windowAncestor.isShowing();
      }
    }

    @Override
    public void actionPerformed(final ActionEvent e) {
      final IdeFocusManager fm = IdeFocusManager.findInstanceByContext(myContext);
      final ActionCallback typeAhead = new ActionCallback();
      final String id = ActionManager.getInstance().getId(myAction.getAction());
      if (id != null) {
        // usage statistics for registered actions
        FeatureUsageTracker.getInstance().triggerFeatureUsed("context.menu.click.stats." + id.replace(' ', '.'));
      }

      // Buffer key events until the action finished settling focus.
      fm.typeAheadUntil(typeAhead, getText());
      fm.runOnOwnContext(myContext, () -> {
        final AnActionEvent event = new AnActionEvent(
          // synthesize a mouse event at the center of this menu item
          new MouseEvent(ActionMenuItem.this, MouseEvent.MOUSE_PRESSED, 0, e.getModifiers(), getWidth() / 2, getHeight() / 2, 1, false),
          myContext, myPlace, myPresentation, ActionManager.getInstance(), e.getModifiers(), true, false
        );
        final AnAction menuItemAction = myAction.getAction();
        if (ActionUtil.lastUpdateAndCheckDumb(menuItemAction, event, false)) {
          ActionManagerEx actionManager = ActionManagerEx.getInstanceEx();
          actionManager.fireBeforeActionPerformed(menuItemAction, myContext, event);
          fm.doWhenFocusSettlesDown(typeAhead::setDone);
          ActionUtil.performActionDumbAware(menuItemAction, event);
          actionManager.queueActionPerformedEvent(menuItemAction, myContext, event);
        }
        else {
          typeAhead.setDone();
        }
      });
    }
  }

  /**
   * Listens to {@link Presentation} property changes and pushes them into this
   * menu item. Re-entrancy is guarded per-property via {@code mySynchronized};
   * when the item has left the component tree the synchronizer disposes itself
   * (asynchronously, since listeners cannot be removed mid-event).
   */
  private final class MenuItemSynchronizer implements PropertyChangeListener, Disposable {
    @NonNls private static final String SELECTED = "selected";

    private final Set<String> mySynchronized = new HashSet<>();

    private MenuItemSynchronizer() {
      myPresentation.addPropertyChangeListener(this);
    }

    @Override
    public void dispose() {
      myPresentation.removePropertyChangeListener(this);
    }

    @Override
    public void propertyChange(PropertyChangeEvent e) {
      boolean queueForDispose = getParent() == null;

      String name = e.getPropertyName();
      if (mySynchronized.contains(name)) return;

      mySynchronized.add(name);

      try {
        if (Presentation.PROP_VISIBLE.equals(name)) {
          final boolean visible = myPresentation.isVisible();
          if (!visible && SystemInfo.isMacSystemMenu && myPlace.equals(ActionPlaces.MAIN_MENU)) {
            // The macOS system menu cannot hide items; disable instead.
            setEnabled(false);
          }
          else {
            setVisible(visible);
          }
        }
        else if (Presentation.PROP_ENABLED.equals(name)) {
          setEnabled(myPresentation.isEnabled());
          updateIcon(myAction.getAction());
        }
        else if (Presentation.PROP_MNEMONIC_KEY.equals(name)) {
          setMnemonic(myPresentation.getMnemonic());
        }
        else if (Presentation.PROP_MNEMONIC_INDEX.equals(name)) {
          setDisplayedMnemonicIndex(myPresentation.getDisplayedMnemonicIndex());
        }
        else if (Presentation.PROP_TEXT.equals(name)) {
          setText(myPresentation.getText());
        }
        else if (Presentation.PROP_ICON.equals(name) || Presentation.PROP_DISABLED_ICON.equals(name) || SELECTED.equals(name)) {
          updateIcon(myAction.getAction());
        }
      }
      finally {
        mySynchronized.remove(name);
        if (queueForDispose) {
          // later since we cannot remove property listeners inside event processing
          //noinspection SSBasedInspection
          SwingUtilities.invokeLater(() -> {
            if (getParent() == null) {
              uninstallSynchronizer();
            }
          });
        }
      }
    }
  }
}
/*
 * Copyright (c) 2019 Informatics Matters Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.squonk.execution.steps.impl;

import org.apache.camel.CamelContext;
import org.squonk.api.VariableHandler;
import org.squonk.core.DefaultServiceDescriptor;
import org.squonk.execution.runners.ContainerRunner;
import org.squonk.execution.steps.AbstractThinStep;
import org.squonk.execution.variable.impl.FilesystemReadContext;
import org.squonk.execution.variable.impl.FilesystemWriteContext;
import org.squonk.io.IODescriptor;
import org.squonk.io.SquonkDataSource;
import org.squonk.types.DefaultHandler;
import org.squonk.types.TypeHandlerUtils;

import java.io.File;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Base class for steps that run in containers, typically Docker containers.
 *
 * Lifecycle per execution: prepare a {@link ContainerRunner}, write the input
 * variables to its host work dir, run the container, read the outputs back as
 * {@link SquonkDataSource}s and convert them to variables, then collect metrics.
 * Subclasses only supply {@link #prepareContainerRunner()}.
 */
public abstract class AbstractContainerStep extends AbstractThinStep {

    private static final Logger LOG = Logger.getLogger(AbstractContainerStep.class.getName());

    // Wall-clock seconds of the container run; set by handleExecute(), used for metrics.
    protected Float containerExecutionTime = null;
    // Runner for the current execution; kept so doCleanup() can dispose it.
    protected ContainerRunner containerRunner = null;

    /**
     * Runs the container for the given named inputs and returns the named
     * output variables declared by the service descriptor.
     *
     * @param inputs input variables keyed by IODescriptor name
     * @return output variables keyed by IODescriptor name (outputs with no
     *         produced data are logged and omitted)
     */
    @Override
    public Map<String, Object> doExecute(Map<String, Object> inputs) throws Exception {

        statusMessage = MSG_PREPARING_CONTAINER;

        DefaultServiceDescriptor dsd = getDefaultServiceDescriptor();
        containerRunner = prepareContainerRunner();
        Map<String, List<SquonkDataSource>> dataSourcesMap = doExecuteForDataSources(inputs, containerRunner, dsd);
        LOG.info("Execution generated " + dataSourcesMap.size() + " outputs");
        int i = 1;
        if (LOG.isLoggable(Level.FINE)) {
            // Verbose dump of each output's datasources (name:contentType).
            for (Map.Entry<String, List<SquonkDataSource>> e : dataSourcesMap.entrySet()) {
                StringBuilder b = new StringBuilder("Output ")
                        .append(i)
                        .append(": ")
                        .append(e.getKey())
                        .append(" -> [");
                for (SquonkDataSource sds : e.getValue()) {
                    b.append(" ")
                            .append(sds.getName())
                            .append(":")
                            .append(sds.getContentType());
                }
                b.append(" ]");
                LOG.fine(b.toString());
                i++;
            }
        }

        // Convert raw datasources into typed variables per output descriptor.
        Map<String, Object> results = new LinkedHashMap<>();
        for (IODescriptor iod : serviceDescriptor.getServiceConfig().getOutputDescriptors()) {
            List<SquonkDataSource> dataSources = dataSourcesMap.get(iod.getName());
            if (dataSources == null || dataSources.isEmpty()) {
                LOG.warning("No dataSources found for variable " + iod.getName());
            } else {
                Object variable = TypeHandlerUtils.convertDataSourcesToVariable(dataSources, iod.getPrimaryType(), iod.getSecondaryType());
                results.put(iod.getName(), variable);
            }
        }
        return results;
    }

    @Override
    public void doCleanup() {
        handleCleanup();
    }

    /**
     * Core container workflow: write inputs, execute, read outputs, record metrics.
     *
     * @return raw outputs as datasources keyed by IODescriptor name
     */
    protected Map<String, List<SquonkDataSource>> doExecuteForDataSources(
            Map<String, Object> inputs,
            ContainerRunner containerRunner,
            DefaultServiceDescriptor descriptor) throws Exception {

        // create input files
        statusMessage = MSG_PREPARING_INPUT;
        writeInputs(inputs, descriptor, containerRunner);

        handleExecute(containerRunner);

        statusMessage = MSG_PREPARING_OUTPUT;
        Map<String, List<SquonkDataSource>> results = readOutputs(descriptor, containerRunner.getHostWorkDir());

        handleMetrics(containerRunner);
        return results;
    }

    /** Creates the runner (e.g. a Docker container) this step executes in. */
    protected abstract ContainerRunner prepareContainerRunner() throws IOException;

    public ContainerRunner getContainerRunner() {
        return containerRunner;
    }

    /**
     * Runs the container and times it; a non-zero exit status raises a
     * RuntimeException carrying the container log.
     */
    protected void handleExecute(ContainerRunner containerRunner) {
        // run the command
        statusMessage = MSG_RUNNING_CONTAINER;
        LOG.info("Executing ...");
        long t0 = System.currentTimeMillis();
        int status = executeContainerRunner(containerRunner);
        long t1 = System.currentTimeMillis();
        containerExecutionTime = (t1 - t0) / 1000.0f;
        LOG.info(String.format("Executed in %s seconds with return status of %s", containerExecutionTime, status));

        if (status != 0) {
            String log = containerRunner.getLog();
            LOG.warning("Execution errors: " + log);
            statusMessage = "Container execution failed";
            throw new RuntimeException("Container execution failed:\n" + log);
        }
    }

    /**
     * Removes the runner's work files unless DEBUG_MODE (inherited) requests
     * that they be kept for inspection.
     */
    protected void handleCleanup() {
        // This block essentially replicates the actions
        // In the cleanup() method of ExternalExecutor.java
        // At some point we might want to rationalise the interfaces.
        if (containerRunner != null && DEBUG_MODE < 2) {
            containerRunner.cleanup();
            LOG.info("Results cleaned up (DEBUG_MODE=" + DEBUG_MODE + ")");
        } else {
            if (containerRunner == null) {
                LOG.info("Skipping cleanup (containerRunner=null)");
            } else {
                LOG.info("Skipping cleanup (DEBUG_MODE=" + DEBUG_MODE + ")");
            }
        }
    }

    /** Invokes the conventional "execute" entry point inside the container. */
    protected int executeContainerRunner(ContainerRunner containerRunner) {
        return containerRunner.execute(containerRunner.getLocalWorkDir() + "/execute");
    }

    /** Reads output_metrics.txt from the work dir and records metrics/status. */
    protected void handleMetrics(ContainerRunner containerRunner) throws IOException {
        statusMessage = MSG_PROCESSING_RESULTS_READY;
        Properties props = containerRunner.getFileAsProperties("output_metrics.txt");
        generateMetricsAndStatus(props, containerExecutionTime);
    }

    /**
     * Writes each declared input variable into the runner's host work dir.
     * Missing inputs are logged and skipped, not treated as errors.
     */
    protected void writeInputs(
            Map<String, Object> data,
            DefaultServiceDescriptor serviceDescriptor,
            ContainerRunner runner) throws Exception {
        IODescriptor[] inputDescriptors = serviceDescriptor.resolveInputIODescriptors();
        if (inputDescriptors != null) {
            LOG.info("Handling " + inputDescriptors.length + " inputs");
            for (IODescriptor iod : inputDescriptors) {
                Object value = data.get(iod.getName());
                if (value == null) {
                    LOG.warning("No input found for " + iod.getName());
                } else {
                    LOG.info("Writing input for " + iod.getName() + " " + iod.getMediaType());
                    doWriteInput(value, runner, iod);
                }
            }
        }
    }

    /** Serializes a single input variable to file(s) via its VariableHandler. */
    protected <P, Q> void doWriteInput(
            P input,
            ContainerRunner runner,
            IODescriptor<P, Q> iod) throws Exception {
        LOG.info("Handling input for " + iod.getName());
        VariableHandler<P> vh = DefaultHandler.createVariableHandler(iod.getPrimaryType(), iod.getSecondaryType());
        File dir = runner.getHostWorkDir();
        FilesystemWriteContext writeContext = new FilesystemWriteContext(dir, iod.getName());
        vh.writeVariable(input, writeContext);
    }

    /** Reads every declared output from the work dir as datasources. */
    protected Map<String, List<SquonkDataSource>> readOutputs(DefaultServiceDescriptor serviceDescriptor, File workdir) throws Exception {
        IODescriptor[] outputDescriptors = serviceDescriptor.resolveOutputIODescriptors();
        Map<String, List<SquonkDataSource>> results = new LinkedHashMap<>();
        if (outputDescriptors != null) {
            LOG.info("Handling " + outputDescriptors.length + " outputs");
            for (IODescriptor iod : outputDescriptors) {
                LOG.info("Reading output for " + iod.getName() + " " + iod.getMediaType());
                List<SquonkDataSource> result = doReadOutput(workdir, iod);
                results.put(iod.getName(), result);
            }
        }
        return results;
    }

    /** Reads a single output; extension point for subclasses. */
    protected <P, Q> List<SquonkDataSource> doReadOutput(File workdir, IODescriptor<P, Q> iod) throws Exception {
        List<SquonkDataSource> outputs = buildOutputs(workdir, iod);
        return outputs;
    }

    // Deserializes an output's datasources via its VariableHandler.
    private <P, Q> List<SquonkDataSource> buildOutputs(File workdir, IODescriptor<P, Q> iod) throws Exception {
        VariableHandler<P> vh = DefaultHandler.createVariableHandler(iod.getPrimaryType(), iod.getSecondaryType());
        VariableHandler.ReadContext readContext = new FilesystemReadContext(workdir, iod.getName());
        List<SquonkDataSource> dataSources = vh.readDataSources(readContext);
        return dataSources;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the
 * NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and limitations under the License.
 */
package com.ricemap.spateDB.shape;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;

import com.ricemap.spateDB.io.TextSerializerHelper;

/**
 * A class that holds coordinates of a Prism. For predicate test functions
 * (e.g. intersection), the Prism is considered open-ended. This means that
 * the right and top edge are outside the Prism.
 *
 * @author tonyren, Ahmed Eldawy
 */
public class Prism implements Shape, WritableComparable<Prism> {
  // Lower corner (t1, x1, y1) and upper corner (t2, x2, y2) of the box.
  public double t1;
  public double x1;
  public double y1;
  public double t2;
  public double x2;
  public double y2;

  /** Constructs a degenerate prism at the origin. */
  public Prism() {
    this(0, 0, 0, 0, 0, 0);
  }

  /**
   * Constructs a new <code>Prism</code>, initialized to match the values
   * of the specified <code>Prism</code>.
   *
   * @param r the <code>Prism</code> from which to copy initial values
   *          to a newly constructed <code>Prism</code>
   * @since 1.1
   */
  public Prism(Prism r) {
    this(r.t1, r.x1, r.y1, r.t2, r.x2, r.y2);
  }

  public Prism(double t1, double x1, double y1, double t2, double x2, double y2) {
    this.set(t1, x1, y1, t2, x2, y2);
  }

  /** Sets this prism to the MBR of the given shape. */
  public void set(Shape s) {
    Prism mbr = s.getMBR();
    set(mbr.t1, mbr.x1, mbr.y1, mbr.t2, mbr.x2, mbr.y2);
  }

  public void set(double t1, double x1, double y1, double t2, double x2, double y2) {
    this.t1 = t1;
    this.x1 = x1;
    this.y1 = y1;
    this.t2 = t2;
    this.x2 = x2;
    this.y2 = y2;
  }

  /** Hadoop Writable serialization: six doubles in t1,x1,y1,t2,x2,y2 order. */
  public void write(DataOutput out) throws IOException {
    out.writeDouble(t1);
    out.writeDouble(x1);
    out.writeDouble(y1);
    out.writeDouble(t2);
    out.writeDouble(x2);
    out.writeDouble(y2);
  }

  public void readFields(DataInput in) throws IOException {
    this.t1 = in.readDouble();
    this.x1 = in.readDouble();
    this.y1 = in.readDouble();
    this.t2 = in.readDouble();
    this.x2 = in.readDouble();
    this.y2 = in.readDouble();
  }

  /**
   * Comparison is done by lexicographic ordering of attributes
   * &lt;t1, x1, y1, t2, x2, y2&gt;.
   */
  public int compareTo(Shape s) {
    Prism rect2 = (Prism) s;
    // Sort by t1 then x1 then y1
    if (this.t1 < rect2.t1)
      return -1;
    if (this.t1 > rect2.t1)
      return 1;
    if (this.x1 < rect2.x1)
      return -1;
    if (this.x1 > rect2.x1)
      return 1;
    if (this.y1 < rect2.y1)
      return -1;
    if (this.y1 > rect2.y1)
      return 1;
    // Sort by t2 then x2 then y2
    if (this.t2 < rect2.t2)
      return -1;
    if (this.t2 > rect2.t2)
      return 1;
    if (this.x2 < rect2.x2)
      return -1;
    if (this.x2 > rect2.x2)
      return 1;
    if (this.y2 < rect2.y2)
      return -1;
    if (this.y2 > rect2.y2)
      return 1;
    return 0;
  }

  /**
   * Two prisms are equal iff all six coordinates match.
   * Returns false (rather than throwing) for null or non-Prism arguments,
   * as required by the {@link Object#equals} contract.
   */
  public boolean equals(Object obj) {
    if (!(obj instanceof Prism))
      return false;
    Prism r2 = (Prism) obj;
    return this.t1 == r2.t1 && this.x1 == r2.x1 && this.y1 == r2.y1
        && this.t2 == r2.t2 && this.x2 == r2.x2 && this.y2 == r2.y2;
  }

  /** Hash consistent with {@link #equals(Object)}: combines all six coordinates. */
  @Override
  public int hashCode() {
    long bits = Double.doubleToLongBits(t1);
    bits = 31 * bits + Double.doubleToLongBits(x1);
    bits = 31 * bits + Double.doubleToLongBits(y1);
    bits = 31 * bits + Double.doubleToLongBits(t2);
    bits = 31 * bits + Double.doubleToLongBits(x2);
    bits = 31 * bits + Double.doubleToLongBits(y2);
    return (int) (bits ^ (bits >>> 32));
  }

  @Override
  public double distanceTo(double pt, double px, double py) {
    // NOTE(review): delegates to the MAX distance; presumably intentional for
    // this index's pruning strategy — confirm against callers.
    return this.getMaxDistanceTo(pt, px, py);
  }

  /**
   * Maximum distance from the given point to the perimeter of the Prism.
   *
   * @param pt t-coordinate of the point
   * @param px x-coordinate of the point
   * @param py y-coordinate of the point
   */
  public double getMaxDistanceTo(double pt, double px, double py) {
    double dt = Math.max(pt - this.t1, this.t2 - pt);
    double dx = Math.max(px - this.x1, this.x2 - px);
    double dy = Math.max(py - this.y1, this.y2 - py);
    return Math.sqrt(dt * dt + dx * dx + dy * dy);
  }

  /** Minimum distance from the given point to this prism (0 if inside). */
  public double getMinDistanceTo(double pt, double px, double py) {
    if (this.contains(pt, px, py))
      return 0;

    double dt = Math.min(Math.abs(pt - this.t1), Math.abs(this.t2 - pt));
    double dx = Math.min(Math.abs(px - this.x1), Math.abs(this.x2 - px));
    double dy = Math.min(Math.abs(py - this.y1), Math.abs(this.y2 - py));

    if ((pt < this.t1 || pt > this.t2) && (px < this.x1 || px > this.x2)
        && (py < this.y1 || py > this.y2)) {
      // outside in all three dimensions: use the euclidean corner distance
      return Math.sqrt(dt * dt + dx * dx + dy * dy);
    }

    return Math.min(dt, Math.min(dx, dy));
  }

  /** Minimum distance between this prism and r2 (0 when they overlap). */
  public double getMinDistance(Prism r2) {
    // dt/dx/dy are the per-axis gaps between the two prisms; zero when the
    // corresponding coordinate ranges overlap.
    double dt = 0;
    if (r2.t1 > this.t2)
      dt = r2.t1 - this.t2;
    else if (this.t1 > r2.t2)
      dt = this.t1 - r2.t2;
    double dx = 0;
    if (r2.x1 > this.x2)
      dx = r2.x1 - this.x2;
    else if (this.x1 > r2.x2)
      dx = this.x1 - r2.x2;
    double dy = 0;
    if (r2.y1 > this.y2)
      dy = r2.y1 - this.y2;
    else if (this.y1 > r2.y2)
      dy = this.y1 - r2.y2;

    // Case 1: Overlapping Prisms
    if (dt == 0 && dx == 0 && dy == 0)
      return 0;
    // Case 2: Not overlapping in any dimension
    return Math.sqrt(dt * dt + dx * dx + dy * dy);
  }

  /** Maximum distance between the two prisms: the diagonal of their union box. */
  public double getMaxDistance(Prism r2) {
    double tmin = Math.min(this.t1, r2.t1);
    double tmax = Math.max(this.t2, r2.t2);
    double xmin = Math.min(this.x1, r2.x1);
    double xmax = Math.max(this.x2, r2.x2);
    double ymin = Math.min(this.y1, r2.y1);
    double ymax = Math.max(this.y2, r2.y2);
    double dt = tmax - tmin;
    double dx = xmax - xmin;
    double dy = ymax - ymin;
    return Math.sqrt(dt * dt + dx * dx + dy * dy);
  }

  @Override
  public Prism clone() {
    return new Prism(this);
  }

  @Override
  public Prism getMBR() {
    return this;
  }

  /**
   * Intersection test. Points use closed bounds on all edges; prism-vs-prism
   * uses strict inequalities (open-ended boxes, see class comment).
   */
  public boolean isIntersected(Shape s) {
    if (s instanceof Point3d) {
      Point3d pt = (Point3d) s;
      return pt.t >= t1 && pt.t <= t2 && pt.x >= x1 && pt.x <= x2
          && pt.y >= y1 && pt.y <= y2;
    }
    Prism r = s.getMBR();
    if (r == null)
      return false;
    return (this.t2 > r.t1 && r.t2 > this.t1 && this.x2 > r.x1
        && r.x2 > this.x1 && this.y2 > r.y1 && r.y2 > this.y1);
  }

  /** @return the overlap box of this prism and s's MBR, or null when disjoint. */
  public Prism getIntersection(Shape s) {
    if (!s.isIntersected(this))
      return null;
    Prism r = s.getMBR();
    double it1 = Math.max(this.t1, r.t1);
    double it2 = Math.min(this.t2, r.t2);
    double ix1 = Math.max(this.x1, r.x1);
    double ix2 = Math.min(this.x2, r.x2);
    double iy1 = Math.max(this.y1, r.y1);
    double iy2 = Math.min(this.y2, r.y2);
    return new Prism(it1, ix1, iy1, it2, ix2, iy2);
  }

  public boolean contains(Point3d p) {
    return contains(p.t, p.x, p.y);
  }

  /** Point containment: lower bounds closed, upper bounds open. */
  public boolean contains(double t, double x, double y) {
    return t >= t1 && t < t2 && x >= x1 && x < x2 && y >= y1 && y < y2;
  }

  public boolean contains(Prism r) {
    return contains(r.t1, r.x1, r.y1, r.t2, r.x2, r.y2);
  }

  /** @return the smallest prism covering both this prism and s's MBR. */
  public Prism union(final Shape s) {
    Prism r = s.getMBR();
    double ut1 = Math.min(t1, r.t1);
    double ut2 = Math.max(t2, r.t2);
    double ux1 = Math.min(x1, r.x1);
    double ux2 = Math.max(x2, r.x2);
    double uy1 = Math.min(y1, r.y1);
    double uy2 = Math.max(y2, r.y2);
    return new Prism(ut1, ux1, uy1, ut2, ux2, uy2);
  }

  /** Grows this prism in place to cover s's MBR. */
  public void expand(final Shape s) {
    Prism r = s.getMBR();
    if (r.t1 < this.t1)
      this.t1 = r.t1;
    if (r.t2 > this.t2)
      this.t2 = r.t2;
    if (r.x1 < this.x1)
      this.x1 = r.x1;
    if (r.x2 > this.x2)
      this.x2 = r.x2;
    if (r.y1 < this.y1)
      this.y1 = r.y1;
    if (r.y2 > this.y2)
      this.y2 = r.y2;
  }

  /**
   * Prism containment: true when the box (rt1,rx1,ry1)-(rt2,rx2,ry2) lies
   * entirely inside this prism (closed bounds on every edge).
   */
  public boolean contains(double rt1, double rx1, double ry1, double rt2,
      double rx2, double ry2) {
    // BUGFIX: the y lower bound must compare ry1 (was ry2, which left the
    // contained prism's bottom edge unchecked).
    return rt1 >= t1 && rt2 <= t2 && rx1 >= x1 && rx2 <= x2 && ry1 >= y1
        && ry2 <= y2;
  }

  public Point3d getCenterPoint() {
    return new Point3d((t1 + t2) / 2, (x1 + x2) / 2, (y1 + y2) / 2);
  }

  /** Serializes as comma-separated doubles terminated by '\0'. */
  @Override
  public Text toText(Text text) {
    TextSerializerHelper.serializeDouble(t1, text, ',');
    TextSerializerHelper.serializeDouble(x1, text, ',');
    TextSerializerHelper.serializeDouble(y1, text, ',');
    TextSerializerHelper.serializeDouble(t2, text, ',');
    TextSerializerHelper.serializeDouble(x2, text, ',');
    TextSerializerHelper.serializeDouble(y2, text, '\0');
    return text;
  }

  @Override
  public void fromText(Text text) {
    t1 = TextSerializerHelper.consumeDouble(text, ',');
    x1 = TextSerializerHelper.consumeDouble(text, ',');
    y1 = TextSerializerHelper.consumeDouble(text, ',');
    t2 = TextSerializerHelper.consumeDouble(text, ',');
    x2 = TextSerializerHelper.consumeDouble(text, ',');
    y2 = TextSerializerHelper.consumeDouble(text, '\0');
  }

  @Override
  public String toString() {
    return "Prism: (" + t1 + "," + x1 + "," + y1 + ")-(" + t2 + "," + x2
        + "," + y2 + ")";
  }

  /** A prism is invalidated by setting t1 to NaN; see {@link #invalidate()}. */
  public boolean isValid() {
    return !Double.isNaN(t1);
  }

  public void invalidate() {
    this.t1 = Double.NaN;
  }

  public double getDepth() {
    return t2 - t1;
  }

  public double getHeight() {
    return y2 - y1;
  }

  public double getWidth() {
    return x2 - x1;
  }

  @Override
  public int compareTo(Prism r2) {
    if (this.t1 < r2.t1)
      return -1;
    if (this.t1 > r2.t1)
      return 1;
    if (this.x1 < r2.x1)
      return -1;
    if (this.x1 > r2.x1)
      return 1;
    if (this.y1 < r2.y1)
      return -1;
    if (this.y1 > r2.y1)
      return 1;
    if (this.t2 < r2.t2)
      return -1;
    if (this.t2 > r2.t2)
      return 1;
    if (this.x2 < r2.x2)
      return -1;
    if (this.x2 > r2.x2)
      return 1;
    if (this.y2 < r2.y2)
      return -1;
    if (this.y2 > r2.y2)
      return 1;
    return 0;
  }

  @Override
  public int getSizeofAllFields() {
    // six doubles at 8 bytes each
    return 48;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.server.core.partition.impl.btree.mavibot; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.File; import java.util.Iterator; import java.util.UUID; import org.apache.commons.io.FileUtils; import org.apache.directory.api.ldap.model.constants.SchemaConstants; import org.apache.directory.api.ldap.model.csn.CsnFactory; import org.apache.directory.api.ldap.model.entry.Attribute; import org.apache.directory.api.ldap.model.entry.DefaultAttribute; import org.apache.directory.api.ldap.model.entry.DefaultEntry; import org.apache.directory.api.ldap.model.entry.DefaultModification; import org.apache.directory.api.ldap.model.entry.Entry; import org.apache.directory.api.ldap.model.entry.Modification; import org.apache.directory.api.ldap.model.entry.ModificationOperation; import org.apache.directory.api.ldap.model.exception.LdapNoSuchObjectException; import org.apache.directory.api.ldap.model.exception.LdapSchemaViolationException; import 
org.apache.directory.api.ldap.model.name.Dn; import org.apache.directory.api.ldap.model.name.Rdn; import org.apache.directory.api.ldap.model.schema.AttributeType; import org.apache.directory.api.ldap.model.schema.SchemaManager; import org.apache.directory.api.ldap.schema.extractor.SchemaLdifExtractor; import org.apache.directory.api.ldap.schema.extractor.impl.DefaultSchemaLdifExtractor; import org.apache.directory.api.ldap.schema.loader.LdifSchemaLoader; import org.apache.directory.api.ldap.schema.manager.impl.DefaultSchemaManager; import org.apache.directory.api.util.Strings; import org.apache.directory.api.util.exception.Exceptions; import org.apache.directory.mavibot.btree.RecordManager; import org.apache.directory.server.constants.ApacheSchemaConstants; import org.apache.directory.server.core.api.CacheService; import org.apache.directory.server.core.api.CoreSession; import org.apache.directory.server.core.api.DirectoryService; import org.apache.directory.server.core.api.DnFactory; import org.apache.directory.server.core.api.LdapPrincipal; import org.apache.directory.server.core.api.MockCoreSession; import org.apache.directory.server.core.api.MockDirectoryService; import org.apache.directory.server.core.api.interceptor.context.AddOperationContext; import org.apache.directory.server.core.api.interceptor.context.LookupOperationContext; import org.apache.directory.server.core.shared.DefaultDnFactory; import org.apache.directory.server.xdbm.Index; import org.apache.directory.server.xdbm.IndexNotFoundException; import org.apache.directory.server.xdbm.Store; import org.apache.directory.server.xdbm.StoreUtils; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Unit test cases for MavibotStore * * @author <a href="mailto:dev@directory.apache.org">Apache Directory 
Project</a> */ @SuppressWarnings("unchecked") public class MavibotStoreTest { private static final Logger LOG = LoggerFactory.getLogger( MavibotStoreTest.class ); private File wkdir; private MavibotPartition store; private CoreSession session; private static SchemaManager schemaManager = null; private static DnFactory dnFactory; private static LdifSchemaLoader loader; private static Dn EXAMPLE_COM; /** The OU AttributeType instance */ private static AttributeType OU_AT; /** The ApacheAlias AttributeType instance */ private static AttributeType APACHE_ALIAS_AT; /** The DC AttributeType instance */ private static AttributeType DC_AT; /** The SN AttributeType instance */ private static AttributeType SN_AT; private RecordManager recordMan; private static CacheService cacheService; @Rule public TemporaryFolder tmpDir = new TemporaryFolder(); @BeforeClass public static void setup() throws Exception { String workingDirectory = System.getProperty( "workingDirectory" ); if ( workingDirectory == null ) { String path = MavibotStoreTest.class.getResource( "" ).getPath(); int targetPos = path.indexOf( "target" ); workingDirectory = path.substring( 0, targetPos + 6 ); } File schemaRepository = new File( workingDirectory, "schema" ); SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor( new File( workingDirectory ) ); extractor.extractOrCopy( true ); loader = new LdifSchemaLoader( schemaRepository ); schemaManager = new DefaultSchemaManager( loader ); boolean loaded = schemaManager.loadAllEnabled(); if ( !loaded ) { fail( "Schema load failed : " + Exceptions.printErrors( schemaManager.getErrors() ) ); } EXAMPLE_COM = new Dn( schemaManager, "dc=example,dc=com" ); OU_AT = schemaManager.getAttributeType( SchemaConstants.OU_AT ); DC_AT = schemaManager.getAttributeType( SchemaConstants.DC_AT ); SN_AT = schemaManager.getAttributeType( SchemaConstants.SN_AT ); APACHE_ALIAS_AT = schemaManager.getAttributeType( ApacheSchemaConstants.APACHE_ALIAS_AT ); cacheService = new 
CacheService(); cacheService.initialize( null ); dnFactory = new DefaultDnFactory( schemaManager, cacheService.getCache( "dnCache" ) ); } @Before public void createStore() throws Exception { // setup the working directory for the store wkdir = tmpDir.newFolder( getClass().getSimpleName() ); // initialize the store store = new MavibotPartition( schemaManager, dnFactory ); store.setId( "example" ); store.setCacheSize( 10 ); store.setPartitionPath( wkdir.toURI() ); store.setSyncOnWrite( false ); MavibotIndex ouIndex = new MavibotIndex( SchemaConstants.OU_AT_OID, false ); ouIndex.setWkDirPath( wkdir.toURI() ); store.addIndex( ouIndex ); MavibotIndex uidIndex = new MavibotIndex( SchemaConstants.UID_AT_OID, false ); uidIndex.setWkDirPath( wkdir.toURI() ); store.addIndex( uidIndex ); Dn suffixDn = new Dn( schemaManager, "o=Good Times Co." ); store.setSuffixDn( suffixDn ); store.setCacheService( cacheService ); store.initialize(); recordMan = store.getRecordMan(); StoreUtils.loadExampleData( store, schemaManager ); DirectoryService directoryService = new MockDirectoryService(); directoryService.setSchemaManager( schemaManager ); session = new MockCoreSession( new LdapPrincipal(), directoryService ); LOG.debug( "Created new store" ); } @After public void destroyStore() throws Exception { if ( store != null ) { // make sure all files are closed so that they can be deleted on Windows. //store.destroy(); } store = null; wkdir = null; } /** * Tests a suffix with two name components: dc=example,dc=com. * When reading this entry back from the store the Dn must * consist of two RDNs. 
     */
    @Test
    public void testTwoComponentSuffix() throws Exception
    {
        // setup the working directory for the 2nd store
        File wkdir2 = tmpDir.newFolder( getClass().getSimpleName() + "-store2" );

        // initialize the 2nd store
        MavibotPartition store2 = new MavibotPartition( schemaManager, dnFactory );
        store2.setId( "example2" );
        store2.setCacheSize( 10 );
        store2.setPartitionPath( wkdir2.toURI() );
        store2.setSyncOnWrite( false );
        store2.addIndex( new MavibotIndex( SchemaConstants.OU_AT_OID, false ) );
        store2.addIndex( new MavibotIndex( SchemaConstants.UID_AT_OID, false ) );
        store2.setSuffixDn( EXAMPLE_COM );
        store2.setCacheService( cacheService );
        store2.initialize();

        // inject context entry
        Dn suffixDn = new Dn( schemaManager, "dc=example,dc=com" );
        Entry entry = new DefaultEntry( schemaManager, suffixDn,
            "objectClass: top",
            "objectClass: domain",
            "dc: example",
            SchemaConstants.ENTRY_CSN_AT, new CsnFactory( 0 ).newInstance().toString(),
            SchemaConstants.ENTRY_UUID_AT, UUID.randomUUID().toString() );

        store2.add( new AddOperationContext( null, entry ) );

        // lookup the context entry
        String id = store2.getEntryId( suffixDn );
        Entry lookup = store2.fetch( id, suffixDn );

        // the Dn read back must consist of two RDNs
        assertEquals( 2, lookup.getDn().size() );

        // make sure all files are closed so that they can be deleted on Windows.
        //store2.destroy();

        FileUtils.deleteDirectory( wkdir2 );
    }


    /**
     * Exercises the simple configuration properties of a NOT yet initialized
     * partition: before initialize() every setter must be accepted and the
     * matching getter must return the configured value.
     */
    @Test
    public void testSimplePropertiesUnlocked() throws Exception
    {
        MavibotPartition MavibotPartition = new MavibotPartition( schemaManager, dnFactory );
        MavibotPartition.setSyncOnWrite( true ); // for code coverage

        assertNull( MavibotPartition.getAliasIndex() );
        Index<Dn, String> index = new MavibotIndex<Dn>( ApacheSchemaConstants.APACHE_ALIAS_AT_OID, true );
        ( ( Store ) MavibotPartition ).addIndex( index );
        assertNotNull( MavibotPartition.getAliasIndex() );

        assertEquals( MavibotPartition.DEFAULT_CACHE_SIZE, MavibotPartition.getCacheSize() );
        MavibotPartition.setCacheSize( 24 );
        assertEquals( 24, MavibotPartition.getCacheSize() );

        assertNull( MavibotPartition.getPresenceIndex() );
        MavibotPartition
            .addIndex( new MavibotIndex<String>( ApacheSchemaConstants.APACHE_PRESENCE_AT_OID, false ) );
        assertNotNull( MavibotPartition.getPresenceIndex() );

        assertNull( MavibotPartition.getId() );
        MavibotPartition.setId( "foo" );
        assertEquals( "foo", MavibotPartition.getId() );

        assertNull( MavibotPartition.getRdnIndex() );
        MavibotPartition.addIndex( new MavibotRdnIndex() );
        assertNotNull( MavibotPartition.getRdnIndex() );

        assertNull( MavibotPartition.getOneAliasIndex() );
        ( ( Store ) MavibotPartition ).addIndex( new MavibotIndex<Long>( ApacheSchemaConstants.APACHE_ONE_ALIAS_AT_OID, true ) );
        assertNotNull( MavibotPartition.getOneAliasIndex() );

        assertNull( MavibotPartition.getSubAliasIndex() );
        MavibotPartition
            .addIndex( new MavibotIndex<Long>( ApacheSchemaConstants.APACHE_SUB_ALIAS_AT_OID, true ) );
        assertNotNull( MavibotPartition.getSubAliasIndex() );

        assertNull( MavibotPartition.getSuffixDn() );
        MavibotPartition.setSuffixDn( EXAMPLE_COM );
        assertEquals( "dc=example,dc=com", MavibotPartition.getSuffixDn().getName() );
        assertNotNull( MavibotPartition.getSuffixDn() );

        assertFalse( MavibotPartition.getUserIndices().hasNext() );
        MavibotPartition.addIndex( new MavibotIndex<Object>( "2.5.4.3", false ) );
        assertEquals( true,
MavibotPartition.getUserIndices().hasNext() );

        assertNull( MavibotPartition.getPartitionPath() );
        MavibotPartition.setPartitionPath( new File( "." ).toURI() );
        assertEquals( new File( "." ).toURI(), MavibotPartition.getPartitionPath() );

        assertFalse( MavibotPartition.isInitialized() );
        assertTrue( MavibotPartition.isSyncOnWrite() );
        MavibotPartition.setSyncOnWrite( false );
        assertFalse( MavibotPartition.isSyncOnWrite() );

        MavibotPartition.sync();
        // make sure all files are closed so that they can be deleted on Windows.
        MavibotPartition.destroy();
    }


    /**
     * Once the partition has been initialized (in createStore()) every configuration
     * setter must be locked: each mutating call is expected to throw
     * IllegalStateException, while the getters keep returning the values configured
     * during setup. Also verifies the system/user index inventory of the store.
     */
    @Test
    public void testSimplePropertiesLocked() throws Exception
    {
        assertNotNull( store.getAliasIndex() );

        try
        {
            store.addIndex( new MavibotIndex<Dn>( ApacheSchemaConstants.APACHE_ALIAS_AT_OID, true ) );
            fail();
        }
        catch ( IllegalStateException e )
        {
        }

        assertEquals( 10, store.getCacheSize() );

        try
        {
            store.setCacheSize( 24 );
        }
        catch ( IllegalStateException e )
        {
        }

        assertNotNull( store.getPresenceIndex() );

        try
        {
            store.addIndex( new MavibotIndex<String>( ApacheSchemaConstants.APACHE_PRESENCE_AT_OID, false ) );
            fail();
        }
        catch ( IllegalStateException e )
        {
        }

        assertNotNull( store.getId() );

        try
        {
            store.setId( "foo" );
            fail();
        }
        catch ( IllegalStateException e )
        {
        }

        assertNotNull( store.getRdnIndex() );

        try
        {
            store.addIndex( new MavibotRdnIndex() );
            fail();
        }
        catch ( IllegalStateException e )
        {
        }

        assertNotNull( store.getOneAliasIndex() );

        try
        {
            store.addIndex( new MavibotIndex<Long>( ApacheSchemaConstants.APACHE_ONE_ALIAS_AT_OID, true ) );
            fail();
        }
        catch ( IllegalStateException e )
        {
        }

        assertNotNull( store.getSubAliasIndex() );

        try
        {
            store.addIndex( new MavibotIndex<Long>( ApacheSchemaConstants.APACHE_SUB_ALIAS_AT_OID, true ) );
            fail();
        }
        catch ( IllegalStateException e )
        {
        }

        assertNotNull( store.getSuffixDn() );

        try
        {
            store.setSuffixDn( EXAMPLE_COM );
            fail();
        }
        catch ( IllegalStateException e )
        {
        }

        // the initialized partition is expected to expose exactly 8 system indices
        Iterator<String> systemIndices = store.getSystemIndices();

        for ( int i = 0; i < 8; i++ )
        {
            assertTrue( systemIndices.hasNext() );
            assertNotNull( systemIndices.next() );
        }

        assertFalse( systemIndices.hasNext() );
        assertNotNull( store.getSystemIndex( APACHE_ALIAS_AT ) );

        try
        {
            store.getSystemIndex( SN_AT );
            fail();
        }
        catch ( IndexNotFoundException e )
        {
        }

        try
        {
            store.getSystemIndex( DC_AT );
            fail();
        }
        catch ( IndexNotFoundException e )
        {
        }

        assertNotNull( store.getSuffixDn() );

        // exactly the two user indexes added in createStore() (ou and uid)
        Iterator<String> userIndices = store.getUserIndices();
        int count = 0;

        while ( userIndices.hasNext() )
        {
            userIndices.next();
            count++;
        }

        assertEquals( 2, count );
        assertFalse( store.hasUserIndexOn( DC_AT ) );
        assertTrue( store.hasUserIndexOn( OU_AT ) );
        assertTrue( store.hasSystemIndexOn( APACHE_ALIAS_AT ) );

        userIndices = store.getUserIndices();
        assertTrue( userIndices.hasNext() );
        assertNotNull( userIndices.next() );
        assertTrue( userIndices.hasNext() );
        assertNotNull( userIndices.next() );
        assertFalse( userIndices.hasNext() );
        assertNotNull( store.getUserIndex( OU_AT ) );

        try
        {
            store.getUserIndex( SN_AT );
            fail();
        }
        catch ( IndexNotFoundException e )
        {
        }

        try
        {
            store.getUserIndex( DC_AT );
            fail();
        }
        catch ( IndexNotFoundException e )
        {
        }

        assertNotNull( store.getPartitionPath() );

        try
        {
            store.setPartitionPath( new File( "." ).toURI() );
            fail();
        }
        catch ( IllegalStateException e )
        {
        }

        assertTrue( store.isInitialized() );
        assertFalse( store.isSyncOnWrite() );
        store.sync();
    }


    /**
     * Sanity checks on the freshly loaded example data: suffix entry id, DN
     * round-tripping, total entry count and parent/child relationships.
     */
    @Test
    public void testFreshStore() throws Exception
    {
        Dn dn = new Dn( schemaManager, "o=Good Times Co."
);
        assertEquals( Strings.getUUID( 1L ), store.getEntryId( dn ) );
        assertEquals( 11, store.count() );
        assertEquals( "o=Good Times Co.", store.getEntryDn( Strings.getUUID( 1L ) ).getName() );
        assertEquals( dn.getNormName(), store.getEntryDn( Strings.getUUID( 1L ) ).getNormName() );
        assertEquals( dn.getName(), store.getEntryDn( Strings.getUUID( 1L ) ).getName() );

        // note that the suffix entry returns 0 for its parent which does not exist
        assertEquals( Strings.getUUID( 0L ), store.getParentId( store.getEntryId( dn ) ) );
        assertNull( store.getParentId( Strings.getUUID( 0L ) ) );

        // should NOW be allowed
        store.delete( Strings.getUUID( 1L ) );
    }


    /*
    @Test
    public void testEntryOperations() throws Exception
    {
        assertEquals( 3, store.getChildCount( Strings.getUUID( 1L ) ) );

        Cursor<IndexEntry<String, String>> cursor = store.list( Strings.getUUID( 1L ) );
        assertNotNull( cursor );
        cursor.beforeFirst();
        assertTrue( cursor.next() );
        assertEquals( Strings.getUUID( 3L ), cursor.get().getId() );
        assertTrue( cursor.next() );
        assertEquals( Strings.getUUID( 4L ), cursor.get().getId() );
        assertTrue( cursor.next() );
        assertEquals( Strings.getUUID( 2L ), cursor.get().getId() );
        assertFalse( cursor.next() );
        cursor.close();

        assertEquals( 3, store.getChildCount( Strings.getUUID( 1L ) ) );

        store.delete( Strings.getUUID( 2L ) );
        assertEquals( 2, store.getChildCount( Strings.getUUID( 1L ) ) );
        assertEquals( 10, store.count() );

        // add an alias and delete to test dropAliasIndices method
        Dn dn = new Dn( schemaManager, "commonName=Jack Daniels,ou=Apache,ou=Board of Directors,o=Good Times Co." );
        Entry entry = new DefaultEntry( schemaManager, dn,
            "objectClass: top",
            "objectClass: alias",
            "objectClass: extensibleObject",
            "ou: Apache",
            "commonName: Jack Daniels",
            "aliasedObjectName: cn=Jack Daniels,ou=Engineering,o=Good Times Co.",
            "entryCSN", new CsnFactory( 1 ).newInstance().toString(),
            "entryUUID", Strings.getUUID( 12L ).toString() );

        AddOperationContext addContext = new AddOperationContext( null, entry );
        store.add( addContext );

        store.delete( Strings.getUUID( 12L ) ); // drops the alias indices
    }
    */


    /**
     * Adding an entry whose parent does not exist must be rejected with
     * LdapNoSuchObjectException.
     */
    @Test(expected = LdapNoSuchObjectException.class)
    public void testAddWithoutParentId() throws Exception
    {
        Dn dn = new Dn( schemaManager, "cn=Marting King,ou=Not Present,o=Good Times Co." );
        Entry entry = new DefaultEntry( schemaManager, dn,
            "objectClass: top",
            "objectClass: person",
            "objectClass: organizationalPerson",
            "ou: Not Present",
            "cn: Martin King" );

        AddOperationContext addContext = new AddOperationContext( null, entry );
        store.add( addContext );
    }


    /**
     * Adding an entry without any objectClass must be rejected with
     * LdapSchemaViolationException.
     */
    @Test(expected = LdapSchemaViolationException.class)
    public void testAddWithoutObjectClass() throws Exception
    {
        Dn dn = new Dn( schemaManager, "cn=Martin King,ou=Sales,o=Good Times Co." );
        Entry entry = new DefaultEntry( schemaManager, dn,
            "ou: Sales",
            "cn: Martin King" );

        AddOperationContext addContext = new AddOperationContext( null, entry );
        store.add( addContext );
    }


    /**
     * Applies an ADD modification of an extra "ou" value to an existing entry.
     */
    @Test
    public void testModifyAddOUAttrib() throws Exception
    {
        Dn dn = new Dn( schemaManager, "cn=JOhnny WAlkeR,ou=Sales,o=Good Times Co." );

        Attribute attrib = new DefaultAttribute( SchemaConstants.OU_AT, OU_AT );
        attrib.add( "Engineering" );

        Modification add = new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, attrib );

        store.modify( dn, add );
    }


    /**
     * Renames an added entry (new RDN sn=James, deleting the old RDN) and verifies
     * the entry can be looked up under its new Dn.
     */
    @Test
    public void testRename() throws Exception
    {
        Dn dn = new Dn( schemaManager, "cn=Private Ryan,ou=Engineering,o=Good Times Co."
);
        Entry entry = new DefaultEntry( schemaManager, dn,
            "objectClass: top",
            "objectClass: person",
            "objectClass: organizationalPerson",
            "ou: Engineering",
            "cn: Private Ryan",
            "entryCSN", new CsnFactory( 1 ).newInstance().toString(),
            "entryUUID", UUID.randomUUID().toString() );

        AddOperationContext addContext = new AddOperationContext( null, entry );
        store.add( addContext );

        Rdn rdn = new Rdn( "sn=James" );

        store.rename( dn, rdn, true, null );

        dn = new Dn( schemaManager, "sn=James,ou=Engineering,o=Good Times Co." );
        Entry renamed = store.lookup( new LookupOperationContext( session, dn ) );
        assertNotNull( renamed );
        assertEquals( "James", renamed.getDn().getRdn().getValue().getString() );
    }


    /**
     * Same as testRename() but with an escaped '+' in the new RDN; the value read
     * back from the store must be the un-escaped form ("Ja+es").
     */
    @Test
    public void testRenameEscaped() throws Exception
    {
        Dn dn = new Dn( schemaManager, "cn=Private Ryan,ou=Engineering,o=Good Times Co." );
        Entry entry = new DefaultEntry( schemaManager, dn,
            "objectClass: top",
            "objectClass: person",
            "objectClass: organizationalPerson",
            "ou: Engineering",
            "cn: Private Ryan",
            "entryCSN", new CsnFactory( 1 ).newInstance().toString(),
            "entryUUID", UUID.randomUUID().toString() );

        AddOperationContext addContext = new AddOperationContext( null, entry );
        store.add( addContext );

        Rdn rdn = new Rdn( "sn=Ja\\+es" );

        store.rename( dn, rdn, true, null );

        Dn dn2 = new Dn( schemaManager, "sn=Ja\\+es,ou=Engineering,o=Good Times Co." );
        String id = store.getEntryId( dn2 );
        assertNotNull( id );
        Entry entry2 = store.fetch( id, dn2 );
        assertEquals( "Ja+es", entry2.get( "sn" ).getString() );
    }


    /**
     * Exercises moveAndRename() and move(), checking that the sub-alias index
     * count stays constant (3) across the operations.
     */
    @Test
    public void testMove() throws Exception
    {
        Dn childDn = new Dn( schemaManager, "cn=Private Ryan,ou=Engineering,o=Good Times Co." );
        Entry childEntry = new DefaultEntry( schemaManager, childDn,
            "objectClass: top",
            "objectClass: person",
            "objectClass: organizationalPerson",
            "ou", "Engineering",
            "cn", "Private Ryan",
            "entryCSN", new CsnFactory( 1 ).newInstance().toString(),
            "entryUUID", UUID.randomUUID().toString() );

        assertEquals( 3, store.getSubAliasIndex().count() );

        AddOperationContext addContext = new AddOperationContext( null, childEntry );
        store.add( addContext );

        assertEquals( 3, store.getSubAliasIndex().count() );

        Dn parentDn = new Dn( schemaManager, "ou=Sales,o=Good Times Co." );
        Rdn rdn = new Rdn( "cn=Ryan" );

        store.moveAndRename( childDn, parentDn, rdn, childEntry, true );

        // to drop the alias indices
        childDn = new Dn( schemaManager, "commonName=Jim Bean,ou=Apache,ou=Board of Directors,o=Good Times Co." );
        parentDn = new Dn( schemaManager, "ou=Engineering,o=Good Times Co." );

        assertEquals( 3, store.getSubAliasIndex().count() );

        Dn newDn = parentDn.add( childDn.getRdn() );
        store.move( childDn, parentDn, newDn, null );

        assertEquals( 3, store.getSubAliasIndex().count() );
    }


    /**
     * Adds an "sn" value via an ADD modification and verifies the modified entry
     * contains it.
     */
    @Test
    public void testModifyAdd() throws Exception
    {
        Dn dn = new Dn( schemaManager, "cn=JOhnny WAlkeR,ou=Sales,o=Good Times Co." );

        Attribute attrib = new DefaultAttribute( "sn", SN_AT );

        String attribVal = "Walker";
        attrib.add( attribVal );

        Modification add = new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, attrib );

        Entry lookedup = store.fetch( store.getEntryId( dn ), dn );

        lookedup = store.modify( dn, add );

        assertTrue( lookedup.get( "sn" ).contains( attribVal ) );
    }


    /**
     * Replaces the "sn" value via REPLACE modifications, using both the
     * attribute-based and the value-based DefaultModification constructors.
     */
    @Test
    public void testModifyReplace() throws Exception
    {
        Dn dn = new Dn( schemaManager, "cn=JOhnny WAlkeR,ou=Sales,o=Good Times Co."
);
        Attribute attrib = new DefaultAttribute( SchemaConstants.SN_AT, SN_AT );

        String attribVal = "Johnny";
        attrib.add( attribVal );

        Modification add = new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, attrib );

        Entry lookedup = store.fetch( store.getEntryId( dn ), dn );

        assertEquals( "WAlkeR", lookedup.get( "sn" ).get().getString() ); // before replacing

        lookedup = store.modify( dn, add );

        assertEquals( attribVal, lookedup.get( "sn" ).get().getString() );

        // testing the store.modify( dn, mod, entry ) API
        Modification replace = new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, SN_AT, "JWalker" );

        lookedup = store.modify( dn, replace );

        assertEquals( "JWalker", lookedup.get( "sn" ).get().getString() );
        assertEquals( 1, lookedup.get( "sn" ).size() );
    }


    /**
     * Removes the "sn" attribute via REMOVE modifications, re-adds a value, then
     * removes the whole attribute again.
     */
    @Test
    public void testModifyRemove() throws Exception
    {
        Dn dn = new Dn( schemaManager, "cn=JOhnny WAlkeR,ou=Sales,o=Good Times Co." );

        Attribute attrib = new DefaultAttribute( SchemaConstants.SN_AT, SN_AT );

        Modification add = new DefaultModification( ModificationOperation.REMOVE_ATTRIBUTE, attrib );

        Entry lookedup = store.fetch( store.getEntryId( dn ), dn );

        assertNotNull( lookedup.get( "sn" ).get() );

        lookedup = store.modify( dn, add );
        assertNull( lookedup.get( "sn" ) );

        // add an entry for the sake of testing the remove operation
        add = new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, SN_AT, "JWalker" );
        lookedup = store.modify( dn, add );

        assertNotNull( lookedup.get( "sn" ) );

        Modification remove = new DefaultModification( ModificationOperation.REMOVE_ATTRIBUTE, SN_AT );
        lookedup = store.modify( dn, remove );

        assertNull( lookedup.get( "sn" ) );
    }


    /**
     * REPLACE of an indexed attribute ("ou") that the entry does not yet hold:
     * the attribute must appear on the entry after the modification.
     */
    @Test
    public void testModifyReplaceNonExistingIndexAttribute() throws Exception
    {
        Dn dn = new Dn( schemaManager, "cn=Tim B,ou=Sales,o=Good Times Co." );
        Entry entry = new DefaultEntry( schemaManager, dn,
            "objectClass: top",
            "objectClass: person",
            "objectClass: organizationalPerson",
            "cn", "Tim B",
            "entryCSN", new CsnFactory( 1 ).newInstance().toString(),
            "entryUUID", UUID.randomUUID().toString() );

        AddOperationContext addContext = new AddOperationContext( null, entry );
        store.add( addContext );

        Attribute attrib = new DefaultAttribute( SchemaConstants.OU_AT, OU_AT );

        String attribVal = "Marketing";
        attrib.add( attribVal );

        Modification add = new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, attrib );

        Entry lookedup = store.fetch( store.getEntryId( dn ), dn );

        assertNull( lookedup.get( "ou" ) ); // before replacing

        lookedup = store.modify( dn, add );

        assertEquals( attribVal, lookedup.get( "ou" ).get().getString() );
    }


    /**
     * After re-initializing the partition without the "ou" index, the stale "ou"
     * index files left in the working directory must be removed by the store.
     */
    @Test
    @Ignore("Ignore till mavibot file nam extensions are frozen")
    public void testDeleteUnusedIndexFiles() throws Exception
    {
        File ouIndexDbFile = new File( wkdir, SchemaConstants.OU_AT_OID + ".db" );
        File ouIndexTxtFile = new File( wkdir, SchemaConstants.OU_AT_OID + "-ou.txt" );
        File uuidIndexDbFile = new File( wkdir, SchemaConstants.ENTRY_UUID_AT_OID + ".db" );

        assertTrue( ouIndexDbFile.exists() );
        assertTrue( ouIndexTxtFile.exists() );

        // destroy the store to manually start the init phase
        // by keeping the same work dir
        store.destroy();

        // just assert again that ou files exist even after destroying the store
        assertTrue( ouIndexDbFile.exists() );
        assertTrue( ouIndexTxtFile.exists() );

        store = new MavibotPartition( schemaManager, dnFactory );
        store.setId( "example" );
        store.setCacheSize( 10 );
        store.setPartitionPath( wkdir.toURI() );
        store.setSyncOnWrite( false );
        // do not add ou index this time
        store.addIndex( new MavibotIndex( SchemaConstants.UID_AT_OID, false ) );

        Dn suffixDn = new Dn( schemaManager, "o=Good Times Co." );
        store.setSuffixDn( suffixDn );

        // init the store to call deleteUnusedIndexFiles() method
        store.initialize();

        assertFalse( ouIndexDbFile.exists() );
        assertFalse( ouIndexTxtFile.exists() );
    }
}
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. ******************************************************************************/ package org.apache.olingo.odata2.client.core.ep.deserializer; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.InputStream; import java.util.Calendar; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.TimeZone; import org.apache.olingo.odata2.api.edm.EdmEntitySet; import org.apache.olingo.odata2.api.ep.EntityProviderException; import org.apache.olingo.odata2.api.ep.entry.EntryMetadata; import org.apache.olingo.odata2.api.ep.entry.MediaMetadata; import org.apache.olingo.odata2.api.ep.entry.ODataEntry; import org.apache.olingo.odata2.api.ep.feed.ODataFeed; import org.apache.olingo.odata2.api.uri.ExpandSelectTreeNode; import org.apache.olingo.odata2.client.api.ep.DeserializerProperties; import org.apache.olingo.odata2.client.api.ep.EntityStream; import org.apache.olingo.odata2.testutil.mock.MockFacade; 
import org.junit.Test;

/**
 * Tests for reading single OData entries from JSON payloads with
 * JsonEntityDeserializer against the mock Edm of the reference scenario.
 */
public class JsonEntryDeserializerTest extends AbstractDeserializerTest {

  // names of the JSON fixture files used by the simple-entry tests
  private static final String SIMPLE_ENTRY_BUILDING = "JsonBuilding.json";
  private static final String SIMPLE_ENTRY_ROOM = "JsonRoom.json";
  private static final String SIMPLE_ENTRY_EMPLOYEE = "JsonEmployee.json";
  private static final String SIMPLE_ENTRY_TEAM = "JsonTeam.json";
  private static final String INVALID_ENTRY_TEAM_DOUBLE_NAME_PROPERTY = "JsonInvalidTeamDoubleNameProperty.json";
  private static final String SIMPLE_ENTRY_BUILDING_WITHOUT_D = "JsonBuildingWithoutD.json";

  // Negative Test jsonStart
  private static final String negativeJsonStart_1 = "{ \"abc\": {";
  private static final String negativeJsonStart_2 = "{ \"d\": [a: 1, b: 2] }";

  /** A content-only employee payload must yield all 9 employee properties. */
  @Test
  public void readContentOnlyEmployee() throws Exception {
    // prepare
    String content = readFile("JsonEmployeeContentOnly.json");
    EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Employees");
    InputStream contentBody = createContentAsStream(content);
    EntityStream contentStream = new EntityStream();
    contentStream.setContent(contentBody);
    contentStream.setReadProperties(DeserializerProperties.init().build());

    // execute
    JsonEntityDeserializer xec = new JsonEntityDeserializer();
    ODataEntry result = xec.readEntry(entitySet, contentStream);

    // verify
    assertEquals(9, result.getProperties().size());
  }

  /** A content-only room payload must yield all 4 room properties. */
  @Test
  public void readContentOnlyRoom() throws Exception {
    // prepare
    String content = readFile("JsonRoomContentOnly.json");
    EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Rooms");
    InputStream contentBody = createContentAsStream(content);
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(contentBody);
    entityStream.setReadProperties(DeserializerProperties.init().build());

    // execute
    JsonEntityDeserializer xec = new JsonEntityDeserializer();
    ODataEntry result = xec.readEntry(entitySet, entityStream);

    // verify
    assertEquals(4, result.getProperties().size());
  }

  /**
   * A content-only employee payload with an additional deferred link must expose
   * that link as an association URI in the entry metadata.
   */
  @Test
  public void readContentOnlyEmployeeWithAdditionalLink() throws Exception {
    // prepare
    String content = readFile("JsonEmployeeContentOnlyWithAdditionalLink.json");
    EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Employees");
    InputStream contentBody = createContentAsStream(content);
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(contentBody);
    entityStream.setReadProperties(DeserializerProperties.init().build());

    // execute
    JsonEntityDeserializer xec = new JsonEntityDeserializer();
    ODataEntry result = xec.readEntry(entitySet, entityStream);

    // verify
    assertEquals(9, result.getProperties().size());
    List<String> associationUris = result.getMetadata().getAssociationUris("ne_Manager");
    assertEquals(1, associationUris.size());
    assertEquals("http://host:8080/ReferenceScenario.svc/Managers('1')", associationUris.get(0));
  }

  /**
   * A content-only room payload with an additional deferred link must expose
   * that link as an association URI in the entry metadata.
   */
  @Test
  public void readContentOnlyRoomWithAdditionalLink() throws Exception {
    // prepare
    String content = readFile("JsonRoomContentOnlyWithAdditionalLink.json");
    EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Rooms");
    InputStream contentBody = createContentAsStream(content);
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(contentBody);
    entityStream.setReadProperties(DeserializerProperties.init().build());

    // execute
    JsonEntityDeserializer xec = new JsonEntityDeserializer();
    ODataEntry result = xec.readEntry(entitySet, entityStream);

    // verify
    assertEquals(4, result.getProperties().size());
    List<String> associationUris = result.getMetadata().getAssociationUris("nr_Building");
    assertEquals(1, associationUris.size());
    assertEquals("http://host:8080/ReferenceScenario.svc/Buildings('1')", associationUris.get(0));
  }

  /** A payload with an extra closing bracket must be rejected. */
  @Test(expected = EntityProviderException.class)
  public void doubleClosingBracketsAtTheEnd() throws Exception {
    String invalidJson = "{ \"Id\" : \"1\", \"Seats\" : 1, \"Version\" : 1}}";
    EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Rooms");
    InputStream contentBody = createContentAsStream(invalidJson);
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(contentBody);
    entityStream.setReadProperties(DEFAULT_PROPERTIES);

    // execute
    JsonEntityDeserializer xec = new JsonEntityDeserializer();
    xec.readEntry(entitySet, entityStream);
  }

  /** Reads the room fixture and checks properties, association URIs and etag. */
  @Test
  public void readSimpleRoomEntry() throws Exception {
    ODataEntry roomEntry = prepareAndExecuteEntry(SIMPLE_ENTRY_ROOM, "Rooms", DEFAULT_PROPERTIES);

    // verify
    Map<String, Object> properties = roomEntry.getProperties();
    assertEquals(4, properties.size());
    assertEquals("1", properties.get("Id"));
    assertEquals("Room 1", properties.get("Name"));
    assertEquals((short) 1, properties.get("Seats"));
    assertEquals((short) 1, properties.get("Version"));

    List<String> associationUris = roomEntry.getMetadata().getAssociationUris("nr_Employees");
    assertEquals(1, associationUris.size());
    assertEquals("http://localhost:8080/ReferenceScenario.svc/Rooms('1')/nr_Employees", associationUris.get(0));

    associationUris = roomEntry.getMetadata().getAssociationUris("nr_Building");
    assertEquals(1, associationUris.size());
    assertEquals("http://localhost:8080/ReferenceScenario.svc/Rooms('1')/nr_Building", associationUris.get(0));

    EntryMetadata metadata = roomEntry.getMetadata();
    assertEquals("W/\"1\"", metadata.getEtag());
  }

  /**
   * Reads the employee fixture and checks simple and complex properties,
   * association URIs and media metadata.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void readSimpleEmployeeEntry() throws Exception {
    ODataEntry result = prepareAndExecuteEntry(SIMPLE_ENTRY_EMPLOYEE, "Employees", DEFAULT_PROPERTIES);

    // verify
    Map<String, Object> properties = result.getProperties();
    assertEquals(9, properties.size());
    assertEquals("1", properties.get("EmployeeId"));
    assertEquals("Walter Winter", properties.get("EmployeeName"));
    assertEquals("1", properties.get("ManagerId"));
    assertEquals("1", properties.get("RoomId"));
    assertEquals("1", properties.get("TeamId"));
    Map<String,
Object> location = (Map<String, Object>) properties.get("Location");
    assertEquals(2, location.size());
    assertEquals("Germany", location.get("Country"));
    Map<String, Object> city = (Map<String, Object>) location.get("City");
    assertEquals(2, city.size());
    assertEquals("69124", city.get("PostalCode"));
    assertEquals("Heidelberg", city.get("CityName"));
    assertEquals(Integer.valueOf(52), properties.get("Age"));

    // EntryDate is expected as a GMT Calendar (1999-01-01T00:00:00Z)
    Calendar entryDate = (Calendar) properties.get("EntryDate");
    assertEquals(915148800000L, entryDate.getTimeInMillis());
    assertEquals(TimeZone.getTimeZone("GMT"), entryDate.getTimeZone());
    assertEquals("Employees('1')/$value", properties.get("ImageUrl"));

    List<String> associationUris = result.getMetadata().getAssociationUris("ne_Manager");
    assertEquals(1, associationUris.size());
    assertEquals("http://localhost:8080/ReferenceScenario.svc/Employees('1')/ne_Manager", associationUris.get(0));

    associationUris = result.getMetadata().getAssociationUris("ne_Team");
    assertEquals(1, associationUris.size());
    assertEquals("http://localhost:8080/ReferenceScenario.svc/Employees('1')/ne_Team", associationUris.get(0));

    associationUris = result.getMetadata().getAssociationUris("ne_Room");
    assertEquals(1, associationUris.size());
    assertEquals("http://localhost:8080/ReferenceScenario.svc/Employees('1')/ne_Room", associationUris.get(0));

    MediaMetadata mediaMetadata = result.getMediaMetadata();
    assertEquals("image/jpeg", mediaMetadata.getContentType());
    assertEquals("http://localhost:8080/ReferenceScenario.svc/Employees('1')/$value", mediaMetadata.getEditLink());
    assertEquals("Employees('1')/$value", mediaMetadata.getSourceLink());
    assertNull(mediaMetadata.getEtag());
  }

  /** Reads the team fixture and checks properties and association URIs. */
  @Test
  public void readSimpleTeamEntry() throws Exception {
    ODataEntry result = prepareAndExecuteEntry(SIMPLE_ENTRY_TEAM, "Teams", DEFAULT_PROPERTIES);

    Map<String, Object> properties = result.getProperties();
    assertNotNull(properties);
    assertEquals("1", properties.get("Id"));
    assertEquals("Team 1", properties.get("Name"));
    assertEquals(Boolean.FALSE, properties.get("isScrumTeam"));
    assertNull(properties.get("nt_Employees"));

    List<String> associationUris = result.getMetadata().getAssociationUris("nt_Employees");
    assertEquals(1, associationUris.size());
    assertEquals("http://localhost:8080/ReferenceScenario.svc/Teams('1')/nt_Employees", associationUris.get(0));

    checkMediaDataInitial(result.getMediaMetadata());
  }

  /** Reads the building fixture and checks properties and association URIs. */
  @Test
  public void readSimpleBuildingEntry() throws Exception {
    ODataEntry result = prepareAndExecuteEntry(SIMPLE_ENTRY_BUILDING, "Buildings", DEFAULT_PROPERTIES);

    // verify
    Map<String, Object> properties = result.getProperties();
    assertNotNull(properties);
    assertEquals("1", properties.get("Id"));
    assertEquals("Building 1", properties.get("Name"));
    assertNull(properties.get("Image"));
    assertNull(properties.get("nb_Rooms"));

    List<String> associationUris = result.getMetadata().getAssociationUris("nb_Rooms");
    assertEquals(1, associationUris.size());
    assertEquals("http://localhost:8080/ReferenceScenario.svc/Buildings('1')/nb_Rooms", associationUris.get(0));

    checkMediaDataInitial(result.getMediaMetadata());
  }

  /** Same as readSimpleBuildingEntry() but for a payload without the "d" wrapper. */
  @Test
  public void readSimpleBuildingEntryWithoutD() throws Exception {
    ODataEntry result = prepareAndExecuteEntry(SIMPLE_ENTRY_BUILDING_WITHOUT_D, "Buildings", DEFAULT_PROPERTIES);

    // verify
    Map<String, Object> properties = result.getProperties();
    assertNotNull(properties);
    assertEquals("1", properties.get("Id"));
    assertEquals("Building 1", properties.get("Name"));
    assertNull(properties.get("Image"));
    assertNull(properties.get("nb_Rooms"));

    List<String> associationUris = result.getMetadata().getAssociationUris("nb_Rooms");
    assertEquals(1, associationUris.size());
    assertEquals("http://localhost:8080/ReferenceScenario.svc/Buildings('1')/nb_Rooms", associationUris.get(0));

    checkMediaDataInitial(result.getMediaMetadata());
  }

  /** A minimal payload containing only the key property must be readable. */
  @Test
  public void readMinimalEntry() throws Exception {
    final EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Teams");
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(createContentAsStream("{\"Id\":\"99\"}"));
    entityStream.setReadProperties(DEFAULT_PROPERTIES);
    final ODataEntry result = new JsonEntityDeserializer().readEntry(entitySet, entityStream);

    final Map<String, Object> properties = result.getProperties();
    assertNotNull(properties);
    assertEquals(1, properties.size());
    assertEquals("99", properties.get("Id"));

    assertTrue(result.getMetadata().getAssociationUris("nt_Employees").isEmpty());
    checkMediaDataInitial(result.getMediaMetadata());
  }

  /** A JSON null property must be reported as a present key with a null value. */
  @Test
  public void readEntryWithNullProperty() throws Exception {
    final EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Rooms");
    final String content = "{\"Id\":\"99\",\"Seats\":null}";
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(createContentAsStream(content));
    entityStream.setReadProperties(DeserializerProperties.init().build());
    final ODataEntry result = new JsonEntityDeserializer().readEntry(entitySet, entityStream);

    final Map<String, Object> properties = result.getProperties();
    assertNotNull(properties);
    assertEquals(2, properties.size());
    assertEquals("99", properties.get("Id"));
    assertTrue(properties.containsKey("Seats"));
    assertNull(properties.get("Seats"));

    assertTrue(result.getMetadata().getAssociationUris("nr_Employees").isEmpty());
    checkMediaDataInitial(result.getMediaMetadata());
  }

  /** A payload declaring the same property twice must raise DOUBLE_PROPERTY. */
  @Test
  public void readWithDoublePropertyOnTeam() throws Exception {
    // The file contains the name property two times
    try {
      prepareAndExecuteEntry(INVALID_ENTRY_TEAM_DOUBLE_NAME_PROPERTY, "Teams", DEFAULT_PROPERTIES);
      fail("Exception has to be thrown");
    } catch (EntityProviderException e) {
      assertEquals(EntityProviderException.DOUBLE_PROPERTY.getKey(), e.getMessageReference().getKey());
    }
  }

  /** A payload whose __metadata element carries navigation properties must be readable. */
  @Test
  public void entryWithMetadataElementProperties() throws Exception {
    final EdmEntitySet
entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Teams");
    InputStream contentBody = createContentAsStream(
        "{\"__metadata\":{\"properties\":{\"nt_Employees\":{\"associationuri\":"
            + "\"http://some.host.com/service.root/Teams('1')/$links/nt_Employees\"}}},"
            + "\"Id\":\"1\"}");
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(contentBody);
    entityStream.setReadProperties(DEFAULT_PROPERTIES);
    ODataEntry result = new JsonEntityDeserializer().readEntry(entitySet, entityStream);
    checkMediaDataInitial(result.getMediaMetadata());
  }

  /** Asserts that the media metadata of a non-media entry is completely unset. */
  private void checkMediaDataInitial(final MediaMetadata mediaMetadata) {
    assertNull(mediaMetadata.getContentType());
    assertNull(mediaMetadata.getEditLink());
    assertNull(mediaMetadata.getEtag());
    assertNull(mediaMetadata.getSourceLink());
  }

  /** An empty JSON object is not a valid entry. */
  @Test(expected = EntityProviderException.class)
  public void emptyEntry() throws Exception {
    final EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Teams");
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(createContentAsStream("{}"));
    entityStream.setReadProperties(DEFAULT_PROPERTIES);
    new JsonEntityDeserializer().readEntry(entitySet, entityStream);
  }

  /** A payload starting with an unknown wrapper object must be rejected. */
  @Test(expected = EntityProviderException.class)
  public void wrongStart() throws Exception {
    final EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Teams");
    InputStream contentBody = createContentAsStream(negativeJsonStart_1);
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(contentBody);
    entityStream.setReadProperties(DEFAULT_PROPERTIES);
    new JsonEntityDeserializer().readEntry(entitySet, entityStream);
  }

  /** A payload whose "d" wrapper contains invalid JSON must be rejected. */
  @Test(expected = EntityProviderException.class)
  public void wrongStart2() throws Exception {
    final EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Teams");
    InputStream contentBody = createContentAsStream(negativeJsonStart_2);
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(contentBody);
    entityStream.setReadProperties(DEFAULT_PROPERTIES);
    new JsonEntityDeserializer().readEntry(entitySet, entityStream);
  }

  /**
   * Employee with inline entity Room with inline entity Buildings
   * Scenario of 1:1:1 navigation
   * E.g: Employees('1')?$expand=ne_Room/nr_Building
   * @throws Exception
   */
  @Test
  public void employeesEntryWithEmployeeToRoomToBuilding() throws Exception {
    InputStream stream = getFileAsStream("JsonEmployeeInlineRoomBuilding.json");
    assertNotNull(stream);
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(stream);
    entityStream.setReadProperties(DeserializerProperties.init()
        .build());
    EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Employees");
    JsonEntityDeserializer xec = new JsonEntityDeserializer();
    ODataEntry result = xec.readEntry(entitySet, entityStream);
    assertNotNull(result);
    // 9 simple properties plus the expanded ne_Room entry
    assertEquals(10, result.getProperties().size());
    assertEquals(5, ((ODataEntry)result.getProperties().get("ne_Room")).getProperties().size());
    assertEquals(3, ((ODataEntry)((ODataEntry)result.getProperties().get("ne_Room")).getProperties()
        .get("nr_Building")).getProperties().size());
  }

  /**
   * Employee with inline entity Room with inline entity Buildings
   * Scenario of 1:1:1 navigation
   * E.g: Employees('1')?$expand=ne_Room/nr_Building
   * @throws Exception
   */
  @Test
  public void employeesEntryWithEmployeeToRoomToBuildingWithTypeMappings() throws Exception {
    InputStream stream = getFileAsStream("JsonEmployeeInlineRoomBuilding.json");
    assertNotNull(stream);
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(stream);
    // custom Java type mappings for selected properties
    Map<String, Object> typeMappings = new HashMap<String, Object>();
    typeMappings.put("EntryDate", java.sql.Timestamp.class);
    typeMappings.put("Name", String.class);
    entityStream.setReadProperties(DeserializerProperties.init().addTypeMappings(typeMappings)
        .build());
    EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Employees");
    JsonEntityDeserializer xec = new JsonEntityDeserializer();
    ODataEntry result = xec.readEntry(entitySet, entityStream);
    assertNotNull(result);
    assertEquals(10, result.getProperties().size());
    assertEquals(5, ((ODataEntry)result.getProperties().get("ne_Room")).getProperties().size());
    assertEquals(3, ((ODataEntry)((ODataEntry)result.getProperties().get("ne_Room")).getProperties()
        .get("nr_Building")).getProperties().size());
  }

  /**
   * Room has inline entity to Employees and has inline entry To Team
   * Scenario of 1:n:1 navigation
   * E.g: Rooms('1')?$expand=nr_Employees/ne_Team
   * @throws Exception
   */
  @Test
  public void RoomEntryWithInlineEmployeeInlineTeam() throws Exception {
    InputStream stream = getFileAsStream("JsonRoom_InlineEmployeesToTeam.json");
    assertNotNull(stream);
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(stream);
    entityStream.setReadProperties(DeserializerProperties.init()
        .build());
    EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Rooms");
    JsonEntityDeserializer xec = new JsonEntityDeserializer();
    ODataEntry result = xec.readEntry(entitySet, entityStream);
    assertNotNull(result);
    assertEquals(5, result.getProperties().size());
    for (ODataEntry employeeEntry : ((ODataFeed)result.getProperties().get("nr_Employees")).getEntries()) {
      assertEquals(10, employeeEntry.getProperties().size());
      assertEquals(3, ((ODataEntry)employeeEntry.getProperties().get("ne_Team")).getProperties().size());
    }
  }

  /**
   * Room has empty inline entity to Employees and has inline entry To Team
   * E.g: Rooms('10')?$expand=nr_Employees/ne_Team
   * @throws Exception
   */
  @Test
  public void RoomEntryWithEmptyInlineEmployeeInlineTeam() throws Exception {
    InputStream stream = getFileAsStream("JsonRoom_EmptyInlineEmployeesToTeam.json");
    assertNotNull(stream);
    EntityStream entityStream = new EntityStream();
    entityStream.setContent(stream);
    entityStream.setReadProperties(DeserializerProperties.init()
        .build());
    EdmEntitySet entitySet = MockFacade.getMockEdm().getDefaultEntityContainer().getEntitySet("Rooms");
    JsonEntityDeserializer xec = new JsonEntityDeserializer();
    ODataEntry result = xec.readEntry(entitySet, entityStream);
    assertNotNull(result);
    assertEquals(5, result.getProperties().size());
    // the expanded feed is present but empty
    assertEquals(0, ((ODataFeed)result.getProperties().get("nr_Employees")).getEntries().size());
  }

  /**
   * Recursively merges previously collected inline entries/feeds back into the
   * given entry's properties, keyed by navigation-property name + entry id.
   * @param inlineEntries
   * @param feed
   * @param entry
   */
  private void getExpandedData(Map<String, Object> inlineEntries, ODataEntry entry) {
    assertNotNull(entry);
    Map<String, ExpandSelectTreeNode> expandNodes = entry.getExpandSelectTree().getLinks();
    for (Entry<String, ExpandSelectTreeNode> expand : expandNodes.entrySet()) {
      assertNotNull(expand.getKey());
      if (inlineEntries.containsKey(expand.getKey() + entry.getMetadata().getId())) {
        if (inlineEntries.get(expand.getKey() + entry.getMetadata().getId()) instanceof ODataFeed) {
          ODataFeed innerFeed = (ODataFeed) inlineEntries.get(expand.getKey() + entry.getMetadata().getId());
          assertNotNull(innerFeed);
          getExpandedData(inlineEntries, innerFeed);
          entry.getProperties().put(expand.getKey(), innerFeed);
        } else if (inlineEntries.get(expand.getKey() + entry.getMetadata().getId()) instanceof ODataEntry) {
          ODataEntry innerEntry = (ODataEntry) inlineEntries.get(expand.getKey() + entry.getMetadata().getId());
          assertNotNull(innerEntry);
          getExpandedData(inlineEntries, innerEntry);
          entry.getProperties().put(expand.getKey(), innerEntry);
        }
      }
    }
  }

  /**
   * Feed variant of getExpandedData(): applies the entry merge to every entry
   * of the given feed.
   * @param inlineEntries
   * @param feed
   * @param entry
   */
  private void getExpandedData(Map<String, Object> inlineEntries, ODataFeed feed) {
    assertNotNull(feed.getEntries());
    List<ODataEntry> entries = feed.getEntries();
    for (ODataEntry entry : entries) {
      Map<String, ExpandSelectTreeNode> expandNodes = entry.getExpandSelectTree().getLinks();
      for (Entry<String, ExpandSelectTreeNode> expand : expandNodes.entrySet()) {
assertNotNull(expand.getKey()); if (inlineEntries.containsKey(expand.getKey() + entry.getMetadata().getId())) { if (inlineEntries.get(expand.getKey() + entry.getMetadata().getId()) instanceof ODataFeed) { ODataFeed innerFeed = (ODataFeed) inlineEntries.get(expand.getKey() + entry.getMetadata().getId()); assertNotNull(innerFeed); getExpandedData(inlineEntries, innerFeed); feed.getEntries().get(feed.getEntries().indexOf(entry)).getProperties().put(expand.getKey(), innerFeed); } else if (inlineEntries.get(expand.getKey() + entry.getMetadata().getId()) instanceof ODataEntry) { ODataEntry innerEntry = (ODataEntry) inlineEntries.get(expand.getKey() + entry.getMetadata().getId()); assertNotNull(innerEntry); getExpandedData(inlineEntries, innerEntry); feed.getEntries().get(feed.getEntries().indexOf(entry)).getProperties().put(expand.getKey(), innerEntry); } } } } } }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.rollup.job;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
import org.elasticsearch.xpack.core.rollup.RollupField;
import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
import org.elasticsearch.xpack.core.rollup.job.HistogramGroupConfig;
import org.elasticsearch.xpack.core.rollup.job.MetricConfig;
import org.elasticsearch.xpack.core.rollup.job.RollupIndexerJobStats;
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;
import org.mockito.stubbing.Answer;

import java.io.IOException;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static java.util.Collections.singletonList;
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomDateHistogramGroupConfig;
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomGroupConfig;
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomHistogramGroupConfig;
import static org.elasticsearch.xpack.rollup.job.RollupIndexer.createAggregationBuilders;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests for {@code IndexerUtils.processBuckets}: composite-aggregation buckets
 * are turned into rollup {@link IndexRequest}s, covering missing fields,
 * key ordering/hashing of document ids, null keys, missing buckets and timezones.
 */
public class IndexerUtilsTests extends AggregatorTestCase {

    // A metric configured on a field absent from the index must produce no metric
    // value in the rollup doc, while the date_histogram count is still emitted.
    public void testMissingFields() throws IOException {
        String indexName = randomAlphaOfLengthBetween(1, 10);
        RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);

        String timestampField = "the_histo";
        String valueField = "the_avg";

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);

        int numDocs = randomIntBetween(1, 10);
        for (int i = 0; i < numDocs; i++) {
            Document document = new Document();
            long timestamp = ZonedDateTime.now(ZoneOffset.UTC).minusDays(i).toInstant().toEpochMilli();
            document.add(new SortedNumericDocValuesField(timestampField, timestamp));
            document.add(new LongPoint(timestampField, timestamp));
            document.add(new SortedNumericDocValuesField(valueField, randomIntBetween(1, 100)));
            indexWriter.addDocument(document);
        }

        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newIndexSearcher(indexReader);

        DateFieldMapper.DateFieldType timestampFieldType = new DateFieldMapper.DateFieldType(timestampField);
        MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG);

        // Setup the composite agg
        DateHistogramGroupConfig dateHistoGroupConfig = new DateHistogramGroupConfig.CalendarInterval(
            timestampField,
            DateHistogramInterval.DAY
        );
        CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(
            RollupIndexer.AGGREGATION_NAME,
            RollupIndexer.createValueSourceBuilders(dateHistoGroupConfig)
        );
        // "does_not_exist" is deliberately not indexed above.
        MetricConfig metricConfig = new MetricConfig("does_not_exist", singletonList("max"));
        List<AggregationBuilder> metricAgg = createAggregationBuilders(singletonList(metricConfig));
        metricAgg.forEach(compositeBuilder::subAggregation);

        Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, timestampFieldType, valueFieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();
        CompositeAggregation composite = (CompositeAggregation) aggregator.buildTopLevel();
        indexReader.close();
        directory.close();

        final GroupConfig groupConfig = randomGroupConfig(random());
        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo")
            .collect(Collectors.toList());

        assertThat(docs.size(), equalTo(numDocs));
        for (IndexRequest doc : docs) {
            Map<String, Object> map = doc.sourceAsMap();
            assertNull(map.get("does_not_exist"));
            assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(1));
        }
    }

    // Happy path: both the max metric value and the per-bucket count appear in
    // the rollup doc source.
    public void testCorrectFields() throws IOException {
        String indexName = randomAlphaOfLengthBetween(1, 10);
        RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);

        String timestampField = "the_histo";
        String valueField = "the_avg";

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);

        int numDocs = randomIntBetween(1, 10);
        for (int i = 0; i < numDocs; i++) {
            Document document = new Document();
            long timestamp = ZonedDateTime.now(ZoneOffset.UTC).minusDays(i).toInstant().toEpochMilli();
            document.add(new SortedNumericDocValuesField(timestampField, timestamp));
            document.add(new LongPoint(timestampField, timestamp));
            document.add(new SortedNumericDocValuesField(valueField, randomIntBetween(1, 100)));
            indexWriter.addDocument(document);
        }

        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newIndexSearcher(indexReader);

        DateFieldMapper.DateFieldType timestampFieldType = new DateFieldMapper.DateFieldType(timestampField);
        MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG);

        // Setup the composite agg
        // TODO swap this over to DateHistoConfig.Builder once DateInterval is in
        DateHistogramValuesSourceBuilder dateHisto = new DateHistogramValuesSourceBuilder(
            "the_histo." + DateHistogramAggregationBuilder.NAME
        ).field(timestampField).fixedInterval(new DateHistogramInterval("1ms"));

        CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(
            RollupIndexer.AGGREGATION_NAME,
            singletonList(dateHisto)
        );

        MetricConfig metricConfig = new MetricConfig(valueField, singletonList("max"));
        List<AggregationBuilder> metricAgg = createAggregationBuilders(singletonList(metricConfig));
        metricAgg.forEach(compositeBuilder::subAggregation);

        Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, timestampFieldType, valueFieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();
        CompositeAggregation composite = (CompositeAggregation) aggregator.buildTopLevel();
        indexReader.close();
        directory.close();

        final GroupConfig groupConfig = randomGroupConfig(random());
        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo")
            .collect(Collectors.toList());

        assertThat(docs.size(), equalTo(numDocs));
        for (IndexRequest doc : docs) {
            Map<String, Object> map = doc.sourceAsMap();
            assertNotNull(map.get(valueField + "." + MaxAggregationBuilder.NAME + "." + RollupField.VALUE));
            assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(1));
        }
    }

    // Grouping by a numeric terms source (no date_histogram) still rolls up
    // metric values and per-term counts.
    public void testNumericTerms() throws IOException {
        String indexName = randomAlphaOfLengthBetween(1, 10);
        RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);

        String valueField = "the_avg";

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);

        int numDocs = randomIntBetween(1, 10);
        for (int i = 0; i < numDocs; i++) {
            Document document = new Document();
            document.add(new SortedNumericDocValuesField(valueField, i));
            document.add(new LongPoint(valueField, i));
            indexWriter.addDocument(document);
        }

        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newIndexSearcher(indexReader);

        MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG);

        // Setup the composite agg
        TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("the_terms." + TermsAggregationBuilder.NAME).field(valueField);
        CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(
            RollupIndexer.AGGREGATION_NAME,
            singletonList(terms)
        );

        MetricConfig metricConfig = new MetricConfig(valueField, singletonList("max"));
        List<AggregationBuilder> metricAgg = createAggregationBuilders(singletonList(metricConfig));
        metricAgg.forEach(compositeBuilder::subAggregation);

        Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, valueFieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();
        CompositeAggregation composite = (CompositeAggregation) aggregator.buildTopLevel();
        indexReader.close();
        directory.close();

        final GroupConfig groupConfig = randomGroupConfig(random());
        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo")
            .collect(Collectors.toList());

        assertThat(docs.size(), equalTo(numDocs));
        for (IndexRequest doc : docs) {
            Map<String, Object> map = doc.sourceAsMap();
            assertNotNull(map.get(valueField + "." + MaxAggregationBuilder.NAME + "." + RollupField.VALUE));
            assertThat(map.get("the_terms." + TermsAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(1));
        }
    }

    // When a configured metric field ("another_field") has no indexed values,
    // the avg result is absent while the sum result is still written.
    public void testEmptyCounts() throws IOException {
        String indexName = randomAlphaOfLengthBetween(1, 10);
        RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);

        String timestampField = "ts";
        String valueField = "the_avg";

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);

        int numDocs = randomIntBetween(1, 10);
        for (int i = 0; i < numDocs; i++) {
            Document document = new Document();
            long timestamp = ZonedDateTime.now(ZoneOffset.UTC).minusDays(i).toInstant().toEpochMilli();
            document.add(new SortedNumericDocValuesField(timestampField, timestamp));
            document.add(new LongPoint(timestampField, timestamp));
            document.add(new SortedNumericDocValuesField(valueField, randomIntBetween(1, 100)));
            indexWriter.addDocument(document);
        }

        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newIndexSearcher(indexReader);

        DateFieldMapper.DateFieldType timestampFieldType = new DateFieldMapper.DateFieldType(timestampField);
        MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG);

        // Setup the composite agg
        DateHistogramValuesSourceBuilder dateHisto = new DateHistogramValuesSourceBuilder(
            "the_histo." + DateHistogramAggregationBuilder.NAME
        ).field(timestampField).calendarInterval(new DateHistogramInterval("1d"));

        CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(
            RollupIndexer.AGGREGATION_NAME,
            singletonList(dateHisto)
        );

        MetricConfig metricConfig = new MetricConfig("another_field", Arrays.asList("avg", "sum"));
        List<AggregationBuilder> metricAgg = createAggregationBuilders(singletonList(metricConfig));
        metricAgg.forEach(compositeBuilder::subAggregation);

        Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, timestampFieldType, valueFieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();
        CompositeAggregation composite = (CompositeAggregation) aggregator.buildTopLevel();
        indexReader.close();
        directory.close();

        final GroupConfig groupConfig = randomGroupConfig(random());
        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo")
            .collect(Collectors.toList());

        assertThat(docs.size(), equalTo(numDocs));
        for (IndexRequest doc : docs) {
            Map<String, Object> map = doc.sourceAsMap();
            assertNull(map.get("another_field." + AvgAggregationBuilder.NAME + "." + RollupField.VALUE));
            assertNotNull(map.get("another_field." + SumAggregationBuilder.NAME + "." + RollupField.VALUE));
            assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(1));
        }
    }

    // Document ids must be stable regardless of the iteration order of the
    // bucket's composite key map: the key set is shuffled and the id is pinned
    // to a known hash.
    public void testKeyOrdering() {
        CompositeAggregation composite = mock(CompositeAggregation.class);

        when(composite.getBuckets()).thenAnswer((Answer<List<CompositeAggregation.Bucket>>) invocationOnMock -> {
            List<CompositeAggregation.Bucket> foos = new ArrayList<>();

            CompositeAggregation.Bucket bucket = mock(CompositeAggregation.Bucket.class);
            LinkedHashMap<String, Object> keys = new LinkedHashMap<>(3);
            keys.put("foo.date_histogram", 123L);
            keys.put("bar.terms", "baz");
            keys.put("abc.histogram", 1.9);
            keys = shuffleMap(keys, Collections.emptySet());
            when(bucket.getKey()).thenReturn(keys);

            List<Aggregation> list = new ArrayList<>(3);
            InternalNumericMetricsAggregation.SingleValue mockAgg = mock(InternalNumericMetricsAggregation.SingleValue.class);
            when(mockAgg.getName()).thenReturn("123");
            list.add(mockAgg);

            InternalNumericMetricsAggregation.SingleValue mockAgg2 = mock(InternalNumericMetricsAggregation.SingleValue.class);
            when(mockAgg2.getName()).thenReturn("abc");
            list.add(mockAgg2);

            InternalNumericMetricsAggregation.SingleValue mockAgg3 = mock(InternalNumericMetricsAggregation.SingleValue.class);
            when(mockAgg3.getName()).thenReturn("yay");
            list.add(mockAgg3);

            Collections.shuffle(list, random());

            Aggregations aggs = new Aggregations(list);
            when(bucket.getAggregations()).thenReturn(aggs);
            when(bucket.getDocCount()).thenReturn(1L);

            foos.add(bucket);

            return foos;
        });

        GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(1L, "abc"), null);
        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupIndexerJobStats(), groupConfig, "foo")
            .collect(Collectors.toList());
        assertThat(docs.size(), equalTo(1));
        assertThat(docs.get(0).id(), equalTo("foo$c9LcrFqeFW92uN_Z7sv1hA"));
    }

    /*
     * A test to make sure very long keys don't break the hash
     */
    public void testKeyOrderingLong() {
        CompositeAggregation composite = mock(CompositeAggregation.class);

        when(composite.getBuckets()).thenAnswer((Answer<List<CompositeAggregation.Bucket>>) invocationOnMock -> {
            List<CompositeAggregation.Bucket> foos = new ArrayList<>();

            CompositeAggregation.Bucket bucket = mock(CompositeAggregation.Bucket.class);
            LinkedHashMap<String, Object> keys = new LinkedHashMap<>(3);
            keys.put("foo.date_histogram", 123L);

            // Terms value at the maximum Lucene term length stresses the id hash.
            char[] charArray = new char[IndexWriter.MAX_TERM_LENGTH];
            Arrays.fill(charArray, 'a');
            keys.put("bar.terms", new String(charArray));
            keys.put("abc.histogram", 1.9);
            keys = shuffleMap(keys, Collections.emptySet());
            when(bucket.getKey()).thenReturn(keys);

            List<Aggregation> list = new ArrayList<>(3);
            InternalNumericMetricsAggregation.SingleValue mockAgg = mock(InternalNumericMetricsAggregation.SingleValue.class);
            when(mockAgg.getName()).thenReturn("123");
            list.add(mockAgg);

            InternalNumericMetricsAggregation.SingleValue mockAgg2 = mock(InternalNumericMetricsAggregation.SingleValue.class);
            when(mockAgg2.getName()).thenReturn("abc");
            list.add(mockAgg2);

            InternalNumericMetricsAggregation.SingleValue mockAgg3 = mock(InternalNumericMetricsAggregation.SingleValue.class);
            when(mockAgg3.getName()).thenReturn("yay");
            list.add(mockAgg3);

            Collections.shuffle(list, random());

            Aggregations aggs = new Aggregations(list);
            when(bucket.getAggregations()).thenReturn(aggs);
            when(bucket.getDocCount()).thenReturn(1L);

            foos.add(bucket);

            return foos;
        });

        GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(1, "abc"), null);
        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupIndexerJobStats(), groupConfig, "foo")
            .collect(Collectors.toList());
        assertThat(docs.size(), equalTo(1));
        assertThat(docs.get(0).id(), equalTo("foo$VAFKZpyaEqYRPLyic57_qw"));
    }

    // Null composite-key values must still yield a usable, non-empty doc id.
    public void testNullKeys() {
        CompositeAggregation composite = mock(CompositeAggregation.class);

        when(composite.getBuckets()).thenAnswer((Answer<List<CompositeAggregation.Bucket>>) invocationOnMock -> {
            List<CompositeAggregation.Bucket> foos = new ArrayList<>();

            CompositeAggregation.Bucket bucket = mock(CompositeAggregation.Bucket.class);
            LinkedHashMap<String, Object> keys = new LinkedHashMap<>(3);
            keys.put("bar.terms", null);
            keys.put("abc.histogram", null);
            when(bucket.getKey()).thenReturn(keys);

            Aggregations aggs = new Aggregations(Collections.emptyList());
            when(bucket.getAggregations()).thenReturn(aggs);
            when(bucket.getDocCount()).thenReturn(1L);

            foos.add(bucket);

            return foos;
        });

        GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), randomHistogramGroupConfig(random()), null);
        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupIndexerJobStats(), groupConfig, "foo")
            .collect(Collectors.toList());
        assertThat(docs.size(), equalTo(1));
        assertFalse(Strings.isNullOrEmpty(docs.get(0).id()));
    }

    // Documents that omit the grouping field fall into the "missing" bucket;
    // that bucket must be rolled up too, with its own count.
    public void testMissingBuckets() throws IOException {
        String indexName = randomAlphaOfLengthBetween(1, 10);
        RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);

        String metricField = "metric_field";
        String valueField = "value_field";

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);

        int numDocs = 10;

        for (int i = 0; i < numDocs; i++) {
            Document document = new Document();

            // Every other doc omit the valueField, so that we get some null buckets
            if (i % 2 == 0) {
                document.add(new SortedNumericDocValuesField(valueField, i));
                document.add(new LongPoint(valueField, i));
            }
            document.add(new SortedNumericDocValuesField(metricField, i));
            document.add(new LongPoint(metricField, i));
            indexWriter.addDocument(document);
        }

        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newIndexSearcher(indexReader);

        MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG);
        MappedFieldType metricFieldType = new NumberFieldMapper.NumberFieldType(metricField, NumberFieldMapper.NumberType.LONG);

        // Setup the composite agg
        TermsGroupConfig termsGroupConfig = new TermsGroupConfig(valueField);
        CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(
            RollupIndexer.AGGREGATION_NAME,
            RollupIndexer.createValueSourceBuilders(termsGroupConfig)
        ).size(numDocs * 2);

        MetricConfig metricConfig = new MetricConfig(metricField, singletonList("max"));
        List<AggregationBuilder> metricAgg = createAggregationBuilders(singletonList(metricConfig));
        metricAgg.forEach(compositeBuilder::subAggregation);

        Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, valueFieldType, metricFieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();
        CompositeAggregation composite = (CompositeAggregation) aggregator.buildTopLevel();
        indexReader.close();
        directory.close();

        final GroupConfig groupConfig = randomGroupConfig(random());
        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo")
            .collect(Collectors.toList());

        // 5 distinct term values + 1 "missing" bucket = 6 rollup docs.
        assertThat(docs.size(), equalTo(6));
        for (IndexRequest doc : docs) {
            Map<String, Object> map = doc.sourceAsMap();
            Object value = map.get(valueField + "." + TermsAggregationBuilder.NAME + "." + RollupField.VALUE);
            if (value == null) {
                // The missing bucket collects the 5 docs without the value field.
                assertThat(map.get(valueField + "." + TermsAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(5));
            } else {
                assertThat(map.get(valueField + "." + TermsAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(1));
            }
        }
    }

    // A non-UTC timezone shifts day-bucket boundaries: the two fixed timestamps
    // land in different -01:00 days, and the rollup timestamps reflect that offset.
    public void testTimezone() throws IOException {
        String indexName = randomAlphaOfLengthBetween(1, 10);
        RollupIndexerJobStats stats = new RollupIndexerJobStats(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);

        String timestampField = "the_histo";
        String valueField = "the_avg";

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);

        {
            Document document = new Document();
            long timestamp = 1443659400000L; // 2015-10-01T00:30:00Z
            document.add(new SortedNumericDocValuesField(timestampField, timestamp));
            document.add(new LongPoint(timestampField, timestamp));
            document.add(new SortedNumericDocValuesField(valueField, randomIntBetween(1, 100)));
            indexWriter.addDocument(document);
        }
        {
            Document document = new Document();
            long timestamp = 1443663000000L; // 2015-10-01T01:30:00Z
            document.add(new SortedNumericDocValuesField(timestampField, timestamp));
            document.add(new LongPoint(timestampField, timestamp));
            document.add(new SortedNumericDocValuesField(valueField, randomIntBetween(1, 100)));
            indexWriter.addDocument(document);
        }

        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newIndexSearcher(indexReader);

        DateFieldMapper.DateFieldType timestampFieldType = new DateFieldMapper.DateFieldType(timestampField);
        MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG);

        // Setup the composite agg
        DateHistogramValuesSourceBuilder dateHisto = new DateHistogramValuesSourceBuilder(
            "the_histo." + DateHistogramAggregationBuilder.NAME
        ).field(timestampField)
            .calendarInterval(new DateHistogramInterval("1d"))
            // adds a timezone so that we aren't on default UTC
            .timeZone(ZoneId.of("-01:00", ZoneId.SHORT_IDS));

        CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(
            RollupIndexer.AGGREGATION_NAME,
            singletonList(dateHisto)
        );

        MetricConfig metricConfig = new MetricConfig(valueField, singletonList("max"));
        List<AggregationBuilder> metricAgg = createAggregationBuilders(singletonList(metricConfig));
        metricAgg.forEach(compositeBuilder::subAggregation);

        Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, timestampFieldType, valueFieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();
        CompositeAggregation composite = (CompositeAggregation) aggregator.buildTopLevel();
        indexReader.close();
        directory.close();

        final GroupConfig groupConfig = randomGroupConfig(random());
        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo")
            .collect(Collectors.toList());

        assertThat(docs.size(), equalTo(2));

        Map<String, Object> map = docs.get(0).sourceAsMap();
        assertNotNull(map.get(valueField + "." + MaxAggregationBuilder.NAME + "." + RollupField.VALUE));
        assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(1));
        // 2015-09-30T00:00:00.000-01:00
        assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.TIMESTAMP), equalTo(1443574800000L));

        map = docs.get(1).sourceAsMap();
        assertNotNull(map.get(valueField + "." + MaxAggregationBuilder.NAME + "." + RollupField.VALUE));
        assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(1));
        // 2015-10-01T00:00:00.000-01:00
        assertThat(map.get("the_histo." + DateHistogramAggregationBuilder.NAME + "." + RollupField.TIMESTAMP), equalTo(1443661200000L));
    }

    // Minimal stand-in shape for a composite aggregation used by the mocks above.
    interface Mock {
        List<? extends CompositeAggregation.Bucket> getBuckets();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.codec.binary;

import org.apache.commons.codec.BinaryDecoder;
import org.apache.commons.codec.BinaryEncoder;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.EncoderException;

/**
 * Converts between byte arrays and strings of "0"s and "1"s.
 *
 * <p>This class is immutable and thread-safe.</p>
 *
 * TODO: may want to add more bit vector functions like and/or/xor/nand
 * TODO: also might be good to generate boolean[] from byte[] et cetera.
 *
 * @since 1.3
 */
public class BinaryCodec implements BinaryDecoder, BinaryEncoder {
    /*
     * tried to avoid using ArrayUtils to minimize dependencies while using these empty arrays - dep is just not worth
     * it.
     */
    /** Empty char array. */
    private static final char[] EMPTY_CHAR_ARRAY = {};

    /** Empty byte array. */
    private static final byte[] EMPTY_BYTE_ARRAY = {};

    /** Mask for bit 0 of a byte. */
    private static final int BIT_0 = 1;

    /** Mask for bit 1 of a byte. */
    private static final int BIT_1 = 0x02;

    /** Mask for bit 2 of a byte. */
    private static final int BIT_2 = 0x04;

    /** Mask for bit 3 of a byte. */
    private static final int BIT_3 = 0x08;

    /** Mask for bit 4 of a byte. */
    private static final int BIT_4 = 0x10;

    /** Mask for bit 5 of a byte. */
    private static final int BIT_5 = 0x20;

    /** Mask for bit 6 of a byte. */
    private static final int BIT_6 = 0x40;

    /** Mask for bit 7 of a byte. */
    private static final int BIT_7 = 0x80;

    /** All eight bit masks, least significant bit first. */
    private static final int[] BITS = {BIT_0, BIT_1, BIT_2, BIT_3, BIT_4, BIT_5, BIT_6, BIT_7};

    /**
     * Decodes a byte array where each byte represents an ASCII '0' or '1'.
     *
     * @param ascii
     *                  each byte represents an ASCII '0' or '1'
     * @return the raw encoded binary where each bit corresponds to a byte in the byte array argument
     */
    public static byte[] fromAscii(final byte[] ascii) {
        if (isEmpty(ascii)) {
            return EMPTY_BYTE_ARRAY;
        }
        final int asciiLength = ascii.length;
        // get length/8 times bytes with 3 bit shifts to the right of the length
        final byte[] l_raw = new byte[asciiLength >> 3];
        /*
         * We decr index jj by 8 as we go along to not recompute indices using multiplication every time inside the
         * loop.
         */
        for (int ii = 0, jj = asciiLength - 1; ii < l_raw.length; ii++, jj -= 8) {
            for (int bits = 0; bits < BITS.length; ++bits) {
                if (ascii[jj - bits] == '1') {
                    l_raw[ii] |= BITS[bits];
                }
            }
        }
        return l_raw;
    }

    // ------------------------------------------------------------------------
    //
    // static codec operations
    //
    // ------------------------------------------------------------------------
    /**
     * Decodes a char array where each char represents an ASCII '0' or '1'.
     *
     * @param ascii
     *                  each char represents an ASCII '0' or '1'
     * @return the raw encoded binary where each bit corresponds to a char in the char array argument
     */
    public static byte[] fromAscii(final char[] ascii) {
        if (ascii == null || ascii.length == 0) {
            return EMPTY_BYTE_ARRAY;
        }
        final int asciiLength = ascii.length;
        // get length/8 times bytes with 3 bit shifts to the right of the length
        final byte[] l_raw = new byte[asciiLength >> 3];
        /*
         * We decr index jj by 8 as we go along to not recompute indices using multiplication every time inside the
         * loop.
         */
        for (int ii = 0, jj = asciiLength - 1; ii < l_raw.length; ii++, jj -= 8) {
            for (int bits = 0; bits < BITS.length; ++bits) {
                if (ascii[jj - bits] == '1') {
                    l_raw[ii] |= BITS[bits];
                }
            }
        }
        return l_raw;
    }

    /**
     * Returns {@code true} if the given array is {@code null} or empty (size 0.)
     *
     * @param array
     *                  the source array
     * @return {@code true} if the given array is {@code null} or empty (size 0.)
     *
     * @since 1.16 change visibility to public
     */
    static boolean isEmpty(final byte[] array) {
        return array == null || array.length == 0;
    }

    /**
     * Converts an array of raw binary data into an array of ASCII 0 and 1 character bytes - each byte is a truncated
     * char.
     *
     * @param raw
     *                  the raw binary data to convert
     * @return an array of 0 and 1 character bytes for each bit of the argument
     * @see org.apache.commons.codec.BinaryEncoder#encode(byte[])
     */
    public static byte[] toAsciiBytes(final byte[] raw) {
        if (isEmpty(raw)) {
            return EMPTY_BYTE_ARRAY;
        }
        final int rawLength = raw.length;
        // get 8 times the bytes with 3 bit shifts to the left of the length
        final byte[] l_ascii = new byte[rawLength << 3];
        /*
         * We decr index jj by 8 as we go along to not recompute indices using multiplication every time inside the
         * loop.
         */
        for (int ii = 0, jj = l_ascii.length - 1; ii < rawLength; ii++, jj -= 8) {
            for (int bits = 0; bits < BITS.length; ++bits) {
                if ((raw[ii] & BITS[bits]) == 0) {
                    l_ascii[jj - bits] = '0';
                } else {
                    l_ascii[jj - bits] = '1';
                }
            }
        }
        return l_ascii;
    }

    /**
     * Converts an array of raw binary data into an array of ASCII 0 and 1 characters.
* * @param raw * the raw binary data to convert * @return an array of 0 and 1 characters for each bit of the argument * @see org.apache.commons.codec.BinaryEncoder#encode(byte[]) */ public static char[] toAsciiChars(final byte[] raw) { if (isEmpty(raw)) { return EMPTY_CHAR_ARRAY; } final int rawLength = raw.length; // get 8 times the bytes with 3 bit shifts to the left of the length final char[] l_ascii = new char[rawLength << 3]; /* * We decr index jj by 8 as we go along to not recompute indices using multiplication every time inside the * loop. */ for (int ii = 0, jj = l_ascii.length - 1; ii < rawLength; ii++, jj -= 8) { for (int bits = 0; bits < BITS.length; ++bits) { if ((raw[ii] & BITS[bits]) == 0) { l_ascii[jj - bits] = '0'; } else { l_ascii[jj - bits] = '1'; } } } return l_ascii; } /** * Converts an array of raw binary data into a String of ASCII 0 and 1 characters. * * @param raw * the raw binary data to convert * @return a String of 0 and 1 characters representing the binary data * @see org.apache.commons.codec.BinaryEncoder#encode(byte[]) */ public static String toAsciiString(final byte[] raw) { return new String(toAsciiChars(raw)); } /** * Decodes a byte array where each byte represents an ASCII '0' or '1'. * * @param ascii * each byte represents an ASCII '0' or '1' * @return the raw encoded binary where each bit corresponds to a byte in the byte array argument * @see org.apache.commons.codec.Decoder#decode(Object) */ @Override public byte[] decode(final byte[] ascii) { return fromAscii(ascii); } /** * Decodes a byte array where each byte represents an ASCII '0' or '1'. 
* * @param ascii * each byte represents an ASCII '0' or '1' * @return the raw encoded binary where each bit corresponds to a byte in the byte array argument * @throws DecoderException * if argument is not a byte[], char[] or String * @see org.apache.commons.codec.Decoder#decode(Object) */ @Override public Object decode(final Object ascii) throws DecoderException { if (ascii == null) { return EMPTY_BYTE_ARRAY; } if (ascii instanceof byte[]) { return fromAscii((byte[]) ascii); } if (ascii instanceof char[]) { return fromAscii((char[]) ascii); } if (ascii instanceof String) { return fromAscii(((String) ascii).toCharArray()); } throw new DecoderException("argument not a byte array"); } /** * Converts an array of raw binary data into an array of ASCII 0 and 1 characters. * * @param raw * the raw binary data to convert * @return 0 and 1 ASCII character bytes one for each bit of the argument * @see org.apache.commons.codec.BinaryEncoder#encode(byte[]) */ @Override public byte[] encode(final byte[] raw) { return toAsciiBytes(raw); } /** * Converts an array of raw binary data into an array of ASCII 0 and 1 chars. * * @param raw * the raw binary data to convert * @return 0 and 1 ASCII character chars one for each bit of the argument * @throws EncoderException * if the argument is not a byte[] * @see org.apache.commons.codec.Encoder#encode(Object) */ @Override public Object encode(final Object raw) throws EncoderException { if (!(raw instanceof byte[])) { throw new EncoderException("argument not a byte array"); } return toAsciiChars((byte[]) raw); } /** * Decodes a String where each char of the String represents an ASCII '0' or '1'. * * @param ascii * String of '0' and '1' characters * @return the raw encoded binary where each bit corresponds to a byte in the byte array argument * @see org.apache.commons.codec.Decoder#decode(Object) */ public byte[] toByteArray(final String ascii) { if (ascii == null) { return EMPTY_BYTE_ARRAY; } return fromAscii(ascii.toCharArray()); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.server; import com.facebook.presto.PagesIndexPageSorter; import com.facebook.presto.block.BlockEncodingManager; import com.facebook.presto.client.QueryResults; import com.facebook.presto.connector.ConnectorManager; import com.facebook.presto.connector.informationSchema.InformationSchemaModule; import com.facebook.presto.connector.jmx.JmxConnectorFactory; import com.facebook.presto.connector.system.SystemTablesModule; import com.facebook.presto.event.query.QueryCompletionEvent; import com.facebook.presto.event.query.QueryCreatedEvent; import com.facebook.presto.event.query.QueryMonitor; import com.facebook.presto.event.query.SplitCompletionEvent; import com.facebook.presto.execution.LocationFactory; import com.facebook.presto.execution.QueryInfo; import com.facebook.presto.execution.RemoteTaskFactory; import com.facebook.presto.execution.SqlTaskManager; import com.facebook.presto.execution.TaskExecutor; import com.facebook.presto.execution.TaskInfo; import com.facebook.presto.execution.TaskManager; import com.facebook.presto.execution.TaskManagerConfig; import com.facebook.presto.failureDetector.FailureDetector; import com.facebook.presto.failureDetector.FailureDetectorModule; import com.facebook.presto.index.IndexManager; import com.facebook.presto.metadata.CatalogManager; import com.facebook.presto.metadata.CatalogManagerConfig; import com.facebook.presto.metadata.HandleJsonModule; import 
com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.MetadataManager; import com.facebook.presto.metadata.NodeVersion; import com.facebook.presto.operator.ExchangeClient; import com.facebook.presto.operator.ExchangeClientConfig; import com.facebook.presto.operator.ExchangeClientFactory; import com.facebook.presto.operator.ForExchange; import com.facebook.presto.operator.ForScheduler; import com.facebook.presto.operator.index.IndexJoinLookupStats; import com.facebook.presto.spi.ConnectorFactory; import com.facebook.presto.spi.ConnectorPageSinkProvider; import com.facebook.presto.spi.ConnectorPageSourceProvider; import com.facebook.presto.spi.ConnectorSplit; import com.facebook.presto.spi.PageSorter; import com.facebook.presto.spi.block.BlockEncodingFactory; import com.facebook.presto.spi.block.BlockEncodingSerde; import com.facebook.presto.spi.block.FixedWidthBlockEncoding; import com.facebook.presto.spi.block.LazySliceArrayBlockEncoding; import com.facebook.presto.spi.block.SliceArrayBlockEncoding; import com.facebook.presto.spi.block.VariableWidthBlockEncoding; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.TypeManager; import com.facebook.presto.split.PageSinkManager; import com.facebook.presto.split.PageSinkProvider; import com.facebook.presto.split.PageSourceManager; import com.facebook.presto.split.PageSourceProvider; import com.facebook.presto.sql.Serialization.ExpressionDeserializer; import com.facebook.presto.sql.Serialization.ExpressionSerializer; import com.facebook.presto.sql.Serialization.FunctionCallDeserializer; import com.facebook.presto.sql.gen.ExpressionCompiler; import com.facebook.presto.sql.parser.SqlParser; import com.facebook.presto.sql.parser.SqlParserOptions; import com.facebook.presto.sql.planner.CompilerConfig; import com.facebook.presto.sql.planner.LocalExecutionPlanner; import com.facebook.presto.sql.planner.PlanOptimizersFactory; import 
com.facebook.presto.sql.planner.optimizations.PlanOptimizer; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.FunctionCall; import com.facebook.presto.type.TypeDeserializer; import com.facebook.presto.type.TypeRegistry; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableSet; import com.google.inject.Binder; import com.google.inject.Provides; import com.google.inject.Scopes; import com.google.inject.TypeLiteral; import com.google.inject.multibindings.MapBinder; import com.google.inject.multibindings.Multibinder; import io.airlift.configuration.AbstractConfigurationAwareModule; import io.airlift.discovery.client.ServiceDescriptor; import io.airlift.slice.Slice; import javax.inject.Singleton; import java.util.List; import java.util.Set; import java.util.concurrent.ScheduledExecutorService; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Strings.nullToEmpty; import static com.google.inject.multibindings.MapBinder.newMapBinder; import static com.google.inject.multibindings.Multibinder.newSetBinder; import static io.airlift.concurrent.Threads.daemonThreadsNamed; import static io.airlift.configuration.ConfigurationModule.bindConfig; import static io.airlift.discovery.client.DiscoveryBinder.discoveryBinder; import static io.airlift.event.client.EventBinder.eventBinder; import static io.airlift.http.client.HttpClientBinder.httpClientBinder; import static io.airlift.jaxrs.JaxrsBinder.jaxrsBinder; import static io.airlift.json.JsonBinder.jsonBinder; import static io.airlift.json.JsonCodecBinder.jsonCodecBinder; import static java.util.concurrent.Executors.newScheduledThreadPool; import static org.weakref.jmx.guice.ExportBinder.newExporter; public class ServerMainModule extends AbstractConfigurationAwareModule { private final SqlParserOptions sqlParserOptions; public 
ServerMainModule(SqlParserOptions sqlParserOptions)
{
    // Options are provided by the server launcher; the module cannot operate without them.
    this.sqlParserOptions = checkNotNull(sqlParserOptions, "sqlParserOptions is null");
}

/**
 * Wires the core server components (task execution, exchange, metadata, connectors,
 * serialization, monitoring, resources) into the Guice binder. Coordinator-only pieces are
 * partially gated on {@code ServerConfig#isCoordinator()}; see the TODOs below.
 */
@Override
protected void setup(Binder binder)
{
    ServerConfig serverConfig = buildConfigObject(ServerConfig.class);

    // TODO: this should only be installed if this is a coordinator
    binder.install(new CoordinatorModule());

    if (serverConfig.isCoordinator()) {
        // Advertise the coordinator role in service discovery.
        discoveryBinder(binder).bindHttpAnnouncement("presto-coordinator");
    }

    binder.bind(SqlParser.class).in(Scopes.SINGLETON);
    binder.bind(SqlParserOptions.class).toInstance(sqlParserOptions);

    bindFailureDetector(binder, serverConfig.isCoordinator());

    jaxrsBinder(binder).bind(ThrowableMapper.class);

    // task execution
    jaxrsBinder(binder).bind(TaskResource.class);
    binder.bind(TaskManager.class).to(SqlTaskManager.class).in(Scopes.SINGLETON);
    newExporter(binder).export(TaskManager.class).withGeneratedName();
    binder.bind(TaskExecutor.class).in(Scopes.SINGLETON);
    newExporter(binder).export(TaskExecutor.class).withGeneratedName();
    binder.bind(LocalExecutionPlanner.class).in(Scopes.SINGLETON);
    bindConfig(binder).to(CompilerConfig.class);
    binder.bind(ExpressionCompiler.class).in(Scopes.SINGLETON);
    newExporter(binder).export(ExpressionCompiler.class).withGeneratedName();
    bindConfig(binder).to(TaskManagerConfig.class);
    binder.bind(IndexJoinLookupStats.class).in(Scopes.SINGLETON);
    newExporter(binder).export(IndexJoinLookupStats.class).withGeneratedName();
    binder.bind(AsyncHttpExecutionMBean.class).in(Scopes.SINGLETON);
    newExporter(binder).export(AsyncHttpExecutionMBean.class).withGeneratedName();
    jsonCodecBinder(binder).bindJsonCodec(TaskInfo.class);
    jaxrsBinder(binder).bind(PagesResponseWriter.class);

    // exchange client
    binder.bind(new TypeLiteral<Supplier<ExchangeClient>>() {}).to(ExchangeClientFactory.class).in(Scopes.SINGLETON);
    httpClientBinder(binder).bindHttpClient("exchange", ForExchange.class).withTracing();
    bindConfig(binder).to(ExchangeClientConfig.class);
    binder.bind(ExchangeExecutionMBean.class).in(Scopes.SINGLETON);
    newExporter(binder).export(ExchangeExecutionMBean.class).withGeneratedName();

    // execution
    binder.bind(LocationFactory.class).to(HttpLocationFactory.class).in(Scopes.SINGLETON);
    binder.bind(RemoteTaskFactory.class).to(HttpRemoteTaskFactory.class).in(Scopes.SINGLETON);
    newExporter(binder).export(RemoteTaskFactory.class).withGeneratedName();
    httpClientBinder(binder).bindHttpClient("scheduler", ForScheduler.class).withTracing();

    // data stream provider
    binder.bind(PageSourceManager.class).in(Scopes.SINGLETON);
    binder.bind(PageSourceProvider.class).to(PageSourceManager.class).in(Scopes.SINGLETON);
    newSetBinder(binder, ConnectorPageSourceProvider.class);

    // page sink provider
    binder.bind(PageSinkManager.class).in(Scopes.SINGLETON);
    binder.bind(PageSinkProvider.class).to(PageSinkManager.class).in(Scopes.SINGLETON);
    newSetBinder(binder, ConnectorPageSinkProvider.class);

    // metadata
    binder.bind(CatalogManager.class).in(Scopes.SINGLETON);
    bindConfig(binder).to(CatalogManagerConfig.class);
    binder.bind(MetadataManager.class).in(Scopes.SINGLETON);
    binder.bind(Metadata.class).to(MetadataManager.class).in(Scopes.SINGLETON);

    // type
    binder.bind(TypeRegistry.class).in(Scopes.SINGLETON);
    binder.bind(TypeManager.class).to(TypeRegistry.class).in(Scopes.SINGLETON);
    jsonBinder(binder).addDeserializerBinding(Type.class).to(TypeDeserializer.class);
    newSetBinder(binder, Type.class);

    // index manager
    binder.bind(IndexManager.class).in(Scopes.SINGLETON);

    // handle resolver
    binder.install(new HandleJsonModule());

    // connector
    binder.bind(ConnectorManager.class).in(Scopes.SINGLETON);
    // Map of connector name -> factory; plugins register additional entries in this MapBinder.
    MapBinder<String, ConnectorFactory> connectorFactoryBinder = newMapBinder(binder, String.class, ConnectorFactory.class);

    // jmx connector
    connectorFactoryBinder.addBinding("jmx").to(JmxConnectorFactory.class);

    // information schema
    binder.install(new InformationSchemaModule());

    // system tables
    binder.install(new SystemTablesModule());

    // splits
    jsonCodecBinder(binder).bindJsonCodec(TaskUpdateRequest.class);
    jsonCodecBinder(binder).bindJsonCodec(ConnectorSplit.class);
    jsonBinder(binder).addSerializerBinding(Slice.class).to(SliceSerializer.class);
    jsonBinder(binder).addDeserializerBinding(Slice.class).to(SliceDeserializer.class);
    jsonBinder(binder).addSerializerBinding(Expression.class).to(ExpressionSerializer.class);
    jsonBinder(binder).addDeserializerBinding(Expression.class).to(ExpressionDeserializer.class);
    jsonBinder(binder).addDeserializerBinding(FunctionCall.class).to(FunctionCallDeserializer.class);

    // query monitor
    binder.bind(QueryMonitor.class).in(Scopes.SINGLETON);
    eventBinder(binder).bindEventClient(QueryCreatedEvent.class);
    eventBinder(binder).bindEventClient(QueryCompletionEvent.class);
    eventBinder(binder).bindEventClient(SplitCompletionEvent.class);

    // Determine the NodeVersion: explicit config wins, else read it from the jar manifest.
    String prestoVersion = serverConfig.getPrestoVersion();
    if (prestoVersion == null) {
        prestoVersion = detectPrestoVersion();
    }
    checkState(prestoVersion != null, "presto.version must be provided when it cannot be automatically determined");

    NodeVersion nodeVersion = new NodeVersion(prestoVersion);
    binder.bind(NodeVersion.class).toInstance(nodeVersion);

    // presto announcement
    discoveryBinder(binder).bindHttpAnnouncement("presto")
            .addProperty("node_version", nodeVersion.toString())
            .addProperty("coordinator", String.valueOf(serverConfig.isCoordinator()))
            .addProperty("datasources", nullToEmpty(serverConfig.getDataSources()));

    // statement resource
    jsonCodecBinder(binder).bindJsonCodec(QueryInfo.class);
    jsonCodecBinder(binder).bindJsonCodec(TaskInfo.class);
    jsonCodecBinder(binder).bindJsonCodec(QueryResults.class);
    jaxrsBinder(binder).bind(StatementResource.class);

    // execute resource
    jaxrsBinder(binder).bind(ExecuteResource.class);
    httpClientBinder(binder).bindHttpClient("execute", ForExecute.class);

    // plugin manager
    binder.bind(PluginManager.class).in(Scopes.SINGLETON);
    bindConfig(binder).to(PluginManagerConfig.class);

    // optimizers
    binder.bind(new TypeLiteral<List<PlanOptimizer>>() {}).toProvider(PlanOptimizersFactory.class).in(Scopes.SINGLETON);

    // block encodings
    binder.bind(BlockEncodingManager.class).in(Scopes.SINGLETON);
    binder.bind(BlockEncodingSerde.class).to(BlockEncodingManager.class).in(Scopes.SINGLETON);
    Multibinder<BlockEncodingFactory<?>> blockEncodingFactoryBinder = newSetBinder(binder, new TypeLiteral<BlockEncodingFactory<?>>() {});
    blockEncodingFactoryBinder.addBinding().toInstance(VariableWidthBlockEncoding.FACTORY);
    blockEncodingFactoryBinder.addBinding().toInstance(FixedWidthBlockEncoding.FACTORY);
    blockEncodingFactoryBinder.addBinding().toInstance(SliceArrayBlockEncoding.FACTORY);
    blockEncodingFactoryBinder.addBinding().toInstance(LazySliceArrayBlockEncoding.FACTORY);

    // thread visualizer
    jaxrsBinder(binder).bind(ThreadResource.class);

    // thread execution visualizer
    jaxrsBinder(binder).bind(QueryExecutionResource.class);

    // PageSorter
    binder.bind(PageSorter.class).to(PagesIndexPageSorter.class).in(Scopes.SINGLETON);
}

/**
 * Provides the scheduled executor used by exchange clients; pool size comes from config.
 */
@Provides
@Singleton
@ForExchange
public ScheduledExecutorService createExchangeExecutor(ExchangeClientConfig config)
{
    return newScheduledThreadPool(config.getClientThreads(), daemonThreadsNamed("exchange-client-%s"));
}

/**
 * Provides the scheduled executor used for async HTTP response notifications.
 */
@Provides
@Singleton
@ForAsyncHttpResponse
public static ScheduledExecutorService createAsyncHttpResponseExecutor(TaskManagerConfig config)
{
    return newScheduledThreadPool(config.getHttpNotificationThreads(), daemonThreadsNamed("async-http-response-%s"));
}

/**
 * Derives "title:version" from the jar manifest of PrestoServer, or null if either
 * attribute is missing (e.g. when running from an IDE without a packaged manifest).
 */
private static String detectPrestoVersion()
{
    String title = PrestoServer.class.getPackage().getImplementationTitle();
    String version = PrestoServer.class.getPackage().getImplementationVersion();
    return ((title == null) || (version == null)) ? null : (title + ":" + version);
}

/**
 * Installs the real failure detector on coordinators; workers get a no-op stub that
 * reports no failed services.
 */
private static void bindFailureDetector(Binder binder, boolean coordinator)
{
    // TODO: this is a hack until the coordinator module works correctly
    if (coordinator) {
        binder.install(new FailureDetectorModule());
        jaxrsBinder(binder).bind(NodeResource.class);
    }
    else {
        binder.bind(FailureDetector.class).toInstance(new FailureDetector()
        {
            @Override
            public Set<ServiceDescriptor> getFailed()
            {
                return ImmutableSet.of();
            }
        });
    }
}
}
package cz.metacentrum.perun.core.impl; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import javax.sql.DataSource; import cz.metacentrum.perun.core.api.exceptions.GroupRelationDoesNotExist; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.dao.DuplicateKeyException; import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.jdbc.core.JdbcPerunTemplate; import org.springframework.jdbc.core.RowMapper; import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; import cz.metacentrum.perun.core.api.BeansUtils; import cz.metacentrum.perun.core.api.Attribute; import cz.metacentrum.perun.core.api.ExtSource; import cz.metacentrum.perun.core.api.Facility; import cz.metacentrum.perun.core.api.Group; import cz.metacentrum.perun.core.api.GroupsManager; import cz.metacentrum.perun.core.api.Member; import cz.metacentrum.perun.core.api.MembershipType; import cz.metacentrum.perun.core.api.Pair; import cz.metacentrum.perun.core.api.PerunSession; import cz.metacentrum.perun.core.api.Resource; import cz.metacentrum.perun.core.api.Status; import cz.metacentrum.perun.core.api.User; import cz.metacentrum.perun.core.api.Vo; import cz.metacentrum.perun.core.api.exceptions.AlreadyMemberException; import cz.metacentrum.perun.core.api.exceptions.ConsistencyErrorException; import cz.metacentrum.perun.core.api.exceptions.GroupAlreadyRemovedException; import cz.metacentrum.perun.core.api.exceptions.GroupExistsException; import cz.metacentrum.perun.core.api.exceptions.GroupNotExistsException; import cz.metacentrum.perun.core.api.exceptions.InternalErrorException; import cz.metacentrum.perun.core.api.exceptions.NotGroupMemberException; import cz.metacentrum.perun.core.api.exceptions.ParentGroupNotExistsException; import 
cz.metacentrum.perun.core.api.exceptions.WrongAttributeValueException;
import cz.metacentrum.perun.core.api.exceptions.WrongReferenceAttributeValueException;
import cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl;
import cz.metacentrum.perun.core.implApi.GroupsManagerImplApi;

import java.util.HashSet;
import java.util.Set;

/**
 * Implementation of GroupsManager
 *
 * @author Michal Prochazka michalp@ics.muni.cz
 * @author Slavek Licehammer glory@ics.muni.cz
 */
public class GroupsManagerImpl implements GroupsManagerImplApi {

	private final static Logger log = LoggerFactory.getLogger(GroupsManagerImpl.class);

	// Numeric discriminators for the built-in group kinds.
	public final static int MEMBERSGROUP = 1;
	public final static int ADMINSGROUP = 2;
	public final static int SUBGROUP = 3;

	// Shared SELECT column list with "groups_" aliases; must stay in sync with GROUP_MAPPER below.
	protected final static String groupMappingSelectQuery = "groups.id as groups_id, groups.parent_group_id as groups_parent_group_id, groups.name as groups_name, groups.dsc as groups_dsc, " +
		"groups.vo_id as groups_vo_id, groups.created_at as groups_created_at, groups.created_by as groups_created_by, groups.modified_by as groups_modified_by, groups.modified_at as groups_modified_at, " +
		"groups.modified_by_uid as groups_modified_by_uid, groups.created_by_uid as groups_created_by_uid ";

	// http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/jdbc.html
	private JdbcPerunTemplate jdbc;
	private NamedParameterJdbcTemplate namedParameterJdbcTemplate;

	// Group mapper: builds a Group bean from a row selected with groupMappingSelectQuery aliases.
	protected static final RowMapper<Group> GROUP_MAPPER = new RowMapper<Group>() {
		public Group mapRow(ResultSet rs, int i) throws SQLException {
			Group g = new Group();
			g.setId(rs.getInt("groups_id"));
			//ParentGroup with ID=0 is not supported (getInt returns 0 for SQL NULL too)
			if(rs.getInt("groups_parent_group_id") != 0) g.setParentGroupId(rs.getInt("groups_parent_group_id"));
			else g.setParentGroupId(null);
			g.setName(rs.getString("groups_name"));
			// shortName is everything after the last ':' of the fully-qualified name
			g.setShortName(g.getName().substring(g.getName().lastIndexOf(":") + 1));
			g.setDescription(rs.getString("groups_dsc"));
			g.setVoId(rs.getInt("groups_vo_id"));
			g.setCreatedAt(rs.getString("groups_created_at"));
			g.setCreatedBy(rs.getString("groups_created_by"));
			g.setModifiedAt(rs.getString("groups_modified_at"));
			g.setModifiedBy(rs.getString("groups_modified_by"));
			// 0 uid means SQL NULL (see note above about getInt)
			if(rs.getInt("groups_modified_by_uid") == 0) g.setModifiedByUid(null);
			else g.setModifiedByUid(rs.getInt("groups_modified_by_uid"));
			if(rs.getInt("groups_created_by_uid") == 0) g.setCreatedByUid(null);
			else g.setCreatedByUid(rs.getInt("groups_created_by_uid"));
			return g;
		}
	};

	// Maps one joined row to a (Group, Resource) pair by delegating to the two single-entity mappers.
	private static final RowMapper<Pair<Group, Resource>> GROUP_RESOURCE_MAPPER = new RowMapper<Pair<Group, Resource>>() {
		public Pair<Group, Resource> mapRow(ResultSet rs, int i) throws SQLException {
			Pair<Group, Resource> pair = new Pair<Group, Resource>();
			pair.put(GROUP_MAPPER.mapRow(rs, i), ResourcesManagerImpl.RESOURCE_MAPPER.mapRow(rs, i));
			return pair;
		}
	};

	/**
	 * Create new instance of this class.
	 *
	 */
	public GroupsManagerImpl(DataSource perunPool) {
		this.jdbc = new JdbcPerunTemplate(perunPool);
		this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(perunPool);
	}

	/**
	 * Creates a new group in the given VO. Uniqueness is checked case-insensitively within
	 * the VO, separately for top-level groups (parent IS NULL) and for subgroups of the same
	 * parent.
	 *
	 * NOTE(review): the regex below permits '.' but not ':', while the error message mentions
	 * ':' — message and pattern disagree; verify intended character set.
	 */
	public Group createGroup(PerunSession sess, Vo vo, Group group) throws GroupExistsException, InternalErrorException {
		Utils.notNull(group, "group");
		Utils.notNull(group.getName(), "group.getName()");

		// Check if the group already exists
		if(group.getParentGroupId() == null) {
			// check if the TOP level group exists
			if (1 == jdbc.queryForInt("select count('x') from groups where lower(name)=lower(?) and vo_id=? and parent_group_id IS NULL", group.getName(), vo.getId())) {
				throw new GroupExistsException("Group [" + group.getName() + "] already exists under VO [" + vo.getShortName() + "] and has parent Group with id is [NULL]");
			}
		} else {
			// check if subgroup exists under parent group
			if (1 == jdbc.queryForInt("select count('x') from groups where lower(name)=lower(?) and vo_id=? and parent_group_id=?", group.getName(), vo.getId(), group.getParentGroupId())) {
				throw new GroupExistsException("Group [" + group.getName() + "] already exists under VO [" + vo.getShortName() + "] and has parent Group with id [" + group.getParentGroupId() + "]");
			}
		}

		// Check the group name, it can contain only a-Z0-9_- and space
		if (!group.getShortName().matches("^[- a-zA-Z.0-9_]+$")) {
			throw new InternalErrorException(new IllegalArgumentException("Wrong group name, group name can contain only a-Z0-9.-_: and space characters. " + group));
		}

		try {
			// Store the group into the DB
			int newId = Utils.getNewId(jdbc, "groups_id_seq");

			jdbc.update("insert into groups (id, parent_group_id, name, dsc, vo_id, created_by,created_at,modified_by,modified_at,created_by_uid,modified_by_uid) " +
					"values (?,?,?,?,?,?," + Compatibility.getSysdate() + ",?," + Compatibility.getSysdate() + ",?,?)",
					newId, group.getParentGroupId(), group.getName(), group.getDescription(), vo.getId(), sess.getPerunPrincipal().getActor(),
					sess.getPerunPrincipal().getActor(), sess.getPerunPrincipal().getUserId(), sess.getPerunPrincipal().getUserId());
			group.setId(newId);
			group.setVoId(vo.getId());

			return group;
		} catch (RuntimeException err) {
			throw new InternalErrorException(err);
		}
	}

	/**
	 * Resolves the fully-qualified name of a group by id via a recursive CTE over parent links.
	 *
	 * NOTE(review): this SQL looks malformed — it starts with "group.name as (" (not a valid
	 * statement head) and filters on "group.id" instead of "temp.id"; likely intended to begin
	 * with the WITH clause. Also uses a raw List and a raw RowMapper. Verify against the DB
	 * before relying on this method.
	 */
	public String getName(int id) throws InternalErrorException {
		List name= null;
		try {
			name = jdbc.query("group.name as (with temp (name, id, parent_group_id) as ((select name, id, parent_group_id from GROUPS where parent_group_id is null) union all (select cast((temp.name + ':' + groups.name) as varchar(128)), " +
				"groups.id, groups.parent_group_id from groups inner join temp on temp.id = groups.parent_group_id )) select name from temp where group.id = ?"
				,new RowMapper() {
					public Object mapRow(ResultSet resultSet, int i) throws SQLException {
						return resultSet.getString(1);
					}
				},id);
		} catch (RuntimeException e) {
			throw new InternalErrorException(e);
		}
		String result=(String)name.get(0);
		return result;
	}

	/*
	 * Create a subgroup
	 *
	 * @see cz.metacentrum.perun.core.implApi.GroupsManagerImplApi#createGroup(cz.metacentrum.perun.core.api.PerunSession, cz.metacentrum.perun.core.api.Vo, cz.metacentrum.perun.core.api.Group, cz.metacentrum.perun.core.api.Group)
	 */
	public Group createGroup(PerunSession sess, Vo vo, Group parentGroup, Group group) throws GroupExistsException, InternalErrorException {
		// Create new subGroup: qualify the name with the parent's name and delegate.
		group.setParentGroupId(parentGroup.getId());
		group.setName(parentGroup.getName()+":"+group.getShortName());
		group = createGroup(sess, vo, group);
		return group;
	}

	/**
	 * Deletes a group: removes its memberships and authz entries first, then the row itself.
	 * Throws GroupAlreadyRemovedException when the final delete affects no rows.
	 */
	public void deleteGroup(PerunSession sess, Vo vo, Group group) throws InternalErrorException, GroupAlreadyRemovedException {
		Utils.notNull(group.getName(), "group.getName()");

		try {
			// Delete group's members
			jdbc.update("delete from groups_members where group_id=?", group.getId());

			// Delete authz entries for this group
			AuthzResolverBlImpl.removeAllAuthzForGroup(sess, group);

			int rowAffected = jdbc.update("delete from groups where id=?", group.getId());
			if(rowAffected == 0) throw new GroupAlreadyRemovedException("Group: " + group + " , Vo: " + vo);
		} catch (RuntimeException err) {
			throw new InternalErrorException(err);
		}
	}

	/**
	 * Updates a group's shortName and/or description. Only these two attributes are writable
	 * here; the group's existence is assumed to have been checked by the caller.
	 */
	public Group updateGroup(PerunSession sess, Group group) throws InternalErrorException {
		Utils.notNull(group.getName(), "group.getName()");

		// Get the group stored in the DB
		Group dbGroup;
		try {
			dbGroup = this.getGroupById(sess, group.getId());
		} catch (GroupNotExistsException e) {
			throw new InternalErrorException("Group existence was checked at the higher level",e);
		}

		// we allow only update on shortName part of name
		if (!dbGroup.getShortName().equals(group.getShortName())) {
			dbGroup.setShortName(group.getShortName());
			try {
				jdbc.update("update groups set name=?,modified_by=?, modified_by_uid=?, modified_at=" + Compatibility.getSysdate() + " where id=?",
						dbGroup.getName(), sess.getPerunPrincipal().getActor(), sess.getPerunPrincipal().getUserId(), dbGroup.getId());
			} catch (RuntimeException e) {
				throw new InternalErrorException(e);
			}
		}

		if (group.getDescription() != null && !group.getDescription().equals(dbGroup.getDescription())) {
			try {
				jdbc.update("update groups set dsc=?, modified_by=?, modified_by_uid=?, modified_at=" + Compatibility.getSysdate() + " where id=?",
						group.getDescription(), sess.getPerunPrincipal().getActor(), sess.getPerunPrincipal().getUserId(), group.getId());
			} catch (RuntimeException e) {
				throw new InternalErrorException(e);
			}
			dbGroup.setDescription(group.getDescription());
		}

		return dbGroup;
	}

	/**
	 * Updates the group's full name column when it differs from the stored value.
	 * Unlike updateGroup, this writes the whole name, not just the shortName part.
	 */
	public Group updateGroupName(PerunSession sess, Group group) throws InternalErrorException {
		Utils.notNull(group.getName(), "group.getName()");

		// Get the group stored in the DB
		Group dbGroup;
		try {
			dbGroup = this.getGroupById(sess, group.getId());
		} catch (GroupNotExistsException e) {
			throw new InternalErrorException("Group existence was checked at the higher level",e);
		}

		if (!dbGroup.getName().equals(group.getName())) {
			dbGroup.setName(group.getName());
			try {
				jdbc.update("update groups set name=?,modified_by=?, modified_by_uid=?, modified_at=" + Compatibility.getSysdate() + " where id=?",
						dbGroup.getName(), sess.getPerunPrincipal().getActor(), sess.getPerunPrincipal().getUserId(), dbGroup.getId());
			} catch (RuntimeException e) {
				throw new InternalErrorException(e);
			}
		}
		return dbGroup;
	}

	/**
	 * Re-parents the group: writes the new parent_group_id (possibly null) when it differs
	 * from the stored value.
	 */
	public Group updateParentGroupId(PerunSession sess, Group group) throws InternalErrorException {
		Utils.notNull(group, "group");

		// Get the group stored in the DB
		Group dbGroup;
		try {
			dbGroup = this.getGroupById(sess, group.getId());
		} catch (GroupNotExistsException e) {
			throw new InternalErrorException("Group existence was checked at the higher level",e);
		}

		// check if group parent id was changed to another id or to null
		if ((group.getParentGroupId() != null && !group.getParentGroupId().equals(dbGroup.getParentGroupId())) ||
				(group.getParentGroupId() == null && dbGroup.getParentGroupId() != null)) {
			dbGroup.setParentGroupId(group.getParentGroupId());
			try {
				jdbc.update("update groups set parent_group_id=?,modified_by=?, modified_by_uid=?, modified_at=" + Compatibility.getSysdate() + " where id=?",
						dbGroup.getParentGroupId(), sess.getPerunPrincipal().getActor(), sess.getPerunPrincipal().getUserId(), dbGroup.getId());
			} catch (RuntimeException e) {
				throw new InternalErrorException(e);
			}
		}
		return dbGroup;
	}

	/**
	 * Fetches one group by id; translates an empty result into GroupNotExistsException.
	 */
	public Group getGroupById(PerunSession sess, int id) throws GroupNotExistsException, InternalErrorException {
		try {
			return jdbc.queryForObject("select " + groupMappingSelectQuery + " from groups where groups.id=? ", GROUP_MAPPER, id);
		} catch (EmptyResultDataAccessException err) {
			throw new GroupNotExistsException("Group id=" + id);
		} catch (RuntimeException err) {
			throw new InternalErrorException(err);
		}
	}

	/**
	 * Lists users having a membership in the group, ordered by last then first name
	 * (binary collation via Compatibility helper).
	 */
	public List<User> getGroupUsers(PerunSession sess, Group group) throws InternalErrorException {
		try {
			return jdbc.query("select " + UsersManagerImpl.userMappingSelectQuery + " from groups_members join members on members.id=member_id join " +
					"users on members.user_id=users.id where group_id=? order by " + Compatibility.orderByBinary("users.last_name") + ", " +
					Compatibility.orderByBinary("users.first_name"), UsersManagerImpl.USER_MAPPER, group.getId());
		} catch(RuntimeException ex) {
			throw new InternalErrorException(ex);
		}
	}

	/**
	 * True when the user has at least one member record in the group.
	 */
	public boolean isUserMemberOfGroup(PerunSession sess, User user, Group group) throws InternalErrorException {
		try {
			return 1 <= jdbc.queryForInt("select count(1) from groups_members join members on members.id = member_id where members.user_id=? and groups_members.group_id=?", user.getId(), group.getId());
		} catch(RuntimeException ex) {
			throw new InternalErrorException(ex);
		}
	}

	/**
	 * Lists the group's members; empty result maps to an empty list rather than an exception.
	 */
	public List<Member> getGroupMembers(PerunSession sess, Group group) throws InternalErrorException {
		try {
			return jdbc.query("select " + MembersManagerImpl.groupsMembersMappingSelectQuery + " from groups_members join members on members.id=groups_members.member_id " +
					"where groups_members.group_id=?", MembersManagerImpl.MEMBER_MAPPER, group.getId());
		} catch (EmptyResultDataAccessException e) {
			return new ArrayList<Member>();
		} catch (RuntimeException e) {
			throw new InternalErrorException(e);
		}
	}

	/**
	 * Lists group members filtered by status codes: excludeStatus=true returns members NOT in
	 * any of the given statuses, otherwise only members IN one of them.
	 */
	public List<Member> getGroupMembers(PerunSession sess, Group group, List<Status> statuses, boolean excludeStatus) throws InternalErrorException {
		try {
			MapSqlParameterSource parameters = new MapSqlParameterSource();
			List<Integer> statusesCodes = new ArrayList<Integer>();
			for (Status status: statuses) {
				statusesCodes.add(status.getCode());
			}
			parameters.addValue("statuses", statusesCodes);
			parameters.addValue("group_id", group.getId());

			if (excludeStatus) {
				// Exclude members with one of the status
				return this.namedParameterJdbcTemplate.query("select " + MembersManagerImpl.groupsMembersMappingSelectQuery +
						" from groups_members join members on members.id=groups_members.member_id " +
						"where groups_members.group_id=:group_id and members.status"+Compatibility.castToInteger()+" not in (:statuses)",
						parameters, MembersManagerImpl.MEMBER_MAPPER);
			} else {
				// Include members with one of the status
				return this.namedParameterJdbcTemplate.query("select " + MembersManagerImpl.groupsMembersMappingSelectQuery +
						" from groups_members join members on members.id=groups_members.member_id " +
						"where groups_members.group_id=:group_id and members.status"+Compatibility.castToInteger()+" in (:statuses)",
						parameters, MembersManagerImpl.MEMBER_MAPPER);
			}
		} catch (EmptyResultDataAccessException e) {
			return new ArrayList<Member>();
		} catch (RuntimeException e) {
			throw
new InternalErrorException(e); } } public List<Group> getGroups(PerunSession sess, Vo vo) throws InternalErrorException { try { return jdbc.query("select " + groupMappingSelectQuery + " from groups where vo_id=? order by " + Compatibility.orderByBinary("groups.name" + Compatibility.castToVarchar()), GROUP_MAPPER, vo.getId()); } catch(RuntimeException ex) { throw new InternalErrorException(ex); } } public List<Group> getAssignedGroupsToResource(PerunSession perunSession, Resource resource) throws InternalErrorException { try { return jdbc.query("select " + groupMappingSelectQuery + " from groups join " + " groups_resources on groups.id=groups_resources.group_id " + " where groups_resources.resource_id=?", GROUP_MAPPER, resource.getId()); } catch (EmptyResultDataAccessException e) { return new ArrayList<Group>(); } catch (RuntimeException e) { throw new InternalErrorException(e); } } public List<Group> getAssignedGroupsToResource(PerunSession perunSession, Resource resource, Member member) throws InternalErrorException { try { return jdbc.query("select " + groupMappingSelectQuery + " from groups join " + " groups_resources on groups.id=groups_resources.group_id and groups_resources.resource_id=?" 
+ " join groups_members on groups_members.group_id=groups.id and groups_members.member_id=?", GROUP_MAPPER, resource.getId(), member.getId()); } catch (EmptyResultDataAccessException e) { return new ArrayList<Group>(); } catch (RuntimeException e) { throw new InternalErrorException(e); } } public List<Group> getAssignedGroupsToFacility(PerunSession perunSession, Facility facility) throws InternalErrorException { try { return jdbc.query("select distinct " + groupMappingSelectQuery + " from groups join " + " groups_resources on groups.id=groups_resources.group_id " + " join resources on groups_resources.resource_id=resources.id " + "where resources.facility_id=?", GROUP_MAPPER, facility.getId()); } catch (EmptyResultDataAccessException e) { return new ArrayList<Group>(); } catch (RuntimeException e) { throw new InternalErrorException(e); } } public List<Group> getSubGroups(PerunSession sess, Group parentGroup) throws InternalErrorException { try { return jdbc.query("select " + groupMappingSelectQuery + " from groups where groups.parent_group_id=? 
" + "order by " + Compatibility.orderByBinary("groups.name" + Compatibility.castToVarchar()), GROUP_MAPPER, parentGroup.getId()); } catch (EmptyResultDataAccessException e) { return new ArrayList<Group>(); } catch(RuntimeException ex) { throw new InternalErrorException(ex); } } public int getSubGroupsCount(PerunSession sess, Group parentGroup) throws InternalErrorException { try { return jdbc.queryForInt("select count(1) from groups where parent_group_id=?", parentGroup.getId()); } catch(RuntimeException ex) { throw new InternalErrorException(ex); } } public List<Group> getAllGroups(PerunSession sess, Vo vo) throws InternalErrorException { try { return jdbc.query("select " + groupMappingSelectQuery + " from groups where vo_id=?", GROUP_MAPPER, vo.getId()); } catch(RuntimeException ex) { throw new InternalErrorException(ex); } } public Group getParentGroup(PerunSession sess, Group group) throws InternalErrorException, ParentGroupNotExistsException { try { return jdbc.queryForObject("select " + groupMappingSelectQuery + " from groups where groups.id=?", GROUP_MAPPER, group.getParentGroupId()); } catch (EmptyResultDataAccessException e) { throw new ParentGroupNotExistsException(e); } catch (RuntimeException e) { throw new InternalErrorException(e); } } public Group getGroupByName(PerunSession sess, Vo vo, String name) throws GroupNotExistsException, InternalErrorException { try { return jdbc.queryForObject("select " + groupMappingSelectQuery + " from groups where groups.name=? 
and groups.vo_id=?", GROUP_MAPPER, name, vo.getId()); } catch (EmptyResultDataAccessException err) { throw new GroupNotExistsException("Group name=" + name + ", vo id=" + vo.getId()); } catch (RuntimeException err) { throw new InternalErrorException(err); } } public Member addMember(PerunSession sess, Group group, Member member, MembershipType type, int sourceGroupId) throws InternalErrorException, AlreadyMemberException, WrongAttributeValueException, WrongReferenceAttributeValueException { member.setMembershipType(type); member.setSourceGroupId(sourceGroupId); try { jdbc.update("insert into groups_members (group_id, member_id, created_by, created_at, modified_by, modified_at, created_by_uid, modified_by_uid, membership_type, source_group_id) " + "values (?,?,?," + Compatibility.getSysdate() + ",?," + Compatibility.getSysdate() + ",?,?,?,?)", group.getId(), member.getId(), sess.getPerunPrincipal().getActor(), sess.getPerunPrincipal().getActor(), sess.getPerunPrincipal().getUserId(), sess.getPerunPrincipal().getUserId(), type.getCode(), sourceGroupId); } catch(DuplicateKeyException ex) { throw new AlreadyMemberException(member); } catch(RuntimeException ex) { throw new InternalErrorException(ex); } return member; } public List<Group> getGroupsByIds(PerunSession sess, List<Integer> groupsIds) throws InternalErrorException { // If groupsIds are empty, we can immediately return empty result if (groupsIds.size() == 0) { return new ArrayList<Group>(); } MapSqlParameterSource parameters = new MapSqlParameterSource(); parameters.addValue("ids", groupsIds); try { return this.namedParameterJdbcTemplate.query("select " + groupMappingSelectQuery + " from groups where groups.id in ( :ids )", parameters, GROUP_MAPPER); } catch(EmptyResultDataAccessException ex) { return new ArrayList<Group>(); } catch(RuntimeException ex) { throw new InternalErrorException(ex); } } public List<Group> getAllMemberGroups(PerunSession sess, Member member) throws InternalErrorException { try { 
return jdbc.query("select distinct " + groupMappingSelectQuery + " from groups_members join groups on groups_members.group_id = groups.id " + " where groups_members.member_id=?", GROUP_MAPPER, member.getId()); } catch (EmptyResultDataAccessException e) { return new ArrayList<Group>(); } catch (RuntimeException e) { throw new InternalErrorException(e); } } public List<Group> getGroupsByAttribute(PerunSession sess, Attribute attribute) throws InternalErrorException { try { return jdbc.query("select " + groupMappingSelectQuery + " from groups " + "join group_attr_values on groups.id=group_attr_values.group_id where group_attr_values.attr_id=? and " + "group_attr_values.attr_value=?", GROUP_MAPPER, attribute.getId(), BeansUtils.attributeValueToString(attribute)); } catch (EmptyResultDataAccessException e) { return new ArrayList<Group>(); } catch (RuntimeException e) { throw new InternalErrorException(e); } } public List<Pair<Group,Resource>> getGroupResourcePairsByAttribute(PerunSession sess, Attribute attribute) throws InternalErrorException { try { return jdbc.query("select " + groupMappingSelectQuery + ", " + ResourcesManagerImpl.resourceMappingSelectQuery + " from group_resource_attr_values " + "join groups on groups.id=group_resource_attr_values.group_id " + "join resources on resources.id=group_resource_attr_values.resource_id " + "where group_resource_attr_values.attr_id=? and group_resource_attr_values.attr_value=?", GROUP_RESOURCE_MAPPER, attribute.getId(), BeansUtils.attributeValueToString(attribute)); } catch (EmptyResultDataAccessException e) { return new ArrayList<Pair<Group, Resource>>(); } catch (RuntimeException e) { throw new InternalErrorException(e); } } public boolean isGroupMember(PerunSession sess, Group group, Member member) throws InternalErrorException { try { return 1 <= jdbc.queryForInt("select count(1) from groups_members where group_id=? 
and member_id=?", group.getId(), member.getId()); } catch (RuntimeException e) { throw new InternalErrorException(e); } } public boolean isDirectGroupMember(PerunSession sess, Group group, Member member) throws InternalErrorException { try { int count = jdbc.queryForInt("select count(1) from groups_members where group_id=? and member_id=? and membership_type = ?", group.getId(), member.getId(), MembershipType.DIRECT.getCode()); if (1 < count) throw new ConsistencyErrorException("There is more than one direct member in group" + group); return 1 == count; } catch (RuntimeException e) { throw new InternalErrorException(e); } } public void removeMember(PerunSession sess, Group group, Member member) throws InternalErrorException, NotGroupMemberException { if (member.getSourceGroupId() == null) { throw new InternalErrorException("sourceGroupId not set for member object"); } int ret; try { ret = jdbc.update("delete from groups_members where group_id=? and source_group_id=? and member_id=?", group.getId(), member.getSourceGroupId(), member.getId()); } catch(RuntimeException ex) { throw new InternalErrorException(ex); } if(ret == 0) { throw new NotGroupMemberException(member); } else if(ret >= 1) { return; } else { throw new ConsistencyErrorException(member + " and " + group + " have " + ret + " rows in groups_members table"); } } @Override public List<User> getAdmins(PerunSession sess, Group group) throws InternalErrorException { try { Set<User> setOfAdmins = new HashSet<User>(); // direct admins setOfAdmins.addAll(jdbc.query("select " + UsersManagerImpl.userMappingSelectQuery + " from authz join users on authz.user_id=users.id " + "where authz.group_id=? 
and authz.role_id=(select id from roles where name='groupadmin')", UsersManagerImpl.USER_MAPPER, group.getId())); // admins through a group List<Group> listOfGroupAdmins = getGroupAdmins(sess, group); for(Group authorizedGroup : listOfGroupAdmins) { setOfAdmins.addAll(jdbc.query("select " + UsersManagerImpl.userMappingSelectQuery + " from users join members on users.id=members.user_id " + "join groups_members on groups_members.member_id=members.id where groups_members.group_id=?", UsersManagerImpl.USER_MAPPER, authorizedGroup.getId())); } return new ArrayList(setOfAdmins); } catch (EmptyResultDataAccessException e) { return new ArrayList<User>(); } catch (RuntimeException e) { throw new InternalErrorException(e); } } @Override public List<User> getDirectAdmins(PerunSession sess, Group group) throws InternalErrorException { try { return jdbc.query("select " + UsersManagerImpl.userMappingSelectQuery + " from authz join users on authz.user_id=users.id " + "where authz.group_id=? and authz.role_id=(select id from roles where name='groupadmin')", UsersManagerImpl.USER_MAPPER, group.getId()); } catch (EmptyResultDataAccessException e) { return new ArrayList<User>(); } catch (RuntimeException e) { throw new InternalErrorException(e); } } @Override public List<Group> getGroupAdmins(PerunSession sess, Group group) throws InternalErrorException { try { return jdbc.query("select " + groupMappingSelectQuery + " from authz join groups on authz.authorized_group_id=groups.id " + "where authz.group_id=? 
and authz.role_id=(select id from roles where name='groupadmin')", GROUP_MAPPER, group.getId()); } catch (EmptyResultDataAccessException e) { return new ArrayList<Group>(); } catch (RuntimeException e) { throw new InternalErrorException(e); } } public int getGroupsCount(PerunSession sess, Vo vo) throws InternalErrorException { try { return jdbc.queryForInt("select count(1) from groups where vo_id=?", vo.getId()); } catch(RuntimeException ex) { throw new InternalErrorException(ex); } } public int getVoId(PerunSession sess, Group group) throws InternalErrorException { try { return jdbc.queryForInt("select vo_id from groups where id=?", group.getId()); } catch (RuntimeException e) { throw new InternalErrorException(e); } } public void checkGroupExists(PerunSession sess, Group group) throws InternalErrorException, GroupNotExistsException { if (!groupExists(sess, group)) throw new GroupNotExistsException("Group " + group); } public boolean groupExists(PerunSession sess, Group group) throws InternalErrorException { try { return 1 == jdbc.queryForInt("select 1 from groups where id=?", group.getId()); } catch(EmptyResultDataAccessException ex) { return false; } catch (RuntimeException ex) { throw new InternalErrorException(ex); } } /** * Gets all groups which have enabled synchronization. * * @param sess * @return list of groups to synchronize * @throws InternalErrorException */ public List<Group> getGroupsToSynchronize(PerunSession sess) throws InternalErrorException { try { // Get all groups which have defined return jdbc.query("select " + groupMappingSelectQuery + " from groups, attr_names, group_attr_values " + "where attr_names.attr_name=? 
and attr_names.id=group_attr_values.attr_id and group_attr_values.attr_value='true' and " + "group_attr_values.group_id=groups.id", GROUP_MAPPER, GroupsManager.GROUPSYNCHROENABLED_ATTRNAME); } catch (EmptyResultDataAccessException e) { return new ArrayList<Group>(); } catch (RuntimeException e) { throw new InternalErrorException(e); } } @Override public List<Integer> getGroupApplicationIds(PerunSession sess, Group group) throws InternalErrorException { // get app ids for all applications try { return jdbc.query("select id from application where group_id=?", new RowMapper<Integer>() { @Override public Integer mapRow(ResultSet rs, int arg1) throws SQLException { return rs.getInt("id"); } },group.getId()); } catch (RuntimeException e) { throw new InternalErrorException(e); } } @Override public List<Pair<String, String>> getApplicationReservedLogins(Integer appId) throws InternalErrorException { try { return jdbc.query("select namespace,login from application_reserved_logins where app_id=?", new RowMapper<Pair<String, String>>() { @Override public Pair<String, String> mapRow(ResultSet rs, int arg1) throws SQLException { return new Pair<String, String>(rs.getString("namespace"), rs.getString("login")); } }, appId); } catch (RuntimeException e) { throw new InternalErrorException(e); } } @Override public void deleteGroupReservedLogins(PerunSession sess, Group group) throws InternalErrorException { // remove all reserved logins first try { for (Integer appId : getGroupApplicationIds(sess, group)) { jdbc.update("delete from application_reserved_logins where app_id=?", appId); } } catch (RuntimeException e) { throw new InternalErrorException(e); } } public int getGroupsCount(PerunSession sess) throws InternalErrorException { try { return jdbc.queryForInt("select count(*) from groups"); } catch (RuntimeException ex) { throw new InternalErrorException(ex); } } @Override public List<Group> getGroupsWithAssignedExtSourceInVo(PerunSession sess, ExtSource source, Vo vo) throws 
InternalErrorException { try { return jdbc.query("select " + groupMappingSelectQuery + " from group_ext_sources g_exts inner join groups on g_exts.group_id=groups.id " + " where g_exts.ext_source_id=? and groups.vo_id=?", GROUP_MAPPER, source.getId(), vo.getId()); } catch (RuntimeException e) { throw new InternalErrorException(e); } } @Override public void removeGroupUnion(PerunSession sess, Group resultGroup, Group operandGroup) throws InternalErrorException, GroupRelationDoesNotExist { try { if (0 == jdbc.update("DELETE FROM groups_groups WHERE result_gid = ? AND operand_gid = ?", resultGroup.getId(), operandGroup.getId())) { throw new GroupRelationDoesNotExist("Union between " + resultGroup + " and " + operandGroup + " does not exist."); } } catch (RuntimeException e) { throw new InternalErrorException(e); } } @Override public void removeResultGroupRelations(PerunSession sess, Group resultGroup) throws InternalErrorException { try { jdbc.update("DELETE FROM groups_groups WHERE result_gid = ?", resultGroup.getId()); } catch (RuntimeException e) { throw new InternalErrorException(e); } } @Override public void saveGroupRelation(PerunSession sess, Group resultGroup, Group operandGroup, boolean parentFlag) throws InternalErrorException { try { jdbc.update("INSERT INTO groups_groups(result_gid, operand_gid, created_at, created_by, " + "modified_at, modified_by, parent_flag) VALUES(?,?," + Compatibility.getSysdate() + ",?," + Compatibility.getSysdate() + ",?,?)", resultGroup.getId(), operandGroup.getId(), sess.getPerunPrincipal().getActor(), sess.getPerunPrincipal().getActor(), parentFlag); } catch (RuntimeException e) { throw new InternalErrorException(e); } } public boolean isRelationRemovable(PerunSession sess, Group resultGroup, Group operandGroup) throws InternalErrorException { try { return 1 > jdbc.queryForInt("SELECT parent_flag"+Compatibility.castToInteger()+" FROM groups_groups WHERE result_gid=? 
AND operand_gid=?", resultGroup.getId(), operandGroup.getId()); } catch (RuntimeException e) { throw new InternalErrorException(e); } } @Override public boolean isRelationBetweenGroups(Group group1, Group group2) throws InternalErrorException { try { return 1 <= jdbc.queryForInt("SELECT count(1) FROM groups_groups WHERE (result_gid = ? AND operand_gid = ?) OR (result_gid = ? AND operand_gid = ?)", group1.getId(), group2.getId(), group2.getId(), group1.getId()); } catch (RuntimeException e) { throw new InternalErrorException(e); } } @Override public boolean isOneWayRelationBetweenGroups(Group resultGroup, Group operandGroup) throws InternalErrorException { try { return 1 <= jdbc.queryForInt("SELECT count(1) FROM groups_groups WHERE result_gid = ? AND operand_gid = ?", resultGroup.getId(), operandGroup.getId()); } catch (RuntimeException e) { throw new InternalErrorException(e); } } @Override public List<Group> getResultGroups(PerunSession sess, int groupId) throws InternalErrorException { try { return jdbc.query("SELECT " + groupMappingSelectQuery + " FROM groups_groups JOIN groups " + "ON groups.id = groups_groups.result_gid WHERE operand_gid=?", GROUP_MAPPER, groupId); } catch (RuntimeException e) { throw new InternalErrorException(e); } } @Override public List<Group> getOperandGroups(PerunSession sess, int groupId) throws InternalErrorException { try { return jdbc.query("SELECT " + groupMappingSelectQuery + " FROM groups_groups JOIN groups " + "ON groups.id = groups_groups.operand_gid WHERE result_gid=?", GROUP_MAPPER, groupId); } catch (RuntimeException e) { throw new InternalErrorException(e); } } @Override public List<Integer> getResultGroupsIds(PerunSession sess, int groupId) throws InternalErrorException { try { return jdbc.queryForList("SELECT result_gid FROM groups_groups WHERE operand_gid=?", Integer.class, groupId); } catch (RuntimeException e) { throw new InternalErrorException(e); } } }
package rest;

import java.util.List;

import javax.servlet.ServletContext;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import tm.TM;
import vos.Cliente;
import vos.Menu;
import vos.Pedido;

/**
 * REST resource exposing CRUD operations over clients ("Clientes"), their
 * orders ("Pedidos") and menus ("Menus"). All endpoints return HTTP 200 with a
 * JSON body on success, or HTTP 500 with a JSON error object on failure.
 */
@Path("Clientes")
public class Services {

	/**
	 * ServletContext of the current request, injected via {@code @Context}.
	 */
	@Context
	private ServletContext context;

	/**
	 * Returns the path of the WEB-INF/ConnectionData folder inside the current
	 * deployment on the server.
	 *
	 * @return absolute path of WEB-INF/ConnectionData in the current deploy.
	 */
	private String getPath() {
		return context.getRealPath("WEB-INF/ConnectionData");
	}

	/**
	 * Builds a JSON error payload from an exception.
	 *
	 * FIX: the previous version concatenated the raw message into the JSON
	 * string, producing an invalid payload whenever the message contained a
	 * quote or backslash (and the literal text "null" for a null message).
	 * The message is now null-guarded and JSON-escaped.
	 *
	 * @param e exception whose message is reported to the client.
	 * @return a valid JSON object of the form { "ERROR": "..." }.
	 */
	private String doErrorMessage(Exception e){
		String msg = e.getMessage() == null ? "" : e.getMessage();
		// Escape per RFC 8259: backslash first, then quotes and control chars.
		msg = msg.replace("\\", "\\\\")
			.replace("\"", "\\\"")
			.replace("\n", "\\n")
			.replace("\r", "\\r")
			.replace("\t", "\\t");
		return "{ \"ERROR\": \""+ msg + "\"}" ;
	}

	/**
	 * GET endpoint returning every client in the database.
	 * <b>URL: </b> http://"host":8080/ClienteAndes/rest/Clientes
	 *
	 * @return JSON with all clients, or a JSON error object on failure.
	 */
	@GET
	@Produces({ MediaType.APPLICATION_JSON })
	public Response getClientes() {
		TM tm = new TM(getPath());
		List<Cliente> Clientes;
		try {
			Clientes = tm.darClientes();
		} catch (Exception e) {
			return Response.status(500).entity(doErrorMessage(e)).build();
		}
		return Response.status(200).entity(Clientes).build();
	}

	/**
	 * GET endpoint that looks up the client with the given id.
	 * <b>URL: </b> http://"host":8080/ClienteAndes/rest/Clientes/&lt;&lt;id&gt;&gt;
	 *
	 * @param id id of the client to look up, taken from the URL path.
	 * @return JSON with the matching client, or a JSON error object on failure.
	 */
	@GET
	@Path( "{id: \\d+}" )
	@Produces( { MediaType.APPLICATION_JSON } )
	public Response getCliente( @PathParam( "id" ) Long id ) {
		TM tm = new TM( getPath( ) );
		try {
			Cliente v = tm.buscarClientePorId( id );
			return Response.status( 200 ).entity( v ).build( );
		} catch( Exception e ) {
			return Response.status( 500 ).entity( doErrorMessage( e ) ).build( );
		}
	}

	/**
	 * POST endpoint that adds the client received as JSON.
	 * <b>URL: </b> http://"host":8080/ClienteAndes/rest/Clientes
	 *
	 * @param Cliente client to add.
	 * @return JSON with the added client, or a JSON error object on failure.
	 */
	@POST
	@Consumes(MediaType.APPLICATION_JSON)
	@Produces(MediaType.APPLICATION_JSON)
	public Response addCliente(Cliente Cliente) {
		TM tm = new TM(getPath());
		try {
			tm.addCliente(Cliente);
		} catch (Exception e) {
			return Response.status(500).entity(doErrorMessage(e)).build();
		}
		return Response.status(200).entity(Cliente).build();
	}

	/**
	 * POST endpoint that adds the list of clients received as JSON.
	 * <b>URL: </b> http://"host":8080/ClienteAndes/rest/Clientes/varios
	 *
	 * @param Clientes clients to add.
	 * @return JSON with the added clients, or a JSON error object on failure.
	 */
	@POST
	@Path("/varios")
	@Consumes(MediaType.APPLICATION_JSON)
	@Produces(MediaType.APPLICATION_JSON)
	public Response addCliente(List<Cliente> Clientes) {
		TM tm = new TM(getPath());
		try {
			tm.addClientes(Clientes);
		} catch (Exception e) {
			return Response.status(500).entity(doErrorMessage(e)).build();
		}
		return Response.status(200).entity(Clientes).build();
	}

	/**
	 * PUT endpoint that updates the client received as JSON.
	 * <b>URL: </b> http://"host":8080/ClienteAndes/rest/Clientes
	 *
	 * @param Cliente client to update.
	 * @return JSON with the updated client, or a JSON error object on failure.
	 */
	@PUT
	@Consumes(MediaType.APPLICATION_JSON)
	@Produces(MediaType.APPLICATION_JSON)
	public Response updateCliente(Cliente Cliente) {
		TM tm = new TM(getPath());
		try {
			tm.updateCliente(Cliente);
		} catch (Exception e) {
			return Response.status(500).entity(doErrorMessage(e)).build();
		}
		return Response.status(200).entity(Cliente).build();
	}

	/**
	 * DELETE endpoint that removes the client received as JSON.
	 * <b>URL: </b> http://"host":8080/ClienteAndes/rest/Clientes
	 *
	 * @param Cliente client to delete.
	 * @return JSON with the deleted client, or a JSON error object on failure.
	 */
	@DELETE
	@Consumes(MediaType.APPLICATION_JSON)
	@Produces(MediaType.APPLICATION_JSON)
	public Response deleteCliente(Cliente Cliente) {
		TM tm = new TM(getPath());
		try {
			tm.deleteCliente(Cliente);
		} catch (Exception e) {
			return Response.status(500).entity(doErrorMessage(e)).build();
		}
		return Response.status(200).entity(Cliente).build();
	}

	/**
	 * GET endpoint returning every order ("Pedido") in the database.
	 * <b>URL: </b> http://"host":8080/ClienteAndes/rest/Clientes/Pedidos
	 *
	 * @return JSON with all orders, or a JSON error object on failure.
	 */
	@GET
	@Path("/Pedidos")
	@Produces({ MediaType.APPLICATION_JSON })
	public Response getPedidos() {
		TM tm = new TM(getPath());
		List<Pedido> pedidos;
		try {
			pedidos = tm.darPedidos();
		} catch (Exception e) {
			return Response.status(500).entity(doErrorMessage(e)).build();
		}
		return Response.status(200).entity(pedidos).build();
	}

	/**
	 * POST endpoint that adds the order received as JSON.
	 * <b>URL: </b> http://"host":8080/ClienteAndes/rest/Clientes/Pedidos
	 *
	 * @param pedido order to add.
	 * @return JSON with the added order, or a JSON error object on failure.
	 */
	@POST
	@Path("/Pedidos")
	@Consumes(MediaType.APPLICATION_JSON)
	@Produces(MediaType.APPLICATION_JSON)
	public Response addPedido(Pedido pedido) {
		TM tm = new TM(getPath());
		try {
			tm.addPedido(pedido);
		} catch (Exception e) {
			return Response.status(500).entity(doErrorMessage(e)).build();
		}
		return Response.status(200).entity(pedido).build();
	}

	/**
	 * GET endpoint returning every menu in the database.
	 * <b>URL: </b> http://"host":8080/ClienteAndes/rest/Clientes/Menu
	 *
	 * @return JSON with all menus, or a JSON error object on failure.
	 */
	@GET
	@Path("/Menu")
	@Produces({ MediaType.APPLICATION_JSON })
	public Response getMenus() {
		TM tm = new TM(getPath());
		List<Menu> Menus;
		try {
			Menus = tm.darMenus();
		} catch (Exception e) {
			return Response.status(500).entity(doErrorMessage(e)).build();
		}
		return Response.status(200).entity(Menus).build();
	}
}
/* * The MIT License (MIT) * * Copyright (c) 2015-2022 Elior "Mallowigi" Boukhobza * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
 *
 *
 */

/*
 * Created by JFormDesigner on Fri Aug 13 16:21:48 IDT 2021
 */

package com.mallowigi.config.select;

import com.intellij.ide.BrowserUtil;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.ui.SearchTextField;
import com.intellij.ui.components.labels.LinkLabel;
import com.intellij.util.ui.ColumnInfo;
import com.mallowigi.config.AtomSettingsBundle;
import com.mallowigi.config.associations.ui.columns.*;
import com.mallowigi.config.associations.ui.internal.AssociationsTableItemEditor;
import com.mallowigi.config.associations.ui.internal.AssociationsTableModelEditor;
import com.mallowigi.config.ui.SettingsFormUI;
import com.mallowigi.icons.associations.SelectedAssociations;
import com.mallowigi.models.IconType;
import net.miginfocom.swing.MigLayout;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.border.TitledBorder;
import java.awt.event.ActionEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ResourceBundle;

/*
 * Settings form with two tabs (file / folder icon associations), each backed by
 * an editable table plus a search field. (Class continues past this chunk.)
 */
@SuppressWarnings({"FieldCanBeLocal",
  "DuplicateStringLiteralInspection",
  "StringConcatenation",
  "UndesirableClassUsage",
  "InstanceVariableMayNotBeInitialized",
  "TransientFieldNotInitialized",
  "ClassWithTooManyFields",
  "OverlyLongMethod",
  "MethodMayBeStatic",
  "MethodOnlyUsedFromInnerClass",
  "SyntheticAccessorCall"})
public final class AtomSelectForm extends JPanel implements SettingsFormUI, Disposable {
  // Column set for the file-associations table
  private final transient ColumnInfo[] fileColumns = {
    new EnabledColumnInfo(),
    new NameEditableColumnInfo(this, true),
    new PatternEditableColumnInfo(this, true),
    new FileIconEditableColumnInfo(this, true),
    new PriorityColumnInfo(this, true),
    new TouchedColumnInfo(),
  };

  // Column set for the folder-associations table (differs only in the icon column)
  private final transient ColumnInfo[] folderColumns = {
    new EnabledColumnInfo(),
    new NameEditableColumnInfo(this, true),
    new PatternEditableColumnInfo(this, true),
    new FolderIconEditableColumnInfo(this, true),
    new PriorityColumnInfo(this, true),
    new TouchedColumnInfo(),
  };

  // JFormDesigner - Variables declaration - DO NOT MODIFY //GEN-BEGIN:variables
  // Generated using JFormDesigner non-commercial license
  private JLabel explanation;
  private JLabel customExplanation2;
  private LinkLabel link;
  private JTabbedPane tabbedPane;
  private JPanel fileAssociationsPanel;
  private SearchTextField fileSearch;
  private JPanel folderAssociationsPanel;
  private SearchTextField folderSearch;
  private JTextPane explanation2;
  private JButton resetButton;
  // JFormDesigner - End of variables declaration //GEN-END:variables
  private JComponent fileIconsTable;
  private JComponent folderIconsTable;
  // Editors are created lazily elsewhere and nulled on dispose()
  private @Nullable AssociationsTableModelEditor fileAssociationsEditor;
  private @Nullable AssociationsTableModelEditor folderAssociationsEditor;

  /** Builds the generated UI, creates both tables and sets search placeholders. */
  @Override
  public void init() {
    initComponents();
    createTables();
    // NOTE(review): both fields reuse the "fileSearch.placeholder" key — confirm intentional
    fileSearch.getTextEditor().getEmptyText().setText(AtomSettingsBundle.message("fileSearch.placeholder"));
    folderSearch.getTextEditor().getEmptyText().setText(AtomSettingsBundle.message("fileSearch.placeholder"));
  }

  /** The form itself is the settings component. */
  @Override
  public JComponent getContent() {
    return this;
  }

  @Override
  public void afterStateSet() {
    // add after state set
  }

  /** Drops editor references so the table models can be garbage-collected. */
  @Override
  public void dispose() {
    fileAssociationsEditor = null;
    folderAssociationsEditor = null;
  }

  /** Resets both table editors from the persisted config, on the EDT. */
  public void setFormState(final AtomSelectConfig config) {
    ApplicationManager.getApplication().invokeLater(() -> {
      if (fileAssociationsEditor != null) {
        fileAssociationsEditor.reset(config.getSelectedFileAssociations().getTheAssociations());
      }
      if (folderAssociationsEditor != null) {
        folderAssociationsEditor.reset(config.getSelectedFolderAssociations().getTheAssociations());
      }
      afterStateSet();
    });
  }

  /** True when either table's items differ from the persisted config. */
  @SuppressWarnings({"SimplifiableIfStatement", "DuplicatedCode"})
  public boolean isModified(final AtomSelectConfig config) {
    boolean modified = false;
    if (fileAssociationsEditor != null) {
      modified = config.isFileIconsModified(fileAssociationsEditor.getModel().getItems());
    }
    if (folderAssociationsEditor != null) {
      modified = modified || config.isFolderIconsModified(folderAssociationsEditor.getModel().getItems());
    }
    return modified;
  }

  /** Snapshot of the file-associations table as a SelectedAssociations value. */
  public SelectedAssociations getFileAssociations() {
    assert fileAssociationsEditor != null;
    return new SelectedAssociations(IconType.FILE, fileAssociationsEditor.getModel().getAllItems());
  }

  /** Snapshot of the folder-associations table as a SelectedAssociations value. */
  public SelectedAssociations getFolderAssociations() {
    assert folderAssociationsEditor != null;
    return new SelectedAssociations(IconType.FOLDER, folderAssociationsEditor.getModel().getAllItems());
  }

  /** Opens the help link from the bundle in the external browser. */
  @SuppressWarnings("unused")
  private void linkMouseClicked(final MouseEvent e) {
    BrowserUtil.browse(AtomSettingsBundle.message("SelectForm.link.text"));
  }

  /** JFormDesigner-generated component construction (continues past this chunk). */
  @SuppressWarnings("ConfusingFloatingPointLiteral")
  private void initComponents() {
    // JFormDesigner - Component initialization - DO NOT MODIFY //GEN-BEGIN:initComponents
    // Generated using JFormDesigner non-commercial license
    final ResourceBundle bundle = ResourceBundle.getBundle("messages.AtomFileIconsBundle");
    explanation = new JLabel();
    customExplanation2 = new JLabel();
    link = new LinkLabel();
    tabbedPane = new JTabbedPane();
    fileAssociationsPanel = new JPanel();
    fileSearch = new SearchTextField();
    folderAssociationsPanel = new JPanel();
    folderSearch = new SearchTextField();
    explanation2 = new JTextPane();
    resetButton = new JButton();

    //======== this ========
    setBorder(new TitledBorder(null, "Associations Editor", TitledBorder.CENTER, TitledBorder.TOP));
    setLayout(new MigLayout(
      "hidemode 3,wrap",
      // columns
      "[grow,fill]",
      // rows
      "[]" +
      "[]" +
      "[shrink 0,fill]" +
      "[top]0" +
      "[]"));

    //---- explanation ----
    explanation.setText(bundle.getString("SelectForm.explanation.text"));
    explanation.setFont(explanation.getFont().deriveFont(explanation.getFont().getSize() - 1f));
    explanation.setForeground(UIManager.getColor("inactiveCaptionText"));
    add(explanation, "cell 0 0");

    //---- customExplanation2 ----
    customExplanation2.setText(bundle.getString("SelectForm.customExplanation2.text"));
customExplanation2.setFont(customExplanation2.getFont().deriveFont(customExplanation2.getFont().getSize() - 1f)); customExplanation2.setForeground(UIManager.getColor("inactiveCaptionText")); add(customExplanation2, "cell 0 1,alignx left,growx 0"); //---- link ---- link.setText(bundle.getString("SelectForm.link.text")); link.setFont(link.getFont().deriveFont(link.getFont().getSize() - 1f)); link.setLabelFor(explanation); link.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(final MouseEvent e) { linkMouseClicked(e); } }); add(link, "cell 0 1"); //======== tabbedPane ======== { tabbedPane.setMinimumSize(null); //======== fileAssociationsPanel ======== { fileAssociationsPanel.setMinimumSize(null); fileAssociationsPanel.setPreferredSize(null); fileAssociationsPanel.setLayout(new MigLayout( "aligny top", // columns "0[grow,fill]0", // rows "rel[shrink 0,fill]rel")); fileAssociationsPanel.add(fileSearch, "cell 0 0"); } tabbedPane.addTab(bundle.getString("SelectForm.fileAssociationsPanel.tab.title"), fileAssociationsPanel); //======== folderAssociationsPanel ======== { folderAssociationsPanel.setLayout(new MigLayout( "aligny top", // columns "0[grow,fill]0", // rows "rel[shrink 0,fill]rel")); folderAssociationsPanel.add(folderSearch, "cell 0 0"); } tabbedPane.addTab(bundle.getString("SelectForm.folderAssociationsPanel.tab.title"), folderAssociationsPanel); } add(tabbedPane, "cell 0 2"); //---- explanation2 ---- explanation2.setText(bundle.getString("SelectForm.explanation2.text")); explanation2.setForeground(UIManager.getColor("inactiveCaptionText")); explanation2.setFont(explanation2.getFont().deriveFont(explanation2.getFont().getSize() - 1f)); add(explanation2, "cell 0 3"); //---- resetButton ---- resetButton.setText(bundle.getString("SelectForm.resetButton.text")); add(resetButton, "cell 0 4,alignx right,growx 0"); // JFormDesigner - End of component initialization //GEN-END:initComponents 
resetButton.addActionListener(this::resetButtonActionPerformed); } @SuppressWarnings("FeatureEnvy") private void resetButtonActionPerformed(final ActionEvent e) { final AtomSelectConfig config = AtomSelectConfig.getInstance(); config.reset(); ApplicationManager.getApplication().invokeLater(() -> { if (fileAssociationsEditor != null) { fileAssociationsEditor.reset(config.getSelectedFileAssociations().getTheAssociations()); } if (folderAssociationsEditor != null) { folderAssociationsEditor.reset(config.getSelectedFolderAssociations().getTheAssociations()); } afterStateSet(); }); } private void createTables() { createFileIconsTable(); createFolderIconsTable(); } /** * Create the file icons */ private void createFileIconsTable() { final AssociationsTableItemEditor itemEditor = new AssociationsTableItemEditor(); fileAssociationsEditor = new AssociationsTableModelEditor(fileColumns, itemEditor, AtomSettingsBundle.message("no.file.associations"), fileSearch); ApplicationManager.getApplication().invokeLater(() -> { fileIconsTable = fileAssociationsEditor.createComponent(); fileAssociationsPanel.add(fileIconsTable, "cell 0 1"); //NON-NLS }); } /** * Create the folder icons */ private void createFolderIconsTable() { final AssociationsTableItemEditor itemEditor = new AssociationsTableItemEditor(); folderAssociationsEditor = new AssociationsTableModelEditor(folderColumns, itemEditor, AtomSettingsBundle.message("no.folder.associations"), folderSearch); ApplicationManager.getApplication().invokeLater(() -> { folderIconsTable = folderAssociationsEditor.createComponent(); folderAssociationsPanel.add(folderIconsTable, "cell 0 1"); //NON-NLS }); } }
/*
 * #%L
 * =====================================================
 *   _____                _     ____  _   _       _   _
 *  |_   _|_ __ _   _ ___| |_  / __ \| | | | ___ | | | |
 *    | | | '__| | | / __| __|/ / _` | |_| |/ __|| |_| |
 *    | | | |  | |_| \__ \ |_| | (_| |  _  |\__ \|  _  |
 *    |_| |_|   \__,_|___/\__|\ \__,_|_| |_||___/|_| |_|
 *                             \____/
 *
 * =====================================================
 *
 * Hochschule Hannover
 * (University of Applied Sciences and Arts, Hannover)
 * Faculty IV, Dept. of Computer Science
 * Ricklinger Stadtweg 118, 30459 Hannover, Germany
 *
 * Email: trust@f4-i.fh-hannover.de
 * Website: http://trust.f4.hs-hannover.de/
 *
 * This file is part of irondetect, version 0.0.8,
 * implemented by the Trust@HsH research group at the Hochschule Hannover.
 * %%
 * Copyright (C) 2010 - 2015 Trust@HsH
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
/**
 *
 */
package de.hshannover.f4.trust.irondetect.model;

import java.util.ArrayList;
import java.util.Set;

import org.apache.log4j.Logger;

import de.hshannover.f4.trust.irondetect.util.BooleanOperator;
import de.hshannover.f4.trust.irondetect.util.Pair;

/**
 * A named context made of context parameters chained by boolean operators.
 * A context may additionally define a sliding time window (SLIDING parameter
 * with an "hh:mm:ss" value).
 *
 * @author jvieweg
 */
public class Context {

	/**
	 * BooleanOperator of first Element in List is null. Consecutive elements contain the BooleanOperators that link
	 * them to their predecessor. //TODO Is this a good solution? Alternatives?
	 */
	private ArrayList<Pair<ContextParameterPol, BooleanOperator>> ctxParamSet;

	private String id;

	// true once a SLIDING parameter with a well-formed hh:mm:ss value was set
	private boolean isSliding;

	// sliding window length in milliseconds; 0 when not sliding
	private long slidingIntervall;

	private static final Logger logger = Logger.getLogger(Context.class);

	public Context(String id) {
		this.id = id;
		this.isSliding = false;
		this.slidingIntervall = 0;
	}

	public Context() {
		this("");
	}

	/**
	 * @return the ctxParamSet
	 */
	public ArrayList<Pair<ContextParameterPol, BooleanOperator>> getCtxParamSet() {
		return ctxParamSet;
	}

	/**
	 * Sets the context parameters and, if a SLIDING parameter is present,
	 * parses its "hh:mm:ss" value into a millisecond interval.
	 *
	 * @param ctxParamSet the ctxParamSet to set
	 */
	public void setCtxParamSet(ArrayList<Pair<ContextParameterPol, BooleanOperator>> ctxParamSet) {
		this.ctxParamSet = ctxParamSet;
		for (Pair<ContextParameterPol, BooleanOperator> pair : ctxParamSet) {
			if (pair.getFirstElement().getType().getTypeId() == ContextParamType.SLIDING) {
				String[] timeString = pair.getFirstElement().getValue().split(":");
				if (timeString.length != 3) {
					logger.error("Wrong time format for sliding context!");
					this.isSliding = false;
				} else {
					// NOTE(review): non-numeric components throw NumberFormatException here — confirm callers validate.
					this.isSliding = true;
					this.slidingIntervall = Long.parseLong(timeString[0]) * 3600 * 1000
							+ Long.parseLong(timeString[1]) * 60 * 1000
							+ Long.parseLong(timeString[2]) * 1000;
				}
			}
		}
	}

	/**
	 * @return the id
	 */
	public String getId() {
		return id;
	}

	/**
	 * @param id the id to set
	 */
	public void setId(String id) {
		this.id = id;
	}

	/**
	 * @return the isSliding
	 */
	public boolean isSliding() {
		return isSliding;
	}

	/**
	 * @return the slidingIntervall
	 */
	public long getSlidingIntervall() {
		return slidingIntervall;
	}

	/* (non-Javadoc)
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString() {
		// NOTE(review): throws NPE when ctxParamSet was never set — confirm toString is only used after setCtxParamSet.
		StringBuilder ctxParStr = new StringBuilder("contextParamSet=");
		for (Pair<ContextParameterPol, BooleanOperator> p : ctxParamSet) {
			ctxParStr.append(p.toString());
		}
		return "Context [" + ctxParStr.toString() + ", " + super.toString() + "]";
	}

	/**
	 * Checks if the given feature matches this context.
	 *
	 * FIXME: only the boolean operator of the second list entry is honoured;
	 * a mixed AND/OR chain is evaluated as if one operator applied throughout.
	 *
	 * @param f the feature
	 * @return true if the feature matches the context, else false (default)
	 */
	public boolean match(Feature f) {
		// get the context parameters for the feature
		Set<ContextParameter> ctxParamsFeature = f.getContextParameters();

		// no context parameters -> nothing can match
		if (this.ctxParamSet.size() == 0) {
			return false;
		}

		logger.debug("Check for context match for Feature '" + f.getQualifiedId() + "', context: " + this);
		boolean result = false;

		// if there is only one there is no boolean operator
		if (this.ctxParamSet.size() == 1) {
			result = this.ctxParamSet.get(0).getFirstElement().check(ctxParamsFeature);
			logger.trace("check was " + result);
			return result;
		}

		BooleanOperator op = this.ctxParamSet.get(1).getSecondElement();
		switch (op) {
			case AND:
				// all operands must hold; bail out on the first false one
				for (int i = 0; i < this.ctxParamSet.size(); i++) {
					result = this.ctxParamSet.get(i).getFirstElement().check(ctxParamsFeature);
					if (!result) {
						logger.trace("check was " + result);
						return result;
					}
				}
				break;
			case OR:
				// any true operand suffices
				for (int i = 0; i < this.ctxParamSet.size(); i++) {
					result = this.ctxParamSet.get(i).getFirstElement().check(ctxParamsFeature);
					if (result) {
						logger.trace("check was " + result);
						return result;
					}
				}
				// BUGFIX: previously fell through to 'return true' although every
				// OR operand was false; an OR chain with no true operand must not match.
				logger.trace("check was false");
				return false;
			default:
				logger.error("Only AND/OR supported at this time!");
				break;
		}

		// AND case: every operand was true
		return true;
	}
}
/* * The MIT License (MIT) * * Copyright (c) 2016 Rojoss <http://jroossien.com> * Copyright (c) 2016 contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
 */
package com.jroossien.boxx.util.item;

import com.jroossien.boxx.aliases.*;
import com.jroossien.boxx.options.SingleOption;
import com.jroossien.boxx.options.single.*;
import org.bukkit.FireworkEffect;
import org.bukkit.Material;
import org.bukkit.block.banner.Pattern;
import org.bukkit.command.CommandSender;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.inventory.ItemFlag;
import org.bukkit.inventory.meta.*;
import org.bukkit.potion.PotionData;
import org.bukkit.potion.PotionEffect;

import java.util.*;

/**
 * Registry of item meta tags that can be parsed from / serialized to strings.
 * <p>
 * A tag is applied either reflectively, via the names of a setter and getter on
 * EItem ({@code setMethod}/{@code getMethod} — assumed to be resolved by
 * reflection elsewhere; TODO confirm), or through an {@link ItemTagCallback}.
 * Tags are looked up case-insensitively with underscores and spaces stripped;
 * aliases share the same registry map, so {@link #values()} may contain the
 * same tag more than once.
 */
public class ItemTag {

    // Maps normalized tag names AND normalized aliases to their tag, so lookups
    // by alias work; consequently values() contains duplicates per alias.
    private static Map<String, ItemTag> BY_NAME = new HashMap<>();

    private final String tag;
    private final String[] aliases;
    private final SingleOption option;
    private final ItemTagCallback callback;
    private final String setMethod;
    private final String getMethod;
    private final Class<? extends ItemMeta>[] metaClasses;

    // Constructor for tags applied via named setter/getter methods (no callback).
    private ItemTag(String tag, String[] aliases, SingleOption option, String setMethod, String getMethod, Class... metaClasses) {
        this.tag = tag;
        option.name(tag);
        this.aliases = aliases;
        this.option = option;
        this.callback = null;
        this.setMethod = setMethod;
        this.getMethod = getMethod;
        this.metaClasses = metaClasses;
    }

    // Constructor for tags applied via a callback (no setter/getter names).
    private ItemTag(String tag, String[] aliases, SingleOption option, ItemTagCallback callback, Class... metaClasses) {
        this.tag = tag;
        option.name(tag);
        this.aliases = aliases;
        this.option = option;
        this.callback = callback;
        this.setMethod = null;
        this.getMethod = null;
        this.metaClasses = metaClasses;
    }

    /** The canonical tag name. */
    public String getTag() {
        return tag;
    }

    /** Alternative names for this tag; may be null or empty. */
    public String[] getAliases() {
        return aliases;
    }

    /** The ItemMeta subtypes this tag applies to. */
    public Class<? extends ItemMeta>[] getMeta() {
        return metaClasses;
    }

    /** The option used to parse/validate this tag's value. */
    public SingleOption getOption() {
        return option;
    }

    /** Setter method name for reflective application; null when a callback is used. */
    public String setMethod() {
        return setMethod;
    }

    /** Getter method name for reflective serialization; null when a callback is used. */
    public String getMethod() {
        return getMethod;
    }

    /** The callback used to apply/read this tag; null for reflective tags. */
    public ItemTagCallback getCallback() {
        return callback;
    }

    public boolean hasCallback() {
        return callback != null;
    }

    /**
     * Look up a tag by name or alias; case-insensitive, underscores and spaces
     * are ignored. Returns null when unknown.
     */
    public static ItemTag fromString(String name) {
        return BY_NAME.get(name.toUpperCase().replace("_", "").replace(" ", ""));
    }

    /** All distinct tags applicable to the given meta's type. */
    public static List<ItemTag> getTags(ItemMeta meta) {
        List<ItemTag> tags = new ArrayList<ItemTag>();
        for (ItemTag tag : ItemTag.values()) {
            for (Class<? extends ItemMeta> clazz : tag.getMeta()) {
                if (clazz.isAssignableFrom(meta.getClass())) {
                    // contains() guard needed because values() has alias duplicates
                    if (!tags.contains(tag)) {
                        tags.add(tag);
                    }
                }
            }
        }
        return tags;
    }

    /** All registered tags; contains the same tag once per registered alias. */
    public static Collection<ItemTag> values() {
        return BY_NAME.values();
    }

    /**
     * Map of tag name to alias list; restricted to tags applicable to the given
     * meta, or all tags when meta is null.
     */
    public static Map<String, List<String>> getTagsMap(ItemMeta meta) {
        Map<String, List<String>> tagMap = new HashMap<>();
        Collection<ItemTag> tagList = meta == null ? values() : getTags(meta);
        for (ItemTag tag : tagList) {
            tagMap.put(tag.getTag(), tag.getAliases() == null || tag.getAliases().length < 1 ? new ArrayList<String>() : Arrays.asList(tag.getAliases()));
        }
        return tagMap;
    }

    /** Register a callback-based tag. */
    public static ItemTag register(String tag, String[] aliases, SingleOption option, ItemTagCallback executeCallback, Class... entities) {
        return register(new ItemTag(tag, aliases, option, executeCallback, entities));
    }

    /** Register a reflective (setter/getter) tag. */
    private static ItemTag register(String tag, String[] aliases, SingleOption option, String setMethod, String getMethod, Class... entities) {
        return register(new ItemTag(tag, aliases, option, setMethod, getMethod, entities));
    }

    /**
     * Store a tag under its normalized name and each normalized alias.
     * Throws when the canonical name is already taken; silently skips aliases
     * that collide with an existing entry.
     */
    private static ItemTag register(ItemTag tag) {
        String key = tag.getTag().toUpperCase().replace("_", "").replace(" ", "");
        if (BY_NAME.containsKey(key)) {
            throw new IllegalArgumentException("There is already an ItemTag registered with the name '" + key + "'!");
        }
        BY_NAME.put(key, tag);
        if (tag.getAliases() != null) {
            for (String alias : tag.getAliases()) {
                alias = alias.toUpperCase().replace("_", "").replace(" ", "");
                if (!BY_NAME.containsKey(alias)) {
                    BY_NAME.put(alias, tag);
                }
            }
        }
        return tag;
    }

    /**
     * Registers the built-in tags. In the onGet serializers below, entries are
     * concatenated with their "tag:" prefix and the leading prefix is stripped
     * with substring(prefix length), e.g. 8 for "enchant:" / "pattern:" /
    * "feffect:" / "peffect:", and 5 for "flag:".
     */
    public static void registerDefaults() {
        //All items
        ItemTag.register("Name", new String[] {"DisplayName", "DName"}, new StringO(), "setName", "getDisplayName", ItemMeta.class);
        ItemTag.register("Lore", new String[] {"Description", "Desc"}, new StringO().def(""), "setLore", "getLoreString", ItemMeta.class);
        ItemTag.register("Enchant", new String[] {"Enchantment", "Ench", "E"}, new EnchantO(), new ItemTagCallback() {
            @Override boolean onSet(CommandSender sender, EItem item, SingleOption result) {
                item.enchant(((EnchantO)result).getValue());
                return true;
            }

            @Override String onGet(EItem item) {
                String result = "";
                for (Map.Entry<Enchantment, Integer> entry : item.getEnchantments().entrySet()) {
                    result += "enchant:" + Enchantments.getName(entry.getKey()) + ":" + entry.getValue();
                }
                return result.length() >= 8 ? result.substring(8) : "";
            }
        }, ItemMeta.class, EnchantmentStorageMeta.class);

        //Leather Color
        ItemTag.register("Color", new String[] {"Clr", "Leather", "LColor", "LClr"}, new ColorO(), "setColor", "getColor", LeatherArmorMeta.class);

        //Skull
        ItemTag.register("Owner", new String[] {"Skull", "Player"}, new StringO().maxChars(16), "setOwner", "getOwner", SkullMeta.class);
        ItemTag.register("Texture", new String[] {"Tex", "SkullTexture", "SkullT", "SkullTex", "Skin", "SkullURL"}, new StringO(), "setTexture", "getTexture", SkullMeta.class);

        //Banners
        ItemTag.register("Base", new String[] {"BaseColor", "BaseClr", "BColor", "BClr", "BC"}, new StringO().match(DyeColors.getAliasMap()), new ItemTagCallback() {
            @Override boolean onSet(CommandSender sender, EItem item, SingleOption result) {
                item.setBaseColor(DyeColors.get(((StringO)result).getValue()));
                return true;
            }

            @Override String onGet(EItem item) {
                return DyeColors.getName(item.getBaseColor());
            }
        }, BannerMeta.class);
        ItemTag.register("Pattern", new String[] {"Pat", "P"}, new MultiO(":", "{pattern}:{dyecolor}", new StringO().match(BannerPatterns.getAliasMap()), new StringO().match(DyeColors.getAliasMap())), new ItemTagCallback() {
            @Override boolean onSet(CommandSender sender, EItem item, SingleOption result) {
                item.addPattern(BannerPatterns.get((String)((MultiO)result).getValue()[0].getValue()), DyeColors.get((String)((MultiO)result).getValue()[1].getValue()));
                return true;
            }

            @Override String onGet(EItem item) {
                String result = "";
                for (Pattern pattern : item.getPatterns()) {
                    result += "pattern:" + BannerPatterns.getName(pattern.getPattern()) + ":" + DyeColors.getName(pattern.getColor());
                }
                return result.length() >= 8 ? result.substring(8) : "";
            }
        }, BannerMeta.class);

        //Firework
        ItemTag.register("FEffect", new String[] {"Firework", "FW", "FWEffect", "FE", "FWE"}, new FireworkO(), new ItemTagCallback() {
            @Override boolean onSet(CommandSender sender, EItem item, SingleOption result) {
                item.addEffect(((FireworkO)result).getValue());
                return true;
            }

            @Override String onGet(EItem item) {
                // Charges carry a single effect; rockets may carry several.
                if (item.getType() == Material.FIREWORK_CHARGE) {
                    return FireworkO.serialize(item.getEffect());
                } else {
                    String result = "";
                    for (FireworkEffect effect : item.getEffects()) {
                        result += "feffect:" + FireworkO.serialize(effect);
                    }
                    return result.length() >= 8 ? result.substring(8) : "";
                }
            }
        }, FireworkMeta.class, FireworkEffectMeta.class);
        ItemTag.register("Power", new String[] {"Pwr", "Pow"}, new IntO(), "setPower", "getPower", FireworkMeta.class);

        //Potions
        ItemTag.register("Potion", new String[] {"Pot", "PotionType", "PotType", "PType"}, new StringO().match(PotionTypes.getAliasMap()), new ItemTagCallback() {
            @Override boolean onSet(CommandSender sender, EItem item, SingleOption result) {
                item.setPotion(new PotionData(PotionTypes.get(((StringO)result).getValue())));
                return true;
            }

            @Override String onGet(EItem item) {
                return PotionTypes.getName(item.getPotion().getType());
            }
        }, PotionMeta.class);
        ItemTag.register("PEffect", new String[] {"PotionE", "PE", "PotEffect", "PotE"}, new PotionO(), new ItemTagCallback() {
            @Override boolean onSet(CommandSender sender, EItem item, SingleOption result) {
                item.addPotionEffect(((PotionO)result).getValue());
                return true;
            }

            @Override String onGet(EItem item) {
                String result = "";
                for (PotionEffect effect : item.getPotionEffects()) {
                    result += "peffect:" + PotionO.serialize(effect);
                }
                return result.length() >= 8 ? result.substring(8) : "";
            }
        }, PotionMeta.class);

        //Books
        ItemTag.register("Author", new String[] {"BookAuthor", "Auth", "AU"}, new StringO(), "setAuthor", "getAuthor", BookMeta.class);
        ItemTag.register("Title", new String[] {"BookTitle", "Tit"}, new StringO(), "setTitle", "getTitle", BookMeta.class);
        ItemTag.register("Content", new String[] {"BookContent", "Cont", "CT", "Pages", "Page", "PA"}, new StringO(), new ItemTagCallback() {
            @Override boolean onSet(CommandSender sender, EItem item, SingleOption result) {
                item.setContent(((StringO)result).getValue());
                return true;
            }

            @Override String onGet(EItem item) {
                return item.getContent();
            }
        }, BookMeta.class);

        //Flags
        ItemTag.register("Flag", new String[] {"ItemFlag", "IFlag", "ItemF", "F", "HideFlag", "HFlag", "HideF", "Hide"}, new StringO().match(ItemFlags.getAliasMap()), new ItemTagCallback() {
            @Override boolean onSet(CommandSender sender, EItem item, SingleOption result) {
                item.addItemFlags(ItemFlags.get(((StringO)result).getValue()));
                return true;
            }

            @Override String onGet(EItem item) {
                String result = "";
                for (ItemFlag flag : item.getItemFlags()) {
                    result += "flag:" + ItemFlags.getName(flag);
                }
                return result.length() >= 5 ? result.substring(5) : "";
            }
        }, ItemMeta.class);
        ItemTag.register("AllFlags", new String[] {"AllItemFlags", "AllIFlags", "AllItemF", "AllF", "AllHideFlags", "AllHFlags", "AllHideF", "HideAll"}, new BoolO().def(false), new ItemTagCallback() {
            @Override boolean onSet(CommandSender sender, EItem item, SingleOption result) {
                if (((BoolO)result).getValue()) {
                    item.addItemFlags();
                } else {
                    item.removeItemFlags();
                }
                return true;
            }

            // Write-only tag: has no serialized representation.
            @Override String onGet(EItem item) {return null;}
        }, ItemMeta.class);

        //Custom
        ItemTag.register("Glow", new String[] {"Glowing"}, new BoolO().def(false), "setGlowing", "isGlowing", ItemMeta.class);
    }
}
package org.ketsu.filter; import java.io.Reader; import java.io.StringReader; import java.io.StringWriter; import java.io.Writer; import javax.xml.stream.StreamFilter; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLStreamWriter; /* * The MIT License (MIT) * * Copyright (c) 2014, Lauri Keel * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
 */
public abstract class StaXFilter {

	/** Shared StAX factories; assumed thread-safe for creating readers/writers — TODO confirm for the deployed implementation. */
	public static final XMLOutputFactory outputFactory = XMLOutputFactory.newInstance();
	public static final XMLInputFactory inputFactory = XMLInputFactory.newInstance();

	/** Optional synthetic root element used to make fragment input well-formed; null disables wrapping. */
	public final String fakeRoot = getFakeRoot();

	/*
	 * FIXME: does not remove blank tags with (only forbidden) attributes
	 */
	/** @return true when an attribute-less occurrence of this tag should be dropped entirely */
	public abstract boolean isIgnoreBlankTag(String tag);

	/** @return true when this element (and, if false, its whole subtree) may pass through */
	public abstract boolean isAllowedTag(String tag);

	/** @return true when this element may be emitted self-closing (no forced empty text node) */
	public abstract boolean isSelfClosingElement(String tag);

	/** Un-prefixed attribute filter. */
	public abstract boolean isAllowedAttribute(String tag, String attr, String val);

	/** Namespace-prefixed attribute filter. */
	public abstract boolean isAllowedAttribute(String tag, String prefix, String ns, String attr, String val);

	/** Override to supply a synthetic root element name for fragment input; default: none. */
	public String getFakeRoot() {
		return null;
	}

	/**
	 * Filters an XML string and returns the filtered markup. When fakeRoot is
	 * set, the input is wrapped in that element before parsing and the wrapper
	 * is stripped from the result again.
	 */
	public String filter(String what) throws XMLStreamException {
		Writer output = new StringWriter(what.length() + (fakeRoot != null ? fakeRoot.length()*2 + 5 : 0));
		Reader input = new StringReader(fakeRoot != null ? ("<"+fakeRoot+">"+what+"</"+fakeRoot+">") : what);

		XMLStreamWriter writer = outputFactory.createXMLStreamWriter(output);
		XMLStreamReader reader = inputFactory.createXMLStreamReader(input);

		filter(inputFactory, reader, writer);

		String ret = output.toString();

		if(fakeRoot != null) {
			// Strip "<fakeRoot>" (length+2) from the front and "</fakeRoot>" (length+3... i.e. len-frl-1) from the back.
			int len = ret.length();
			int frl = fakeRoot.length()+2;

			return ret.substring(frl, len-frl-1);
		}

		return ret;
	}

	/**
	 * Streams events from reader to writer, dropping disallowed elements
	 * (including their subtrees), disallowed attributes, and empty occurrences
	 * of ignore-blank tags. Closes both reader and writer when done.
	 */
	public void filter(XMLInputFactory inputFactory, XMLStreamReader reader, XMLStreamWriter writer) throws XMLStreamException {
		// The filtered reader suppresses the fakeRoot element itself and every
		// event inside a disallowed element's subtree (tracked via ignoreDepth).
		reader = inputFactory.createFilteredReader(reader, new StreamFilter() {
			private int ignoreDepth = 0;

			@Override
			public boolean accept(XMLStreamReader reader) {
				if(reader.isStartElement() || reader.isEndElement()) {
					if(fakeRoot != null && reader.getLocalName().equals(fakeRoot)) {
						return false;
					}

					if(!isAllowedTag(reader.getLocalName())) {
						if(reader.isStartElement()) {
							ignoreDepth++;
							return false;
						}

						if(reader.isEndElement()) {
							ignoreDepth--;
							return false;
						}
					}
				}

				return (ignoreDepth == 0);
			}
		});

		if(fakeRoot != null) {
			writer.writeStartElement(fakeRoot);
		}

		// 'started' buffers an attribute-less ignore-blank start tag: it is only
		// written out if the element turns out to be non-empty (next event is not
		// its END_ELEMENT); an immediately-following END_ELEMENT drops the pair.
		String started = null;

		while(reader.hasNext()) {
			int ev = reader.getEventType();

			if(started != null && ev != XMLStreamConstants.END_ELEMENT) {
				writer.writeStartElement(started);
				started = null;
			}

			if(ev == XMLStreamConstants.START_ELEMENT) {
				int attrs = reader.getAttributeCount();
				String tag = reader.getLocalName();

				if(attrs == 0 && isIgnoreBlankTag(tag)) {
					started = tag;
				} else {
					writer.writeStartElement(tag);

					for(int i = 0; i < attrs; i++) {
						String ns = reader.getAttributeNamespace(i);
						String pf = reader.getAttributePrefix(i);

						if(pf == null || pf.isEmpty()) {
							String an = reader.getAttributeLocalName(i);
							String av = reader.getAttributeValue(i);

							if(isAllowedAttribute(tag, an, av)) {
								writer.writeAttribute(an, av);
							}
						} else {
							String an = reader.getAttributeLocalName(i);
							String av = reader.getAttributeValue(ns, an);

							if(isAllowedAttribute(tag, pf, ns, an, av)) {
								writer.writeAttribute(pf, ns, an, av);
							}
						}
					}
				}
			} else if(ev == XMLStreamConstants.END_ELEMENT) {
				if(started == null) {
					// Empty writeCharacters forces "<a></a>" instead of "<a/>" for
					// elements that must not be self-closing.
					if(!isSelfClosingElement(reader.getLocalName())) {
						writer.writeCharacters("");
					}

					writer.writeEndElement();
				}

				started = null;
			} else if(ev == XMLStreamConstants.CHARACTERS) {
				writer.writeCharacters(reader.getText());
			}

			reader.next();
		}

		if(fakeRoot != null) {
			writer.writeEndElement();
		}

		writer.writeEndDocument();

		reader.close();
		writer.close();
	}
}
/* * Copyright 2016 LINE Corporation * * LINE Corporation licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.linecorp.armeria.server.http; import static io.netty.handler.codec.http2.Http2Error.INTERNAL_ERROR; import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR; import static io.netty.handler.codec.http2.Http2Exception.connectionError; import static io.netty.handler.codec.http2.Http2Exception.streamError; import java.nio.charset.StandardCharsets; import com.google.common.net.MediaType; import com.linecorp.armeria.common.ContentTooLargeException; import com.linecorp.armeria.common.http.DefaultHttpRequest; import com.linecorp.armeria.common.http.HttpData; import com.linecorp.armeria.common.http.HttpHeaderNames; import com.linecorp.armeria.common.http.HttpHeaders; import com.linecorp.armeria.internal.InboundTrafficController; import com.linecorp.armeria.internal.http.ArmeriaHttpUtil; import com.linecorp.armeria.server.ServerConfig; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.Channel; import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http.HttpResponseStatus; import io.netty.handler.codec.http2.DefaultHttp2Headers; import io.netty.handler.codec.http2.Http2ConnectionEncoder; import io.netty.handler.codec.http2.Http2Error; import io.netty.handler.codec.http2.Http2EventAdapter; import io.netty.handler.codec.http2.Http2Exception; import 
io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.Http2Settings;
import io.netty.handler.codec.http2.Http2Stream;
import io.netty.util.collection.IntObjectHashMap;
import io.netty.util.collection.IntObjectMap;

/**
 * Decodes inbound HTTP/2 frames into {@code DecodedHttpRequest}s, one per
 * stream, and feeds them up the pipeline. Enforces the configured maximum
 * request length and answers protocol violations with error responses or
 * connection errors.
 */
final class Http2RequestDecoder extends Http2EventAdapter {

    private final ServerConfig cfg;
    private final Http2ConnectionEncoder writer;
    private final InboundTrafficController inboundTrafficController;
    // Active requests keyed by HTTP/2 stream id; entries are removed in onStreamRemoved().
    private final IntObjectMap<DecodedHttpRequest> requests = new IntObjectHashMap<>();
    // Monotonically increasing request id, local to this connection.
    private int nextId;

    Http2RequestDecoder(ServerConfig cfg, Channel channel, Http2ConnectionEncoder writer) {
        this.cfg = cfg;
        this.writer = writer;
        inboundTrafficController = new InboundTrafficController(channel);
    }

    @Override
    public void onSettingsRead(ChannelHandlerContext ctx, Http2Settings settings) {
        ctx.fireChannelRead(settings);
    }

    /**
     * Initial HEADERS create a new request; subsequent HEADERS on the same
     * stream are trailers and are appended to the existing request.
     */
    @Override
    public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers,
                              int padding, boolean endOfStream) throws Http2Exception {
        final HttpHeaders convertedHeaders = ArmeriaHttpUtil.toArmeria(headers);
        DecodedHttpRequest req = requests.get(streamId);
        if (req == null) {
            // Validate the 'content-length' header if exists.
            if (headers.contains(HttpHeaderNames.CONTENT_LENGTH)) {
                final long contentLength = headers.getLong(HttpHeaderNames.CONTENT_LENGTH, -1L);
                if (contentLength < 0) {
                    // Malformed or negative content-length: reject the stream, keep the connection.
                    writeErrorResponse(ctx, streamId, HttpResponseStatus.BAD_REQUEST);
                    return;
                }
            }

            req = new DecodedHttpRequest(ctx.channel().eventLoop(), ++nextId, streamId, convertedHeaders, true,
                                         inboundTrafficController, cfg.defaultMaxRequestLength());
            requests.put(streamId, req);
            ctx.fireChannelRead(req);
        } else {
            try {
                // Trailing headers.
                req.write(convertedHeaders);
            } catch (Throwable t) {
                req.close(t);
                throw connectionError(INTERNAL_ERROR, t, "failed to consume a HEADERS frame");
            }
        }

        if (endOfStream) {
            req.close();
        }
    }

    @Override
    public void onHeadersRead(
            ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency,
            short weight, boolean exclusive, int padding, boolean endOfStream) throws Http2Exception {
        // Priority information is ignored; delegate to the simple overload.
        onHeadersRead(ctx, streamId, headers, padding, endOfStream);
    }

    @Override
    public void onStreamRemoved(Http2Stream stream) {
        requests.remove(stream.id());
    }

    /**
     * Appends DATA payload to the stream's request, enforcing the per-request
     * maximum length. Returns the number of flow-controlled bytes processed.
     */
    @Override
    public int onDataRead(
            ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream)
            throws Http2Exception {

        final DecodedHttpRequest req = requests.get(streamId);
        if (req == null) {
            throw connectionError(PROTOCOL_ERROR, "received a DATA Frame for an unknown stream: %d", streamId);
        }

        final int dataLength = data.readableBytes();
        if (dataLength == 0) {
            // Received an empty DATA frame
            if (endOfStream) {
                req.close();
            }
            return padding;
        }

        req.increaseTransferredBytes(dataLength);

        final long maxContentLength = req.maxRequestLength();
        if (maxContentLength > 0 && req.transferredBytes() > maxContentLength) {
            // Over the limit: fail the request and answer 413 if the stream can still be written to.
            if (req.isOpen()) {
                req.close(ContentTooLargeException.get());
            }

            if (isWritable(streamId)) {
                writeErrorResponse(ctx, streamId, HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE);
            } else {
                // Cannot write to the stream. Just close it.
                final Http2Stream stream = writer.connection().stream(streamId);
                stream.close();
            }
        } else if (req.isOpen()) {
            try {
                req.write(HttpData.of(data));
            } catch (Throwable t) {
                req.close(t);
                throw connectionError(INTERNAL_ERROR, t, "failed to consume a DATA frame");
            }

            if (endOfStream) {
                req.close();
            }
        }

        // All bytes have been processed.
        return dataLength + padding;
    }

    /** @return true when a response can still be written on the given stream */
    private boolean isWritable(int streamId) {
        switch (writer.connection().stream(streamId).state()) {
            case OPEN:
            case HALF_CLOSED_REMOTE:
                return true;
            default:
                return false;
        }
    }

    /** Writes a minimal plain-text error response (headers + body) on the stream. */
    private void writeErrorResponse(ChannelHandlerContext ctx, int streamId, HttpResponseStatus status) {
        final byte[] content = status.toString().getBytes(StandardCharsets.UTF_8);

        writer.writeHeaders(
                ctx, streamId,
                new DefaultHttp2Headers(false)
                        .status(status.codeAsText())
                        .set(HttpHeaderNames.CONTENT_TYPE, MediaType.PLAIN_TEXT_UTF_8.toString())
                        .setInt(HttpHeaderNames.CONTENT_LENGTH, content.length),
                0, false, ctx.voidPromise());

        writer.writeData(
                ctx, streamId, Unpooled.wrappedBuffer(content), 0, true, ctx.voidPromise());
    }

    @Override
    public void onRstStreamRead(ChannelHandlerContext ctx, int streamId, long errorCode)
            throws Http2Exception {
        final DefaultHttpRequest req = requests.get(streamId);
        if (req == null) {
            throw connectionError(PROTOCOL_ERROR,
                                  "received a RST_STREAM frame for an unknown stream: %d", streamId);
        }

        // Peer aborted the stream; fail the request with the reported error code.
        req.close(streamError(
                streamId, Http2Error.valueOf(errorCode), "received a RST_STREAM frame"));
    }

    @Override
    public void onPushPromiseRead(ChannelHandlerContext ctx, int streamId, int promisedStreamId,
                                  Http2Headers headers, int padding) throws Http2Exception {
        // A server must never receive PUSH_PROMISE; treat it as a protocol error.
        throw connectionError(PROTOCOL_ERROR, "received a PUSH_PROMISE frame which only a server can send");
    }
}
package com.gentics.mesh.core.endpoint.schema; import static com.gentics.mesh.core.rest.MeshEvent.SCHEMA_BRANCH_ASSIGN; import static com.gentics.mesh.core.rest.MeshEvent.SCHEMA_CREATED; import static com.gentics.mesh.core.rest.MeshEvent.SCHEMA_DELETED; import static com.gentics.mesh.core.rest.MeshEvent.SCHEMA_MIGRATION_FINISHED; import static com.gentics.mesh.core.rest.MeshEvent.SCHEMA_MIGRATION_START; import static com.gentics.mesh.core.rest.MeshEvent.SCHEMA_UPDATED; import static com.gentics.mesh.example.ExampleUuids.SCHEMA_VEHICLE_UUID; import static com.gentics.mesh.http.HttpConstants.APPLICATION_JSON; import static io.netty.handler.codec.http.HttpResponseStatus.CREATED; import static io.netty.handler.codec.http.HttpResponseStatus.NO_CONTENT; import static io.netty.handler.codec.http.HttpResponseStatus.OK; import static io.vertx.core.http.HttpMethod.DELETE; import static io.vertx.core.http.HttpMethod.GET; import static io.vertx.core.http.HttpMethod.POST; import javax.inject.Inject; import org.apache.commons.lang3.StringUtils; import com.gentics.mesh.auth.MeshAuthChainImpl; import com.gentics.mesh.context.InternalActionContext; import com.gentics.mesh.parameter.impl.GenericParametersImpl; import com.gentics.mesh.parameter.impl.PagingParametersImpl; import com.gentics.mesh.parameter.impl.SchemaUpdateParametersImpl; import com.gentics.mesh.parameter.impl.VersioningParametersImpl; import com.gentics.mesh.rest.InternalEndpointRoute; import com.gentics.mesh.router.route.AbstractInternalEndpoint; /** * Verticle for /api/v2/schemas endpoint */ public class SchemaEndpoint extends AbstractInternalEndpoint { private SchemaCrudHandler crudHandler; private SchemaLock schemaLock; public SchemaEndpoint() { super("schemas", null); } @Inject public SchemaEndpoint(MeshAuthChainImpl chain, SchemaCrudHandler crudHandler, SchemaLock schemaLock) { super("schemas", chain); this.crudHandler = crudHandler; this.schemaLock = schemaLock; } @Override public String getDescription() { 
return "Provides endpoints which allow the manipulation of schemas."; } @Override public void registerEndPoints() { secureAll(); addReadHandlers(); addDiffHandler(); addChangesHandler(); addCreateHandler(); addUpdateHandler(); addDeleteHandler(); } private void addChangesHandler() { // Endpoint readChanges = createEndpoint(); // readChanges.path("/:schemaUuid/changes"); // readChanges.method(GET); // readChanges.description("Return a list of changes "); // readChanges.produces(APPLICATION_JSON); // readChanges.exampleResponse(OK, schemaExamples.) // readChanges.handler(rc -> { // crudHandler.handleGetSchemaChanges(InternalActionContext.create(rc)); // }); InternalEndpointRoute endpoint = createRoute(); endpoint.path("/:schemaUuid/changes"); endpoint.addUriParameter("schemaUuid", "Uuid of the schema.", SCHEMA_VEHICLE_UUID); endpoint.method(POST); endpoint.description("Apply the posted changes to the schema. The schema migration will not automatically be started."); endpoint.produces(APPLICATION_JSON); endpoint.exampleRequest(schemaExamples.getSchemaChangesListModel()); endpoint.exampleResponse(OK, miscExamples.createMessageResponse(), "Schema changes have been applied."); endpoint.events(SCHEMA_UPDATED, SCHEMA_BRANCH_ASSIGN, SCHEMA_MIGRATION_START, SCHEMA_MIGRATION_FINISHED); endpoint.blockingHandler(rc -> { InternalActionContext ac = wrap(rc); String schemaUuid = ac.getParameter("schemaUuid"); crudHandler.handleApplySchemaChanges(ac, schemaUuid); }); } private void addCreateHandler() { InternalEndpointRoute endpoint = createRoute(); endpoint.path("/"); endpoint.method(POST); endpoint.description("Create a new schema."); endpoint.consumes(APPLICATION_JSON); endpoint.produces(APPLICATION_JSON); endpoint.exampleRequest(schemaExamples.getSchemaCreateRequest()); endpoint.exampleResponse(CREATED, schemaExamples.getSchemaResponse(), "Created schema."); endpoint.events(SCHEMA_CREATED); endpoint.blockingHandler(rc -> { InternalActionContext ac = wrap(rc); 
crudHandler.handleCreate(ac); }); } private void addDiffHandler() { InternalEndpointRoute diffEndpoint = createRoute(); diffEndpoint.path("/:schemaUuid/diff"); diffEndpoint.addUriParameter("schemaUuid", "Uuid of the schema.", SCHEMA_VEHICLE_UUID); diffEndpoint.method(POST); diffEndpoint.setMutating(false); diffEndpoint.description("Compare the given schema with the stored schema and create a changeset."); diffEndpoint.consumes(APPLICATION_JSON); diffEndpoint.produces(APPLICATION_JSON); diffEndpoint.exampleRequest(schemaExamples.getSchemaResponse()); diffEndpoint.exampleResponse(OK, schemaExamples.getSchemaChangesListModel(), "List of schema changes that were detected by comparing the posted schema and the current version."); diffEndpoint.blockingHandler(rc -> { InternalActionContext ac = wrap(rc); String uuid = ac.getParameter("schemaUuid"); crudHandler.handleDiff(ac, uuid); }); } private void addUpdateHandler() { InternalEndpointRoute endpoint = createRoute(); endpoint.path("/:schemaUuid"); endpoint.addUriParameter("schemaUuid", "Uuid of the schema.", SCHEMA_VEHICLE_UUID); endpoint.method(POST); endpoint.description("Update the schema."); endpoint.consumes(APPLICATION_JSON); endpoint.produces(APPLICATION_JSON); endpoint.addQueryParameters(SchemaUpdateParametersImpl.class); endpoint.exampleRequest(schemaExamples.getSchemaUpdateRequest()); endpoint.exampleResponse(OK, schemaExamples.getSchemaResponse(), "Updated schema."); endpoint.events(SCHEMA_UPDATED, SCHEMA_MIGRATION_START, SCHEMA_MIGRATION_FINISHED); endpoint.blockingHandler(rc -> { // Update operations should always be executed sequentially - never in parallel synchronized (schemaLock.mutex()) { InternalActionContext ac = wrap(rc); String uuid = ac.getParameter("schemaUuid"); crudHandler.handleUpdate(ac, uuid); } }, true); } private void addDeleteHandler() { InternalEndpointRoute endpoint = createRoute(); endpoint.path("/:schemaUuid"); endpoint.addUriParameter("schemaUuid", "Uuid of the schema.", 
SCHEMA_VEHICLE_UUID); endpoint.method(DELETE); endpoint.description("Delete the schema with the given uuid."); endpoint.produces(APPLICATION_JSON); endpoint.exampleResponse(NO_CONTENT, "Schema was successfully deleted."); endpoint.events(SCHEMA_DELETED); endpoint.blockingHandler(rc -> { InternalActionContext ac = wrap(rc); String uuid = ac.getParameter("schemaUuid"); crudHandler.handleDelete(ac, uuid); }); } private void addReadHandlers() { InternalEndpointRoute readOne = createRoute(); readOne.path("/:schemaUuid"); readOne.addUriParameter("schemaUuid", "Uuid of the schema.", SCHEMA_VEHICLE_UUID); readOne.method(GET); readOne.addQueryParameters(VersioningParametersImpl.class); readOne.description("Load the schema with the given uuid."); readOne.exampleResponse(OK, schemaExamples.getSchemaResponse(), "Loaded schema."); readOne.addQueryParameters(GenericParametersImpl.class); readOne.produces(APPLICATION_JSON); readOne.blockingHandler(rc -> { String uuid = rc.request().params().get("schemaUuid"); if (StringUtils.isEmpty(uuid)) { rc.next(); } else { InternalActionContext ac = wrap(rc); crudHandler.handleRead(ac, uuid); } }); InternalEndpointRoute readAll = createRoute(); readAll.path("/"); readAll.method(GET); readAll.description("Read multiple schemas and return a paged list response."); readAll.produces(APPLICATION_JSON); readAll.addQueryParameters(PagingParametersImpl.class); readAll.addQueryParameters(GenericParametersImpl.class); readAll.exampleResponse(OK, schemaExamples.getSchemaListResponse(), "Loaded list of schemas."); readAll.blockingHandler(rc -> { InternalActionContext ac = wrap(rc); crudHandler.handleReadList(ac); }); } }
package org.inaturalist.android;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.tinylog.Logger;

import uk.co.senab.photoview.HackyViewPager;
import uk.co.senab.photoview.PhotoViewAttacher;

import com.bumptech.glide.Glide;
import com.bumptech.glide.RequestBuilder;
import com.bumptech.glide.load.DataSource;
import com.bumptech.glide.load.engine.GlideException;
import com.bumptech.glide.request.RequestListener;
import com.bumptech.glide.request.RequestOptions;
import com.bumptech.glide.request.target.BaseTarget;
import com.bumptech.glide.request.target.SizeReadyCallback;
import com.bumptech.glide.request.target.Target;
import com.bumptech.glide.request.transition.Transition;
import com.evernote.android.state.State;
import com.livefront.bridge.Bridge;
import com.yalantis.ucrop.UCrop;
import com.yalantis.ucrop.UCropFragment;

import android.app.Activity;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.BitmapFactory;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Bundle;
import androidx.annotation.DrawableRes;
import androidx.annotation.Nullable;
import androidx.appcompat.widget.Toolbar;
import androidx.viewpager.widget.PagerAdapter;
import androidx.viewpager.widget.ViewPager;
import androidx.viewpager.widget.ViewPager.LayoutParams;
import androidx.appcompat.app.ActionBar;
import androidx.appcompat.app.AppCompatActivity;
import android.util.Pair;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.Toast;

/**
 * Full-screen photo pager for an observation (or taxon). Supports two modes:
 * - Existing/online observation: photos come from a JSONObject; action buttons are hidden.
 * - New observation: photos come from the local content provider; the user may delete,
 *   duplicate or edit (crop) photos, and the results are returned to the caller via the
 *   activity result intent.
 * State fields annotated with @State are persisted/restored via the Bridge library.
 */
public class ObservationPhotosViewer extends AppCompatActivity {
    private static String TAG = "ObservationPhotosViewer";
    private INaturalistApp mApp;
    private ActivityHelper mHelper;
    // Serialized observation JSON (only used in the read-only / existing-observation mode).
    @State(AndroidStateBundlers.JSONObjectBundler.class) public JSONObject mObservation;
    private HackyViewPager mViewPager;

    // Intent extra keys (input) and result extra keys (output).
    public static final String IS_NEW_OBSERVATION = "is_new_observation";
    public static final String OBSERVATION = "observation";
    public static final String OBSERVATION_ID = "observation_id";
    public static final String OBSERVATION_ID_INTERNAL = "observation_id_internal";
    public static final String OBSERVATION_UUID = "observation_uuid";
    public static final String CURRENT_PHOTO_INDEX = "current_photo_index";
    public static final String READ_ONLY = "read_only";
    public static final String IS_TAXON = "is_taxon";
    public static final String REPLACED_PHOTOS = "replaced_photos";
    public static final String SET_DEFAULT_PHOTO_INDEX = "set_default_photo_index";
    public static final String DELETE_PHOTO_INDEX = "delete_photo_index";
    public static final String DUPLICATE_PHOTO_INDEX = "duplicate_photo_index";

    @State public boolean mIsNewObservation;
    @State public int mObservationId;
    @State public int mCurrentPhotoIndex;
    private View mDeletePhoto;
    private View mDuplicatePhoto;
    private View mEditPhoto;
    private View mActionContainer;
    @State public boolean mReadOnly;
    @State public int mObservationIdInternal;
    @State public String mObservationUUID;
    @State public boolean mIsTaxon;
    // Pairs of (new photo Uri, photo index) for photos the user edited/cropped.
    @State(AndroidStateBundlers.ListPairBundler.class) public List<Pair<Uri, Long>> mReplacedPhotos = new ArrayList<>();
    private IdPicsPagerAdapter mAdapter;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Bridge.restoreInstanceState(this, savedInstanceState);

        ActionBar actionBar = getSupportActionBar();
        actionBar.setHomeButtonEnabled(true);
        actionBar.setDisplayHomeAsUpEnabled(true);
        actionBar.setLogo(R.drawable.ic_arrow_back);

        mApp = (INaturalistApp) getApplicationContext();
        mApp.applyLocaleSettings(getBaseContext());

        setContentView(R.layout.observation_photos);

        mDeletePhoto = findViewById(R.id.delete_photo);
        mDuplicatePhoto = findViewById(R.id.duplicate_photo);
        mEditPhoto = findViewById(R.id.edit_photo);
        mActionContainer = findViewById(R.id.action_container);

        mHelper = new ActivityHelper(this);

        Intent intent = getIntent();

        try {
            // Only read extras on a fresh start; on recreation @State fields were restored above.
            if (savedInstanceState == null) {
                mIsNewObservation = intent.getBooleanExtra(IS_NEW_OBSERVATION, false);
                mCurrentPhotoIndex = intent.getIntExtra(CURRENT_PHOTO_INDEX, 0);
                if (!mIsNewObservation) {
                    String observationString = intent.getStringExtra(OBSERVATION);
                    if (observationString != null) mObservation = new JSONObject(observationString);
                } else {
                    mObservationId = intent.getIntExtra(OBSERVATION_ID, 0);
                    mObservationIdInternal = intent.getIntExtra(OBSERVATION_ID_INTERNAL, 0);
                    mObservationUUID = intent.getStringExtra(OBSERVATION_UUID);
                }

                mReadOnly = intent.getBooleanExtra(READ_ONLY, false);
                mIsTaxon = intent.getBooleanExtra(IS_TAXON, false);
            }
        } catch (JSONException e) {
            Logger.tag(TAG).error(e);
        }

        if (mIsTaxon) {
            if (mApp.getShowScientificNameFirst()) {
                // Show scientific name first, before common name
                actionBar.setTitle(TaxonUtils.getTaxonScientificName(mApp, mObservation));
            } else {
                actionBar.setTitle(TaxonUtils.getTaxonName(this, mObservation));
            }
        } else {
            actionBar.setTitle(R.string.observation_photos);
        }

        mViewPager = (HackyViewPager) findViewById(R.id.id_pic_view_pager);
        if ((mObservation != null) && (!mIsNewObservation)) {
            // Existing observation / taxon: online photos, no editing actions.
            mAdapter = new IdPicsPagerAdapter(this, mViewPager, mObservation, mIsTaxon);
            mViewPager.setAdapter(mAdapter);
            mEditPhoto.setVisibility(View.GONE);
            mDuplicatePhoto.setVisibility(View.GONE);
            mDeletePhoto.setVisibility(View.GONE);
        } else if (mIsNewObservation) {
            // New observation: local photos from the content provider, editing allowed.
            mAdapter = new IdPicsPagerAdapter(this, mViewPager, mObservationId, mObservationIdInternal, mObservationUUID);
            mViewPager.setAdapter(mAdapter);

            if (mReplacedPhotos.size() > 0) {
                // Update with any modified/cropped photos
                for (Pair<Uri, Long> replacedPhoto : mReplacedPhotos) {
                    mAdapter.setImageUri(replacedPhoto.second.intValue(), replacedPhoto.first);
                }
            }

            mActionContainer.setVisibility(mReadOnly ? View.GONE : View.VISIBLE);

            // Delete: return the current photo index to the caller (caller performs the delete).
            mDeletePhoto.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View view) {
                    Intent data = new Intent();
                    data.putExtra(DELETE_PHOTO_INDEX, mViewPager.getCurrentItem());
                    if (mReplacedPhotos.size() > 0) {
                        data.putExtra(REPLACED_PHOTOS, replacedPhotosToString());
                    }
                    setResult(RESULT_OK, data);
                    finish();
                }
            });

            // Duplicate: return the current photo index, unless the photo cap is reached.
            mDuplicatePhoto.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View view) {
                    if (mAdapter.getCount() >= ObservationEditor.MAX_PHOTOS_PER_OBSERVATION) {
                        mHelper.alert(String.format(getString(R.string.no_more_photos_allowed), ObservationEditor.MAX_PHOTOS_PER_OBSERVATION));
                        return;
                    }

                    Intent data = new Intent();
                    data.putExtra(DUPLICATE_PHOTO_INDEX, mViewPager.getCurrentItem());
                    if (mReplacedPhotos.size() > 0) {
                        data.putExtra(REPLACED_PHOTOS, replacedPhotosToString());
                    }
                    setResult(RESULT_OK, data);
                    finish();
                }
            });

            mEditPhoto.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View view) {
                    editPhoto(mViewPager.getCurrentItem());
                }
            });
        }

        mViewPager.setCurrentItem(mCurrentPhotoIndex);
    }

    /**
     * Launches the photo editor (crop UI) for the photo at the given pager index.
     * Remote (http) photos are passed as URLs, local ones as file URIs.
     */
    private void editPhoto(int photoIndex) {
        IdPicsPagerAdapter adapter = (IdPicsPagerAdapter) mViewPager.getAdapter();
        String sourceImage = adapter.getImageUri(photoIndex);

        if (sourceImage == null) {
            Toast.makeText(getApplicationContext(), getString(R.string.couldnt_edit_photo), Toast.LENGTH_SHORT).show();
            return;
        }

        Uri sourceUri = sourceImage.startsWith("http") ? Uri.parse(sourceImage) : Uri.fromFile(new File(sourceImage));
        mCurrentPhotoIndex = photoIndex;

        Intent intent = new Intent(ObservationPhotosViewer.this, ObservationPhotoEditor.class);
        intent.putExtra(ObservationPhotoEditor.PHOTO_URI, sourceUri.toString());
        startActivityForResult(intent, UCrop.REQUEST_CROP);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);

        if (requestCode == UCrop.REQUEST_CROP) {
            if (resultCode == Activity.RESULT_OK) {
                // Replace current photo with edited photo
                Uri uri = data.getParcelableExtra(UCrop.EXTRA_OUTPUT_URI);
                IdPicsPagerAdapter adapter = (IdPicsPagerAdapter) mViewPager.getAdapter();
                adapter.setImageUri(mCurrentPhotoIndex, uri);

                // NOTE(review): the loop below uses mCurrentPhotoIndex (a photo index) as a
                // list index into mReplacedPhotos when replacing an existing entry — it looks
                // like it should use the loop index i instead; confirm intended behavior.
                for (int i = 0; i < mReplacedPhotos.size(); i++) {
                    Pair<Uri, Long> pair = mReplacedPhotos.get(i);
                    if ((pair.second == mCurrentPhotoIndex) && (mCurrentPhotoIndex < mReplacedPhotos.size())) {
                        // User already edited this photo before - just replace the edit
                        mReplacedPhotos.set(mCurrentPhotoIndex, new Pair<Uri, Long>(uri, Long.valueOf(mCurrentPhotoIndex)));
                        return;
                    }
                }

                // New edit of this photo - save it
                mReplacedPhotos.add(new Pair<>(uri, Long.valueOf(mCurrentPhotoIndex)));
            }
        }
    }

    @Override
    public void onBackPressed() {
        // Make sure any photo edits are returned to the caller before leaving.
        checkForReplacedPhotos();
        super.onBackPressed();
    }

    /** Sets the activity result: RESULT_OK with the edited-photo list, or RESULT_CANCELED. */
    private void checkForReplacedPhotos() {
        if (mReplacedPhotos.size() > 0) {
            Intent data = new Intent();
            data.putExtra(REPLACED_PHOTOS, replacedPhotosToString());
            setResult(RESULT_OK, data);
        } else {
            setResult(RESULT_CANCELED);
        }
    }

    /** Serializes mReplacedPhotos into a device-independent string representation. */
    private String replacedPhotosToString() {
        // On some devices (like older Android 6 Samsung), string representation of a list of pairs
        // is not the same as other devices - convert it to a standard representation)
        StringBuilder builder = new StringBuilder();
        builder.append('[');
        for (int i = 0; i < mReplacedPhotos.size(); i++) {
            Pair<Uri, Long> pair = mReplacedPhotos.get(i);
            builder.append("Pair{");
            builder.append(pair.first.toString());
            builder.append(' ');
            builder.append(pair.second.toString());
            if (i < mReplacedPhotos.size() - 1) {
                builder.append("}, ");
            } else {
                builder.append('}');
            }
        }
        builder.append(']');
        return builder.toString();
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                checkForReplacedPhotos();
                finish();
                return true;
            case R.id.edit_photo_license:
                // Load the photo row for the current page and let the user pick a new license.
                Long imageId = mAdapter.getImageId(mViewPager.getCurrentItem());
                Uri uri = ContentUris.withAppendedId(ObservationPhoto.CONTENT_URI, imageId);
                Cursor c = getContentResolver().query(uri, ObservationPhoto.PROJECTION, null, null, null);
                ObservationPhoto photo = new ObservationPhoto(c);
                c.close();
                LicenseUtils.showLicenseChooser(this, R.string.photo_license, photo.license, license -> {
                    photo.license = license.value;
                    ContentValues cv = photo.getContentValues();
                    getContentResolver().update(uri, cv, null, null);
                });
                return true;
            case R.id.set_as_first:
                // Return the current index so the caller can make it the default photo.
                Intent data = new Intent();
                data.putExtra(SET_DEFAULT_PHOTO_INDEX, mViewPager.getCurrentItem());
                setResult(RESULT_OK, data);
                finish();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // The menu (license / set-as-first actions) only applies to editable new observations.
        if (mIsNewObservation && !mReadOnly) {
            MenuInflater inflater = getMenuInflater();
            inflater.inflate(R.menu.observation_photos_viewer_menu, menu);
            return true;
        } else {
            return super.onCreateOptionsMenu(menu);
        }
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        // Remember the page the user was on so it survives recreation.
        mCurrentPhotoIndex = mViewPager.getCurrentItem();
        super.onSaveInstanceState(outState);
        Bridge.saveInstanceState(this, outState);
    }

    /**
     * Pager adapter showing either local (offline) observation photos loaded from the
     * content provider, or online photos parsed out of an observation/taxon JSONObject.
     * Each page is a zoomable ImageView (PhotoViewAttacher).
     */
    public static class IdPicsPagerAdapter extends PagerAdapter {
        private INaturalistApp mApp = null;

        /** Callback for zoom-state changes of the currently displayed photo. */
        public static interface OnZoomListener {
            void onZoomedIn();
            void onZoomOriginal();
        }

        int mDefaultTaxonIcon;
        // Parallel lists indexed by page position.
        List<String> mImages;
        List<ImageView> mImageViews;
        List<PhotoViewAttacher> mImageViewAttachers;
        List<Long> mPhotoIds;
        List<String> mImageThumbnails;
        Activity mActivity;
        ViewPager mViewPager;
        private OnClickListener mClickListener;
        private OnZoomListener mZoomListener = null;
        private Integer mObservationId = null;
        private Integer mInternalObservationId = null;
        private String mObservationUUID = null;

        public IdPicsPagerAdapter(Activity activity, ViewPager viewPager, int observationId, int _observationId, String uuid, OnClickListener listener) {
            this(activity, viewPager, observationId, _observationId, uuid);
            mClickListener = listener;
        }

        public void setOnZoomListener(OnZoomListener listener) {
            mZoomListener = listener;
        }

        // Load offline photos for a new observation
        public IdPicsPagerAdapter(Activity activity, ViewPager viewPager, int observationId, int _observationId, String uuid) {
            mActivity = activity;
            mViewPager = viewPager;
            mImages = new ArrayList<String>();
            mImageViews = new ArrayList<>();
            mImageViewAttachers = new ArrayList<>();
            mPhotoIds = new ArrayList<>();
            mImageThumbnails = new ArrayList<String>();
            mObservationId = observationId;
            mInternalObservationId = _observationId;
            mObservationUUID = uuid;
            mApp = (INaturalistApp)activity.getApplication();

            if (mObservationUUID == null) {
                Logger.tag(TAG).error("UUID is null!");
                return;
            }

            // Query non-deleted photos of this observation, ordered per layout direction.
            Cursor imageCursor = activity.getContentResolver().query(ObservationPhoto.CONTENT_URI,
                    ObservationPhoto.PROJECTION,
                    "(observation_uuid=?) AND ((is_deleted = 0) OR (is_deleted IS NULL))",
                    new String[]{mObservationUUID},
                    mApp.isLayoutRTL() ? ObservationPhoto.REVERSE_DEFAULT_SORT_ORDER : ObservationPhoto.DEFAULT_SORT_ORDER);

            imageCursor.moveToFirst();

            if (imageCursor.getCount() == 0) {
                imageCursor.close();
                return;
            }

            // Pre-fill the view/attacher slots so positions can be set later.
            for (int i = 0; i < imageCursor.getCount(); i++) {
                mImageViews.add(null);
                mImageViewAttachers.add(null);
            }

            do {
                String photoFileName = imageCursor.getString(imageCursor.getColumnIndexOrThrow(ObservationPhoto.PHOTO_FILENAME));
                String originalPhotoFileName = imageCursor.getString(imageCursor.getColumnIndexOrThrow(ObservationPhoto.ORIGINAL_PHOTO_FILENAME));
                if ((originalPhotoFileName != null) && ((new File(originalPhotoFileName).exists()))) {
                    // Use original full-resolution photo if possible
                    photoFileName = originalPhotoFileName;
                }
                if ((photoFileName != null) && (!(new File(photoFileName).exists()))) {
                    // Our local copy file was deleted
                    photoFileName = null;
                }
                String imageUrl = imageCursor.getString(imageCursor.getColumnIndexOrThrow(ObservationPhoto.PHOTO_URL));
                // Prefer the online URL when available, otherwise the local file path.
                mImages.add(imageUrl != null ? imageUrl : photoFileName);
                mPhotoIds.add(imageCursor.getLong(imageCursor.getColumnIndexOrThrow(ObservationPhoto._ID)));

                if (imageUrl != null) {
                    // Deduct the URL of the thumbnail
                    // NOTE(review): assumes the URL contains both '/' and '.'; otherwise
                    // substring/lastIndexOf would throw - confirm URL format guarantees.
                    String thumbnailUrl = imageUrl.substring(0, imageUrl.lastIndexOf('/')) + "/small" + imageUrl.substring(imageUrl.lastIndexOf('.'));
                    mImageThumbnails.add(thumbnailUrl);
                } else {
                    mImageThumbnails.add(null);
                }
            } while (imageCursor.moveToNext());
            imageCursor.close();
        }

        public IdPicsPagerAdapter(Activity activity, ViewPager viewPager, JSONObject observation, boolean isTaxon, OnClickListener listener) {
            this(activity, viewPager, observation, isTaxon);
            mClickListener = listener;
        }

        // Load online photos for an existing observation
        public IdPicsPagerAdapter(Activity activity, ViewPager viewPager, JSONObject observation, boolean isTaxon) {
            mActivity = activity;
            mViewPager = viewPager;
            mImages = new ArrayList<String>();
            mImageViews = new ArrayList<>();
            mImageViewAttachers = new ArrayList<>();
            mPhotoIds = new ArrayList<>();
            mImageThumbnails = new ArrayList<String>();
            mDefaultTaxonIcon = TaxonUtils.observationIcon(observation);

            JSONArray photos = observation == null ? null : observation.optJSONArray(isTaxon ? "taxon_photos" : "observation_photos");
            if ((photos != null) && (photos.length() > 0)) {
                for (int i = 0; i < photos.length(); i++) {
                    mImageViews.add(null);
                    mImageViewAttachers.add(null);
                    mPhotoIds.add(null);
                }

                // Show the photos
                for (int i = 0; i < photos.length(); i++) {
                    JSONObject photo = photos.optJSONObject(i);
                    if (photo != null) {
                        JSONObject innerPhoto = photo.optJSONObject("photo");
                        if (innerPhoto != null) {
                            String photoUrl = innerPhoto.has("original_url") ? innerPhoto.optString("original_url") : innerPhoto.optString("large_url");
                            if ((photoUrl != null) && (photoUrl.length() > 0)) {
                                mImages.add(photoUrl);
                                mImageThumbnails.add(innerPhoto.has("thumb_url") ? innerPhoto.optString("thumb_url") : innerPhoto.optString("small_url"));
                            } else {
                                // No explicit original/large URL - derive sizes from the base "url".
                                String url = innerPhoto.optString("url");
                                if (isTaxon) {
                                    // Taxon photos sometimes point to outside sources like Flickr, so we can't deduce thumbnail/original size URLs.
                                    mImages.add(url);
                                    mImageThumbnails.add(url);
                                } else if ((url != null) && (url.length() > 0)) {
                                    String extension = url.substring(url.lastIndexOf("."));
                                    // Deduce the original-sized URL
                                    if (url.substring(0, url.lastIndexOf('/')).endsWith("assets")) {
                                        // It's an assets default URL - e.g. https://www.inaturalist.org/assets/copyright-infringement-square.png
                                        mImages.add(url.substring(0, url.lastIndexOf('-') + 1) + "original" + extension);
                                        mImageThumbnails.add(url.substring(0, url.lastIndexOf('-') + 1) + "square" + extension);
                                    } else {
                                        // "Regular" observation photo
                                        mImages.add(url.substring(0, url.lastIndexOf('/') + 1) + "original" + extension);
                                        mImageThumbnails.add(url.substring(0, url.lastIndexOf('/') + 1) + "square" + extension);
                                    }
                                }
                            }
                        }
                    }
                }
            } else {
                // Show taxon icon
                mImages.add(null);
                mImageThumbnails.add(null);
                mImageViews.add(null);
                mImageViewAttachers.add(null);
            }
        }

        @Override
        public int getCount() {
            return mImages.size();
        }

        /** Returns the image URL/path at index, or null if out of range. */
        public String getImageUri(int index) {
            if ((index < 0) || (index >= mImages.size())) return null;
            return mImages.get(index);
        }

        /** Returns the content-provider row id for the photo at index, or null if out of range. */
        public Long getImageId(int index) {
            if ((index < 0) || (index >= mImages.size())) return null;
            return mPhotoIds.get(index);
        }

        // Sets a new image URI (assumes it's an offline image) - basically replaces existing photo
        public void setImageUri(int index, Uri uri) {
            if ((index < 0) || (index >= mImages.size())) return;
            if ((mInternalObservationId == null) || (mObservationId == null)) return; // Can't edit a read-only observation's photos

            // Refresh UI
            mImages.set(index, uri.getPath());
            ImageView imageView = mImageViews.get(index);
            if (imageView != null) {
                imageView.setImageBitmap(BitmapFactory.decodeFile(uri.getPath()));
                mImageViewAttachers.get(index).update();
            }
        }

        /**
         * Inflates a zoomable photo page. Local files are loaded via Glide directly;
         * online photos load a thumbnail first, then the derived original-size URL.
         */
        @Override
        public View instantiateItem(ViewGroup container, int position) {
            View layout = (View) mActivity.getLayoutInflater().inflate(R.layout.observation_photo, null, false);
            container.addView(layout, LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);

            final ImageView imageView = (ImageView) layout.findViewById(R.id.id_pic);
            mImageViews.set(position, imageView);
            final ProgressBar loading = (ProgressBar) layout.findViewById(R.id.id_pic_loading);

            String imagePath = mImages.get(position);
            PhotoViewAttacher attacher = null;

            if (FileUtils.isLocal(imagePath)) {
                // Offline photo
                try {
                    attacher = new PhotoViewAttacher(imageView);
                    mImageViewAttachers.set(position, attacher);
                    final PhotoViewAttacher finalAttacher2 = attacher;
                    GlideApp.with(mActivity)
                            .load(new File(imagePath))
                            .listener(new RequestListener<Drawable>() {
                                @Override
                                public boolean onLoadFailed(@Nullable GlideException e, Object model, Target<Drawable> target, boolean isFirstResource) {
                                    return false;
                                }

                                @Override
                                public boolean onResourceReady(Drawable resource, Object model, Target<Drawable> target, DataSource dataSource, boolean isFirstResource) {
                                    imageView.setImageDrawable(resource);
                                    // Re-sync the zoom attacher with the new drawable bounds.
                                    finalAttacher2.update();
                                    return true;
                                }
                            })
                            .into(imageView);
                } catch (Exception e) {
                    Logger.tag(TAG).error(e);
                }
            } else {
                // Online photo
                String imageUrl = mImages.get(position);
                if (imageUrl == null) {
                    // Show a default taxon image
                    imageView.setImageResource(mDefaultTaxonIcon);
                } else {
                    loading.setVisibility(View.VISIBLE);
                    imageView.setVisibility(View.INVISIBLE);
                    attacher = new PhotoViewAttacher(imageView);
                    mImageViewAttachers.set(position, attacher);

                    // Deduce the original-sized URL
                    String extension = imageUrl.substring(imageUrl.lastIndexOf('.'));
                    if (imageUrl.substring(0, imageUrl.lastIndexOf('/')).endsWith("assets")) {
                        // It's an assets default URL - e.g. https://www.inaturalist.org/assets/copyright-infringement-square.png
                        imageUrl = imageUrl.substring(0, imageUrl.lastIndexOf('-') + 1) + "original" + extension;
                    } else {
                        // "Regular" observation photo
                        imageUrl = imageUrl.substring(0, imageUrl.lastIndexOf('/') + 1) + "original" + extension;
                    }

                    // Show a photo
                    String thumbnailUrl = mImageThumbnails.get(position);
                    final PhotoViewAttacher finalAttacher = attacher;
                    RequestBuilder<Drawable> imageRequest = Glide.with(mActivity)
                            .load(imageUrl);
                    if (thumbnailUrl != null) {
                        // Load a scaled down version (thumbnail) of the image first
                        RequestBuilder<Drawable> thumbnailRequest = Glide.
                                with(mActivity).
                                load(thumbnailUrl).
                                apply(new RequestOptions().placeholder(mDefaultTaxonIcon));
                        imageRequest = imageRequest.thumbnail(thumbnailRequest);
                    } else {
                        imageRequest = imageRequest.
                                apply(new RequestOptions().placeholder(mDefaultTaxonIcon));
                        ;
                    }

                    BaseTarget target = new BaseTarget<Drawable>() {
                        @Override
                        public void onResourceReady(Drawable bitmap, Transition<? super Drawable> transition) {
                            imageView.setImageDrawable(bitmap);
                            loading.setVisibility(View.GONE);
                            imageView.setVisibility(View.VISIBLE);
                            finalAttacher.update();
                        }

                        @Override
                        public void getSize(SizeReadyCallback cb) {
                            cb.onSizeReady(SIZE_ORIGINAL, SIZE_ORIGINAL);
                        }

                        @Override
                        public void removeCallback(SizeReadyCallback cb) {}
                    };
                    imageRequest.into(target);
                }
            }

            if ((mClickListener != null) && (attacher != null)) {
                attacher.setOnPhotoTapListener(new PhotoViewAttacher.OnPhotoTapListener() {
                    @Override
                    public void onPhotoTap(View view, float x, float y) {
                        mClickListener.onClick(view);
                    }
                });
            }

            if (attacher != null) {
                attacher.setMaximumScale(7.0f);
                final PhotoViewAttacher finalAttacher1 = attacher;
                // Report zoom-in / zoom-back-to-original transitions to the listener.
                attacher.setOnMatrixChangeListener(new PhotoViewAttacher.OnMatrixChangedListener() {
                    @Override
                    public void onMatrixChanged(RectF rect) {
                        float scale = finalAttacher1.getScale();
                        if (mZoomListener != null) {
                            if (scale > 1.0f) {
                                mZoomListener.onZoomedIn();
                            } else {
                                mZoomListener.onZoomOriginal();
                            }
                        }
                    }
                });
            }

            return layout;
        }

        @Override
        public void destroyItem(ViewGroup container, int position, Object object) {
            container.removeView((View) object);
        }

        @Override
        public boolean isViewFromObject(View view, Object object) {
            return view == object;
        }
    }
}
/* * Hibernate, Relational Persistence for Idiomatic Java * * Copyright (c) 2011, Red Hat Inc. or third-party contributors as * indicated by the @author tags or express copyright attribution * statements applied by the authors. All third-party contributions are * distributed under license by Red Hat Inc. * * This copyrighted material is made available to anyone wishing to use, modify, * copy, or redistribute it subject to the terms and conditions of the GNU * Lesser General Public License, as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License * for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this distribution; if not, write to: * Free Software Foundation, Inc. * 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA */ package org.hibernate.test.annotations.manytoone; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.junit.Test; import org.hibernate.Hibernate; import org.hibernate.Query; import org.hibernate.Session; import org.hibernate.Transaction; import org.hibernate.test.annotations.Company; import org.hibernate.test.annotations.Customer; import org.hibernate.test.annotations.Discount; import org.hibernate.test.annotations.Flight; import org.hibernate.test.annotations.Passport; import org.hibernate.test.annotations.Ticket; import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; /** * @author Emmanuel Bernard */ public class ManyToOneTest extends BaseCoreFunctionalTestCase { @Test public void testEager() throws Exception { Session s; Transaction tx; s = 
// NOTE(review): the chunk opens mid-method — the statements below are the tail of a
// test whose signature lies before this chunk (persist a Car with a Yellow body
// Color, reload it in a fresh session, verify the many-to-one survived).
openSession();
tx = s.beginTransaction();
Color c = new Color();
c.setName( "Yellow" );
s.persist( c );
Car car = new Car();
car.setBodyColor( c );
s.persist( car );
tx.commit();
s.close();
s = openSession();
tx = s.beginTransaction();
car = (Car) s.get( Car.class, car.getId() );
tx.commit();
s.close();
assertNotNull( car );
assertNotNull( car.getBodyColor() );
assertEquals( "Yellow", car.getBodyColor().getName() );
}

/**
 * Persists a Car referencing a Color and reloads it, asserting the association
 * is readable and the referenced Color keeps its generated id.
 */
@Test
public void testDefaultMetadata() throws Exception {
	Session s;
	Transaction tx;
	s = openSession();
	tx = s.beginTransaction();
	Color c = new Color();
	c.setName( "Blue" );
	s.persist( c );
	Car car = new Car();
	car.setBodyColor( c );
	s.persist( car );
	tx.commit();
	s.close();
	s = openSession();
	tx = s.beginTransaction();
	car = (Car) s.get( Car.class, car.getId() );
	assertNotNull( car );
	assertNotNull( car.getBodyColor() );
	assertEquals( c.getId(), car.getBodyColor().getId() );
	// rollback: this test only reads; nothing to commit
	tx.rollback();
	s.close();
}

/**
 * Persists a Flight with an explicit id and a cascaded Company, then reloads
 * the Flight and asserts the Company association (and its identity-generated
 * id) round-trips.
 */
@Test
public void testCreate() throws Exception {
	Session s;
	Transaction tx;
	s = openSession();
	tx = s.beginTransaction();
	Flight firstOne = new Flight();
	firstOne.setId( new Long( 1 ) );
	firstOne.setName( "AF0101" );
	firstOne.setDuration( new Long( 1000 ) );
	Company frenchOne = new Company();
	frenchOne.setName( "Air France" );
	firstOne.setCompany( frenchOne );
	// persisting the Flight is expected to cascade to the (new) Company
	s.persist( firstOne );
	tx.commit();
	s.close();
	// id must have been assigned by the identity generator during the flush above
	assertNotNull( "identity id should work", frenchOne.getId() );
	s = openSession();
	tx = s.beginTransaction();
	firstOne = (Flight) s.get( Flight.class, new Long( 1 ) );
	assertNotNull( firstOne.getCompany() );
	assertEquals( frenchOne.getName(), firstOne.getCompany().getName() );
	tx.commit();
	s.close();
}

/**
 * Exercises cascading across a bidirectional Discount/Customer pair:
 * persist-cascade from Discount to its owner, re-assignment of the owner on a
 * detached-then-reloaded Discount, and delete of the owning Customer.
 */
@Test
public void testCascade() throws Exception {
	Session s;
	Transaction tx;
	s = openSession();
	tx = s.beginTransaction();
	Discount discount = new Discount();
	discount.setDiscount( 20.12 );
	Customer customer = new Customer();
	Collection discounts = new ArrayList();
	discounts.add( discount );
	customer.setName( "Quentin Tarantino" );
	discount.setOwner( customer );
	customer.setDiscountTickets( discounts );
	// only the Discount is persisted explicitly; the owner must be cascaded
	s.persist( discount );
	tx.commit();
	s.close();
	s = openSession();
	tx = s.beginTransaction();
	discount = (Discount) s.get( Discount.class, discount.getId() );
	assertNotNull( discount );
	assertEquals( 20.12, discount.getDiscount(), 0.01 );
	assertNotNull( discount.getOwner() );
	// swap the owner to a brand-new (transient) Customer inside the session;
	// commit should cascade the new owner via the managed Discount
	customer = new Customer();
	customer.setName( "Clooney" );
	discount.setOwner( customer );
	discounts = new ArrayList();
	discounts.add( discount );
	customer.setDiscountTickets( discounts );
	tx.commit();
	s.close();
	s = openSession();
	tx = s.beginTransaction();
	discount = (Discount) s.get( Discount.class, discount.getId() );
	assertNotNull( discount );
	assertNotNull( discount.getOwner() );
	assertEquals( "Clooney", discount.getOwner().getName() );
	tx.commit();
	s.close();
	// deleting the Customer should cascade to its discount tickets
	s = openSession();
	tx = s.beginTransaction();
	customer = (Customer) s.get( Customer.class, customer.getId() );
	s.delete( customer );
	tx.commit();
	s.close();
}

/**
 * Verifies the Discount->owner association is lazily fetched: after get() or
 * load() the owner proxy must not yet be initialized.
 */
@Test
public void testFetch() throws Exception {
	Session s;
	Transaction tx;
	s = openSession();
	tx = s.beginTransaction();
	Discount discount = new Discount();
	discount.setDiscount( 20 );
	Customer customer = new Customer();
	Collection discounts = new ArrayList();
	discounts.add( discount );
	customer.setName( "Quentin Tarantino" );
	discount.setOwner( customer );
	customer.setDiscountTickets( discounts );
	s.persist( discount );
	tx.commit();
	s.close();
	s = openSession();
	tx = s.beginTransaction();
	discount = (Discount) s.get( Discount.class, discount.getId() );
	assertNotNull( discount );
	// owner must still be an uninitialized proxy right after get()
	assertFalse( Hibernate.isInitialized( discount.getOwner() ) );
	tx.commit();
	s = openSession();
	tx = s.beginTransaction();
	discount = (Discount) s.load( Discount.class, discount.getId() );
	assertNotNull( discount );
	assertFalse( Hibernate.isInitialized( discount.getOwner() ) );
	tx.commit();
	// cleanup so later tests see no leftover Discount
	s = openSession();
	tx = s.beginTransaction();
	s.delete( s.get( Discount.class, discount.getId() ) );
	tx.commit();
	s.close();
}

/**
 * Persists a Parent keyed by a composite ParentPk and a Child referencing it,
 * then queries all Children and checks the composite-FK reference resolved.
 */
@Test
public void testCompositeFK() throws Exception {
	Session s;
	Transaction tx;
	s = openSession();
	tx = s.beginTransaction();
	ParentPk ppk = new ParentPk();
	ppk.firstName = "John";
	ppk.lastName = "Doe";
	Parent p = new Parent();
	p.age = 45;
	p.id = ppk;
	s.persist( p );
	Child c = new Child();
	c.parent = p;
	s.persist( c );
	tx.commit();
	s.close();
	s = openSession();
	tx = s.beginTransaction();
	//FIXME: fix this when the small parser bug will be fixed
	Query q = s.createQuery( "from " + Child.class.getName() );
	//+ " c where c.parent.id.lastName = :lastName");
	//q.setString("lastName", p.id.lastName);
	List result = q.list();
	assertEquals( 1, result.size() );
	Child c2 = (Child) result.get( 0 );
	assertEquals( c2.id, c.id );
	tx.commit();
	s.close();
}

/**
 * Persists a child Node whose parent is referenced only through the parent's
 * composite NodePk, and verifies the implicit composite FK navigates back.
 * NOTE(review): n1 is never persisted explicitly — relies on cascade from n2.
 */
@Test
public void testImplicitCompositeFk() throws Exception {
	Session s;
	Transaction tx;
	s = openSession();
	tx = s.beginTransaction();
	Node n1 = new Node();
	n1.setDescription( "Parent" );
	NodePk n1pk = new NodePk();
	n1pk.setLevel( 1 );
	n1pk.setName( "Root" );
	n1.setId( n1pk );
	Node n2 = new Node();
	NodePk n2pk = new NodePk();
	n2pk.setLevel( 2 );
	n2pk.setName( "Level 1: A" );
	n2.setParent( n1 );
	n2.setId( n2pk );
	s.persist( n2 );
	tx.commit();
	s = openSession();
	tx = s.beginTransaction();
	n2 = (Node) s.get( Node.class, n2pk );
	assertNotNull( n2 );
	assertNotNull( n2.getParent() );
	assertEquals( 1, n2.getParent().getId().getLevel() );
	tx.commit();
	s.close();
}

/**
 * Many-to-one join on a non-primary-key column (Order.orderNbr): after
 * flush+clear the OrderLine must resolve its Order and appear in the
 * mapped-by collection on the other side.
 */
@Test
public void testManyToOneNonPk() throws Exception {
	Session s = openSession();
	Transaction tx = s.beginTransaction();
	Order order = new Order();
	order.setOrderNbr( "123" );
	s.persist( order );
	OrderLine ol = new OrderLine();
	ol.setItem( "Mouse" );
	ol.setOrder( order );
	s.persist( ol );
	// flush+clear so the subsequent get() hits the database, not the session cache
	s.flush();
	s.clear();
	ol = (OrderLine) s.get( OrderLine.class, ol.getId() );
	assertNotNull( ol.getOrder() );
	assertEquals( "123", ol.getOrder().getOrderNbr() );
	assertTrue( ol.getOrder().getOrderLines().contains( ol ) );
	tx.rollback();
	s.close();
}

/**
 * Same non-PK join as above but through the replacement-order association
 * mapped on a secondary table; the regular order-lines collection must NOT
 * contain the line in that case.
 */
@Test
public void testManyToOneNonPkSecondaryTable() throws Exception {
	Session s = openSession();
	Transaction tx = s.beginTransaction();
	Order order = new Order();
	order.setOrderNbr( "123" );
	s.persist( order );
	OrderLine ol = new OrderLine();
	ol.setItem( "Mouse" );
	ol.setReplacementOrder( order );
	s.persist( ol );
	s.flush();
	s.clear();
	ol = (OrderLine) s.get( OrderLine.class, ol.getId() );
	assertNotNull( ol.getReplacementOrder() );
	assertEquals( "123", ol.getReplacementOrder().getOrderNbr() );
	assertFalse( ol.getReplacementOrder().getOrderLines().contains( ol ) );
	tx.rollback();
	s.close();
}

/**
 * Two many-to-one non-PK associations targeting the same referencedColumnName
 * must map and load without failure.
 */
@Test
public void testTwoManyToOneNonPk() throws Exception {
	//2 many to one non pk pointing to the same referencedColumnName should not fail
	Session s = openSession();
	Transaction tx = s.beginTransaction();
	org.hibernate.test.annotations.manytoone.Customer customer = new org.hibernate.test.annotations.manytoone.Customer();
	customer.userId="123";
	org.hibernate.test.annotations.manytoone.Customer customer2 = new org.hibernate.test.annotations.manytoone.Customer();
	customer2.userId="124";
	s.persist( customer2 );
	s.persist( customer );
	Deal deal = new Deal();
	deal.from = customer;
	deal.to = customer2;
	s.persist( deal );
	s.flush();
	s.clear();
	deal = (Deal) s.get( Deal.class, deal.id );
	assertNotNull( deal.from );
	assertNotNull( deal.to );
	tx.rollback();
	s.close();
}

/**
 * One-to-many whose other side is formula-based: each Lens length must fall
 * between the reciprocals of the two persisted focal values.
 */
@Test
public void testFormulaOnOtherSide() throws Exception {
	Session s = openSession();
	Transaction tx = s.beginTransaction();
	Frame frame = new Frame();
	frame.setName( "Prada" );
	s.persist( frame );
	Lens l = new Lens();
	l.setFocal( 2.5f );
	l.setFrame( frame );
	s.persist( l );
	Lens r = new Lens();
	r.setFocal( 1.2f);
	r.setFrame( frame );
	s.persist( r );
	s.flush();
	s.clear();
	frame = (Frame) s.get( Frame.class, frame.getId() );
	assertEquals( 2, frame.getLenses().size() );
	// length is computed by formula as 1/focal; either lens may come first
	assertTrue( frame.getLenses().iterator().next().getLength() <= 1/1.2f );
	assertTrue( frame.getLenses().iterator().next().getLength() >= 1/2.5f );
	tx.rollback();
	s.close();
}

// Entities wired into the session factory for this test class.
@Override
protected Class[] getAnnotatedClasses() {
	return new Class[]{
			Deal.class,
			org.hibernate.test.annotations.manytoone.Customer.class,
			Car.class,
			Color.class,
			Flight.class,
			Company.class,
			Customer.class,
			Discount.class,
			Ticket.class,
			Passport.class,
			Parent.class,
			Child.class,
			Node.class,
			User.class,
			DistrictUser.class,
			Order.class,
			OrderLine.class,
			Frame.class,
			Lens.class
	};
}
}
// Decompiled by Jad v1.5.8e. Copyright 2001 Pavel Kouznetsov. // Jad home page: http://www.geocities.com/kpdus/jad.html // Decompiler options: braces fieldsfirst space lnc package android.support.v4.view; import android.content.Context; import android.os.Handler; import android.os.Message; import android.view.MotionEvent; import android.view.VelocityTracker; import android.view.ViewConfiguration; // Referenced classes of package android.support.v4.view: // MotionEventCompat, VelocityTrackerCompat, GestureDetectorCompat static class init implements init { private class GestureHandler extends Handler { final GestureDetectorCompat.GestureDetectorCompatImplBase this$0; public void handleMessage(Message message) { message.what; JVM INSTR tableswitch 1 3: default 32 // 1 59 // 2 79 // 3 87; goto _L1 _L2 _L3 _L4 _L1: throw new RuntimeException((new StringBuilder()).append("Unknown message ").append(message).toString()); _L2: mListener.onShowPress(mCurrentDownEvent); _L6: return; _L3: dispatchLongPress(); return; _L4: if (mDoubleTapListener != null) { if (!mStillDown) { mDoubleTapListener.onSingleTapConfirmed(mCurrentDownEvent); return; } else { mDeferConfirmSingleTap = true; return; } } if (true) goto _L6; else goto _L5 _L5: } GestureHandler() { this$0 = GestureDetectorCompat.GestureDetectorCompatImplBase.this; super(); } GestureHandler(Handler handler) { this$0 = GestureDetectorCompat.GestureDetectorCompatImplBase.this; super(handler.getLooper()); } } private static final int DOUBLE_TAP_TIMEOUT = 0; private static final int LONGPRESS_TIMEOUT = 0; private static final int LONG_PRESS = 2; private static final int SHOW_PRESS = 1; private static final int TAP = 3; private static final int TAP_TIMEOUT = ViewConfiguration.getTapTimeout(); private boolean mAlwaysInBiggerTapRegion; private boolean mAlwaysInTapRegion; private MotionEvent mCurrentDownEvent; private boolean mDeferConfirmSingleTap; private android.view.atImplBase.DOUBLE_TAP_TIMEOUT mDoubleTapListener; private int 
mDoubleTapSlopSquare; private float mDownFocusX; private float mDownFocusY; private final Handler mHandler; private boolean mInLongPress; private boolean mIsDoubleTapping; private boolean mIsLongpressEnabled; private float mLastFocusX; private float mLastFocusY; private final android.view.atImplBase.DOUBLE_TAP_TIMEOUT mListener; private int mMaximumFlingVelocity; private int mMinimumFlingVelocity; private MotionEvent mPreviousUpEvent; private boolean mStillDown; private int mTouchSlopSquare; private VelocityTracker mVelocityTracker; private void cancel() { mHandler.removeMessages(1); mHandler.removeMessages(2); mHandler.removeMessages(3); mVelocityTracker.recycle(); mVelocityTracker = null; mIsDoubleTapping = false; mStillDown = false; mAlwaysInTapRegion = false; mAlwaysInBiggerTapRegion = false; mDeferConfirmSingleTap = false; if (mInLongPress) { mInLongPress = false; } } private void cancelTaps() { mHandler.removeMessages(1); mHandler.removeMessages(2); mHandler.removeMessages(3); mIsDoubleTapping = false; mAlwaysInTapRegion = false; mAlwaysInBiggerTapRegion = false; mDeferConfirmSingleTap = false; if (mInLongPress) { mInLongPress = false; } } private void dispatchLongPress() { mHandler.removeMessages(3); mDeferConfirmSingleTap = false; mInLongPress = true; mListener.mListener(mCurrentDownEvent); } private void init(Context context) { if (context == null) { throw new IllegalArgumentException("Context must not be null"); } if (mListener == null) { throw new IllegalArgumentException("OnGestureListener must not be null"); } else { mIsLongpressEnabled = true; ViewConfiguration viewconfiguration = ViewConfiguration.get(context); int i = viewconfiguration.getScaledTouchSlop(); int j = viewconfiguration.getScaledDoubleTapSlop(); mMinimumFlingVelocity = viewconfiguration.getScaledMinimumFlingVelocity(); mMaximumFlingVelocity = viewconfiguration.getScaledMaximumFlingVelocity(); mTouchSlopSquare = i * i; mDoubleTapSlopSquare = j * j; return; } } private boolean 
isConsideredDoubleTap(MotionEvent motionevent, MotionEvent motionevent1, MotionEvent motionevent2) { if (mAlwaysInBiggerTapRegion && motionevent2.getEventTime() - motionevent1.getEventTime() <= (long)DOUBLE_TAP_TIMEOUT) { int i = (int)motionevent.getX() - (int)motionevent2.getX(); int j = (int)motionevent.getY() - (int)motionevent2.getY(); if (i * i + j * j < mDoubleTapSlopSquare) { return true; } } return false; } public boolean isLongpressEnabled() { return mIsLongpressEnabled; } public boolean onTouchEvent(MotionEvent motionevent) { int k; float f2; float f3; boolean flag1; int i = motionevent.getAction(); if (mVelocityTracker == null) { mVelocityTracker = VelocityTracker.obtain(); } mVelocityTracker.addMovement(motionevent); boolean flag; int j; float f; float f1; int l; if ((i & 0xff) == 6) { flag = true; } else { flag = false; } if (flag) { j = MotionEventCompat.getActionIndex(motionevent); } else { j = -1; } f = 0.0F; f1 = 0.0F; k = MotionEventCompat.getPointerCount(motionevent); l = 0; while (l < k) { if (j != l) { f += MotionEventCompat.getX(motionevent, l); f1 += MotionEventCompat.getY(motionevent, l); } l++; } int i1; int j1; if (flag) { i1 = k - 1; } else { i1 = k; } f2 = f / (float)i1; f3 = f1 / (float)i1; j1 = i & 0xff; flag1 = false; j1; JVM INSTR tableswitch 0 6: default 200 // 0 392 // 1 911 // 2 663 // 3 1186 // 4 200 // 5 210 // 6 240; goto _L1 _L2 _L3 _L4 _L5 _L1 _L6 _L7 _L1: return flag1; _L6: mLastFocusX = f2; mDownFocusX = f2; mLastFocusY = f3; mDownFocusY = f3; cancelTaps(); return false; _L7: int j3; float f8; float f9; int l3; mLastFocusX = f2; mDownFocusX = f2; mLastFocusY = f3; mDownFocusY = f3; mVelocityTracker.computeCurrentVelocity(1000, mMaximumFlingVelocity); j3 = MotionEventCompat.getActionIndex(motionevent); int k3 = MotionEventCompat.getPointerId(motionevent, j3); f8 = VelocityTrackerCompat.getXVelocity(mVelocityTracker, k3); f9 = VelocityTrackerCompat.getYVelocity(mVelocityTracker, k3); l3 = 0; _L9: int i4 = l3; flag1 = false; 
if (i4 >= k) { continue; /* Loop/switch isn't completed */ } if (l3 != j3) { break; /* Loop/switch isn't completed */ } _L11: l3++; int j4; if (true) goto _L9; else goto _L8 _L8: if (f8 * VelocityTrackerCompat.getXVelocity(mVelocityTracker, j4 = MotionEventCompat.getPointerId(motionevent, l3)) + f9 * VelocityTrackerCompat.getYVelocity(mVelocityTracker, j4) >= 0.0F) goto _L11; else goto _L10 _L10: mVelocityTracker.clear(); return false; _L2: android.view.atImplBase atimplbase = mDoubleTapListener; boolean flag5 = false; if (atimplbase != null) { boolean flag6 = mHandler.hasMessages(3); if (flag6) { mHandler.removeMessages(3); } if (mCurrentDownEvent != null && mPreviousUpEvent != null && flag6 && isConsideredDoubleTap(mCurrentDownEvent, mPreviousUpEvent, motionevent)) { mIsDoubleTapping = true; flag5 = false | mDoubleTapListener.mDoubleTapListener(mCurrentDownEvent) | mDoubleTapListener.(motionevent); } else { mHandler.sendEmptyMessageDelayed(3, DOUBLE_TAP_TIMEOUT); flag5 = false; } } mLastFocusX = f2; mDownFocusX = f2; mLastFocusY = f3; mDownFocusY = f3; if (mCurrentDownEvent != null) { mCurrentDownEvent.recycle(); } mCurrentDownEvent = MotionEvent.obtain(motionevent); mAlwaysInTapRegion = true; mAlwaysInBiggerTapRegion = true; mStillDown = true; mInLongPress = false; mDeferConfirmSingleTap = false; if (mIsLongpressEnabled) { mHandler.removeMessages(2); mHandler.sendEmptyMessageAtTime(2, mCurrentDownEvent.getDownTime() + (long)TAP_TIMEOUT + (long)LONGPRESS_TIMEOUT); } mHandler.sendEmptyMessageAtTime(1, mCurrentDownEvent.getDownTime() + (long)TAP_TIMEOUT); return flag5 | mListener.mListener(motionevent); _L4: float f6; float f7; boolean flag3 = mInLongPress; flag1 = false; if (flag3) { continue; /* Loop/switch isn't completed */ } f6 = mLastFocusX - f2; f7 = mLastFocusY - f3; if (mIsDoubleTapping) { return false | mDoubleTapListener.(motionevent); } if (!mAlwaysInTapRegion) { break; /* Loop/switch isn't completed */ } int j2 = (int)(f2 - mDownFocusX); int k2 = 
(int)(f3 - mDownFocusY); int l2 = j2 * j2 + k2 * k2; int i3 = mTouchSlopSquare; flag1 = false; if (l2 > i3) { flag1 = mListener.mListener(mCurrentDownEvent, motionevent, f6, f7); mLastFocusX = f2; mLastFocusY = f3; mAlwaysInTapRegion = false; mHandler.removeMessages(3); mHandler.removeMessages(1); mHandler.removeMessages(2); } if (l2 > mTouchSlopSquare) { mAlwaysInBiggerTapRegion = false; return flag1; } if (true) goto _L1; else goto _L12 _L12: int i2; if (Math.abs(f6) >= 1.0F) { break; /* Loop/switch isn't completed */ } i2 = Math.abs(f7) != 1.0F; flag1 = false; if (i2 < 0) goto _L1; else goto _L13 _L13: boolean flag4 = mListener.mListener(mCurrentDownEvent, motionevent, f6, f7); mLastFocusX = f2; mLastFocusY = f3; return flag4; _L3: MotionEvent motionevent1; mStillDown = false; motionevent1 = MotionEvent.obtain(motionevent); if (!mIsDoubleTapping) goto _L15; else goto _L14 _L14: boolean flag2 = false | mDoubleTapListener.(motionevent); _L18: if (mPreviousUpEvent != null) { mPreviousUpEvent.recycle(); } mPreviousUpEvent = motionevent1; if (mVelocityTracker != null) { mVelocityTracker.recycle(); mVelocityTracker = null; } mIsDoubleTapping = false; mDeferConfirmSingleTap = false; mHandler.removeMessages(1); mHandler.removeMessages(2); return flag2; _L15: if (!mInLongPress) goto _L17; else goto _L16 _L16: mHandler.removeMessages(3); mInLongPress = false; flag2 = false; goto _L18 _L17: if (!mAlwaysInTapRegion) goto _L20; else goto _L19 _L19: flag2 = mListener.mListener(motionevent); if (mDeferConfirmSingleTap && mDoubleTapListener != null) { mDoubleTapListener.rmed(motionevent); } goto _L18 _L20: float f4; float f5; VelocityTracker velocitytracker = mVelocityTracker; int k1 = MotionEventCompat.getPointerId(motionevent, 0); velocitytracker.computeCurrentVelocity(1000, mMaximumFlingVelocity); f4 = VelocityTrackerCompat.getYVelocity(velocitytracker, k1); f5 = VelocityTrackerCompat.getXVelocity(velocitytracker, k1); if (Math.abs(f4) > (float)mMinimumFlingVelocity) goto 
_L22; else goto _L21 _L21: int l1; l1 = Math.abs(f5) != (float)mMinimumFlingVelocity; flag2 = false; if (l1 <= 0) goto _L18; else goto _L22 _L22: flag2 = mListener.mListener(mCurrentDownEvent, motionevent, f5, f4); goto _L18 _L5: cancel(); return false; } public void setIsLongpressEnabled(boolean flag) { mIsLongpressEnabled = flag; } public void setOnDoubleTapListener(android.view.atImplBase atimplbase) { mDoubleTapListener = atimplbase; } static { LONGPRESS_TIMEOUT = ViewConfiguration.getLongPressTimeout(); DOUBLE_TAP_TIMEOUT = ViewConfiguration.getDoubleTapTimeout(); } /* static boolean access$502(GestureHandler gesturehandler, boolean flag) { gesturehandler.mDeferConfirmSingleTap = flag; return flag; } */ public GestureHandler.this._cls0(Context context, android.view.atImplBase atimplbase, Handler handler) { if (handler != null) { mHandler = new GestureHandler(handler); } else { mHandler = new GestureHandler(); } mListener = atimplbase; if (atimplbase instanceof android.view.atImplBase.mListener) { setOnDoubleTapListener((android.view.atImplBase.setOnDoubleTapListener)atimplbase); } init(context); } }
/** * Copyright (C) 2015 DataTorrent, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datatorrent.stram.debug; import java.io.IOException; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import net.engio.mbassy.listener.Handler; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.datatorrent.api.*; import com.datatorrent.api.Context.OperatorContext; import com.datatorrent.api.Context.PortContext; import com.datatorrent.api.Operator.InputPort; import com.datatorrent.api.Operator.OutputPort; import com.datatorrent.api.Stats.OperatorStats; import com.datatorrent.api.Stats.OperatorStats.PortStats; import com.datatorrent.api.StatsListener.OperatorRequest; import com.datatorrent.stram.StreamingContainerManager; import com.datatorrent.stram.api.ContainerContext; import com.datatorrent.stram.api.ContainerEvent.ContainerStatsEvent; import com.datatorrent.stram.api.ContainerEvent.NodeActivationEvent; import com.datatorrent.stram.api.ContainerEvent.NodeDeactivationEvent; import com.datatorrent.stram.api.RequestFactory; import com.datatorrent.stram.api.RequestFactory.RequestDelegate; import com.datatorrent.stram.api.StramToNodeStartRecordingRequest; import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.ContainerStats; import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.OperatorHeartbeat; import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.StramToNodeRequest; import 
com.datatorrent.stram.engine.Node;
import com.datatorrent.stram.plan.logical.LogicalPlan;
import com.datatorrent.stram.plan.logical.Operators.PortContextPair;
import com.datatorrent.stram.plan.logical.Operators.PortMappingDescriptor;
import com.datatorrent.stram.util.SharedPubSubWebSocketClient;

/**
 * <p>TupleRecorderCollection class.</p>
 *
 * @since 0.3.5
 */
// Container-side registry of active tuple recorders, keyed by (operatorId, portName).
// Extends HashMap directly, so the map contents ARE the set of active recordings.
// Recordings are started/stopped either by STRAM requests (via RequestDelegateImpl)
// or automatically on node activation (AUTO_RECORD attributes).
public class TupleRecorderCollection extends HashMap<OperatorIdPortNamePair, TupleRecorder> implements Component<Context>
{
  // All of the fields below are populated from context attributes in setup().
  private int tupleRecordingPartFileSize;
  private String gatewayAddress;
  private boolean gatewayUseSsl = false;
  private String gatewayUserName;
  private String gatewayPassword;
  private long tupleRecordingPartFileTimeMillis;
  private String appPath;
  private String appId;
  // Lazily created on first recording when a gateway address is configured;
  // guarded by synchronized(this) double-check in startRecording().
  private SharedPubSubWebSocketClient wsClient;
  private Map<Class<?>, Class<? extends StringCodec<?>>> codecs;

  // Looks up the recorder registered for the given operator/port, or null.
  public TupleRecorder getTupleRecorder(int operId, String portName)
  {
    //logger.debug("attempting to get tuple recorder for {} on {}", new OperatorIdPortNamePair(operId, portName), System.identityHashCode(this));
    return get(new OperatorIdPortNamePair(operId, portName));
  }

  // Pulls recording/gateway configuration from the container context and
  // registers this instance as the executor for the three recording requests.
  @Override
  public void setup(Context ctx)
  {
    tupleRecordingPartFileSize = ctx.getValue(LogicalPlan.TUPLE_RECORDING_PART_FILE_SIZE);
    tupleRecordingPartFileTimeMillis = ctx.getValue(LogicalPlan.TUPLE_RECORDING_PART_FILE_TIME_MILLIS);
    appId = ctx.getValue(LogicalPlan.APPLICATION_ID);
    gatewayAddress = ctx.getValue(LogicalPlan.GATEWAY_CONNECT_ADDRESS);
    gatewayUseSsl = ctx.getValue(LogicalPlan.GATEWAY_USE_SSL);
    gatewayUserName = ctx.getValue(LogicalPlan.GATEWAY_USER_NAME);
    gatewayPassword = ctx.getValue(LogicalPlan.GATEWAY_PASSWORD);
    appPath = ctx.getValue(LogicalPlan.APPLICATION_PATH);
    codecs = ctx.getAttributes().get(Context.DAGContext.STRING_CODECS);
    RequestDelegateImpl impl = new RequestDelegateImpl();
    RequestFactory rf = ctx.getValue(ContainerContext.REQUEST_FACTORY);
    if (rf == null) {
      // without a request factory STRAM cannot deliver recording requests
      logger.warn("No request factory defined, recording is disabled!");
    }
    else {
      rf.registerDelegate(StramToNodeRequest.RequestType.START_RECORDING, impl);
      rf.registerDelegate(StramToNodeRequest.RequestType.STOP_RECORDING, impl);
      rf.registerDelegate(StramToNodeRequest.RequestType.SYNC_RECORDING, impl);
    }
  }

  // Tears down every live recorder and the shared websocket client, then
  // empties the registry.
  @Override
  public void teardown()
  {
    for (TupleRecorder entry : values()) {
      entry.teardown();
    }
    if (wsClient != null) {
      // SPOI-1328: clean up IO threads or else process won't exit
      wsClient.teardown();
    }
    clear();
  }

  /**
   * Return the name of the stream which is operatorId.portname.
   *
   * @param operatorId id of the operator to which the port belongs.
   * @param portname name of port to which the stream is connected.
   * @return Stream Id if connected, null otherwise.
   */
  public final String getDeclaredStreamId(int operatorId, String portname)
  {
    // NOTE(review): the javadoc promises null for unconnected ports, but this
    // unconditionally returns the concatenation — the null checks on its
    // result in startRecording() below are therefore dead code. Confirm intent.
    return String.valueOf(operatorId).concat(Component.CONCAT_SEPARATOR).concat(portname);
  }

  // Starts a recording for the operator (all ports when portName == null, else
  // just that port) unless a conflicting recording already exists. A recording
  // on (operatorId, null) conflicts with everything for that operator.
  // numWindows > 0 schedules an automatic stop after that many windows.
  private void startRecording(String id, final Node<?> node, int operatorId, final String portName, long numWindows)
  {
    PortMappingDescriptor descriptor = node.getPortMappingDescriptor();
    OperatorIdPortNamePair operatorIdPortNamePair = new OperatorIdPortNamePair(operatorId, portName);
    // check any recording conflict
    boolean conflict = false;
    if (containsKey(new OperatorIdPortNamePair(operatorId, null))) {
      conflict = true;
    } else if (portName == null) {
      // whole-operator request conflicts with any per-port recording
      for (Map.Entry<String, PortContextPair<InputPort<?>>> entry : descriptor.inputPorts.entrySet()) {
        if (containsKey(new OperatorIdPortNamePair(operatorId, entry.getKey()))) {
          conflict = true;
          break;
        }
      }
      for (Map.Entry<String, PortContextPair<OutputPort<?>>> entry : descriptor.outputPorts.entrySet()) {
        if (containsKey(new OperatorIdPortNamePair(operatorId, entry.getKey()))) {
          conflict = true;
          break;
        }
      }
    } else {
      if (containsKey(operatorIdPortNamePair)) {
        conflict = true;
      }
    }
    if (!conflict) {
      logger.debug("Executing start recording request for {}", operatorIdPortNamePair);
      // lazily create the shared websocket client (double-checked under 'this')
      if (gatewayAddress != null && wsClient == null) {
        synchronized (this) {
          if (wsClient == null) {
            try {
              wsClient = new SharedPubSubWebSocketClient((gatewayUseSsl ? "wss://" : "ws://") + gatewayAddress + "/pubsub", 500);
              if (gatewayUserName != null && gatewayPassword != null) {
                wsClient.setLoginUrl((gatewayUseSsl ? "https://" : "http://") + gatewayAddress + StreamingContainerManager.GATEWAY_LOGIN_URL_PATH);
                wsClient.setUserName(gatewayUserName);
                wsClient.setPassword(gatewayPassword);
              }
              wsClient.setup();
            } catch (Exception ex) {
              // best-effort: recording proceeds without websocket publishing
              logger.warn("Error initializing websocket", ex);
            }
          }
        }
      }
      TupleRecorder tupleRecorder = new TupleRecorder(id, appId);
      tupleRecorder.setWebSocketClient(wsClient);
      // build one sink per matching connected port
      HashMap<String, Sink<Object>> sinkMap = new HashMap<String, Sink<Object>>();
      for (Map.Entry<String, PortContextPair<InputPort<?>>> entry : descriptor.inputPorts.entrySet()) {
        String streamId = getDeclaredStreamId(operatorId, entry.getKey());
        if (streamId == null) {
          // NOTE(review): unreachable — see getDeclaredStreamId() above
          streamId = portName + "_implicit_stream";
        }
        // input ports additionally require a non-null port context (i.e. connected)
        if (entry.getValue().context != null && (portName == null || entry.getKey().equals(portName))) {
          logger.debug("Adding recorder sink to input port {}, stream {}", entry.getKey(), streamId);
          tupleRecorder.addInputPortInfo(entry.getKey(), streamId);
          sinkMap.put(entry.getKey(), tupleRecorder.newSink(entry.getKey()));
        }
      }
      for (Map.Entry<String, PortContextPair<OutputPort<?>>> entry : descriptor.outputPorts.entrySet()) {
        String streamId = getDeclaredStreamId(operatorId, entry.getKey());
        if (streamId == null) {
          // NOTE(review): unreachable — see getDeclaredStreamId() above
          streamId = portName + "_implicit_stream";
        }
        if (portName == null || entry.getKey().equals(portName)) {
          logger.debug("Adding recorder sink to output port {}, stream {}", entry.getKey(), streamId);
          tupleRecorder.addOutputPortInfo(entry.getKey(), streamId);
          sinkMap.put(entry.getKey(), tupleRecorder.newSink(entry.getKey()));
        }
      }
      if (!sinkMap.isEmpty()) {
        logger.debug("Started recording on {} through {}", operatorIdPortNamePair, System.identityHashCode(this));
        String basePath = appPath + "/recordings/" + operatorId + "/" + tupleRecorder.getId();
        tupleRecorder.getStorage().setBasePath(basePath);
        tupleRecorder.getStorage().setBytesPerPartFile(tupleRecordingPartFileSize);
        tupleRecorder.getStorage().setMillisPerPartFile(tupleRecordingPartFileTimeMillis);
        node.addSinks(sinkMap);
        tupleRecorder.setup(node.getOperator(), codecs);
        put(operatorIdPortNamePair, tupleRecorder);
        if (numWindows > 0) {
          // auto-stop: after numWindows the recorder schedules this callback,
          // which posts a stop request back onto the node's request queue
          tupleRecorder.setNumWindows(numWindows, new Runnable()
          {
            @Override
            public void run()
            {
              node.context.request(new OperatorRequest()
              {
                @Override
                public StatsListener.OperatorResponse execute(Operator operator, int operatorId, long windowId) throws IOException
                {
                  stopRecording(node, operatorId, portName);
                  return null;
                }
              });
            }
          });
        }
      } else {
        logger.warn("Tuple recording request ignored because operator is not connected on the specified port.");
      }
    } else {
      logger.error("Operator id {} is already being recorded.", operatorId);
    }
  }

  // Stops and removes the recorder for (operatorId, portName); with a null
  // portName and no whole-operator recorder, stops every per-port recorder of
  // that operator instead.
  private void stopRecording(Node<?> node, int operatorId, String portName)
  {
    OperatorIdPortNamePair operatorIdPortNamePair = new OperatorIdPortNamePair(operatorId, portName);
    if (containsKey(operatorIdPortNamePair)) {
      logger.debug("Executing stop recording request for {}", operatorIdPortNamePair);
      TupleRecorder tupleRecorder = get(operatorIdPortNamePair);
      if (tupleRecorder != null) {
        node.removeSinks(tupleRecorder.getSinkMap());
        tupleRecorder.teardown();
        logger.debug("Stopped recording for {}", operatorIdPortNamePair);
        remove(operatorIdPortNamePair);
      }
    }
    // this should be looked at again when we redesign how we handle recordings with ports in a cleaner way
    else if (portName == null) {
      Iterator<Map.Entry<OperatorIdPortNamePair, TupleRecorder>> iterator = entrySet().iterator();
      while (iterator.hasNext()) {
        Map.Entry<OperatorIdPortNamePair, TupleRecorder> entry = iterator.next();
        if (operatorId == entry.getKey().operatorId) {
          TupleRecorder tupleRecorder = entry.getValue();
          if (tupleRecorder != null) {
            node.removeSinks(tupleRecorder.getSinkMap());
            tupleRecorder.teardown();
            // NOTE(review): logs the requested (operatorId, null) pair, not the
            // per-port entry.getKey() actually being stopped — misleading log
            logger.debug("Stopped recording for operator/port {}", operatorIdPortNamePair);
            iterator.remove();
          }
        }
      }
    } else {
      logger.error("Operator/port {} is not being recorded.", operatorIdPortNamePair);
    }
  }

  // Asks the matching recorder(s) to sync their storage to durable state;
  // same (operatorId, portName) resolution rules as stopRecording above.
  private void syncRecording(Node<?> node, int operatorId, String portName)
  {
    OperatorIdPortNamePair operatorIdPortNamePair = new OperatorIdPortNamePair(operatorId, portName);
    if (containsKey(operatorIdPortNamePair)) {
      logger.debug("Executing sync recording request for {}", operatorIdPortNamePair);
      TupleRecorder tupleRecorder = get(operatorIdPortNamePair);
      if (tupleRecorder != null) {
        tupleRecorder.getStorage().requestSync();
        logger.debug("Requested sync recording for operator/port {}", operatorIdPortNamePair);
      }
    }
    // this should be looked at again when we redesign how we handle recordings with ports in a cleaner way
    else if (portName == null) {
      Iterator<Map.Entry<OperatorIdPortNamePair, TupleRecorder>> iterator = entrySet().iterator();
      while (iterator.hasNext()) {
        Map.Entry<OperatorIdPortNamePair, TupleRecorder> entry = iterator.next();
        if (operatorId == entry.getKey().operatorId) {
          TupleRecorder tupleRecorder = entry.getValue();
          if (tupleRecorder != null) {
            tupleRecorder.getStorage().requestSync();
            logger.debug("Requested sync recording for operator/port {}", operatorIdPortNamePair);
          }
        }
      }
    } else {
      logger.error("(SYNC_RECORDING) Operator/port {} is not being recorded.", operatorIdPortNamePair);
    }
  }

  // Event-bus handler: when a node activates, honor AUTO_RECORD on the
  // operator (records all ports) or, failing that, on individual ports.
  @Handler
  public void activated(NodeActivationEvent nae)
  {
    Node<?> node = nae.getNode();
    if (node.context.getValue(OperatorContext.AUTO_RECORD)) {
      startRecording(null, node, node.getId(), null, 0);
    } else {
      for (Map.Entry<String, PortContextPair<InputPort<?>>> entry : node.getPortMappingDescriptor().inputPorts.entrySet()) {
        if (entry.getValue().context != null && entry.getValue().context.getValue(PortContext.AUTO_RECORD)) {
          startRecording(null, node, node.getId(), entry.getKey(), 0);
        }
      }
      for (Map.Entry<String, PortContextPair<OutputPort<?>>> entry : node.getPortMappingDescriptor().outputPorts.entrySet()) {
        if (entry.getValue().context != null && entry.getValue().context.getValue(PortContext.AUTO_RECORD)) {
          startRecording(null, node, node.getId(), entry.getKey(), 0);
        }
      }
    }
  }

  // Event-bus handler: stop all recordings for a node when it deactivates.
  @Handler
  public void deactivated(NodeDeactivationEvent nde)
  {
    Node<?> node = nde.getNode();
    stopRecording(node, node.getId(), null);
  }

  // Event-bus handler: stamps outgoing container stats with recording ids —
  // first clears every port-level recordingId, then fills in ids from the
  // active recorders (operator-level recorder wins over per-port entries).
  @Handler
  public void collected(ContainerStatsEvent cse)
  {
    ContainerStats stats = cse.getContainerStats();
    // pass 1: reset all port-level recording ids
    for (OperatorHeartbeat node : stats.operators) {
      for (OperatorStats os : node.windowStats) {
        if (os.inputPorts != null) {
          for (PortStats ps : os.inputPorts) {
            ps.recordingId = null;
          }
        }
        if (os.outputPorts != null) {
          for (PortStats ps : os.outputPorts) {
            ps.recordingId = null;
          }
        }
      }
    }
    // pass 2: stamp ids from active recorders
    for (OperatorHeartbeat node : stats.operators) {
      String recordingId;
      TupleRecorder tupleRecorder = get(new OperatorIdPortNamePair(node.nodeId, null));
      if (tupleRecorder == null) {
        // no whole-operator recorder: stamp matching per-port recorders
        recordingId = null;
        for (Map.Entry<OperatorIdPortNamePair, TupleRecorder> entry : this.entrySet()) {
          if (entry.getKey().operatorId == node.nodeId) {
            for (OperatorStats os : node.windowStats) {
              if (os.inputPorts != null) {
                for (PortStats ps : os.inputPorts) {
                  if (ps.id.equals(entry.getKey().portName)) {
                    ps.recordingId = entry.getValue().getId();
                    break;
                  }
                }
              }
              if (os.outputPorts != null) {
                for (PortStats ps : os.outputPorts) {
                  if (ps.id.equals(entry.getKey().portName)) {
                    ps.recordingId = entry.getValue().getId();
                    break;
                  }
                }
              }
            }
          }
        }
      } else {
        recordingId = tupleRecorder.getId();
      }
      // operator-level id (null when only per-port recordings exist)
      for (OperatorStats os : node.windowStats) {
        os.recordingId = recordingId;
      }
    }
  }

  // Translates STRAM recording requests into OperatorRequest executors that
  // run on the node's operator thread.
  private class RequestDelegateImpl implements RequestDelegate
  {
    @Override
    public OperatorRequest getRequestExecutor(final Node<?> node, final StramToNodeRequest snr)
    {
      switch (snr.getRequestType()) {
        case START_RECORDING:
          return new OperatorRequest()
          {
            @Override
            public StatsListener.OperatorResponse execute(Operator operator, int operatorId, long windowId) throws IOException
            {
              StramToNodeStartRecordingRequest r = (StramToNodeStartRecordingRequest) snr;
              startRecording(r.getId(), node, operatorId, r.getPortName(), r.getNumWindows());
              return null;
            }

            @Override
            public String toString()
            {
              return "Start Recording";
            }
          };
        case STOP_RECORDING:
          return new OperatorRequest()
          {
            @Override
            public StatsListener.OperatorResponse execute(Operator operator, int operatorId, long windowId) throws IOException
            {
              stopRecording(node, operatorId, snr.getPortName());
              return null;
            }

            @Override
            public String toString()
            {
              return "Stop Recording";
            }
          };
        case SYNC_RECORDING:
          return new OperatorRequest()
          {
            @Override
            public StatsListener.OperatorResponse execute(Operator operator, int operatorId, long windowId) throws IOException
            {
              syncRecording(node, operatorId, snr.getPortName());
              return null;
            }

            // NOTE(review): inconsistent with its siblings — probably meant
            // "Sync Recording" rather than the generic "Recording Request"
            @Override
            public String toString()
            {
              return "Recording Request";
            }
          };
        default:
          throw new UnsupportedOperationException("Unknown request type " + snr.requestType);
      }
    }
  }

  private static final long serialVersionUID = 201309112123L;
  private static final Logger logger = LoggerFactory.getLogger(TupleRecorderCollection.class);
}
package org.cobraparser.html.js;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.cobraparser.html.domimpl.NodeImpl;
import org.cobraparser.html.js.Window.JSRunnableTask;
import org.mozilla.javascript.Function;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.events.EventException;
import org.w3c.dom.events.EventListener;

/**
 * Per-window registry of event handlers and event dispatcher.
 *
 * <p>Two parallel registries are kept per node: DOM {@link EventListener}s
 * (added via the DOM API) and Rhino {@link Function}s (added from script).
 * Dispatch is asynchronous: {@link #dispatchEvent} enqueues a task on the
 * owning {@link Window}'s JS task queue rather than invoking handlers inline.
 *
 * <p>NOTE(review): {@code useCapture} is accepted but ignored by every method
 * in this class — only a single (bubbling-style) handler list is kept per
 * event type. Capture-phase support is an open TODO below.
 */
public final class EventTargetManager {

  // Identity-keyed on purpose: registration is bound to the exact node
  // instance, not to any notion of node equality.
  private final Map<NodeImpl, Map<String, List<EventListener>>> nodeOnEventListeners = new IdentityHashMap<>();

  private final Window window;

  public EventTargetManager(final Window window) {
    this.window = window;
  }

  /**
   * Registers a DOM listener for events of {@code type} on {@code node}.
   * Duplicate registrations are not de-duplicated; the listener is appended.
   *
   * @param useCapture currently ignored (see class note).
   */
  public void addEventListener(final NodeImpl node, final String type, final EventListener listener, final boolean useCapture) {
    final List<EventListener> handlerList = getListenerList(type, node, true);
    handlerList.add(listener);
  }

  /**
   * Returns the DOM-listener list for ({@code node}, {@code type}), creating
   * it when {@code createIfNotExist} is set; otherwise may return null.
   */
  private List<EventListener> getListenerList(final String type, final NodeImpl node, final boolean createIfNotExist) {
    final Map<String, List<EventListener>> onEventListeners = getEventListeners(node, createIfNotExist);
    if (onEventListeners != null) {
      if (onEventListeners.containsKey(type)) {
        return onEventListeners.get(type);
      } else if (createIfNotExist) {
        final List<EventListener> handlerList = new ArrayList<>();
        onEventListeners.put(type, handlerList);
        return handlerList;
      } else {
        return null;
      }
    } else {
      return null;
    }
  }

  /**
   * Returns the type-to-listeners map for {@code node}, creating it when
   * {@code createIfNotExist} is set; otherwise may return null.
   */
  private Map<String, List<EventListener>> getEventListeners(final NodeImpl node, final boolean createIfNotExist) {
    if (nodeOnEventListeners.containsKey(node)) {
      return nodeOnEventListeners.get(node);
    } else {
      if (createIfNotExist) {
        final Map<String, List<EventListener>> onEventListeners = new HashMap<>();
        nodeOnEventListeners.put(node, onEventListeners);
        return onEventListeners;
      } else {
        return null;
      }
    }
  }

  /**
   * Removes a previously registered DOM listener. A no-op when nothing was
   * registered for this node/type.
   *
   * @param useCapture currently ignored (see class note).
   */
  public void removeEventListener(final NodeImpl node, final String type, final EventListener listener, final boolean useCapture) {
    final Map<String, List<EventListener>> onEventListeners = getEventListeners(node, false);
    if (onEventListeners != null) {
      if (onEventListeners.containsKey(type)) {
        onEventListeners.get(type).remove(listener);
      }
    }
  }

  /**
   * JS-handler analogue of {@link #getListenerList}: returns the
   * {@link Function} list for ({@code node}, {@code type}), creating it when
   * requested; otherwise may return null.
   */
  private List<Function> getFunctionList(final String type, final NodeImpl node, final boolean createIfNotExist) {
    final Map<String, List<Function>> onEventListeners = getEventFunctions(node, createIfNotExist);
    if (onEventListeners != null) {
      if (onEventListeners.containsKey(type)) {
        return onEventListeners.get(type);
      } else if (createIfNotExist) {
        final List<Function> handlerList = new ArrayList<>();
        onEventListeners.put(type, handlerList);
        return handlerList;
      } else {
        return null;
      }
    } else {
      return null;
    }
  }

  /**
   * JS-handler analogue of {@link #getEventListeners}: returns the
   * type-to-functions map for {@code node}, creating it when requested.
   */
  private Map<String, List<Function>> getEventFunctions(final NodeImpl node, final boolean createIfNotExist) {
    if (nodeOnEventFunctions.containsKey(node)) {
      return nodeOnEventFunctions.get(node);
    } else {
      if (createIfNotExist) {
        final Map<String, List<Function>> onEventListeners = new HashMap<>();
        nodeOnEventFunctions.put(node, onEventListeners);
        return onEventListeners;
      } else {
        return null;
      }
    }
  }

  /**
   * Dispatches {@code evt} along the (upward) propagation path from
   * {@code node}, asynchronously on the window's JS task queue.
   *
   * <p>The phase is set to AT_TARGET for the first node and flipped to
   * BUBBLING_PHASE after each node is handled; there is no capture phase yet
   * (see TODOs). Because dispatch is queued, this method returns before any
   * handler runs.
   *
   * @return always {@code false} — NOTE(review): the DOM contract would
   *         return whether preventDefault was called, which cannot be known
   *         here since dispatch is asynchronous; confirm callers ignore it.
   */
  public boolean dispatchEvent(final NodeImpl node, final Event evt) throws EventException {
    // dispatchEventToHandlers(node, evt, onEventListeners.get(evt.getType()));
    // dispatchEventToJSHandlers(node, evt, onEventHandlers.get(evt.getType()));
    // TODO: Event Bubbling
    // TODO: get Window into the propagation path
    final List<NodeImpl> propagationPath = getPropagationPath(node);
    // TODO: Capture phase, and distinction between target phase and bubbling phase
    evt.setPhase(org.w3c.dom.events.Event.AT_TARGET);
    // TODO: The JS Task should be added with the correct base URL
    window.addJSTask(new JSRunnableTask(0, "Event dispatch for " + evt, () -> {
      // Walk target -> ancestors; stop early if a handler stops propagation.
      for (int i = 0; (i < propagationPath.size()) && !evt.isPropagationStopped(); i++) {
        final NodeImpl currNode = propagationPath.get(i);
        // System.out.println("Dipatching " + i + " to: " + currNode);
        // TODO: Make request manager checks here.
        dispatchEventToHandlers(currNode, evt);
        dispatchEventToJSHandlers(currNode, evt);
        evt.setPhase(org.w3c.dom.events.Event.BUBBLING_PHASE);
      }
    }));
    // dispatchEventToHandlers(node, evt);
    // dispatchEventToJSHandlers(node, evt);
    return false;
  }

  /**
   * Builds the bottom-up propagation path: the target node first, then its
   * ancestors, keeping only Element and Document nodes. The Window is not
   * yet included (TODO below).
   */
  private static List<NodeImpl> getPropagationPath(NodeImpl node) {
    final List<NodeImpl> nodes = new LinkedList<>();
    while (node != null) {
      if ((node instanceof Element) || (node instanceof Document)) {
        // TODO || node instanceof Window) {
        nodes.add(node);
      }
      node = (NodeImpl) node.getParentNode();
    }
    // TODO
    // nodes.add(window);
    return nodes;
  }

  // private void dispatchEventToHandlers(final NodeImpl node, final Event event, final List<EventListener> handlers) {
  /**
   * Invokes the DOM {@link EventListener}s registered on {@code node} for
   * this event's type, if any.
   */
  private void dispatchEventToHandlers(final NodeImpl node, final Event event) {
    final List<EventListener> handlers = getListenerList(event.getType(), node, false);
    if (handlers != null) {
      // We clone the collection and check if original collection still contains
      // the handler before dispatching
      // This is to avoid ConcurrentModificationException during dispatch
      final ArrayList<EventListener> handlersCopy = new ArrayList<>(handlers);
      for (final EventListener h : handlersCopy) {
        // TODO: Not sure if we should stop calling handlers after propagation is stopped
        // if (event.isPropagationStopped()) {
        // return;
        // }
        // Skip handlers that were removed by an earlier handler in this pass.
        if (handlers.contains(h)) {
          // window.addJSTask(new JSRunnableTask(0, "Event dispatch for: " + event, new Runnable(){
          // public void run() {
          h.handleEvent(event);
          // }
          // }));
          // h.handleEvent(event);
          // Executor.executeFunction(node, h, event);
        }
      }
    }
  }

  // protected void dispatchEventToJSHandlers(final NodeImpl node, final Event event, final List<Function> handlers) {
  /**
   * Invokes the script ({@link Function}) handlers registered on {@code node}
   * for this event's type, if any, via {@code Executor} in the window's
   * Rhino context.
   */
  protected void dispatchEventToJSHandlers(final NodeImpl node, final Event event) {
    final List<Function> handlers = getFunctionList(event.getType(), node, false);
    if (handlers != null) {
      // We clone the collection and check if original collection still contains
      // the handler before dispatching
      // This is to avoid ConcurrentModificationException during dispatch
      final ArrayList<Function> handlersCopy = new ArrayList<>(handlers);
      for (final Function h : handlersCopy) {
        // TODO: Not sure if we should stop calling handlers after propagation is stopped
        // if (event.isPropagationStopped()) {
        // return;
        // }
        // Skip handlers that were removed by an earlier handler in this pass.
        if (handlers.contains(h)) {
          // window.addJSTask(new JSRunnableTask(0, "Event dispatch for " + event, new Runnable(){
          // public void run() {
          Executor.executeFunction(node, h, event, window.getContextFactory());
          // }
          // }));
          // Executor.executeFunction(node, h, event);
        }
      }
    }
  }

  // private final Map<String, List<Function>> onEventHandlers = new HashMap<>();
  // Script-side (Rhino Function) handlers, parallel to nodeOnEventListeners.
  private final Map<NodeImpl, Map<String, List<Function>>> nodeOnEventFunctions = new IdentityHashMap<>();

  /** Registers a script handler for {@code type} on {@code node} (bubbling). */
  public void addEventListener(final NodeImpl node, final String type, final Function listener) {
    addEventListener(node, type, listener, false);
  }

  /**
   * Registers a script handler for {@code type} on {@code node}.
   *
   * @param useCapture currently ignored (see class note).
   */
  public void addEventListener(final NodeImpl node, final String type, final Function listener, final boolean useCapture) {
    // TODO
    // System.out.println("node by name: " + node.getNodeName() + " adding Event listener of type: " + type);
    /* List<Function> handlerList = null;
    if (onEventHandlers.containsKey(type)) {
      handlerList = onEventHandlers.get(type);
    } else {
      handlerList = new ArrayList<>();
      onEventHandlers.put(type, handlerList);
    }*/
    // final Map<String, List<Function>> handlerList = getEventFunctions(node, true);
    final List<Function> handlerList = getFunctionList(type, node, true);
    handlerList.add(listener);
  }

  /**
   * Removes a previously registered script handler. A no-op when nothing was
   * registered for this node/type.
   *
   * @param useCapture currently ignored (see class note).
   */
  public void removeEventListener(final NodeImpl node, final String type, final Function listener, final boolean useCapture) {
    final Map<String, List<Function>> onEventListeners = getEventFunctions(node, false);
    if (onEventListeners != null) {
      if (onEventListeners.containsKey(type)) {
        onEventListeners.get(type).remove(listener);
      }
    }
  }

  /** Drops every registered handler (both registries), e.g. on navigation. */
  public void reset() {
    nodeOnEventFunctions.clear();
    nodeOnEventListeners.clear();
  }
}
/*
 * Copyright 2011 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.googlecode.japi.checker.maven.plugin;

import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.metadata.ArtifactMetadataSource;
import org.apache.maven.artifact.resolver.ArtifactNotFoundException;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactCollector;
import org.apache.maven.artifact.resolver.ArtifactResolutionException;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectBuilder;
import org.apache.maven.project.ProjectBuildingException;
import org.apache.maven.project.artifact.InvalidDependencyVersionException;

import com.googlecode.japi.checker.BCChecker;
import com.googlecode.japi.checker.MuxReporter;
import com.googlecode.japi.checker.Rule;
import com.googlecode.japi.checker.SeverityCountReporter;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

/**
 * Goal which check the backward compatibility between generated
 * artifact and a reference artifact.
 *
 * NOTE: the javadoc tags below ({@literal @goal}, {@literal @parameter}, ...)
 * are Maven plugin annotations processed by the plugin tooling — do not
 * remove or reword them.
 *
 * @goal check
 * @phase verify
 * @requiresDependencyResolution compile
 * @threadSafe
 */
public class BackwardCompatibilityCheckerMojo extends AbstractMojo {

    /**
     * Location of the file.
     * @parameter expression="${project.build.directory}"
     * @required
     */
    protected File outputDirectory;

    /**
     * @parameter default-value="${project.artifact}"
     * @required
     * @readonly
     */
    private Artifact artifact;

    /**
     * @parameter
     * @required
     */
    private List<String> rules;

    /**
     * Reference version
     * @parameter
     */
    private ArtifactItem reference;

    /**
     * @parameter expression="${project}"
     * @readonly
     * @required
     */
    private MavenProject project;

    /**
     * Used to look up Artifacts in the remote repository.
     *
     * @component role="org.apache.maven.artifact.factory.ArtifactFactory"
     * @readonly
     * @required
     */
    private ArtifactFactory factory;

    /**
     * Used to look up Artifacts in the remote repository.
     *
     * @component
     * @readonly
     * @required
     */
    private ArtifactResolver resolver;

    /**
     * ArtifactRepository of the localRepository. To obtain the directory of localRepository in unit tests use
     * System.setProperty( "localRepository").
     *
     * @parameter expression="${localRepository}"
     * @required
     * @readonly
     */
    private ArtifactRepository localRepository;

    /**
     * List of Remote Repositories used by the resolver
     *
     * @parameter expression="${project.remoteArtifactRepositories}"
     * @readonly
     * @required
     */
    protected List<ArtifactRepository> remoteRepos;

    /**
     * The artifact collector to use.
     *
     * @component role="org.apache.maven.artifact.resolver.ArtifactCollector"
     * @required
     * @readonly
     */
    protected ArtifactCollector artifactCollector;

    /**
     * The artifact metadata source to use.
     *
     * @component role="org.apache.maven.artifact.metadata.ArtifactMetadataSource" roleHint="maven"
     * @readonly
     */
    protected ArtifactMetadataSource artifactMetadataSource;

    /**
     * Comma/pattern list of packages excluded from the compatibility check.
     * @parameter
     */
    protected String excludedPackages;

    // Set during execute(): true when the artifact version looks like the
    // first release of a new minor line (micro == 0), which relaxes the rules.
    private boolean isNewMinorVersion;

    /**
     * Used to build the reference project so its dependencies can be resolved.
     * @component
     */
    private MavenProjectBuilder projectBuilder;

    /**
     * {@inheritDoc}
     *
     * Runs the backward-compatibility check of the freshly built artifact
     * against a reference artifact. When no explicit reference is configured,
     * the previous version is derived from the current version (see
     * {@link #getReferenceVersion}). Versions containing ".0.0" are treated
     * as new major versions and skipped entirely.
     */
    public void execute() throws MojoExecutionException, MojoFailureException {
        if (artifact == null) {
            throw new MojoExecutionException("Artifact is null.");
        }
        if (artifact.getFile() != null && artifact.getFile().exists()) {
            String artifactVersion = artifact.getVersion();
            if (reference == null) {
                // No reference configured: compare against the previous
                // version of the same groupId:artifactId.
                reference = new ArtifactItem();
                reference.setArtifactId(artifact.getArtifactId());
                reference.setGroupId(artifact.getGroupId());
                reference.setVersion(getReferenceVersion(artifactVersion));
                reference.setType(artifact.getType());
            }
            if (artifactVersion.contains(".0.0")) {
                this.getLog().info("New major version detected - compatibility check will be skipped.");
                return; // this is a new major version - nothing to check, return.
            } else if (artifactVersion.contains("-SNAPSHOT")) {
                if (artifactVersion.substring(0, artifactVersion.indexOf('-')).endsWith(".0")) {
                    isNewMinorVersion = true;
                }
            } // it's a snapshot
            else {
                if (artifactVersion.endsWith(".0")) {
                    isNewMinorVersion = true;
                }
            } // release version

            // Retrieving the reference artifact.
            updateArtifact(reference);
            Artifact referenceArtifact = reference.getArtifact();

            try {
                // Creating a new checker which compare the generated artifact against the provided reference.
                BCChecker checker = new BCChecker();
                for (Artifact artifact : ((List<Artifact>)project.getCompileArtifacts())) {
                    this.getLog().debug("Adding new artifact dependency: " + artifact.getFile().toString());
                    checker.addToNewArtifactClasspath(artifact.getFile());
                }
                for (Artifact artifact : this.getDependencyList(reference.getGroupId(), reference.getArtifactId(), reference.getVersion())) {
                    this.getLog().debug("Adding reference dependency: " + artifact.getFile().toString());
                    checker.addToReferenceClasspath(artifact.getFile());
                }
                // configuring the reporting redirection
                MuxReporter mux = new MuxReporter();
                mux.add(new LogReporter(this.getLog()));
                SeverityCountReporter ec = new SeverityCountReporter();
                mux.add(ec);

                // Running the check...
                this.getLog().info("Checking backward compatibility of " + artifact.toString() + " against " + referenceArtifact.toString());
                checker.setReporter(mux);
                checker.setRules(getRuleInstances());
                checker.setExcludedPackages(excludedPackages);
                checker.setNewMinorVersion(isNewMinorVersion);
                // NOTE(review): "checkBacwardCompatibility" is the (misspelled)
                // public API of BCChecker — must match the library.
                checker.checkBacwardCompatibility(referenceArtifact.getFile(), artifact.getFile());
                if (ec.hasSeverity()) {
                    getLog().error("You have " + ec.getCount() + " backward compatibility issues.");
                    throw new MojoFailureException("You have " + ec.getCount() + " backward compatibility issues.");
                } else {
                    getLog().info("No backward compatibility issue found.");
                }
            } catch (IOException e) {
                throw new MojoExecutionException(e.getMessage(), e);
            } catch (IllegalArgumentException e) {
                throw new MojoExecutionException(e.getMessage(), e);
            }
        } else {
            throw new MojoExecutionException("Could not find the artifact: " + artifact.toString());
        }
    }

    /**
     * Derives the previous release version from {@code version}: strips any
     * -SNAPSHOT suffix, then decrements the micro number — or, when micro is
     * 0, decrements the minor number instead (micro stays "0").
     *
     * @throws MojoFailureException when the version is not of the x.y.z form.
     */
    private String getReferenceVersion(String version) throws MojoFailureException {
        String refVersion = version;
        if (refVersion.contains("SNAPSHOT")) {
            refVersion = refVersion.substring(0, refVersion.lastIndexOf("-"));
        }
        String[] versions = refVersion.split("\\.");
        if (versions.length != 3) {
            throw new MojoFailureException("Your version " + version
                    + " does not match Maven versioning conventions!");
        }
        if (versions[2].equals("0")) {
            int minorVersion = Integer.valueOf(versions[1]);
            minorVersion--;
            versions[1] = String.valueOf(minorVersion);
        } else {
            int microVersion = Integer.valueOf(versions[2]);
            microVersion--;
            versions[2] = String.valueOf(microVersion);
        }
        return versions[0] + "." + versions[1] + "." + versions[2];
    }

    /**
     * Transitively resolves the dependencies of the given
     * groupId:artifactId:version and returns them as a list, wrapping every
     * resolution failure in a MojoExecutionException.
     */
    protected List<Artifact> getDependencyList(String groupId, String artifactId, String version) throws MojoExecutionException {
        try {
            RuntimeDependencyResolver resolver = new RuntimeDependencyResolver(factory, this.resolver, artifactMetadataSource, localRepository, remoteRepos);
            Set<Artifact> artifactSet = resolver.transitivelyResolvePomDependencies(projectBuilder, groupId, artifactId, version, true);
            return new ArrayList<Artifact>(artifactSet);
        } catch (MalformedURLException e) {
            throw new MojoExecutionException("Cannot solve reference artifact: ", e);
        } catch (ArtifactResolutionException e) {
            throw new MojoExecutionException("Cannot solve reference artifact: ", e);
        } catch (ArtifactNotFoundException e) {
            throw new MojoExecutionException("Cannot solve reference artifact: ", e);
        } catch (ProjectBuildingException e) {
            throw new MojoExecutionException("Cannot solve reference artifact: ", e);
        } catch (InvalidDependencyVersionException e) {
            throw new MojoExecutionException("Cannot solve reference artifact: ", e);
        }
    }

    /**
     * Instantiates (via no-arg constructor) each Rule class named in the
     * {@code rules} configuration parameter.
     */
    private List<Rule> getRuleInstances() throws MojoExecutionException {
        List<Rule> rules = new ArrayList<Rule>();
        for (String classname : this.rules) {
            try {
                @SuppressWarnings("unchecked")
                Class<Rule> clazz = (Class<Rule>)this.getClass().getClassLoader().loadClass(classname);
                rules.add(clazz.newInstance());
            } catch (ClassNotFoundException e) {
                throw new MojoExecutionException(e.getMessage(), e);
            } catch (InstantiationException e) {
                throw new MojoExecutionException(e.getMessage(), e);
            } catch (IllegalAccessException e) {
                throw new MojoExecutionException(e.getMessage(), e);
            }
        }
        return rules;
    }

    /**
     * @return Returns the factory.
     */
    public ArtifactFactory getFactory() {
        return this.factory;
    }

    /**
     * @param factory The factory to set.
     */
    public void setFactory( ArtifactFactory factory ) {
        this.factory = factory;
    }

    /**
     * @return Returns the resolver.
     */
    public ArtifactResolver getResolver() {
        return this.resolver;
    }

    /**
     * @param resolver The resolver to set.
     */
    public void setResolver(ArtifactResolver resolver) {
        this.resolver = resolver;
    }

    public ArtifactRepository getLocalRepository() {
        return localRepository;
    }

    public void setLocalRepository(ArtifactRepository localRepository) {
        this.localRepository = localRepository;
    }

    /**
     * Resolves the Artifact from the remote repository if necessary. If no version is specified, it will be retrieved
     * from the dependency list or from the DependencyManagement section of the pom.
     *
     * @param artifactItem containing information about artifact from plugin configuration.
     * @throws MojoExecutionException with a message if the version can't be found in DependencyManagement.
     */
    protected void updateArtifact(ArtifactItem artifactItem) throws MojoExecutionException {
        if (artifactItem.getArtifact() != null) {
            // Already resolved: nothing to do.
            return;
        }
        VersionRange vr;
        try {
            vr = VersionRange.createFromVersionSpec( artifactItem.getVersion() );
        } catch ( InvalidVersionSpecificationException e ) {
            // Not a range spec — fall back to treating it as a plain version.
            vr = VersionRange.createFromVersion( artifactItem.getVersion() );
        }
        Artifact artifact = getFactory().createDependencyArtifact(artifactItem.getGroupId(), artifactItem.getArtifactId(), vr,
                artifactItem.getType(), null, Artifact.SCOPE_COMPILE);
        try {
            getResolver().resolve(artifact, remoteRepos, localRepository);
        } catch ( ArtifactResolutionException e ) {
            throw new MojoExecutionException( "Unable to resolve artifact.", e );
        } catch ( ArtifactNotFoundException e ) {
            throw new MojoExecutionException( "Unable to find artifact.", e );
        }
        artifactItem.setArtifact(artifact);
    }

    public void setProject(MavenProject project) {
        this.project = project;
    }

    public MavenProject getProject() {
        return this.project;
    }
}
/*
 * Copyright (C) 2013 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.squareup.okhttp;

import com.squareup.okhttp.internal.Util;
import com.squareup.okhttp.internal.http.RawHeaders;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.List;
import java.util.Set;

/**
 * An HTTP response. Instances of this class are not immutable: the response
 * body is a one-shot value that may be consumed only once. All other properties
 * are immutable.
 *
 * <h3>Warning: Experimental OkHttp 2.0 API</h3>
 * This class is in beta. APIs are subject to change!
 */
public final class Response {
  private final Request request;
  private final int code;
  private final RawHeaders headers;
  private final Body body;
  private final Response redirectedBy;

  private Response(Builder builder) {
    this.request = builder.request;
    this.code = builder.code;
    // Defensive copy: the builder's headers remain mutable after build().
    this.headers = new RawHeaders(builder.headers);
    this.body = builder.body;
    this.redirectedBy = builder.redirectedBy;
  }

  /**
   * The wire-level request that initiated this HTTP response. This is usually
   * <strong>not</strong> the same request instance provided to the HTTP client:
   * <ul>
   *     <li>It may be transformed by the HTTP client. For example, the client
   *         may have added its own {@code Content-Encoding} header to enable
   *         response compression.
   *     <li>It may be the request generated in response to an HTTP redirect.
   *         In this case the request URL may be different than the initial
   *         request URL.
   * </ul>
   */
  public Request request() {
    return request;
  }

  /** The HTTP status code of this response. */
  public int code() {
    return code;
  }

  /** Returns the first header value for {@code name}, or null if absent. */
  public String header(String name) {
    return header(name, null);
  }

  /** Returns the first header value for {@code name}, or {@code defaultValue} if absent. */
  public String header(String name, String defaultValue) {
    String result = headers.get(name);
    return result != null ? result : defaultValue;
  }

  /** Returns all header values for {@code name}. */
  public List<String> headers(String name) {
    return headers.values(name);
  }

  /** Returns the set of distinct header names. */
  public Set<String> headerNames() {
    return headers.names();
  }

  /** Returns the number of header name/value pairs. */
  public int headerCount() {
    return headers.length();
  }

  /** Returns the header name at {@code index}. */
  public String headerName(int index) {
    return headers.getFieldName(index);
  }

  /** Returns the header value at {@code index}. */
  public String headerValue(int index) {
    return headers.getValue(index);
  }

  /** The one-shot response body; may only be consumed once. */
  public Body body() {
    return body;
  }

  /**
   * Returns the response for the HTTP redirect that triggered this response, or
   * null if this response wasn't triggered by an automatic redirect. The body
   * of the returned response should not be read because it has already been
   * consumed by the redirecting client.
   */
  public Response redirectedBy() {
    return redirectedBy;
  }

  /** One-shot HTTP response body. Subclasses supply the byte stream. */
  public abstract static class Body {
    /** The Content-Type of the body, or null if unknown. */
    public String contentType() {
      return null;
    }

    /** The Content-Length of the body, or -1 if unknown. */
    public long contentLength() {
      return -1;
    }

    /** Returns the raw byte stream. Consuming it consumes the body. */
    public abstract InputStream byteStream() throws IOException;

    /**
     * Fully reads and returns the body bytes. When the content length is
     * known, the stream is read exactly and verified to be exhausted;
     * otherwise the stream is buffered until EOF.
     */
    public byte[] bytes() throws IOException {
      long contentLength = contentLength();
      if (contentLength > Integer.MAX_VALUE) {
        throw new IOException("Cannot buffer entire body for content length: " + contentLength);
      }

      if (contentLength != -1) {
        byte[] content = new byte[(int) contentLength];
        InputStream in = byteStream();
        Util.readFully(in, content);
        if (in.read() != -1) throw new IOException("Content-Length and stream length disagree");
        return content;
      } else {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        Util.copy(byteStream(), out);
        return out.toByteArray();
      }
    }

    /**
     * Returns the response bytes as a UTF-8 character stream. Do not call this
     * method if the response content is not a UTF-8 character stream.
     */
    public Reader charStream() throws IOException {
      // TODO: parse content-type.
      return new InputStreamReader(byteStream(), "UTF-8");
    }

    /**
     * Returns the response bytes as a UTF-8 string. Do not call this method if
     * the response content is not a UTF-8 character stream.
     */
    public String string() throws IOException {
      // TODO: parse content-type.
      return new String(bytes(), "UTF-8");
    }
  }

  /** Callback for asynchronous response delivery. */
  public interface Receiver {
    void onFailure(Failure failure);
    void onResponse(Response response) throws IOException;
  }

  /** Mutable builder for {@link Response}; request and code are mandatory. */
  public static class Builder {
    private final Request request;
    private final int code;
    private final RawHeaders headers = new RawHeaders();
    private Body body;
    private Response redirectedBy;

    public Builder(Request request, int code) {
      if (request == null) throw new IllegalArgumentException("request == null");
      if (code <= 0) throw new IllegalArgumentException("code <= 0");
      this.request = request;
      this.code = code;
    }

    /**
     * Sets the header named {@code name} to {@code value}. If this request
     * already has any headers with that name, they are all replaced.
     */
    public Builder header(String name, String value) {
      headers.set(name, value);
      return this;
    }

    /**
     * Adds a header with {@code name} and {@code value}. Prefer this method for
     * multiply-valued headers like "Set-Cookie".
     */
    public Builder addHeader(String name, String value) {
      headers.add(name, value);
      return this;
    }

    public Builder body(Body body) {
      this.body = body;
      return this;
    }

    public Builder redirectedBy(Response redirectedBy) {
      this.redirectedBy = redirectedBy;
      return this;
    }

    public Response build() {
      // NOTE(review): these conditions are unreachable given the constructor
      // checks above; retained as belt-and-braces validation.
      if (request == null) throw new IllegalStateException("Response has no request.");
      if (code == -1) throw new IllegalStateException("Response has no code.");
      return new Response(this);
    }
  }
}
/*
 * Copyright 2014 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.navercorp.pinpoint.collector.cluster.zookeeper;

import java.net.InetSocketAddress;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.navercorp.pinpoint.collector.cluster.WebCluster;
import com.navercorp.pinpoint.collector.cluster.WorkerState;
import com.navercorp.pinpoint.collector.cluster.WorkerStateContext;
import com.navercorp.pinpoint.collector.cluster.zookeeper.exception.ConnectionException;
import com.navercorp.pinpoint.common.util.NetUtils;
import com.navercorp.pinpoint.common.util.PinpointThreadFactory;

/**
 * Keeps the collector's {@link WebCluster} connections in sync with the set
 * of web nodes registered under a ZooKeeper path. A single daemon worker
 * thread consumes a one-slot task queue; each triggered event makes the
 * worker re-read the child nodes and connect/disconnect cluster endpoints
 * to match. On failure it falls back to periodic retries.
 *
 * @author koo.taejin
 */
public class ZookeeperWebClusterManager implements Runnable {

    // it is okay for the collector to retry indefinitely, as long as RETRY_INTERVAL is set reasonably
    private static final int DEFAULT_RETRY_INTERVAL = 60000;

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    // Singleton task instances — the queue only needs to signal "sync now"
    // or "stop"; task identity carries all the information.
    private final GetAndRegisterTask getAndRegisterTask = new GetAndRegisterTask();
    private final StopTask stopTask = new StopTask();

    private final ZookeeperClient client;
    private final WebCluster webCluster;
    private final String zNodePath;

    // true while the last sync attempt failed; makes the poll-timeout path retry.
    private final AtomicBoolean retryMode = new AtomicBoolean(false);

    // Capacity 1 on purpose: multiple pending sync requests collapse into one
    // (a single re-read of ZooKeeper covers them all).
    private final BlockingQueue<Task> queue = new LinkedBlockingQueue<Task>(1);

    private final WorkerStateContext workerState;
    private final Thread workerThread;

    // private final Timer timer;

    // Register Worker + Job
    // synchronize current status with Zookeeper when an event(job) is triggered.
    // (the number of events does not matter as long as a single event is triggered - subsequent events may be ignored)
    public ZookeeperWebClusterManager(ZookeeperClient client, String zookeeperClusterPath, String serverIdentifier, WebCluster webCluster) {
        this.client = client;
        this.webCluster = webCluster;

        this.zNodePath = zookeeperClusterPath;

        this.workerState = new WorkerStateContext();

        final ThreadFactory threadFactory = new PinpointThreadFactory(this.getClass().getSimpleName(), true);
        this.workerThread = threadFactory.newThread(this);
    }

    /**
     * Starts the worker thread, transitioning NEW -> INITIALIZING -> STARTED.
     * Idempotent for INITIALIZING/STARTED; throws for terminal states.
     */
    public void start() {
        switch (this.workerState.getCurrentState()) {
            case NEW:
                if (this.workerState.changeStateInitializing()) {
                    logger.info("{} initialization started.", this.getClass().getSimpleName());
                    this.workerThread.start();

                    workerState.changeStateStarted();
                    logger.info("{} initialization completed.", this.getClass().getSimpleName());

                    break;
                }
                // NOTE(review): intentional-looking fall-through — if another
                // thread won the CAS to INITIALIZING, log "already initializing".
                // Confirm this is deliberate before adding a break.
            case INITIALIZING:
                logger.info("{} already initializing.", this.getClass().getSimpleName());
                break;
            case STARTED:
                logger.info("{} already started.", this.getClass().getSimpleName());
                break;
            case DESTROYING:
                throw new IllegalStateException("Already destroying.");
            case STOPPED:
                throw new IllegalStateException("Already stopped.");
            case ILLEGAL_STATE:
                throw new IllegalStateException("Invalid State.");
        }
    }

    /**
     * Stops the worker: offers a StopTask, then interrupts and joins the
     * worker thread in 100ms slices until it dies. Safe to call more than
     * once (subsequent calls just log the current state).
     */
    public void stop() {
        if (!(this.workerState.changeStateDestroying())) {
            WorkerState state = this.workerState.getCurrentState();
            logger.info("{} already {}.", this.getClass().getSimpleName(), state.toString());
            return;
        }

        logger.info("{} destorying started.", this.getClass().getSimpleName());
        final boolean stopOffer = queue.offer(stopTask);
        if (!stopOffer) {
            logger.warn("Insert stopTask failed.");
        }

        boolean interrupted = false;
        while (this.workerThread.isAlive()) {
            this.workerThread.interrupt();
            try {
                this.workerThread.join(100L);
            } catch (InterruptedException e) {
                // Remember the interrupt; keep joining until the worker exits.
                // NOTE(review): the flag is recorded but the thread's interrupt
                // status is never restored afterwards — confirm intended.
                interrupted = true;
            }
        }

        this.workerState.changeStateStopped();
        logger.info("{} destorying completed.", this.getClass().getSimpleName());
    }

    /**
     * ZooKeeper watcher callback entry point: when the watched path matches
     * this manager's cluster path, schedules a re-sync. A full queue is fine —
     * a sync is already pending and will observe the latest state.
     */
    public void handleAndRegisterWatcher(String path) {
        if (workerState.isStarted()) {
            if (zNodePath.equals(path)) {
                final boolean offerSuccess = queue.offer(getAndRegisterTask);
                if (!offerSuccess) {
                    logger.info("Message Queue is Full.");
                }
            } else {
                logger.info("Invald Path {}.", path);
            }
        } else {
            WorkerState state = this.workerState.getCurrentState();
            logger.info("{} invalid state {}.", this.getClass().getSimpleName(), state.toString());
        }
    }

    /**
     * Worker loop: waits up to DEFAULT_RETRY_INTERVAL for a task. A timeout
     * with retryMode set re-attempts the last failed sync; a sync task runs
     * the sync and arms retryMode on failure; a StopTask exits the loop.
     */
    @Override
    public void run() {
        // if the node does not exist, create a node and retry.
        // retry on timeout as well.
        while (workerState.isStarted()) {
            Task task = null;

            try {
                task = queue.poll(DEFAULT_RETRY_INTERVAL, TimeUnit.MILLISECONDS);
            } catch (InterruptedException e) {
                logger.debug(e.getMessage(), e);
            }

            if (!workerState.isStarted()) {
                break;
            }

            if (task == null) {
                if (retryMode.get()) {
                    boolean success = getAndRegisterTask.handleAndRegisterWatcher0();
                    if (success) {
                        retryMode.compareAndSet(true, false);
                    }
                }
            } else if (task instanceof GetAndRegisterTask) {
                boolean success = ((GetAndRegisterTask) task).handleAndRegisterWatcher0();
                if (!success) {
                    retryMode.compareAndSet(false, true);
                }
            } else if (task instanceof StopTask) {
                break;
            }
        }

        logger.info("{} stopped", this.getClass().getSimpleName());
    }

    // Marker interface for queue items; identity distinguishes task kinds.
    interface Task {
    }

    class GetAndRegisterTask implements Task {

        /**
         * Synchronizes WebCluster connections with the children of zNodePath
         * (creating the path when absent) and re-registers the watcher.
         *
         * @return true when no retry is needed — i.e. the sync succeeded OR
         *         it failed with a non-connection error (which a retry would
         *         not fix); false only on ConnectionException.
         */
        private boolean handleAndRegisterWatcher0() {
            boolean needNotRetry = false;
            try {
                if (!client.exists(zNodePath)) {
                    client.createPath(zNodePath, true);
                }

                // Passing true re-registers the watch on the children.
                List<String> childNodeList = client.getChildrenNode(zNodePath, true);
                List<InetSocketAddress> clusterAddressList = NetUtils.toInetSocketAddressLIst(childNodeList);

                List<InetSocketAddress> addressList = webCluster.getWebClusterList();

                logger.info("Handle register and remove Task. Current Address List = {}, Cluster Address List = {}", addressList, clusterAddressList);

                // Connect any registered node we are not yet connected to...
                for (InetSocketAddress clusterAddress : clusterAddressList) {
                    if (!addressList.contains(clusterAddress)) {
                        webCluster.connectPointIfAbsent(clusterAddress);
                    }
                }

                // ...and drop connections to nodes no longer registered.
                for (InetSocketAddress address : addressList) {
                    if (!clusterAddressList.contains(address)) {
                        webCluster.disconnectPoint(address);
                    }
                }

                needNotRetry = true;
                return needNotRetry;
            } catch (Exception e) {
                if (!(e instanceof ConnectionException)) {
                    // Non-connection failures are treated as permanent for
                    // this cycle; only connectivity problems trigger retry.
                    needNotRetry = true;
                }
            }

            return needNotRetry;
        }
    }

    // Sentinel task that tells the worker loop to exit.
    static class StopTask implements Task {
    }
}
package cherry.common.db.gen.mapper;

import static org.apache.ibatis.jdbc.SqlBuilder.BEGIN;
import static org.apache.ibatis.jdbc.SqlBuilder.DELETE_FROM;
import static org.apache.ibatis.jdbc.SqlBuilder.FROM;
import static org.apache.ibatis.jdbc.SqlBuilder.INSERT_INTO;
import static org.apache.ibatis.jdbc.SqlBuilder.ORDER_BY;
import static org.apache.ibatis.jdbc.SqlBuilder.SELECT;
import static org.apache.ibatis.jdbc.SqlBuilder.SELECT_DISTINCT;
import static org.apache.ibatis.jdbc.SqlBuilder.SET;
import static org.apache.ibatis.jdbc.SqlBuilder.SQL;
import static org.apache.ibatis.jdbc.SqlBuilder.UPDATE;
import static org.apache.ibatis.jdbc.SqlBuilder.VALUES;
import static org.apache.ibatis.jdbc.SqlBuilder.WHERE;

import cherry.common.db.gen.dto.MailQueue;
import cherry.common.db.gen.dto.MailQueueCriteria.Criteria;
import cherry.common.db.gen.dto.MailQueueCriteria.Criterion;
import cherry.common.db.gen.dto.MailQueueCriteria;

import java.util.List;
import java.util.Map;

/**
 * Dynamic SQL provider for the MAIL_QUEUE table, generated by MyBatis Generator.
 * Every method brackets its statement with the thread-local SqlBuilder DSL:
 * BEGIN() resets the builder, SQL() renders and clears it. Placeholders use
 * MyBatis OGNL syntax (#{...,jdbcType=...}).
 *
 * Generated code — prefer regenerating over hand-editing.
 */
public class MailQueueSqlProvider {

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table MAIL_QUEUE
     *
     * @mbggenerated
     */
    public String countByExample(MailQueueCriteria example) {
        BEGIN();
        SELECT("count(*)");
        FROM("MAIL_QUEUE");
        applyWhere(example, false);
        return SQL();
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table MAIL_QUEUE
     *
     * @mbggenerated
     */
    public String deleteByExample(MailQueueCriteria example) {
        BEGIN();
        DELETE_FROM("MAIL_QUEUE");
        applyWhere(example, false);
        return SQL();
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table MAIL_QUEUE
     *
     * @mbggenerated
     */
    public String insertSelective(MailQueue record) {
        BEGIN();
        INSERT_INTO("MAIL_QUEUE");
        // Only non-null properties are written; ID is omitted entirely
        // (presumably a database-generated key — TODO confirm against the schema).
        if (record.getMailId() != null) {
            VALUES("MAIL_ID", "#{mailId,jdbcType=BIGINT}");
        }
        if (record.getScheduledAt() != null) {
            VALUES("SCHEDULED_AT", "#{scheduledAt,jdbcType=TIMESTAMP}");
        }
        if (record.getUpdatedAt() != null) {
            VALUES("UPDATED_AT", "#{updatedAt,jdbcType=TIMESTAMP}");
        }
        if (record.getCreatedAt() != null) {
            VALUES("CREATED_AT", "#{createdAt,jdbcType=TIMESTAMP}");
        }
        if (record.getLockVersion() != null) {
            VALUES("LOCK_VERSION", "#{lockVersion,jdbcType=INTEGER}");
        }
        if (record.getDeletedFlg() != null) {
            VALUES("DELETED_FLG", "#{deletedFlg,jdbcType=INTEGER}");
        }
        return SQL();
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table MAIL_QUEUE
     *
     * @mbggenerated
     */
    public String selectByExample(MailQueueCriteria example) {
        BEGIN();
        if (example != null && example.isDistinct()) {
            SELECT_DISTINCT("ID");
        } else {
            SELECT("ID");
        }
        SELECT("MAIL_ID");
        SELECT("SCHEDULED_AT");
        SELECT("UPDATED_AT");
        SELECT("CREATED_AT");
        SELECT("LOCK_VERSION");
        SELECT("DELETED_FLG");
        FROM("MAIL_QUEUE");
        applyWhere(example, false);
        if (example != null && example.getOrderByClause() != null) {
            ORDER_BY(example.getOrderByClause());
        }
        return SQL();
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table MAIL_QUEUE
     *
     * @mbggenerated
     */
    public String updateByExampleSelective(Map<String, Object> parameter) {
        // MyBatis passes @Param("record") / @Param("example") as a map.
        MailQueue record = (MailQueue) parameter.get("record");
        MailQueueCriteria example = (MailQueueCriteria) parameter.get("example");
        BEGIN();
        UPDATE("MAIL_QUEUE");
        if (record.getId() != null) {
            SET("ID = #{record.id,jdbcType=BIGINT}");
        }
        if (record.getMailId() != null) {
            SET("MAIL_ID = #{record.mailId,jdbcType=BIGINT}");
        }
        if (record.getScheduledAt() != null) {
            SET("SCHEDULED_AT = #{record.scheduledAt,jdbcType=TIMESTAMP}");
        }
        if (record.getUpdatedAt() != null) {
            SET("UPDATED_AT = #{record.updatedAt,jdbcType=TIMESTAMP}");
        }
        if (record.getCreatedAt() != null) {
            SET("CREATED_AT = #{record.createdAt,jdbcType=TIMESTAMP}");
        }
        if (record.getLockVersion() != null) {
            SET("LOCK_VERSION = #{record.lockVersion,jdbcType=INTEGER}");
        }
        if (record.getDeletedFlg() != null) {
            SET("DELETED_FLG = #{record.deletedFlg,jdbcType=INTEGER}");
        }
        applyWhere(example, true);
        return SQL();
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table MAIL_QUEUE
     *
     * @mbggenerated
     */
    public String updateByExample(Map<String, Object> parameter) {
        BEGIN();
        UPDATE("MAIL_QUEUE");
        // Non-selective variant: every column is always written.
        SET("ID = #{record.id,jdbcType=BIGINT}");
        SET("MAIL_ID = #{record.mailId,jdbcType=BIGINT}");
        SET("SCHEDULED_AT = #{record.scheduledAt,jdbcType=TIMESTAMP}");
        SET("UPDATED_AT = #{record.updatedAt,jdbcType=TIMESTAMP}");
        SET("CREATED_AT = #{record.createdAt,jdbcType=TIMESTAMP}");
        SET("LOCK_VERSION = #{record.lockVersion,jdbcType=INTEGER}");
        SET("DELETED_FLG = #{record.deletedFlg,jdbcType=INTEGER}");
        MailQueueCriteria example = (MailQueueCriteria) parameter.get("example");
        applyWhere(example, true);
        return SQL();
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table MAIL_QUEUE
     *
     * @mbggenerated
     */
    public String updateByPrimaryKeySelective(MailQueue record) {
        BEGIN();
        UPDATE("MAIL_QUEUE");
        if (record.getMailId() != null) {
            SET("MAIL_ID = #{mailId,jdbcType=BIGINT}");
        }
        if (record.getScheduledAt() != null) {
            SET("SCHEDULED_AT = #{scheduledAt,jdbcType=TIMESTAMP}");
        }
        if (record.getUpdatedAt() != null) {
            SET("UPDATED_AT = #{updatedAt,jdbcType=TIMESTAMP}");
        }
        if (record.getCreatedAt() != null) {
            SET("CREATED_AT = #{createdAt,jdbcType=TIMESTAMP}");
        }
        if (record.getLockVersion() != null) {
            SET("LOCK_VERSION = #{lockVersion,jdbcType=INTEGER}");
        }
        if (record.getDeletedFlg() != null) {
            SET("DELETED_FLG = #{deletedFlg,jdbcType=INTEGER}");
        }
        WHERE("ID = #{id,jdbcType=BIGINT}");
        return SQL();
    }

    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table MAIL_QUEUE
     *
     * @mbggenerated
     */
    protected void applyWhere(MailQueueCriteria example, boolean includeExamplePhrase) {
        if (example == null) {
            return;
        }

        // OGNL access-path templates, later filled via String.format:
        //   1    = single-value condition           2    = BETWEEN condition
        //   3    = one element of an IN-list        *_th = same, with a typeHandler
        // `includeExamplePhrase` selects "example."-prefixed paths for the
        // record+example parameter-map variants.
        String parmPhrase1;
        String parmPhrase1_th;
        String parmPhrase2;
        String parmPhrase2_th;
        String parmPhrase3;
        String parmPhrase3_th;
        if (includeExamplePhrase) {
            parmPhrase1 = "%s #{example.oredCriteria[%d].allCriteria[%d].value}";
            parmPhrase1_th = "%s #{example.oredCriteria[%d].allCriteria[%d].value,typeHandler=%s}";
            // NOTE(review): the first operand path uses allCriteria[...] while the
            // secondValue path uses criteria[...] — this matches stock MyBatis
            // Generator output, but verify it still resolves correctly if
            // type-handled criteria are ever added to the Criteria class.
            parmPhrase2 = "%s #{example.oredCriteria[%d].allCriteria[%d].value} and #{example.oredCriteria[%d].criteria[%d].secondValue}";
            parmPhrase2_th = "%s #{example.oredCriteria[%d].allCriteria[%d].value,typeHandler=%s} and #{example.oredCriteria[%d].criteria[%d].secondValue,typeHandler=%s}";
            parmPhrase3 = "#{example.oredCriteria[%d].allCriteria[%d].value[%d]}";
            parmPhrase3_th = "#{example.oredCriteria[%d].allCriteria[%d].value[%d],typeHandler=%s}";
        } else {
            parmPhrase1 = "%s #{oredCriteria[%d].allCriteria[%d].value}";
            parmPhrase1_th = "%s #{oredCriteria[%d].allCriteria[%d].value,typeHandler=%s}";
            parmPhrase2 = "%s #{oredCriteria[%d].allCriteria[%d].value} and #{oredCriteria[%d].criteria[%d].secondValue}";
            parmPhrase2_th = "%s #{oredCriteria[%d].allCriteria[%d].value,typeHandler=%s} and #{oredCriteria[%d].criteria[%d].secondValue,typeHandler=%s}";
            parmPhrase3 = "#{oredCriteria[%d].allCriteria[%d].value[%d]}";
            parmPhrase3_th = "#{oredCriteria[%d].allCriteria[%d].value[%d],typeHandler=%s}";
        }

        StringBuilder sb = new StringBuilder();
        List<Criteria> oredCriteria = example.getOredCriteria();
        boolean firstCriteria = true;
        // Outer list is OR-joined; each Criteria's conditions are AND-joined
        // inside parentheses.
        for (int i = 0; i < oredCriteria.size(); i++) {
            Criteria criteria = oredCriteria.get(i);
            if (criteria.isValid()) {
                if (firstCriteria) {
                    firstCriteria = false;
                } else {
                    sb.append(" or ");
                }

                sb.append('(');
                List<Criterion> criterions = criteria.getAllCriteria();
                boolean firstCriterion = true;
                for (int j = 0; j < criterions.size(); j++) {
                    Criterion criterion = criterions.get(j);
                    if (firstCriterion) {
                        firstCriterion = false;
                    } else {
                        sb.append(" and ");
                    }

                    if (criterion.isNoValue()) {
                        // e.g. "X is null" — condition carries the whole predicate.
                        sb.append(criterion.getCondition());
                    } else if (criterion.isSingleValue()) {
                        if (criterion.getTypeHandler() == null) {
                            sb.append(String.format(parmPhrase1, criterion.getCondition(), i, j));
                        } else {
                            sb.append(String.format(parmPhrase1_th, criterion.getCondition(), i, j, criterion.getTypeHandler()));
                        }
                    } else if (criterion.isBetweenValue()) {
                        if (criterion.getTypeHandler() == null) {
                            sb.append(String.format(parmPhrase2, criterion.getCondition(), i, j, i, j));
                        } else {
                            sb.append(String.format(parmPhrase2_th, criterion.getCondition(), i, j, criterion.getTypeHandler(), i, j, criterion.getTypeHandler()));
                        }
                    } else if (criterion.isListValue()) {
                        // "X in ( #{...}, #{...}, ... )" — one placeholder per element.
                        sb.append(criterion.getCondition());
                        sb.append(" (");
                        List<?> listItems = (List<?>) criterion.getValue();
                        boolean comma = false;
                        for (int k = 0; k < listItems.size(); k++) {
                            if (comma) {
                                sb.append(", ");
                            } else {
                                comma = true;
                            }
                            if (criterion.getTypeHandler() == null) {
                                sb.append(String.format(parmPhrase3, i, j, k));
                            } else {
                                sb.append(String.format(parmPhrase3_th, i, j, k, criterion.getTypeHandler()));
                            }
                        }
                        sb.append(')');
                    }
                }
                sb.append(')');
            }
        }

        if (sb.length() > 0) {
            WHERE(sb.toString());
        }
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.roots.impl;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.fileTypes.ex.FileTypeManagerEx;
import com.intellij.openapi.module.ModifiableModuleModel;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.ModuleTypeId;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.ex.ProjectRootManagerEx;
import com.intellij.openapi.util.EmptyRunnable;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.testFramework.ExtensionTestUtil;
import com.intellij.testFramework.HeavyPlatformTestCase;
import com.intellij.testFramework.PsiTestUtil;
import com.intellij.testFramework.VfsTestUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.model.java.JavaResourceRootType;
import org.jetbrains.jps.model.java.JavaSourceRootType;
import org.jetbrains.jps.model.module.JpsModuleSourceRootType;

import java.io.File;
import java.io.IOException;
import java.util.*;

/**
 * Heavy platform test for DirectoryIndex / ProjectFileIndex: builds a three-module
 * project with overlapping content roots, libraries, synthetic libraries, excluded
 * and ignored directories (see the directory-tree comment in setUp), then asserts
 * how every directory is classified and how classification reacts to VFS changes.
 */
@HeavyPlatformTestCase.WrapInCommand
public class DirectoryIndexTest extends DirectoryIndexTestCase {
  private Module myModule2, myModule3;
  private VirtualFile myRootVFile;
  private VirtualFile myModule1Dir, myModule2Dir, myModule3Dir;
  private VirtualFile mySrcDir1, mySrcDir2;
  private SourceFolder mySrcDir1Folder, mySrcDir2Folder;
  private VirtualFile myTestSrc1;
  private SourceFolder myTestSrc1Folder;
  private VirtualFile myPack1Dir, myPack2Dir;
  private VirtualFile myFileLibDir, myFileLibSrc, myFileLibCls;
  private VirtualFile myLibAdditionalOutsideDir, myLibAdditionalOutsideSrcDir, myLibAdditionalOutsideExcludedDir, myLibAdditionalOutsideClsDir;
  private VirtualFile myLibDir, myLibSrcDir, myLibClsDir;
  private VirtualFile myLibAdditionalDir, myLibAdditionalSrcDir, myLibAdditionalSrcFile, myLibAdditionalExcludedDir, myLibAdditionalClsDir, myLibAdditionalClsFile;
  private VirtualFile myCvsDir;
  private VirtualFile myExcludeDir;
  private VirtualFile myOutputDir;
  private VirtualFile myModule1OutputDir;
  private VirtualFile myResDir, myTestResDir;
  private SourceFolder myResDirFolder, myTestResDirFolder;
  private VirtualFile myExcludedLibSrcDir, myExcludedLibClsDir;

  @Override
  protected void setUp() throws Exception {
    super.setUp();

    final File root = createTempDirectory();

    ApplicationManager.getApplication().runWriteAction(() -> {
      /*
        root
            lib
                file.src
                file.cls
            additional-lib
                src
                excluded
                cls
            module1
                src1
                    pack1
                    testSrc
                        pack2
                res
                testRes
                lib
                    src
                      exc
                    cls
                      exc
                additional-lib
                    src
                    a.txt
                    excluded
                    cls
                module2
                    src2
                        CVS
                        excluded
            module3
            out
                module1
      */
      myRootVFile = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(root);
      assertNotNull(myRootVFile);

      myFileLibDir = createChildDirectory(myRootVFile, "lib");
      myFileLibSrc = createChildData(myFileLibDir, "file.src");
      myFileLibCls = createChildData(myFileLibDir, "file.cls");
      myLibAdditionalOutsideDir = createChildDirectory(myRootVFile, "additional-lib");
      myLibAdditionalOutsideSrcDir = createChildDirectory(myLibAdditionalOutsideDir, "src");
      myLibAdditionalOutsideExcludedDir = createChildDirectory(myLibAdditionalOutsideDir, "excluded");
      myLibAdditionalOutsideClsDir = createChildDirectory(myLibAdditionalOutsideDir, "cls");
      myModule1Dir = createChildDirectory(myRootVFile, "module1");
      mySrcDir1 = createChildDirectory(myModule1Dir, "src1");
      myPack1Dir = createChildDirectory(mySrcDir1, "pack1");
      myTestSrc1 = createChildDirectory(mySrcDir1, "testSrc");
      myPack2Dir = createChildDirectory(myTestSrc1, "pack2");
      myResDir = createChildDirectory(myModule1Dir, "res");
      myTestResDir = createChildDirectory(myModule1Dir, "testRes");

      myLibDir = createChildDirectory(myModule1Dir, "lib");
      myLibSrcDir = createChildDirectory(myLibDir, "src");
      myExcludedLibSrcDir = createChildDirectory(myLibSrcDir, "exc");
      myLibAdditionalDir = createChildDirectory(myModule1Dir, "additional-lib");
      myLibAdditionalSrcDir = createChildDirectory(myLibAdditionalDir, "src");
      myLibAdditionalSrcFile = createChildData(myLibAdditionalDir, "a.txt");
      myLibAdditionalExcludedDir = createChildDirectory(myLibAdditionalDir, "excluded");
      myLibAdditionalClsDir = createChildDirectory(myLibAdditionalDir, "cls");
      // NOTE(review): despite the "File" name (and "file.cls" elsewhere being
      // createChildData), this entry is created as a *directory* — confirm
      // whether that is intentional before relying on it.
      myLibAdditionalClsFile = createChildDirectory(myLibAdditionalDir, "file.cls");
      myLibClsDir = createChildDirectory(myLibDir, "cls");
      myExcludedLibClsDir = createChildDirectory(myLibClsDir, "exc");
      myModule2Dir = createChildDirectory(myModule1Dir, "module2");
      mySrcDir2 = createChildDirectory(myModule2Dir, "src2");
      myCvsDir = createChildDirectory(mySrcDir2, "CVS");
      myExcludeDir = createChildDirectory(mySrcDir2, "excluded");

      myModule3Dir = createChildDirectory(myRootVFile, "module3");

      myOutputDir = createChildDirectory(myRootVFile, "out");
      myModule1OutputDir = createChildDirectory(myOutputDir, "module1");

      getCompilerProjectExtension().setCompilerOutputUrl(myOutputDir.getUrl());

      // fill roots of module1
      {
        ModuleRootModificationUtil.setModuleSdk(myModule, null);
        PsiTestUtil.addContentRoot(myModule, myModule1Dir);
        mySrcDir1Folder = PsiTestUtil.addSourceRoot(myModule, mySrcDir1);
        myTestSrc1Folder = PsiTestUtil.addSourceRoot(myModule, myTestSrc1, true);
        myResDirFolder = PsiTestUtil.addSourceRoot(myModule, myResDir, JavaResourceRootType.RESOURCE);
        myTestResDirFolder = PsiTestUtil.addSourceRoot(myModule, myTestResDir, JavaResourceRootType.TEST_RESOURCE);

        // Single-file library: file.cls as classes root, file.src as source root.
        ModuleRootModificationUtil.addModuleLibrary(myModule, "lib.js",
                                                    Collections.singletonList(myFileLibCls.getUrl()), Collections
                                                      .singletonList(myFileLibSrc.getUrl()));
        PsiTestUtil.addExcludedRoot(myModule, myExcludedLibClsDir);
        PsiTestUtil.addExcludedRoot(myModule, myExcludedLibSrcDir);
      }

      // fill roots of module2
      {
        myModule2 = createJavaModuleWithContent(getProject(), "module2", myModule2Dir);

        PsiTestUtil.addContentRoot(myModule2, myModule2Dir);
        mySrcDir2Folder = PsiTestUtil.addSourceRoot(myModule2, mySrcDir2);
        PsiTestUtil.addExcludedRoot(myModule2, myExcludeDir);
        // Exported (last arg true) library with its own excluded roots.
        ModuleRootModificationUtil.addModuleLibrary(myModule2, "lib",
                                                    Collections.singletonList(myLibClsDir.getUrl()),
                                                    Collections.singletonList(myLibSrcDir.getUrl()),
                                                    Arrays.asList(myExcludedLibClsDir.getUrl(), myExcludedLibSrcDir.getUrl()), DependencyScope.COMPILE, true);
      }

      // Register a synthetic (additional) library provider scoped to this project.
      ExtensionTestUtil.maskExtensions(AdditionalLibraryRootsProvider.EP_NAME,
                                       Collections.<AdditionalLibraryRootsProvider>singletonList(new AdditionalLibraryRootsProvider() {
                                         @NotNull
                                         @Override
                                         public Collection<SyntheticLibrary> getAdditionalProjectLibraries(@NotNull Project project) {
                                           return myProject == project ? Collections.singletonList(
                                             new JavaSyntheticLibrary(
                                               ContainerUtil.newArrayList(myLibAdditionalSrcDir, myLibAdditionalOutsideSrcDir),
                                               ContainerUtil.newArrayList(myLibAdditionalClsDir, myLibAdditionalOutsideClsDir),
                                               ContainerUtil.newHashSet(myLibAdditionalExcludedDir, myLibAdditionalOutsideExcludedDir),
                                               null)
                                           ) : Collections.emptyList();
                                         }
                                       }), getTestRootDisposable());

      // fill roots of module3
      {
        myModule3 = createJavaModuleWithContent(getProject(), "module3", myModule3Dir);

        PsiTestUtil.addContentRoot(myModule3, myModule3Dir);
        ModuleRootModificationUtil.addDependency(myModule3, myModule2);
      }
    });

    // to not interfere with previous test firing vfs events
    VirtualFileManager.getInstance().syncRefresh();
  }

  @Override
  protected void tearDown() throws Exception {
    // Release module references so the fixture can dispose them cleanly.
    myModule2 = null;
    myModule3 = null;
    super.tearDown();
  }

  private CompilerProjectExtension getCompilerProjectExtension() {
    final CompilerProjectExtension instance = CompilerProjectExtension.getInstance(myProject);
    assertNotNull(instance);
    return instance;
  }

  public void testDirInfos() {
    assertNotInProject(myRootVFile);

    // beware: files in directory index
    checkInfo(myFileLibSrc, null, false, true, "", null, null, myModule);
    checkInfo(myFileLibCls, null, true, false, "", null, null, myModule);

    checkInfo(myLibAdditionalOutsideSrcDir, null, false, true, "", null, null);
    checkInfo(myLibAdditionalOutsideClsDir, null, true, false, "", null, null);
    assertExcludedFromProject(myLibAdditionalOutsideExcludedDir);
    assertIndexableContent(Arrays.asList(myLibAdditionalOutsideSrcDir, myLibAdditionalOutsideClsDir),
                           Collections.singletonList(myLibAdditionalOutsideExcludedDir));

    checkInfo(myModule1Dir, myModule, false, false, null, null, null);
    checkInfo(mySrcDir1, myModule, false, false, "", mySrcDir1Folder, JavaSourceRootType.SOURCE, myModule);
    checkInfo(myPack1Dir, myModule, false, false, "pack1", mySrcDir1Folder, JavaSourceRootType.SOURCE, myModule);
    checkInfo(myTestSrc1, myModule, false, false, "", myTestSrc1Folder, JavaSourceRootType.TEST_SOURCE, myModule);
    checkInfo(myPack2Dir, myModule, false, false, "pack2", myTestSrc1Folder, JavaSourceRootType.TEST_SOURCE, myModule);
    checkInfo(myResDir, myModule, false, false, "", myResDirFolder, JavaResourceRootType.RESOURCE, myModule);
    checkInfo(myTestResDir, myModule, false, false, "", myTestResDirFolder, JavaResourceRootType.TEST_RESOURCE, myModule);

    checkInfo(myLibDir, myModule, false, false, null, null, null);
    checkInfo(myLibSrcDir, myModule, false, true, "", null, null, myModule2, myModule3);
    checkInfo(myLibClsDir, myModule, true, false, "", null, null, myModule2, myModule3);

    assertEquals(myLibSrcDir, assertInProject(myLibSrcDir).getSourceRoot());

    checkInfo(myModule2Dir, myModule2, false, false, null, null, null);
    checkInfo(mySrcDir2, myModule2, false, false, "", mySrcDir2Folder, JavaSourceRootType.SOURCE, myModule2, myModule3);
    assertNotInProject(myCvsDir);
    assertIgnored(myCvsDir);
    assertExcluded(myExcludeDir, myModule2);
    assertExcluded(myExcludedLibClsDir, myModule);
    assertExcluded(myExcludedLibSrcDir, myModule);
    assertEquals(myModule1Dir, assertInProject(myLibClsDir).getContentRoot());

    checkInfo(myModule3Dir, myModule3, false, false, null, null, null);

    VirtualFile cvs = createChildDirectory(myPack1Dir, "CVS");
    assertNotInProject(cvs);
    assertIgnored(cvs);
    assertNull(myFileIndex.getPackageNameByDirectory(cvs));
  }

  public void testDirsByPackageName() {
    checkPackage("", true, mySrcDir1, myTestSrc1, myResDir, myTestResDir, mySrcDir2, myLibSrcDir, myLibClsDir,
                 myLibAdditionalSrcDir, myLibAdditionalOutsideSrcDir, myLibAdditionalClsDir, myLibAdditionalOutsideClsDir);
    checkPackage("", false, mySrcDir1, myTestSrc1, myResDir, myTestResDir, mySrcDir2, myLibClsDir,
                 myLibAdditionalClsDir, myLibAdditionalOutsideClsDir);

    checkPackage("pack1", true, myPack1Dir);
    checkPackage("pack1", false, myPack1Dir);

    checkPackage("pack2", true, myPack2Dir);
    checkPackage("pack2", false, myPack2Dir);

    checkPackage(".pack2", false);
    checkPackage(".pack2", true);

    VirtualFile libClsPack = createChildDirectory(myLibClsDir, "pack1");
    VirtualFile libSrcPack = createChildDirectory(myLibSrcDir, "pack1");
    VirtualFile pack3Cls = createChildDirectory(myLibAdditionalClsDir, "pack3");
    VirtualFile pack3Src = createChildDirectory(myLibAdditionalSrcDir, "pack3");
    VirtualFile pack4Cls = createChildDirectory(myLibAdditionalOutsideClsDir, "pack4");
    VirtualFile pack4Src = createChildDirectory(myLibAdditionalOutsideSrcDir, "pack4");
    fireRootsChanged();
    checkPackage("pack1", true, myPack1Dir, libSrcPack, libClsPack);
    checkPackage("pack1", false, myPack1Dir, libClsPack);
    checkPackage("pack3", false, pack3Cls);
    checkPackage("pack3", true, pack3Src, pack3Cls);
    checkPackage("pack4", false, pack4Cls);
    checkPackage("pack4", true, pack4Src, pack4Cls);
  }

  public void testDirectoriesWithPackagePrefix() {
    PsiTestUtil.addSourceRoot(myModule3, myModule3Dir);
    WriteCommandAction.runWriteCommandAction(myProject, () -> {
      final ModifiableRootModel model = ModuleRootManager.getInstance(myModule3).getModifiableModel();
      model.getContentEntries()[0].getSourceFolders()[0].setPackagePrefix("pack1");
      model.commit();
    });
    checkPackage("pack1", true, myPack1Dir, myModule3Dir);
  }

  public void testPackageDirectoriesWithDots() {
    // A directory literally named "foo.bar" contributes to package foo.bar.
    VirtualFile fooBar = createChildDirectory(mySrcDir1, "foo.bar");
    VirtualFile goo1 = createChildDirectory(fooBar, "goo");
    VirtualFile foo = createChildDirectory(mySrcDir2, "foo");
    VirtualFile bar = createChildDirectory(foo, "bar");
    VirtualFile goo2 = createChildDirectory(bar, "goo");

    checkPackage("foo", false, foo);
    checkPackage("foo.bar", false, bar, fooBar);
    checkPackage("foo.bar.goo", false, goo2, goo1);
  }

  public void testCreateDir() {
    // Create via java.io (outside the VFS) and rely on refresh to pick it up.
    String path = mySrcDir1.getPath();
    assertTrue(new File(path + "/dir1/dir2").mkdirs());
    assertTrue(new File(path + "/CVS").mkdirs());
    VirtualFileManager.getInstance().syncRefresh();
  }

  public void testDeleteDir() {
    VirtualFile subdir1 = createChildDirectory(mySrcDir1, "subdir1");
    VirtualFile subdir2 = createChildDirectory(subdir1, "subdir2");
    createChildDirectory(subdir2, "subdir3");
    VfsTestUtil.deleteFile(subdir1);
  }

  public void testMoveDir() {
    VirtualFile subdir = createChildDirectory(mySrcDir2, "subdir1");
    createChildDirectory(subdir, "subdir2");
    move(subdir, mySrcDir1);
  }

  public void testRenameDir() {
    VirtualFile subdir = createChildDirectory(mySrcDir2, "subdir1");
    createChildDirectory(subdir, "subdir2");
    rename(subdir, "abc.d");
  }

  public void testRenameRoot() {
    LocalFileSystem.getInstance().refresh(false);
    rename(myModule1Dir, "newName");
  }

  public void testMoveRoot() {
    move(myModule1Dir, myModule3Dir);
  }

  public void testAddProjectDir() {
    WriteCommandAction.writeCommandAction(getProject()).run(() -> {
      VirtualFile newDir = createChildDirectory(myModule1Dir.getParent(), "newDir");
      createChildDirectory(newDir, "subdir");
      PsiTestUtil.addContentRoot(myModule, newDir);
    });
  }

  public void testChangeIgnoreList() {
    VirtualFile newDir = createChildDirectory(myModule1Dir, "newDir");
    assertInProject(newDir);

    final FileTypeManagerEx fileTypeManager = (FileTypeManagerEx)FileTypeManager.getInstance();
    final String list = fileTypeManager.getIgnoredFilesList();
    try {
      final String list1 = list + ";" + "newDir";
      ApplicationManager.getApplication().runWriteAction(() -> fileTypeManager.setIgnoredFilesList(list1));
      assertNotInProject(newDir);
    }
    finally {
      // Always restore the global ignore list — it is shared application state.
      ApplicationManager.getApplication().runWriteAction(() -> fileTypeManager.setIgnoredFilesList(list));
      assertInProject(newDir);
    }
  }

  public void testIgnoredFile() {
    VirtualFile ignoredFile = createChildData(myModule1Dir, "CVS");
    assertIgnored(ignoredFile);
  }

  // Asserts the full "ignored" contract: excluded, under-ignored, no content root, no module.
  private void assertIgnored(@NotNull VirtualFile ignoredFile) {
    DirectoryInfo info = myIndex.getInfoForFile(ignoredFile);
    assertTrue(info.isIgnored());
    assertTrue(myFileIndex.isExcluded(ignoredFile));
    assertTrue(myFileIndex.isUnderIgnored(ignoredFile));
    assertNull(myFileIndex.getContentRootForFile(ignoredFile, false));
    assertNull(myFileIndex.getModuleForFile(ignoredFile, false));
  }

  public void testAddModule() {
    WriteCommandAction.writeCommandAction(getProject()).run(() -> {
      VirtualFile newModuleContent = createChildDirectory(myRootVFile, "newModule");
      createChildDirectory(newModuleContent, "subDir");
      ModuleManager moduleManager = ModuleManager.getInstance(myProject);
      Module module = moduleManager.newModule(myRootVFile.getPath() + "/newModule.iml", ModuleTypeId.JAVA_MODULE);
      PsiTestUtil.addContentRoot(module, newModuleContent);
    });
  }

  public void testModuleUnderIgnoredDir() {
    final VirtualFile ignored = createChildDirectory(myRootVFile, ".git");
    assertTrue(FileTypeManager.getInstance().isFileIgnored(ignored));
    assertTrue(myFileIndex.isExcluded(ignored));
    assertTrue(myFileIndex.isUnderIgnored(ignored));
    final VirtualFile module4 = createChildDirectory(ignored, "module4");
    assertFalse(FileTypeManager.getInstance().isFileIgnored(module4));
    assertTrue(myFileIndex.isExcluded(module4));
    assertTrue(myFileIndex.isUnderIgnored(module4));

    // Making the nested dir a content root pulls it back into the project even
    // though its parent is ignored.
    WriteCommandAction.writeCommandAction(getProject()).run(() -> {
      ModuleManager moduleManager = ModuleManager.getInstance(myProject);
      Module module = moduleManager.newModule(myRootVFile.getPath() + "/newModule.iml", ModuleTypeId.JAVA_MODULE);
      PsiTestUtil.addContentRoot(module, module4);
      assertNotInProject(ignored);
      checkInfo(module4, module, false, false, null, null, null);
    });
  }

  public void testModuleInIgnoredDir() {
    final VirtualFile ignored = createChildDirectory(myRootVFile, ".git");
    assertTrue(FileTypeManager.getInstance().isFileIgnored(ignored));

    WriteCommandAction.writeCommandAction(getProject()).run(() -> {
      ModuleManager moduleManager = ModuleManager.getInstance(myProject);
      // Drop the fixture modules so the ignored dir itself can become a content root.
      ModifiableModuleModel model = moduleManager.getModifiableModel();
      model.disposeModule(myModule);
      model.disposeModule(myModule2);
      model.disposeModule(myModule3);
      model.commit();
      Module module = moduleManager.newModule(myRootVFile.getPath() + "/newModule.iml", ModuleTypeId.JAVA_MODULE);
      PsiTestUtil.addContentRoot(module, ignored);
      checkInfo(ignored, module, false, false, null, null, null);
    });
  }

  public void testExcludedDirsInLibraries() {
    assertFalse(myFileIndex.isInLibraryClasses(myExcludedLibClsDir));
    assertTrue(myFileIndex.isExcluded(myExcludedLibClsDir));
    assertFalse(myFileIndex.isUnderIgnored(myExcludedLibClsDir));
    assertFalse(myFileIndex.isInLibrarySource(myExcludedLibSrcDir));
    assertFalse(myFileIndex.isInSource(myExcludedLibSrcDir));
    assertTrue(myFileIndex.isExcluded(myExcludedLibSrcDir));
    assertFalse(myFileIndex.isUnderIgnored(myExcludedLibSrcDir));
  }

  public void testExplicitExcludeOfInner() {
    // Excluding module2's dir from module1 must not hide module2's own content.
    PsiTestUtil.addExcludedRoot(myModule, myModule2Dir);

    checkInfo(myModule2Dir, myModule2, false, false, null, null, null);
    checkInfo(mySrcDir2, myModule2, false, false, "", mySrcDir2Folder, JavaSourceRootType.SOURCE, myModule2, myModule3);
  }

  public void testResettingProjectOutputPath() {
    VirtualFile output1 = createChildDirectory(myModule1Dir, "output1");
    VirtualFile output2 = createChildDirectory(myModule1Dir, "output2");

    assertInProject(output1);
    assertInProject(output2);

    getCompilerProjectExtension().setCompilerOutputUrl(output1.getUrl());
    fireRootsChanged();

    assertExcluded(output1, myModule);
    assertInProject(output2);

    getCompilerProjectExtension().setCompilerOutputUrl(output2.getUrl());
    fireRootsChanged();

    assertInProject(output1);
    assertExcluded(output2, myModule);
  }

  private void fireRootsChanged() {
    ApplicationManager.getApplication().runWriteAction(
      () -> ProjectRootManagerEx.getInstanceEx(getProject()).makeRootsChange(EmptyRunnable.getInstance(), false, true));
  }

  private static OrderEntry[] toArray(Collection<OrderEntry> orderEntries) {
    return orderEntries.toArray(OrderEntry.EMPTY_ARRAY);
  }

  public void testModuleSourceAsLibrarySource() {
    ModuleRootModificationUtil.addModuleLibrary(myModule, "someLib", Collections.emptyList(), Collections.singletonList(mySrcDir1.getUrl()));

    // myModule appears twice: once as module source, once via the library.
    checkInfo(mySrcDir1, myModule, false, true, "", mySrcDir1Folder, JavaSourceRootType.SOURCE, myModule, myModule);
    Collection<OrderEntry> entriesResult = myIndex.getOrderEntries(myIndex.getInfoForFile(mySrcDir1));
    OrderEntry[] entries = toArray(entriesResult);

    assertInstanceOf(entries[0], LibraryOrderEntry.class);
    assertInstanceOf(entries[1], ModuleSourceOrderEntry.class);

    checkInfo(myTestSrc1, myModule, false, true, "testSrc", myTestSrc1Folder, JavaSourceRootType.TEST_SOURCE, myModule, myModule);
    entriesResult = myIndex.getOrderEntries(myIndex.getInfoForFile(myTestSrc1));
    entries = toArray(entriesResult);
    assertInstanceOf(entries[0], LibraryOrderEntry.class);
    assertInstanceOf(entries[1], ModuleSourceOrderEntry.class);
  }

  public void testModuleSourceAsLibraryClasses() {
    ModuleRootModificationUtil.addModuleLibrary(myModule, "someLib", Collections.singletonList(mySrcDir1.getUrl()), Collections.emptyList());
    checkInfo(mySrcDir1, myModule, true, false, "", mySrcDir1Folder, JavaSourceRootType.SOURCE, myModule);
    assertInstanceOf(assertOneElement(toArray(myIndex.getOrderEntries(assertInProject(mySrcDir1)))), ModuleSourceOrderEntry.class);
  }

  public void testModulesWithSameSourceContentRoot() {
    // now our API allows this (ReformatCodeActionTest), although UI doesn't. Maybe API shouldn't allow it as well?
    PsiTestUtil.addContentRoot(myModule2, myModule1Dir);
    PsiTestUtil.addSourceRoot(myModule2, mySrcDir1);

    checkInfo(myModule1Dir, myModule, false, false, null, null, null);
    checkInfo(mySrcDir1, myModule, false, false, "", mySrcDir1Folder, JavaSourceRootType.SOURCE, myModule3, myModule);
    checkInfo(myTestSrc1, myModule, false, false, "", myTestSrc1Folder, JavaSourceRootType.TEST_SOURCE, myModule3, myModule);
    checkInfo(myResDir, myModule, false, false, "", myResDirFolder, JavaResourceRootType.RESOURCE, myModule);

    checkInfo(mySrcDir2, myModule2, false, false, "", mySrcDir2Folder, JavaSourceRootType.SOURCE, myModule2, myModule3);
    assertEquals(myModule2Dir, myIndex.getInfoForFile(mySrcDir2).getContentRoot());
  }

  public void testModuleWithSameSourceRoot() {
    SourceFolder sourceFolder = PsiTestUtil.addSourceRoot(myModule2, mySrcDir1);
    checkInfo(mySrcDir1, myModule2, false, false, "", sourceFolder, JavaSourceRootType.SOURCE, myModule2, myModule3);
    checkInfo(myTestSrc1, myModule2, false, false, "testSrc", sourceFolder, JavaSourceRootType.SOURCE, myModule2, myModule3);
  }

  public void testModuleContentUnderSourceRoot() {
    PsiTestUtil.addContentRoot(myModule2, myPack1Dir);
    checkInfo(myPack1Dir, myModule2, false, false, null, null, null);
  }

  public void testSameSourceAndOutput() {
    PsiTestUtil.setCompilerOutputPath(myModule, mySrcDir1.getUrl(), false);
    // Output path wins: the directory becomes excluded even though it is a source root.
    assertExcluded(mySrcDir1, myModule);
  }

  public void testExcludedDirShouldBeExcludedRightAfterItsCreation() {
    VirtualFile excluded = createChildDirectory(myModule1Dir, "excluded");
    VirtualFile projectOutput = createChildDirectory(myModule1Dir, "projectOutput");
    VirtualFile module2Output = createChildDirectory(myModule1Dir, "module2Output");
    VirtualFile module2TestOutput = createChildDirectory(myModule2Dir, "module2TestOutput");

    assertInProject(excluded);
    assertInProject(projectOutput);
    assertInProject(module2Output);
    assertInProject(module2TestOutput);

    getCompilerProjectExtension().setCompilerOutputUrl(projectOutput.getUrl());

    PsiTestUtil.addExcludedRoot(myModule, excluded);
    PsiTestUtil.setCompilerOutputPath(myModule2, module2Output.getUrl(), false);
    PsiTestUtil.setCompilerOutputPath(myModule2, module2TestOutput.getUrl(), true);
    PsiTestUtil.setExcludeCompileOutput(myModule2, true);

    assertExcluded(excluded, myModule);
    assertExcluded(projectOutput, myModule);
    assertExcluded(module2Output, myModule);
    assertExcluded(module2TestOutput, myModule2);

    // Delete and re-create each dir: exclusion must be visible already inside
    // the fileCreated event, not only after a later roots change.
    VfsTestUtil.deleteFile(excluded);
    VfsTestUtil.deleteFile(projectOutput);
    VfsTestUtil.deleteFile(module2Output);
    VfsTestUtil.deleteFile(module2TestOutput);

    final List<VirtualFile> created = new ArrayList<>();
    VirtualFileListener l = new VirtualFileListener() {
      @Override
      public void fileCreated(@NotNull VirtualFileEvent e) {
        VirtualFile file = e.getFile();
        String fileName = e.getFileName();
        assertExcluded(file, fileName.contains("module2TestOutput") ? myModule2 : myModule);
        created.add(file);
      }
    };
    VirtualFileManager.getInstance().addVirtualFileListener(l, getTestRootDisposable());

    excluded = createChildDirectory(myModule1Dir, excluded.getName());
    assertExcluded(excluded, myModule);

    projectOutput = createChildDirectory(myModule1Dir, projectOutput.getName());
    assertExcluded(projectOutput, myModule);

    module2Output = createChildDirectory(myModule1Dir, module2Output.getName());
    assertExcluded(module2Output, myModule);

    module2TestOutput = createChildDirectory(myModule2Dir, module2TestOutput.getName());
    assertExcluded(module2TestOutput, myModule2);

    assertEquals(created.toString(), 4, created.size());
  }

  public void testExcludesShouldBeRecognizedRightOnRefresh() {
    final VirtualFile dir = createChildDirectory(myModule1Dir, "dir");
    final VirtualFile excluded = createChildDirectory(dir, "excluded");
    PsiTestUtil.addExcludedRoot(myModule, excluded);
    VfsTestUtil.deleteFile(dir);

    // Re-create on disk behind the VFS's back, then let refresh discover it.
    boolean created = new File(myModule1Dir.getPath(), "dir/excluded/foo").mkdirs();
    assertTrue(created);

    VirtualFileListener l = new VirtualFileListener() {
      @Override
      public void fileCreated(@NotNull VirtualFileEvent e) {
        assertEquals("dir", e.getFileName());

        VirtualFile file = e.getFile();
        assertInProject(file);
        assertExcluded(file.findFileByRelativePath("excluded"), myModule);
        assertExcluded(file.findFileByRelativePath("excluded/foo"), myModule);
      }
    };
    VirtualFileManager.getInstance().addVirtualFileListener(l, getTestRootDisposable());
    VirtualFileManager.getInstance().syncRefresh();
  }

  public void testProcessingNestedContentRootsOfExcludedDirsOnCreation() {
    String rootPath = myModule1Dir.getPath();
    final File f = new File(rootPath, "excludedDir/dir/anotherContentRoot");
    ApplicationManager.getApplication().runWriteAction(() -> {
      ModifiableRootModel rootModel = ModuleRootManager.getInstance(myModule).getModifiableModel();
      rootModel.getContentEntries()[0].addExcludeFolder(VfsUtilCore.pathToUrl(f.getParentFile().getParent()));
      rootModel.commit();

      ModuleRootModificationUtil.addContentRoot(myModule2, FileUtil.toSystemIndependentName(f.getPath()));

      assertTrue(f.getPath(), f.exists() || f.mkdirs());
      LocalFileSystem.getInstance().refresh(false);
    });

    assertExcluded(LocalFileSystem.getInstance().findFileByIoFile(f.getParentFile().getParentFile()), myModule);
    assertInProject(LocalFileSystem.getInstance().findFileByIoFile(f));
  }

  public void testSyntheticLibraryInContent() {
    ModuleRootModificationUtil.addContentRoot(myModule, FileUtil.toSystemIndependentName(myModule1Dir.getPath()));
    checkInfo(myLibAdditionalDir, myModule, false, false, null, null, null);
    checkInfo(myLibAdditionalSrcDir, myModule, false, true, "", null, null);
    checkInfo(myLibAdditionalClsDir, myModule, true, false, "", null, null);
    // Synthetic-library exclusion does not override module content.
    checkInfo(myLibAdditionalExcludedDir, myModule, false, false, null, null, null);
    assertInProject(myLibAdditionalExcludedDir);
    assertIndexableContent(Arrays.asList(myLibAdditionalSrcDir, myLibAdditionalSrcFile, myLibAdditionalExcludedDir, myLibAdditionalClsDir, myLibAdditionalClsFile), null);
  }

  public void testLibraryDirInContent() {
    ModuleRootModificationUtil.addModuleLibrary(myModule, myModule1Dir.getUrl());

    checkInfo(myModule1Dir, myModule, true, false, "", null, null, myModule);
    checkInfo(mySrcDir1, myModule, true, false, "", mySrcDir1Folder, JavaSourceRootType.SOURCE, myModule);

    checkInfo(myModule2Dir, myModule2, true, false, "module2", null, null, myModule);
    checkInfo(mySrcDir2, myModule2, true, false, "", mySrcDir2Folder, JavaSourceRootType.SOURCE, myModule2, myModule3);
    checkInfo(myExcludeDir, null, true, false, "module2.src2.excluded", null, null, myModule3);

    checkInfo(myLibDir, myModule, true, false, "lib", null, null, myModule);
    checkInfo(myLibClsDir, myModule, true, false, "", null, null, myModule2, myModule3);

    //myModule is included into order entries instead of myModule2 because classes root for libraries dominates on source roots
    checkInfo(myLibSrcDir, myModule, true, true, "", null, null, myModule, myModule3);

    checkInfo(myResDir, myModule, true, false, "", myResDirFolder, JavaResourceRootType.RESOURCE, myModule);
    assertInstanceOf(assertOneElement(toArray(myIndex.getOrderEntries(assertInProject(myResDir)))), ModuleSourceOrderEntry.class);

    checkInfo(myExcludedLibSrcDir, null, true, false, "lib.src.exc", null, null, myModule3, myModule);
    checkInfo(myExcludedLibClsDir, null, true, false, "lib.cls.exc", null, null, myModule3);

    checkPackage("lib.src.exc", true, myExcludedLibSrcDir);
    checkPackage("lib.cls.exc", true, myExcludedLibClsDir);

    checkPackage("lib.src", true);
    checkPackage("lib.cls", true);

    checkPackage("exc", false);
    checkPackage("exc", true);
  }

  public void testExcludeCompilerOutputOutsideOfContentRoot() {
    assertTrue(myFileIndex.isExcluded(myOutputDir));
    assertFalse(myFileIndex.isUnderIgnored(myOutputDir));
    assertTrue(myFileIndex.isExcluded(myModule1OutputDir));
    assertFalse(myFileIndex.isExcluded(myOutputDir.getParent()));
    assertExcludedFromProject(myOutputDir);
    assertExcludedFromProject(myModule1OutputDir);
    String moduleOutputUrl = myModule1OutputDir.getUrl();

    VfsTestUtil.deleteFile(myOutputDir);

    PsiTestUtil.setCompilerOutputPath(myModule, moduleOutputUrl, false);
    myOutputDir = createChildDirectory(myRootVFile, "out");
    myModule1OutputDir = createChildDirectory(myOutputDir, "module1");

    assertExcludedFromProject(myOutputDir);
    assertExcludedFromProject(myModule1OutputDir);
    assertTrue(myFileIndex.isExcluded(myModule1OutputDir));

    PsiTestUtil.setCompilerOutputPath(myModule, moduleOutputUrl, true);
    PsiTestUtil.setCompilerOutputPath(myModule2, moduleOutputUrl, false);
    PsiTestUtil.setCompilerOutputPath(myModule2, moduleOutputUrl, true);
    PsiTestUtil.setCompilerOutputPath(myModule3, moduleOutputUrl, false);
    PsiTestUtil.setCompilerOutputPath(myModule3, moduleOutputUrl, true);

    // now no module inherits project output dir, but it still should be project-excluded
    assertExcludedFromProject(myOutputDir);

    // project output inside module content shouldn't be
projectExcludeRoot VirtualFile projectOutputUnderContent = createChildDirectory(myModule1Dir, "projectOutputUnderContent"); getCompilerProjectExtension().setCompilerOutputUrl(projectOutputUnderContent.getUrl()); fireRootsChanged(); assertNotExcluded(myOutputDir); assertExcluded(projectOutputUnderContent, myModule); VfsTestUtil.deleteFile(projectOutputUnderContent); projectOutputUnderContent = createChildDirectory(myModule1Dir, "projectOutputUnderContent"); assertNotExcluded(myOutputDir); assertExcluded(projectOutputUnderContent, myModule); } public void testFileContentAndSourceRoots() { VirtualFile fileRoot = createChildData(myRootVFile, "fileRoot.txt"); VirtualFile fileSourceRoot = createChildData(myRootVFile, "fileSourceRoot.txt"); VirtualFile fileTestSourceRoot = createChildData(myRootVFile, "fileTestSourceRoot.txt"); assertNotInProject(fileRoot); assertFalse(myFileIndex.isInContent(fileRoot)); assertIteratedContent(myFileIndex, null, Arrays.asList(fileRoot, fileSourceRoot, fileTestSourceRoot)); ContentEntry contentEntry = PsiTestUtil.addContentRoot(myModule, fileRoot); assertNotNull(contentEntry); assertEquals(fileRoot, contentEntry.getFile()); checkInfo(fileRoot, myModule, false, false, "", null, null); assertTrue(myFileIndex.isInContent(fileRoot)); assertFalse(myFileIndex.isInSource(fileRoot)); PsiTestUtil.addContentRoot(myModule, fileSourceRoot); SourceFolder fileSourceFolder = PsiTestUtil.addSourceRoot(myModule, fileSourceRoot); checkInfo(fileSourceRoot, myModule, false, false, "", fileSourceFolder, JavaSourceRootType.SOURCE, myModule); assertTrue(myFileIndex.isInContent(fileSourceRoot)); assertTrue(myFileIndex.isInSource(fileSourceRoot)); PsiTestUtil.addContentRoot(myModule, fileTestSourceRoot); SourceFolder fileTestSourceFolder = PsiTestUtil.addSourceRoot(myModule, fileTestSourceRoot, true); checkInfo(fileTestSourceRoot, myModule, false, false, "", fileTestSourceFolder, JavaSourceRootType.TEST_SOURCE, myModule); 
assertTrue(myFileIndex.isInContent(fileTestSourceRoot)); assertTrue(myFileIndex.isInSource(fileTestSourceRoot)); assertIteratedContent(myFileIndex, Arrays.asList(fileRoot, fileSourceRoot, fileTestSourceRoot), null); // removing file source root PsiTestUtil.removeSourceRoot(myModule, fileTestSourceRoot); checkInfo(fileTestSourceRoot, myModule, false, false, "", null, null); assertTrue(myFileIndex.isInContent(fileTestSourceRoot)); assertFalse(myFileIndex.isInSource(fileTestSourceRoot)); assertIteratedContent(myFileIndex, Arrays.asList(fileRoot, fileSourceRoot, fileTestSourceRoot), null); // removing file content root PsiTestUtil.removeContentEntry(myModule, Objects.requireNonNull(contentEntry.getFile())); assertNotInProject(fileRoot); assertFalse(myFileIndex.isInContent(fileRoot)); assertFalse(myFileIndex.isInSource(fileRoot)); assertIteratedContent(myFileIndex, Arrays.asList(fileSourceRoot, fileTestSourceRoot), Collections.singletonList(fileRoot)); } public void testFileSourceRootsUnderDirContentRoot() { VirtualFile fileSourceRoot = createChildData(myModule1Dir, "fileSourceRoot.txt"); assertTrue(myFileIndex.isInContent(fileSourceRoot)); assertFalse(myFileIndex.isInSource(fileSourceRoot)); SourceFolder fileSourceFolder = PsiTestUtil.addSourceRoot(myModule, fileSourceRoot); assertTrue(myFileIndex.isInContent(fileSourceRoot)); assertTrue(myFileIndex.isInSource(fileSourceRoot)); checkInfo(fileSourceRoot, myModule, false, false, "", fileSourceFolder, JavaSourceRootType.SOURCE, myModule); // removing file source root PsiTestUtil.removeSourceRoot(myModule, fileSourceRoot); assertTrue(myFileIndex.isInContent(fileSourceRoot)); assertFalse(myFileIndex.isInSource(fileSourceRoot)); } public void testFileModuleExcludeRootUnderDirectoryRoot() { VirtualFile fileExcludeRoot = createChildData(mySrcDir1, "fileExcludeRoot.txt"); assertTrue(myFileIndex.isInContent(fileExcludeRoot)); assertTrue(myFileIndex.isInSource(fileExcludeRoot)); assertIteratedContent(myFileIndex, 
Collections.singletonList(fileExcludeRoot), null); PsiTestUtil.addExcludedRoot(myModule, fileExcludeRoot); assertFalse(myFileIndex.isInContent(fileExcludeRoot)); assertFalse(myFileIndex.isInSource(fileExcludeRoot)); assertNull(myFileIndex.getContentRootForFile(fileExcludeRoot)); assertEquals(myModule1Dir, myFileIndex.getContentRootForFile(fileExcludeRoot, false)); assertNull(myFileIndex.getModuleForFile(fileExcludeRoot)); assertEquals(myModule, myFileIndex.getModuleForFile(fileExcludeRoot, false)); assertExcluded(fileExcludeRoot, myModule); assertIteratedContent(myFileIndex, null, Collections.singletonList(fileExcludeRoot)); // removing file exclude root PsiTestUtil.removeExcludedRoot(myModule, fileExcludeRoot); assertTrue(myFileIndex.isInContent(fileExcludeRoot)); assertTrue(myFileIndex.isInSource(fileExcludeRoot)); assertIteratedContent(myFileIndex, Collections.singletonList(fileExcludeRoot), null); } public void testFileModuleExcludeRootUnderFileRoot() { VirtualFile fileRoot = createChildData(myRootVFile, "fileRoot.txt"); PsiTestUtil.addContentRoot(myModule, fileRoot); checkInfo(fileRoot, myModule, false, false, null, null, null); assertTrue(myFileIndex.isInContent(fileRoot)); assertIteratedContent(myFileIndex, Collections.singletonList(fileRoot), null); PsiTestUtil.addExcludedRoot(myModule, fileRoot); assertFalse(myFileIndex.isInContent(fileRoot)); assertExcluded(fileRoot, myModule); assertIteratedContent(myFileIndex, null, Collections.singletonList(fileRoot)); // removing file exclude root PsiTestUtil.removeExcludedRoot(myModule, fileRoot); checkInfo(fileRoot, myModule, false, false, null, null, null); assertTrue(myFileIndex.isInContent(fileRoot)); assertIteratedContent(myFileIndex, Collections.singletonList(fileRoot), null); } public void testIterateModuleLevelFileIndexMustStopBeforeTheNestingModule() { ModuleFileIndex moduleFileIndex = ModuleRootManager.getInstance(myModule).getFileIndex(); assertIteratedContent(moduleFileIndex, myModule1Dir, 
Arrays.asList(myModule1Dir, mySrcDir1, myResDir, myTestResDir, myLibAdditionalDir, myLibDir), Collections.singletonList(myModule2Dir)); } public void testFileLibraryInsideFolderLibrary() { VirtualFile file = createChildData(myLibSrcDir, "empty.txt"); ModuleRootModificationUtil.addModuleLibrary(myModule2, "lib2", Collections.emptyList(), Collections.singletonList(file.getUrl()), Collections.emptyList(), DependencyScope.COMPILE, true); // same for the dir and for the file checkInfo(file, myModule, false, true, "", null, null, myModule2, myModule3); checkInfo(myLibSrcDir, myModule, false, true, "", null, null, myModule2, myModule3); } public void testFileContentRootsModifications() { assertNotInProject(myRootVFile); VirtualFile temp = createChildDirectory(myRootVFile, "temp"); VirtualFile fileSourceRoot = createChildData(myRootVFile, "fileSourceRoot.txt"); assertNotInProject(fileSourceRoot); PsiTestUtil.addContentRoot(myModule, fileSourceRoot); SourceFolder fileSourceFolder = PsiTestUtil.addSourceRoot(myModule, fileSourceRoot); checkInfo(fileSourceRoot, myModule, false, false, "", fileSourceFolder, JavaSourceRootType.SOURCE, myModule); assertTrue(myFileIndex.isInContent(fileSourceRoot)); assertTrue(myFileIndex.isInSource(fileSourceRoot)); // delete and recreate VfsTestUtil.deleteFile(fileSourceRoot); assertNotInProject(fileSourceRoot); assertFalse(myFileIndex.isInContent(fileSourceRoot)); assertFalse(myFileIndex.isInSource(fileSourceRoot)); fileSourceRoot = createChildData(myRootVFile, "fileSourceRoot.txt"); checkInfo(fileSourceRoot, myModule, false, false, "", fileSourceFolder, JavaSourceRootType.SOURCE, myModule); assertTrue(myFileIndex.isInContent(fileSourceRoot)); assertTrue(myFileIndex.isInSource(fileSourceRoot)); // delete and move from another dir VfsTestUtil.deleteFile(fileSourceRoot); assertNotInProject(fileSourceRoot); assertFalse(myFileIndex.isInContent(fileSourceRoot)); assertFalse(myFileIndex.isInSource(fileSourceRoot)); fileSourceRoot = 
createChildData(temp, "fileSourceRoot.txt"); assertNotInProject(fileSourceRoot); move(fileSourceRoot, myRootVFile); checkInfo(fileSourceRoot, myModule, false, false, "", fileSourceFolder, JavaSourceRootType.SOURCE, myModule); assertTrue(myFileIndex.isInContent(fileSourceRoot)); assertTrue(myFileIndex.isInSource(fileSourceRoot)); // delete and copy from another dir VfsTestUtil.deleteFile(fileSourceRoot); assertNotInProject(fileSourceRoot); assertFalse(myFileIndex.isInContent(fileSourceRoot)); assertFalse(myFileIndex.isInSource(fileSourceRoot)); fileSourceRoot = createChildData(temp, "fileSourceRoot.txt"); assertNotInProject(fileSourceRoot); fileSourceRoot = copy(fileSourceRoot, myRootVFile, "fileSourceRoot.txt"); checkInfo(fileSourceRoot, myModule, false, false, "", fileSourceFolder, JavaSourceRootType.SOURCE, myModule); assertTrue(myFileIndex.isInContent(fileSourceRoot)); assertTrue(myFileIndex.isInSource(fileSourceRoot)); // delete and rename from another file VfsTestUtil.deleteFile(fileSourceRoot); assertNotInProject(fileSourceRoot); assertFalse(myFileIndex.isInContent(fileSourceRoot)); assertFalse(myFileIndex.isInSource(fileSourceRoot)); fileSourceRoot = createChildData(myRootVFile, "temp_file.txt"); assertNotInProject(fileSourceRoot); rename(fileSourceRoot, "fileSourceRoot.txt"); checkInfo(fileSourceRoot, myModule, false, false, "", fileSourceFolder, JavaSourceRootType.SOURCE, myModule); assertTrue(myFileIndex.isInContent(fileSourceRoot)); assertTrue(myFileIndex.isInSource(fileSourceRoot)); } public void testSourceContentRootsUnderExcludedRoot() { VirtualFile contentRoot = createChildDirectory(myExcludeDir, "content"); PsiTestUtil.addContentRoot(myModule2, contentRoot); checkInfo(contentRoot, myModule2, false, false, null, null, null, myModule2, myModule3); VirtualFile excludedFile = createChildData(myExcludeDir, "excluded.txt"); VirtualFile sourceRoot = createChildDirectory(myExcludeDir, "src"); VirtualFile sourceFile = createChildData(sourceRoot, 
"source.txt"); SourceFolder sourceFolder = PsiTestUtil.addSourceRoot(myModule2, sourceRoot); assertEquals(myModule2Dir, assertInProject(sourceRoot).getContentRoot()); checkInfo(sourceRoot, myModule2, false, false, "", sourceFolder, JavaSourceRootType.SOURCE, myModule2, myModule3); VirtualFile contentSourceRoot = createChildDirectory(myExcludeDir, "content-src"); VirtualFile contentSourceFile = createChildData(sourceRoot, "content-source.txt"); SourceFolder contentSourceFolder = PsiTestUtil.addSourceContentToRoots(myModule2, contentSourceRoot); checkInfo(contentSourceRoot, myModule2, false, false, "", contentSourceFolder, JavaSourceRootType.SOURCE, myModule2, myModule3); assertIteratedContent(myModule2, Arrays.asList(sourceFile, contentSourceFile, sourceRoot, contentSourceRoot), Arrays.asList(excludedFile, myExcludeDir)); } public void testSourceContentRootsUnderExcludedRootUnderSourceRoot() { VirtualFile excluded = createChildDirectory(myModule2Dir, "excluded"); PsiTestUtil.addExcludedRoot(myModule2, excluded); VirtualFile excludedFile = createChildData(excluded, "excluded.txt"); VirtualFile contentRoot = createChildDirectory(excluded, "content"); PsiTestUtil.addContentRoot(myModule2, contentRoot); checkInfo(contentRoot, myModule2, false, false, null, null, null); VirtualFile sourceRoot = createChildDirectory(excluded, "src"); SourceFolder sourceFolder = PsiTestUtil.addSourceRoot(myModule2, sourceRoot); VirtualFile sourceFile = createChildData(sourceRoot, "source.txt"); assertEquals(myModule2Dir, assertInProject(sourceRoot).getContentRoot()); checkInfo(sourceRoot, myModule2, false, false, "", sourceFolder, JavaSourceRootType.SOURCE, myModule2, myModule3); VirtualFile contentSourceRoot = createChildDirectory(excluded, "content-src"); VirtualFile contentSourceFile = createChildData(contentSourceRoot, "content-source.txt"); SourceFolder contentSourceFolder = PsiTestUtil.addSourceContentToRoots(myModule2, contentSourceRoot); checkInfo(contentSourceRoot, myModule2, 
false, false, "", contentSourceFolder, JavaSourceRootType.SOURCE, myModule2, myModule3); assertIteratedContent(myModule2, Arrays.asList(sourceFile, contentSourceFile, sourceRoot, contentSourceRoot), Arrays.asList(excludedFile, myExcludeDir)); } public void testExcludedSourceRootUnderExcluded() { VirtualFile excluded = createChildDirectory(myModule2Dir, "excluded"); PsiTestUtil.addExcludedRoot(myModule2, excluded); VirtualFile src = createChildDirectory(excluded, "src"); VirtualFile sourceFile = createChildData(src, "src.txt"); PsiTestUtil.addSourceRoot(myModule2, src); PsiTestUtil.addExcludedRoot(myModule2, src); assertExcluded(src, myModule2); assertIteratedContent(myModule2, null, Collections.singletonList(sourceFile)); } private void checkInfo(VirtualFile file, @Nullable Module module, boolean isInLibraryClasses, boolean isInLibrarySource, @Nullable String packageName, @Nullable SourceFolder moduleSourceFolder, @Nullable final JpsModuleSourceRootType<?> moduleSourceRootType, Module... 
modulesOfOrderEntries) { DirectoryInfo info = assertInProject(file); assertEquals(module, info.getModule()); if (moduleSourceFolder != null || moduleSourceRootType != null) { assertTrue("isInModuleSource", info.isInModuleSource(file)); assertEquals(moduleSourceFolder, myIndex.getSourceRootFolder(info)); assertEquals(moduleSourceRootType, myIndex.getSourceRootType(info)); } else { assertFalse("isInModuleSource", info.isInModuleSource(file)); } assertEquals(isInLibraryClasses, info.hasLibraryClassRoot()); assertEquals(isInLibrarySource, info.isInLibrarySource(file)); assertEquals(isInLibraryClasses || isInLibrarySource, myFileIndex.isInLibrary(file)); if (file.isDirectory()) { assertEquals(packageName, myFileIndex.getPackageNameByDirectory(file)); } assertEquals(Arrays.toString(toArray(myIndex.getOrderEntries(info))), modulesOfOrderEntries.length, toArray(myIndex.getOrderEntries(info)).length); for (Module aModule : modulesOfOrderEntries) { OrderEntry found = ModuleFileIndexImpl.findOrderEntryWithOwnerModule(aModule, myIndex.getOrderEntries(info)); assertNotNull("not found: " + aModule + " in " + Arrays.toString(toArray(myIndex.getOrderEntries(info))), found); } } private void checkPackage(String packageName, boolean includeLibrarySources, VirtualFile... 
expectedDirs) { VirtualFile[] actualDirs = myIndex.getDirectoriesByPackageName(packageName, includeLibrarySources).toArray(VirtualFile.EMPTY_ARRAY); assertNotNull(actualDirs); Arrays.sort(actualDirs, Comparator.comparing(VirtualFile::getPath)); Arrays.sort(expectedDirs, Comparator.comparing(VirtualFile::getPath)); assertOrderedEquals(actualDirs, expectedDirs); for (VirtualFile dir : expectedDirs) { String actualName = myIndex.getPackageName(dir); assertEquals("Invalid package name for dir " + dir + ": " + packageName, packageName, actualName); } } public void testUnrelatedDirectoriesCreationMustNotLeadToDirectoryIndexRebuildToImproveCheckoutSpeed() { VirtualFile root = ModuleRootManager.getInstance(myModule).getContentRoots()[0]; WriteAction.run(()->ModuleRootModificationUtil.updateModel(myModule, model -> { ContentEntry rootEntry = model.getContentEntries()[0]; rootEntry.addSourceFolder(createChildDirectory(root, "extsrc"), false); })); DirectoryIndexImpl dirIndex = (DirectoryIndexImpl)DirectoryIndex.getInstance(myProject); RootIndex rootIndex = dirIndex.getRootIndex(); VirtualFile xxx = createChildDirectory(root, "xxx"); assertFalse(ProjectFileIndex.getInstance(getProject()).isInSource(xxx)); delete(xxx); assertSame(rootIndex, dirIndex.getRootIndex()); } public void testSourceRootResidingUnderExcludedDirectoryMustBeIndexed() throws IOException { VirtualFile contentDir = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(createTempDir("module")); Module module = createJavaModuleWithContent(getProject(), "module", contentDir); ApplicationManager.getApplication().runWriteAction(() -> { VirtualFile excludedDir = createChildDirectory(contentDir, "excluded"); VirtualFile sourcesDir = createChildDirectory(excludedDir, "sources"); createChildData(sourcesDir, "A.java"); PsiTestUtil.addContentRoot(module, contentDir); PsiTestUtil.addExcludedRoot(module, excludedDir); PsiTestUtil.addSourceRoot(module, sourcesDir); }); VirtualFile aJava = 
contentDir.findChild("excluded").findChild("sources").findChild("A.java"); assertIndexableContent(Collections.singletonList(aJava), Collections.emptyList()); } }
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 ******************************************************************************/
package org.apache.sling.hapi.impl;

import org.apache.sling.hapi.HApiProperty;
import org.apache.sling.hapi.HApiType;
import org.apache.sling.hapi.MicrodataAttributeHelper;
import org.apache.sling.hapi.HApiUtil;
import org.apache.felix.scr.annotations.*;
import org.apache.felix.scr.annotations.Property;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.commons.osgi.PropertiesUtil;
import org.osgi.service.component.ComponentContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.jcr.*;
import javax.jcr.query.Query;
import javax.jcr.query.QueryManager;
import javax.jcr.query.QueryResult;

import java.util.*;

/**
 * Default {@link HApiUtil} implementation: resolves hypermedia API type
 * definitions stored as JCR content, either by absolute path or by a
 * {@code fqdn} property looked up under the configured search paths.
 */
@Component(label = "Apache Sling Hypermedia API tools", metatype = true)
@Service()
public class HApiUtilImpl implements HApiUtil {

    @Property(label = "HApi Resource Type", cardinality = 0, value = DEFAULT_RESOURCE_TYPE)
    public static final String HAPI_RESOURCE_TYPE = "org.apache.sling.hapi.tools.resourcetype";

    @Property(label = "HApi Types Search Paths", cardinality=50, value = {"/libs/sling/hapi/types"})
    public static final String HAPI_PATHS = "org.apache.sling.hapi.tools.searchpaths";

    // Populated from the OSGi configuration on activation.
    public static String resourceType;
    public static String[] hApiPaths;

    @Activate
    private void activate(ComponentContext context, Map<String, Object> configuration) {
        resourceType = PropertiesUtil.toString(configuration.get(HAPI_RESOURCE_TYPE), DEFAULT_RESOURCE_TYPE);
        hApiPaths = PropertiesUtil.toStringArray(configuration.get(HAPI_PATHS));
    }

    /**
     * {@inheritDoc}
     * <p>
     * First tries {@code type} as a resource path; failing that, queries each
     * configured search path for a node whose {@code fqdn} equals {@code type}.
     * Returns {@code null} when nothing matches (the type is then treated as
     * abstract by {@link #fromPath}).
     */
    public Node getTypeNode(ResourceResolver resolver, String type) throws RepositoryException {
        // NOTE(review): adaptTo(Session.class) can return null for some
        // resolver implementations — confirm callers always run against a JCR resolver.
        Session session = resolver.adaptTo(Session.class);

        // Try to resolve the resource as a path
        Resource res = resolver.getResource(type);
        if (null != res) {
            LOG.debug("res = " + res.getName() + " " + res.getPath());
            return res.adaptTo(Node.class);
        } else {
            // Not a path: search by fqdn under each configured path (deduplicated).
            for (String path : new HashSet<String>(Arrays.asList(hApiPaths))) {
                // Remove trailing slash from path
                path = (path.endsWith("/")) ? path.substring(0, path.length() - 1) : path;

                // Get the query manager for the session
                QueryManager queryManager = session.getWorkspace().getQueryManager();

                // Build query for the search paths.
                // SECURITY(review): the query is built by string concatenation;
                // a 'type' or configured path containing quotes/brackets could
                // alter the JCR-SQL2 statement. Consider bind variables.
                StringBuilder queryString = new StringBuilder("SELECT * FROM [nt:unstructured] WHERE ");
                queryString.append(String.format("ISDESCENDANTNODE([%s]) ", path));
                queryString.append(String.format("AND [sling:resourceType]='%s' AND fqdn = '%s'", resourceType, type));

                // Execute query
                Query query = queryManager.createQuery(queryString.toString(), Query.JCR_SQL2);
                LOG.debug("Querying HAPi: {}", queryString.toString());
                QueryResult result = query.execute();

                NodeIterator nodeIter = result.getNodes();
                if (nodeIter.hasNext()) {
                    return nodeIter.nextNode();
                }
                // otherwise keep scanning the remaining search paths
            }

            // Type has to be abstract
            return null;
        }
    }

    /**
     * {@inheritDoc}
     * <p>
     * Returns an {@link AbstractHapiTypeImpl} placeholder when the type cannot
     * be resolved to a node.
     */
    public HApiType fromPath(ResourceResolver resolver, String type) throws RepositoryException {
        Node typeNode = this.getTypeNode(resolver, type);
        LOG.debug("typeNode=" + typeNode);
        if (null == typeNode) {
            return new AbstractHapiTypeImpl(type);
        } else {
            return fromNode(resolver, typeNode);
        }
    }

    /**
     * {@inheritDoc}
     * <p>
     * Materializes a full {@link HApiType} from a type node: name, description,
     * fqdn, optional parent ("extends"), type parameters, and one
     * {@link HApiProperty} per child node.
     */
    public HApiType fromNode(ResourceResolver resolver, Node typeNode) throws RepositoryException {
        if (null == typeNode) return null;

        String name = typeNode.getProperty("name").getValue().getString();
        String description = typeNode.getProperty("description").getValue().getString();
        String path = typeNode.getPath();
        String fqdn = typeNode.getProperty("fqdn").getValue().getString();

        // get parent if it exists
        HApiType parent = null;
        String parentPath = typeNode.hasProperty("extends") ? typeNode.getProperty("extends").getString() : null;
        if (null != parentPath) {
            parent = this.fromPath(resolver, parentPath);
        }

        // get parameters
        Value[] parameterValues = typeNode.hasProperty("parameters")
                ? typeNode.getProperty("parameters").getValues()
                : new Value[]{};
        List<String> parameters = new ArrayList<String>(parameterValues.length);
        for (Value p : parameterValues) {
            parameters.add(p.getString());
        }

        // Get properties
        Map<String, HApiProperty> properties = new HashMap<String, HApiProperty>();

        // Add the properties from this node. Use the typed NodeIterator API
        // instead of assigning the raw iterator to Iterator<Node> (unchecked).
        NodeIterator it = typeNode.getNodes();
        while (it.hasNext()) {
            Node propNode = it.nextNode();
            String propName = propNode.getName();
            String propDescription = propNode.hasProperty("description")
                    ? propNode.getProperty("description").getString()
                    : "";

            // TODO: maybe create adapter and use adaptTo()
            // TODO: this could be slow, the types can be instantiated externally in a service activate()
            String type = propNode.getProperty("type").getValue().getString();
            HApiType propType = this.fromPath(resolver, type);

            boolean propMultiple = propNode.hasProperty("multiple")
                    ? propNode.getProperty("multiple").getBoolean()
                    : false;

            HApiProperty prop = new HApiPropertyImpl(propName, propDescription, propType, propMultiple);
            properties.put(prop.getName(), prop);
        }

        return new HApiTypeImpl(name, description, path, fqdn, parameters, properties, parent, false);
    }

    /**
     * {@inheritDoc}
     */
    public MicrodataAttributeHelper getHelper(ResourceResolver resolver, String type) throws RepositoryException {
        return new MicrodataAttributeHelperImpl(resolver, TypesCache.getInstance(this).getType(resolver, type));
    }
}

/**
 * <p>Cache for types.</p>
 *
 * Process-wide singleton; all access is synchronized because it is reached
 * from concurrent Sling request threads.
 */
class TypesCache {
    private static final Logger LOG = LoggerFactory.getLogger(TypesCache.class);

    Map<String, HApiType> types;
    private static TypesCache singleton = null;
    private HApiUtil hApiUtil;

    /** Lazily creates the shared instance; synchronized to avoid a racy double init. */
    public static synchronized TypesCache getInstance(HApiUtil hApiUtil) {
        if (null == singleton) {
            singleton = new TypesCache(hApiUtil);
        }
        LOG.debug("singleton: {}", singleton);
        return singleton;
    }

    private TypesCache(HApiUtil hApiUtil) {
        this.types = new HashMap<String, HApiType>();
        this.hApiUtil = hApiUtil;
    }

    /**
     * Returns the cached type for {@code typePath}, resolving and caching it on miss.
     * <p>
     * Fix: the previous version looked the entry up under {@code typePath} but
     * stored it under {@code type.getPath()}; for types resolved by fqdn those
     * keys differ, so the cache never hit. The entry is now stored under both
     * the lookup key and the canonical path.
     */
    public synchronized HApiType getType(ResourceResolver resolver, String typePath) throws RepositoryException {
        HApiType type = types.get(typePath);
        if (null == type) {
            type = hApiUtil.fromPath(resolver, typePath);
            types.put(typePath, type);
            if (null != type.getPath() && !typePath.equals(type.getPath())) {
                types.put(type.getPath(), type);
            }
        }
        return type;
    }

    public synchronized void addType(HApiType type) {
        this.types.put(type.getPath(), type);
    }
}
package biz.aQute.resolve;

import static aQute.bnd.osgi.resource.CapReqBuilder.createRequirementFromCapability;
import static aQute.bnd.osgi.resource.ResourceUtils.createWildcardRequirement;
import static aQute.bnd.osgi.resource.ResourceUtils.getIdentityCapability;
import static java.util.Collections.singleton;

import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.osgi.resource.Capability;
import org.osgi.resource.Requirement;
import org.osgi.resource.Resource;
import org.osgi.resource.Wire;
import org.osgi.service.repository.Repository;
import org.osgi.service.resolver.ResolutionException;
import org.osgi.service.resolver.Resolver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import aQute.bnd.http.HttpClient;
import aQute.bnd.osgi.Processor;
import aQute.bnd.osgi.repository.ResourcesRepository;
import aQute.bnd.osgi.repository.XMLResourceParser;
import aQute.bnd.osgi.resource.ResolutionDirective;
import aQute.bnd.osgi.resource.ResourceBuilder;
import aQute.bnd.osgi.resource.ResourceUtils;
import aQute.bnd.osgi.resource.ResourceUtils.IdentityCapability;
import aQute.bnd.repository.osgi.OSGiRepository;
import aQute.lib.strings.Strings;

/**
 * Validates that each resource in a set of repositories can be resolved
 * against those repositories plus a caller-supplied "system" resource
 * (the framework capabilities).
 */
public class ResolverValidator extends Processor {
	private final static Logger	logger			= LoggerFactory.getLogger(ResolverValidator.class);
	LogReporter					reporter		= new LogReporter(this);
	Resolver					resolver		= new BndResolver(reporter);
	List<URI>					repositories	= new ArrayList<>();
	Resource					system			= null;

	/** Outcome of resolving a single resource, with the requirements bucketed by where they were found. */
	public static class Resolution {
		public Resource				resource;
		public boolean				succeeded;
		public String				message;
		public Set<Resource>		resolved	= new LinkedHashSet<>();
		public List<Requirement>	system		= new ArrayList<>();
		public List<Requirement>	repos		= new ArrayList<>();
		public List<Requirement>	missing		= new ArrayList<>();
		public List<Requirement>	optionals	= new ArrayList<>();
		public List<Requirement>	unresolved	= new ArrayList<>();
	}

	public ResolverValidator(Processor parent) throws Exception {
		super(parent);
	}

	public ResolverValidator() {}

	public void addRepository(URI url) throws Exception {
		repositories.add(url);
	}

	/** Sets the resource providing the framework/system capabilities. Must be called before validation. */
	public void setSystem(Resource resource) throws Exception {
		assert resource != null;
		this.system = resource;
	}

	/**
	 * Validates every resource discoverable in the configured repository URIs.
	 */
	public List<Resolution> validate() throws Exception {
		try (OSGiRepository repository = new OSGiRepository(); HttpClient httpClient = new HttpClient()) {
			Map<String, String> map = new HashMap<>();
			map.put("locations", Strings.join(repositories));
			map.put("name", "ResolverValidator");
			repository.setProperties(map);
			Processor registry = new Processor();
			registry.addBasicPlugin(httpClient);
			repository.setRegistry(registry);
			repository.setReporter(this);
			Set<Resource> resources = getAllResources(repository);
			return validateResources(repository, resources);
		}
	}

	/**
	 * Validates the given resources against the configured repositories plus
	 * the resources themselves.
	 */
	public List<Resolution> validate(Collection<Resource> toBeChecked) throws Exception {
		Set<Resource> allResources = new LinkedHashSet<>();
		for (URI uri : repositories) {
			allResources.addAll(XMLResourceParser.getResources(uri));
		}
		allResources.addAll(toBeChecked);
		ResourcesRepository repository = new ResourcesRepository(allResources);
		return validateResources(repository, toBeChecked);
	}

	/**
	 * Resolves each resource in turn. Resources already pulled in by an
	 * earlier successful resolution are marked succeeded without re-resolving.
	 */
	public List<Resolution> validateResources(Repository repository, Collection<Resource> resources) throws Exception {
		setProperty("-runfw", "dummy");
		List<Resolution> result = new ArrayList<>();
		List<Resource> resourceList = new ArrayList<>(resources);
		while (!resourceList.isEmpty()) {
			Resource resource = resourceList.remove(0);
			Resolution resolution = resolve(repository, resource);
			result.add(resolution);
			for (Resource resolved : resolution.resolved) {
				// already proven resolvable as part of this closure; skip its own run
				if (resourceList.remove(resolved)) {
					Resolution curResolution = new Resolution();
					curResolution.resource = resolved;
					curResolution.succeeded = true;
					result.add(curResolution);
				}
			}
		}
		return result;
	}

	/** Returns every resource the repository advertises, via a wildcard requirement. */
	public static Set<Resource> getAllResources(Repository repository) {
		Requirement r = createWildcardRequirement();
		Map<Requirement, Collection<Capability>> providers = repository.findProviders(Collections.singleton(r));
		Set<Resource> resources = ResourceUtils.getResources(providers.get(r));
		return resources;
	}

	private BndrunResolveContext getResolveContext() throws Exception {
		BndrunResolveContext context = new BndrunResolveContext(this, null, this, reporter) {
			@Override
			void loadFramework(ResourceBuilder systemBuilder) throws Exception {
				// the configured system resource stands in for the framework
				systemBuilder.addCapabilities(system.getCapabilities(null));
			}
		};
		return context;
	}

	/** Builds a synthetic requirement matching the resource's identity capability. */
	public Requirement getIdentity(Resource resource) {
		IdentityCapability identityCapability = getIdentityCapability(resource);
		return createRequirementFromCapability(identityCapability).buildSyntheticRequirement();
	}

	/**
	 * Resolves one resource. On failure, classifies each of the resource's
	 * requirements as satisfied by the system resource, satisfied by the
	 * repository, optional, or missing.
	 */
	public Resolution resolve(Repository repository, Resource resource) throws Exception {
		Resolution resolution = new Resolution();
		Requirement identity = getIdentity(resource);
		setProperty("-runrequires", ResourceUtils.toRequireCapability(identity));
		BndrunResolveContext context = getResolveContext();
		context.addRepository(repository);
		context.init();
		resolution.resource = resource;
		try {
			Map<Resource, List<Wire>> resolve2 = resolver.resolve(context);
			resolution.succeeded = true;
			// snapshot instead of exposing the resolver's internal key-set view
			resolution.resolved = new LinkedHashSet<>(resolve2.keySet());
			logger.debug("resolving {} succeeded", resource);
		} catch (ResolutionException e) {
			logger.debug("resolving {} failed", resource);
			resolution.succeeded = false;
			resolution.message = e.getMessage();
			for (Requirement req : e.getUnresolvedRequirements()) {
				logger.debug("    missing {}", req);
				resolution.unresolved.add(req);
			}
			ResourcesRepository systemRepository = new ResourcesRepository(system);
			for (Requirement r : resource.getRequirements(null)) {
				Collection<Capability> caps = systemRepository.findProvider(r);
				boolean missing = caps.isEmpty();
				if (missing) {
					Set<Requirement> requirements = singleton(r);
					caps = repository.findProviders(requirements)
						.get(r);
					missing = caps.isEmpty();
					if (missing) {
						if (ResourceUtils.getResolution(r) == ResolutionDirective.optional)
							resolution.optionals.add(r);
						else
							resolution.missing.add(r);
					} else {
						logger.debug("     found {} in repo", r);
						resolution.repos.add(r);
					}
				} else {
					logger.debug("     found {} in system", r);
					resolution.system.add(r);
				}
			}
			error("resolving %s failed with %s", resource, resolution.message);
		} catch (Exception e) {
			// log instead of printStackTrace() so the trace reaches the configured appenders
			logger.error("resolving {} failed with exception", resource, e);
			error("resolving %s failed with %s", context.getInputResource()
				.getRequirements(null), e);
			resolution.message = e.getMessage();
		}
		return resolution;
	}
}
package net.i2p.router.message;
/*
 * free (adj.): unencumbered; not under the control of others
 * Written by jrandom in 2003 and released into the public domain
 * with no warranty of any kind, either expressed or implied.
 * It probably won't make your computer catch on fire, or eat
 * your children, but it might. Use at your own risk.
 *
 */

import java.util.HashSet;
import java.util.Set;

import net.i2p.data.Certificate;
import net.i2p.data.Hash;
import net.i2p.data.PublicKey;
import net.i2p.data.router.RouterInfo;
import net.i2p.data.SessionKey;
import net.i2p.data.SessionTag;
import net.i2p.data.i2np.DeliveryInstructions;
import net.i2p.data.i2np.DeliveryStatusMessage;
import net.i2p.data.i2np.I2NPMessage;
import net.i2p.router.Job;
import net.i2p.router.JobImpl;
import net.i2p.router.MessageSelector;
import net.i2p.router.ReplyJob;
import net.i2p.router.Router;
import net.i2p.router.RouterContext;
import net.i2p.router.peermanager.PeerProfile;
import net.i2p.util.Log;

/**
 * Build a test message that will be sent to the target to make sure they're alive.
 * Once that is verified, onSendJob is enqueued. If their reachability isn't
 * known (or they're unreachable) within timeoutMs, onSendFailedJob is enqueued.
 * The test message is sent at the specified priority.
 *
 * The probe is a garlic message carrying a single DeliveryStatusMessage clove
 * addressed back to {@code replyTo}; receipt of that status message with the
 * matching (random) id proves the target processed the garlic.
 */
public class BuildTestMessageJob extends JobImpl {
    private Log _log;
    private RouterInfo _target;       // router being probed
    private Hash _replyTo;            // router the ack clove is routed back to
    private Job _onSend;              // fired on confirmed reachability
    private Job _onSendFailed;        // fired on failure/timeout
    private long _timeoutMs;
    private int _priority;
    // random id stamped on the DeliveryStatusMessage; -1 until
    // buildGarlicCloveConfig() assigns it
    private long _testMessageKey;

    /**
     * @param target router being tested
     * @param replyTo router the delivery-status ack should be sent back to
     * @param onSendJob after the ping is successful
     * @param onSendFailedJob after the ping fails or times out
     * @param timeoutMs how long to wait before timing out
     * @param priority how high priority to send this test
     */
    public BuildTestMessageJob(RouterContext ctx, RouterInfo target, Hash replyTo,
                               Job onSendJob, Job onSendFailedJob, long timeoutMs, int priority) {
        super(ctx);
        _log = ctx.logManager().getLog(BuildTestMessageJob.class);
        _target = target;
        _replyTo = replyTo;
        _onSend = onSendJob;
        _onSendFailed = onSendFailedJob;
        _timeoutMs = timeoutMs;
        _priority = priority;
        _testMessageKey = -1;
    }

    public String getName() { return "Build Test Message"; }

    /**
     * Short-circuits straight to onSend if the peer's profile already shows it
     * active; otherwise builds and enqueues the garlic probe.
     */
    public void runJob() {
        if (alreadyKnownReachable()) {
            getContext().jobQueue().addJob(_onSend);
            return;
        }

        // This is a test message - build a garlic with a DeliveryStatusMessage that
        // first goes to the peer then back to us.
        if (_log.shouldLog(Log.DEBUG))
            _log.debug("Building garlic message to test " + _target.getIdentity().getHash().toBase64());

        // NOTE: buildGarlicCloveConfig() must run before buildMessageSelector()
        // below, because it assigns _testMessageKey which the selector captures.
        GarlicConfig config = buildGarlicCloveConfig();
        // TODO: make the last params on this specify the correct sessionKey and tags used
        ReplyJob replyJob = new JobReplyJob(getContext(), _onSend,
                                            config.getRecipient().getIdentity().getPublicKey(),
                                            config.getId(), null, new HashSet<SessionTag>());
        MessageSelector sel = buildMessageSelector();
        // NOTE(review): _onSendFailed is passed for two distinct callback slots
        // here — confirm against SendGarlicJob's constructor which slots these are
        // (likely onSendFailed and onReplyTimeout).
        SendGarlicJob job = new SendGarlicJob(getContext(), config, null, _onSendFailed,
                                              replyJob, _onSendFailed, _timeoutMs, _priority, sel);
        getContext().jobQueue().addJob(job);
    }

    /** True if the peer's profile exists and is currently marked active. */
    private boolean alreadyKnownReachable() {
        PeerProfile profile = getContext().profileOrganizer().getProfile(_target.getIdentity().getHash());
        if ( (profile == null) || (!profile.getIsActive()) )
            return false;
        else
            return true;
    }

    /**
     * Selector matching the DeliveryStatusMessage carrying _testMessageKey,
     * expiring timeoutMs from now. Call only after buildGarlicCloveConfig()
     * has set _testMessageKey.
     */
    private MessageSelector buildMessageSelector() {
        return new TestMessageSelector(_testMessageKey, _timeoutMs + getContext().clock().now());
    }

    /**
     * Builds the garlic wrapper: one ack clove plus unencrypted router-delivery
     * instructions pointing at the target. Also picks the random
     * _testMessageKey used to correlate the reply.
     */
    private GarlicConfig buildGarlicCloveConfig() {
        _testMessageKey = getContext().random().nextLong(I2NPMessage.MAX_ID_VALUE);
        if (_log.shouldLog(Log.INFO))
            _log.info("Test message key: " + _testMessageKey);
        GarlicConfig config = new GarlicConfig();

        PayloadGarlicConfig ackClove = buildAckClove();
        config.addClove(ackClove);

        DeliveryInstructions instructions = new DeliveryInstructions();
        instructions.setDeliveryMode(DeliveryInstructions.DELIVERY_MODE_ROUTER);
        instructions.setDelayRequested(false);
        instructions.setDelaySeconds(0);
        instructions.setEncrypted(false);
        instructions.setEncryptionKey(null);
        instructions.setRouter(_target.getIdentity().getHash());
        instructions.setTunnelId(null);

        config.setCertificate(new Certificate(Certificate.CERTIFICATE_TYPE_NULL, null));
        config.setDeliveryInstructions(instructions);
        config.setId(getContext().random().nextLong(I2NPMessage.MAX_ID_VALUE));
        // extra clock-fudge slack beyond the caller's timeout
        config.setExpiration(_timeoutMs+getContext().clock().now()+2*Router.CLOCK_FUDGE_FACTOR);
        config.setRecipient(_target);
        return config;
    }

    /**
     * Build a clove that sends a DeliveryStatusMessage to us
     */
    private PayloadGarlicConfig buildAckClove() {
        PayloadGarlicConfig ackClove = new PayloadGarlicConfig();

        DeliveryInstructions ackInstructions = new DeliveryInstructions();
        ackInstructions.setDeliveryMode(DeliveryInstructions.DELIVERY_MODE_ROUTER);
        ackInstructions.setRouter(_replyTo); // yikes!
        ackInstructions.setDelayRequested(false);
        ackInstructions.setDelaySeconds(0);
        ackInstructions.setEncrypted(false);

        // the status message's id is the correlation key the selector watches for
        DeliveryStatusMessage msg = new DeliveryStatusMessage(getContext());
        msg.setArrival(getContext().clock().now());
        msg.setMessageId(_testMessageKey);
        if (_log.shouldLog(Log.DEBUG))
            _log.debug("Delivery status message key: " + _testMessageKey + " arrival: " + msg.getArrival());

        ackClove.setCertificate(new Certificate(Certificate.CERTIFICATE_TYPE_NULL, null));
        ackClove.setDeliveryInstructions(ackInstructions);
        ackClove.setExpiration(_timeoutMs+getContext().clock().now());
        ackClove.setId(getContext().random().nextLong(I2NPMessage.MAX_ID_VALUE));
        ackClove.setPayload(msg);
        ackClove.setRecipient(_target);
        return ackClove;
    }

    /**
     * Search inbound messages for delivery status messages with our key
     */
    private final static class TestMessageSelector implements MessageSelector {
        private long _testMessageKey;
        private long _timeout; // absolute expiration time

        public TestMessageSelector(long key, long timeout) {
            _testMessageKey = key;
            _timeout = timeout;
        }

        // one-shot selector: stop matching after the first hit
        public boolean continueMatching() { return false; }
        public long getExpiration() { return _timeout; }

        public boolean isMatch(I2NPMessage inMsg) {
            if (inMsg.getType() == DeliveryStatusMessage.MESSAGE_TYPE) {
                return ((DeliveryStatusMessage)inMsg).getMessageId() == _testMessageKey;
            } else {
                return false;
            }
        }
    }

    /**
     * On reply, fire off the specified job
     *
     */
    private static final class JobReplyJob extends JobImpl implements ReplyJob {
        private Job _job;
        private PublicKey _target;
        private long _msgId;
        private Set<SessionTag> _sessionTagsDelivered;
        private SessionKey _keyDelivered;

        public JobReplyJob(RouterContext ctx, Job job, PublicKey target, long msgId,
                           SessionKey keyUsed, Set<SessionTag> tagsDelivered) {
            super(ctx);
            _job = job;
            _target = target;
            _msgId = msgId;
            _keyDelivered = keyUsed;
            _sessionTagsDelivered = tagsDelivered;
        }

        public String getName() { return "Reply To Test Message Received"; }

        public void runJob() {
            // the reply proves the session tags arrived, so register them
            // before handing control to the wrapped job
            if ( (_keyDelivered != null) &&
                 (_sessionTagsDelivered != null) &&
                 (_sessionTagsDelivered.size() > 0) )
                getContext().sessionKeyManager().tagsDelivered(_target, _keyDelivered, _sessionTagsDelivered);

            getContext().jobQueue().addJob(_job);
        }

        public void setMessage(I2NPMessage message) {
            // ignored, this is just a ping
        }
    }
}
package de.katzenpapst.amunra.mob.entity;

import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import micdoodle8.mods.galacticraft.api.entity.IAntiGrav;
import micdoodle8.mods.galacticraft.api.vector.Vector3;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityFlying;
import net.minecraft.entity.monster.IMob;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.util.DamageSource;
import net.minecraft.util.MathHelper;
import net.minecraft.world.EnumDifficulty;
import net.minecraft.world.World;

/**
 * Base class for hostile flying mobs. The movement/targeting/attack loop in
 * {@link #updateEntityActionState()} is adapted from vanilla EntityGhast
 * (waypoint wandering, player targeting, a charge-up attack counter mirrored
 * to the client via dataWatcher byte 16). Subclasses supply vision range,
 * the firing sound, and the actual attack.
 */
public abstract class EntityFlyingMob extends EntityFlying implements IMob, IAntiGrav {
    public int courseChangeCooldown;
    // current waypoint the mob drifts toward
    public double waypointX;
    public double waypointY;
    public double waypointZ;
    private Entity targetedEntity;
    /** Cooldown time between target loss and new target aquirement. */
    private int aggroCooldown;
    public int prevAttackCounter;
    public int attackCounter;
    /** The explosion radius of spawned fireballs. */
    protected int explosionStrength = 1;
    // preferred hover distance from the target when picking waypoints
    protected static final float distanceToKeep = 10.0F;

    public EntityFlyingMob(World world) {
        super(world);
    }

    @SideOnly(Side.CLIENT)
    public boolean useShootingTexture() {
        // copied over from the ghast. WTF is this?
        // oh, this seems to be where the renderer decides which texture to use
        // see net.minecraft.client.renderer.entity.RenderGhast.getEntityTexture(EntityGhast)
        // byte 16 is the "attacking" flag maintained in updateEntityActionState()
        return this.dataWatcher.getWatchableObjectByte(16) != 0;
    }

    /** How far this mob can see/acquire a player target. */
    abstract protected float getVisionDistance();

    /**
     * Called when the entity is attacked.
     */
    @Override
    public boolean attackEntityFrom(DamageSource dmgSrc, float amount) {
        if (this.isEntityInvulnerable())
        {
            return false;
        }
        else
        {
            return super.attackEntityFrom(dmgSrc, amount);
        }
    }

    @Override
    protected void entityInit() {
        super.entityInit();
        // byte 16: 1 while charging an attack, 0 otherwise (read client-side)
        this.dataWatcher.addObject(16, Byte.valueOf((byte)0));
    }

    /** Performs the actual attack toward the target along the given direction. */
    abstract protected void performAttack(Entity target, double accelX, double accelY, double accelZ);

    /**
     * Picks the next waypoint: a point distanceToKeep away from the current
     * target if one exists and the course looks traversable, otherwise a
     * random offset within +/-16 blocks of the current position.
     */
    protected void findWaypoint() {
        if(this.targetedEntity != null) {
            // attempt to move closer to the target
            Vector3 targetVec = new Vector3(targetedEntity);
            Vector3 myPos = new Vector3(this);
            Vector3 thisToTarget = myPos.difference(targetVec);
            // I don't get around sqrt'ing here
            double distance = thisToTarget.getMagnitude();
            thisToTarget.scale(distanceToKeep / distance); // scale the vector to distanceToKeep
            // NOTE(review): presumably translate() offsets targetVec by
            // thisToTarget (and may mutate targetVec) — confirm Vector3 semantics
            myPos = targetVec.translate(thisToTarget);
            // this should be correct now...
            // NOTE(review): isCourseTraversable ignores its coordinate params
            // (see below), so this call actually tests the CURRENT waypoint,
            // not the candidate myPos — confirm intent
            if(this.isCourseTraversable(myPos.x, myPos.y, myPos.z, distance)) {
                this.waypointX = myPos.x;
                this.waypointY = myPos.y;
                this.waypointZ = myPos.z;
                return;
            }
        }
        // otherwise, get a random point
        this.waypointX = this.posX + (double)((this.rand.nextFloat() * 2.0F - 1.0F) * 16.0F);
        this.waypointY = this.posY + (double)((this.rand.nextFloat() * 2.0F - 1.0F) * 16.0F);
        this.waypointZ = this.posZ + (double)((this.rand.nextFloat() * 2.0F - 1.0F) * 16.0F);
    }

    /**
     * Main per-tick AI: despawn handling, waypoint drift, target acquisition,
     * charge-up attack sequence, and syncing the attack flag to the client.
     */
    @Override
    protected void updateEntityActionState() {
        // hostile mobs may not exist on peaceful
        if (!this.worldObj.isRemote && this.worldObj.difficultySetting == EnumDifficulty.PEACEFUL) {
            this.setDead();
        }

        this.despawnEntity();
        this.prevAttackCounter = this.attackCounter;
        double deltaX = this.waypointX - this.posX;
        double deltaY = this.waypointY - this.posY;
        double deltaZ = this.waypointZ - this.posZ;
        double distanceSq = deltaX * deltaX + deltaY * deltaY + deltaZ * deltaZ;

        if (this.targetedEntity != null && this.targetedEntity.isDead) {
            this.targetedEntity = null;
        }

        // re-pick when the waypoint is reached (<1 block) or too far (>60 blocks)
        if (distanceSq < 1.0D || distanceSq > 3600.0D) {
            // find next waypoint?
            findWaypoint();
        }

        if (this.courseChangeCooldown-- <= 0) {
            this.courseChangeCooldown += this.rand.nextInt(5) + 2;
            // NOTE: from here on distanceSq actually holds the distance (sqrt'd),
            // mirroring the vanilla ghast code this was copied from
            distanceSq = (double)MathHelper.sqrt_double(distanceSq);

            if (this.isCourseTraversable(this.waypointX, this.waypointY, this.waypointZ, distanceSq)) {
                // accelerate along the normalized direction to the waypoint
                this.motionX += deltaX / distanceSq * 0.1D;
                this.motionY += deltaY / distanceSq * 0.1D;
                this.motionZ += deltaZ / distanceSq * 0.1D;
            }
            else {
                // blocked: park at the current position
                this.waypointX = this.posX;
                this.waypointY = this.posY;
                this.waypointZ = this.posZ;
            }
        }

        if (this.targetedEntity == null || this.aggroCooldown-- <= 0) {
            // target locked?
            this.targetedEntity = this.worldObj.getClosestVulnerablePlayerToEntity(this, getVisionDistance());

            if (this.targetedEntity != null) {
                this.aggroCooldown = 20;
            }
        }

        double maxTargetDistance = 64.0D;

        if (this.targetedEntity != null && this.targetedEntity.getDistanceSqToEntity(this) < maxTargetDistance * maxTargetDistance) {
            this.faceEntity(this.targetedEntity, 10.0F, (float)this.getVerticalFaceSpeed());
            double accelX = this.targetedEntity.posX - this.posX;
            double accelY = this.targetedEntity.boundingBox.minY + (double)(this.targetedEntity.height / 2.0F) - (this.posY + (double)(this.height / 2.0F));
            double accelZ = this.targetedEntity.posZ - this.posZ;
            this.renderYawOffset = this.rotationYaw = -((float)Math.atan2(accelX, accelZ)) * 180.0F / (float)Math.PI;

            if (this.canEntityBeSeen(this.targetedEntity)) {
                if (this.attackCounter == 10) {
                    // WTF?
                    // 1007 might be some sort of an ID
                    // playSoundAtEntity(entity, GalacticraftCore.TEXTURE_PREFIX + "ambience.scaryscape", 9.0F, 1.4F)
                    // this.worldObj.playSoundAtEntity(this, AmunRa.TEXTUREPREFIX + "mob.sentryblock.fire", 1.0F, 1.0F);
                    // this.worldObj.playAuxSFXAtEntity((EntityPlayer)null, 1007, (int)this.posX, (int)this.posY, (int)this.posZ, 0);
                    // charging?
                }

                ++this.attackCounter;

                // counter reaches 20: fire, then reset to -40 to enforce a
                // cooldown before the next charge-up
                if (this.attackCounter == 20) {
                    // another one. WTF is this?
                    //this.worldObj.playSoundAtEntity(this, AmunRa.TEXTUREPREFIX + "mob.sentryblock.fire", getSoundVolume(), 1.0F);
                    this.worldObj.playSoundAtEntity(this, getFiringSound(), getSoundVolume(), 1.0F);
                    // this.worldObj.playAuxSFXAtEntity((EntityPlayer)null, 1008, (int)this.posX, (int)this.posY, (int)this.posZ, 0);

                    // this seems to be an actual attack
                    performAttack(targetedEntity, accelX, accelY, accelZ);
                    // actual attack end
                    this.attackCounter = -40;
                }
            }
            else if (this.attackCounter > 0) {
                --this.attackCounter;
            }
        }
        else {
            // no (close enough) target: face the direction of travel
            this.rotationPitch = this.defaultPitch;
            this.renderYawOffset = this.rotationYaw = -((float)Math.atan2(this.motionX, this.motionZ)) * 180.0F / (float)Math.PI;

            if (this.attackCounter > 0) {
                --this.attackCounter;
            }
        }

        if (!this.worldObj.isRemote) {
            // mirror "charging" state (counter past halfway) to the client
            byte b1 = this.dataWatcher.getWatchableObjectByte(16);
            byte b0 = (byte)(this.attackCounter > 10 ? 1 : 0);

            if (b1 != b0) {
                this.dataWatcher.updateObject(16, Byte.valueOf(b0));
            }
        }
    }

    /**
     * True if the ghast has an unobstructed line of travel to the waypoint.
     *
     * NOTE(review): the three coordinate parameters are ignored — the sweep
     * direction is derived from this.waypoint* instead. Callers passing
     * candidate coordinates (see findWaypoint) therefore do not get the
     * candidate checked; confirm whether this matches vanilla intent.
     */
    protected boolean isCourseTraversable(double p_70790_1_, double p_70790_3_, double p_70790_5_, double distance)
    {
        double relDeltaX = (this.waypointX - this.posX) / distance;
        double relDeltaY = (this.waypointY - this.posY) / distance;
        double relDeltaZ = (this.waypointZ - this.posZ) / distance;
        AxisAlignedBB axisalignedbb = this.boundingBox.copy();

        // sweep the bounding box one unit step at a time along the course
        for (int i = 1; (double)i < distance; ++i)
        {
            axisalignedbb.offset(relDeltaX, relDeltaY, relDeltaZ);

            if (!this.worldObj.getCollidingBoundingBoxes(this, axisalignedbb).isEmpty())
            {
                return false;
            }
        }

        return true;
    }

    /**
     * Returns the volume for the sounds this mob makes.
     */
    @Override
    protected float getSoundVolume() {
        return 1.0F;
    }

    /**
     * Checks if the entity's current position is a valid location to spawn this entity.
     */
    @Override
    public boolean getCanSpawnHere()
    {
        // 1-in-20 thinning on top of the usual checks; never on peaceful
        return this.rand.nextInt(20) == 0 && super.getCanSpawnHere() && this.worldObj.difficultySetting != EnumDifficulty.PEACEFUL;
    }

    /**
     * Will return how many at most can spawn in a chunk at once.
     */
    @Override
    public int getMaxSpawnedInChunk()
    {
        return 1;
    }

    /** Sound resource played when the attack fires. */
    abstract public String getFiringSound();

    /**
     * (abstract) Protected helper method to write subclass entity data to NBT.
     */
    @Override
    public void writeEntityToNBT(NBTTagCompound nbt)
    {
        super.writeEntityToNBT(nbt);
        nbt.setInteger("ExplosionPower", this.explosionStrength);
    }

    /**
     * (abstract) Protected helper method to read subclass entity data from NBT.
     */
    @Override
    public void readEntityFromNBT(NBTTagCompound nbt)
    {
        super.readEntityFromNBT(nbt);

        // 99 = any numeric NBT tag type
        if (nbt.hasKey("ExplosionPower", 99))
        {
            this.explosionStrength = nbt.getInteger("ExplosionPower");
        }
    }
}
package org.motechproject.outbox.api;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import org.mockito.InjectMocks;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.runners.MockitoJUnitRunner;
import org.motechproject.event.MotechEvent;
import org.motechproject.event.listener.EventRelay;
import org.motechproject.outbox.api.contract.SortKey;
import org.motechproject.outbox.api.domain.OutboundVoiceMessage;
import org.motechproject.outbox.api.domain.OutboundVoiceMessageStatus;
import org.motechproject.outbox.api.repository.AllOutboundVoiceMessages;
import org.motechproject.outbox.api.service.impl.VoiceOutboxServiceImpl;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNull;
import static junit.framework.Assert.assertTrue;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@link VoiceOutboxServiceImpl}, with the repository and event
 * relay mocked.
 *
 * Review cleanup applied to the original version of this class:
 * <ul>
 *   <li>Removed {@code verify(...)} statements that followed a call expected to
 *       throw {@code IllegalArgumentException} — such statements are never
 *       reached in a {@code @Test(expected = ...)} method, and several of them
 *       also mixed Mockito matchers with raw argument values, which would have
 *       thrown {@code InvalidUseOfMatchersException} had they ever run.</li>
 *   <li>Removed a {@code when(...)} stubbing that appeared AFTER the invocation
 *       under test in {@code testAddMessage} (stubbing after the fact has no
 *       effect, and it stubbed a call the test never makes).</li>
 *   <li>Removed a leftover debug {@code System.out.println} from
 *       {@code testSaveMessage}.</li>
 * </ul>
 */
@RunWith(MockitoJUnitRunner.class)
public class VoiceOutboxServiceTest {

    private static final int MAX_MESSAGES_PENDING = 15;

    @InjectMocks
    VoiceOutboxServiceImpl voiceOutboxService = new VoiceOutboxServiceImpl();

    @Mock
    AllOutboundVoiceMessages allOutboundVoiceMessages;

    @Mock
    EventRelay eventRelay;

    @Before
    public void initMocks() {
        MockitoAnnotations.initMocks(this);
        voiceOutboxService.setNumDaysKeepSavedMessages(10);
        voiceOutboxService.setMaxNumberOfPendingMessages(MAX_MESSAGES_PENDING);
    }

    /** A valid message is handed straight to the repository. */
    @Test
    public void testAddMessage() {
        String externalId = "pid";
        OutboundVoiceMessage outboundVoiceMessage = new OutboundVoiceMessage();
        outboundVoiceMessage.setExternalId(externalId);

        voiceOutboxService.addMessage(outboundVoiceMessage);

        verify(allOutboundVoiceMessages).add(outboundVoiceMessage);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testAddMessageNullMessage() {
        voiceOutboxService.addMessage(null);
    }

    /** Default getNextMessage sorts by creation time and returns the first. */
    @Test
    public void testGetNextMessageWithCreationTimeSortKey() {
        //given
        String externalId = "pid";
        OutboundVoiceMessage outboundVoiceMessage1 = new OutboundVoiceMessage();
        OutboundVoiceMessage outboundVoiceMessage2 = new OutboundVoiceMessage();

        List<OutboundVoiceMessage> pendingVoiceMessages = new ArrayList<OutboundVoiceMessage>();
        pendingVoiceMessages.add(outboundVoiceMessage1);
        pendingVoiceMessages.add(outboundVoiceMessage2);

        when(allOutboundVoiceMessages.getMessages(externalId, OutboundVoiceMessageStatus.PENDING, SortKey.CreationTime)).thenReturn(pendingVoiceMessages);

        //when
        OutboundVoiceMessage nextMessage = voiceOutboxService.getNextMessage(externalId, OutboundVoiceMessageStatus.PENDING);

        //then
        verify(allOutboundVoiceMessages, times(1)).getMessages(externalId, OutboundVoiceMessageStatus.PENDING, SortKey.CreationTime);
        assertEquals(outboundVoiceMessage1, nextMessage);
    }

    /** getNextMessage with an explicit sort key forwards that key to the repository. */
    @Test
    public void testGetNextMessageWithDSequenceNumberSortKey() {
        //given
        String externalId = "pid";
        OutboundVoiceMessage outboundVoiceMessage1 = new OutboundVoiceMessage();
        OutboundVoiceMessage outboundVoiceMessage2 = new OutboundVoiceMessage();

        List<OutboundVoiceMessage> pendingVoiceMessages = new ArrayList<OutboundVoiceMessage>();
        pendingVoiceMessages.add(outboundVoiceMessage1);
        pendingVoiceMessages.add(outboundVoiceMessage2);

        when(allOutboundVoiceMessages.getMessages(externalId, OutboundVoiceMessageStatus.PENDING, SortKey.SequenceNumber)).thenReturn(pendingVoiceMessages);

        //when
        OutboundVoiceMessage nextMessage = voiceOutboxService.getNextMessage(externalId, OutboundVoiceMessageStatus.PENDING, SortKey.SequenceNumber);

        //then
        verify(allOutboundVoiceMessages, times(1)).getMessages(externalId, OutboundVoiceMessageStatus.PENDING, SortKey.SequenceNumber);
        assertEquals(outboundVoiceMessage1, nextMessage);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetNextMessageNullExternalId() {
        voiceOutboxService.getNextMessage(null, OutboundVoiceMessageStatus.PENDING);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetNextMessageNullStatus() {
        voiceOutboxService.getNextMessage("extid", null);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetNextMessageNullSortKey() {
        voiceOutboxService.getNextMessage("extid", OutboundVoiceMessageStatus.PENDING, null);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetNextMessageEmptyExternalId() {
        voiceOutboxService.getNextMessage("", OutboundVoiceMessageStatus.PENDING);
    }

    /** No pending messages means getNextMessage yields null. */
    @Test
    public void testGetNextMessageNoMessages() {
        String externalId = "pid";

        when(allOutboundVoiceMessages.getMessages(externalId, OutboundVoiceMessageStatus.PENDING, SortKey.CreationTime)).thenReturn(new ArrayList<OutboundVoiceMessage>());

        assertNull(voiceOutboxService.getNextMessage(externalId, OutboundVoiceMessageStatus.PENDING));
    }

    @Test
    public void testGetMessageById() {
        String messageId = "msgId";
        OutboundVoiceMessage message = new OutboundVoiceMessage();

        when(allOutboundVoiceMessages.get(messageId)).thenReturn(message);

        assertEquals(message, voiceOutboxService.getMessageById(messageId));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetMessageByIdNullId() {
        voiceOutboxService.getMessageById(null);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetMessageByIdEmptyId() {
        voiceOutboxService.getMessageById("");
    }

    /** Removal looks the message up first, then soft-deletes it. */
    @Test
    public void testRemoveMessage() {
        String messageId = "msgId";
        OutboundVoiceMessage message = new OutboundVoiceMessage();

        when(allOutboundVoiceMessages.get(messageId)).thenReturn(message);

        voiceOutboxService.removeMessage(messageId);

        verify(allOutboundVoiceMessages, times(1)).safeRemove(message);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testRemoveMessageNullMessageId() {
        voiceOutboxService.removeMessage(null);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testRemoveMessageEmptyMessageId() {
        voiceOutboxService.removeMessage("");
    }

    @Test
    public void testRemoveMessageMessageNotExist() {
        //TODO - implement
    }

    /** Setting a status re-persists the (updated) message. */
    @Test
    public void testSetMessageStatus() {
        String messageId = "msgId";

        when(allOutboundVoiceMessages.get(messageId)).thenReturn(new OutboundVoiceMessage());

        voiceOutboxService.setMessageStatus(messageId, OutboundVoiceMessageStatus.PLAYED);

        verify(allOutboundVoiceMessages).update(Matchers.<OutboundVoiceMessage>anyObject());
    }

    @Test(expected = IllegalArgumentException.class)
    public void testSetMessageStatusNullMessageId() {
        voiceOutboxService.setMessageStatus(null, OutboundVoiceMessageStatus.PLAYED);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testSetMessageStatusEmptyMessageId() {
        voiceOutboxService.setMessageStatus("", OutboundVoiceMessageStatus.PLAYED);
    }

    @Test
    public void testGetNumberOfMessages() {
        String externalId = "pid";

        when(allOutboundVoiceMessages.getMessagesCount(externalId, OutboundVoiceMessageStatus.PENDING)).thenReturn(2);

        assertEquals(2, voiceOutboxService.getNumberOfMessages(externalId, OutboundVoiceMessageStatus.PENDING));
    }

    @Test
    public void testGetNumberPendingMessagesNoMessages() {
        String externalId = "pid";

        when(allOutboundVoiceMessages.getMessages(externalId, OutboundVoiceMessageStatus.PENDING, SortKey.CreationTime)).thenReturn(new ArrayList<OutboundVoiceMessage>());

        assertEquals(0, voiceOutboxService.getNumberOfMessages(externalId, OutboundVoiceMessageStatus.PENDING));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetNumberPendingMessagesNullExternalId() {
        voiceOutboxService.getNumberOfMessages(null, OutboundVoiceMessageStatus.PENDING);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetNumberPendingMessagesNullMessageStatus() {
        String externalId = "external id";
        voiceOutboxService.getNumberOfMessages(externalId, null);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetNumberPendingMessagesEmptyExternalId() {
        voiceOutboxService.getNumberOfMessages("", OutboundVoiceMessageStatus.PENDING);
    }

    @Test(expected = IllegalArgumentException.class)
    public void externalIdShouldNotBeNull_getNumberOfMessages() {
        voiceOutboxService.getNumberOfMessages(null, OutboundVoiceMessageStatus.SAVED, "");
    }

    @Test(expected = IllegalArgumentException.class)
    public void statusShouldNotBeNull_getNumberOfMessages() {
        voiceOutboxService.getNumberOfMessages("ext_id", null, "");
    }

    /** The voice-message-type overload forwards all three arguments. */
    @Test
    public void getNumberOfMessages() {
        voiceOutboxService.getNumberOfMessages("ext_id", OutboundVoiceMessageStatus.SAVED, "message_type");
        verify(allOutboundVoiceMessages, times(1)).getMessagesCount("ext_id", OutboundVoiceMessageStatus.SAVED, "message_type");
    }

    /**
     * saveMessage marks the message SAVED and pushes its expiration date out
     * by the configured number of days (1s tolerance between the two clock
     * reads in this test).
     */
    @Test
    public void testSaveMessage() {
        String messageId = "msgId";
        OutboundVoiceMessage message = new OutboundVoiceMessage();

        when(allOutboundVoiceMessages.get(messageId)).thenReturn(message);

        Calendar calendar = Calendar.getInstance();
        calendar.add(Calendar.DATE, voiceOutboxService.getNumDaysKeepSavedMessages());

        voiceOutboxService.saveMessage(messageId);

        verify(allOutboundVoiceMessages).update(message);
        assertEquals(OutboundVoiceMessageStatus.SAVED, message.getStatus());
        assertTrue(message.getExpirationDate().getTime() - calendar.getTime().getTime() < 1000);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testSaveMessageNoMessageId() {
        voiceOutboxService.saveMessage(null);
    }

    /**
     * Reaching exactly the pending-message limit still stores the message but
     * also raises the max-pending event carrying the external id.
     */
    @Test
    public void testMaxMessagesReached() {
        String externalId = "001";

        OutboundVoiceMessage outboundVoiceMessage = new OutboundVoiceMessage();
        outboundVoiceMessage.setExternalId(externalId);

        when(allOutboundVoiceMessages.getMessagesCount(externalId, OutboundVoiceMessageStatus.PENDING)).thenReturn(MAX_MESSAGES_PENDING);

        voiceOutboxService.addMessage(outboundVoiceMessage);

        verify(allOutboundVoiceMessages).add(outboundVoiceMessage);

        ArgumentCaptor<MotechEvent> argument = ArgumentCaptor.forClass(MotechEvent.class);
        verify(eventRelay).sendEventMessage(argument.capture());
        assertEquals(argument.getValue().getSubject(), EventKeys.OUTBOX_MAX_PENDING_MESSAGES_EVENT_SUBJECT);
        assertEquals(EventKeys.getExternalID(argument.getValue()), externalId);
    }

    /**
     * Below and above the limit no event is raised.
     * NOTE(review): per this test the service apparently fires the event only
     * when the pending count EQUALS the configured max — confirm against
     * VoiceOutboxServiceImpl that "more than max" is intentionally silent.
     */
    @Test
    public void testMaxPendingMessagesMoreAndLess() {
        String externalId = "001";

        OutboundVoiceMessage outboundVoiceMessage = new OutboundVoiceMessage();
        outboundVoiceMessage.setExternalId(externalId);

        // LESS
        when(allOutboundVoiceMessages.getMessagesCount(externalId, OutboundVoiceMessageStatus.PENDING)).thenReturn(MAX_MESSAGES_PENDING - 1);
        voiceOutboxService.addMessage(outboundVoiceMessage);

        // MORE
        when(allOutboundVoiceMessages.getMessagesCount(externalId, OutboundVoiceMessageStatus.PENDING)).thenReturn(MAX_MESSAGES_PENDING + 1);
        voiceOutboxService.addMessage(outboundVoiceMessage);

        verify(allOutboundVoiceMessages, times(2)).add(outboundVoiceMessage);
        verify(eventRelay, never()).sendEventMessage(any(MotechEvent.class));
    }

    /** nextMessage with no current message just returns the first pending one. */
    @Test
    public void getTheFirstMessage() {
        OutboundVoiceMessage outboundVoiceMessage = new OutboundVoiceMessage();
        String externalId = "123";

        when(allOutboundVoiceMessages.getMessages(externalId, OutboundVoiceMessageStatus.PENDING, SortKey.CreationTime)).thenReturn(Arrays.asList(outboundVoiceMessage));

        OutboundVoiceMessage nextMessage = voiceOutboxService.nextMessage(null, externalId);

        assertEquals(outboundVoiceMessage, nextMessage);
    }

    /**
     * nextMessage with a current message first marks it PLAYED and persists it,
     * and only then fetches the next pending message (order enforced).
     */
    @Test
    public void markMessageAsRead() {
        String externalId = "123";
        OutboundVoiceMessage currentMessage = mock(OutboundVoiceMessage.class);
        String currentMessageId = "1";
        OutboundVoiceMessage outboundVoiceMessage = new OutboundVoiceMessage();
        outboundVoiceMessage.setId("2");

        when(allOutboundVoiceMessages.get(currentMessageId)).thenReturn(currentMessage);
        when(allOutboundVoiceMessages.getMessages(externalId, OutboundVoiceMessageStatus.PENDING, SortKey.CreationTime)).thenReturn(Arrays.asList(outboundVoiceMessage));

        OutboundVoiceMessage nextMessage = voiceOutboxService.nextMessage(currentMessageId, externalId);

        InOrder inOrder = inOrder(currentMessage, allOutboundVoiceMessages);
        assertEquals(outboundVoiceMessage, nextMessage);
        inOrder.verify(currentMessage).setStatus(OutboundVoiceMessageStatus.PLAYED);
        inOrder.verify(allOutboundVoiceMessages).update(currentMessage);
        inOrder.verify(allOutboundVoiceMessages).getMessages(externalId, OutboundVoiceMessageStatus.PENDING, SortKey.CreationTime);
    }

    @Test
    public void getAllMessagesByExternalIdAndStatusSortedBygivenSortKey() {
        String externalID = "ext_id";
        ArrayList<OutboundVoiceMessage> expectedResult = new ArrayList<OutboundVoiceMessage>();
        OutboundVoiceMessageStatus status = OutboundVoiceMessageStatus.PENDING;
        SortKey sequenceNumber = SortKey.SequenceNumber;

        when(allOutboundVoiceMessages.getMessages(externalID, status, sequenceNumber)).thenReturn(expectedResult);

        List<OutboundVoiceMessage> actualResult = voiceOutboxService.getMessages(externalID, status, sequenceNumber);

        assertEquals(expectedResult, actualResult);
        verify(allOutboundVoiceMessages, times(1)).getMessages(externalID, status, sequenceNumber);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cloudera.llama.am;

import com.cloudera.llama.am.api.LlamaAMEvent;
import com.cloudera.llama.am.api.PlacedReservation;
import com.cloudera.llama.am.api.PlacedResource;
import com.cloudera.llama.am.api.Reservation;
import com.cloudera.llama.am.api.Resource;
import com.cloudera.llama.am.api.TestUtils;
import com.cloudera.llama.am.impl.LlamaAMEventImpl;
import com.cloudera.llama.am.impl.PlacedReservationImpl;
import com.cloudera.llama.am.impl.PlacedResourceImpl;
import com.cloudera.llama.server.ClientInfo;
import com.cloudera.llama.util.UUID;
import com.cloudera.llama.util.VersionInfo;
import junit.framework.Assert;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.Test;

import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Tests for {@link RestData}: feeds reservation lifecycle events into a
 * {@code RestData} instance and verifies the JSON documents it writes for a
 * single reservation, per queue, per client handle, per node, and for the
 * "all" and "summary" views.
 */
public class TestRestData {

  /**
   * Parses a JSON string into a raw {@code Map} using Jackson.
   *
   * @param s JSON text to parse.
   * @return the parsed top-level JSON object.
   * @throws Exception if the text is not valid JSON.
   */
  Map parseJson(String s) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    return mapper.readValue(s, Map.class);
  }

  /**
   * Creates a placed reservation with two PREFERRED 1-cpu/1024-mb resources
   * on nodes "h1" and "h2", owned by user "u" on the given queue, in the
   * given status.
   */
  private PlacedReservationImpl createReservation(UUID id, UUID handle,
      String queue, PlacedReservation.Status status) {
    List<Resource> rs = new ArrayList<Resource>();
    rs.add(TestUtils.createResource("h1", Resource.Locality.PREFERRED, 1, 1024));
    rs.add(TestUtils.createResource("h2", Resource.Locality.PREFERRED, 1, 1024));
    Reservation r = TestUtils.createReservation(handle, "u", queue, rs, true);
    PlacedReservationImpl pr = new PlacedReservationImpl(id, r);
    pr.setStatus(status);
    return pr;
  }

  /**
   * Asserts that writing the reservation as JSON either succeeds with the
   * expected status (when {@code exists} is true) or throws
   * {@link RestData.NotFoundException} (when {@code exists} is false).
   */
  private void assertReservationStatus(RestData restData, UUID id,
      PlacedReservation.Status status, boolean exists) throws Exception {
    try {
      StringWriter writer = new StringWriter();
      restData.writeReservationAsJson(id, writer);
      writer.close();
      // A successful write when the reservation should be absent is a failure.
      Assert.assertTrue(exists);
      Map map = parseJson(writer.toString());
      Assert.assertEquals(2, map.size());
      Assert.assertTrue(map.containsKey(RestData.REST_VERSION_KEY));
      Assert.assertEquals(RestData.REST_VERSION_VALUE,
          map.get(RestData.REST_VERSION_KEY));
      map = (Map) map.get(RestData.RESERVATION_DATA);
      Assert.assertEquals(status.toString(), map.get("status"));
    } catch (RestData.NotFoundException ex) {
      Assert.assertFalse(exists);
    }
  }

  /**
   * Asserts the {@code hasBeenBackedOff} flag of the reservation's JSON
   * document. The reservation must exist.
   */
  private void assertReservationBackedOffFlag(RestData restData, UUID id,
      boolean backedOff) throws Exception {
    StringWriter writer = new StringWriter();
    restData.writeReservationAsJson(id, writer);
    writer.close();
    Map map = parseJson(writer.toString());
    Assert.assertEquals(2, map.size());
    Assert.assertTrue(map.containsKey(RestData.REST_VERSION_KEY));
    Assert.assertEquals(RestData.REST_VERSION_VALUE,
        map.get(RestData.REST_VERSION_KEY));
    map = (Map) map.get(RestData.RESERVATION_DATA);
    Assert.assertEquals(backedOff, map.get("hasBeenBackedOff"));
  }

  /**
   * Asserts the status of the {@code resourceIdx}-th resource inside the
   * reservation's JSON document. The reservation must exist.
   */
  private void assertResourceStatus(RestData restData, UUID id,
      int resourceIdx, PlacedResource.Status status) throws Exception {
    StringWriter writer = new StringWriter();
    restData.writeReservationAsJson(id, writer);
    writer.close();
    Map map = parseJson(writer.toString());
    Assert.assertEquals(2, map.size());
    Assert.assertTrue(map.containsKey(RestData.REST_VERSION_KEY));
    Assert.assertEquals(RestData.REST_VERSION_VALUE,
        map.get(RestData.REST_VERSION_KEY));
    map = (Map) map.get(RestData.RESERVATION_DATA);
    map = (Map) ((List) map.get("resources")).get(resourceIdx);
    Assert.assertEquals(status.toString(), map.get("status"));
  }

  /**
   * Asserts whether the queue JSON view lists the given reservation.
   * A {@link RestData.NotFoundException} is acceptable only when
   * {@code exists} is false.
   */
  private void assertReservationQueue(RestData restData, UUID id, String queue,
      boolean exists) throws Exception {
    try {
      StringWriter writer = new StringWriter();
      restData.writeQueueReservationsAsJson(queue, writer);
      writer.close();
      Assert.assertTrue(exists);
      Map map = parseJson(writer.toString());
      List reservations = (List) map.get(RestData.QUEUE_DATA);
      Assert.assertEquals(2, map.size());
      Assert.assertTrue(map.containsKey(RestData.REST_VERSION_KEY));
      Assert.assertEquals(RestData.REST_VERSION_VALUE,
          map.get(RestData.REST_VERSION_KEY));
      Assert.assertEquals(exists, !reservations.isEmpty());
      if (exists) {
        Assert.assertEquals(id.toString(),
            ((Map) reservations.get(0)).get("reservationId"));
      }
    } catch (RestData.NotFoundException ex) {
      Assert.assertFalse(exists);
    }
  }

  /**
   * Asserts whether the given client handle is known to {@code restData};
   * an unknown handle makes the write throw
   * {@link RestData.NotFoundException}.
   */
  private void assertHandle(RestData restData, UUID handle, boolean exists)
      throws Exception {
    try {
      StringWriter writer = new StringWriter();
      restData.writeHandleReservationsAsJson(handle, writer);
      writer.close();
      Assert.assertTrue(exists);
    } catch (RestData.NotFoundException ex) {
      Assert.assertFalse(exists);
    }
  }

  /**
   * Asserts whether the handle JSON view lists the given reservation.
   * The handle itself must be registered.
   */
  private void assertReservationHandle(RestData restData, UUID id, UUID handle,
      boolean exists) throws Exception {
    StringWriter writer = new StringWriter();
    restData.writeHandleReservationsAsJson(handle, writer);
    writer.close();
    Map map = parseJson(writer.toString());
    Assert.assertEquals(2, map.size());
    Assert.assertTrue(map.containsKey(RestData.REST_VERSION_KEY));
    Assert.assertEquals(RestData.REST_VERSION_VALUE,
        map.get(RestData.REST_VERSION_KEY));
    List reservations =
        (List) ((Map) map.get(RestData.HANDLE_DATA)).get(RestData.RESERVATIONS);
    Assert.assertEquals(exists, !reservations.isEmpty());
    if (exists) {
      Assert.assertEquals(id.toString(),
          ((Map) reservations.get(0)).get("reservationId"));
    }
  }

  /**
   * Asserts whether the node JSON view lists the given reservation.
   * A {@link RestData.NotFoundException} is acceptable only when
   * {@code exists} is false.
   */
  private void assertReservationNode(RestData restData, UUID id, String node,
      boolean exists) throws Exception {
    try {
      StringWriter writer = new StringWriter();
      restData.writeNodeResourcesAsJson(node, writer);
      writer.close();
      Assert.assertTrue(exists);
      Map map = parseJson(writer.toString());
      Assert.assertEquals(2, map.size());
      Assert.assertTrue(map.containsKey(RestData.REST_VERSION_KEY));
      Assert.assertEquals(RestData.REST_VERSION_VALUE,
          map.get(RestData.REST_VERSION_KEY));
      List reservations = (List) map.get(RestData.NODE_DATA);
      Assert.assertEquals(exists, !reservations.isEmpty());
      if (exists) {
        Assert.assertEquals(id.toString(),
            ((Map) reservations.get(0)).get("reservationId"));
      }
    } catch (RestData.NotFoundException ex) {
      Assert.assertFalse(exists);
    }
  }

  /** Wraps a single reservation into an event for {@code RestData.onEvent}. */
  private LlamaAMEvent createEvents(PlacedReservation pr) {
    LlamaAMEventImpl event = new LlamaAMEventImpl();
    event.addReservation(pr);
    return event;
  }

  /**
   * Drives a reservation through PENDING, BACKED_OFF, back to PENDING,
   * PARTIAL, ALLOCATED and RELEASED, checking after every transition that the
   * reservation, queue, handle and node JSON selectors reflect the state.
   */
  @Test
  public void testLifeCycleAndSelectors() throws Exception {
    RestData restData = new RestData(null);
    UUID id1 = UUID.randomUUID();
    final UUID handle1 = UUID.randomUUID();
    PlacedReservationImpl pr1 = createReservation(id1, handle1, "q1",
        PlacedReservation.Status.PENDING);

    // not there
    assertReservationStatus(restData, id1, PlacedReservation.Status.PENDING,
        false);
    assertReservationQueue(restData, id1, "q1", false);
    assertReservationNode(restData, id1, "h1", false);
    assertReservationNode(restData, id1, "h2", false);

    ClientInfo clientInfo = new ClientInfo() {
      @Override
      public UUID getClientId() {
        return UUID.randomUUID();
      }

      @Override
      public UUID getHandle() {
        return handle1;
      }

      @Override
      public String getCallbackAddress() {
        return "a:0";
      }
    };

    assertHandle(restData, handle1, false);

    // register clientInfo
    restData.onRegister(clientInfo);
    assertHandle(restData, handle1, true);
    assertReservationHandle(restData, id1, handle1, false);

    // pending
    restData.onEvent(createEvents(pr1));
    assertReservationStatus(restData, id1, PlacedReservation.Status.PENDING,
        true);
    assertReservationBackedOffFlag(restData, id1, false);
    assertResourceStatus(restData, id1, 0, PlacedResource.Status.PENDING);
    assertResourceStatus(restData, id1, 1, PlacedResource.Status.PENDING);
    assertReservationQueue(restData, id1, "q1", true);
    assertReservationHandle(restData, id1, handle1, true);
    assertReservationNode(restData, id1, "h1", true);
    assertReservationNode(restData, id1, "h2", true);

    // backed off
    pr1.setStatus(PlacedReservation.Status.BACKED_OFF);
    restData.onEvent(createEvents(pr1));
    assertReservationStatus(restData, id1, PlacedReservation.Status.BACKED_OFF,
        true);
    assertReservationBackedOffFlag(restData, id1, true);
    assertResourceStatus(restData, id1, 0, PlacedResource.Status.PENDING);
    assertResourceStatus(restData, id1, 1, PlacedResource.Status.PENDING);
    assertReservationQueue(restData, id1, "q1", true);
    assertReservationHandle(restData, id1, handle1, true);
    assertReservationNode(restData, id1, "h1", true);
    assertReservationNode(restData, id1, "h2", true);

    // pending again; the backed-off flag stays set once tripped
    pr1.setStatus(PlacedReservation.Status.PENDING);
    restData.onEvent(createEvents(pr1));
    assertReservationStatus(restData, id1, PlacedReservation.Status.PENDING,
        true);
    assertReservationBackedOffFlag(restData, id1, true);
    assertResourceStatus(restData, id1, 0, PlacedResource.Status.PENDING);
    assertResourceStatus(restData, id1, 1, PlacedResource.Status.PENDING);
    assertReservationQueue(restData, id1, "q1", true);
    assertReservationHandle(restData, id1, handle1, true);
    assertReservationNode(restData, id1, "h1", true);
    assertReservationNode(restData, id1, "h2", true);

    // partial: first resource allocated, second still pending
    pr1.setStatus(PlacedReservation.Status.PARTIAL);
    ((PlacedResourceImpl) pr1.getResources().get(0)).setAllocationInfo(
        "h1", 2, 2024);
    restData.onEvent(createEvents(pr1));
    assertReservationStatus(restData, id1, PlacedReservation.Status.PARTIAL,
        true);
    assertReservationBackedOffFlag(restData, id1, true);
    assertResourceStatus(restData, id1, 0, PlacedResource.Status.ALLOCATED);
    assertResourceStatus(restData, id1, 1, PlacedResource.Status.PENDING);
    assertReservationQueue(restData, id1, "q1", true);
    assertReservationHandle(restData, id1, handle1, true);
    assertReservationNode(restData, id1, "h1", true);
    assertReservationNode(restData, id1, "h2", true);

    // allocated: second resource lands on "h3" instead of its requested "h2"
    pr1.setStatus(PlacedReservation.Status.ALLOCATED);
    ((PlacedResourceImpl) pr1.getResources().get(0)).setAllocationInfo(
        "h1", 2, 2024);
    ((PlacedResourceImpl) pr1.getResources().get(1)).setAllocationInfo(
        "h3", 3, 3036);
    restData.onEvent(createEvents(pr1));
    assertReservationStatus(restData, id1, PlacedReservation.Status.ALLOCATED,
        true);
    assertReservationBackedOffFlag(restData, id1, true);
    assertResourceStatus(restData, id1, 0, PlacedResource.Status.ALLOCATED);
    assertResourceStatus(restData, id1, 1, PlacedResource.Status.ALLOCATED);
    assertReservationQueue(restData, id1, "q1", true);
    assertReservationHandle(restData, id1, handle1, true);
    assertReservationNode(restData, id1, "h1", true);
    assertReservationNode(restData, id1, "h3", true);

    // ended: reservation disappears from every selector
    pr1.setStatus(PlacedReservation.Status.RELEASED);
    restData.onEvent(createEvents(pr1));
    assertReservationStatus(restData, id1, null, false);
    assertReservationQueue(restData, id1, "q1", false);
    assertReservationHandle(restData, id1, handle1, false);
    assertReservationNode(restData, id1, "h1", false);
    assertReservationNode(restData, id1, "h3", false);

    // unregister handle
    restData.onUnregister(clientInfo);
    assertHandle(restData, handle1, false);
  }

  /** Bean with no properties, used to exercise JSON serialization of it. */
  public class EmptyBean {
    public EmptyBean() {}
  }

  /** Serializing a bean with no properties must not throw. */
  @Test
  public void testEmptyBean() throws Exception {
    RestData restData = new RestData(null);
    StringWriter writer = new StringWriter();
    EmptyBean obj = new EmptyBean();
    restData.writeAsJson("Test", obj, writer);
    writer.close();
    // Test passes if there is no exception thrown.
  }

  /**
   * Verifies the "all" JSON document: version info, reservations map, client
   * infos, and the queue/handle/node cross-reference maps.
   */
  @Test
  public void testAll() throws Exception {
    RestData restData = new RestData(null);
    UUID id1 = UUID.randomUUID();
    final UUID handle1 = UUID.randomUUID();
    PlacedReservationImpl pr1 = createReservation(id1, handle1, "q1",
        PlacedReservation.Status.PENDING);

    ClientInfo clientInfo = new ClientInfo() {
      @Override
      public UUID getClientId() {
        return UUID.randomUUID();
      }

      @Override
      public UUID getHandle() {
        return handle1;
      }

      @Override
      public String getCallbackAddress() {
        return "a:0";
      }
    };

    restData.onRegister(clientInfo);
    restData.onEvent(createEvents(pr1));

    StringWriter writer = new StringWriter();
    restData.writeAllAsJson(writer);
    writer.close();
    Map map = parseJson(writer.toString());
    Assert.assertEquals(2, map.size());
    Assert.assertTrue(map.containsKey(RestData.REST_VERSION_KEY));
    Assert.assertEquals(RestData.REST_VERSION_VALUE,
        map.get(RestData.REST_VERSION_KEY));
    Assert.assertTrue(map.containsKey(RestData.ALL_DATA));
    map = (Map) map.get(RestData.ALL_DATA);
    Assert.assertTrue(map.containsKey(RestData.VERSION_INFO_KEY));
    assertVersionInfo((Map) map.get(RestData.VERSION_INFO_KEY));
    Map map1 = (Map) map.get(RestData.RESERVATIONS);
    Assert.assertEquals(1, map1.size());
    Assert.assertTrue(map1.containsKey(id1.toString()));
    List list = (List) map.get(RestData.CLIENT_INFOS);
    Assert.assertEquals(1, list.size());
    Assert.assertEquals(handle1.toString(), ((Map) list.get(0)).get("handle"));
    map1 = (Map) map.get(RestData.QUEUES_CROSSREF);
    Assert.assertEquals(1, map1.size());
    Assert.assertTrue(map1.containsKey("q1"));
    Assert.assertEquals(id1.toString(), ((List) map1.get("q1")).get(0));
    map1 = (Map) map.get(RestData.HANDLES_CROSSREF);
    Assert.assertEquals(1, map1.size());
    Assert.assertTrue(map1.containsKey(handle1.toString()));
    Assert.assertEquals(id1.toString(),
        ((List) map1.get(handle1.toString())).get(0));
    map1 = (Map) map.get(RestData.NODES_CROSSREF);
    Assert.assertEquals(2, map1.size());
    Assert.assertTrue(map1.containsKey("h1"));
    Assert.assertEquals(id1.toString(), ((List) map1.get("h1")).get(0));
    Assert.assertTrue(map1.containsKey("h2"));
    Assert.assertEquals(id1.toString(), ((List) map1.get("h2")).get(0));

    pr1.setStatus(PlacedReservation.Status.RELEASED);
    restData.onEvent(createEvents(pr1));
    restData.onUnregister(clientInfo);
  }

  /**
   * Verifies the summary JSON document is empty before any activity,
   * reflects one reservation/client/queue and two nodes while active, and
   * returns to empty after release and unregistration.
   */
  @Test
  public void testSummary() throws Exception {
    RestData restData = new RestData(null);

    // empty summary before any registration or event
    StringWriter writer = new StringWriter();
    restData.writeSummaryAsJson(writer);
    writer.close();
    Map map = parseJson(writer.toString());
    Assert.assertTrue(map.containsKey(RestData.SUMMARY_DATA));
    map = (Map) map.get(RestData.SUMMARY_DATA);
    Assert.assertEquals(0, map.get(RestData.RESERVATIONS_COUNT_KEY));
    Assert.assertEquals(0, ((List) map.get(RestData.QUEUES_SUMMARY_KEY)).size());
    Assert.assertEquals(0,
        ((List) map.get(RestData.CLIENTS_SUMMARY_KEY)).size());
    Assert.assertEquals(0, ((List) map.get(RestData.NODES_SUMMARY_KEY)).size());

    UUID id1 = UUID.randomUUID();
    final UUID handle1 = UUID.randomUUID();
    PlacedReservationImpl pr1 = createReservation(id1, handle1, "q1",
        PlacedReservation.Status.PENDING);

    ClientInfo clientInfo = new ClientInfo() {
      @Override
      public UUID getClientId() {
        return UUID.randomUUID();
      }

      @Override
      public UUID getHandle() {
        return handle1;
      }

      @Override
      public String getCallbackAddress() {
        return "a:0";
      }
    };

    restData.onRegister(clientInfo);
    restData.onEvent(createEvents(pr1));

    // summary reflects the active reservation
    writer = new StringWriter();
    restData.writeSummaryAsJson(writer);
    writer.close();
    map = parseJson(writer.toString());
    Assert.assertEquals(2, map.size());
    Assert.assertTrue(map.containsKey(RestData.REST_VERSION_KEY));
    Assert.assertEquals(RestData.REST_VERSION_VALUE,
        map.get(RestData.REST_VERSION_KEY));
    Assert.assertTrue(map.containsKey(RestData.SUMMARY_DATA));
    map = (Map) map.get(RestData.SUMMARY_DATA);
    Assert.assertEquals(1, map.get(RestData.RESERVATIONS_COUNT_KEY));
    Assert.assertEquals(1, ((List) map.get(RestData.QUEUES_SUMMARY_KEY)).size());
    Assert.assertEquals("q1",
        ((Map) ((List) map.get(RestData.QUEUES_SUMMARY_KEY)).get(0))
            .get("queue"));
    Assert.assertEquals(1,
        ((List) map.get(RestData.CLIENTS_SUMMARY_KEY)).size());
    Assert.assertEquals(handle1.toString(),
        ((Map) ((List) map.get(RestData.CLIENTS_SUMMARY_KEY)).get(0))
            .get("handle"));
    Assert.assertEquals(2, ((List) map.get(RestData.NODES_SUMMARY_KEY)).size());
    Assert.assertEquals("h1",
        ((Map) ((List) map.get(RestData.NODES_SUMMARY_KEY)).get(0))
            .get("node"));
    Assert.assertEquals("h2",
        ((Map) ((List) map.get(RestData.NODES_SUMMARY_KEY)).get(1))
            .get("node"));

    pr1.setStatus(PlacedReservation.Status.RELEASED);
    restData.onEvent(createEvents(pr1));
    restData.onUnregister(clientInfo);

    // summary is empty again after release and unregistration
    writer = new StringWriter();
    restData.writeSummaryAsJson(writer);
    writer.close();
    map = parseJson(writer.toString());
    Assert.assertTrue(map.containsKey(RestData.SUMMARY_DATA));
    map = (Map) map.get(RestData.SUMMARY_DATA);
    Assert.assertEquals(0, map.get(RestData.RESERVATIONS_COUNT_KEY));
    Assert.assertEquals(0, ((List) map.get(RestData.QUEUES_SUMMARY_KEY)).size());
    Assert.assertEquals(0,
        ((List) map.get(RestData.CLIENTS_SUMMARY_KEY)).size());
    Assert.assertEquals(0, ((List) map.get(RestData.NODES_SUMMARY_KEY)).size());
  }

  /**
   * Asserts that the version-info JSON map matches every field exposed by
   * {@link VersionInfo}.
   */
  private void assertVersionInfo(Map map) {
    Assert.assertEquals(VersionInfo.getVersion(), map.get("llamaVersion"));
    Assert.assertEquals(VersionInfo.getBuiltDate(), map.get("llamaBuiltDate"));
    Assert.assertEquals(VersionInfo.getBuiltBy(), map.get("llamaBuiltBy"));
    Assert.assertEquals(VersionInfo.getSCMURI(), map.get("llamaScmUri"));
    Assert.assertEquals(VersionInfo.getSCMRevision(),
        map.get("llamaScmRevision"));
    Assert.assertEquals(VersionInfo.getSourceMD5(), map.get("llamaSourceMD5"));
    Assert.assertEquals(VersionInfo.getHadoopVersion(),
        map.get("llamaHadoopVersion"));
  }
}
/* * Copyright (c) 2012-2016 The ANTLR Project. All rights reserved. * Use of this file is governed by the BSD 3-clause license that * can be found in the LICENSE.txt file in the project root. */ package org.antlr.v4.test.tool; import org.antlr.v4.runtime.atn.ArrayPredictionContext; import org.antlr.v4.runtime.atn.PredictionContext; import org.antlr.v4.runtime.atn.PredictionContextCache; import org.antlr.v4.runtime.atn.SingletonPredictionContext; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import java.util.ArrayDeque; import java.util.Deque; import java.util.IdentityHashMap; import java.util.Map; import static org.junit.Assert.assertEquals; public class TestGraphNodes { PredictionContextCache contextCache; @Before public void setUp() { PredictionContext.globalNodeCount = 1; contextCache = new PredictionContextCache(); } public boolean rootIsWildcard() { return true; } public boolean fullCtx() { return false; } @Test public void test_$_$() { PredictionContext r = PredictionContext.merge(PredictionContext.EMPTY, PredictionContext.EMPTY, rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"*\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void test_$_$_fullctx() { PredictionContext r = PredictionContext.merge(PredictionContext.EMPTY, PredictionContext.EMPTY, fullCtx(), null); // System.out.println(toDOTString(r, fullCtx())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"$\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, fullCtx())); } @Test public void test_x_$() { PredictionContext r = PredictionContext.merge(x(), PredictionContext.EMPTY, rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"*\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test 
public void test_x_$_fullctx() { PredictionContext r = PredictionContext.merge(x(), PredictionContext.EMPTY, fullCtx(), null); // System.out.println(toDOTString(r, fullCtx())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[shape=record, label=\"<p0>|<p1>$\"];\n" + " s1[label=\"$\"];\n" + " s0:p0->s1[label=\"9\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, fullCtx())); } @Test public void test_$_x() { PredictionContext r = PredictionContext.merge(PredictionContext.EMPTY, x(), rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"*\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void test_$_x_fullctx() { PredictionContext r = PredictionContext.merge(PredictionContext.EMPTY, x(), fullCtx(), null); // System.out.println(toDOTString(r, fullCtx())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[shape=record, label=\"<p0>|<p1>$\"];\n" + " s1[label=\"$\"];\n" + " s0:p0->s1[label=\"9\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, fullCtx())); } @Test public void test_a_a() { PredictionContext r = PredictionContext.merge(a(), a(), rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"0\"];\n" + " s1[label=\"*\"];\n" + " s0->s1[label=\"1\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void test_a$_ax() { PredictionContext a1 = a(); PredictionContext x = x(); PredictionContext a2 = createSingleton(x, 1); PredictionContext r = PredictionContext.merge(a1, a2, rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"0\"];\n" + " s1[label=\"*\"];\n" + " s0->s1[label=\"1\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void 
test_a$_ax_fullctx() { PredictionContext a1 = a(); PredictionContext x = x(); PredictionContext a2 = createSingleton(x, 1); PredictionContext r = PredictionContext.merge(a1, a2, fullCtx(), null); // System.out.println(toDOTString(r, fullCtx())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"0\"];\n" + " s1[shape=record, label=\"<p0>|<p1>$\"];\n" + " s2[label=\"$\"];\n" + " s0->s1[label=\"1\"];\n" + " s1:p0->s2[label=\"9\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, fullCtx())); } @Test public void test_ax$_a$() { PredictionContext x = x(); PredictionContext a1 = createSingleton(x, 1); PredictionContext a2 = a(); PredictionContext r = PredictionContext.merge(a1, a2, rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"0\"];\n" + " s1[label=\"*\"];\n" + " s0->s1[label=\"1\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void test_aa$_a$_$_fullCtx() { PredictionContext empty = PredictionContext.EMPTY; PredictionContext child1 = createSingleton(empty, 8); PredictionContext right = PredictionContext.merge(empty, child1, false, null); PredictionContext left = createSingleton(right, 8); PredictionContext merged = PredictionContext.merge(left, right, false, null); String actual = toDOTString(merged, false); // System.out.println(actual); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[shape=record, label=\"<p0>|<p1>$\"];\n" + " s1[shape=record, label=\"<p0>|<p1>$\"];\n" + " s2[label=\"$\"];\n" + " s0:p0->s1[label=\"8\"];\n" + " s1:p0->s2[label=\"8\"];\n" + "}\n"; assertEquals(expecting, actual); } @Test public void test_ax$_a$_fullctx() { PredictionContext x = x(); PredictionContext a1 = createSingleton(x, 1); PredictionContext a2 = a(); PredictionContext r = PredictionContext.merge(a1, a2, fullCtx(), null); // System.out.println(toDOTString(r, fullCtx())); String expecting = "digraph G {\n" + 
"rankdir=LR;\n" + " s0[label=\"0\"];\n" + " s1[shape=record, label=\"<p0>|<p1>$\"];\n" + " s2[label=\"$\"];\n" + " s0->s1[label=\"1\"];\n" + " s1:p0->s2[label=\"9\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, fullCtx())); } @Test public void test_a_b() { PredictionContext r = PredictionContext.merge(a(), b(), rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[shape=record, label=\"<p0>|<p1>\"];\n" + " s1[label=\"*\"];\n" + " s0:p0->s1[label=\"1\"];\n" + " s0:p1->s1[label=\"2\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void test_ax_ax_same() { PredictionContext x = x(); PredictionContext a1 = createSingleton(x, 1); PredictionContext a2 = createSingleton(x, 1); PredictionContext r = PredictionContext.merge(a1, a2, rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"0\"];\n" + " s1[label=\"1\"];\n" + " s2[label=\"*\"];\n" + " s0->s1[label=\"1\"];\n" + " s1->s2[label=\"9\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void test_ax_ax() { PredictionContext x1 = x(); PredictionContext x2 = x(); PredictionContext a1 = createSingleton(x1, 1); PredictionContext a2 = createSingleton(x2, 1); PredictionContext r = PredictionContext.merge(a1, a2, rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"0\"];\n" + " s1[label=\"1\"];\n" + " s2[label=\"*\"];\n" + " s0->s1[label=\"1\"];\n" + " s1->s2[label=\"9\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void test_abx_abx() { PredictionContext x1 = x(); PredictionContext x2 = x(); PredictionContext b1 = createSingleton(x1, 2); PredictionContext b2 = createSingleton(x2, 2); PredictionContext a1 = createSingleton(b1, 
1); PredictionContext a2 = createSingleton(b2, 1); PredictionContext r = PredictionContext.merge(a1, a2, rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"0\"];\n" + " s1[label=\"1\"];\n" + " s2[label=\"2\"];\n" + " s3[label=\"*\"];\n" + " s0->s1[label=\"1\"];\n" + " s1->s2[label=\"2\"];\n" + " s2->s3[label=\"9\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void test_abx_acx() { PredictionContext x1 = x(); PredictionContext x2 = x(); PredictionContext b = createSingleton(x1, 2); PredictionContext c = createSingleton(x2, 3); PredictionContext a1 = createSingleton(b, 1); PredictionContext a2 = createSingleton(c, 1); PredictionContext r = PredictionContext.merge(a1, a2, rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"0\"];\n" + " s1[shape=record, label=\"<p0>|<p1>\"];\n" + " s2[label=\"2\"];\n" + " s3[label=\"*\"];\n" + " s0->s1[label=\"1\"];\n" + " s1:p0->s2[label=\"2\"];\n" + " s1:p1->s2[label=\"3\"];\n" + " s2->s3[label=\"9\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void test_ax_bx_same() { PredictionContext x = x(); PredictionContext a = createSingleton(x, 1); PredictionContext b = createSingleton(x, 2); PredictionContext r = PredictionContext.merge(a, b, rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[shape=record, label=\"<p0>|<p1>\"];\n" + " s1[label=\"1\"];\n" + " s2[label=\"*\"];\n" + " s0:p0->s1[label=\"1\"];\n" + " s0:p1->s1[label=\"2\"];\n" + " s1->s2[label=\"9\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void test_ax_bx() { PredictionContext x1 = x(); PredictionContext x2 = x(); PredictionContext a = createSingleton(x1, 1); 
PredictionContext b = createSingleton(x2, 2); PredictionContext r = PredictionContext.merge(a, b, rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[shape=record, label=\"<p0>|<p1>\"];\n" + " s1[label=\"1\"];\n" + " s2[label=\"*\"];\n" + " s0:p0->s1[label=\"1\"];\n" + " s0:p1->s1[label=\"2\"];\n" + " s1->s2[label=\"9\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void test_ax_by() { PredictionContext a = createSingleton(x(), 1); PredictionContext b = createSingleton(y(), 2); PredictionContext r = PredictionContext.merge(a, b, rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[shape=record, label=\"<p0>|<p1>\"];\n" + " s2[label=\"2\"];\n" + " s3[label=\"*\"];\n" + " s1[label=\"1\"];\n" + " s0:p0->s1[label=\"1\"];\n" + " s0:p1->s2[label=\"2\"];\n" + " s2->s3[label=\"10\"];\n" + " s1->s3[label=\"9\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void test_a$_bx() { PredictionContext x2 = x(); PredictionContext a = a(); PredictionContext b = createSingleton(x2, 2); PredictionContext r = PredictionContext.merge(a, b, rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[shape=record, label=\"<p0>|<p1>\"];\n" + " s2[label=\"2\"];\n" + " s1[label=\"*\"];\n" + " s0:p0->s1[label=\"1\"];\n" + " s0:p1->s2[label=\"2\"];\n" + " s2->s1[label=\"9\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } @Test public void test_a$_bx_fullctx() { PredictionContext x2 = x(); PredictionContext a = a(); PredictionContext b = createSingleton(x2, 2); PredictionContext r = PredictionContext.merge(a, b, fullCtx(), null); // System.out.println(toDOTString(r, fullCtx())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + 
" s0[shape=record, label=\"<p0>|<p1>\"];\n" + " s2[label=\"2\"];\n" + " s1[label=\"$\"];\n" + " s0:p0->s1[label=\"1\"];\n" + " s0:p1->s2[label=\"2\"];\n" + " s2->s1[label=\"9\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, fullCtx())); } @Ignore("Known inefficiency but deferring resolving the issue for now") @Test public void test_aex_bfx() { // TJP: this is inefficient as it leaves the top x nodes unmerged. PredictionContext x1 = x(); PredictionContext x2 = x(); PredictionContext e = createSingleton(x1, 5); PredictionContext f = createSingleton(x2, 6); PredictionContext a = createSingleton(e, 1); PredictionContext b = createSingleton(f, 2); PredictionContext r = PredictionContext.merge(a, b, rootIsWildcard(), null); // System.out.println(toDOTString(r, rootIsWildcard())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[shape=record, label=\"<p0>|<p1>\"];\n" + " s2[label=\"2\"];\n" + " s3[label=\"3\"];\n" + " s4[label=\"*\"];\n" + " s1[label=\"1\"];\n" + " s0:p0->s1[label=\"1\"];\n" + " s0:p1->s2[label=\"2\"];\n" + " s2->s3[label=\"6\"];\n" + " s3->s4[label=\"9\"];\n" + " s1->s3[label=\"5\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, rootIsWildcard())); } // Array merges @Test public void test_A$_A$_fullctx() { ArrayPredictionContext A1 = array(PredictionContext.EMPTY); ArrayPredictionContext A2 = array(PredictionContext.EMPTY); PredictionContext r = PredictionContext.merge(A1, A2, fullCtx(), null); // System.out.println(toDOTString(r, fullCtx())); String expecting = "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"$\"];\n" + "}\n"; assertEquals(expecting, toDOTString(r, fullCtx())); } @Test public void test_Aab_Ac() { // a,b + c SingletonPredictionContext a = a(); SingletonPredictionContext b = b(); SingletonPredictionContext c = c(); ArrayPredictionContext A1 = array(a, b); ArrayPredictionContext A2 = array(c); PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null); // System.out.println(toDOTString(r, 
rootIsWildcard())); String expecting =
		"digraph G {\n" +
		"rankdir=LR;\n" +
		" s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
		" s1[label=\"*\"];\n" +
		" s0:p0->s1[label=\"1\"];\n" +
		" s0:p1->s1[label=\"2\"];\n" +
		" s0:p2->s1[label=\"3\"];\n" +
		"}\n";
	assertEquals(expecting, toDOTString(r, rootIsWildcard())); }

	// Merging two single-element arrays holding the same payload collapses to one
	// singleton node (graph sharing), rendered as s0 -> "*".
	@Test public void test_Aa_Aa() {
		SingletonPredictionContext a1 = a();
		SingletonPredictionContext a2 = a();
		ArrayPredictionContext A1 = array(a1);
		ArrayPredictionContext A2 = array(a2);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		// System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			" s0[label=\"0\"];\n" +
			" s1[label=\"*\"];\n" +
			" s0->s1[label=\"1\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}

	// Merging disjoint payload sets produces a record node with one port per payload.
	@Test public void test_Aa_Abc() { // a + b,c
		SingletonPredictionContext a = a();
		SingletonPredictionContext b = b();
		SingletonPredictionContext c = c();
		ArrayPredictionContext A1 = array(a);
		ArrayPredictionContext A2 = array(b, c);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		// System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			" s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
			" s1[label=\"*\"];\n" +
			" s0:p0->s1[label=\"1\"];\n" +
			" s0:p1->s1[label=\"2\"];\n" +
			" s0:p2->s1[label=\"3\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}

	// Interleaved payloads must come out sorted in the merged array node.
	@Test public void test_Aac_Ab() { // a,c + b
		SingletonPredictionContext a = a();
		SingletonPredictionContext b = b();
		SingletonPredictionContext c = c();
		ArrayPredictionContext A1 = array(a, c);
		ArrayPredictionContext A2 = array(b);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		// System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			" s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
			" s1[label=\"*\"];\n" +
			" s0:p0->s1[label=\"1\"];\n" +
			" s0:p1->s1[label=\"2\"];\n" +
			" s0:p2->s1[label=\"3\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}

	// A subset on the right side must merge without duplicating payloads.
	@Test public void test_Aab_Aa() { // a,b + a
		ArrayPredictionContext A1 = array(a(), b());
		ArrayPredictionContext A2 = array(a());
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		// System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			" s0[shape=record, label=\"<p0>|<p1>\"];\n" +
			" s1[label=\"*\"];\n" +
			" s0:p0->s1[label=\"1\"];\n" +
			" s0:p1->s1[label=\"2\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}

	@Test public void test_Aab_Ab() { // a,b + b
		ArrayPredictionContext A1 = array(a(), b());
		ArrayPredictionContext A2 = array(b());
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		// System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			" s0[shape=record, label=\"<p0>|<p1>\"];\n" +
			" s1[label=\"*\"];\n" +
			" s0:p0->s1[label=\"1\"];\n" +
			" s0:p1->s1[label=\"2\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}

	// Two singletons with distinct parents stay separate after the array merge.
	@Test public void test_Aax_Aby() { // ax + by but in arrays
		SingletonPredictionContext a = createSingleton(x(), 1);
		SingletonPredictionContext b = createSingleton(y(), 2);
		ArrayPredictionContext A1 = array(a);
		ArrayPredictionContext A2 = array(b);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		// System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			" s0[shape=record, label=\"<p0>|<p1>\"];\n" +
			" s2[label=\"2\"];\n" +
			" s3[label=\"*\"];\n" +
			" s1[label=\"1\"];\n" +
			" s0:p0->s1[label=\"1\"];\n" +
			" s0:p1->s2[label=\"2\"];\n" +
			" s2->s3[label=\"10\"];\n" +
			" s1->s3[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}

	@Test public void test_Aax_Aay() { // ax + ay -> merged singleton a, array parent
		SingletonPredictionContext a1 = createSingleton(x(), 1);
		SingletonPredictionContext a2 = createSingleton(y(), 1);
		ArrayPredictionContext A1 = array(a1);
		ArrayPredictionContext A2 = array(a2);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		// System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			" s0[label=\"0\"];\n" +
			" s1[shape=record, label=\"<p0>|<p1>\"];\n" +
			" s2[label=\"*\"];\n" +
			" s0->s1[label=\"1\"];\n" +
			" s1:p0->s2[label=\"9\"];\n" +
			" s1:p1->s2[label=\"10\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}

	@Test public void test_Aaxc_Aayd() { // ax,c + ay,d -> merged a, array parent
		SingletonPredictionContext a1 = createSingleton(x(), 1);
		SingletonPredictionContext a2 = createSingleton(y(), 1);
		ArrayPredictionContext A1 = array(a1, c());
		ArrayPredictionContext A2 = array(a2, d());
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		// System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			" s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
			" s2[label=\"*\"];\n" +
			" s1[shape=record, label=\"<p0>|<p1>\"];\n" +
			" s0:p0->s1[label=\"1\"];\n" +
			" s0:p1->s2[label=\"3\"];\n" +
			" s0:p2->s2[label=\"4\"];\n" +
			" s1:p0->s2[label=\"9\"];\n" +
			" s1:p1->s2[label=\"10\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}

	@Test public void test_Aaubv_Acwdx() { // au,bv + cw,dx -> [a,b,c,d]->[u,v,w,x]
		SingletonPredictionContext a = createSingleton(u(), 1);
		SingletonPredictionContext b = createSingleton(v(), 2);
		SingletonPredictionContext c = createSingleton(w(), 3);
		SingletonPredictionContext d = createSingleton(x(), 4);
		ArrayPredictionContext A1 = array(a, b);
		ArrayPredictionContext A2 = array(c, d);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		// System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			" s0[shape=record, label=\"<p0>|<p1>|<p2>|<p3>\"];\n" +
			" s4[label=\"4\"];\n" +
			" s5[label=\"*\"];\n" +
			" s3[label=\"3\"];\n" +
			" s2[label=\"2\"];\n" +
			" s1[label=\"1\"];\n" +
			" s0:p0->s1[label=\"1\"];\n" +
			" s0:p1->s2[label=\"2\"];\n" +
			" s0:p2->s3[label=\"3\"];\n" +
			" s0:p3->s4[label=\"4\"];\n" +
			" s4->s5[label=\"9\"];\n" +
			" s3->s5[label=\"8\"];\n" +
			" s2->s5[label=\"7\"];\n" +
			" s1->s5[label=\"6\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}

	@Test public void test_Aaubv_Abvdx() { // au,bv + bv,dx -> [a,b,d]->[u,v,x]
		SingletonPredictionContext a = createSingleton(u(), 1);
		SingletonPredictionContext b1 = createSingleton(v(), 2);
		SingletonPredictionContext b2 = createSingleton(v(), 2);
		SingletonPredictionContext d = createSingleton(x(), 4);
		ArrayPredictionContext A1 = array(a, b1);
		ArrayPredictionContext A2 = array(b2, d);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		// System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			" s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
			" s3[label=\"3\"];\n" +
			" s4[label=\"*\"];\n" +
			" s2[label=\"2\"];\n" +
			" s1[label=\"1\"];\n" +
			" s0:p0->s1[label=\"1\"];\n" +
			" s0:p1->s2[label=\"2\"];\n" +
			" s0:p2->s3[label=\"4\"];\n" +
			" s3->s4[label=\"9\"];\n" +
			" s2->s4[label=\"7\"];\n" +
			" s1->s4[label=\"6\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}

	@Test public void test_Aaubv_Abwdx() { // au,bv + bw,dx -> [a,b,d]->[u,[v,w],x]
		SingletonPredictionContext a = createSingleton(u(), 1);
		SingletonPredictionContext b1 = createSingleton(v(), 2);
		SingletonPredictionContext b2 = createSingleton(w(), 2);
		SingletonPredictionContext d = createSingleton(x(), 4);
		ArrayPredictionContext A1 = array(a, b1);
		ArrayPredictionContext A2 = array(b2, d);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		// System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			" s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
			" s3[label=\"3\"];\n" +
			" s4[label=\"*\"];\n" +
			" s2[shape=record, label=\"<p0>|<p1>\"];\n" +
			" s1[label=\"1\"];\n" +
			" s0:p0->s1[label=\"1\"];\n" +
			" s0:p1->s2[label=\"2\"];\n" +
			" s0:p2->s3[label=\"4\"];\n" +
			" s3->s4[label=\"9\"];\n" +
			" s2:p0->s4[label=\"7\"];\n" +
			" s2:p1->s4[label=\"8\"];\n" +
			" s1->s4[label=\"6\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}

	@Test public void test_Aaubv_Abvdu() { // au,bv + bv,du -> [a,b,d]->[u,v,u]; u,v shared
		SingletonPredictionContext a = createSingleton(u(), 1);
		SingletonPredictionContext b1 = createSingleton(v(), 2);
		SingletonPredictionContext b2 = createSingleton(v(), 2);
		SingletonPredictionContext d = createSingleton(u(), 4);
		ArrayPredictionContext A1 = array(a, b1);
		ArrayPredictionContext A2 = array(b2, d);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		// System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			" s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
			" s2[label=\"2\"];\n" +
			" s3[label=\"*\"];\n" +
			" s1[label=\"1\"];\n" +
			" s0:p0->s1[label=\"1\"];\n" +
			" s0:p1->s2[label=\"2\"];\n" +
			" s0:p2->s1[label=\"4\"];\n" +
			" s2->s3[label=\"7\"];\n" +
			" s1->s3[label=\"6\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}

	@Test public void test_Aaubu_Acudu() { // au,bu + cu,du -> [a,b,c,d]->[u,u,u,u]
		SingletonPredictionContext a = createSingleton(u(), 1);
		SingletonPredictionContext b = createSingleton(u(), 2);
		SingletonPredictionContext c = createSingleton(u(), 3);
		SingletonPredictionContext d = createSingleton(u(), 4);
		ArrayPredictionContext A1 = array(a, b);
		ArrayPredictionContext A2 = array(c, d);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		// System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			" s0[shape=record, label=\"<p0>|<p1>|<p2>|<p3>\"];\n" +
			" s1[label=\"1\"];\n" +
			" s2[label=\"*\"];\n" +
			" s0:p0->s1[label=\"1\"];\n" +
			" s0:p1->s1[label=\"2\"];\n" +
			" s0:p2->s1[label=\"3\"];\n" +
			" s0:p3->s1[label=\"4\"];\n" +
			" s1->s2[label=\"6\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}

	// ------------ SUPPORT -------------------------

	// Factory helpers: each returns a fresh singleton context rooted at EMPTY
	// with a fixed return-state payload (a=1, b=2, c=3, d=4, u=6, v=7, w=8, x=9, y=10).
	protected SingletonPredictionContext a() {
		return createSingleton(PredictionContext.EMPTY, 1);
	}

	private SingletonPredictionContext b() {
		return createSingleton(PredictionContext.EMPTY, 2);
	}

	private SingletonPredictionContext c() {
		return createSingleton(PredictionContext.EMPTY, 3);
	}

	private SingletonPredictionContext d() {
		return createSingleton(PredictionContext.EMPTY, 4);
	}

	private SingletonPredictionContext u() {
		return createSingleton(PredictionContext.EMPTY, 6);
	}

	private SingletonPredictionContext v() {
		return createSingleton(PredictionContext.EMPTY, 7);
	}

	private SingletonPredictionContext w() {
		return createSingleton(PredictionContext.EMPTY, 8);
	}

	private SingletonPredictionContext x() {
		return createSingleton(PredictionContext.EMPTY, 9);
	}

	private SingletonPredictionContext y() {
		return createSingleton(PredictionContext.EMPTY, 10);
	}

	// Builds a singleton context node with the given parent and payload.
	public SingletonPredictionContext createSingleton(PredictionContext parent, int payload) {
		SingletonPredictionContext a = SingletonPredictionContext.create(parent, payload);
		return a;
	}

	// Packs the parents and return states of the given singletons into one array node.
	public ArrayPredictionContext array(SingletonPredictionContext... nodes) {
		PredictionContext[] parents = new PredictionContext[nodes.length];
		int[] invokingStates = new int[nodes.length];
		for (int i=0; i<nodes.length; i++) {
			parents[i] = nodes[i].parent;
			invokingStates[i] = nodes[i].returnState;
		}
		return new ArrayPredictionContext(parents, invokingStates);
	}

	// Renders a context graph as Graphviz DOT text for the assertions above.
	// Node ids are assigned in discovery order (identity-based), record nodes
	// get one <pN> port per child, and the empty context prints '*' (wildcard
	// root) or '$' (full-context root).
	private static String toDOTString(PredictionContext context, boolean rootIsWildcard) {
		StringBuilder nodes = new StringBuilder();
		StringBuilder edges = new StringBuilder();
		Map<PredictionContext, PredictionContext> visited = new IdentityHashMap<PredictionContext, PredictionContext>();
		Map<PredictionContext, Integer> contextIds = new IdentityHashMap<PredictionContext, Integer>();
		Deque<PredictionContext> workList = new ArrayDeque<PredictionContext>();
		visited.put(context, context);
		contextIds.put(context, contextIds.size());
		workList.add(context);
		while (!workList.isEmpty()) {
			PredictionContext current = workList.pop();
			nodes.append(" s").append(contextIds.get(current)).append('[');
			if (current.size() > 1) {
				nodes.append("shape=record, ");
			}
			nodes.append("label=\"");
			if (current.isEmpty()) {
				nodes.append(rootIsWildcard ? '*' : '$');
			} else if (current.size() > 1) {
				for (int i = 0; i < current.size(); i++) {
					if (i > 0) {
						nodes.append('|');
					}
					nodes.append("<p").append(i).append('>');
					if (current.getReturnState(i) == PredictionContext.EMPTY_RETURN_STATE) {
						nodes.append(rootIsWildcard ? '*' : '$');
					}
				}
			} else {
				nodes.append(contextIds.get(current));
			}
			nodes.append("\"];\n");
			if (current.isEmpty()) {
				continue;
			}
			for (int i = 0; i < current.size(); i++) {
				if (current.getReturnState(i) == PredictionContext.EMPTY_RETURN_STATE) {
					continue;
				}
				// First visit of a parent assigns its id and schedules it.
				if (visited.put(current.getParent(i), current.getParent(i)) == null) {
					contextIds.put(current.getParent(i), contextIds.size());
					workList.push(current.getParent(i));
				}
				edges.append(" s").append(contextIds.get(current));
				if (current.size() > 1) {
					edges.append(":p").append(i);
				}
				edges.append("->");
				edges.append('s').append(contextIds.get(current.getParent(i)));
				edges.append("[label=\"").append(current.getReturnState(i)).append("\"]");
				edges.append(";\n");
			}
		}
		StringBuilder builder = new StringBuilder();
		builder.append("digraph G {\n");
		builder.append("rankdir=LR;\n");
		builder.append(nodes);
		builder.append(edges);
		builder.append("}\n");
		return builder.toString();
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.yarn; import org.apache.flink.configuration.Configuration; import org.apache.flink.runtime.akka.AkkaUtils; import org.apache.flink.runtime.clusterframework.ApplicationStatus; import org.apache.flink.runtime.clusterframework.ContaineredTaskManagerParameters; import org.apache.flink.runtime.clusterframework.FlinkResourceManager; import org.apache.flink.runtime.clusterframework.messages.StopCluster; import org.apache.flink.runtime.clusterframework.types.ResourceID; import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService; import org.apache.flink.util.Preconditions; import org.apache.flink.yarn.configuration.YarnConfigOptions; import org.apache.flink.yarn.messages.ContainersAllocated; import org.apache.flink.yarn.messages.ContainersComplete; import akka.actor.ActorRef; import akka.actor.Props; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerStatus; import 
org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.client.api.AMRMClient;
import org.apache.hadoop.yarn.client.api.NMClient;
import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

import org.slf4j.Logger;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static java.util.Objects.requireNonNull;

/**
 * Specialized Flink Resource Manager implementation for YARN clusters. It is started as the
 * YARN ApplicationMaster and implements the YARN-specific logic for container requests and failure
 * monitoring.
 */
public class YarnFlinkResourceManager extends FlinkResourceManager<RegisteredYarnWorkerNode> {

	/** The heartbeat interval while the resource master is waiting for containers. */
	private static final int FAST_YARN_HEARTBEAT_INTERVAL_MS = 500;

	/** The default heartbeat interval during regular operation. */
	private static final int DEFAULT_YARN_HEARTBEAT_INTERVAL_MS = 5000;

	/** Environment variable name of the final container id used by the Flink ResourceManager.
	 * Container ID generation may vary across Hadoop versions. */
	static final String ENV_FLINK_CONTAINER_ID = "_FLINK_CONTAINER_ID";

	/** The containers where a TaskManager is starting and we are waiting for it to register. */
	private final Map<ResourceID, YarnContainerInLaunch> containersInLaunch;

	/** Containers we have released, where we are waiting for an acknowledgement that
	 * they are released. */
	private final Map<ContainerId, Container> containersBeingReturned;

	/** The YARN / Hadoop configuration object. */
	private final YarnConfiguration yarnConfig;

	/** The TaskManager container parameters (like container memory size). */
	private final ContaineredTaskManagerParameters taskManagerParameters;

	/** Context information used to start a TaskManager Java process. */
	private final ContainerLaunchContext taskManagerLaunchContext;

	/** Host name for the container running this process. */
	private final String applicationMasterHostName;

	/** Web interface URL, may be null. */
	private final String webInterfaceURL;

	/** Default heartbeat interval between this actor and the YARN ResourceManager. */
	private final int yarnHeartbeatIntervalMillis;

	/** Number of failed TaskManager containers before stopping the application. -1 means infinite. */
	private final int maxFailedContainers;

	/** Callback handler for the asynchronous resourceManagerClient. */
	private YarnResourceManagerCallbackHandler resourceManagerCallbackHandler;

	/** Client to communicate with the Resource Manager (YARN's master). */
	private AMRMClientAsync<AMRMClient.ContainerRequest> resourceManagerClient;

	/** Client to communicate with the Node manager and launch TaskManager processes. */
	private NMClient nodeManagerClient;

	/** The number of containers requested, but not yet granted. */
	private int numPendingContainerRequests;

	/** The number of failed containers since the master became active. */
	private int failedContainersSoFar;

	/** A reference to the reflector to look up previous session containers. */
	private RegisterApplicationMasterResponseReflector applicationMasterResponseReflector =
		new RegisterApplicationMasterResponseReflector(LOG);

	// Convenience constructor: creates a default callback handler and delegates.
	public YarnFlinkResourceManager(
			Configuration flinkConfig,
			YarnConfiguration yarnConfig,
			LeaderRetrievalService leaderRetrievalService,
			String applicationMasterHostName,
			String webInterfaceURL,
			ContaineredTaskManagerParameters taskManagerParameters,
			ContainerLaunchContext taskManagerLaunchContext,
			int yarnHeartbeatIntervalMillis,
			int maxFailedContainers,
			int numInitialTaskManagers) {

		this(
			flinkConfig,
			yarnConfig,
			leaderRetrievalService,
			applicationMasterHostName,
			webInterfaceURL,
			taskManagerParameters,
			taskManagerLaunchContext,
			yarnHeartbeatIntervalMillis,
			maxFailedContainers,
			numInitialTaskManagers,
			new YarnResourceManagerCallbackHandler());
	}

	// Convenience constructor: creates the default AMRM/NM clients for the given
	// callback handler and delegates to the full constructor.
	public YarnFlinkResourceManager(
			Configuration flinkConfig,
			YarnConfiguration yarnConfig,
			LeaderRetrievalService leaderRetrievalService,
			String applicationMasterHostName,
			String webInterfaceURL,
			ContaineredTaskManagerParameters taskManagerParameters,
			ContainerLaunchContext taskManagerLaunchContext,
			int yarnHeartbeatIntervalMillis,
			int maxFailedContainers,
			int numInitialTaskManagers,
			YarnResourceManagerCallbackHandler callbackHandler) {

		this(
			flinkConfig,
			yarnConfig,
			leaderRetrievalService,
			applicationMasterHostName,
			webInterfaceURL,
			taskManagerParameters,
			taskManagerLaunchContext,
			yarnHeartbeatIntervalMillis,
			maxFailedContainers,
			numInitialTaskManagers,
			callbackHandler,
			AMRMClientAsync.createAMRMClientAsync(yarnHeartbeatIntervalMillis, callbackHandler),
			NMClient.createNMClient());
	}

	// Full constructor; also the injection point for mock clients in tests.
	public YarnFlinkResourceManager(
			Configuration flinkConfig,
			YarnConfiguration yarnConfig,
			LeaderRetrievalService leaderRetrievalService,
			String applicationMasterHostName,
			String webInterfaceURL,
			ContaineredTaskManagerParameters taskManagerParameters,
			ContainerLaunchContext taskManagerLaunchContext,
			int yarnHeartbeatIntervalMillis,
			int maxFailedContainers,
			int numInitialTaskManagers,
			YarnResourceManagerCallbackHandler callbackHandler,
			AMRMClientAsync<AMRMClient.ContainerRequest> resourceManagerClient,
			NMClient nodeManagerClient) {

		super(numInitialTaskManagers, flinkConfig, leaderRetrievalService);
		this.yarnConfig = requireNonNull(yarnConfig);
		this.taskManagerParameters = requireNonNull(taskManagerParameters);
		this.taskManagerLaunchContext = requireNonNull(taskManagerLaunchContext);
		this.applicationMasterHostName = requireNonNull(applicationMasterHostName);
		// webInterfaceURL may be null (no tracking UI).
		this.webInterfaceURL = webInterfaceURL;
		this.yarnHeartbeatIntervalMillis = yarnHeartbeatIntervalMillis;
		this.maxFailedContainers = maxFailedContainers;

		this.resourceManagerCallbackHandler = Preconditions.checkNotNull(callbackHandler);
		this.resourceManagerClient = Preconditions.checkNotNull(resourceManagerClient);
		this.nodeManagerClient = Preconditions.checkNotNull(nodeManagerClient);

		this.containersInLaunch = new HashMap<>();
		this.containersBeingReturned = new HashMap<>();
	}

	// ------------------------------------------------------------------------
	//  Actor messages
	// ------------------------------------------------------------------------

	@Override
	protected void handleMessage(Object message) {
		// check for YARN specific actor messages first
		if (message instanceof ContainersAllocated) {
			containersAllocated(((ContainersAllocated) message).containers());
		} else if (message instanceof ContainersComplete) {
			containersComplete(((ContainersComplete) message).containers());
		} else {
			// message handled by the generic resource master code
			super.handleMessage(message);
		}
	}

	// ------------------------------------------------------------------------
	//  YARN specific behavior
	// ------------------------------------------------------------------------

	@Override
	protected void initialize() throws Exception {
		LOG.info("Initializing YARN resource master");

		resourceManagerCallbackHandler.initialize(self());
		resourceManagerClient.init(yarnConfig);
		resourceManagerClient.start();

		// create the client to
nodeManagerClient.stop();
		} catch (Throwable t) {
			LOG.error("Could not cleanly shut down the Node Manager Client", t);
		}

		// stop the actor after finishing processing the stop message
		getContext().system().stop(getSelf());
	}

	@Override
	protected void fatalError(String message, Throwable error) {
		// we do not unregister, but cause a hard fail of this process, to have it
		// restarted by YARN
		LOG.error("FATAL ERROR IN YARN APPLICATION MASTER: " + message, error);
		LOG.error("Shutting down process");

		// kill this process, this will make YARN restart the process
		System.exit(EXIT_CODE_FATAL_ERROR);
	}

	@Override
	protected void requestNewWorkers(int numWorkers) {
		// Clamp the configured container memory to the int range expected by YARN.
		final long mem = taskManagerParameters.taskManagerTotalMemoryMB();
		final int containerMemorySizeMB;

		if (mem <= Integer.MAX_VALUE) {
			containerMemorySizeMB = (int) mem;
		} else {
			containerMemorySizeMB = Integer.MAX_VALUE;
			LOG.error("Decreasing container size from {} MB to {} MB (integer value overflow)",
				mem, containerMemorySizeMB);
		}

		for (int i = 0; i < numWorkers; i++) {
			numPendingContainerRequests++;
			LOG.info("Requesting new TaskManager container with {} megabytes memory. Pending requests: {}",
				containerMemorySizeMB, numPendingContainerRequests);

			// Priority for worker containers - priorities are intra-application
			Priority priority = Priority.newInstance(0);

			// Resource requirements for worker containers
			int taskManagerSlots = taskManagerParameters.numSlots();
			int vcores = config.getInteger(YarnConfigOptions.VCORES, Math.max(taskManagerSlots, 1));
			Resource capability = Resource.newInstance(containerMemorySizeMB, vcores);

			resourceManagerClient.addContainerRequest(
				new AMRMClient.ContainerRequest(capability, null, null, priority));
		}

		// make sure we transmit the request fast and receive fast news of granted allocations
		resourceManagerClient.setHeartbeatInterval(FAST_YARN_HEARTBEAT_INTERVAL_MS);
	}

	@Override
	protected void releasePendingWorker(ResourceID id) {
		YarnContainerInLaunch container = containersInLaunch.remove(id);
		if (container != null) {
			releaseYarnContainer(container.container());
		} else {
			LOG.error("Cannot find container {} to release. Ignoring request.", id);
		}
	}

	@Override
	protected void releaseStartedWorker(RegisteredYarnWorkerNode worker) {
		releaseYarnContainer(worker.yarnContainer());
	}

	// Stops a container on its node manager and tells the ResourceManager it is
	// no longer needed; completion is confirmed later via containersComplete().
	private void releaseYarnContainer(Container container) {
		LOG.info("Releasing YARN container {}", container.getId());

		containersBeingReturned.put(container.getId(), container);

		// release the container on the node manager
		try {
			nodeManagerClient.stopContainer(container.getId(), container.getNodeId());
		} catch (Throwable t) {
			// we only log this error. since the ResourceManager also gets the release
			// notification, the container should be eventually cleaned up
			LOG.error("Error while calling YARN Node Manager to release container", t);
		}

		// tell the master that the container is no longer needed
		resourceManagerClient.releaseAssignedContainer(container.getId());
	}

	@Override
	protected RegisteredYarnWorkerNode workerStarted(ResourceID resourceID) {
		YarnContainerInLaunch inLaunch = containersInLaunch.remove(resourceID);
		if (inLaunch == null) {
			// Container was not in state "being launched", this can indicate that the TaskManager
			// in this container was already registered or that the container was not started
			// by this resource manager. Simply ignore this resourceID.
			return null;
		} else {
			return new RegisteredYarnWorkerNode(inLaunch.container());
		}
	}

	@Override
	protected Collection<RegisteredYarnWorkerNode> reacceptRegisteredWorkers(Collection<ResourceID> toConsolidate) {

		// we check for each task manager if we recognize its container
		List<RegisteredYarnWorkerNode> accepted = new ArrayList<>();
		for (ResourceID resourceID : toConsolidate) {
			YarnContainerInLaunch yci = containersInLaunch.remove(resourceID);
			if (yci != null) {
				LOG.info("YARN container consolidation recognizes Resource {} ", resourceID);
				accepted.add(new RegisteredYarnWorkerNode(yci.container()));
			} else {
				if (isStarted(resourceID)) {
					LOG.info("TaskManager {} has already been registered at the resource manager.", resourceID);
				} else {
					LOG.info("YARN container consolidation does not recognize TaskManager {}", resourceID);
				}
			}
		}

		return accepted;
	}

	@Override
	protected int getNumWorkerRequestsPending() {
		return numPendingContainerRequests;
	}

	@Override
	protected int getNumWorkersPendingRegistration() {
		return containersInLaunch.size();
	}

	// ------------------------------------------------------------------------
	//  Callbacks from the YARN Resource Manager
	// ------------------------------------------------------------------------

	// Invoked (via actor message) when YARN grants new containers: launches a
	// TaskManager in each container that is still needed, returns the rest.
	private void containersAllocated(List<Container> containers) {
		final int numRequired = getDesignatedWorkerPoolSize();
		final int numRegistered = getNumberOfStartedTaskManagers();

		for (Container container : containers) {
			numPendingContainerRequests = Math.max(0, numPendingContainerRequests - 1);
			LOG.info("Received new container: {} - Remaining pending container requests: {}",
				container.getId(), numPendingContainerRequests);

			// decide whether to return the container, or whether to start a TaskManager
			if (numRegistered + containersInLaunch.size() < numRequired) {
				// start a TaskManager
				final YarnContainerInLaunch containerInLaunch = new YarnContainerInLaunch(container);
				final ResourceID resourceID = containerInLaunch.getResourceID();
				containersInLaunch.put(resourceID, containerInLaunch);

				String message = "Launching TaskManager in container " + containerInLaunch
					+ " on host " + container.getNodeId().getHost();
				LOG.info(message);
				sendInfoMessage(message);

				try {
					// set a special environment variable to uniquely identify this container
					taskManagerLaunchContext.getEnvironment()
						.put(ENV_FLINK_CONTAINER_ID, resourceID.getResourceIdString());
					nodeManagerClient.startContainer(container, taskManagerLaunchContext);
				} catch (Throwable t) {
					// failed to launch the container
					containersInLaunch.remove(resourceID);

					// return container, a new one will be requested eventually
					LOG.error("Could not start TaskManager in container " + containerInLaunch, t);
					containersBeingReturned.put(container.getId(), container);
					resourceManagerClient.releaseAssignedContainer(container.getId());
				}
			} else {
				// return excessive container
				LOG.info("Returning excess container {}", container.getId());
				containersBeingReturned.put(container.getId(), container);
				resourceManagerClient.releaseAssignedContainer(container.getId());
			}
		}

		updateProgress();

		// if we are waiting for no further containers, we can go to the
		// regular heartbeat interval
		if (numPendingContainerRequests <= 0) {
resourceManagerClient.setHeartbeatInterval(yarnHeartbeatIntervalMillis); } // make sure we re-check the status of workers / containers one more time at least, // in case some containers did not come up properly triggerCheckWorkers(); } /** * Invoked when the ResourceManager informs of completed containers. * Called via an actor message by the callback from the ResourceManager client. * * @param containers The containers that have completed. */ private void containersComplete(List<ContainerStatus> containers) { // the list contains both failed containers, as well as containers that // were gracefully returned by this application master for (ContainerStatus status : containers) { final ResourceID id = new ResourceID(status.getContainerId().toString()); // check if this is a failed container or a completed container if (containersBeingReturned.remove(status.getContainerId()) != null) { // regular completed container that we released LOG.info("Container {} completed successfully with diagnostics: {}", id, status.getDiagnostics()); } else { // failed container, either at startup, or running final String exitStatus; switch (status.getExitStatus()) { case -103: exitStatus = "Vmem limit exceeded (-103)"; break; case -104: exitStatus = "Pmem limit exceeded (-104)"; break; default: exitStatus = String.valueOf(status.getExitStatus()); } final YarnContainerInLaunch launched = containersInLaunch.remove(id); if (launched != null) { LOG.info("Container {} failed, with a TaskManager in launch or registration. " + "Exit status: {}", id, exitStatus); // we will trigger re-acquiring new containers at the end } else { // failed registered worker LOG.info("Container {} failed. Exit status: {}", id, exitStatus); // notify the generic logic, which notifies the JobManager, etc. notifyWorkerFailed(id, "Container " + id + " failed. 
" + "Exit status: {}" + exitStatus); } // general failure logging failedContainersSoFar++; String diagMessage = String.format("Diagnostics for container %s in state %s : " + "exitStatus=%s diagnostics=%s", id, status.getState(), exitStatus, status.getDiagnostics()); sendInfoMessage(diagMessage); LOG.info(diagMessage); LOG.info("Total number of failed containers so far: " + failedContainersSoFar); // maxFailedContainers == -1 is infinite number of retries. if (maxFailedContainers >= 0 && failedContainersSoFar > maxFailedContainers) { String msg = "Stopping YARN session because the number of failed containers (" + failedContainersSoFar + ") exceeded the maximum failed containers (" + maxFailedContainers + "). This number is controlled by the '" + YarnConfigOptions.MAX_FAILED_CONTAINERS.key() + "' configuration setting. " + "By default its the number of requested containers."; LOG.error(msg); self().tell(decorateMessage(new StopCluster(ApplicationStatus.FAILED, msg)), ActorRef.noSender()); // no need to do anything else return; } } } updateProgress(); // in case failed containers were among the finished containers, make // sure we re-examine and request new ones triggerCheckWorkers(); } // ------------------------------------------------------------------------ // Utilities // ------------------------------------------------------------------------ /** * Extracts a unique ResourceID from the Yarn Container. * @param container The Yarn container * @return The ResourceID for the container */ static ResourceID extractResourceID(Container container) { return new ResourceID(container.getId().toString()); } private void updateProgress() { final int required = getDesignatedWorkerPoolSize(); final int available = getNumberOfStartedTaskManagers() + containersInLaunch.size(); final float progress = (required <= 0) ? 
1.0f : available / (float) required;

		if (resourceManagerCallbackHandler != null) {
			resourceManagerCallbackHandler.setCurrentProgress(progress);
		}
	}

	/**
	 * Converts a Flink application status enum to a YARN application status enum.
	 * @param status The Flink application status.
	 * @return The corresponding YARN application status.
	 */
	private FinalApplicationStatus getYarnStatus(ApplicationStatus status) {
		if (status == null) {
			return FinalApplicationStatus.UNDEFINED;
		} else {
			switch (status) {
				case SUCCEEDED:
					return FinalApplicationStatus.SUCCEEDED;
				case FAILED:
					return FinalApplicationStatus.FAILED;
				case CANCELED:
					// YARN has no "canceled" status; KILLED is the closest match.
					return FinalApplicationStatus.KILLED;
				default:
					return FinalApplicationStatus.UNDEFINED;
			}
		}
	}

	/**
	 * Looks up the getContainersFromPreviousAttempts method on RegisterApplicationMasterResponse
	 * once and saves the method. This saves computation time on the sequent calls.
	 */
	private static class RegisterApplicationMasterResponseReflector {

		private Logger logger;
		// Null on Hadoop versions that lack getContainersFromPreviousAttempts().
		private Method method;

		public RegisterApplicationMasterResponseReflector(Logger log) {
			this.logger = log;
			try {
				method = RegisterApplicationMasterResponse.class
					.getMethod("getContainersFromPreviousAttempts");
			} catch (NoSuchMethodException e) {
				// that happens in earlier Hadoop versions
				logger.info("Cannot reconnect to previously allocated containers. " +
					"This YARN version does not support 'getContainersFromPreviousAttempts()'");
			}
		}

		/**
		 * Checks if a YARN application still has registered containers. If the application master
		 * registered at the ResourceManager for the first time, this list will be empty. If the
		 * application master registered a repeated time (after a failure and recovery), this list
		 * will contain the containers that were previously allocated.
		 *
		 * @param response The response object from the registration at the ResourceManager.
		 * @return A list with containers from previous application attempt.
		 */
		private List<Container> getContainersFromPreviousAttempts(RegisterApplicationMasterResponse response) {
			if (method != null && response != null) {
				try {
					@SuppressWarnings("unchecked")
					List<Container> list = (List<Container>) method.invoke(response);
					if (list != null && !list.isEmpty()) {
						return list;
					}
				} catch (Throwable t) {
					logger.error("Error invoking 'getContainersFromPreviousAttempts()'", t);
				}
			}
			return Collections.emptyList();
		}
	}

	// ------------------------------------------------------------------------
	//  Actor props factory
	// ------------------------------------------------------------------------

	/**
	 * Creates the props needed to instantiate this actor.
	 *
	 * <p>Rather than extracting and validating parameters in the constructor, this factory method takes
	 * care of that. That way, errors occur synchronously, and are not swallowed simply in a
	 * failed asynchronous attempt to start the actor.
	 *
	 * @param actorClass
	 *             The actor class, to allow overriding this actor with subclasses for testing.
	 * @param flinkConfig
	 *             The Flink configuration object.
	 * @param yarnConfig
	 *             The YARN configuration object.
	 * @param applicationMasterHostName
	 *             The hostname where this application master actor runs.
	 * @param webFrontendURL
	 *             The URL of the tracking web frontend.
	 * @param taskManagerParameters
	 *             The parameters for launching TaskManager containers.
	 * @param taskManagerLaunchContext
	 *             The parameters for launching the TaskManager processes in the TaskManager containers.
	 * @param numInitialTaskManagers
	 *             The initial number of TaskManagers to allocate.
	 * @param log
	 *             The logger to log to.
	 *
	 * @return The Props object to instantiate the YarnFlinkResourceManager actor.
	 */
	public static Props createActorProps(Class<? extends YarnFlinkResourceManager> actorClass,
			Configuration flinkConfig,
			YarnConfiguration yarnConfig,
			LeaderRetrievalService leaderRetrievalService,
			String applicationMasterHostName,
			String webFrontendURL,
			ContaineredTaskManagerParameters taskManagerParameters,
			ContainerLaunchContext taskManagerLaunchContext,
			int numInitialTaskManagers,
			Logger log) {

		// config value is in seconds; the actor works in milliseconds
		final int yarnHeartbeatIntervalMS = flinkConfig.getInteger(
			YarnConfigOptions.HEARTBEAT_DELAY_SECONDS) * 1000;

		final long yarnExpiryIntervalMS = yarnConfig.getLong(
			YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS,
			YarnConfiguration.DEFAULT_RM_AM_EXPIRY_INTERVAL_MS);

		if (yarnHeartbeatIntervalMS >= yarnExpiryIntervalMS) {
			log.warn("The heartbeat interval of the Flink Application master ({}) is greater " +
				"than YARN's expiry interval ({}). The application is likely to be killed by YARN.",
				yarnHeartbeatIntervalMS, yarnExpiryIntervalMS);
		}

		final int maxFailedContainers = flinkConfig.getInteger(
			YarnConfigOptions.MAX_FAILED_CONTAINERS.key(), numInitialTaskManagers);
		if (maxFailedContainers >= 0) {
			log.info("YARN application tolerates {} failed TaskManager containers before giving up",
				maxFailedContainers);
		}

		return Props.create(actorClass,
			flinkConfig,
			yarnConfig,
			leaderRetrievalService,
			applicationMasterHostName,
			webFrontendURL,
			taskManagerParameters,
			taskManagerLaunchContext,
			yarnHeartbeatIntervalMS,
			maxFailedContainers,
			numInitialTaskManagers);
	}
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.chime.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for adding a member to a channel.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/chime-2018-05-01/CreateChannelMembership" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateChannelMembershipRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The ARN of the channel to which you're adding users. */
    private String channelArn;

    /** The ARN of the member you want to add to the channel. */
    private String memberArn;

    /**
     * The membership type of a user, {@code DEFAULT} or {@code HIDDEN}. Default members are always returned as part of
     * {@code ListChannelMemberships}. Hidden members are only returned if the type filter in
     * {@code ListChannelMemberships} equals {@code HIDDEN}. Otherwise hidden members are not returned. This is only
     * supported by moderators.
     */
    private String type;

    /** The {@code AppInstanceUserArn} of the user that makes the API call. */
    private String chimeBearer;

    /**
     * Sets the ARN of the channel to which you're adding users.
     *
     * @param channelArn
     *        The ARN of the channel to which you're adding users.
     */
    public void setChannelArn(String channelArn) {
        this.channelArn = channelArn;
    }

    /**
     * Returns the ARN of the channel to which you're adding users.
     *
     * @return The ARN of the channel to which you're adding users.
     */
    public String getChannelArn() {
        return this.channelArn;
    }

    /**
     * Fluent variant of {@link #setChannelArn(String)}.
     *
     * @param channelArn
     *        The ARN of the channel to which you're adding users.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateChannelMembershipRequest withChannelArn(String channelArn) {
        setChannelArn(channelArn);
        return this;
    }

    /**
     * Sets the ARN of the member you want to add to the channel.
     *
     * @param memberArn
     *        The ARN of the member you want to add to the channel.
     */
    public void setMemberArn(String memberArn) {
        this.memberArn = memberArn;
    }

    /**
     * Returns the ARN of the member you want to add to the channel.
     *
     * @return The ARN of the member you want to add to the channel.
     */
    public String getMemberArn() {
        return this.memberArn;
    }

    /**
     * Fluent variant of {@link #setMemberArn(String)}.
     *
     * @param memberArn
     *        The ARN of the member you want to add to the channel.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateChannelMembershipRequest withMemberArn(String memberArn) {
        setMemberArn(memberArn);
        return this;
    }

    /**
     * Sets the membership type of a user, {@code DEFAULT} or {@code HIDDEN}.
     *
     * @param type
     *        The membership type. Default members are always returned as part of
     *        {@code ListChannelMemberships}; hidden members only when the type filter equals {@code HIDDEN}.
     *        This is only supported by moderators.
     * @see ChannelMembershipType
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * Returns the membership type of a user, {@code DEFAULT} or {@code HIDDEN}.
     *
     * @return The membership type. Default members are always returned as part of
     *         {@code ListChannelMemberships}; hidden members only when the type filter equals {@code HIDDEN}.
     * @see ChannelMembershipType
     */
    public String getType() {
        return this.type;
    }

    /**
     * Fluent variant of {@link #setType(String)}.
     *
     * @param type
     *        The membership type, {@code DEFAULT} or {@code HIDDEN}.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ChannelMembershipType
     */
    public CreateChannelMembershipRequest withType(String type) {
        setType(type);
        return this;
    }

    /**
     * Fluent, enum-typed variant of {@link #setType(String)}; stores the enum's string form.
     *
     * @param type
     *        The membership type, {@code DEFAULT} or {@code HIDDEN}.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ChannelMembershipType
     */
    public CreateChannelMembershipRequest withType(ChannelMembershipType type) {
        this.type = type.toString();
        return this;
    }

    /**
     * Sets the {@code AppInstanceUserArn} of the user that makes the API call.
     *
     * @param chimeBearer
     *        The {@code AppInstanceUserArn} of the user that makes the API call.
     */
    public void setChimeBearer(String chimeBearer) {
        this.chimeBearer = chimeBearer;
    }

    /**
     * Returns the {@code AppInstanceUserArn} of the user that makes the API call.
     *
     * @return The {@code AppInstanceUserArn} of the user that makes the API call.
     */
    public String getChimeBearer() {
        return this.chimeBearer;
    }

    /**
     * Fluent variant of {@link #setChimeBearer(String)}.
     *
     * @param chimeBearer
     *        The {@code AppInstanceUserArn} of the user that makes the API call.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateChannelMembershipRequest withChimeBearer(String chimeBearer) {
        setChimeBearer(chimeBearer);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("{");
        if (getChannelArn() != null) {
            out.append("ChannelArn: ").append(getChannelArn()).append(",");
        }
        if (getMemberArn() != null) {
            out.append("MemberArn: ").append(getMemberArn()).append(",");
        }
        if (getType() != null) {
            out.append("Type: ").append(getType()).append(",");
        }
        if (getChimeBearer() != null) {
            out.append("ChimeBearer: ").append(getChimeBearer());
        }
        return out.append("}").toString();
    }

    /** Null-safe equality on a single field, matching the generated null-xor pattern. */
    private static boolean sameField(String a, String b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof CreateChannelMembershipRequest)) {
            return false;
        }
        CreateChannelMembershipRequest other = (CreateChannelMembershipRequest) obj;
        return sameField(getChannelArn(), other.getChannelArn())
                && sameField(getMemberArn(), other.getMemberArn())
                && sameField(getType(), other.getType())
                && sameField(getChimeBearer(), other.getChimeBearer());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        // same accumulation (1, then *31 + fieldHash with null -> 0) as the generated unrolled form
        for (String field : new String[] { getChannelArn(), getMemberArn(), getType(), getChimeBearer() }) {
            hashCode = prime * hashCode + ((field == null) ? 0 : field.hashCode());
        }
        return hashCode;
    }

    @Override
    public CreateChannelMembershipRequest clone() {
        return (CreateChannelMembershipRequest) super.clone();
    }

}
/**
 * <copyright>
 * </copyright>
 *
 * $Id$
 */
package net.opengis.citygml.provider;

import java.util.Collection;
import java.util.List;

import net.opengis.citygml.CitygmlFactory;
import net.opengis.citygml.CitygmlPackage;
import net.opengis.citygml.DocumentRoot;
import net.opengis.citygml.appearance.AppearanceFactory;
import net.opengis.citygml.building.provider.CityGMLEditPlugin;
import net.opengis.gml.GmlFactory;

import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ItemProviderAdapter;
import org.eclipse.emf.edit.provider.ViewerNotification;

/**
 * This is the item provider adapter for a {@link net.opengis.citygml.DocumentRoot} object.
 * It supplies labels, icons, children and creatable-child descriptors for the EMF.Edit UI.
 * NOTE(review): EMF-generated code; the {@code @generated} markers must stay intact or the
 * code generator will overwrite hand edits on regeneration.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public class DocumentRootItemProvider
	extends ItemProviderAdapter
	implements
		IEditingDomainItemProvider,
		IStructuredItemContentProvider,
		ITreeItemContentProvider,
		IItemLabelProvider,
		IItemPropertySource {

	/**
	 * This constructs an instance from a factory and a notifier.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DocumentRootItemProvider(AdapterFactory adapterFactory) {
		super(adapterFactory);
	}

	/**
	 * This returns the property descriptors for the adapted class.
	 * The list is built lazily on first call and cached in {@code itemPropertyDescriptors};
	 * DocumentRoot itself contributes no extra descriptors beyond the inherited ones.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
		if (itemPropertyDescriptors == null) {
			super.getPropertyDescriptors(object);
		}
		return itemPropertyDescriptors;
	}

	/**
	 * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
	 * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
	 * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
	 * The features are added in a fixed order, which determines their display order in the tree.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
		if (childrenFeatures == null) {
			super.getChildrenFeatures(object);
			childrenFeatures.add(CitygmlPackage.Literals.DOCUMENT_ROOT__SITE);
			childrenFeatures.add(CitygmlPackage.Literals.DOCUMENT_ROOT__CITY_OBJECT);
			childrenFeatures.add(CitygmlPackage.Literals.DOCUMENT_ROOT__GENERIC_APPLICATION_PROPERTY_OF_ADDRESS);
			childrenFeatures.add(CitygmlPackage.Literals.DOCUMENT_ROOT__GENERIC_APPLICATION_PROPERTY_OF_CITY_MODEL);
			childrenFeatures.add(CitygmlPackage.Literals.DOCUMENT_ROOT__GENERIC_APPLICATION_PROPERTY_OF_CITY_OBJECT);
			childrenFeatures.add(CitygmlPackage.Literals.DOCUMENT_ROOT__GENERIC_APPLICATION_PROPERTY_OF_SITE);
			childrenFeatures.add(CitygmlPackage.Literals.DOCUMENT_ROOT__ADDRESS);
			childrenFeatures.add(CitygmlPackage.Literals.DOCUMENT_ROOT__CITY_MODEL);
			childrenFeatures.add(CitygmlPackage.Literals.DOCUMENT_ROOT__CITY_OBJECT_MEMBER);
			childrenFeatures.add(CitygmlPackage.Literals.DOCUMENT_ROOT__IMPLICIT_GEOMETRY);
		}
		return childrenFeatures;
	}

	/**
	 * Delegates child-to-feature resolution to the superclass.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EStructuralFeature getChildFeature(Object object, Object child) {
		// Check the type of the specified child object and return the proper feature to use for
		// adding (see {@link AddCommand}) it as a child.
		return super.getChildFeature(object, child);
	}

	/**
	 * This returns DocumentRoot.gif.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object getImage(Object object) {
		return overlayImage(object, getResourceLocator().getImage("full/obj16/DocumentRoot"));
	}

	/**
	 * This returns the label text for the adapted class.
	 * The label comes from the edit plugin's resource bundle, not from model state.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String getText(Object object) {
		return getString("_UI_DocumentRoot_type");
	}

	/**
	 * This handles model notifications by calling {@link #updateChildren} to update any cached
	 * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
	 * The listed features refresh content only (content=true, labels=false).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void notifyChanged(Notification notification) {
		updateChildren(notification);

		switch (notification.getFeatureID(DocumentRoot.class)) {
			case CitygmlPackage.DOCUMENT_ROOT__SITE:
			case CitygmlPackage.DOCUMENT_ROOT__CITY_OBJECT:
			case CitygmlPackage.DOCUMENT_ROOT__GENERIC_APPLICATION_PROPERTY_OF_ADDRESS:
			case CitygmlPackage.DOCUMENT_ROOT__GENERIC_APPLICATION_PROPERTY_OF_CITY_MODEL:
			case CitygmlPackage.DOCUMENT_ROOT__GENERIC_APPLICATION_PROPERTY_OF_CITY_OBJECT:
			case CitygmlPackage.DOCUMENT_ROOT__GENERIC_APPLICATION_PROPERTY_OF_SITE:
			case CitygmlPackage.DOCUMENT_ROOT__ADDRESS:
			case CitygmlPackage.DOCUMENT_ROOT__CITY_MODEL:
			case CitygmlPackage.DOCUMENT_ROOT__CITY_OBJECT_MEMBER:
			case CitygmlPackage.DOCUMENT_ROOT__IMPLICIT_GEOMETRY:
				fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
				return;
		}
		super.notifyChanged(notification);
	}

	/**
	 * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
	 * that can be created under this object.
	 * Note CITY_OBJECT_MEMBER accepts two child types (a GML feature property and an
	 * appearance property), so it contributes two descriptors.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
		super.collectNewChildDescriptors(newChildDescriptors, object);

		newChildDescriptors.add
			(createChildParameter
				(CitygmlPackage.Literals.DOCUMENT_ROOT__ADDRESS,
				 CitygmlFactory.eINSTANCE.createAddressType()));

		newChildDescriptors.add
			(createChildParameter
				(CitygmlPackage.Literals.DOCUMENT_ROOT__CITY_MODEL,
				 CitygmlFactory.eINSTANCE.createCityModelType()));

		newChildDescriptors.add
			(createChildParameter
				(CitygmlPackage.Literals.DOCUMENT_ROOT__CITY_OBJECT_MEMBER,
				 GmlFactory.eINSTANCE.createFeaturePropertyType()));

		newChildDescriptors.add
			(createChildParameter
				(CitygmlPackage.Literals.DOCUMENT_ROOT__CITY_OBJECT_MEMBER,
				 AppearanceFactory.eINSTANCE.createAppearancePropertyType()));

		newChildDescriptors.add
			(createChildParameter
				(CitygmlPackage.Literals.DOCUMENT_ROOT__IMPLICIT_GEOMETRY,
				 CitygmlFactory.eINSTANCE.createImplicitGeometryType()));
	}

	/**
	 * Return the resource locator for this item provider's resources.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public ResourceLocator getResourceLocator() {
		return CityGMLEditPlugin.INSTANCE;
	}

}
// Code generated by Wire protocol buffer compiler, do not edit.
// Source: squareup.proto2.alltypes.RepeatedPackedAndMap in collection_types.proto
// NOTE(review): generated message class; field tag numbers and the encode order
// are part of the wire format and must not be changed by hand.
package com.squareup.wire.proto2;

import com.squareup.wire.FieldEncoding;
import com.squareup.wire.Message;
import com.squareup.wire.ProtoAdapter;
import com.squareup.wire.ProtoReader;
import com.squareup.wire.ProtoWriter;
import com.squareup.wire.ReverseProtoWriter;
import com.squareup.wire.Syntax;
import com.squareup.wire.WireField;
import com.squareup.wire.internal.Internal;
import java.io.IOException;
import java.lang.Integer;
import java.lang.Object;
import java.lang.Override;
import java.lang.String;
import java.lang.StringBuilder;
import java.util.List;
import java.util.Map;
import okio.ByteString;

/**
 * Immutable message holding a repeated int32 (tag 201), a packed int32 (tag 301) and an
 * int32-to-int32 map (tag 401). Mutation goes through {@link Builder}.
 */
public final class RepeatedPackedAndMap extends Message<RepeatedPackedAndMap, RepeatedPackedAndMap.Builder> {
  public static final ProtoAdapter<RepeatedPackedAndMap> ADAPTER = new ProtoAdapter_RepeatedPackedAndMap();

  private static final long serialVersionUID = 0L;

  @WireField(
      tag = 201,
      adapter = "com.squareup.wire.ProtoAdapter#INT32",
      label = WireField.Label.REPEATED
  )
  public final List<Integer> rep_int32;

  @WireField(
      tag = 301,
      adapter = "com.squareup.wire.ProtoAdapter#INT32",
      label = WireField.Label.PACKED
  )
  public final List<Integer> pack_int32;

  @WireField(
      tag = 401,
      keyAdapter = "com.squareup.wire.ProtoAdapter#INT32",
      adapter = "com.squareup.wire.ProtoAdapter#INT32"
  )
  public final Map<Integer, Integer> map_int32_int32;

  /** Convenience constructor with no unknown fields. */
  public RepeatedPackedAndMap(List<Integer> rep_int32, List<Integer> pack_int32,
      Map<Integer, Integer> map_int32_int32) {
    this(rep_int32, pack_int32, map_int32_int32, ByteString.EMPTY);
  }

  /**
   * Full constructor. All collections are defensively copied into immutable views, so the
   * instance does not alias caller-owned collections.
   */
  public RepeatedPackedAndMap(List<Integer> rep_int32, List<Integer> pack_int32,
      Map<Integer, Integer> map_int32_int32, ByteString unknownFields) {
    super(ADAPTER, unknownFields);
    this.rep_int32 = Internal.immutableCopyOf("rep_int32", rep_int32);
    this.pack_int32 = Internal.immutableCopyOf("pack_int32", pack_int32);
    this.map_int32_int32 = Internal.immutableCopyOf("map_int32_int32", map_int32_int32);
  }

  /** Returns a mutable builder pre-populated with copies of this message's fields. */
  @Override
  public Builder newBuilder() {
    Builder builder = new Builder();
    builder.rep_int32 = Internal.copyOf(rep_int32);
    builder.pack_int32 = Internal.copyOf(pack_int32);
    builder.map_int32_int32 = Internal.copyOf(map_int32_int32);
    builder.addUnknownFields(unknownFields());
    return builder;
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) return true;
    if (!(other instanceof RepeatedPackedAndMap)) return false;
    RepeatedPackedAndMap o = (RepeatedPackedAndMap) other;
    // unknown fields participate in equality, like every declared field
    return unknownFields().equals(o.unknownFields())
        && rep_int32.equals(o.rep_int32)
        && pack_int32.equals(o.pack_int32)
        && map_int32_int32.equals(o.map_int32_int32);
  }

  @Override
  public int hashCode() {
    // hash is cached in Message.hashCode; 0 is the "not yet computed" sentinel
    int result = super.hashCode;
    if (result == 0) {
      result = unknownFields().hashCode();
      result = result * 37 + rep_int32.hashCode();
      result = result * 37 + pack_int32.hashCode();
      result = result * 37 + map_int32_int32.hashCode();
      super.hashCode = result;
    }
    return result;
  }

  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder();
    if (!rep_int32.isEmpty()) builder.append(", rep_int32=").append(rep_int32);
    if (!pack_int32.isEmpty()) builder.append(", pack_int32=").append(pack_int32);
    if (!map_int32_int32.isEmpty()) builder.append(", map_int32_int32=").append(map_int32_int32);
    // replace the leading ", " of the first appended field with the type name
    return builder.replace(0, 2, "RepeatedPackedAndMap{").append('}').toString();
  }

  /** Mutable builder; collections start as fresh mutable instances. */
  public static final class Builder extends Message.Builder<RepeatedPackedAndMap, Builder> {
    public List<Integer> rep_int32;

    public List<Integer> pack_int32;

    public Map<Integer, Integer> map_int32_int32;

    public Builder() {
      rep_int32 = Internal.newMutableList();
      pack_int32 = Internal.newMutableList();
      map_int32_int32 = Internal.newMutableMap();
    }

    /** Replaces the repeated field; rejects null elements. */
    public Builder rep_int32(List<Integer> rep_int32) {
      Internal.checkElementsNotNull(rep_int32);
      this.rep_int32 = rep_int32;
      return this;
    }

    /** Replaces the packed field; rejects null elements. */
    public Builder pack_int32(List<Integer> pack_int32) {
      Internal.checkElementsNotNull(pack_int32);
      this.pack_int32 = pack_int32;
      return this;
    }

    /** Replaces the map field; rejects null keys/values. */
    public Builder map_int32_int32(Map<Integer, Integer> map_int32_int32) {
      Internal.checkElementsNotNull(map_int32_int32);
      this.map_int32_int32 = map_int32_int32;
      return this;
    }

    @Override
    public RepeatedPackedAndMap build() {
      return new RepeatedPackedAndMap(rep_int32, pack_int32, map_int32_int32, super.buildUnknownFields());
    }
  }

  /** Wire adapter: sizes, encodes and decodes RepeatedPackedAndMap on the wire. */
  private static final class ProtoAdapter_RepeatedPackedAndMap extends ProtoAdapter<RepeatedPackedAndMap> {
    // lazily created map adapter, memoized after first use (see map_int32_int32Adapter())
    private ProtoAdapter<Map<Integer, Integer>> map_int32_int32;

    public ProtoAdapter_RepeatedPackedAndMap() {
      super(FieldEncoding.LENGTH_DELIMITED, RepeatedPackedAndMap.class, "type.googleapis.com/squareup.proto2.alltypes.RepeatedPackedAndMap", Syntax.PROTO_2, null, "collection_types.proto");
    }

    @Override
    public int encodedSize(RepeatedPackedAndMap value) {
      int result = 0;
      result += ProtoAdapter.INT32.asRepeated().encodedSizeWithTag(201, value.rep_int32);
      result += ProtoAdapter.INT32.asPacked().encodedSizeWithTag(301, value.pack_int32);
      result += map_int32_int32Adapter().encodedSizeWithTag(401, value.map_int32_int32);
      result += value.unknownFields().size();
      return result;
    }

    @Override
    public void encode(ProtoWriter writer, RepeatedPackedAndMap value) throws IOException {
      // forward writer: fields in ascending tag order, unknown fields last
      ProtoAdapter.INT32.asRepeated().encodeWithTag(writer, 201, value.rep_int32);
      ProtoAdapter.INT32.asPacked().encodeWithTag(writer, 301, value.pack_int32);
      map_int32_int32Adapter().encodeWithTag(writer, 401, value.map_int32_int32);
      writer.writeBytes(value.unknownFields());
    }

    @Override
    public void encode(ReverseProtoWriter writer, RepeatedPackedAndMap value) throws IOException {
      // reverse writer: same output, so everything is emitted in the opposite order
      writer.writeBytes(value.unknownFields());
      map_int32_int32Adapter().encodeWithTag(writer, 401, value.map_int32_int32);
      ProtoAdapter.INT32.asPacked().encodeWithTag(writer, 301, value.pack_int32);
      ProtoAdapter.INT32.asRepeated().encodeWithTag(writer, 201, value.rep_int32);
    }

    @Override
    public RepeatedPackedAndMap decode(ProtoReader reader) throws IOException {
      Builder builder = new Builder();
      long token = reader.beginMessage();
      for (int tag; (tag = reader.nextTag()) != -1;) {
        switch (tag) {
          case 201: builder.rep_int32.add(ProtoAdapter.INT32.decode(reader)); break;
          case 301: builder.pack_int32.add(ProtoAdapter.INT32.decode(reader)); break;
          case 401: builder.map_int32_int32.putAll(map_int32_int32Adapter().decode(reader)); break;
          default: {
            // unrecognized tags are preserved as unknown fields, not dropped
            reader.readUnknownField(tag);
          }
        }
      }
      builder.addUnknownFields(reader.endMessageAndGetUnknownFields(token));
      return builder.build();
    }

    @Override
    public RepeatedPackedAndMap redact(RepeatedPackedAndMap value) {
      Builder builder = value.newBuilder();
      // no redactable fields in this message; only unknown fields are cleared
      builder.clearUnknownFields();
      return builder.build();
    }

    private ProtoAdapter<Map<Integer, Integer>> map_int32_int32Adapter() {
      ProtoAdapter<Map<Integer, Integer>> result = map_int32_int32;
      if (result == null) {
        result = ProtoAdapter.newMapAdapter(ProtoAdapter.INT32, ProtoAdapter.INT32);
        map_int32_int32 = result;
      }
      return result;
    }
  }
}
package com.pada.spider.tool.impl; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import com.pada.common.FileOperator; import com.pada.mydao.bean.Wp_terms; import org.jsoup.Jsoup; import org.jsoup.nodes.Document; import org.jsoup.nodes.Element; import org.jsoup.select.Elements; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.pada.common.ArrUtil; import com.pada.common.ReflectUtil; import com.pada.spider.annotation.SpiderKeyInject; import com.pada.spider.bean.JsoupExpress; import com.pada.spider.bean.SpiderPath; import com.pada.spider.domain.SpiderResult; import com.pada.spider.domain.SpiderStrategy; import com.pada.spider.domain.SpiderUrlInfo; import com.pada.spider.event.SpiderEvent; import com.pada.spider.event.SpiderProcessor; import com.pada.spider.tool.SpiderAnalyse; import org.springframework.web.util.HtmlUtils; /** * Created by eqyun on 2014/10/31. 
*/ @Component public class SpiderAnalyseImpl implements SpiderAnalyse { @Autowired private SpiderEvent spiderEvent; @Override public SpiderPath getSpiderPath(SpiderStrategy spiderStrategy, int depth) { SpiderPath[] spiderPaths = spiderStrategy.getSpiderPaths(); for (SpiderPath spiderPath : spiderPaths) { if (spiderPath.getDepth() == depth) return spiderPath; } return null; } @Override public SpiderPath getNextSpiderPath(SpiderStrategy spiderStrategy, int nowDepth) { SpiderPath[] spiderPaths = spiderStrategy.getSpiderPaths(); for (SpiderPath spiderPath : spiderPaths) { if (spiderPath.getDepth() == nowDepth + 1) return spiderPath; } return null; } @Override public SpiderPath getPreSpiderPath(SpiderStrategy spiderStrategy, int nowDepth) { SpiderPath[] spiderPaths = spiderStrategy.getSpiderPaths(); for (SpiderPath spiderPath : spiderPaths) { if (spiderPath.getDepth() == nowDepth - 1) return spiderPath; } return null; } @Override public String[] fetchJsoup(JsoupExpress jsoupExpress, Document body_copy) { String seekDom = jsoupExpress.getSeekDom(); String filterDom = jsoupExpress.getFilterDom(); String selectAttr = jsoupExpress.getSelectAttr(); if (filterDom != null) body_copy.select(filterDom).remove(); if (jsoupExpress.isRemoveStyle()) { body_copy.select("*").removeAttr("style"); body_copy.select("*").removeAttr("onclick"); body_copy.select("*").removeAttr("onfocus"); } Elements elements = body_copy.select(seekDom); List<String> values = Lists.newArrayList(); for (Element element : elements) { if (selectAttr != null) { String value = element.attr(selectAttr); values.add(value); } else { if (jsoupExpress.isHtml()) values.add(element.html()); else values.add(element.text()); } } return values.toArray(new String[values.size()]); } @Override public String[] fetchJsoup(JsoupExpress jsoupExpress, String html) { Document document = Jsoup.parse(html); return this.fetchJsoup(jsoupExpress, document); } @Override public List<String> fetchUrl(String html) { JsoupExpress 
jsoupExpress = new JsoupExpress("a", "href", null, true); String[] urls = fetchJsoup(jsoupExpress, html); return Lists.newArrayList(urls); } @Override public String[][] fetchUrlAndLinkDescription(String html, boolean isRss) { if (!isRss) { Document document = Jsoup.parse(html); Elements elements = document.select("a"); String[][] descriptionAndLinks = new String[elements.size()][2]; int i = 0; for (Element element : elements) { descriptionAndLinks[i][0] = element.text(); descriptionAndLinks[i][1] = element.attr("href"); i++; } return descriptionAndLinks; } else { List<String[]> _links = Lists.newArrayList(); String regex = "\\b(https?|ftp|file)://[-a-zA-Z0-9+&@#/%?=~_|!:,.;]*[-a-zA-Z0-9+&@#/%=~_|]"; Pattern pattern = Pattern.compile(regex); html = HtmlUtils.htmlUnescape(html); Matcher matcher = pattern.matcher(html); while (matcher.find()) { String[] _link = new String[]{"", matcher.group()}; _links.add(_link); } String[][] descriptionAndLinks = new String[_links.size()][2]; for (int i = 0; i < _links.size(); i++) { descriptionAndLinks[i] = _links.get(i); } return descriptionAndLinks; } } @Override public List<SpiderUrlInfo> getSpiderUrlInfoOldFamily( SpiderUrlInfo spiderUrlInfo) { List<SpiderUrlInfo> spiderUrlInfos = Lists.newArrayList(); String spiderUriInfoParentId = spiderUrlInfo.getParentId(); for (; ; ) { if (spiderUriInfoParentId.equals("0")) break; SpiderUrlInfo parentUrlInfo = spiderEvent .getSpiderUrlInfoById(spiderUriInfoParentId); if (parentUrlInfo == null) break; spiderUrlInfos.add(parentUrlInfo); spiderUriInfoParentId = parentUrlInfo.getParentId(); } return spiderUrlInfos; } @Override public boolean isEqu(JsoupExpress je1, JsoupExpress je2) { String[] js1 = new String[]{je1.getFilterDom(), je1.getSeekDom(), je1.getSelectAttr(), String.valueOf(je1.isHtml()), String.valueOf(je1.isRemoveStyle())}; String[] js2 = new String[]{je2.getFilterDom(), je2.getSeekDom(), je2.getSelectAttr(), String.valueOf(je2.isHtml()), String.valueOf(je2.isRemoveStyle())}; 
return ArrUtil.isEqu(js1, js2); } @Override public List<?> entityMapping(Class<?> entityType, SpiderResult spiderResult, SpiderProcessor spiderProcessorStrategy) { try { Map<Integer, Object> objectCache = Maps.newHashMap(); Map<String, Object[]> spiderResults = spiderResult .getSpiderResult(); Field[] fields = spiderProcessorStrategy.getClass().getDeclaredFields(); for (Field field : fields) { Annotation annotation = field .getAnnotation(SpiderKeyInject.class); if (annotation != null) { Class<?> clazz = ((SpiderKeyInject) annotation).clazz(); int sign = ((SpiderKeyInject) annotation).sign(); if (clazz.equals(entityType)) { if (!objectCache.containsKey(sign)) { objectCache.put(sign, Class.forName(entityType.getName()).newInstance()); } Object object = objectCache.get(sign); String key = ReflectUtil.getFieldValue(spiderProcessorStrategy, field.getName()); Object value = null; if ((spiderResults.get(key) == null || spiderResults.get(key).length == 0)) { if (((SpiderKeyInject) annotation).isNecessity()) { throw new Exception(key + " is not capture in the url:" + spiderResult.getUrl()); } else value = null; } else value = spiderResults.get(key)[0]; String keyMapping = ((SpiderKeyInject) annotation).mappingKey(); String valueMapping = ((SpiderKeyInject) annotation).mappingValue(); if (keyMapping.length() > 0) { ReflectUtil.setFieldValue(object, keyMapping, key); } if (valueMapping.length() > 0) { ReflectUtil.setFieldValue(object, valueMapping, value); } } } } return Lists.newArrayList(objectCache.values()); } catch (Exception e) { e.printStackTrace(); throw new RuntimeException(e); } } @Override public Map<String, Object[]> analyseOldNextCaptureInfos(String spiderUrlInfoId, int howOld) { SpiderUrlInfo spiderUrlInfo = spiderEvent.getSpiderUrlInfoById(spiderUrlInfoId); List<SpiderUrlInfo> oldFamily = getSpiderUrlInfoOldFamily(spiderUrlInfo); if (oldFamily != null && oldFamily.size() > 0) { int size = howOld - 1; SpiderUrlInfo spInfo = oldFamily.get(size); Map<String, 
Map<String, Object[]>> result = spInfo.getNextCaptureInfos(); return result.get(spiderUrlInfo.getUrl()); } return null; } @Override public Map<String, Object[]> analyseOldWebCaptureInfos( String spiderUrlInfoId, int howOld) { SpiderUrlInfo spiderUrlInfo = spiderEvent.getSpiderUrlInfoById(spiderUrlInfoId); List<SpiderUrlInfo> oldFamily = getSpiderUrlInfoOldFamily(spiderUrlInfo); if (oldFamily != null && oldFamily.size() > 0) { int size = howOld - 1; SpiderUrlInfo spInfo = oldFamily.get(size); Map<String, Object[]> result = spInfo.getCaptureInfos(); return result; } return null; } @Override public SpiderUrlInfo analyseOldSpiderUrlInfo(String spiderUrlInfoId, int howOld) { SpiderUrlInfo spiderUrlInfo = spiderEvent.getSpiderUrlInfoById(spiderUrlInfoId); List<SpiderUrlInfo> oldFamily = getSpiderUrlInfoOldFamily(spiderUrlInfo); if (oldFamily != null && oldFamily.size() > 0) { int size = howOld - 1; SpiderUrlInfo spInfo = oldFamily.get(size); return spInfo; } return null; } @Override public void completeImageUrl(Document document, String domain) { Elements elements = document.select("img"); if (elements != null && elements.size() > 0) for (Element element : elements) { String src = element.attr("src"); if (src.startsWith("/")) { element.attr("src", domain + src); } } } @Override public List<Wp_terms> generateDefaultTerms(Collection catOrTags) { if (catOrTags == null) return null; List<Wp_terms> wp_terms = Lists.newArrayList(); for (Object catOrTag : catOrTags) { wp_terms.add(new Wp_terms((String) catOrTag, null)); } return wp_terms; } }
/*
 * Copyright 2011-2013, by Vladimir Kostyukov and Contributors.
 *
 * This file is part of la4j project (http://la4j.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Contributor(s): Daniel Renshaw
 *                 Jakob Moellers
 *                 Maxim Samoylov
 *                 Miron Aseev
 */
package org.la4j.vector;

import java.io.Externalizable;
import java.text.NumberFormat;

import org.la4j.factory.Factory;
import org.la4j.iterator.VectorIterator;
import org.la4j.matrix.Matrix;
import org.la4j.vector.dense.DenseVector;
import org.la4j.vector.functor.VectorAccumulator;
import org.la4j.vector.functor.VectorFunction;
import org.la4j.vector.functor.VectorPredicate;
import org.la4j.vector.functor.VectorProcedure;
import org.la4j.operation.VectorMatrixOperation;
import org.la4j.operation.VectorOperation;
import org.la4j.operation.VectorVectorOperation;
import org.la4j.vector.sparse.SparseVector;

/**
 * The real vector interface.
 *
 * A vector represents a re-sizable array of double elements. Methods come in
 * pairs: a modern no-{@code Factory} form and a deprecated {@code Factory}-taking
 * form kept for backward compatibility.
 */
public interface Vector extends Externalizable, Iterable<Double> {

    /**
     * Gets the specified element of this vector.
     *
     * @param i element's index
     * @return the element of this vector
     */
    double get(int i);

    /**
     * Sets the specified element of this vector to the given {@code value}.
     *
     * @param i element's index
     * @param value element's new value
     */
    void set(int i, double value);

    /**
     * Sets all elements of this vector to the given {@code value}.
     *
     * @param value the element's new value
     */
    void setAll(double value);

    /**
     * Assigns all the elements of this vector to zero.
     *
     * @deprecated Use {@link Vector#setAll(double)} instead.
     */
    @Deprecated
    void clear();

    /**
     * Assigns all elements of this vector to the given {@code value}.
     *
     * @param value the element's new value
     * @deprecated Use {@link Factory#createConstantVector(int, double)} instead.
     */
    @Deprecated
    void assign(double value);

    /**
     * Returns the length of this vector.
     *
     * @return length of this vector
     */
    int length();

    /**
     * Adds the given {@code value} (v) to this vector (X).
     *
     * @param value the right hand value for addition
     * @return X + v
     */
    Vector add(double value);

    /**
     * Adds the given {@code value} (v) to this vector (X).
     *
     * @param value the right hand value for addition
     * @param factory the factory of the result vector
     * @return X + v
     * @deprecated use {@link Vector#add(double)} instead
     */
    @Deprecated
    Vector add(double value, Factory factory);

    /**
     * Adds the given vector (X) to this vector (Y).
     *
     * @param that the right hand vector for addition
     * @return X + Y
     */
    Vector add(Vector that);

    /**
     * Adds the given vector (X) to this vector (Y).
     *
     * @param that the right hand vector for addition
     * @param factory the factory of the result vector
     * @return X + Y
     * @deprecated use {@link Vector#add(Vector)} instead
     */
    @Deprecated
    Vector add(Vector that, Factory factory);

    /**
     * Multiplies this vector (X) by the given {@code value} (v).
     *
     * @param value the right hand value for multiplication
     * @return X * v
     */
    Vector multiply(double value);

    /**
     * Multiplies this vector (X) by the given {@code value} (v).
     *
     * @param value the right hand value for multiplication
     * @param factory the factory of the result vector
     * @return X * v
     * @deprecated use {@link Vector#multiply(double)} instead
     */
    @Deprecated
    Vector multiply(double value, Factory factory);

    /**
     * Calculates the Hadamard (element-wise) product of this vector and {@code that}.
     *
     * @param that the right hand vector for the Hadamard product
     * @return the Hadamard product of two vectors
     */
    Vector hadamardProduct(Vector that);

    /**
     * Calculates the Hadamard (element-wise) product of this vector and {@code that}.
     *
     * @param that the right hand vector for the Hadamard product
     * @param factory the factory of the result vector
     * @return the Hadamard product of two vectors
     * @deprecated use {@link Vector#hadamardProduct(Vector)} instead
     */
    @Deprecated
    Vector hadamardProduct(Vector that, Factory factory);

    /**
     * Multiplies this vector (X) by the given matrix (A).
     *
     * @param that the right hand matrix for multiplication
     * @return X * A
     */
    Vector multiply(Matrix that);

    /**
     * Multiplies this vector (X) by the given matrix (A).
     *
     * @param that the right hand matrix for multiplication
     * @param factory the factory of the result vector
     * @return X * A
     * @deprecated use {@link Vector#multiply(Matrix)} instead
     */
    @Deprecated
    Vector multiply(Matrix that, Factory factory);

    /**
     * Subtracts the given {@code value} (v) from this vector (X).
     *
     * @param value the right hand value for subtraction
     * @return X - v
     */
    Vector subtract(double value);

    /**
     * Subtracts the given {@code value} (v) from this vector (X).
     *
     * @param value the right hand value for subtraction
     * @param factory the factory of the result vector
     * @return X - v
     * @deprecated use {@link Vector#subtract(double)} instead
     */
    @Deprecated
    Vector subtract(double value, Factory factory);

    /**
     * Subtracts the given vector (Y) from this vector (X).
     *
     * @param that the right hand vector for subtraction
     * @return X - Y
     */
    Vector subtract(Vector that);

    /**
     * Subtracts the given vector (Y) from this vector (X).
     *
     * @param that the right hand vector for subtraction
     * @param factory the factory of the result vector
     * @return X - Y
     * @deprecated use {@link Vector#subtract(Vector)} instead
     */
    @Deprecated
    Vector subtract(Vector that, Factory factory);

    /**
     * Divides this vector (X) by the given {@code value} (v).
     *
     * @param value the right hand value for division
     * @return X / v
     */
    Vector divide(double value);

    /**
     * Divides this vector (X) by the given {@code value} (v).
     *
     * @param value the right hand value for division
     * @param factory the factory of the result vector
     * @return X / v
     * @deprecated use {@link Vector#divide(double)} instead
     */
    @Deprecated
    Vector divide(double value, Factory factory);

    /**
     * Multiplies up all elements of this vector.
     *
     * @return product of all elements of this vector
     */
    double product();

    /**
     * Sums up all elements of this vector.
     *
     * @return sum of all elements of this vector
     */
    double sum();

    /**
     * Calculates the inner product of this vector and {@code that}.
     *
     * @param that the right hand vector for the inner product
     * @return the inner product of two vectors
     */
    double innerProduct(Vector that);

    /**
     * Calculates the outer product of this vector and {@code that}.
     *
     * @param that the right hand vector for the outer product
     * @return the outer product of two vectors
     */
    Matrix outerProduct(Vector that);

    /**
     * Calculates the outer product of this vector and {@code that}.
     *
     * @param that the right hand vector for the outer product
     * @param factory the factory of the result matrix
     * @return the outer product of two vectors
     * @deprecated use {@link Vector#outerProduct(Vector)} instead
     */
    @Deprecated
    Matrix outerProduct(Vector that, Factory factory);

    /**
     * Calculates the Euclidean norm of this vector.
     *
     * @return the Euclidean norm
     */
    double norm();

    /**
     * Calculates the Euclidean norm of this vector.
     *
     * @return the Euclidean norm
     */
    double euclideanNorm();

    /**
     * Calculates the Manhattan norm of this vector.
     *
     * @return the Manhattan norm
     */
    double manhattanNorm();

    /**
     * Calculates the Infinity norm of this vector.
     *
     * @return the Infinity norm
     */
    double infinityNorm();

    /**
     * Swaps the specified elements of this vector.
     *
     * @param i element's index
     * @param j element's index
     * @deprecated Use {@link Vector#swapElements(int, int)} instead.
     */
    @Deprecated
    void swap(int i, int j);

    /**
     * Swaps the specified elements of this vector.
     *
     * @param i element's index
     * @param j element's index
     */
    void swapElements(int i, int j);

    /**
     * Creates a blank (an empty vector with the same length) copy of this vector.
     *
     * @return blank vector
     */
    Vector blank();

    /**
     * Creates a blank (an empty vector) copy of this vector with the given {@code length}.
     *
     * @param length the length of the blank vector
     * @return blank vector
     */
    Vector blankOfLength(int length);

    /**
     * Creates a blank (an empty vector with the same length) copy of this vector.
     *
     * @param factory the factory of the result vector
     * @return blank vector
     * @deprecated use {@link Vector#blank()} instead
     */
    @Deprecated
    Vector blank(Factory factory);

    /**
     * Copies this vector.
     *
     * @return the copy of this vector
     */
    Vector copy();

    /**
     * Copies this vector.
     *
     * @param factory the factory of the result vector
     * @return the copy of this vector
     * @deprecated use {@link Vector#copy()} instead
     */
    @Deprecated
    Vector copy(Factory factory);

    /**
     * Copies this vector into a new vector with the specified {@code length}.
     *
     * @param length the length of the new vector
     * @return the copy of this vector with the new length
     * @deprecated use {@link Vector#copyOfLength(int)} instead
     */
    @Deprecated
    Vector resize(int length);

    /**
     * Copies this vector into a new vector with the specified {@code length}.
     *
     * @param length the length of the new vector
     * @param factory the factory of the result vector
     * @return the copy of this vector with the new length
     * @deprecated use {@link Vector#copyOfLength(int)} instead
     */
    @Deprecated
    Vector resize(int length, Factory factory);

    /**
     * Copies this vector into a new vector with the specified {@code length}.
     *
     * @param length the length of the new vector
     * @return the copy of this vector with the new length
     */
    Vector copyOfLength(int length);

    /**
     * Shuffles this vector.
     *
     * <p>
     * Copies this vector into a new vector that contains the same elements but with
     * the elements shuffled around (which might also result in the same vector,
     * since all outcomes are equally probable).
     * </p>
     *
     * @return the shuffled vector
     */
    Vector shuffle();

    /**
     * Shuffles this vector.
     *
     * Copies this vector into a new vector that contains the same elements but with
     * the elements shuffled around (which might also result in the same vector,
     * since all outcomes are equally probable).
     *
     * @param factory the factory of the result vector
     * @return the shuffled vector
     * @deprecated use {@link Vector#shuffle()} instead
     */
    @Deprecated
    Vector shuffle(Factory factory);

    /**
     * Retrieves the specified sub-vector of this vector, given by an interval of indices.
     *
     * @param from the beginning of the indices interval
     * @param until the ending of the indices interval
     * @return the sub-vector of this vector
     */
    Vector slice(int from, int until);

    /**
     * Retrieves the specified sub-vector of this vector, given by an interval of indices.
     *
     * @param from the beginning of the indices interval
     * @param until the ending of the indices interval
     * @param factory the factory of the result vector
     * @return the sub-vector of this vector
     * @deprecated use {@link Vector#slice(int, int)} instead
     */
    @Deprecated
    Vector slice(int from, int until, Factory factory);

    /**
     * Retrieves the sub-vector of this vector from index zero up to {@code until}.
     *
     * @param until the ending of the indices interval
     * @return the sub-vector of this vector
     */
    Vector sliceLeft(int until);

    /**
     * Retrieves the sub-vector of this vector from index zero up to {@code until}.
     *
     * @param until the ending of the indices interval
     * @param factory the factory of the result vector
     * @return the sub-vector of this vector
     * @deprecated use {@link Vector#sliceLeft(int)} instead
     */
    @Deprecated
    Vector sliceLeft(int until, Factory factory);

    /**
     * Retrieves the sub-vector of this vector from {@code from} up to the vector's length.
     *
     * @param from the beginning of the indices interval
     * @return the sub-vector of this vector
     */
    Vector sliceRight(int from);

    /**
     * Retrieves the sub-vector of this vector from {@code from} up to the vector's length.
     *
     * @param from the beginning of the indices interval
     * @param factory the factory of the result vector
     * @return the sub-vector of this vector
     * @deprecated use {@link Vector#sliceRight(int)} instead
     */
    @Deprecated
    Vector sliceRight(int from, Factory factory);

    /**
     * Returns a new vector with the selected elements.
     *
     * @param indices the array of indices
     * @return the new vector with the selected elements
     */
    Vector select(int[] indices);

    /**
     * Returns a new vector with the selected elements.
     *
     * @param indices the array of indices
     * @param factory the factory of the result vector
     * @return the new vector with the selected elements
     * @deprecated use {@link Vector#select(int[])} instead
     */
    @Deprecated
    Vector select(int[] indices, Factory factory);

    /**
     * Applies the given {@code procedure} to each element of this vector.
     *
     * @param procedure the vector procedure
     */
    void each(VectorProcedure procedure);

    /**
     * Searches for the maximum value of the elements of this vector.
     *
     * @return the maximum value of this vector
     */
    double max();

    /**
     * Searches for the minimum value of the elements of this vector.
     *
     * @return the minimum value of this vector
     */
    double min();

    /**
     * Builds a new vector by applying the given {@code function} to each element
     * of this vector.
     *
     * @param function the vector function
     * @return the transformed vector
     */
    Vector transform(VectorFunction function);

    /**
     * Builds a new vector by applying the given {@code function} to each element
     * of this vector.
     *
     * @param function the vector function
     * @param factory the factory of the result vector
     * @return the transformed vector
     * @deprecated use {@link Vector#transform(VectorFunction)} instead
     */
    @Deprecated
    Vector transform(VectorFunction function, Factory factory);

    /**
     * Updates all elements of this vector by applying the given {@code function}.
     *
     * @param function the vector function
     */
    void update(VectorFunction function);

    /**
     * Updates the specified element of this vector by applying the given {@code function}.
     *
     * @param i element's index
     * @param function the vector function
     * @deprecated Use {@link Vector#updateAt(int, VectorFunction)} instead.
     */
    @Deprecated
    void update(int i, VectorFunction function);

    /**
     * Updates the specified element of this vector by applying the given {@code function}.
     *
     * @param i element's index
     * @param function the vector function
     */
    void updateAt(int i, VectorFunction function);

    /**
     * Folds all elements of this vector with the given {@code accumulator}.
     *
     * @param accumulator the vector accumulator
     * @return the accumulated value
     */
    double fold(VectorAccumulator accumulator);

    /**
     * Checks whether this vector complies with the given {@code predicate}.
     *
     * @param predicate the vector predicate
     * @return whether this vector complies with the predicate
     */
    boolean is(VectorPredicate predicate);

    /**
     * Checks whether this vector does NOT comply with the given {@code predicate}.
     *
     * @param predicate the vector predicate
     * @return whether this vector does not comply with the predicate
     */
    boolean non(VectorPredicate predicate);

    /**
     * Converts this vector to a matrix with only one row.
     *
     * @return the row matrix
     */
    Matrix toRowMatrix();

    /**
     * Converts this vector to a matrix with only one row.
     *
     * @param factory the factory of the result matrix
     * @return the row matrix
     * @deprecated use {@link Vector#toRowMatrix()} instead
     */
    @Deprecated
    Matrix toRowMatrix(Factory factory);

    /**
     * Converts this vector to a matrix with only one column.
     *
     * @return the column matrix
     */
    Matrix toColumnMatrix();

    /**
     * Converts this vector to a matrix with only one column.
     *
     * @param factory the factory of the result matrix
     * @return the column matrix
     * @deprecated use {@link Vector#toColumnMatrix()} instead
     */
    @Deprecated
    Matrix toColumnMatrix(Factory factory);

    /**
     * Converts this vector to a diagonal matrix.
     *
     * @return a diagonal matrix
     */
    Matrix toDiagonalMatrix();

    /**
     * Returns true when this vector is equal to the given {@code that} vector
     * within the given {@code precision}.
     *
     * @param that vector
     * @param precision given precision
     * @return whether this vector equals that
     */
    public boolean equals(Vector that, double precision);

    /**
     * Converts this vector into its string representation.
     *
     * @param formatter the number formatter
     * @return the vector converted to a string
     */
    String mkString(NumberFormat formatter);

    /**
     * Converts this vector into its string representation.
     *
     * @param formatter the number formatter
     * @param delimiter the element's delimiter
     * @return the vector converted to a string
     */
    String mkString(NumberFormat formatter, String delimiter);

    /**
     * Returns a vector iterator.
     *
     * @return a vector iterator
     */
    @Override
    VectorIterator iterator();

    /**
     * Pipes this vector to the given {@code operation}
     * (an operation that takes a vector and returns {@code T}).
     *
     * @param operation the vector operation
     * @param <T> the result type
     * @return the result of the operation applied to this vector
     */
    <T> T apply(VectorOperation<T> operation);

    /**
     * Pipes this vector to the given {@code operation}
     * (an operation that takes two vectors and returns {@code T}).
     *
     * @param operation the vector-vector operation
     * @param <T> the result type
     * @param that the right hand vector for the given operation
     * @return the result of the operation applied to this and {@code that} vector
     */
    <T> T apply(VectorVectorOperation<T> operation, Vector that);

    /**
     * Pipes this vector to the given {@code operation}
     * (an operation that takes a vector and a matrix and returns {@code T}).
     *
     * @param operation the vector-matrix operation
     * @param <T> the result type
     * @param that the right hand matrix for the given operation
     * @return the result of the operation applied to this vector and {@code that} matrix
     */
    <T> T apply(VectorMatrixOperation<T> operation, Matrix that);

    /**
     * @return the factory of this vector
     * @deprecated factories are being phased out; see the no-Factory method variants
     */
    @Deprecated
    Factory factory();

    /**
     * Converts this vector using the given {@code factory}.
     *
     * @param factory the factory that creates an output vector
     * @param <T> type of the result vector
     * @return a converted vector
     */
    <T extends Vector> T to(VectorFactory<T> factory);

    /**
     * Converts this vector into a {@link org.la4j.vector.dense.DenseVector}.
     *
     * @return a dense vector
     */
    DenseVector toDenseVector();

    /**
     * Converts this vector into a {@link org.la4j.vector.sparse.SparseVector}.
     *
     * @return a sparse vector
     */
    SparseVector toSparseVector();
}
/**
 * Copyright (c) 2000-present Liferay, Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or modify it under
 * the terms of the GNU Lesser General Public License as published by the Free
 * Software Foundation; either version 2.1 of the License, or (at your option)
 * any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
 * details.
 */
package org.oep.ssomgt.service;

import com.liferay.portal.service.InvokableLocalService;

/**
 * Liferay Clp (class-loader-proxy) stub: every call is forwarded across the
 * plugin class-loader boundary via {@code InvokableLocalService.invokeMethod},
 * identified by a method-name string and a parameter-type-name array registered
 * in the constructor. Service Builder output — regenerate, do not hand-edit.
 * NOTE(review): this class continues past the end of this chunk; the trailing
 * declaration below is completed further down in the file.
 *
 * @author trungdk
 * @generated
 */
public class AppRole2EmployeeLocalServiceClp
	implements AppRole2EmployeeLocalService {
	// Registers the dispatch table: _methodNameN / _methodParameterTypesN pairs
	// identify each remote method by name plus fully-qualified parameter types.
	// (_methodName18 is skipped by the generator; that gap is intentional.)
	public AppRole2EmployeeLocalServiceClp(
		InvokableLocalService invokableLocalService) {
		_invokableLocalService = invokableLocalService;

		_methodName0 = "addAppRole2Employee";
		_methodParameterTypes0 = new String[] { "org.oep.ssomgt.model.AppRole2Employee" };
		_methodName1 = "createAppRole2Employee";
		_methodParameterTypes1 = new String[] { "long" };
		_methodName2 = "deleteAppRole2Employee";
		_methodParameterTypes2 = new String[] { "long" };
		_methodName3 = "deleteAppRole2Employee";
		_methodParameterTypes3 = new String[] { "org.oep.ssomgt.model.AppRole2Employee" };
		_methodName4 = "dynamicQuery";
		_methodParameterTypes4 = new String[] {  };
		_methodName5 = "dynamicQuery";
		_methodParameterTypes5 = new String[] { "com.liferay.portal.kernel.dao.orm.DynamicQuery" };
		_methodName6 = "dynamicQuery";
		_methodParameterTypes6 = new String[] {
				"com.liferay.portal.kernel.dao.orm.DynamicQuery", "int", "int"
			};
		_methodName7 = "dynamicQuery";
		_methodParameterTypes7 = new String[] {
				"com.liferay.portal.kernel.dao.orm.DynamicQuery", "int", "int",
				"com.liferay.portal.kernel.util.OrderByComparator"
			};
		_methodName8 = "dynamicQueryCount";
		_methodParameterTypes8 = new String[] {
				"com.liferay.portal.kernel.dao.orm.DynamicQuery"
			};
		_methodName9 = "dynamicQueryCount";
		_methodParameterTypes9 = new String[] {
				"com.liferay.portal.kernel.dao.orm.DynamicQuery",
				"com.liferay.portal.kernel.dao.orm.Projection"
			};
		_methodName10 = "fetchAppRole2Employee";
		_methodParameterTypes10 = new String[] { "long" };
		_methodName11 = "getAppRole2Employee";
		_methodParameterTypes11 = new String[] { "long" };
		_methodName12 = "getPersistedModel";
		_methodParameterTypes12 = new String[] { "java.io.Serializable" };
		_methodName13 = "getAppRole2Employees";
		_methodParameterTypes13 = new String[] { "int", "int" };
		_methodName14 = "getAppRole2EmployeesCount";
		_methodParameterTypes14 = new String[] {  };
		_methodName15 = "updateAppRole2Employee";
		_methodParameterTypes15 = new String[] { "org.oep.ssomgt.model.AppRole2Employee" };
		_methodName16 = "getBeanIdentifier";
		_methodParameterTypes16 = new String[] {  };
		_methodName17 = "setBeanIdentifier";
		_methodParameterTypes17 = new String[] { "java.lang.String" };
		_methodName19 = "addAppRole2Employee";
		_methodParameterTypes19 = new String[] {
				"long", "long", "com.liferay.portal.service.ServiceContext"
			};
		_methodName20 = "updateAppRole2Employee";
		_methodParameterTypes20 = new String[] {
				"long", "long", "long",
				"com.liferay.portal.service.ServiceContext"
			};
		_methodName21 = "updateAppRole2Employee";
		_methodParameterTypes21 = new String[] {
				"org.oep.ssomgt.model.AppRole2Employee",
				"com.liferay.portal.service.ServiceContext"
			};
		_methodName22 = "removeAppRole2Employee";
		_methodParameterTypes22 = new String[] { "org.oep.ssomgt.model.AppRole2Employee" };
		_methodName23 = "removeAppRole2Employee";
		_methodParameterTypes23 = new String[] { "long" };
		_methodName24 = "countByWorkingUnit";
		_methodParameterTypes24 = new String[] {
				"long", "com.liferay.portal.service.ServiceContext"
			};
		_methodName25 = "findByWorkingUnit";
		_methodParameterTypes25 = new String[] {
				"long", "int", "int",
				"com.liferay.portal.service.ServiceContext"
			};
		_methodName26 = "countByAppRoleWorkingUnit";
		_methodParameterTypes26 = new String[] {
				"long", "long", "com.liferay.portal.service.ServiceContext"
			};
		_methodName27 = "findByAppRoleWorkingUnit";
		_methodParameterTypes27 = new String[] {
				"long", "long", "int", "int",
				"com.liferay.portal.service.ServiceContext"
			};
		_methodName28 = "findRoleByEmployee";
		_methodParameterTypes28 = new String[] { "long", "long" };
		_methodName29 = "findByArrayOfAppRole";
		_methodParameterTypes29 = new String[] {
				"long[][]", "com.liferay.portal.service.ServiceContext"
			};
	}

	// Each proxy method below follows the same generated pattern: translate the
	// arguments for the target class loader, invoke by name, translate any
	// Throwable back, rethrow the declared checked exceptions and runtime
	// exceptions, and wrap anything else in a RuntimeException.

	@Override
	public org.oep.ssomgt.model.AppRole2Employee addAppRole2Employee(
		org.oep.ssomgt.model.AppRole2Employee appRole2Employee)
		throws com.liferay.portal.kernel.exception.SystemException {
		Object returnObj = null;

		try {
			returnObj = _invokableLocalService.invokeMethod(_methodName0,
					_methodParameterTypes0,
					new Object[] { ClpSerializer.translateInput(
							appRole2Employee) });
		}
		catch (Throwable t) {
			t = ClpSerializer.translateThrowable(t);

			if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
				throw (com.liferay.portal.kernel.exception.SystemException)t;
			}

			if (t instanceof RuntimeException) {
				throw (RuntimeException)t;
			}
			else {
				throw new RuntimeException(t.getClass().getName() +
					" is not a valid exception");
			}
		}

		return (org.oep.ssomgt.model.AppRole2Employee)ClpSerializer.translateOutput(returnObj);
	}

	@Override
	public org.oep.ssomgt.model.AppRole2Employee createAppRole2Employee(
		long appRole2EmployeeId) {
		Object returnObj = null;

		try {
			returnObj = _invokableLocalService.invokeMethod(_methodName1,
					_methodParameterTypes1,
					new Object[] { appRole2EmployeeId });
		}
		catch (Throwable t) {
			t = ClpSerializer.translateThrowable(t);

			if (t instanceof RuntimeException) {
				throw (RuntimeException)t;
			}
			else {
				throw new RuntimeException(t.getClass().getName() +
					" is not a valid exception");
			}
		}

		return (org.oep.ssomgt.model.AppRole2Employee)ClpSerializer.translateOutput(returnObj);
	}

	@Override
	public org.oep.ssomgt.model.AppRole2Employee deleteAppRole2Employee(
		long appRole2EmployeeId)
		throws com.liferay.portal.kernel.exception.PortalException,
			com.liferay.portal.kernel.exception.SystemException {
		Object returnObj = null;

		try {
			returnObj = _invokableLocalService.invokeMethod(_methodName2,
					_methodParameterTypes2,
					new Object[] { appRole2EmployeeId });
		}
		catch (Throwable t) {
			t = ClpSerializer.translateThrowable(t);

			if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
				throw (com.liferay.portal.kernel.exception.PortalException)t;
			}

			if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
				throw (com.liferay.portal.kernel.exception.SystemException)t;
			}

			if (t instanceof RuntimeException) {
				throw (RuntimeException)t;
			}
			else {
				throw new RuntimeException(t.getClass().getName() +
					" is not a valid exception");
			}
		}

		return (org.oep.ssomgt.model.AppRole2Employee)ClpSerializer.translateOutput(returnObj);
	}

	@Override
	public org.oep.ssomgt.model.AppRole2Employee deleteAppRole2Employee(
		org.oep.ssomgt.model.AppRole2Employee appRole2Employee)
		throws com.liferay.portal.kernel.exception.SystemException {
		Object returnObj = null;

		try {
			returnObj = _invokableLocalService.invokeMethod(_methodName3,
					_methodParameterTypes3,
					new Object[] { ClpSerializer.translateInput(
							appRole2Employee) });
		}
		catch (Throwable t) {
			t = ClpSerializer.translateThrowable(t);

			if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
				throw (com.liferay.portal.kernel.exception.SystemException)t;
			}

			if (t instanceof RuntimeException) {
				throw (RuntimeException)t;
			}
			else {
				throw new RuntimeException(t.getClass().getName() +
					" is not a valid exception");
			}
		}

		return (org.oep.ssomgt.model.AppRole2Employee)ClpSerializer.translateOutput(returnObj);
	}

	@Override
	public com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery() {
		Object returnObj = null;

		try {
			returnObj = _invokableLocalService.invokeMethod(_methodName4,
					_methodParameterTypes4, new Object[] {  });
		}
		catch (Throwable t) {
			t = ClpSerializer.translateThrowable(t);

			if (t instanceof RuntimeException) {
				throw (RuntimeException)t;
			}
			else {
				throw new RuntimeException(t.getClass().getName() +
					" is not a valid exception");
			}
		}

		return (com.liferay.portal.kernel.dao.orm.DynamicQuery)ClpSerializer.translateOutput(returnObj);
	}

	@Override
	@SuppressWarnings("rawtypes")
	public java.util.List dynamicQuery(
		com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery)
		throws com.liferay.portal.kernel.exception.SystemException {
		Object returnObj = null;

		try {
			returnObj = _invokableLocalService.invokeMethod(_methodName5,
					_methodParameterTypes5,
					new Object[] { ClpSerializer.translateInput(dynamicQuery) });
		}
		catch (Throwable t) {
			t = ClpSerializer.translateThrowable(t);

			if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
				throw (com.liferay.portal.kernel.exception.SystemException)t;
			}

			if (t instanceof RuntimeException) {
				throw (RuntimeException)t;
			}
			else {
				throw new RuntimeException(t.getClass().getName() +
					" is not a valid exception");
			}
		}

		return (java.util.List)ClpSerializer.translateOutput(returnObj);
	}

	@Override
	@SuppressWarnings("rawtypes")
	public java.util.List dynamicQuery(
		com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery, int start,
		int end) throws com.liferay.portal.kernel.exception.SystemException {
		Object returnObj = null;

		try {
			returnObj = _invokableLocalService.invokeMethod(_methodName6,
					_methodParameterTypes6,
					new Object[] {
						ClpSerializer.translateInput(dynamicQuery), start, end
					});
		}
		catch (Throwable t) {
			t = ClpSerializer.translateThrowable(t);

			if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
				throw (com.liferay.portal.kernel.exception.SystemException)t;
			}

			if (t instanceof RuntimeException) {
				throw (RuntimeException)t;
			}
			else {
				throw new RuntimeException(t.getClass().getName() +
					" is not a valid exception");
			}
		}

		return (java.util.List)ClpSerializer.translateOutput(returnObj);
	}

	// (declaration continues beyond this chunk)
	@Override
	@SuppressWarnings("rawtypes")
	public java.util.List
dynamicQuery( com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery, int start, int end, com.liferay.portal.kernel.util.OrderByComparator orderByComparator) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName7, _methodParameterTypes7, new Object[] { ClpSerializer.translateInput(dynamicQuery), start, end, ClpSerializer.translateInput(orderByComparator) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (java.util.List)ClpSerializer.translateOutput(returnObj); } @Override public long dynamicQueryCount( com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName8, _methodParameterTypes8, new Object[] { ClpSerializer.translateInput(dynamicQuery) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return ((Long)returnObj).longValue(); } @Override public long dynamicQueryCount( com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery, com.liferay.portal.kernel.dao.orm.Projection projection) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName9, _methodParameterTypes9, new Object[] { 
ClpSerializer.translateInput(dynamicQuery), ClpSerializer.translateInput(projection) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return ((Long)returnObj).longValue(); } @Override public org.oep.ssomgt.model.AppRole2Employee fetchAppRole2Employee( long appRole2EmployeeId) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName10, _methodParameterTypes10, new Object[] { appRole2EmployeeId }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (org.oep.ssomgt.model.AppRole2Employee)ClpSerializer.translateOutput(returnObj); } @Override public org.oep.ssomgt.model.AppRole2Employee getAppRole2Employee( long appRole2EmployeeId) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName11, _methodParameterTypes11, new Object[] { appRole2EmployeeId }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof 
RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (org.oep.ssomgt.model.AppRole2Employee)ClpSerializer.translateOutput(returnObj); } @Override public com.liferay.portal.model.PersistedModel getPersistedModel( java.io.Serializable primaryKeyObj) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName12, _methodParameterTypes12, new Object[] { ClpSerializer.translateInput(primaryKeyObj) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (com.liferay.portal.model.PersistedModel)ClpSerializer.translateOutput(returnObj); } @Override public java.util.List<org.oep.ssomgt.model.AppRole2Employee> getAppRole2Employees( int start, int end) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName13, _methodParameterTypes13, new Object[] { start, end }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return 
(java.util.List<org.oep.ssomgt.model.AppRole2Employee>)ClpSerializer.translateOutput(returnObj); } @Override public int getAppRole2EmployeesCount() throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName14, _methodParameterTypes14, new Object[] { }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return ((Integer)returnObj).intValue(); } @Override public org.oep.ssomgt.model.AppRole2Employee updateAppRole2Employee( org.oep.ssomgt.model.AppRole2Employee appRole2Employee) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName15, _methodParameterTypes15, new Object[] { ClpSerializer.translateInput( appRole2Employee) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (org.oep.ssomgt.model.AppRole2Employee)ClpSerializer.translateOutput(returnObj); } @Override public java.lang.String getBeanIdentifier() { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName16, _methodParameterTypes16, new Object[] { }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } 
return (java.lang.String)ClpSerializer.translateOutput(returnObj); } @Override public void setBeanIdentifier(java.lang.String beanIdentifier) { try { _invokableLocalService.invokeMethod(_methodName17, _methodParameterTypes17, new Object[] { ClpSerializer.translateInput(beanIdentifier) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } } @Override public java.lang.Object invokeMethod(java.lang.String name, java.lang.String[] parameterTypes, java.lang.Object[] arguments) throws java.lang.Throwable { throw new UnsupportedOperationException(); } @Override public org.oep.ssomgt.model.AppRole2Employee addAppRole2Employee( long appRoleId, long employeeId, com.liferay.portal.service.ServiceContext serviceContext) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName19, _methodParameterTypes19, new Object[] { appRoleId, employeeId, ClpSerializer.translateInput(serviceContext) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (org.oep.ssomgt.model.AppRole2Employee)ClpSerializer.translateOutput(returnObj); } @Override public org.oep.ssomgt.model.AppRole2Employee updateAppRole2Employee( long id, long appRoleId, long employeeId, com.liferay.portal.service.ServiceContext serviceContext) throws 
com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName20, _methodParameterTypes20, new Object[] { id, appRoleId, employeeId, ClpSerializer.translateInput(serviceContext) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (org.oep.ssomgt.model.AppRole2Employee)ClpSerializer.translateOutput(returnObj); } @Override public org.oep.ssomgt.model.AppRole2Employee updateAppRole2Employee( org.oep.ssomgt.model.AppRole2Employee appRole2Employee, com.liferay.portal.service.ServiceContext serviceContext) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName21, _methodParameterTypes21, new Object[] { ClpSerializer.translateInput(appRole2Employee), ClpSerializer.translateInput(serviceContext) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return 
(org.oep.ssomgt.model.AppRole2Employee)ClpSerializer.translateOutput(returnObj); } @Override public void removeAppRole2Employee( org.oep.ssomgt.model.AppRole2Employee appRole2Employee) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { try { _invokableLocalService.invokeMethod(_methodName22, _methodParameterTypes22, new Object[] { ClpSerializer.translateInput(appRole2Employee) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } } @Override public void removeAppRole2Employee(long id) throws com.liferay.portal.kernel.exception.PortalException, com.liferay.portal.kernel.exception.SystemException { try { _invokableLocalService.invokeMethod(_methodName23, _methodParameterTypes23, new Object[] { id }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.PortalException) { throw (com.liferay.portal.kernel.exception.PortalException)t; } if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } } @Override public int countByWorkingUnit(long workingUnitId, com.liferay.portal.service.ServiceContext serviceContext) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName24, 
_methodParameterTypes24, new Object[] { workingUnitId, ClpSerializer.translateInput(serviceContext) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return ((Integer)returnObj).intValue(); } @Override public java.util.List<org.oep.ssomgt.model.AppRole2Employee> findByWorkingUnit( long workingUnitId, int startIndex, int endIndex, com.liferay.portal.service.ServiceContext serviceContext) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName25, _methodParameterTypes25, new Object[] { workingUnitId, startIndex, endIndex, ClpSerializer.translateInput(serviceContext) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (java.util.List<org.oep.ssomgt.model.AppRole2Employee>)ClpSerializer.translateOutput(returnObj); } @Override public int countByAppRoleWorkingUnit(long appRoleId, long workingUnitId, com.liferay.portal.service.ServiceContext serviceContext) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName26, _methodParameterTypes26, new Object[] { appRoleId, workingUnitId, ClpSerializer.translateInput(serviceContext) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { 
throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return ((Integer)returnObj).intValue(); } @Override public java.util.List<org.oep.ssomgt.model.AppRole2Employee> findByAppRoleWorkingUnit( long appRoleId, long workingUnitId, int startIndex, int endIndex, com.liferay.portal.service.ServiceContext serviceContext) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName27, _methodParameterTypes27, new Object[] { appRoleId, workingUnitId, startIndex, endIndex, ClpSerializer.translateInput(serviceContext) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (java.util.List<org.oep.ssomgt.model.AppRole2Employee>)ClpSerializer.translateOutput(returnObj); } @Override public java.util.List<org.oep.ssomgt.model.AppRole> findRoleByEmployee( long employeeId, long companyId) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName28, _methodParameterTypes28, new Object[] { employeeId, companyId }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return 
(java.util.List<org.oep.ssomgt.model.AppRole>)ClpSerializer.translateOutput(returnObj); } @Override public java.util.List<org.oep.ssomgt.model.AppRole2Employee> findByArrayOfAppRole( long[] appRoleIds, com.liferay.portal.service.ServiceContext serviceContext) throws com.liferay.portal.kernel.exception.SystemException { Object returnObj = null; try { returnObj = _invokableLocalService.invokeMethod(_methodName29, _methodParameterTypes29, new Object[] { ClpSerializer.translateInput(appRoleIds), ClpSerializer.translateInput(serviceContext) }); } catch (Throwable t) { t = ClpSerializer.translateThrowable(t); if (t instanceof com.liferay.portal.kernel.exception.SystemException) { throw (com.liferay.portal.kernel.exception.SystemException)t; } if (t instanceof RuntimeException) { throw (RuntimeException)t; } else { throw new RuntimeException(t.getClass().getName() + " is not a valid exception"); } } return (java.util.List<org.oep.ssomgt.model.AppRole2Employee>)ClpSerializer.translateOutput(returnObj); } private InvokableLocalService _invokableLocalService; private String _methodName0; private String[] _methodParameterTypes0; private String _methodName1; private String[] _methodParameterTypes1; private String _methodName2; private String[] _methodParameterTypes2; private String _methodName3; private String[] _methodParameterTypes3; private String _methodName4; private String[] _methodParameterTypes4; private String _methodName5; private String[] _methodParameterTypes5; private String _methodName6; private String[] _methodParameterTypes6; private String _methodName7; private String[] _methodParameterTypes7; private String _methodName8; private String[] _methodParameterTypes8; private String _methodName9; private String[] _methodParameterTypes9; private String _methodName10; private String[] _methodParameterTypes10; private String _methodName11; private String[] _methodParameterTypes11; private String _methodName12; private String[] _methodParameterTypes12; private String 
_methodName13; private String[] _methodParameterTypes13; private String _methodName14; private String[] _methodParameterTypes14; private String _methodName15; private String[] _methodParameterTypes15; private String _methodName16; private String[] _methodParameterTypes16; private String _methodName17; private String[] _methodParameterTypes17; private String _methodName19; private String[] _methodParameterTypes19; private String _methodName20; private String[] _methodParameterTypes20; private String _methodName21; private String[] _methodParameterTypes21; private String _methodName22; private String[] _methodParameterTypes22; private String _methodName23; private String[] _methodParameterTypes23; private String _methodName24; private String[] _methodParameterTypes24; private String _methodName25; private String[] _methodParameterTypes25; private String _methodName26; private String[] _methodParameterTypes26; private String _methodName27; private String[] _methodParameterTypes27; private String _methodName28; private String[] _methodParameterTypes28; private String _methodName29; private String[] _methodParameterTypes29; }
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.codedeploy.model; import java.io.Serializable; import com.amazonaws.AmazonWebServiceRequest; /** * <p> * Represents the input of an update deployment group operation. * </p> */ public class UpdateDeploymentGroupRequest extends AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The application name corresponding to the deployment group to update. * </p> */ private String applicationName; /** * <p> * The current name of the existing deployment group. * </p> */ private String currentDeploymentGroupName; /** * <p> * The new name of the deployment group, if you want to change it. * </p> */ private String newDeploymentGroupName; /** * <p> * The replacement deployment configuration name to use, if you want to * change it. * </p> */ private String deploymentConfigName; /** * <p> * The replacement set of Amazon EC2 tags to filter on, if you want to * change them. * </p> */ private com.amazonaws.internal.SdkInternalList<EC2TagFilter> ec2TagFilters; /** * <p> * The replacement set of on-premises instance tags to filter on, if you * want to change them. * </p> */ private com.amazonaws.internal.SdkInternalList<TagFilter> onPremisesInstanceTagFilters; /** * <p> * The replacement list of Auto Scaling groups to be included in the * deployment group, if you want to change them. 
* </p> */ private com.amazonaws.internal.SdkInternalList<String> autoScalingGroups; /** * <p> * A replacement service role's ARN, if you want to change it. * </p> */ private String serviceRoleArn; /** * <p> * The application name corresponding to the deployment group to update. * </p> * * @param applicationName * The application name corresponding to the deployment group to * update. */ public void setApplicationName(String applicationName) { this.applicationName = applicationName; } /** * <p> * The application name corresponding to the deployment group to update. * </p> * * @return The application name corresponding to the deployment group to * update. */ public String getApplicationName() { return this.applicationName; } /** * <p> * The application name corresponding to the deployment group to update. * </p> * * @param applicationName * The application name corresponding to the deployment group to * update. * @return Returns a reference to this object so that method calls can be * chained together. */ public UpdateDeploymentGroupRequest withApplicationName( String applicationName) { setApplicationName(applicationName); return this; } /** * <p> * The current name of the existing deployment group. * </p> * * @param currentDeploymentGroupName * The current name of the existing deployment group. */ public void setCurrentDeploymentGroupName(String currentDeploymentGroupName) { this.currentDeploymentGroupName = currentDeploymentGroupName; } /** * <p> * The current name of the existing deployment group. * </p> * * @return The current name of the existing deployment group. */ public String getCurrentDeploymentGroupName() { return this.currentDeploymentGroupName; } /** * <p> * The current name of the existing deployment group. * </p> * * @param currentDeploymentGroupName * The current name of the existing deployment group. * @return Returns a reference to this object so that method calls can be * chained together. 
*/ public UpdateDeploymentGroupRequest withCurrentDeploymentGroupName( String currentDeploymentGroupName) { setCurrentDeploymentGroupName(currentDeploymentGroupName); return this; } /** * <p> * The new name of the deployment group, if you want to change it. * </p> * * @param newDeploymentGroupName * The new name of the deployment group, if you want to change it. */ public void setNewDeploymentGroupName(String newDeploymentGroupName) { this.newDeploymentGroupName = newDeploymentGroupName; } /** * <p> * The new name of the deployment group, if you want to change it. * </p> * * @return The new name of the deployment group, if you want to change it. */ public String getNewDeploymentGroupName() { return this.newDeploymentGroupName; } /** * <p> * The new name of the deployment group, if you want to change it. * </p> * * @param newDeploymentGroupName * The new name of the deployment group, if you want to change it. * @return Returns a reference to this object so that method calls can be * chained together. */ public UpdateDeploymentGroupRequest withNewDeploymentGroupName( String newDeploymentGroupName) { setNewDeploymentGroupName(newDeploymentGroupName); return this; } /** * <p> * The replacement deployment configuration name to use, if you want to * change it. * </p> * * @param deploymentConfigName * The replacement deployment configuration name to use, if you want * to change it. */ public void setDeploymentConfigName(String deploymentConfigName) { this.deploymentConfigName = deploymentConfigName; } /** * <p> * The replacement deployment configuration name to use, if you want to * change it. * </p> * * @return The replacement deployment configuration name to use, if you want * to change it. */ public String getDeploymentConfigName() { return this.deploymentConfigName; } /** * <p> * The replacement deployment configuration name to use, if you want to * change it. 
* </p> * * @param deploymentConfigName * The replacement deployment configuration name to use, if you want * to change it. * @return Returns a reference to this object so that method calls can be * chained together. */ public UpdateDeploymentGroupRequest withDeploymentConfigName( String deploymentConfigName) { setDeploymentConfigName(deploymentConfigName); return this; } /** * <p> * The replacement set of Amazon EC2 tags to filter on, if you want to * change them. * </p> * * @return The replacement set of Amazon EC2 tags to filter on, if you want * to change them. */ public java.util.List<EC2TagFilter> getEc2TagFilters() { if (ec2TagFilters == null) { ec2TagFilters = new com.amazonaws.internal.SdkInternalList<EC2TagFilter>(); } return ec2TagFilters; } /** * <p> * The replacement set of Amazon EC2 tags to filter on, if you want to * change them. * </p> * * @param ec2TagFilters * The replacement set of Amazon EC2 tags to filter on, if you want * to change them. */ public void setEc2TagFilters( java.util.Collection<EC2TagFilter> ec2TagFilters) { if (ec2TagFilters == null) { this.ec2TagFilters = null; return; } this.ec2TagFilters = new com.amazonaws.internal.SdkInternalList<EC2TagFilter>( ec2TagFilters); } /** * <p> * The replacement set of Amazon EC2 tags to filter on, if you want to * change them. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setEc2TagFilters(java.util.Collection)} or * {@link #withEc2TagFilters(java.util.Collection)} if you want to override * the existing values. * </p> * * @param ec2TagFilters * The replacement set of Amazon EC2 tags to filter on, if you want * to change them. * @return Returns a reference to this object so that method calls can be * chained together. */ public UpdateDeploymentGroupRequest withEc2TagFilters( EC2TagFilter... 
ec2TagFilters) { if (this.ec2TagFilters == null) { setEc2TagFilters(new com.amazonaws.internal.SdkInternalList<EC2TagFilter>( ec2TagFilters.length)); } for (EC2TagFilter ele : ec2TagFilters) { this.ec2TagFilters.add(ele); } return this; } /** * <p> * The replacement set of Amazon EC2 tags to filter on, if you want to * change them. * </p> * * @param ec2TagFilters * The replacement set of Amazon EC2 tags to filter on, if you want * to change them. * @return Returns a reference to this object so that method calls can be * chained together. */ public UpdateDeploymentGroupRequest withEc2TagFilters( java.util.Collection<EC2TagFilter> ec2TagFilters) { setEc2TagFilters(ec2TagFilters); return this; } /** * <p> * The replacement set of on-premises instance tags to filter on, if you * want to change them. * </p> * * @return The replacement set of on-premises instance tags to filter on, * if you want to change them. */ public java.util.List<TagFilter> getOnPremisesInstanceTagFilters() { if (onPremisesInstanceTagFilters == null) { onPremisesInstanceTagFilters = new com.amazonaws.internal.SdkInternalList<TagFilter>(); } return onPremisesInstanceTagFilters; } /** * <p> * The replacement set of on-premises instance tags to filter on, if you * want to change them. * </p> * * @param onPremisesInstanceTagFilters * The replacement set of on-premises instance tags to filter on, if * you want to change them. */ public void setOnPremisesInstanceTagFilters( java.util.Collection<TagFilter> onPremisesInstanceTagFilters) { if (onPremisesInstanceTagFilters == null) { this.onPremisesInstanceTagFilters = null; return; } this.onPremisesInstanceTagFilters = new com.amazonaws.internal.SdkInternalList<TagFilter>( onPremisesInstanceTagFilters); } /** * <p> * The replacement set of on-premises instance tags to filter on, if you * want to change them. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). 
Use {@link #setOnPremisesInstanceTagFilters(java.util.Collection)} * or {@link #withOnPremisesInstanceTagFilters(java.util.Collection)} if you * want to override the existing values. * </p> * * @param onPremisesInstanceTagFilters * The replacement set of on-premises instance tags to filter on, if * you want to change them. * @return Returns a reference to this object so that method calls can be * chained together. */ public UpdateDeploymentGroupRequest withOnPremisesInstanceTagFilters( TagFilter... onPremisesInstanceTagFilters) { if (this.onPremisesInstanceTagFilters == null) { setOnPremisesInstanceTagFilters(new com.amazonaws.internal.SdkInternalList<TagFilter>( onPremisesInstanceTagFilters.length)); } for (TagFilter ele : onPremisesInstanceTagFilters) { this.onPremisesInstanceTagFilters.add(ele); } return this; } /** * <p> * The replacement set of on-premises instance tags to filter on, if you * want to change them. * </p> * * @param onPremisesInstanceTagFilters * The replacement set of on-premises instance tags to filter on, if * you want to change them. * @return Returns a reference to this object so that method calls can be * chained together. */ public UpdateDeploymentGroupRequest withOnPremisesInstanceTagFilters( java.util.Collection<TagFilter> onPremisesInstanceTagFilters) { setOnPremisesInstanceTagFilters(onPremisesInstanceTagFilters); return this; } /** * <p> * The replacement list of Auto Scaling groups to be included in the * deployment group, if you want to change them. * </p> * * @return The replacement list of Auto Scaling groups to be included in the * deployment group, if you want to change them. */ public java.util.List<String> getAutoScalingGroups() { if (autoScalingGroups == null) { autoScalingGroups = new com.amazonaws.internal.SdkInternalList<String>(); } return autoScalingGroups; } /** * <p> * The replacement list of Auto Scaling groups to be included in the * deployment group, if you want to change them. 
* </p> * * @param autoScalingGroups * The replacement list of Auto Scaling groups to be included in the * deployment group, if you want to change them. */ public void setAutoScalingGroups( java.util.Collection<String> autoScalingGroups) { if (autoScalingGroups == null) { this.autoScalingGroups = null; return; } this.autoScalingGroups = new com.amazonaws.internal.SdkInternalList<String>( autoScalingGroups); } /** * <p> * The replacement list of Auto Scaling groups to be included in the * deployment group, if you want to change them. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setAutoScalingGroups(java.util.Collection)} or * {@link #withAutoScalingGroups(java.util.Collection)} if you want to * override the existing values. * </p> * * @param autoScalingGroups * The replacement list of Auto Scaling groups to be included in the * deployment group, if you want to change them. * @return Returns a reference to this object so that method calls can be * chained together. */ public UpdateDeploymentGroupRequest withAutoScalingGroups( String... autoScalingGroups) { if (this.autoScalingGroups == null) { setAutoScalingGroups(new com.amazonaws.internal.SdkInternalList<String>( autoScalingGroups.length)); } for (String ele : autoScalingGroups) { this.autoScalingGroups.add(ele); } return this; } /** * <p> * The replacement list of Auto Scaling groups to be included in the * deployment group, if you want to change them. * </p> * * @param autoScalingGroups * The replacement list of Auto Scaling groups to be included in the * deployment group, if you want to change them. * @return Returns a reference to this object so that method calls can be * chained together. */ public UpdateDeploymentGroupRequest withAutoScalingGroups( java.util.Collection<String> autoScalingGroups) { setAutoScalingGroups(autoScalingGroups); return this; } /** * <p> * A replacement service role's ARN, if you want to change it. 
* </p> * * @param serviceRoleArn * A replacement service role's ARN, if you want to change it. */ public void setServiceRoleArn(String serviceRoleArn) { this.serviceRoleArn = serviceRoleArn; } /** * <p> * A replacement service role's ARN, if you want to change it. * </p> * * @return A replacement service role's ARN, if you want to change it. */ public String getServiceRoleArn() { return this.serviceRoleArn; } /** * <p> * A replacement service role's ARN, if you want to change it. * </p> * * @param serviceRoleArn * A replacement service role's ARN, if you want to change it. * @return Returns a reference to this object so that method calls can be * chained together. */ public UpdateDeploymentGroupRequest withServiceRoleArn(String serviceRoleArn) { setServiceRoleArn(serviceRoleArn); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getApplicationName() != null) sb.append("ApplicationName: " + getApplicationName() + ","); if (getCurrentDeploymentGroupName() != null) sb.append("CurrentDeploymentGroupName: " + getCurrentDeploymentGroupName() + ","); if (getNewDeploymentGroupName() != null) sb.append("NewDeploymentGroupName: " + getNewDeploymentGroupName() + ","); if (getDeploymentConfigName() != null) sb.append("DeploymentConfigName: " + getDeploymentConfigName() + ","); if (getEc2TagFilters() != null) sb.append("Ec2TagFilters: " + getEc2TagFilters() + ","); if (getOnPremisesInstanceTagFilters() != null) sb.append("OnPremisesInstanceTagFilters: " + getOnPremisesInstanceTagFilters() + ","); if (getAutoScalingGroups() != null) sb.append("AutoScalingGroups: " + getAutoScalingGroups() + ","); if (getServiceRoleArn() != null) sb.append("ServiceRoleArn: " + getServiceRoleArn()); sb.append("}"); return sb.toString(); } @Override public 
boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof UpdateDeploymentGroupRequest == false) return false; UpdateDeploymentGroupRequest other = (UpdateDeploymentGroupRequest) obj; if (other.getApplicationName() == null ^ this.getApplicationName() == null) return false; if (other.getApplicationName() != null && other.getApplicationName().equals(this.getApplicationName()) == false) return false; if (other.getCurrentDeploymentGroupName() == null ^ this.getCurrentDeploymentGroupName() == null) return false; if (other.getCurrentDeploymentGroupName() != null && other.getCurrentDeploymentGroupName().equals( this.getCurrentDeploymentGroupName()) == false) return false; if (other.getNewDeploymentGroupName() == null ^ this.getNewDeploymentGroupName() == null) return false; if (other.getNewDeploymentGroupName() != null && other.getNewDeploymentGroupName().equals( this.getNewDeploymentGroupName()) == false) return false; if (other.getDeploymentConfigName() == null ^ this.getDeploymentConfigName() == null) return false; if (other.getDeploymentConfigName() != null && other.getDeploymentConfigName().equals( this.getDeploymentConfigName()) == false) return false; if (other.getEc2TagFilters() == null ^ this.getEc2TagFilters() == null) return false; if (other.getEc2TagFilters() != null && other.getEc2TagFilters().equals(this.getEc2TagFilters()) == false) return false; if (other.getOnPremisesInstanceTagFilters() == null ^ this.getOnPremisesInstanceTagFilters() == null) return false; if (other.getOnPremisesInstanceTagFilters() != null && other.getOnPremisesInstanceTagFilters().equals( this.getOnPremisesInstanceTagFilters()) == false) return false; if (other.getAutoScalingGroups() == null ^ this.getAutoScalingGroups() == null) return false; if (other.getAutoScalingGroups() != null && other.getAutoScalingGroups().equals( this.getAutoScalingGroups()) == false) return false; if (other.getServiceRoleArn() == null ^ 
this.getServiceRoleArn() == null) return false; if (other.getServiceRoleArn() != null && other.getServiceRoleArn().equals(this.getServiceRoleArn()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getApplicationName() == null) ? 0 : getApplicationName() .hashCode()); hashCode = prime * hashCode + ((getCurrentDeploymentGroupName() == null) ? 0 : getCurrentDeploymentGroupName().hashCode()); hashCode = prime * hashCode + ((getNewDeploymentGroupName() == null) ? 0 : getNewDeploymentGroupName().hashCode()); hashCode = prime * hashCode + ((getDeploymentConfigName() == null) ? 0 : getDeploymentConfigName().hashCode()); hashCode = prime * hashCode + ((getEc2TagFilters() == null) ? 0 : getEc2TagFilters() .hashCode()); hashCode = prime * hashCode + ((getOnPremisesInstanceTagFilters() == null) ? 0 : getOnPremisesInstanceTagFilters().hashCode()); hashCode = prime * hashCode + ((getAutoScalingGroups() == null) ? 0 : getAutoScalingGroups().hashCode()); hashCode = prime * hashCode + ((getServiceRoleArn() == null) ? 0 : getServiceRoleArn() .hashCode()); return hashCode; } @Override public UpdateDeploymentGroupRequest clone() { return (UpdateDeploymentGroupRequest) super.clone(); } }
/*
 * The MIT License
 *
 * Copyright (c) 2009 The Broad Institute
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.broad.igv.util.collections;

import htsjdk.samtools.util.*;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.TreeSet;

/**
 * Collection to which many records can be added. After all records are added, the collection can be
 * iterated, and the records will be returned in order defined by the comparator. Records may be spilled
 * to a temporary directory if there are more records added than will fit in memory. As a result of this,
 * the objects returned may not be identical to the objects added to the collection, but they should be
 * equal as determined by the codec used to write them to disk and read them back.
 *
 * When iterating over the collection, the number of file handles required is numRecordsInCollection/maxRecordsInRam.
 * If this becomes a limiting factor, a file handle cache could be added.
 *
 * If Snappy DLL is available and snappy.disable system property is not set to true, then Snappy is used
 * to compress temporary files.
 *
 * <p>Not thread-safe: callers are expected to add records and iterate from a single thread.
 * Life cycle is add()* -> [doneAdding()] -> iterator()* -> [cleanup()].</p>
 */
public class SortingCollection<T> implements Iterable<T> {

    // Buffer size (bytes) used when wrapping temp-file streams for reading and writing.
    private static final int BUFFER_SIZE = 1024 * 128;

    /**
     * Client must implement this class, which defines the way in which records are written to and
     * read from file.
     */
    public interface Codec<T> extends Cloneable {
        /**
         * Where to write encoded output
         * @param os stream that subsequent encode() calls will write to
         */
        void setOutputStream(OutputStream os);

        /**
         * Where to read encoded input from
         * @param is stream that subsequent decode() calls will read from
         */
        void setInputStream(InputStream is);

        /**
         * Write object to output stream
         * @param val what to write
         */
        void encode(T val);

        /**
         * Read the next record from the input stream and convert into a java object.
         * @return null if no more records.  Should throw exception if EOF is encountered in the middle of
         * a record.
         */
        T decode();

        /**
         * Must return a cloned copy of the codec that can be used independently of
         * the original instance.  This is required so that multiple codecs can exist simultaneously
         * that each is reading a separate file.
         */
        Codec<T> clone();
    }

    /** Directories where files of sorted records go. */
    private final File[] tmpDirs;

    /** The minimum amount of space free on a temp filesystem to write a file there. */
    private final long TMP_SPACE_FREE = IOUtil.FIVE_GBS;

    /**
     * Used to write records to file, and used as a prototype to create codecs for reading.
     */
    private final SortingCollection.Codec<T> codec;

    /**
     * For sorting, both when spilling records to file, and merge sorting.
     */
    private final Comparator<T> comparator;

    // Spill threshold: once this many records are buffered in ramRecords, they are written to disk.
    private final int maxRecordsInRam;
    // Number of live records currently buffered in ramRecords[0..numRecordsInRam).
    private int numRecordsInRam = 0;
    // In-memory buffer of records; nulled out in doneAdding()/spillToDisk() to facilitate GC.
    private T[] ramRecords;
    // Once true, add() is no longer permitted.
    private boolean iterationStarted = false;
    // Set by doneAdding(); guards against adding after the collection was finalized.
    private boolean doneAdding = false;

    /**
     * Set to true when all temp files have been cleaned up
     */
    private boolean cleanedUp = false;

    /**
     * List of files in tmpDir containing sorted records
     */
    private final List<File> files = new ArrayList<File>();

    // When true (the default), iteration may discard in-memory records as they are returned,
    // reducing footprint but precluding a second iteration.
    private boolean destructiveIteration = true;

    // Produces (possibly Snappy-compressed) streams over the temp files.
    private TempStreamFactory tempStreamFactory = new TempStreamFactory();

    /**
     * Prepare to accumulate records to be sorted
     * @param componentType Class of the record to be sorted.  Necessary because of Java generic lameness.
     * @param codec For writing records to file and reading them back into RAM
     * @param comparator Defines output sort order
     * @param maxRecordsInRam how many records to accumulate before spilling to disk
     * @param tmpDir Where to write files of records that will not fit in RAM
     * @throws IllegalArgumentException if maxRecordsInRam is not positive or no temp dir is given
     */
    private SortingCollection(final Class<T> componentType, final SortingCollection.Codec<T> codec,
                              final Comparator<T> comparator, final int maxRecordsInRam, final File... tmpDir) {
        if (maxRecordsInRam <= 0) {
            throw new IllegalArgumentException("maxRecordsInRam must be > 0");
        }

        if (tmpDir == null || tmpDir.length == 0) {
            throw new IllegalArgumentException("At least one temp directory must be provided.");
        }

        this.tmpDirs = tmpDir;
        this.codec = codec;
        this.comparator = comparator;
        this.maxRecordsInRam = maxRecordsInRam;
        // Unchecked cast is unavoidable: generic arrays cannot be created directly,
        // hence the reflective Array.newInstance with the runtime component type.
        this.ramRecords = (T[])Array.newInstance(componentType, maxRecordsInRam);
    }

    /**
     * Buffer a record in RAM, spilling the current buffer to a sorted temp file first
     * if the buffer is full.  May only be called before doneAdding()/iterator().
     * @param rec the record to add
     * @throws IllegalStateException if called after doneAdding() or iterator()
     */
    public void add(final T rec) {
        if (doneAdding) {
            throw new IllegalStateException("Cannot add after calling doneAdding()");
        }
        if (iterationStarted) {
            throw new IllegalStateException("Cannot add after calling iterator()");
        }
        if (numRecordsInRam == maxRecordsInRam) {
            spillToDisk();
        }
        ramRecords[numRecordsInRam++] = rec;
    }

    /**
     * This method can be called after caller is done adding to collection, in order to possibly free
     * up memory.  If iterator() is called immediately after caller is done adding, this is not necessary,
     * because iterator() triggers the same freeing.
     */
    public void doneAdding() {
        if (this.cleanedUp) {
            throw new IllegalStateException("Cannot call doneAdding() after cleanup() was called.");
        }
        if (doneAdding) {
            return;
        }

        doneAdding = true;

        // If nothing was ever spilled, records stay in RAM and will be served
        // by InMemoryIterator; keep the buffer.
        if (this.files.isEmpty()) {
            return;
        }

        // Otherwise flush the remaining in-memory records so that iteration
        // can merge purely from files.
        if (this.numRecordsInRam > 0) {
            spillToDisk();
        }

        // Facilitate GC
        this.ramRecords = null;
    }

    /**
     * @return True if this collection is allowed to discard data during iteration in order to reduce memory
     * footprint, precluding a second iteration over the collection.
     */
    public boolean isDestructiveIteration() {
        return destructiveIteration;
    }

    /**
     * Tell this collection that it is allowed to discard data during iteration in order to reduce memory footprint,
     * precluding a second iteration.  This is true by default.
     */
    public void setDestructiveIteration(boolean destructiveIteration) {
        this.destructiveIteration = destructiveIteration;
    }

    /**
     * Sort the records in memory, write them to a file, and clear the buffer of records in memory.
     */
    private void spillToDisk() {
        try {
            // Only the live prefix [0, numRecordsInRam) is sorted and written.
            Arrays.sort(this.ramRecords, 0, this.numRecordsInRam, this.comparator);
            final File f = newTempFile();
            OutputStream os = null;
            try {
                os = tempStreamFactory.wrapTempOutputStream(new FileOutputStream(f), BUFFER_SIZE);
                this.codec.setOutputStream(os);
                for (int i = 0; i < this.numRecordsInRam; ++i) {
                    this.codec.encode(ramRecords[i]);
                    // Facilitate GC
                    this.ramRecords[i] = null;
                }

                os.flush();
            } catch (RuntimeIOException ex) {
                // Re-wrap to add the file path and a remediation hint for the common
                // "temp filesystem full" failure.
                throw new RuntimeIOException("Problem writing temporary file " + f.getAbsolutePath() +
                        ".  Try setting TMP_DIR to a file system with lots of space.", ex);
            } finally {
                if (os != null) {
                    os.close();
                }
            }

            this.numRecordsInRam = 0;
            this.files.add(f);

        }
        catch (IOException e) {
            throw new RuntimeIOException(e);
        }
    }

    /**
     * Creates a new tmp file on one of the available temp filesystems, registers it for deletion
     * on JVM exit and then returns it.
     */
    private File newTempFile() throws IOException {
        return IOUtil.newTempFile("sortingcollection.", ".tmp", this.tmpDirs, TMP_SPACE_FREE);
    }

    /**
     * Prepare to iterate through the records in order.  This method may be called more than once,
     * but add() may not be called after this method has been called.
     */
    public CloseableIterator<T> iterator() {
        if (this.cleanedUp) {
            throw new IllegalStateException("Cannot call iterator() after cleanup() was called.");
        }

        doneAdding();

        this.iterationStarted = true;
        // If no spill ever happened everything is still in RAM; otherwise all
        // records are on disk and are merge-sorted across the spill files.
        if (this.files.isEmpty()) {
            return new InMemoryIterator();
        } else {
            return new MergingIterator();
        }
    }

    /**
     * Delete any temporary files.  After this method is called, iterator() may not be called.
     */
    public void cleanup() {
        this.iterationStarted = true;
        this.cleanedUp = true;

        IOUtil.deleteFiles(this.files);
    }

    /**
     * Syntactic sugar around the ctor, to save some typing of type parameters
     *
     * @param componentType Class of the record to be sorted.  Necessary because of Java generic lameness.
     * @param codec For writing records to file and reading them back into RAM
     * @param comparator Defines output sort order
     * @param maxRecordsInRAM how many records to accumulate in memory before spilling to disk
     * @param tmpDir Where to write files of records that will not fit in RAM
     */
    public static <T> SortingCollection<T> newInstance(final Class<T> componentType,
                                                       final SortingCollection.Codec<T> codec,
                                                       final Comparator<T> comparator,
                                                       final int maxRecordsInRAM,
                                                       final File... tmpDir) {
        return new SortingCollection<T>(componentType, codec, comparator, maxRecordsInRAM, tmpDir);

    }

    /**
     * Syntactic sugar around the ctor, to save some typing of type parameters
     *
     * @param componentType Class of the record to be sorted.  Necessary because of Java generic lameness.
     * @param codec For writing records to file and reading them back into RAM
     * @param comparator Defines output sort order
     * @param maxRecordsInRAM how many records to accumulate in memory before spilling to disk
     * @param tmpDirs Where to write files of records that will not fit in RAM
     */
    public static <T> SortingCollection<T> newInstance(final Class<T> componentType,
                                                       final SortingCollection.Codec<T> codec,
                                                       final Comparator<T> comparator,
                                                       final int maxRecordsInRAM,
                                                       final Collection<File> tmpDirs) {
        return new SortingCollection<T>(componentType,
                codec,
                comparator,
                maxRecordsInRAM,
                tmpDirs.toArray(new File[tmpDirs.size()]));

    }

    /**
     * Syntactic sugar around the ctor, to save some typing of type parameters.  Writes files to java.io.tmpdir
     *
     * @param componentType Class of the record to be sorted.  Necessary because of Java generic lameness.
     * @param codec For writing records to file and reading them back into RAM
     * @param comparator Defines output sort order
     * @param maxRecordsInRAM how many records to accumulate in memory before spilling to disk
     */
    public static <T> SortingCollection<T> newInstance(final Class<T> componentType,
                                                       final SortingCollection.Codec<T> codec,
                                                       final Comparator<T> comparator,
                                                       final int maxRecordsInRAM) {

        final File tmpDir = new File(System.getProperty("java.io.tmpdir"));
        return new SortingCollection<T>(componentType, codec, comparator, maxRecordsInRAM, tmpDir);
    }

    /**
     * For iteration when number of records added is less than the threshold for spilling to disk.
     */
    class InMemoryIterator implements CloseableIterator<T> {
        private int iterationIndex = 0;

        InMemoryIterator() {
            // Records were never spilled, so they have not been sorted yet; sort in place now.
            Arrays.sort(SortingCollection.this.ramRecords,
                    0,
                    SortingCollection.this.numRecordsInRam,
                    SortingCollection.this.comparator);
        }

        public void close() {
            // nothing to do
        }

        public boolean hasNext() {
            return this.iterationIndex < SortingCollection.this.numRecordsInRam;
        }

        public T next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            T ret = SortingCollection.this.ramRecords[iterationIndex];
            // Under destructive iteration, release each record as soon as it is handed out.
            if (destructiveIteration) SortingCollection.this.ramRecords[iterationIndex] = null;
            ++iterationIndex;
            return ret;
        }

        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * For iteration when spilling to disk has occurred.
     * Each file is has records in sort order within the file.
     * This iterator automatically closes when it iterates to the end, but if not iterating
     * to the end it is a good idea to call close().
     *
     * Algorithm: MergingIterator maintains a PriorityQueue of PeekFileRecordIterators.
     * Each PeekFileRecordIterator iterates through a file in which the records are sorted.
     * The comparator for PeekFileRecordIterator used by the PriorityQueue peeks at the next record from
     * the file, so the first element in the PriorityQueue is the file that has the next record to be emitted.
     * In order to get the next record, the first PeekFileRecordIterator in the PriorityQueue is popped,
     * the record is obtained from that iterator, and then if that iterator is not empty, it is pushed back into
     * the PriorityQueue.  Because it now has a different record as its next element, it may go into another
     * location in the PriorityQueue
     */
    class MergingIterator implements CloseableIterator<T> {
        private final PollableTreeSet<PeekFileRecordIterator> queue;

        MergingIterator() {
            this.queue = new PollableTreeSet<PeekFileRecordIterator>(new PeekFileRecordIteratorComparator());
            int n = 0;
            for (final File f : SortingCollection.this.files) {
                final FileRecordIterator it = new FileRecordIterator(f);
                if (it.hasNext()) {
                    // n is a serial number used only to break comparator ties (see
                    // PeekFileRecordIteratorComparator), keeping TreeSet elements distinct.
                    this.queue.add(new PeekFileRecordIterator(it, n++));
                }
                else {
                    // Empty spill file: close its stream immediately.
                    it.close();
                }
            }
        }

        public boolean hasNext() {
            return !this.queue.isEmpty();
        }

        public T next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }

            final PeekFileRecordIterator fileIterator = queue.poll();
            final T ret = fileIterator.next();
            if (fileIterator.hasNext()) {
                // Re-insert so the set re-orders on the iterator's new head record.
                this.queue.add(fileIterator);
            }
            else {
                // Exhausted file: close the underlying FileRecordIterator's stream.
                ((CloseableIterator<T>)fileIterator.getUnderlyingIterator()).close();
            }

            return ret;
        }

        public void remove() {
            throw new UnsupportedOperationException();
        }

        public void close() {
            while (!this.queue.isEmpty()) {
                final PeekFileRecordIterator it = this.queue.poll();
                ((CloseableIterator<T>)it.getUnderlyingIterator()).close();
            }
        }
    }

    /**
     * Read a file of records in format defined by the codec
     */
    class FileRecordIterator implements CloseableIterator<T> {
        private final File file;
        private final FileInputStream is;
        private final Codec<T> codec;
        // One-record lookahead; null once the file is exhausted.
        private T currentRecord = null;

        FileRecordIterator(final File file) {
            this.file = file;
            try {
                this.is = new FileInputStream(file);
                // Clone the prototype codec so each open file has its own independent reader state.
                this.codec = SortingCollection.this.codec.clone();
                this.codec.setInputStream(tempStreamFactory.wrapTempInputStream(this.is, BUFFER_SIZE));
                advance();
            }
            catch (FileNotFoundException e) {
                throw new RuntimeIOException(e);
            }
        }

        public boolean hasNext() {
            return this.currentRecord != null;
        }

        public T next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            final T ret = this.currentRecord;
            advance();
            return ret;
        }

        public void remove() {
            throw new UnsupportedOperationException();
        }

        // Pull the next record into the lookahead slot (codec returns null at EOF).
        private void advance() {
            this.currentRecord = this.codec.decode();
        }

        public void close() {
            CloserUtil.close(this.is);
        }
    }


    /**
     * Just a typedef
     */
    class PeekFileRecordIterator extends PeekIterator<T> {
        final int n; // A serial number used for tie-breaking in the sort

        PeekFileRecordIterator(final Iterator<T> underlyingIterator, final int n) {
            super(underlyingIterator);
            this.n = n;
        }
    }

    class PeekFileRecordIteratorComparator implements Comparator<PeekFileRecordIterator>, Serializable {
        private static final long serialVersionUID = 1L;

        // Orders file iterators by their next (peeked) record; ties broken by serial
        // number so that distinct iterators never compare equal (required by TreeSet).
        public int compare(final PeekFileRecordIterator lhs, final PeekFileRecordIterator rhs) {
            final int result = comparator.compare(lhs.peek(), rhs.peek());
            if (result == 0) return lhs.n - rhs.n;
            else return result;
        }
    }

    /** Little class that provides the Java 1.5 TreeSet with a poll() method */
    static class PollableTreeSet<T> extends TreeSet<T> {
        PollableTreeSet(final Comparator<? super T> comparator) {
            super(comparator);
        }

        // Remove and return the smallest element, or null if empty.
        public T poll() {
            if (isEmpty()) {
                return null;
            } else {
                final T t = first();
                remove(t);
                return t;
            }
        }
    }
}
package org.grapheco.elfinder.impl;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import org.grapheco.elfinder.controller.executor.FsItemEx;
import org.grapheco.elfinder.service.FsItem;
import org.grapheco.elfinder.service.FsItemFilter;
import org.grapheco.elfinder.service.FsSecurityChecker;
import org.grapheco.elfinder.service.FsService;
import org.grapheco.elfinder.service.FsVolume;
import org.grapheco.elfinder.service.FsServiceConfig;

/**
 * Default {@link FsService} implementation backed by a map of named volumes.
 *
 * <p>Items are addressed by "hashes" of the form {@code <volumeId>_<escaped-base64-path>};
 * {@link #getHash(FsItem)} and {@link #fromHash(String)} are inverses of each other.</p>
 */
public class DefaultFsService implements FsService
{
    private static final Logger LOG = Logger.getLogger(DefaultFsService.class);

    FsSecurityChecker _securityChecker;

    FsServiceConfig _serviceConfig;

    Map<String, FsVolume> _volumeMap = new HashMap<String, FsVolume>();

    // Characters that Base64 may emit but that are unsafe inside a URL path
    // segment, mapped to escape tokens.  The tokens start with '_', which never
    // occurs in standard Base64 output, so decoding by plain replace() is
    // unambiguous.
    String[][] escapes = { { "+", "_P" }, { "-", "_M" }, { "/", "_S" },
            { ".", "_D" }, { "=", "_E" } };

    /**
     * find files by name pattern, this provides a simple recursively iteration
     * based method lucene engines can be introduced to improve it! This
     * searches across all volumes.
     *
     * @param filter The filter to apply to select files.
     * @return All non-folder items, from every mounted volume, accepted by the
     *         filter.
     */
    @Override
    public FsItemEx[] find(FsItemFilter filter)
    {
        List<FsItemEx> results = new ArrayList<FsItemEx>();
        for (FsVolume vol : _volumeMap.values())
        {
            FsItem root = vol.getRoot();
            results.addAll(findRecursively(filter, root));
        }

        return results.toArray(new FsItemEx[0]);
    }

    /**
     * find files recursively in specific folder
     *
     * @param filter
     *            The filter to apply to select files.
     * @param root
     *            The location in the hierarchy to search from.
     * @return A collection of files that match the filter and have the root as
     *         a parent.
     */
    private Collection<FsItemEx> findRecursively(FsItemFilter filter,
            FsItem root)
    {
        List<FsItemEx> results = new ArrayList<FsItemEx>();
        FsVolume vol = root.getVolume();
        for (FsItem child : vol.listChildren(root))
        {
            if (vol.isFolder(child))
            {
                // Folders themselves are never matched, only descended into.
                results.addAll(findRecursively(filter, child));
            }
            else
            {
                FsItemEx item = new FsItemEx(child, this);
                if (filter.accepts(item))
                    results.add(item);
            }
        }
        return results;
    }

    /**
     * Resolves a hash produced by {@link #getHash(FsItem)} back to an item.
     *
     * @param hash volume id, an underscore, then the escaped Base64 path
     *            (a bare {@code <volumeId>_} denotes the volume root)
     * @return the resolved item, or null if no mounted volume owns the hash
     */
    @Override
    public FsItem fromHash(String hash)
    {
        for (FsVolume v : _volumeMap.values())
        {
            String prefix = getVolumeId(v) + "_";

            if (hash.equals(prefix))
            {
                return v.getRoot();
            }

            if (hash.startsWith(prefix))
            {
                String localHash = hash.substring(prefix.length());

                // Undo the URL-safe escaping applied in getHash().
                for (String[] pair : escapes)
                {
                    localHash = localHash.replace(pair[1], pair[0]);
                }

                // Decode with an explicit charset so the hash <-> path mapping
                // is identical on every platform; the previous platform-default
                // charset broke round-tripping of non-ASCII paths across JVMs.
                String relativePath = new String(
                        Base64.decodeBase64(localHash),
                        java.nio.charset.StandardCharsets.UTF_8);
                return v.fromPath(relativePath);
            }
        }

        return null;
    }

    /**
     * Builds the URL-safe hash of an item: its volume id, an underscore, and
     * the Base64-encoded volume-relative path with URL-unsafe characters
     * escaped.
     *
     * @param item the item to hash
     * @return the hash string
     * @throws IOException if the volume cannot compute the item's path
     */
    @Override
    public String getHash(FsItem item) throws IOException
    {
        String relativePath = item.getVolume().getPath(item);
        // Encode with an explicit charset for cross-platform stability
        // (mirrors the decode in fromHash()).  Base64 output itself is ASCII,
        // so the outer new String(...) is charset-insensitive.
        String base = new String(
                Base64.encodeBase64(relativePath
                        .getBytes(java.nio.charset.StandardCharsets.UTF_8)),
                java.nio.charset.StandardCharsets.UTF_8);
        for (String[] pair : escapes)
        {
            base = base.replace(pair[0], pair[1]);
        }
        return getVolumeId(item.getVolume()) + "_" + base;
    }

    /** @return the checker consulted for access decisions, if configured. */
    public FsSecurityChecker getSecurityChecker()
    {
        return _securityChecker;
    }

    /** @return the service-level configuration, if configured. */
    public FsServiceConfig getServiceConfig()
    {
        return _serviceConfig;
    }

    /**
     * Reverse lookup of a volume's mount id.
     *
     * @param volume the volume instance to look up
     * @return the id it was mounted under, or null if not mounted.  Note the
     *         comparison is by identity (==), so only the exact mounted
     *         instance resolves.
     */
    @Override
    public String getVolumeId(FsVolume volume)
    {
        for (Entry<String, FsVolume> en : _volumeMap.entrySet())
        {
            if (en.getValue() == volume)
                return en.getKey();
        }
        return null;
    }

    /** @return the live id-to-volume map backing this service. */
    public Map<String, FsVolume> getVolumeMap()
    {
        return _volumeMap;
    }

    /** @return all mounted volumes as an array snapshot. */
    public FsVolume[] getVolumes()
    {
        return _volumeMap.values().toArray(new FsVolume[0]);
    }

    public void setSecurityChecker(FsSecurityChecker securityChecker)
    {
        _securityChecker = securityChecker;
    }

    public void setServiceConfig(FsServiceConfig serviceConfig)
    {
        _serviceConfig = serviceConfig;
    }

    /**
     * Mounts every entry of the given map under its key.
     *
     * @param volumeMap volume id to volume instance
     */
    public void setVolumeMap(Map<String, FsVolume> volumeMap)
    {
        for (Entry<String, FsVolume> en : volumeMap.entrySet())
        {
            addVolume(en.getKey(), en.getValue());
        }
    }

    /**
     * @deprecated {@link #setVolumeMap(Map)}
     * @param volumes
     *            The volumes available.
     * @throws IOException
     *             If there is a problem with using one of the volumes.
     */
    @Deprecated
    public void setVolumes(FsVolume[] volumes) throws IOException
    {
        LOG.warn("calling setVolumes() is deprecated, please use setVolumeMap() to specify volume id explicitly");
        // Auto-assign single-letter ids 'A', 'B', ... in array order.
        char vid = 'A';
        for (FsVolume volume : volumes)
        {
            _volumeMap.put("" + vid, volume);
            LOG.info(String.format("mounted %s: %s", "" + vid, volume));
            vid++;
        }
    }

    /**
     * Mounts a single volume under the given id, replacing any volume already
     * mounted under that id.
     *
     * @param name the volume id used as the hash prefix
     * @param fsVolume the volume to mount
     */
    public void addVolume(String name, FsVolume fsVolume)
    {
        _volumeMap.put(name, fsVolume);
        LOG.info(String.format("mounted %s: %s", name, fsVolume));
    }
}
/* * $Header: /var/chroot/cvs/cvs/factsheetDesigner/extern/jakarta-slide-server-src-2.1-iPlus Edit/src/stores/org/apache/slide/store/txfile/AbstractXMLResourceDescriptor.java,v 1.2 2006-01-22 22:49:06 peter-cvs Exp $ * $Revision: 1.2 $ * $Date: 2006-01-22 22:49:06 $ * * ==================================================================== * * Copyright 1999-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.slide.store.txfile; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.reflect.Constructor; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Enumeration; import java.util.Hashtable; import java.util.Iterator; import java.util.List; import java.util.Vector; import org.apache.slide.common.*; import org.apache.slide.lock.LockTokenNotFoundException; import org.apache.slide.lock.NodeLock; import org.apache.slide.security.NodePermission; import org.apache.slide.structure.*; import org.apache.slide.content.*; import org.apache.slide.util.CustomSAXBuilder; import org.jdom.a.*; import org.jdom.a.input.*; import org.jdom.a.output.*; /** * Abstract class for encode all meta info of a resource into XML. * * Takes over very much code from UriProperties and AbstractUriProperties, that's why Marc is listed as author as well. 
 *
 * @see FileResourceManager
 * @see TxXMLFileDescriptorsStore
 */
public abstract class AbstractXMLResourceDescriptor {

    /** JDOM output format (pretty-printed, with the configured encoding). */
    protected final Format outputFormat;

    /** Identifier of the transaction this descriptor lives in. */
    protected Object txId;

    /** URI of the resource this descriptor describes. */
    protected String uri;

    /** Format used for lock expiration dates ("MM/dd/yyyy HH:mm:ss z"). */
    protected SimpleDateFormat dateFormat;

    /** True once {@link #registerForSaving()} has been called. */
    protected boolean registeredForSaving = false;

    /** Stored object.*/
    protected ObjectNode object;

    /** Permissions vector. */
    protected Vector permissions;

    /** Locks vector.*/
    protected Vector locks;

    /** Revision descriptors.*/
    protected NodeRevisionDescriptors revisionDescriptors;

    /** Revision descriptor hashtable, keyed by revision number string. */
    protected Hashtable descriptor;

    /** Renders a boolean as the literal string "true" or "false". */
    protected static String booleanToString(boolean aBoolean) {
        return aBoolean ? "true" : "false";
    }

    /**
     * Builds a parent element containing one child element per binding,
     * carrying "name" and "uuri" attributes.
     *
     * @param aParent name of the wrapping element
     * @param aChild name of each per-binding element
     * @param aEnum enumeration of {@link ObjectNode.Binding} instances
     * @return the populated parent element
     */
    protected static Element createBindings(String aParent, String aChild, Enumeration aEnum) {
        Element aElement = new Element(aParent);
        Element childNode;
        ObjectNode.Binding binding;
        while (aEnum.hasMoreElements()) {
            binding = (ObjectNode.Binding) aEnum.nextElement();
            childNode = new Element(aChild);
            childNode.setAttribute(new Attribute("name", binding.getName()));
            childNode.setAttribute(new Attribute("uuri", binding.getUuri()));
            aElement.addContent(childNode);
        }
        return aElement;
    }

    /**
     * Builds a parent element with one child element per enumerated value,
     * each value stored in a "val" attribute (via toString()).
     * Inverse of {@link #createVector}.
     */
    protected static Element createElements(String aParent, String aChild, Enumeration aEnum) {
        Element aElement = new Element(aParent);
        while (aEnum.hasMoreElements()) {
            Object aObject = aEnum.nextElement();
            Element aItem = new Element(aChild);
            aItem.setAttribute("val", aObject.toString());
            aElement.addContent(aItem);
        }
        return aElement;
    }

    /**
     * Reconstructs a {@link NodePermission} from a "permission" element.
     * Inverse of {@link #encodeNodePermission}.
     *
     * @param aElement the encoded permission element
     * @param aUri URI of the object the permission applies to
     */
    protected static NodePermission decodePermission(Element aElement, String aUri) {
        String aRevisionNumber = aElement.getAttributeValue("revisionNumber");
        String aSubject = aElement.getAttributeValue("subjectUri");
        String aAction = aElement.getAttributeValue("actionUri");
        boolean aInheritable = new Boolean(aElement.getAttributeValue("inheritable")).booleanValue();
        boolean aNegative = new Boolean(aElement.getAttributeValue("negative")).booleanValue();
        return new NodePermission(aUri, aRevisionNumber, aSubject, aAction, aInheritable, aNegative);
    }

    /**
     * Serializes a {@link NodePermission} into a "permission" element.
     * The "revisionNumber" attribute is omitted when the permission has no
     * revision number.
     */
    protected static Element encodeNodePermission(NodePermission aPermission) {
        Element aElementPermission = new Element("permission");
        NodeRevisionNumber aRevisionNumber = aPermission.getRevisionNumber();
        if (aRevisionNumber != null) {
            aElementPermission.setAttribute("revisionNumber", encodeRevisionNumber(aRevisionNumber));
        }
        aElementPermission.setAttribute("subjectUri", aPermission.getSubjectUri());
        aElementPermission.setAttribute("actionUri", aPermission.getActionUri());
        aElementPermission.setAttribute("inheritable", booleanToString(aPermission.isInheritable()));
        aElementPermission.setAttribute("negative", booleanToString(aPermission.isNegative()));
        return aElementPermission;
    }

    /**
     * Serializes a revision descriptor (branch name, number, labels and
     * properties) into a "revisions" element.
     */
    protected static Element encodeRevisionDescriptor(NodeRevisionDescriptor aDescriptor) {
        Element aRevisions = new Element("revisions");
        aRevisions.setAttribute("branchName", aDescriptor.getBranchName());
        aRevisions.setAttribute("number", encodeRevisionNumber(aDescriptor.getRevisionNumber()));
        aRevisions.addContent(createElements("labels", "label", aDescriptor.enumerateLabels()));
        Element aProperties = new Element("properties");
        for (Enumeration aEnum = aDescriptor.enumerateProperties(); aEnum.hasMoreElements();) {
            Object aObject = aEnum.nextElement();
            // System.out.println("---------- encodeRevisionDescriptor aObject="+aObject+" "+aObject.getClass().getName());
            NodeProperty aProp = (NodeProperty) aObject;
            aProperties.addContent(encodeNodeProperty(aProp));
        }
        aRevisions.addContent(aProperties);
        return aRevisions;
    }

    /**
     * Serializes a {@link NodeProperty}, including its nested permissions,
     * into a "property" element. Inverse of {@link #decodeNodeProperty}.
     */
    protected static Element encodeNodeProperty(NodeProperty aProp) {
        Element aElement = new Element("property");
        aElement.setAttribute("name", aProp.getName());
        aElement.setAttribute("namespace", aProp.getNamespace());
        // Note: only the toString() of the value is persisted.
        aElement.setAttribute("value", aProp.getValue().toString());
        aElement.setAttribute("type", aProp.getType());
        aElement.setAttribute("protected", booleanToString(aProp.isProtected()));
        Element aPermissions = new Element("permissions");
        for (Enumeration aEnum = aProp.enumeratePermissions(); aEnum.hasMoreElements();) {
            NodePermission aPermission = (NodePermission) aEnum.nextElement();
            aPermissions.addContent(encodeNodePermission(aPermission));
        }
        aElement.addContent(aPermissions);
        return aElement;
    }

    /**
     * Collects the "val" attributes of aParentName/aChildName children into a
     * Vector of Strings. Inverse of {@link #createElements}.
     */
    protected static Vector createVector(Element aElement, String aParentName, String aChildName) {
        Element aParent = aElement.getChild(aParentName);
        Vector aRet = new Vector();
        // System.out.println("--------- createVector aParentName="+aParentName+" aChildName="+aChildName);
        List aList = aParent.getChildren(aChildName);
        // System.out.println("--------- createVector aList="+aList);
        for (int i = 0; i < aList.size(); i++) {
            Element aChild = (Element) aList.get(i);
            aRet.addElement(aChild.getAttributeValue("val"));
        }
        return aRet;
    }

    /**
     * Rebuilds a Vector of bindings from the children of aParentName.
     * Inverse of {@link #createBindings}.
     *
     * @param parentBindings true to produce {@link ObjectNode.ParentBinding}s,
     *        false for plain {@link ObjectNode.Binding}s
     */
    protected static Vector createBindingVector(
            Element aElement, String aParentName, String aChildName, boolean parentBindings) {
        Element aParent = aElement.getChild(aParentName);
        Vector aRet = new Vector();
        // System.out.println("--------- createVector aParentName="+aParentName+" aChildName="+aChildName);
        // NOTE(review): unlike createVector, this iterates ALL children and
        // ignores aChildName — harmless if only aChildName children exist,
        // but confirm the persisted format guarantees that.
        Iterator it = aParent.getChildren().iterator();
        while (it.hasNext()) {
            Element aChild = (Element) it.next();
            String name = aChild.getAttributeValue("name");
            String uuri = aChild.getAttributeValue("uuri");
            if (parentBindings) {
                aRet.add(new ObjectNode.ParentBinding(name, uuri));
            } else {
                aRet.add(new ObjectNode.Binding(name, uuri));
            }
        }
        return aRet;
    }

    /** Renders a revision number as "major.minor". */
    protected static String encodeRevisionNumber(NodeRevisionNumber aRevisionNumber) {
        return aRevisionNumber.getMajor() + "." + aRevisionNumber.getMinor();
    }

    /**
     * Reflectively instantiates aNomClasse with the given constructor
     * signature and arguments.
     *
     * @throws UnknownObjectClassException if the class cannot be loaded or
     *         instantiated (the original exception is NOT chained)
     */
    protected static Object createObject(String aNomClasse, Class aTypes[], Object aArgs[])
            throws UnknownObjectClassException {
        Class aClasse = null;
        try {
            // First, load the object's class
            aClasse = Class.forName(aNomClasse);
            Constructor aConstructor = aClasse.getConstructor(aTypes);
            // NOTE(review): getConstructor never returns null (it throws
            // NoSuchMethodException), so this superclass fallback is dead code.
            if (aConstructor == null)
                aConstructor = aClasse.getSuperclass().getConstructor(aTypes);
            return aConstructor.newInstance(aArgs);
        } catch (Exception e) {
            throw new UnknownObjectClassException(aNomClasse);
        }
    }

    /**
     * Reads a revision number from a nested "revision" child element with
     * "major"/"minor" attributes.
     */
    protected static NodeRevisionNumber decodeRevisionNumber(Element aElement) {
        Element aElementRevision = aElement.getChild("revision");
        return new NodeRevisionNumber(
            Integer.parseInt(aElementRevision.getAttributeValue("major")),
            Integer.parseInt(aElementRevision.getAttributeValue("minor")));
    }

    /** Parses a "major.minor" string; null-safe (null in, null out). */
    protected static NodeRevisionNumber decodeRevisionNumber(String aStr) {
        return (aStr == null ? null : new NodeRevisionNumber(aStr));
    }

    /**
     * Reconstructs a {@link NodeProperty} from a "property" element.
     * Inverse of {@link #encodeNodeProperty}.
     */
    protected static NodeProperty decodeNodeProperty(Element aElement, String aUri) {
        String aName = aElement.getAttributeValue("name");
        String aValue = aElement.getAttributeValue("value");
        String aNamespace = aElement.getAttributeValue("namespace");
        String aType = aElement.getAttributeValue("type");
        boolean aProtected = new Boolean(aElement.getAttributeValue("protected")).booleanValue();
        Element aPermisionsElement = aElement.getChild("permissions");
        List aList = aPermisionsElement.getChildren();
        Vector aPermission = new Vector();
        for (int i = 0; i < aList.size(); i++) {
            Element aChild = (Element) aList.get(i);
            aPermission.addElement(decodePermission(aChild, aUri));
        }
        // NOTE(review): aPermission is filled above but never attached to the
        // returned NodeProperty, so decoded per-property permissions are
        // silently dropped — confirm whether NodeProperty should carry them.
        return new NodeProperty(aName, aValue, aNamespace, aType, aProtected);
    }

    /**
     * Creates an XML descriptor resource.
     *
     * @param uri uri of the resource
     * @param txId identifier for the transaction in which the descriptor is to be managed
     * @param characterEncoding character encoding used to store this descriptor in XML
     * @throws ServiceAccessException if anything goes wrong at system level
     */
    public AbstractXMLResourceDescriptor(
            Uri uri, Object txId, String characterEncoding) throws ServiceAccessException {
        outputFormat = Format.getPrettyFormat();
        outputFormat.setEncoding(characterEncoding);
        dateFormat = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss z");
        this.txId = txId;
        if (uri == null) {
            throw new ServiceAccessException(null,
                "Trying to initialize XMLResourceDescriptor with null URI");
        }
        this.uri = uri.toString();
    }

    // -------------- PART TAKE OVER FROM AbstractUriProperties START --------------

    /**
     * Retrieve an object from the Descriptors Store (returns a clone).
     *
     * @exception ServiceAccessException Error accessing the Descriptors Store
     * @exception ObjectNotFoundException The object to retrieve was not found
     */
    public ObjectNode retrieveObject()
        throws ServiceAccessException, ObjectNotFoundException {
        if (object == null) {
            throw new ObjectNotFoundException(uri);
        }
        return object.cloneObject();
    }

    /**
     * Store an object in the Descriptors Store (a clone is kept).
     *
     * @param aObject Object to update
     * @exception ServiceAccessException Error accessing the Descriptors Store
     * @exception ObjectNotFoundException The object to update was not found
     */
    public void storeObject(ObjectNode aObject)
        throws ServiceAccessException, ObjectNotFoundException {
        object = aObject.cloneObject();
    }

    /**
     * Remove an object from the Descriptors Store.
     *
     * @param aObject Object to remove (ignored — the stored object is cleared)
     * @exception ServiceAccessException Error accessing the Descriptors Store
     * @exception ObjectNotFoundException The object to remove was not found
     */
    public void removeObject(ObjectNode aObject)
        throws ServiceAccessException, ObjectNotFoundException {
        object = null;
    }

    /**
     * Grant a permission on the object (a clone is stored).
     *
     * @param permission Permission we want to create
     * @exception ServiceAccessException Error accessing the Descriptors Store
     */
    public void grantPermission(NodePermission permission)
        throws ObjectNotFoundException, ServiceAccessException {
        if (permissions == null)
            permissions = new Vector();
        permissions.addElement(permission.cloneObject());
    }

    /**
     * Revoke a single permission on the object; no-op if absent.
     *
     * @param permission Permission we want to revoke
     * @exception ServiceAccessException Error accessing the Descriptors Store
     */
    public void revokePermission(NodePermission permission)
        throws ObjectNotFoundException, ServiceAccessException {
        if (permissions != null)
            permissions.removeElement(permission);
    }

    /**
     * Revoke all the permissions on the object.
     *
     * @exception ServiceAccessException Error accessing the Descriptors Store
     */
    public void revokePermissions()
        throws ObjectNotFoundException, ServiceAccessException {
        if (permissions != null)
            permissions.removeAllElements();
    }

    /**
     * Enumerate the permissions on the object (lazily creating the vector).
     *
     * @exception ServiceAccessException Error accessing the Descriptors Store
     */
    public Enumeration enumeratePermissions() throws ServiceAccessException {
        if (permissions == null)
            permissions = new Vector();
        return permissions.elements();
    }

    /**
     * Puts a lock on a subject (a clone is stored).
     *
     * @param lock Lock token
     * @exception ServiceAccessException Service access error
     */
    public void putLock(NodeLock lock)
        throws ObjectNotFoundException, ServiceAccessException {
        if (locks == null)
            locks = new Vector();
        locks.addElement(lock.cloneObject());
    }

    /**
     * Renews a lock: the old token is removed and a clone of the new one added.
     *
     * @param lock Token to renew
     * @exception ServiceAccessException Service access error
     * @exception LockTokenNotFoundException Lock token was not found
     */
    public void renewLock(NodeLock lock)
        throws LockTokenNotFoundException, ObjectNotFoundException, ServiceAccessException {
        if (locks == null)
            locks = new Vector();
        boolean wasPresent = locks.removeElement(lock);
        if (!wasPresent) {
            throw new LockTokenNotFoundException(lock);
        }
        locks.addElement(lock.cloneObject());
    }

    /**
     * Removes (cancels) a lock.
     *
     * @param lock Token to remove
     * @exception ServiceAccessException Service access error
     * @exception LockTokenNotFoundException Lock token was not found
     */
    public void removeLock(NodeLock lock)
        throws LockTokenNotFoundException, ObjectNotFoundException, ServiceAccessException {
        if (locks == null) {
            throw new LockTokenNotFoundException(lock);
        }
        boolean wasPresent = locks.removeElement(lock);
        if (!wasPresent) {
            throw new LockTokenNotFoundException(lock);
        }
    }

    /**
     * Returns the list of locks put on a subject (lazily creating the vector).
     *
     * @return Enumeration List of locks which have been put on the subject
     * @exception ServiceAccessException Service access error
     */
    public Enumeration enumerateLocks() throws ServiceAccessException {
        if (locks == null)
            locks = new Vector();
        return locks.elements();
    }

    /**
     * Retrieve the revision descriptors (returns a clone).
     *
     * @exception ServiceAccessException Service access error
     * @exception RevisionDescriptorNotFoundException Revision descriptor
     * was not found
     */
    public NodeRevisionDescriptors retrieveRevisionDescriptors()
        throws ServiceAccessException, RevisionDescriptorNotFoundException {
        if (revisionDescriptors == null) {
            throw new RevisionDescriptorNotFoundException(uri.toString());
        }
        return revisionDescriptors.cloneObject();
    }

    /**
     * Create new revision descriptors (a clone is stored).
     *
     * @param aRevisionDescriptors Node revision descriptors
     * @exception ServiceAccessException Service access error
     */
    public void createRevisionDescriptors(NodeRevisionDescriptors aRevisionDescriptors)
        throws ObjectNotFoundException, ServiceAccessException {
        revisionDescriptors = aRevisionDescriptors.cloneObject();
    }

    /**
     * Update revision descriptors; rejects a URI mismatch with the current
     * descriptors.
     *
     * @param aRevisionDescriptors Node revision descriptors
     * @exception ServiceAccessException Service access error
     * @exception RevisionDescriptorNotFoundException Revision descriptor
     * was not found
     */
    public void storeRevisionDescriptors(NodeRevisionDescriptors aRevisionDescriptors)
        throws RevisionDescriptorNotFoundException, ObjectNotFoundException, ServiceAccessException {
        if (!revisionDescriptors.getUri().equals(uri.toString())) {
            throw new RevisionDescriptorNotFoundException(uri.toString());
        }
        revisionDescriptors = aRevisionDescriptors.cloneObject();
    }

    /**
     * Remove revision descriptors.
     *
     * @exception ServiceAccessException Service access error
     */
    public void removeRevisionDescriptors()
        throws ObjectNotFoundException, ServiceAccessException {
        revisionDescriptors = null;
    }

    /**
     * Retrieve a single revision descriptor by number (returns a clone).
     *
     * @param revisionNumber Node revision number
     * @exception RevisionDescriptorNotFoundException if the descriptor table
     * is empty, the number is null, or no entry matches
     */
    public NodeRevisionDescriptor retrieveRevisionDescriptor(NodeRevisionNumber revisionNumber)
        throws ServiceAccessException, RevisionDescriptorNotFoundException {
        Object result = null;
        if (descriptor != null && revisionNumber != null)
            result = descriptor.get(revisionNumber.toString());
        if (result == null) {
            throw new RevisionDescriptorNotFoundException(uri.toString());
        }
        return ((NodeRevisionDescriptor) result).cloneObject();
    }

    /**
     * Create new revision descriptor (a clone is stored, keyed by its
     * revision number).
     *
     * @param aRevisionDescriptor Node revision descriptor
     * @exception ServiceAccessException Service access error
     */
    public void createRevisionDescriptor(NodeRevisionDescriptor aRevisionDescriptor)
        throws ObjectNotFoundException, ServiceAccessException {
        if (descriptor == null)
            descriptor = new Hashtable();
        descriptor.put(aRevisionDescriptor.getRevisionNumber().toString(),
            aRevisionDescriptor.cloneObject());
    }

    /**
     * Update an existing revision descriptor; the entry must already exist.
     *
     * @param aRevisionDescriptor Node revision descriptor
     * @exception ServiceAccessException Service access error
     * @exception RevisionDescriptorNotFoundException Revision descriptor
     * was not found
     */
    public void storeRevisionDescriptor(NodeRevisionDescriptor aRevisionDescriptor)
        throws RevisionDescriptorNotFoundException, ObjectNotFoundException, ServiceAccessException {
        String key = aRevisionDescriptor.getRevisionNumber().toString();
        if (descriptor == null || !descriptor.containsKey(key)) {
            throw new RevisionDescriptorNotFoundException(uri.toString());
        }
        descriptor.put(key, aRevisionDescriptor.cloneObject());
    }

    /**
     * Remove a revision descriptor by number; no-op if the table is absent.
     *
     * @param number Revision number
     * @exception ServiceAccessException Service access error
     */
    public void removeRevisionDescriptor(NodeRevisionNumber number)
        throws ObjectNotFoundException, ServiceAccessException {
        if (descriptor == null)
            return;
        descriptor.remove(number.toString());
    }

    // -------------- PART TAKE OVER FROM AbstractUriProperties END --------------

    /** Marks this descriptor as dirty so the transaction will persist it. */
    public void registerForSaving() {
        registeredForSaving = true;
    }

    /** @return true if {@link #registerForSaving()} has been called */
    public boolean isRegisteredForSaving() {
        return registeredForSaving;
    }

    /**
     * Stores this descriptor to the resource manager.
     *
     * @throws ServiceAccessException if anything goes wrong at system level
     * @throws ObjectNotFoundException if the descriptor has not been created before
     */
    public abstract void save() throws ServiceAccessException, ObjectNotFoundException;

    /**
     * Loads this descriptor from the resource manager.
     *
     * @throws ServiceAccessException if anything goes wrong at system level
     * @throws ObjectNotFoundException if the descriptor does not exist
     */
    public abstract void load() throws ServiceAccessException, ObjectNotFoundException;

    /**
     * Creates this descriptor in the resource manager.
     *
     * @throws ServiceAccessException if anything goes wrong at system level
     * @throws ObjectAlreadyExistsException if the descriptor already exists
     */
    public abstract void create() throws ServiceAccessException, ObjectAlreadyExistsException;

    /**
     * Deletes this descriptor from the resource manager.
     *
     * @throws ServiceAccessException if anything goes wrong at system level
     * @throws ObjectNotFoundException if the descriptor does not exist
     */
    public abstract void delete() throws ServiceAccessException, ObjectNotFoundException;

    /**
     * Gets the URI of this descriptor.
     *
     * @return the URI
     */
    public String getUri() {
        return uri;
    }

    /**
     * Gets the transaction this descriptor lives in.
     *
     * @return the transaction identifier
     */
    public Object getTxId() {
        return txId;
    }

    /**
     * Checks if the specified object is a descriptor with the same URI in the
     * same transaction.
     *
     * @param o object to compare this descriptor to
     * @return <code>true</code> if object is equal as described above
     */
    public boolean equals(Object o) {
        // NOTE(review): compares against XMLResourceDescriptor (presumably a
        // concrete subclass), not this abstract type — so instances of other
        // subclasses never compare equal; confirm this is intentional. Also no
        // matching hashCode() override is visible in this class.
        return (
            this == o
                || (o != null
                    && o instanceof XMLResourceDescriptor
                    && ((XMLResourceDescriptor) o).uri.equals(uri)
                    && ((XMLResourceDescriptor) o).txId.equals(txId)));
    }

    /** @return "txId: uri" for logging/debugging */
    public String toString() {
        return txId + ": " + uri;
    }

    /**
     * Serializes the full descriptor state as an XML document to the stream.
     * The stream is flushed but not closed.
     */
    protected void save(OutputStream os) throws ServiceAccessException, IOException {
        Element aRoot = encode();
        Document aDocument = new Document(aRoot);
        XMLOutputter aOutputter = new XMLOutputter(outputFormat);
        aOutputter.output(aDocument, os);
        os.flush();
    }

    /** Parses an XML document from the stream and restores all state from it. */
    protected void load(InputStream is) throws ServiceAccessException, JDOMException, IOException {
        SAXBuilder aBuilder = CustomSAXBuilder.newInstance();
        Document aDocument = aBuilder.build(is);
        decode(aDocument.getRootElement());
    }

    /** Resets all state to pristine empty values (new, unversioned resource). */
    protected void init() throws ServiceAccessException {
        // need to set this null, as AbstractUriProperties.retrieveObject relies on it
        object = null;
        permissions = new Vector();
        locks = new Vector();
        revisionDescriptors = new NodeRevisionDescriptors(uri, null,
            new Hashtable(), new Hashtable(), new Hashtable(), false);
        descriptor = new Hashtable();
    }

    /**
     * Encodes the complete descriptor state into a single "data" root element:
     * object node, permissions, locks, revision history and revision
     * descriptors. Inverse of {@link #decode}.
     */
    protected Element encode() throws ServiceAccessException {
        Element aRoot = new Element("data");
        aRoot.addContent(encodeObject());
        aRoot.addContent(encodePermissions());
        aRoot.addContent(encodeLocks());
        aRoot.addContent(encodeRevisionDescriptors());
        aRoot.addContent(encodeRevisionDescriptor());
        return aRoot;
    }

    /**
     * Encodes the stored object node (class name, URI, link target, bindings
     * and links). A missing object is marked with classname="null".
     */
    protected Element encodeObject() {
        Element aElementObjectNode = new Element("objectnode");
        if (object != null) {
            aElementObjectNode.setAttribute("classname", object.getClass().getName());
            aElementObjectNode.setAttribute("uri", object.getUri());
            if (object instanceof LinkNode) {
                aElementObjectNode.setAttribute("linkTo", ((LinkNode) object).getLinkedUri());
            }
            aElementObjectNode.addContent(createBindings("children", "child", object.enumerateBindings()));
            aElementObjectNode.addContent(createBindings("parents", "parent", object.enumerateParentBindings()));
            aElementObjectNode.addContent(createElements("links", "link", object.enumerateLinks()));
        } else {
            // for null locks
            aElementObjectNode.setAttribute("classname", "null");
            aElementObjectNode.setAttribute("uri", uri.toString());
        }
        return aElementObjectNode;
    }

    /** Encodes all granted permissions; empty element when none exist. */
    protected Element encodePermissions() {
        Element aPermissions = new Element("permissions");
        if (permissions == null)
            return aPermissions;
        for (int aSize = permissions.size(), i = 0; i < aSize; i++) {
            NodePermission aPermission = (NodePermission) permissions.elementAt(i);
            aPermissions.addContent(encodeNodePermission(aPermission));
        }
        return aPermissions;
    }

    /**
     * Encodes all locks with their subject, type, expiration date (using
     * {@link #dateFormat}), flags, id and owner. Inverse of
     * {@link #decodeLocks}.
     */
    protected Element encodeLocks() {
        Element aElementLocks = new Element("locks");
        if (locks == null)
            return aElementLocks;
        for (int aSize = locks.size(), i = 0; i < aSize; i++) {
            NodeLock aLock = (NodeLock) locks.elementAt(i);
            Element aElementLock = new Element("lock");
            aElementLock.setAttribute("subjectUri", aLock.getSubjectUri());
            aElementLock.setAttribute("typeUri", aLock.getTypeUri());
            aElementLock.setAttribute("date", dateFormat.format(aLock.getExpirationDate()));
            aElementLock.setAttribute("inheritance", booleanToString(aLock.isInheritable()));
            aElementLock.setAttribute("exclusive", booleanToString(aLock.isExclusive()));
            aElementLock.setAttribute("lockId", aLock.getLockId());
            // owner may be absent; encode as empty string rather than null
            aElementLock.setAttribute("owner",
                aLock.getOwnerInfo() == null ? "" : aLock.getOwnerInfo());
            aElementLocks.addContent(aElementLock);
        }
        return aElementLocks;
    }

    /**
     * Encodes the revision history: initial revision, versioning flag, latest
     * revision per branch, and the successor graph of revision numbers.
     * Inverse of {@link #decodeRevisionDescriptors}.
     */
    protected Element encodeRevisionDescriptors() {
        Element aRevisionsHistory = new Element("revisionsHistory");
        if (revisionDescriptors == null)
            return aRevisionsHistory;
        aRevisionsHistory.setAttribute(
            "initialRevision",
            encodeRevisionNumber(revisionDescriptors.getInitialRevision()));
        aRevisionsHistory.setAttribute("useVersioning",
            booleanToString(revisionDescriptors.isVersioned()));
        // System.out.println("---------- encodeRevisionDescriptors getLatestRevision="+
        //     revisionDescriptors.getLatestRevision());
        Element aBranchesElement = new Element("branches");
        Enumeration aBranches = revisionDescriptors.enumerateBranchNames();
        while (aBranches.hasMoreElements()) {
            String aBranchName = (String) aBranches.nextElement();
            Element aElementBranch = new Element("branch");
            aElementBranch.setAttribute("name", aBranchName);
            NodeRevisionNumber aRevisionNumber = revisionDescriptors.getLatestRevision(aBranchName);
            // "lastestRevision" [sic] — keep the historical attribute name for
            // on-disk compatibility; the decoder reads the same spelling.
            aElementBranch.setAttribute("lastestRevision", encodeRevisionNumber(aRevisionNumber));
            aBranchesElement.addContent(aElementBranch);
        }
        aRevisionsHistory.addContent(aBranchesElement);
        Element aRevisionsElement = new Element("revisions");
        Enumeration aRevisions = revisionDescriptors.enumerateRevisionNumbers();
        while (aRevisions.hasMoreElements()) {
            NodeRevisionNumber aRevisionNumber = (NodeRevisionNumber) aRevisions.nextElement();
            // NOTE(review): each revision entry is emitted under an element
            // named "branch" (not "revision") — the decoder iterates all
            // children regardless of name, so this round-trips, but verify.
            Element aRevisionElement = new Element("branch");
            aRevisionElement.setAttribute("start", encodeRevisionNumber(aRevisionNumber));
            Enumeration aSuccessors = revisionDescriptors.getSuccessors(aRevisionNumber);
            while (aSuccessors.hasMoreElements()) {
                NodeRevisionNumber aSuccessorRevisionNumber =
                    (NodeRevisionNumber) aSuccessors.nextElement();
                Element aSuccessorRevisionElement = new Element("revision");
                aSuccessorRevisionElement.setAttribute("number",
                    encodeRevisionNumber(aSuccessorRevisionNumber));
                aRevisionElement.addContent(aSuccessorRevisionElement);
            }
            aRevisionsElement.addContent(aRevisionElement);
        }
        aRevisionsHistory.addContent(aRevisionsElement);
        return aRevisionsHistory;
    }

    /**
     * Encodes every stored revision descriptor under a "descriptor" element.
     * Inverse of {@link #decodeRevisionDescriptor}.
     */
    protected Element encodeRevisionDescriptor() {
        Element aRet = new Element("descriptor");
        if (descriptor == null)
            return aRet;
        for (Enumeration aEnum = descriptor.elements(); aEnum.hasMoreElements();) {
            NodeRevisionDescriptor aRevisionDescriptor =
                (NodeRevisionDescriptor) aEnum.nextElement();
            aRet.addContent(encodeRevisionDescriptor(aRevisionDescriptor));
        }
        return aRet;
    }

    /**
     * Restores the complete descriptor state from a "data" root element.
     * Inverse of {@link #encode}. Order matters: decodeObject sets {@link #uri}
     * first, which the later decode steps read.
     */
    protected void decode(Element aRoot) throws ServiceAccessException {
        decodeObject(aRoot);
        decodePermissions(aRoot);
        decodeLocks(aRoot);
        decodeRevisionDescriptors(aRoot);
        decodeRevisionDescriptor(aRoot);
    }

    /**
     * Restores the object node by reflectively instantiating the persisted
     * class name (special-casing LinkNode's constructor signature), or sets
     * it to null when classname="null". Also updates {@link #uri}.
     */
    protected void decodeObject(Element aElement) throws ServiceAccessException {
        Element aElementObjectNode = aElement.getChild("objectnode");
        String aClasseName = aElementObjectNode.getAttributeValue("classname");
        if (!"null".equals(aClasseName)) {
            try {
                String aUri = aElementObjectNode.getAttributeValue("uri");
                Vector aChilds = createBindingVector(aElementObjectNode, "children", "child", false);
                Vector aParents = createBindingVector(aElementObjectNode, "parents", "parent", true);
                Vector aLinks = createVector(aElementObjectNode, "links", "link");
                // System.out.println("--------- decodeObject aChilds="+aChilds);
                // System.out.println("--------- decodeObject aLinks="+aLinks);
                Class aTypes[] = null;
                Object aArgs[] = null;
                if (aClasseName.equals(LinkNode.class.getName())) {
                    String aLinkTo = aElementObjectNode.getAttributeValue("linkTo");
                    aTypes = new Class[] { String.class, Vector.class, Vector.class, String.class };
                    aArgs = new Object[] { aUri, aChilds, aLinks, aLinkTo };
                } else {
                    aTypes = new Class[] { String.class, Vector.class, Vector.class, Vector.class };
                    aArgs = new Object[] { aUri, aChilds, aParents, aLinks };
                }
                object = (ObjectNode) createObject(aClasseName, aTypes, aArgs);
                object.setUri(object.getUuri());
            } catch (Exception e) {
                e.printStackTrace();
                throw new ServiceAccessException(null, e);
            }
            uri = object.getUri();
        } else {
            object = null;
            uri = aElementObjectNode.getAttributeValue("uri");
        }
    }

    /** Restores the permissions vector from the "permissions" element. */
    protected void decodePermissions(Element aElement) {
        permissions = new Vector();
        Element aPermissions = aElement.getChild("permissions");
        List aList = aPermissions.getChildren();
        for (int i = 0; i < aList.size(); i++) {
            Element aChild = (Element) aList.get(i);
            permissions.addElement(decodePermission(aChild, uri));
        }
    }

    /**
     * Restores the locks vector from the "locks" element; any parse failure
     * is wrapped in a ServiceAccessException. Inverse of {@link #encodeLocks}.
     */
    protected void decodeLocks(Element aElement) throws ServiceAccessException {
        try {
            locks = new Vector();
            Element aElementLocks = aElement.getChild("locks");
            List aList = aElementLocks.getChildren();
            for (int i = 0; i < aList.size(); i++) {
                Element aChild = (Element) aList.get(i);
                String aSubject = aChild.getAttributeValue("subjectUri");
                String aType = aChild.getAttributeValue("typeUri");
                Date aDateExpiration = dateFormat.parse(aChild.getAttributeValue("date"));
                boolean aInheritable =
                    new Boolean(aChild.getAttributeValue("inheritance")).booleanValue();
                // local is named aNegative but holds the "exclusive" flag
                boolean aNegative =
                    new Boolean(aChild.getAttributeValue("exclusive")).booleanValue();
                String aLockId = aChild.getAttributeValue("lockId");
                String ownerInfo = aChild.getAttributeValue("owner");
                locks.addElement(
                    new NodeLock(aLockId, uri, aSubject, aType, aDateExpiration,
                        aInheritable, aNegative, ownerInfo));
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new ServiceAccessException(null, e);
        }
    }

    /**
     * Restores the revision history from the "revisionsHistory" element.
     * A missing "branches" child yields empty, possibly-versioned descriptors.
     * Inverse of {@link #encodeRevisionDescriptors}.
     */
    protected void decodeRevisionDescriptors(Element aElement) {
        Element aRevisionsHistory = aElement.getChild("revisionsHistory");
        NodeRevisionNumber aInitialRevision =
            decodeRevisionNumber(aRevisionsHistory.getAttributeValue("initialRevision"));
        boolean aUseVersionning =
            new Boolean(aRevisionsHistory.getAttributeValue("useVersioning")).booleanValue();
        Element aBranchesElement = aRevisionsHistory.getChild("branches");
        if (aBranchesElement == null) {
            revisionDescriptors = new NodeRevisionDescriptors(
                uri, aInitialRevision,
                new Hashtable(), new Hashtable(), new Hashtable(),
                aUseVersionning);
            return;
        }
        List aList = aBranchesElement.getChildren();
        Hashtable aLastestRevisions = new Hashtable();
        for (int i = 0; i < aList.size(); i++) {
            Element aChild = (Element) aList.get(i);
            String aName = aChild.getAttributeValue("name");
            NodeRevisionNumber aRevisionNumber =
                decodeRevisionNumber(aChild.getAttributeValue("lastestRevision"));
            aLastestRevisions.put(aName, aRevisionNumber);
        }
        Hashtable aBranches = new Hashtable();
        Element aRevisionsElement = aRevisionsHistory.getChild("revisions");
        aList = aRevisionsElement.getChildren();
        for (int i = 0; i < aList.size(); i++) {
            Element aChild = (Element) aList.get(i);
            NodeRevisionNumber aStartNumber =
                decodeRevisionNumber(aChild.getAttributeValue("start"));
            List aSuccessors = aChild.getChildren();
            Vector aSuccessorsNumbers = new Vector();
            for (int k = 0; k < aSuccessors.size(); k++) {
                Element aSuccessor = (Element) aSuccessors.get(k);
                NodeRevisionNumber aRevisionNumber =
                    decodeRevisionNumber(aSuccessor.getAttributeValue("number"));
                aSuccessorsNumbers.addElement(aRevisionNumber);
            }
            aBranches.put(aStartNumber, aSuccessorsNumbers);
        }
        revisionDescriptors = new NodeRevisionDescriptors(
            uri, aInitialRevision,
            new Hashtable(), aLastestRevisions, aBranches,
            aUseVersionning);
    }

    /**
     * Restores the per-revision descriptors (branch name, labels, properties)
     * from the "descriptor" element; tolerates a missing element.
     * Inverse of {@link #encodeRevisionDescriptor}.
     */
    protected void decodeRevisionDescriptor(Element aParent) {
        descriptor = new Hashtable();
        Element aElement = aParent.getChild("descriptor");
        if (aElement == null)
            return;
        List aList = aElement.getChildren();
        for (int i = 0; i < aList.size(); i++) {
            Element aChild = (Element) aList.get(i);
            String aBranchName = aChild.getAttributeValue("branchName");
            NodeRevisionNumber aRevisionNumber =
                decodeRevisionNumber(aChild.getAttributeValue("number"));
            Vector aLabels = new Vector();
            Element aLabelsElement = (Element) aChild.getChild("labels");
            List aLabelList = aLabelsElement.getChildren();
            for (int k = 0; k < aLabelList.size(); k++) {
                Element aLabel = (Element) aLabelList.get(k);
                aLabels.addElement(aLabel.getAttributeValue("val"));
            }
            Hashtable aProperties = new Hashtable();
            Element aPropertiesElement = (Element) aChild.getChild("properties");
            List aPropertiesList = aPropertiesElement.getChildren();
            for (int k = 0; k < aPropertiesList.size(); k++) {
                Element aProperty = (Element) aPropertiesList.get(k);
                NodeProperty aProp = decodeNodeProperty(aProperty, uri);
                // properties are keyed by namespace + name concatenation
                String key = aProperty.getAttributeValue("namespace")
                    + aProperty.getAttributeValue("name");
                aProperties.put(key, aProp);
            }
            NodeRevisionDescriptor aNode =
                new NodeRevisionDescriptor(aRevisionNumber, aBranchName, aLabels, aProperties);
            descriptor.put(aRevisionNumber.toString(), aNode);
        }
    }
}
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =======================================================================*/ // This class has been generated, DO NOT EDIT! package org.tensorflow.op.core; import java.util.Arrays; import org.tensorflow.GraphOperation; import org.tensorflow.Operand; import org.tensorflow.Operation; import org.tensorflow.OperationBuilder; import org.tensorflow.Output; import org.tensorflow.ndarray.Shape; import org.tensorflow.op.Operands; import org.tensorflow.op.RawOp; import org.tensorflow.op.RawOpInputs; import org.tensorflow.op.Scope; import org.tensorflow.op.annotation.Endpoint; import org.tensorflow.op.annotation.OpInputsMetadata; import org.tensorflow.op.annotation.OpMetadata; import org.tensorflow.op.annotation.Operator; import org.tensorflow.proto.framework.DataType; import org.tensorflow.types.family.TType; /** * Holds state in the form of a tensor that persists across steps. * Outputs a ref to the tensor state so it may be read or modified. * TODO(zhifengc/mrry): Adds a pointer to a more detail document * about sharing states in tensorflow. 
 *
 * @param <T> data type for {@code ref} output
 */
@OpMetadata(
    opType = Variable.OP_NAME,
    inputsClass = Variable.Inputs.class
)
@Operator
public final class Variable<T extends TType> extends RawOp implements Operand<T> {
  /**
   * The name of this op, as known by TensorFlow core engine
   */
  public static final String OP_NAME = "VariableV2";

  private Output<T> ref;

  public Variable(Operation operation) {
    super(operation, OP_NAME);
    int outputIdx = 0;
    ref = operation.output(outputIdx++);
  }

  /**
   * Factory method to create a class wrapping a new VariableV2 operation.
   *
   * @param scope current scope
   * @param shape The shape of the variable tensor.
   * @param dtype The type of elements in the variable tensor.
   * @param options carries optional attribute values
   * @param <T> data type for {@code VariableV2} output and operands
   * @return a new instance of Variable
   */
  @Endpoint(
      describeByClass = true
  )
  public static <T extends TType> Variable<T> create(Scope scope, Shape shape, Class<T> dtype,
      Options... options) {
    OperationBuilder opBuilder = scope.opBuilder(OP_NAME, "Variable");
    opBuilder.setAttr("shape", shape);
    opBuilder.setAttr("dtype", Operands.toDataType(dtype));
    // Optional attributes are only set when explicitly provided, so the
    // runtime's defaults apply otherwise.
    if (options != null) {
      for (Options opts : options) {
        if (opts.container != null) {
          opBuilder.setAttr("container", opts.container);
        }
        if (opts.sharedName != null) {
          opBuilder.setAttr("shared_name", opts.sharedName);
        }
      }
    }
    return new Variable<>(opBuilder.build());
  }

  /**
   * Sets the container option.
   *
   * @param container If non-empty, this variable is placed in the given container.
   * Otherwise, a default container is used.
   * @return this Options instance.
   */
  public static Options container(String container) {
    return new Options().container(container);
  }

  /**
   * Sets the sharedName option.
   *
   * @param sharedName If non-empty, this variable is named in the given bucket
   * with this shared_name. Otherwise, the node name is used instead.
   * @return this Options instance.
   */
  public static Options sharedName(String sharedName) {
    return new Options().sharedName(sharedName);
  }

  /**
   * Gets ref.
   * A reference to the variable tensor.
   * @return ref.
   */
  public Output<T> ref() {
    return ref;
  }

  @Override
  public Output<T> asOutput() {
    return ref;
  }

  /**
   * Optional attributes for {@link org.tensorflow.op.core.Variable}
   */
  public static class Options {
    private String container;

    private String sharedName;

    private Options() {
    }

    /**
     * Sets the container option.
     *
     * @param container If non-empty, this variable is placed in the given container.
     * Otherwise, a default container is used.
     * @return this Options instance.
     */
    public Options container(String container) {
      this.container = container;
      return this;
    }

    /**
     * Sets the sharedName option.
     *
     * @param sharedName If non-empty, this variable is named in the given bucket
     * with this shared_name. Otherwise, the node name is used instead.
     * @return this Options instance.
     */
    public Options sharedName(String sharedName) {
      this.sharedName = sharedName;
      return this;
    }
  }

  /**
   * Reflected metadata about the inputs/attributes of a concrete VariableV2
   * graph operation.
   */
  @OpInputsMetadata(
      outputsClass = Variable.class
  )
  public static class Inputs extends RawOpInputs<Variable<?>> {
    /**
     * The shape of the variable tensor.
     */
    public final Shape shape;

    /**
     * The type of elements in the variable tensor.
     */
    public final DataType dtype;

    /**
     * If non-empty, this variable is placed in the given container.
     * Otherwise, a default container is used.
     */
    public final String container;

    /**
     * If non-empty, this variable is named in the given bucket
     * with this shared_name. Otherwise, the node name is used instead.
     */
    public final String sharedName;

    public Inputs(GraphOperation op) {
      super(new Variable<>(op), op, Arrays.asList("shape", "dtype", "container", "shared_name"));
      int inputIndex = 0;
      shape = op.attributes().getAttrShape("shape");
      dtype = op.attributes().getAttrType("dtype");
      container = op.attributes().getAttrString("container");
      sharedName = op.attributes().getAttrString("shared_name");
    }
  }
}
/* * Copyright (c) 2014 Intellectual Reserve, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package cf.spring.servicebroker; import cf.common.JsonObject; import cf.spring.HttpBasicAuthenticator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.web.HttpRequestHandler; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.Collections; import java.util.Map; import java.util.UUID; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Handles REST calls from the Cloud Controller and passes them to the appropriate service broker. 
* * @author Mike Heath */ public class ServiceBrokerHandler implements HttpRequestHandler { private static final Logger LOGGER = LoggerFactory.getLogger(ServiceBrokerHandler.class); private static final Pattern URI_PATTERN = Pattern.compile("^/v2/service_instances/(.+?)(/service_bindings/(.+))?"); public static final String SERVICE_ID_PARAM = "service_id"; public static final String PLAN_ID_PARAM = "plan_id"; private final ObjectMapper mapper = new ObjectMapper(); private final HttpBasicAuthenticator authenticator; private final CatalogAccessorProvider catalogAccessorProvider; public ServiceBrokerHandler(HttpBasicAuthenticator authenticator, CatalogAccessorProvider catalogAccessorProvider) { this.authenticator = authenticator; this.catalogAccessorProvider = catalogAccessorProvider; } @Override public void handleRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { if (!authenticator.authenticate(request, response)) { return; } ApiVersionValidator.validateApiVersion(request); try { response.setContentType(Constants.JSON_CONTENT_TYPE); final Matcher matcher = URI_PATTERN.matcher(request.getRequestURI()); if (!matcher.matches()) { throw new NotFoundException("Resource not found"); } final String instanceId = matcher.group(1); final String bindingId = matcher.group(3); if ("put".equalsIgnoreCase(request.getMethod())) { if (bindingId == null) { final ProvisionBody provisionBody = mapper.readValue(request.getInputStream(), ProvisionBody.class); final String serviceId = provisionBody.getServiceId(); final BrokerServiceAccessor accessor = getServiceAccessor(serviceId); final ProvisionRequest provisionRequest = new ProvisionRequest( UUID.fromString(instanceId), provisionBody.getPlanId(), provisionBody.getOrganizationGuid(), provisionBody.getSpaceGuid(), provisionBody.getParameters()); final ProvisionResponse provisionResponse = accessor.provision(provisionRequest); if (provisionResponse.isCreated()) { 
response.setStatus(HttpServletResponse.SC_CREATED); } mapper.writeValue(response.getOutputStream(), provisionResponse); } else { final BindBody bindBody = mapper.readValue(request.getInputStream(), BindBody.class); final String serviceId = bindBody.getServiceId(); final BrokerServiceAccessor accessor = getServiceAccessor(serviceId); final BindRequest.BindingType type; final String boundResource; if (bindBody.getBindResource().getAppGuid() != null) { type = BindRequest.BindingType.APPLICATION; boundResource = bindBody.getBindResource().getAppGuid(); } else if (bindBody.getBindResource().getRoute() != null) { type = BindRequest.BindingType.ROUTE; boundResource = bindBody.getBindResource().getRoute(); } else if (bindBody.getBindResource().getCredentialClientId() != null) { type = BindRequest.BindingType.SERVICE_KEY; boundResource = bindBody.getBindResource().getCredentialClientId(); } else { type = BindRequest.BindingType.APPLICATION; boundResource = bindBody.getApplicationGuid().toString(); } final BindRequest bindRequest = new BindRequest( UUID.fromString(instanceId), UUID.fromString(bindingId), type, boundResource, bindBody.getPlanId(), bindBody.getParameters()); final BindResponse bindResponse = accessor.bind(bindRequest); if (bindResponse.isCreated()) { response.setStatus(HttpServletResponse.SC_CREATED); } mapper.writeValue(response.getOutputStream(), bindResponse); } } else if ("patch".equalsIgnoreCase(request.getMethod())) { final UpdateBody updateBody = mapper.readValue(request.getInputStream(), UpdateBody.class); final String serviceId = updateBody.getServiceId(); final BrokerServiceAccessor accessor = getServiceAccessor(serviceId); try { final UpdateRequest updateRequest = new UpdateRequest(UUID.fromString(instanceId), updateBody.getPlanId(), updateBody.getParameters(), new UpdateRequest.PreviousValues(updateBody.getPreviousValues().getServiceId(), updateBody.getPreviousValues().getPlanId(), updateBody.getPreviousValues().getOrganizationId(), 
updateBody.getPreviousValues().getSpaceId())); accessor.update(updateRequest); } catch (MissingResourceException e) { response.setStatus(HttpServletResponse.SC_GONE); } response.getWriter().write("{}"); } else if ("delete".equalsIgnoreCase(request.getMethod())) { final String serviceId = request.getParameter(SERVICE_ID_PARAM); final String planId = request.getParameter(PLAN_ID_PARAM); final BrokerServiceAccessor accessor = getServiceAccessor(serviceId); try { if (bindingId == null) { // Deprovision final DeprovisionRequest deprovisionRequest = new DeprovisionRequest(UUID.fromString(instanceId), planId); accessor.deprovision(deprovisionRequest); } else { // Unbind final UnbindRequest unbindRequest = new UnbindRequest(UUID.fromString(bindingId), UUID.fromString(instanceId), planId); accessor.unbind(unbindRequest); } } catch (MissingResourceException e) { response.setStatus(HttpServletResponse.SC_GONE); } response.getWriter().write("{}"); } else { response.setStatus(HttpServletResponse.SC_METHOD_NOT_ALLOWED); } } catch (ConflictException e) { response.setStatus(HttpServletResponse.SC_CONFLICT); response.getWriter().write("{}"); } catch (ServiceBrokerException e) { LOGGER.warn("An error occurred processing a service broker request", e); response.setStatus(e.getHttpResponseCode()); mapper.writeValue(response.getOutputStream(), new ErrorBody(e.getMessage())); } catch (Throwable e) { LOGGER.error(e.getMessage(), e); response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); mapper.writeValue(response.getOutputStream(), new ErrorBody(e.getMessage())); } } private BrokerServiceAccessor getServiceAccessor(String serviceId) { return catalogAccessorProvider.getCatalogAccessor().getServiceAccessor(serviceId); } static class ProvisionBody extends JsonObject { public static final String SERVICE_ID_FIELD = "service_id"; public static final String PLAN_ID_FIELD = "plan_id"; public static final String ORGANIZATION_GUID_FIELD = "organization_guid"; public static final String 
SPACE_GUID_FIELD = "space_guid"; public static final String PARAMETERS = "parameters"; private final String serviceId; private final String planId; private final UUID organizationGuid; private final UUID spaceGuid; private final Map<String, Object> parameters; public ProvisionBody( @JsonProperty(SERVICE_ID_FIELD) String serviceId, @JsonProperty(PLAN_ID_FIELD) String planId, @JsonProperty(ORGANIZATION_GUID_FIELD) UUID organizationGuid, @JsonProperty(SPACE_GUID_FIELD) UUID spaceGuid, @JsonProperty(PARAMETERS) Map<String, Object> parameters) { this.serviceId = serviceId; this.planId = planId; this.organizationGuid = organizationGuid; this.spaceGuid = spaceGuid; this.parameters = parameters == null ? Collections.emptyMap() : parameters; } @JsonProperty(SERVICE_ID_FIELD) public String getServiceId() { return serviceId; } @JsonProperty(PLAN_ID_FIELD) public String getPlanId() { return planId; } @JsonProperty(ORGANIZATION_GUID_FIELD) public UUID getOrganizationGuid() { return organizationGuid; } @JsonProperty(SPACE_GUID_FIELD) public UUID getSpaceGuid() { return spaceGuid; } @JsonProperty(PARAMETERS) public Map<String, Object> getParameters() { return parameters; } } static class UpdateBody extends JsonObject { public static final String SERVICE_ID_FIELD = "service_id"; public static final String PLAN_ID_FIELD = "plan_id"; public static final String PARAMETERS = "parameters"; public static final String PREVIOUS_VALUES = "previous_values"; private final String serviceId; private final String planId; private final Map<String, Object> parameters; private final PreviousValues previousValues; public UpdateBody( @JsonProperty(SERVICE_ID_FIELD) String serviceId, @JsonProperty(PLAN_ID_FIELD) String planId, @JsonProperty(PARAMETERS) Map<String, Object> parameters, @JsonProperty(PREVIOUS_VALUES) PreviousValues previousValues) { this.serviceId = serviceId; this.planId = planId; this.parameters = parameters == null ? 
Collections.emptyMap() : parameters; this.previousValues = previousValues; } @JsonProperty(SERVICE_ID_FIELD) public String getServiceId() { return serviceId; } @JsonProperty(PLAN_ID_FIELD) public String getPlanId() { return planId; } @JsonProperty(PARAMETERS) public Map<String, Object> getParameters() { return parameters; } @JsonProperty(PREVIOUS_VALUES) public PreviousValues getPreviousValues() { return previousValues; } static class PreviousValues extends JsonObject { public static final String SERVICE_ID_FIELD = "service_id"; public static final String PLAN_ID_FIELD = "plan_id"; public static final String ORGANIZATION_ID_FIELD = "organization_id"; public static final String SPACE_ID_FIELD = "space_id"; private final String serviceId; private final String planId; private final UUID organizationId; private final UUID spaceId; public PreviousValues( @JsonProperty(SERVICE_ID_FIELD) String serviceId, @JsonProperty(PLAN_ID_FIELD) String planId, @JsonProperty(ORGANIZATION_ID_FIELD) UUID organizationId, @JsonProperty(SPACE_ID_FIELD) UUID spaceId) { this.serviceId = serviceId; this.planId = planId; this.organizationId = organizationId; this.spaceId = spaceId; } @JsonProperty(SERVICE_ID_FIELD) public String getServiceId() { return serviceId; } @JsonProperty(PLAN_ID_FIELD) public String getPlanId() { return planId; } @JsonProperty(ORGANIZATION_ID_FIELD) public UUID getOrganizationId() { return organizationId; } @JsonProperty(SPACE_ID_FIELD) public UUID getSpaceId() { return spaceId; } } } static class BindBody extends JsonObject { public static final String SERVICE_ID_FIELD = "service_id"; public static final String PLAN_ID = "plan_id"; public static final String APPLICATION_GUID = "app_guid"; public static final String BIND_RESOURCE = "bind_resource"; public static final String PARAMETERS = "parameters"; private final String serviceId; private final String planId; private final UUID applicationGuid; private final BindResource bindResource; private final Map<String, 
Object> parameters; public BindBody( @JsonProperty(SERVICE_ID_FIELD) String serviceId, @JsonProperty(PLAN_ID) String planId, @JsonProperty(APPLICATION_GUID) UUID applicationGuid, @JsonProperty(BIND_RESOURCE) BindResource bindResource, @JsonProperty(PARAMETERS) Map<String, Object> parameters) { this.serviceId = serviceId; this.planId = planId; this.applicationGuid = applicationGuid; this.bindResource = bindResource; this.parameters = parameters == null ? Collections.emptyMap() : parameters; } public String getServiceId() { return serviceId; } public String getPlanId() { return planId; } public UUID getApplicationGuid() { return applicationGuid; } public BindResource getBindResource() { return bindResource; } public Map<String, Object> getParameters() { return parameters; } } static class BindResource extends JsonObject { public static final String APP_GUID = "app_guid"; public static final String CREDENTIAL_CLIENT_ID = "credential_client_id"; public static final String ROUTE = "route"; private final String appGuid; private final String credentialClientId; private final String route; public BindResource( @JsonProperty(APP_GUID) String appGuid, @JsonProperty(CREDENTIAL_CLIENT_ID) String credentialClientId, @JsonProperty(ROUTE) String route) { this.appGuid = appGuid; this.credentialClientId = credentialClientId; this.route = route; } public String getAppGuid() { return appGuid; } public String getCredentialClientId() { return credentialClientId; } public String getRoute() { return route; } } static class ErrorBody extends JsonObject { private final String description; ErrorBody(String description) { this.description = description; } public String getDescription() { return description; } } }
package tek.ui; import java.nio.ByteBuffer; import java.nio.FloatBuffer; import org.joml.Vector2f; import org.lwjgl.BufferUtils; import org.lwjgl.opengl.GL11; import org.lwjgl.stb.STBTTAlignedQuad; import org.lwjgl.stb.STBTTBakedChar; import org.lwjgl.stb.STBTruetype; import tek.ResourceLoader; import tek.Window; public class UIFont { private int texId; private float fontHeight = 16.0f; private STBTTBakedChar.Buffer cdata; public UIFont(String path, float fontHeight){ ByteBuffer data = ResourceLoader.getBytes(path); cdata = STBTTBakedChar.malloc(96); ByteBuffer bitmap = BufferUtils.createByteBuffer(512 * 512); STBTruetype.stbtt_BakeFontBitmap(data, 32, bitmap, 512, 512, 32, cdata); texId = GL11.glGenTextures(); GL11.glBindTexture(GL11.GL_TEXTURE_2D, texId); GL11.glTexImage2D(GL11.GL_TEXTURE_2D, 0, GL11.GL_ALPHA, 512, 512, 0, GL11.GL_ALPHA, GL11.GL_UNSIGNED_BYTE, bitmap); GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MAG_FILTER, GL11.GL_LINEAR); GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL11.GL_TEXTURE_MIN_FILTER, GL11.GL_LINEAR); } public static void prepRender(){ GL11.glMatrixMode(GL11.GL_PROJECTION); GL11.glLoadIdentity(); GL11.glOrtho(0.0, Window.instance.getWidth(), Window.instance.getHeight(), 0.0, -1.0, 1.0); GL11.glMatrixMode(GL11.GL_MODELVIEW); } public float getWidth(String text){ float w = 0; FloatBuffer xbuf = BufferUtils.createFloatBuffer(1); FloatBuffer ybuf = BufferUtils.createFloatBuffer(1); STBTTAlignedQuad q = STBTTAlignedQuad.malloc(); for(char c : text.toCharArray()){ if(c == '\n'){ continue; }else if(c < 32 || c > 128){ continue; } STBTruetype.stbtt_GetBakedQuad(cdata, 512, 512, (int)(c - 32), xbuf, ybuf, q, true); w += q.x1() - q.x0(); } return w; } public Vector2f getWrappedSize(String text, float scale, int maxWidth){ float maxw = Float.MIN_VALUE; Vector2f s = new Vector2f(); FloatBuffer xbuf = BufferUtils.createFloatBuffer(1); FloatBuffer ybuf = BufferUtils.createFloatBuffer(1); STBTTAlignedQuad q = STBTTAlignedQuad.malloc(); 
for(char c : text.toCharArray()){ if(c == '\n'){ continue; }else if(c < 32 || c > 128){ continue; } STBTruetype.stbtt_GetBakedQuad(cdata, 512, 512, (int)(c - 32), xbuf, ybuf, q, true); float w = q.x1() - q.x0(); s.x += w; if(s.x >= maxWidth){ maxw = Math.max(s.x, maxw); s.x = 0; s.y += (fontHeight * 2); s.x += w; xbuf.put(0, 0); ybuf.put(0, ybuf.get(0) + (fontHeight * 2)); } } s.x = maxw; s.mul(scale); return s; } public float getHeight(){ return fontHeight; } public void printWrapped(float x, float y, float scale, String text, float r, float g, float b, int wrapWidth){ GL11.glPushMatrix(); GL11.glEnable(GL11.GL_TEXTURE_2D); GL11.glBindTexture(GL11.GL_TEXTURE_2D, texId); GL11.glColor3f(r, g, b); GL11.glScalef(scale, scale, 1f); FloatBuffer xbuf = BufferUtils.createFloatBuffer(1); FloatBuffer ybuf = BufferUtils.createFloatBuffer(1); xbuf.put(x); ybuf.put(Window.instance.getHeight() - y); xbuf.flip(); ybuf.flip(); STBTTAlignedQuad q = STBTTAlignedQuad.malloc(); GL11.glBegin(GL11.GL_QUADS); for(char c : text.toCharArray()){ if(c == '\n'){ ybuf.put(0, ybuf.get(0) + (fontHeight * 2f)); xbuf.put(0, x); continue; }else if(c < 32 || c > 128){ continue; } float nextX = q.x1() * scale; if(nextX >= wrapWidth){ xbuf.put(0, x); ybuf.put(0,ybuf.get(0) + (fontHeight * 2f)); } STBTruetype.stbtt_GetBakedQuad(cdata, 512, 512, (int)(c - 32), xbuf, ybuf, q, true); GL11.glTexCoord2f(q.s0(), q.t0()); GL11.glVertex2f(q.x0(), q.y0()); GL11.glTexCoord2f(q.s1(), q.t0()); GL11.glVertex2f(q.x1(), q.y0()); GL11.glTexCoord2f(q.s1(), q.t1()); GL11.glVertex2f(q.x1(), q.y1()); GL11.glTexCoord2f(q.s0(), q.t1()); GL11.glVertex2f(q.x0(), q.y1()); } GL11.glEnd(); GL11.glPopMatrix(); } public void print(float x, float y, float scale, String text, float r, float g, float b){ GL11.glPushMatrix(); GL11.glEnable(GL11.GL_TEXTURE_2D); GL11.glBindTexture(GL11.GL_TEXTURE_2D, texId); GL11.glColor3f(r, g, b); GL11.glScalef(scale, scale, 1f); FloatBuffer xbuf = BufferUtils.createFloatBuffer(1); FloatBuffer ybuf = 
BufferUtils.createFloatBuffer(1); xbuf.put(x); ybuf.put(Window.instance.getHeight() - y); xbuf.flip(); ybuf.flip(); STBTTAlignedQuad q = STBTTAlignedQuad.malloc(); GL11.glBegin(GL11.GL_QUADS); for(char c : text.toCharArray()){ if(c == '\n'){ ybuf.put(0, ybuf.get(0) + (fontHeight * 2f)); xbuf.put(0, x); continue; }else if(c < 32 || c > 128){ continue; } STBTruetype.stbtt_GetBakedQuad(cdata, 512, 512, (int)(c - 32), xbuf, ybuf, q, true); GL11.glTexCoord2f(q.s0(), q.t0()); GL11.glVertex2f(q.x0(), q.y0()); GL11.glTexCoord2f(q.s1(), q.t0()); GL11.glVertex2f(q.x1(), q.y0()); GL11.glTexCoord2f(q.s1(), q.t1()); GL11.glVertex2f(q.x1(), q.y1()); GL11.glTexCoord2f(q.s0(), q.t1()); GL11.glVertex2f(q.x0(), q.y1()); } GL11.glEnd(); GL11.glPopMatrix(); } }
package org.kie.scanner; import org.drools.compiler.kie.builder.impl.InternalKieContainer; import org.drools.compiler.kie.builder.impl.InternalKieModule; import org.drools.compiler.kie.builder.impl.InternalKieScanner; import org.drools.compiler.kie.builder.impl.MemoryKieModule; import org.drools.compiler.kie.builder.impl.ResultsImpl; import org.drools.compiler.kie.builder.impl.ZipKieModule; import org.drools.compiler.kproject.ReleaseIdImpl; import org.drools.compiler.kproject.models.KieModuleModelImpl; import org.kie.api.KieServices; import org.kie.api.builder.KieModule; import org.kie.api.builder.KieScanner; import org.kie.api.builder.Message; import org.kie.api.builder.ReleaseId; import org.kie.api.builder.model.KieModuleModel; import org.kie.api.runtime.KieContainer; import org.kie.scanner.management.KieScannerMBean; import org.kie.scanner.management.KieScannerMBeanImpl; import org.kie.scanner.management.MBeanUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.sonatype.aether.artifact.Artifact; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Timer; import java.util.TimerTask; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import static org.drools.compiler.kie.builder.impl.KieBuilderImpl.buildKieModule; import static org.drools.compiler.kie.builder.impl.KieBuilderImpl.setDefaultsforEmptyKieModule; import static org.kie.scanner.ArtifactResolver.getResolverFor; import static org.kie.scanner.DependencyDescriptor.isFixedVersion; public class KieRepositoryScannerImpl implements InternalKieScanner { private Timer timer; private static final Logger log = LoggerFactory.getLogger(KieScanner.class); private InternalKieContainer kieContainer; private final Map<ReleaseId, DependencyDescriptor> usedDependencies = new HashMap<ReleaseId, DependencyDescriptor>(); private ArtifactResolver 
artifactResolver; private Status status = Status.STARTING; private KieScannerMBean mbean; public synchronized void setKieContainer(KieContainer kieContainer) { if (this.kieContainer != null) { throw new RuntimeException("Cannot change KieContainer on an already initialized KieScanner"); } this.kieContainer = (InternalKieContainer)kieContainer; ReleaseId containerReleaseId = this.kieContainer.getContainerReleaseId(); if (containerReleaseId == null) { throw new RuntimeException("The KieContainer's ReleaseId cannot be null. Are you using a KieClasspathContainer?"); } artifactResolver = getResolverFor(kieContainer.getReleaseId(), true); if (!isFixedVersion(containerReleaseId.getVersion())) { usedDependencies.put(containerReleaseId, new DependencyDescriptor(this.kieContainer.getReleaseId(), this.kieContainer.getCreationTimestamp())); } indexAtifacts(); KieScannersRegistry.register(this); status = Status.STOPPED; if( MBeanUtils.isMBeanEnabled() ) { mbean = new KieScannerMBeanImpl(this); } } private ArtifactResolver getArtifactResolver() { if (artifactResolver == null) { artifactResolver = new ArtifactResolver(); } return artifactResolver; } public synchronized KieModule loadArtifact(ReleaseId releaseId) { return loadArtifact(releaseId, null); } public synchronized KieModule loadArtifact(ReleaseId releaseId, InputStream pomXml) { ArtifactResolver resolver = pomXml != null ? ArtifactResolver.getResolverFor(pomXml) : getArtifactResolver(); Artifact artifact = resolver.resolveArtifact(releaseId); return artifact != null ? buildArtifact(artifact, resolver) : loadPomArtifact(releaseId); } public synchronized String getArtifactVersion(ReleaseId releaseId) { if (!releaseId.isSnapshot()) { return releaseId.getVersion(); } Artifact artifact = getArtifactResolver().resolveArtifact(releaseId); return artifact != null ? 
artifact.getVersion() : null; } public synchronized ReleaseId getScannerReleaseId() { return ((InternalKieContainer)kieContainer).getContainerReleaseId(); } public synchronized ReleaseId getCurrentReleaseId() { return kieContainer.getReleaseId(); } public synchronized Status getStatus() { return status; } private KieModule loadPomArtifact(ReleaseId releaseId) { ArtifactResolver resolver = getResolverFor(releaseId, false); if (resolver == null) { return null; } MemoryKieModule kieModule = new MemoryKieModule(releaseId); addDependencies(kieModule, resolver, resolver.getPomDirectDependencies()); build(kieModule); return kieModule; } private InternalKieModule buildArtifact(Artifact artifact, ArtifactResolver resolver) { DependencyDescriptor dependencyDescriptor = new DependencyDescriptor(artifact); ReleaseId releaseId = dependencyDescriptor.getReleaseId(); if (releaseId.isSnapshot()) { ((ReleaseIdImpl)releaseId).setSnapshotVersion(artifact.getVersion()); } ZipKieModule kieModule = createZipKieModule(releaseId, artifact.getFile()); if (kieModule != null) { addDependencies(kieModule, resolver, resolver.getArtifactDependecies(dependencyDescriptor.toString())); build(kieModule); } return kieModule; } private void addDependencies(InternalKieModule kieModule, ArtifactResolver resolver, List<DependencyDescriptor> dependencies) { for (DependencyDescriptor dep : dependencies) { InternalKieModule dependency = (InternalKieModule) KieServices.Factory.get().getRepository().getKieModule(dep.getReleaseId()); if (dependency != null) { kieModule.addKieDependency(dependency); } else { Artifact depArtifact = resolver.resolveArtifact(dep.getReleaseId()); if (depArtifact != null && isKJar(depArtifact.getFile())) { ReleaseId depReleaseId = new DependencyDescriptor(depArtifact).getReleaseId(); ZipKieModule zipKieModule = createZipKieModule(depReleaseId, depArtifact.getFile()); if (zipKieModule != null) { kieModule.addKieDependency(zipKieModule); } } } } } private static ZipKieModule 
createZipKieModule(ReleaseId releaseId, File jar) { KieModuleModel kieModuleModel = getKieModuleModelFromJar(jar); return kieModuleModel != null ? new ZipKieModule(releaseId, kieModuleModel, jar) : null; } private static KieModuleModel getKieModuleModelFromJar(File jar) { ZipFile zipFile = null; try { zipFile = new ZipFile( jar ); ZipEntry zipEntry = zipFile.getEntry(KieModuleModelImpl.KMODULE_JAR_PATH); KieModuleModel kieModuleModel = KieModuleModelImpl.fromXML(zipFile.getInputStream(zipEntry)); setDefaultsforEmptyKieModule(kieModuleModel); return kieModuleModel; } catch ( Exception e ) { } finally { if (zipFile != null) { try { zipFile.close(); } catch (IOException e) { } } } return null; } private ResultsImpl build(InternalKieModule kieModule) { ResultsImpl messages = new ResultsImpl(); buildKieModule(kieModule, messages); return messages; } public synchronized void start(long pollingInterval) { if (getStatus() == Status.SHUTDOWN ) { throw new IllegalStateException("The scanner was shut down and can no longer be started."); } if (pollingInterval <= 0) { throw new IllegalArgumentException("pollingInterval must be positive"); } if (timer != null) { throw new IllegalStateException("The scanner is already running"); } startScanTask(pollingInterval); } public synchronized void stop() { if (getStatus() == Status.SHUTDOWN ) { throw new IllegalStateException("The scanner was already shut down."); } if (timer != null) { timer.cancel(); timer = null; } status = Status.STOPPED; } public void shutdown() { if( getStatus() != Status.SHUTDOWN ) { stop(); // making sure it is stopped status = Status.SHUTDOWN; } } private void startScanTask(long pollingInterval) { status = Status.RUNNING; timer = new Timer(true); timer.schedule(new ScanTask(), pollingInterval, pollingInterval); } private class ScanTask extends TimerTask { public void run() { scanNow(); status = Status.RUNNING; } } public synchronized void scanNow() { if (getStatus() == Status.SHUTDOWN ) { throw new 
IllegalStateException("The scanner was already shut down and can no longer be used."); } try { status = Status.SCANNING; Map<DependencyDescriptor, Artifact> updatedArtifacts = scanForUpdates(); if (updatedArtifacts.isEmpty()) { status = Status.STOPPED; return; } status = Status.UPDATING; // build the dependencies first Map.Entry<DependencyDescriptor, Artifact> containerEntry = null; for (Map.Entry<DependencyDescriptor, Artifact> entry : updatedArtifacts.entrySet()) { if (entry.getKey().isSameArtifact(kieContainer.getContainerReleaseId())) { containerEntry = entry; } else { updateKieModule(entry.getKey(), entry.getValue()); } } if (containerEntry != null) { updateKieModule(containerEntry.getKey(), containerEntry.getValue()); } log.info("The following artifacts have been updated: " + updatedArtifacts); // show we catch exceptions here and shutdown the scanner if one happens? } finally { status = Status.STOPPED; } } private void updateKieModule(DependencyDescriptor oldDependency, Artifact artifact) { ReleaseId newReleaseId = new DependencyDescriptor(artifact).getReleaseId(); ZipKieModule kieModule = createZipKieModule(newReleaseId, artifact.getFile()); if (kieModule != null) { addDependencies(kieModule, artifactResolver, artifactResolver.getArtifactDependecies(newReleaseId.toString())); ResultsImpl messages = build(kieModule); if ( messages.filterMessages(Message.Level.ERROR).isEmpty()) { ((InternalKieContainer)kieContainer).updateDependencyToVersion(oldDependency.getArtifactReleaseId(), newReleaseId); oldDependency.setArtifactVersion(artifact.getVersion()); } } } private Map<DependencyDescriptor, Artifact> scanForUpdates() { Map<ReleaseId, DependencyDescriptor> replacedArtifacts = new HashMap<ReleaseId, DependencyDescriptor>(); Map<DependencyDescriptor, Artifact> newArtifacts = new HashMap<DependencyDescriptor, Artifact>(); ArtifactResolver artifactResolver = getArtifactResolver(); // WORKAROUND: since aether doesn't obey update policy we have to create a new Session 
for each scan artifactResolver.renewSession(); for (Map.Entry<ReleaseId, DependencyDescriptor> entry : usedDependencies.entrySet()) { Artifact newArtifact = artifactResolver.resolveArtifact(entry.getKey()); if (newArtifact == null) { continue; } DependencyDescriptor resolvedDep = new DependencyDescriptor(newArtifact); if (resolvedDep.isNewerThan(entry.getValue())) { newArtifacts.put(entry.getValue(), newArtifact); replacedArtifacts.put(entry.getKey(), resolvedDep); } } for (Map.Entry<ReleaseId, DependencyDescriptor> entry : replacedArtifacts.entrySet()) { usedDependencies.put(entry.getKey(), entry.getValue()); } return newArtifacts; } private void indexAtifacts() { Collection<DependencyDescriptor> deps = getArtifactResolver().getAllDependecies(); for (DependencyDescriptor dep : deps) { if (!dep.isFixedVersion()) { Artifact artifact = getArtifactResolver().resolveArtifact(dep.getReleaseId()); log.debug( artifact + " resolved to " + artifact.getFile() ); if (isKJar(artifact.getFile())) { usedDependencies.put(dep.getReleaseId(), new DependencyDescriptor(artifact)); } } } } private boolean isKJar(File jar) { ZipFile zipFile; try { zipFile = new ZipFile( jar ); } catch (IOException e) { throw new RuntimeException(e); } ZipEntry zipEntry = zipFile.getEntry( KieModuleModelImpl.KMODULE_JAR_PATH ); return zipEntry != null; } public synchronized KieScannerMBean getMBean() { return this.mbean; } }
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.security.irs; import java.util.Arrays; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; import org.joda.beans.Bean; import org.joda.beans.BeanDefinition; import org.joda.beans.ImmutableBean; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaProperty; import org.joda.beans.Property; import org.joda.beans.PropertyDefinition; import org.joda.beans.impl.direct.DirectFieldsBeanBuilder; import org.joda.beans.impl.direct.DirectMetaBean; import org.joda.beans.impl.direct.DirectMetaProperty; import org.joda.beans.impl.direct.DirectMetaPropertyMap; import org.threeten.bp.LocalDate; import com.opengamma.financial.convention.InterestRateSwapLegConvention; import com.opengamma.financial.convention.businessday.BusinessDayConvention; import com.opengamma.financial.convention.calendar.Calendar; import com.opengamma.financial.convention.frequency.PeriodFrequency; import com.opengamma.financial.convention.rolldate.RollDateAdjuster; /** * Class to wrap a convention and to allow overrides to the convention to be * accessed in a uniform manner. * * Currently assumes that all overrides are available if provided for a period. */ @BeanDefinition public final class FixedInterestRateSwapLegSchedule implements ImmutableBean { @PropertyDefinition private final InterestRateSwapLegConvention _convention; // overrides to convention schedules, if provided for a given period use the provided date // and ignore any roll convention etc. /** * The periods for which custom dates are provided. */ @PropertyDefinition private final int[] _dates; /** * The custom payment dates. */ @PropertyDefinition private final LocalDate[] _paymentDates; /** * The custom calculation dates. 
*/ @PropertyDefinition private final LocalDate[] _calculationDates; public LocalDate getPaymentDate(final int date, final LocalDate startDate, final Calendar calendar) { if (getPaymentDates() != null && getPaymentDates().length != 0) { final int index = Arrays.binarySearch(_dates, date); if (index >= 0) { return _paymentDates[index]; } } // override not provided - fall back to convention final int monthsToAdvance = (int) PeriodFrequency.convertToPeriodFrequency(_convention.getPaymentFrequency()).getPeriod().toTotalMonths() * date; final RollDateAdjuster adjuster = _convention.getRollConvention().getRollDateAdjuster(monthsToAdvance); final BusinessDayConvention convention = _convention.getPaymentDayConvention(); final int settlementDays = _convention.getSettlementDays(); return convention.adjustDate(calendar, startDate.plusMonths(adjuster.getMonthsToAdjust()).minusDays(settlementDays).with(adjuster)); } public LocalDate getCalculationDate(final int date, final LocalDate startDate, final Calendar calendar) { if (getCalculationDates() != null && getCalculationDates().length != 0) { final int index = Arrays.binarySearch(_dates, date); if (index >= 0) { return _calculationDates[index]; } } // override not provided - fall back to convention final int monthsToAdvance = (int) PeriodFrequency.convertToPeriodFrequency(_convention.getCalculationFrequency()).getPeriod().toTotalMonths() * date; final RollDateAdjuster adjuster = _convention.getRollConvention().getRollDateAdjuster(monthsToAdvance); final BusinessDayConvention convention = _convention.getCalculationBusinessDayConvention(); return convention.adjustDate(calendar, startDate.plusMonths(adjuster.getMonthsToAdjust()).with(adjuster)); } //------------------------- AUTOGENERATED START ------------------------- ///CLOVER:OFF /** * The meta-bean for {@code FixedInterestRateSwapLegSchedule}. 
* @return the meta-bean, not null */ public static FixedInterestRateSwapLegSchedule.Meta meta() { return FixedInterestRateSwapLegSchedule.Meta.INSTANCE; } static { JodaBeanUtils.registerMetaBean(FixedInterestRateSwapLegSchedule.Meta.INSTANCE); } /** * Returns a builder used to create an instance of the bean. * @return the builder, not null */ public static FixedInterestRateSwapLegSchedule.Builder builder() { return new FixedInterestRateSwapLegSchedule.Builder(); } private FixedInterestRateSwapLegSchedule( InterestRateSwapLegConvention convention, int[] dates, LocalDate[] paymentDates, LocalDate[] calculationDates) { this._convention = convention; this._dates = (dates != null ? dates.clone() : null); this._paymentDates = paymentDates; this._calculationDates = calculationDates; } @Override public FixedInterestRateSwapLegSchedule.Meta metaBean() { return FixedInterestRateSwapLegSchedule.Meta.INSTANCE; } @Override public <R> Property<R> property(String propertyName) { return metaBean().<R>metaProperty(propertyName).createProperty(this); } @Override public Set<String> propertyNames() { return metaBean().metaPropertyMap().keySet(); } //----------------------------------------------------------------------- /** * Gets the convention. * @return the value of the property */ public InterestRateSwapLegConvention getConvention() { return _convention; } //----------------------------------------------------------------------- /** * Gets the periods for which custom dates are provided. * @return the value of the property */ public int[] getDates() { return (_dates != null ? _dates.clone() : null); } //----------------------------------------------------------------------- /** * Gets the custom payment dates. * @return the value of the property */ public LocalDate[] getPaymentDates() { return _paymentDates; } //----------------------------------------------------------------------- /** * Gets the custom calculation dates. 
* @return the value of the property */ public LocalDate[] getCalculationDates() { return _calculationDates; } //----------------------------------------------------------------------- /** * Returns a builder that allows this bean to be mutated. * @return the mutable builder, not null */ public Builder toBuilder() { return new Builder(this); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj != null && obj.getClass() == this.getClass()) { FixedInterestRateSwapLegSchedule other = (FixedInterestRateSwapLegSchedule) obj; return JodaBeanUtils.equal(getConvention(), other.getConvention()) && JodaBeanUtils.equal(getDates(), other.getDates()) && JodaBeanUtils.equal(getPaymentDates(), other.getPaymentDates()) && JodaBeanUtils.equal(getCalculationDates(), other.getCalculationDates()); } return false; } @Override public int hashCode() { int hash = getClass().hashCode(); hash = hash * 31 + JodaBeanUtils.hashCode(getConvention()); hash = hash * 31 + JodaBeanUtils.hashCode(getDates()); hash = hash * 31 + JodaBeanUtils.hashCode(getPaymentDates()); hash = hash * 31 + JodaBeanUtils.hashCode(getCalculationDates()); return hash; } @Override public String toString() { StringBuilder buf = new StringBuilder(160); buf.append("FixedInterestRateSwapLegSchedule{"); buf.append("convention").append('=').append(getConvention()).append(',').append(' '); buf.append("dates").append('=').append(getDates()).append(',').append(' '); buf.append("paymentDates").append('=').append(getPaymentDates()).append(',').append(' '); buf.append("calculationDates").append('=').append(JodaBeanUtils.toString(getCalculationDates())); buf.append('}'); return buf.toString(); } //----------------------------------------------------------------------- /** * The meta-bean for {@code FixedInterestRateSwapLegSchedule}. */ public static final class Meta extends DirectMetaBean { /** * The singleton instance of the meta-bean. 
*/ static final Meta INSTANCE = new Meta(); /** * The meta-property for the {@code convention} property. */ private final MetaProperty<InterestRateSwapLegConvention> _convention = DirectMetaProperty.ofImmutable( this, "convention", FixedInterestRateSwapLegSchedule.class, InterestRateSwapLegConvention.class); /** * The meta-property for the {@code dates} property. */ private final MetaProperty<int[]> _dates = DirectMetaProperty.ofImmutable( this, "dates", FixedInterestRateSwapLegSchedule.class, int[].class); /** * The meta-property for the {@code paymentDates} property. */ private final MetaProperty<LocalDate[]> _paymentDates = DirectMetaProperty.ofImmutable( this, "paymentDates", FixedInterestRateSwapLegSchedule.class, LocalDate[].class); /** * The meta-property for the {@code calculationDates} property. */ private final MetaProperty<LocalDate[]> _calculationDates = DirectMetaProperty.ofImmutable( this, "calculationDates", FixedInterestRateSwapLegSchedule.class, LocalDate[].class); /** * The meta-properties. */ private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap( this, null, "convention", "dates", "paymentDates", "calculationDates"); /** * Restricted constructor. */ private Meta() { } @Override protected MetaProperty<?> metaPropertyGet(String propertyName) { switch (propertyName.hashCode()) { case 2039569265: // convention return _convention; case 95356549: // dates return _dates; case -522438625: // paymentDates return _paymentDates; case 739970364: // calculationDates return _calculationDates; } return super.metaPropertyGet(propertyName); } @Override public FixedInterestRateSwapLegSchedule.Builder builder() { return new FixedInterestRateSwapLegSchedule.Builder(); } @Override public Class<? 
extends FixedInterestRateSwapLegSchedule> beanType() { return FixedInterestRateSwapLegSchedule.class; } @Override public Map<String, MetaProperty<?>> metaPropertyMap() { return _metaPropertyMap$; } //----------------------------------------------------------------------- /** * The meta-property for the {@code convention} property. * @return the meta-property, not null */ public MetaProperty<InterestRateSwapLegConvention> convention() { return _convention; } /** * The meta-property for the {@code dates} property. * @return the meta-property, not null */ public MetaProperty<int[]> dates() { return _dates; } /** * The meta-property for the {@code paymentDates} property. * @return the meta-property, not null */ public MetaProperty<LocalDate[]> paymentDates() { return _paymentDates; } /** * The meta-property for the {@code calculationDates} property. * @return the meta-property, not null */ public MetaProperty<LocalDate[]> calculationDates() { return _calculationDates; } //----------------------------------------------------------------------- @Override protected Object propertyGet(Bean bean, String propertyName, boolean quiet) { switch (propertyName.hashCode()) { case 2039569265: // convention return ((FixedInterestRateSwapLegSchedule) bean).getConvention(); case 95356549: // dates return ((FixedInterestRateSwapLegSchedule) bean).getDates(); case -522438625: // paymentDates return ((FixedInterestRateSwapLegSchedule) bean).getPaymentDates(); case 739970364: // calculationDates return ((FixedInterestRateSwapLegSchedule) bean).getCalculationDates(); } return super.propertyGet(bean, propertyName, quiet); } @Override protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) { metaProperty(propertyName); if (quiet) { return; } throw new UnsupportedOperationException("Property cannot be written: " + propertyName); } } //----------------------------------------------------------------------- /** * The bean-builder for {@code 
FixedInterestRateSwapLegSchedule}. */ public static final class Builder extends DirectFieldsBeanBuilder<FixedInterestRateSwapLegSchedule> { private InterestRateSwapLegConvention _convention; private int[] _dates; private LocalDate[] _paymentDates; private LocalDate[] _calculationDates; /** * Restricted constructor. */ private Builder() { } /** * Restricted copy constructor. * @param beanToCopy the bean to copy from, not null */ private Builder(FixedInterestRateSwapLegSchedule beanToCopy) { this._convention = beanToCopy.getConvention(); this._dates = (beanToCopy.getDates() != null ? beanToCopy.getDates().clone() : null); this._paymentDates = beanToCopy.getPaymentDates(); this._calculationDates = beanToCopy.getCalculationDates(); } //----------------------------------------------------------------------- @Override public Object get(String propertyName) { switch (propertyName.hashCode()) { case 2039569265: // convention return _convention; case 95356549: // dates return _dates; case -522438625: // paymentDates return _paymentDates; case 739970364: // calculationDates return _calculationDates; default: throw new NoSuchElementException("Unknown property: " + propertyName); } } @Override public Builder set(String propertyName, Object newValue) { switch (propertyName.hashCode()) { case 2039569265: // convention this._convention = (InterestRateSwapLegConvention) newValue; break; case 95356549: // dates this._dates = (int[]) newValue; break; case -522438625: // paymentDates this._paymentDates = (LocalDate[]) newValue; break; case 739970364: // calculationDates this._calculationDates = (LocalDate[]) newValue; break; default: throw new NoSuchElementException("Unknown property: " + propertyName); } return this; } @Override public Builder set(MetaProperty<?> property, Object value) { super.set(property, value); return this; } @Override public Builder setString(String propertyName, String value) { setString(meta().metaProperty(propertyName), value); return this; } @Override 
public Builder setString(MetaProperty<?> property, String value) { super.setString(property, value); return this; } @Override public Builder setAll(Map<String, ? extends Object> propertyValueMap) { super.setAll(propertyValueMap); return this; } @Override public FixedInterestRateSwapLegSchedule build() { return new FixedInterestRateSwapLegSchedule( _convention, _dates, _paymentDates, _calculationDates); } //----------------------------------------------------------------------- /** * Sets the {@code convention} property in the builder. * @param convention the new value * @return this, for chaining, not null */ public Builder convention(InterestRateSwapLegConvention convention) { this._convention = convention; return this; } /** * Sets the {@code dates} property in the builder. * @param dates the new value * @return this, for chaining, not null */ public Builder dates(int... dates) { this._dates = dates; return this; } /** * Sets the {@code paymentDates} property in the builder. * @param paymentDates the new value * @return this, for chaining, not null */ public Builder paymentDates(LocalDate... paymentDates) { this._paymentDates = paymentDates; return this; } /** * Sets the {@code calculationDates} property in the builder. * @param calculationDates the new value * @return this, for chaining, not null */ public Builder calculationDates(LocalDate... 
calculationDates) { this._calculationDates = calculationDates; return this; } //----------------------------------------------------------------------- @Override public String toString() { StringBuilder buf = new StringBuilder(160); buf.append("FixedInterestRateSwapLegSchedule.Builder{"); buf.append("convention").append('=').append(JodaBeanUtils.toString(_convention)).append(',').append(' '); buf.append("dates").append('=').append(JodaBeanUtils.toString(_dates)).append(',').append(' '); buf.append("paymentDates").append('=').append(JodaBeanUtils.toString(_paymentDates)).append(',').append(' '); buf.append("calculationDates").append('=').append(JodaBeanUtils.toString(_calculationDates)); buf.append('}'); return buf.toString(); } } ///CLOVER:ON //-------------------------- AUTOGENERATED END -------------------------- }
package fr.insee.rmes.metadata.service.questionnaire; import java.io.StringReader; import java.util.ArrayList; import java.util.List; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import fr.insee.rmes.metadata.model.ColecticaItem; import fr.insee.rmes.metadata.model.ColecticaItemRef; import fr.insee.rmes.metadata.model.ColecticaItemRefList; import fr.insee.rmes.metadata.model.ItemWithParent; import fr.insee.rmes.metadata.model.ObjectColecticaPost; import fr.insee.rmes.metadata.model.Relationship; import fr.insee.rmes.metadata.model.TargetItem; import fr.insee.rmes.metadata.repository.GroupRepository; import fr.insee.rmes.metadata.repository.MetadataRepository; import fr.insee.rmes.metadata.service.MetadataService; import fr.insee.rmes.metadata.service.MetadataServiceItem; import fr.insee.rmes.metadata.service.ddiinstance.DDIInstanceService; import fr.insee.rmes.metadata.utils.DocumentBuilderUtils; import fr.insee.rmes.metadata.utils.XpathProcessor; import fr.insee.rmes.search.model.DDIItemType; import fr.insee.rmes.search.service.SearchService; import fr.insee.rmes.utils.ddi.DDIDocumentBuilder; import fr.insee.rmes.utils.ddi.UtilXML; import fr.insee.rmes.webservice.rest.RMeSException; @Service public class QuestionnaireServiceImpl implements QuestionnaireService { private final static Logger logger = LogManager.getLogger(QuestionnaireServiceImpl.class); @Autowired MetadataRepository metadataRepository; @Autowired MetadataServiceItem metadataServiceItem; @Autowired MetadataService metadataService; @Autowired DDIInstanceService ddiInstanceService; @Autowired SearchService searchService; 
@Autowired GroupRepository groupRepository; @Autowired XpathProcessor xpathProcessor; private String idDDIInstrument; private ColecticaItem instrument; private ColecticaItem DDIInstance; private ColecticaItem variableScheme; private ColecticaItem subGroupItem; private ColecticaItem groupItem; private ColecticaItem studyUnitItem; private ColecticaItem dataCollection; private ColecticaItem instrumentScheme; private Node groupNode; private Node subGroupNode; private Node studyUnitNode; private Node DCNode; private Node variableSchemeNode; private Node instrumentSchemeNode; private Node instrumentNode; @Override public String getQuestionnaire(String idDDIInstrument) throws Exception { this.idDDIInstrument = idDDIInstrument; // Step 1 : Get the DDIInstance, the DDIInstrument and Check type (an // Exception throws if not) ColecticaItem DDIInstrument = metadataServiceItem.getItemByType(idDDIInstrument, DDIItemType.QUESTIONNAIRE); this.instrument = DDIInstrument; ObjectColecticaPost objectColecticaPost = new ObjectColecticaPost(); List<String> itemTypes = new ArrayList<String>(); itemTypes.add(DDIItemType.INSTRUMENT_SCHEME.getUUID()); objectColecticaPost.setItemTypes(itemTypes); TargetItem targetItem = new TargetItem(); targetItem.setAgencyId(DDIInstrument.agencyId); targetItem.setIdentifier(DDIInstrument.identifier); targetItem.setVersion(Integer.valueOf(DDIInstrument.version)); objectColecticaPost.setTargetItem(targetItem); objectColecticaPost.setUseDistinctResultItem(true); objectColecticaPost.setUseDistinctTargetItem(true); Relationship[] relationshipsInstrument = metadataService.getRelationship(objectColecticaPost); String DDIidentifier = relationshipsInstrument[0].getIdentifierTriple().getIdentifier(); instrumentScheme = metadataServiceItem.getItem(DDIidentifier); dataCollection = searchItemParent(itemTypes, DDIItemType.DATA_COLLECTION, objectColecticaPost, instrumentScheme); variableScheme = mockGetVariableScheme(dataCollection); studyUnitItem = 
searchItemParent(itemTypes, DDIItemType.STUDY_UNIT, objectColecticaPost, dataCollection); subGroupItem = searchItemParent(itemTypes, DDIItemType.SUB_GROUP, objectColecticaPost, studyUnitItem); groupItem = searchItemParent(itemTypes, DDIItemType.GROUP, objectColecticaPost, subGroupItem); DDIInstance = searchItemParent(itemTypes, DDIItemType.DDI_INSTANCE, objectColecticaPost, groupItem); return buildQuestionnaire(); } private ColecticaItem mockGetVariableScheme(ColecticaItem dataCollection) throws Exception { // TODO: remove mock when context will be fixed return metadataServiceItem.getItem("10489bc2-11bb-4688-b56f-3886c3f81c58"); } private ColecticaItem searchItemParent(List<String> itemTypes, DDIItemType ddiItemType, ObjectColecticaPost objectColecticaPost, ColecticaItem itemChild) throws Exception { itemTypes.clear(); itemTypes.add(ddiItemType.getUUID()); objectColecticaPost.setItemTypes(itemTypes); TargetItem targetItem = new TargetItem(); targetItem.setAgencyId(itemChild.agencyId); targetItem.setIdentifier(itemChild.identifier); targetItem.setVersion(Integer.valueOf(itemChild.version)); objectColecticaPost.setTargetItem(targetItem); objectColecticaPost.setUseDistinctResultItem(true); objectColecticaPost.setUseDistinctTargetItem(true); Relationship[] relationshipsInstrument = metadataService.getRelationship(objectColecticaPost); String DDIidentifier = relationshipsInstrument[0].getIdentifierTriple().getIdentifier(); ColecticaItem item = metadataServiceItem.getItem(DDIidentifier); return item; } /** * This method build the DDI Questionnaire * * @return DDIQuestionnaire.toString() * @throws Exception */ private String buildQuestionnaire() throws Exception { // Step 1 : get all the children of the instrument (include the // instrument by default) ColecticaItemRefList listChildrenWithoutInstrument = metadataServiceItem .getChildrenRef(instrument.getIdentifier()); ColecticaItemRef instrumentTemp = null; // Step 2 : Among all of the itemsReferences, the instrument will 
be get // and removed from this list for (ColecticaItemRef childInstrument : listChildrenWithoutInstrument.identifiers) { if (childInstrument.identifier.equals(idDDIInstrument)) { instrumentTemp = childInstrument; } } if (instrumentTemp != null) { listChildrenWithoutInstrument.identifiers.remove(instrumentTemp); } // Step 3 : Build the group, from the // studyUnit to the group DDIDocumentBuilder docBuilder = new DDIDocumentBuilder(); ddiInstanceService.addDDIInstanceInformationToDocBuilder(DDIInstance,docBuilder); convertAsNodesWithXPath(docBuilder); appendChildsByParent(docBuilder); // Step 4 : return the filled out enveloppe // as result processItemsRessourcePackage(docBuilder, listChildrenWithoutInstrument); return docBuilder.toString(); } private void appendChildsByParent(DDIDocumentBuilder docBuilder) { removeReferences(groupNode); docBuilder.appendChild(groupNode); removeReferences(subGroupNode); docBuilder.appendChildByParent("Group", subGroupNode); removeReferences(studyUnitNode); docBuilder.appendChildByParent("SubGroup", studyUnitNode); // Step 1 : Insert the content of the // DataCollection got to the enveloppe as // a child of the StudyUnit. 
removeReferences(DCNode); docBuilder.appendChildByParent("StudyUnit", DCNode); removeReferences(instrumentSchemeNode); docBuilder.appendChildByParent("DataCollection", instrumentSchemeNode); docBuilder.appendChildByParent("InstrumentScheme", instrumentNode); } private void convertAsNodesWithXPath(DDIDocumentBuilder docBuilder) throws Exception { this.subGroupNode = DocumentBuilderUtils.getNode( UtilXML.nodeToString(xpathProcessor.queryList(subGroupItem.getItem(), "/Fragment[1]/*").item(0)), docBuilder); subGroupNode = DocumentBuilderUtils.getNode(UtilXML.nodeToString(subGroupNode), docBuilder); this.groupNode = DocumentBuilderUtils.getNode( UtilXML.nodeToString(xpathProcessor.queryList(groupItem.getItem(), "/Fragment[1]/*").item(0)), docBuilder); this.studyUnitNode = DocumentBuilderUtils.getNode( UtilXML.nodeToString(xpathProcessor.queryList(studyUnitItem.getItem(), "/Fragment[1]/*").item(0)), docBuilder); this.DCNode = DocumentBuilderUtils.getNode( UtilXML.nodeToString(xpathProcessor.queryList(dataCollection.getItem(), "/Fragment[1]/*").item(0)), docBuilder); this.variableSchemeNode = DocumentBuilderUtils.getNode( UtilXML.nodeToString(xpathProcessor.queryList(variableScheme.getItem(), "/Fragment[1]/*").item(0)), docBuilder); this.instrumentSchemeNode = DocumentBuilderUtils.getNode( UtilXML.nodeToString(xpathProcessor.queryList(instrumentScheme.item, "/Fragment[1]/*").item(0)), docBuilder); this.instrumentNode = DocumentBuilderUtils.getNode( UtilXML.nodeToString(xpathProcessor.queryList(instrument.item, "/Fragment[1]/*").item(0)), docBuilder); } private void processItemsRessourcePackage(DDIDocumentBuilder docBuilder, ColecticaItemRefList listItemsChildrenInstrument) throws Exception { List<ItemWithParent> parentsWithCildren = new ArrayList<ItemWithParent>(); List<ColecticaItem> items = metadataServiceItem.getItems(listItemsChildrenInstrument); // Step 1 : Insert the other references of // the studyUnit to the // enveloppe as children of // the first 
RessourcePackage processingSchemes(items, docBuilder, parentsWithCildren); // Step 2 : get the Ressource Packages (parents of the schemes) with // each scheme in the right package. List<ItemWithParent> rpItemsNodeString = getRessourcePackagesWithSchemes(docBuilder, parentsWithCildren); // Step 3 : Insert the Ressource Packages in the Root Document. for (ItemWithParent rpItemNodeString : rpItemsNodeString) { removeReferences(rpItemNodeString.getRessourcePackageNode()); docBuilder.appendChild(rpItemNodeString.getRessourcePackageNode()); } } private List<ItemWithParent> getRessourcePackagesWithSchemes(DDIDocumentBuilder docBuilder, List<ItemWithParent> parentsWithCildren) throws Exception { List<String> identifiersRP = new ArrayList<String>(); List<ItemWithParent> rpItemsNodeString = new ArrayList<ItemWithParent>(); for (ItemWithParent itemParentWithChildren : parentsWithCildren) { ObjectColecticaPost objectColecticaPost = new ObjectColecticaPost(); List<String> itemTypes = new ArrayList<String>(); itemTypes.add(DDIItemType.RESSOURCEPACKAGE.getUUID()); objectColecticaPost.setItemTypes(itemTypes); TargetItem targetItem = new TargetItem(); targetItem.setAgencyId(itemParentWithChildren.getParent().agencyId); targetItem.setIdentifier(itemParentWithChildren.getParent().identifier); targetItem.setVersion(Integer.valueOf(itemParentWithChildren.getParent().version)); objectColecticaPost.setTargetItem(targetItem); objectColecticaPost.setUseDistinctResultItem(true); objectColecticaPost.setUseDistinctTargetItem(true); Relationship[] relationshipsRP = metadataService.getRelationship(objectColecticaPost); String identifierRP = relationshipsRP[0].getIdentifierTriple().getIdentifier(); if (identifiersRP.contains(identifierRP)) { for (ItemWithParent rpItemNodeString : rpItemsNodeString) { if (rpItemNodeString.getItem().getIdentifier().equals(identifierRP)) { removeReferences(itemParentWithChildren.getParentNode()); 
rpItemNodeString.getRessourcePackageNode().appendChild(itemParentWithChildren.getParentNode()); } } } else { ColecticaItem rpItem = metadataServiceItem.getItem(identifierRP); ItemWithParent rpItemNodeString = new ItemWithParent(); rpItemNodeString.setItem(rpItem); rpItemNodeString.setRessourcePackageNode(DocumentBuilderUtils.getNode( UtilXML.nodeToString(xpathProcessor.queryList(rpItem.getItem(), "/Fragment[1]/*").item(0)), docBuilder)); removeReferences(itemParentWithChildren.getParentNode()); rpItemNodeString.getRessourcePackageNode().appendChild(itemParentWithChildren.getParentNode()); rpItemNodeString.setRessourcePackage(rpItem); rpItemsNodeString.add(rpItemNodeString); identifiersRP.add(identifierRP); } } return rpItemsNodeString; } private void processingSchemes(List<ColecticaItem> items, DDIDocumentBuilder docBuilder, List<ItemWithParent> parentsWithCildren) throws Exception { List<String> identifierParentsWithCildren = new ArrayList<String>(); List<Node> itemSchemeNodes = new ArrayList<Node>(); List<ColecticaItem> itemSchemes = new ArrayList<ColecticaItem>(); List<String> itemTypes = new ArrayList<String>(); // Adding variableScheme : independent scheme ColecticaItemRefList refsVariables = metadataServiceItem.getChildrenRef(this.variableScheme.getIdentifier()); List<ColecticaItem> variables = metadataServiceItem.getItems(refsVariables); for (ColecticaItem variable : variables) { Node variableNode = DocumentBuilderUtils.getNode( UtilXML.nodeToString(xpathProcessor.queryList(variable.item, "/Fragment[1]/*").item(0)), docBuilder); this.variableSchemeNode.appendChild(variableNode); } ItemWithParent variableScheme = new ItemWithParent(); variableScheme.setParent(this.variableScheme); // TODO: add ParentNode and check debugging ObjectColecticaPost objectColecticaPost = new ObjectColecticaPost(); variableScheme.setRessourcePackage( searchItemParent(itemTypes, DDIItemType.RESSOURCEPACKAGE, objectColecticaPost, this.variableScheme)); 
variableScheme.setRessourcePackageNode(DocumentBuilderUtils.getNode( UtilXML.nodeToString( xpathProcessor.queryList(variableScheme.getRessourcePackage().item, "/Fragment[1]/*").item(0)), docBuilder)); removeReferences(variableScheme.getRessourcePackageNode()); variableScheme.setParentNode(this.variableSchemeNode); parentsWithCildren.add(variableScheme); for (DDIItemType type : DDIItemType.values()) { if (type.getName().endsWith("Scheme")) { itemTypes.add(type.getUUID()); } } for (ColecticaItem item : items) { objectColecticaPost = new ObjectColecticaPost(); Node node = DocumentBuilderUtils.getNode( UtilXML.nodeToString(xpathProcessor.queryList(item.getItem(), "/Fragment[1]/*[1]").item(0)), docBuilder); removeReferences(node); TargetItem targetItem = new TargetItem(); targetItem.setAgencyId(item.agencyId); targetItem.setVersion(Integer.valueOf(item.version)); targetItem.setIdentifier(item.identifier); objectColecticaPost.setItemTypes(itemTypes); objectColecticaPost.setTargetItem(targetItem); objectColecticaPost.setUseDistinctResultItem(true); objectColecticaPost.setUseDistinctTargetItem(true); if (itemTypes.size() > 0) { Relationship[] relationshipsSchemes = metadataService.getRelationship(objectColecticaPost); for (int i = 0; i < relationshipsSchemes.length; i++) { ItemWithParent itemWithParent = new ItemWithParent(); itemWithParent.setItem(item); itemWithParent.setItemNode(DocumentBuilderUtils.getNode( UtilXML.nodeToString(xpathProcessor.queryList(item.getItem(), "/Fragment[1]/*").item(0)), docBuilder)); itemWithParent.setParent( metadataServiceItem.getItem(relationshipsSchemes[i].getIdentifierTriple().getIdentifier())); itemWithParent.setParentNode(DocumentBuilderUtils.getNode( UtilXML.nodeToString(xpathProcessor .queryList(itemWithParent.getParent().getItem(), "/Fragment[1]/*").item(0)), docBuilder)); // First adding of a parentNode if (!identifierParentsWithCildren.contains(itemWithParent.getParent().getIdentifier())) { 
removeReferences(itemWithParent.getParentNode()); addParentNode(parentsWithCildren, itemWithParent, identifierParentsWithCildren, itemSchemes, itemSchemeNodes); } else { // Update of the parent node with a new child Node updateParentNode(parentsWithCildren, itemWithParent); } } } } } /** * Add the parent Node and the Parent Item to the currentItem. The aim of * this method is to construct the parent tree * * @param parentsWithCildren * @param itemWithParent * @param identifierParentsWithCildren * @param itemSchemes * @param itemSchemeNodes * @throws RMeSException */ public void addParentNode(List<ItemWithParent> parentsWithCildren, ItemWithParent itemWithParent, List<String> identifierParentsWithCildren, List<ColecticaItem> itemSchemes, List<Node> itemSchemeNodes) throws RMeSException { if (itemWithParent != null && itemWithParent.getParent() != null) { parentsWithCildren.add(itemWithParent); identifierParentsWithCildren.add(itemWithParent.getParent().getIdentifier()); itemSchemes.add(itemWithParent.getParent()); itemSchemeNodes.add(itemWithParent.getParentNode()); } else { String details = (itemWithParent != null) ? 
itemWithParent.toString() : "itemWithParent is null"; throw new RMeSException(500, "The parentNode of a Colectica Item was not found or is unreachable.", details); } } /** * * @param parentsWithCildren * @param itemWithParent * @throws RMeSException */ public void updateParentNode(List<ItemWithParent> parentsWithCildren, ItemWithParent itemWithParent) throws RMeSException { for (ItemWithParent itemParentWithChildren : parentsWithCildren) { if (itemParentWithChildren != null && itemParentWithChildren.getParent() != null) { if (itemParentWithChildren.getParent().getIdentifier() .equals(itemWithParent.getParent().getIdentifier())) { removeReferences(itemWithParent.getItemNode()); itemParentWithChildren.getParentNode().appendChild(itemWithParent.getItemNode()); } } else { throw new RMeSException(500, "The parentNode of a Colectica Item was not found or is unreachable.", itemWithParent.toString()); } } } /** * Remove unused references of the DDIDocument builder * * @param node * (root node for searching references) */ private void removeReferences(Node node) { NodeList children = node.getChildNodes(); for (int i = 0; i < children.getLength(); i++) { Node nodeRef = children.item(i); if (nodeRef.getNodeType() == Node.ELEMENT_NODE) { if (nodeRef.getNodeName().contains("Reference")) { Node parentNode = nodeRef.getParentNode(); parentNode.removeChild(nodeRef); } } } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.indices.analysis; import org.apache.lucene.analysis.hunspell.Dictionary; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; /** * The AnalysisModule is the main extension point for node and index level analysis components. 
The lucene classes {@link org.apache.lucene.analysis.Analyzer},
 * {@link org.apache.lucene.analysis.TokenFilter}, {@link org.apache.lucene.analysis.Tokenizer} and
 * {@link org.apache.lucene.analysis.CharFilter} can be extended in plugins and registered on node startup when the analysis module
 * gets loaded. Since elasticsearch needs to create multiple instances for different configurations dedicated factories need to be
 * provided for each of the components:
 * <ul>
 * <li> {@link org.apache.lucene.analysis.Analyzer} can be exposed via {@link AnalyzerProvider} and registered on {@link #registerAnalyzer(String, AnalysisProvider)}</li>
 * <li> {@link org.apache.lucene.analysis.TokenFilter} can be exposed via {@link TokenFilterFactory} and registered on {@link #registerTokenFilter(String, AnalysisProvider)}</li>
 * <li> {@link org.apache.lucene.analysis.Tokenizer} can be exposed via {@link TokenizerFactory} and registered on {@link #registerTokenizer(String, AnalysisProvider)}</li>
 * <li> {@link org.apache.lucene.analysis.CharFilter} can be exposed via {@link CharFilterFactory} and registered on {@link #registerCharFilter(String, AnalysisProvider)}</li>
 * </ul>
 *
 * The {@link org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider} is only a functional interface that allows to
 * register factory constructors directly like the plugin example below:
 * <pre>
 * public class MyAnalysisPlugin extends Plugin {
 *     \@Override
 *     public String name() {
 *         return "analysis-my-plugin";
 *     }
 *
 *     \@Override
 *     public String description() {
 *         return "my very fast and efficient analyzer";
 *     }
 *
 *     public void onModule(AnalysisModule module) {
 *         module.registerAnalyzer("my-analyzer-name", MyAnalyzer::new);
 *     }
 * }
 * </pre>
 */
public final class AnalysisModule extends AbstractModule {

    static {
        // Minimal synthetic index settings used when a provider is instantiated
        // outside of any real index (see AnalysisProvider#get(Environment, String)).
        Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .build();
        IndexMetaData metaData = IndexMetaData.builder("_na_").settings(build).build();
        // Legal forward assignment: the blank static final declared below is written here.
        NA_INDEX_SETTINGS = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList());
    }

    // Placeholder IndexSettings for the "no index" case; initialized by the static block above.
    private static final IndexSettings NA_INDEX_SETTINGS;

    private final Environment environment;
    // Name -> factory-provider registries; populated during plugin onModule callbacks,
    // then handed to the AnalysisRegistry in configure().
    private final Map<String, AnalysisProvider<CharFilterFactory>> charFilters = new HashMap<>();
    private final Map<String, AnalysisProvider<TokenFilterFactory>> tokenFilters = new HashMap<>();
    private final Map<String, AnalysisProvider<TokenizerFactory>> tokenizers = new HashMap<>();
    private final Map<String, AnalysisProvider<AnalyzerProvider>> analyzers = new HashMap<>();
    private final Map<String, org.apache.lucene.analysis.hunspell.Dictionary> knownDictionaries = new HashMap<>();

    /**
     * Creates a new AnalysisModule
     */
    public AnalysisModule(Environment environment) {
        this.environment = environment;
    }

    /**
     * Registers a new {@link AnalysisProvider} to create
     * {@link CharFilterFactory} instance per node as well as per index.
     *
     * @throws IllegalArgumentException if the provider is null or the name is already taken
     */
    public void registerCharFilter(String name, AnalysisProvider<CharFilterFactory> charFilter) {
        if (charFilter == null) {
            throw new IllegalArgumentException("char_filter provider must not be null");
        }
        // putIfAbsent returns the previous mapping, i.e. non-null means a duplicate registration.
        if (charFilters.putIfAbsent(name, charFilter) != null) {
            throw new IllegalArgumentException("char_filter provider for name " + name + " already registered");
        }
    }

    /**
     * Registers a new {@link AnalysisProvider} to create
     * {@link TokenFilterFactory} instance per node as well as per index.
     *
     * @throws IllegalArgumentException if the provider is null or the name is already taken
     */
    public void registerTokenFilter(String name, AnalysisProvider<TokenFilterFactory> tokenFilter) {
        if (tokenFilter == null) {
            throw new IllegalArgumentException("token_filter provider must not be null");
        }
        if (tokenFilters.putIfAbsent(name, tokenFilter) != null) {
            throw new IllegalArgumentException("token_filter provider for name " + name + " already registered");
        }
    }

    /**
     * Registers a new {@link AnalysisProvider} to create
     * {@link TokenizerFactory} instance per node as well as per index.
     *
     * @throws IllegalArgumentException if the provider is null or the name is already taken
     */
    public void registerTokenizer(String name, AnalysisProvider<TokenizerFactory> tokenizer) {
        if (tokenizer == null) {
            throw new IllegalArgumentException("tokenizer provider must not be null");
        }
        if (tokenizers.putIfAbsent(name, tokenizer) != null) {
            throw new IllegalArgumentException("tokenizer provider for name " + name + " already registered");
        }
    }

    /**
     * Registers a new {@link AnalysisProvider} to create
     * {@link AnalyzerProvider} instance per node as well as per index.
     *
     * @throws IllegalArgumentException if the provider is null or the name is already taken
     */
    public void registerAnalyzer(String name, AnalysisProvider<AnalyzerProvider> analyzer) {
        if (analyzer == null) {
            throw new IllegalArgumentException("analyzer provider must not be null");
        }
        if (analyzers.putIfAbsent(name, analyzer) != null) {
            throw new IllegalArgumentException("analyzer provider for name " + name + " already registered");
        }
    }

    /**
     * Registers a new hunspell {@link Dictionary} that can be referenced by the given name in
     * hunspell analysis configuration.
     *
     * @throws IllegalArgumentException if the name is already taken
     */
    public void registerHunspellDictionary(String name, Dictionary dictionary) {
        if (knownDictionaries.putIfAbsent(name, dictionary) != null) {
            throw new IllegalArgumentException("dictionary for [" + name + "] is already registered");
        }
    }

    // Guice wiring: builds the HunspellService and the AnalysisRegistry from the
    // registries collected above and binds both as singletons.
    @Override
    protected void configure() {
        try {
            HunspellService service = new HunspellService(environment.settings(), environment, knownDictionaries);
            AnalysisRegistry registry = new AnalysisRegistry(service, environment, charFilters, tokenFilters, tokenizers, analyzers);
            bind(HunspellService.class).toInstance(service);
            bind(AnalysisRegistry.class).toInstance(registry);
        } catch (IOException e) {
            throw new ElasticsearchException("failed to load hunspell service", e);
        }
    }

    /**
     * AnalysisProvider is the basic factory interface for registering analysis components like:
     * <ul>
     * <li>{@link TokenizerFactory} - see {@link AnalysisModule#registerTokenizer(String, AnalysisProvider)}</li>
     * <li>{@link CharFilterFactory} - see {@link AnalysisModule#registerCharFilter(String, AnalysisProvider)}</li>
     * <li>{@link AnalyzerProvider} - see {@link AnalysisModule#registerAnalyzer(String, AnalysisProvider)}</li>
     * <li>{@link TokenFilterFactory} - see {@link AnalysisModule#registerTokenFilter(String, AnalysisProvider)}</li>
     * </ul>
     */
    public interface AnalysisProvider<T> {

        /**
         * Creates a new analysis provider.
         *
         * @param indexSettings the index settings for the index this provider is created for
         * @param environment the nodes environment to load resources from persistent storage
         * @param name the name of the analysis component
         * @param settings the component specific settings without context prefixes
         * @return a new provider instance
         * @throws IOException if an {@link IOException} occurs
         */
        T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException;

        /**
         * Creates a new global scope analysis provider without index specific settings nor settings for the provider itself.
         * This can be used to get a default instance of an analysis factory without binding to an index.
         *
         * @param environment the nodes environment to load resources from persistent storage
         * @param name the name of the analysis component
         * @return a new provider instance
         * @throws IOException if an {@link IOException} occurs
         * @throws IllegalArgumentException if the provider requires analysis settings ie. if {@link #requiresAnalysisSettings()} returns <code>true</code>
         */
        default T get(Environment environment, String name) throws IOException {
            if (requiresAnalysisSettings()) {
                throw new IllegalArgumentException("Analysis settings required - can't instantiate analysis factory");
            }
            // Falls back to the synthetic "_na_" index settings defined in the static block.
            return get(NA_INDEX_SETTINGS, environment, name, NA_INDEX_SETTINGS.getSettings());
        }

        /**
         * If <code>true</code> the analysis component created by this provider requires certain settings to be instantiated.
         * it can't be created with defaults. The default is <code>false</code>.
         */
        default boolean requiresAnalysisSettings() {
            return false;
        }
    }
}
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.screens.datamodeller.client.widgets.jpadomain; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.annotation.PostConstruct; import javax.enterprise.context.Dependent; import javax.enterprise.event.Event; import javax.inject.Inject; import com.google.gwt.user.client.ui.Widget; import org.kie.workbench.common.screens.datamodeller.client.command.DataModelCommandBuilder; import org.kie.workbench.common.screens.datamodeller.client.handlers.DomainHandlerRegistry; import org.kie.workbench.common.screens.datamodeller.client.handlers.jpadomain.util.RelationshipAnnotationValueHandler; import org.kie.workbench.common.screens.datamodeller.client.handlers.jpadomain.util.SequenceGeneratorValueHandler; import org.kie.workbench.common.screens.datamodeller.client.model.DataModelerPropertyEditorFieldInfo; import org.kie.workbench.common.screens.datamodeller.client.resources.i18n.Constants; import org.kie.workbench.common.screens.datamodeller.client.util.AnnotationValueHandler; import org.kie.workbench.common.screens.datamodeller.client.util.DataModelerUtils; import org.kie.workbench.common.screens.datamodeller.client.widgets.common.domain.FieldEditor; import org.kie.workbench.common.screens.datamodeller.client.widgets.jpadomain.properties.ColumnField; import 
org.kie.workbench.common.screens.datamodeller.client.widgets.jpadomain.properties.IdGeneratorField;
import org.kie.workbench.common.screens.datamodeller.client.widgets.jpadomain.properties.RelationshipField;
import org.kie.workbench.common.screens.datamodeller.client.widgets.jpadomain.properties.SequenceGeneratorField;
import org.kie.workbench.common.screens.datamodeller.events.DataModelerEvent;
import org.kie.workbench.common.screens.datamodeller.model.jpadomain.CascadeType;
import org.kie.workbench.common.screens.datamodeller.model.jpadomain.FetchMode;
import org.kie.workbench.common.screens.datamodeller.model.jpadomain.JPADomainAnnotations;
import org.kie.workbench.common.screens.datamodeller.model.jpadomain.RelationType;
import org.kie.workbench.common.services.datamodeller.core.Annotation;
import org.kie.workbench.common.services.datamodeller.core.DataObject;
import org.kie.workbench.common.services.datamodeller.core.ObjectProperty;
import org.uberfire.ext.properties.editor.client.fields.BooleanField;
import org.uberfire.ext.properties.editor.client.fields.TextField;
import org.uberfire.ext.properties.editor.model.PropertyEditorCategory;

/**
 * Property editor for the JPA aspects of a data-object field: identifier,
 * generated value / sequence generator, column attributes and relationship type.
 * Edits are applied through {@code commandBuilder} commands that mutate the
 * field's JPA annotations.
 */
@Dependent
public class JPADataObjectFieldEditor
        extends FieldEditor
        implements JPADataObjectFieldEditorView.Presenter {

    // NOTE(review): static, so the field-info cache is shared by every editor
    // instance in the client — confirm this sharing is intended.
    private static Map<String, DataModelerPropertyEditorFieldInfo> propertyEditorFields = new HashMap<>();

    private JPADataObjectFieldEditorView view;

    @Inject
    public JPADataObjectFieldEditor(JPADataObjectFieldEditorView view,
                                    DomainHandlerRegistry handlerRegistry,
                                    Event<DataModelerEvent> dataModelerEvent,
                                    DataModelCommandBuilder commandBuilder) {
        super(handlerRegistry, dataModelerEvent, commandBuilder);
        this.view = view;
        view.init(this);
    }

    // Populates the property editor once injection is complete.
    @PostConstruct
    protected void init() {
        loadPropertyEditor();
    }

    @Override
    public Widget asWidget() {
        return view.asWidget();
    }

    /** Editor identifier used as the command source name. */
    @Override
    public String getName() {
        return "JPA_FIELD_EDITOR";
    }

    @Override
    public String getDomainName() {
        return JPADomainEditor.JPA_DOMAIN;
    }
@Override
    protected void loadDataObjectField(DataObject dataObject, ObjectProperty objectField) {
        // Reset all editor fields, then repopulate them from the field's JPA annotations.
        clear();
        setReadonly(true);
        if (dataObject != null && objectField != null) {
            this.dataObject = dataObject;
            this.objectField = objectField;

            updateIdentifierField(objectField.getAnnotation(JPADomainAnnotations.JAVAX_PERSISTENCE_ID_ANNOTATION));
            updateColumnFields(objectField.getAnnotation(JPADomainAnnotations.JAVAX_PERSISTENCE_COLUMN_ANNOTATION));
            updateGeneratedValueField(objectField.getAnnotation(JPADomainAnnotations.JAVAX_PERSISTENCE_GENERATED_VALUE_ANNOTATION));
            updateSequenceGeneratorField(objectField.getAnnotation(JPADomainAnnotations.JAVAX_PERSISTENCE_SEQUENCE_GENERATOR_ANNOTATION));
            updateRelationshipField(getCurrentRelationshipAnnotation(objectField));

            // Editable only when a context exists and is not read-only.
            setReadonly(getContext() == null || getContext().isReadonly());
        }
        loadPropertyEditor();
    }

    // Adds or removes @Id depending on whether the new value is the string "true".
    @Override
    public void onIdentifierFieldChange(DataModelerPropertyEditorFieldInfo fieldInfo, String newValue) {
        if (getObjectField() != null) {
            Boolean doAdd = Boolean.TRUE.toString().equals(newValue);
            commandBuilder.buildFieldAddOrRemoveAnnotationCommand(getContext(), getName(), getDataObject(), getObjectField(),
                    JPADomainAnnotations.JAVAX_PERSISTENCE_ID_ANNOTATION, doAdd).execute();
        }
    }

    // Routes a @Column attribute change (name/unique/nullable/insertable/updatable)
    // to the matching annotation-value-change command, keyed by the editor field.
    @Override
    public void onColumnFieldChange(DataModelerPropertyEditorFieldInfo fieldInfo, String newValue) {
        if (getObjectField() != null) {

            if (JPADataObjectFieldEditorView.COLUMN_NAME_FIELD.equals(fieldInfo.getKey())) {
                String value = DataModelerUtils.nullTrim(newValue);
                commandBuilder.buildFieldAnnotationValueChangeCommand(getContext(), getName(), getDataObject(), getObjectField(),
                        JPADomainAnnotations.JAVAX_PERSISTENCE_COLUMN_ANNOTATION, ColumnField.NAME, value, false).execute();
            } else if (JPADataObjectFieldEditorView.COLUMN_UNIQUE_FIELD.equals(fieldInfo.getKey())) {
                commandBuilder.buildFieldAnnotationValueChangeCommand(getContext(), getName(), getDataObject(), getObjectField(),
                        JPADomainAnnotations.JAVAX_PERSISTENCE_COLUMN_ANNOTATION,
                        ColumnField.UNIQUE, newValue, false).execute();
            } else if (JPADataObjectFieldEditorView.COLUMN_NULLABLE_FIELD.equals(fieldInfo.getKey())) {
                commandBuilder.buildFieldAnnotationValueChangeCommand(getContext(), getName(), getDataObject(), getObjectField(),
                        JPADomainAnnotations.JAVAX_PERSISTENCE_COLUMN_ANNOTATION,
                        ColumnField.NULLABLE, newValue, false).execute();
            } else if (JPADataObjectFieldEditorView.COLUMN_INSERTABLE_FIELD.equals(fieldInfo.getKey())) {
                commandBuilder.buildFieldAnnotationValueChangeCommand(getContext(), getName(), getDataObject(), getObjectField(),
                        JPADomainAnnotations.JAVAX_PERSISTENCE_COLUMN_ANNOTATION,
                        ColumnField.INSERTABLE, newValue, false).execute();
            } else if (JPADataObjectFieldEditorView.COLUMN_UPDATABLE_FIELD.equals(fieldInfo.getKey())) {
                commandBuilder.buildFieldAnnotationValueChangeCommand(getContext(), getName(), getDataObject(), getObjectField(),
                        JPADomainAnnotations.JAVAX_PERSISTENCE_COLUMN_ANNOTATION,
                        ColumnField.UPDATABLE, newValue, false).execute();
            }

            Annotation annotation = getObjectField().getAnnotation(JPADomainAnnotations.JAVAX_PERSISTENCE_COLUMN_ANNOTATION);
            //If the COLUMN annotation just has the by default parameters configured just remove it.
if (annotation != null && hasOnlyDefaultValues(annotation)) {
                commandBuilder.buildFieldAddOrRemoveAnnotationCommand(getContext(), getName(), getDataObject(), getObjectField(),
                        JPADomainAnnotations.JAVAX_PERSISTENCE_COLUMN_ANNOTATION, false).execute();
            }
        }
    }

    // Applies a @GeneratedValue change: a cleared strategy removes the annotation,
    // otherwise both the strategy and generator values are written.
    @Override
    public void onGeneratedValueFieldChange(DataModelerPropertyEditorFieldInfo fieldInfo, String newValue) {
        if (getObjectField() != null) {

            String strategy = DataModelerUtils.nullTrim((String) fieldInfo.getCurrentValue(IdGeneratorField.STRATEGY));
            String generator = DataModelerUtils.nullTrim((String) fieldInfo.getCurrentValue(IdGeneratorField.GENERATOR));

            if (strategy == null) {
                commandBuilder.buildFieldAnnotationRemoveCommand(getContext(), getName(), getDataObject(), getObjectField(),
                        JPADomainAnnotations.JAVAX_PERSISTENCE_GENERATED_VALUE_ANNOTATION).execute();
            } else {
                commandBuilder.buildFieldAnnotationValueChangeCommand(getContext(), getName(), getDataObject(), getObjectField(),
                        JPADomainAnnotations.JAVAX_PERSISTENCE_GENERATED_VALUE_ANNOTATION,
                        IdGeneratorField.STRATEGY, strategy, false).execute();
                commandBuilder.buildFieldAnnotationValueChangeCommand(getContext(), getName(), getDataObject(), getObjectField(),
                        JPADomainAnnotations.JAVAX_PERSISTENCE_GENERATED_VALUE_ANNOTATION,
                        IdGeneratorField.GENERATOR, generator, false).execute();
            }
        }
    }

    // Replaces the @SequenceGenerator annotation wholesale: remove the old one (if any),
    // then add a new one when a non-blank generator name is configured.
    @Override
    public void onSequenceGeneratorFieldChange(DataModelerPropertyEditorFieldInfo fieldInfo, String newValue) {
        if (getObjectField() != null) {

            Annotation oldGenerator = getObjectField().getAnnotation(JPADomainAnnotations.JAVAX_PERSISTENCE_SEQUENCE_GENERATOR_ANNOTATION);
            SequenceGeneratorValueHandler oldGeneratorHandler = oldGenerator != null ? new SequenceGeneratorValueHandler(oldGenerator) : null;
            Annotation newGenerator = null;

            //TODO add more fine grained control to the changes if needed. By now I can just remove the old generator annotation
            //and add the new one. This may alter the annotations order for the given field, but it's not a problem.
            if (oldGeneratorHandler != null) {
                commandBuilder.buildFieldAnnotationRemoveCommand(getContext(), getName(), getDataObject(), getObjectField(),
                        oldGeneratorHandler.getClassName()).execute();
            }

            String name = DataModelerUtils.nullTrim((String) fieldInfo.getCurrentValue(SequenceGeneratorValueHandler.NAME));
            String sequenceName = DataModelerUtils.nullTrim((String) fieldInfo.getCurrentValue(SequenceGeneratorValueHandler.SEQUENCE_NAME));
            Integer initialValue = (Integer) fieldInfo.getCurrentValue(SequenceGeneratorValueHandler.INITIAL_VALUE);
            Integer allocationSize = (Integer) fieldInfo.getCurrentValue(SequenceGeneratorValueHandler.ALLOCATION_SIZE);

            // The generator name is mandatory; without it no annotation is created.
            if (name != null && !"".equals(name.trim())) {
                newGenerator = SequenceGeneratorValueHandler.createAnnotation(name,
                        sequenceName,
                        initialValue,
                        allocationSize,
                        getContext().getAnnotationDefinitions());
                commandBuilder.buildFieldAnnotationAddCommand(getContext(), getName(), getDataObject(), getObjectField(),
                        newGenerator).execute();
            }
        }
    }

    // Replaces the relationship annotation (@OneToOne/@OneToMany/@ManyToOne/@ManyToMany)
    // wholesale, same remove-then-add strategy as the sequence generator above.
    @Override
    public void onRelationTypeFieldChange(DataModelerPropertyEditorFieldInfo fieldInfo, String newValue) {
        if (getObjectField() != null) {

            Annotation oldRelation = getCurrentRelationshipAnnotation(getObjectField());
            RelationshipAnnotationValueHandler oldRelationHandler = oldRelation != null ?
                    new RelationshipAnnotationValueHandler(oldRelation) : null;
            Annotation newRelation;

            RelationType newRelationType = (RelationType) fieldInfo.getCurrentValue(RelationshipAnnotationValueHandler.RELATION_TYPE);
            // NOTE(review): unchecked cast — the field info stores untyped values.
            List<CascadeType> newCascadeTypes = (List<CascadeType>) fieldInfo.getCurrentValue(RelationshipAnnotationValueHandler.CASCADE);
            FetchMode newFetchMode = (FetchMode) fieldInfo.getCurrentValue(RelationshipAnnotationValueHandler.FETCH);
            Boolean newOptional = (Boolean) fieldInfo.getCurrentValue(RelationshipAnnotationValueHandler.OPTIONAL);
            String newMappedBy = DataModelerUtils.nullTrim((String) fieldInfo.getCurrentValue(RelationshipAnnotationValueHandler.MAPPED_BY));
            Boolean newOrphanRemoval = (Boolean) fieldInfo.getCurrentValue(RelationshipAnnotationValueHandler.ORPHAN_REMOVAL);

            //TODO add more fine grained control for the changes if needed. By now I can just remove the old relation annotation
            //and add the new one. This may alter the annotations order for the given field, but it's not a problem.
if (oldRelationHandler != null) {
                commandBuilder.buildFieldAnnotationRemoveCommand(getContext(), getName(), getDataObject(), getObjectField(),
                        oldRelationHandler.getClassName()).execute();
            }

            newRelation = RelationshipAnnotationValueHandler.createAnnotation(newRelationType,
                    newCascadeTypes,
                    newFetchMode,
                    newOptional,
                    newMappedBy,
                    newOrphanRemoval,
                    getContext().getAnnotationDefinitions());

            if (newRelation != null) {
                // NOTE(review): the annotation is added directly to the field AND an add
                // command is executed — confirm the command does not add it a second time.
                getObjectField().addAnnotation(newRelation);
                commandBuilder.buildFieldAnnotationAddCommand(getContext(), getName(), getDataObject(), getObjectField(),
                        newRelation).execute();
            }
        }
    }

    // Pushes the current category/field definitions into the view.
    protected void loadPropertyEditor() {
        view.loadPropertyEditorCategories(getPropertyEditorCategories());
    }

    // Builds the three editor categories: Identifier (1), Column (2), Relationship (3).
    protected List<PropertyEditorCategory> getPropertyEditorCategories() {

        final List<PropertyEditorCategory> categories = new ArrayList<>();

        PropertyEditorCategory category = new PropertyEditorCategory(getIdentifierCategoryName(), 1);
        categories.add(category);
        category.withField(createIdentifierField());
        category.withField(createGeneratedValueField());
        category.withField(createSequenceGeneratorField());

        category = new PropertyEditorCategory(getColumnCategoryName(), 2);
        categories.add(category);
        category.withField(createColumnNameField());
        category.withField(createColumnUniqueField());
        category.withField(createColumnNullableField());
        category.withField(createColumnInsertableField());
        category.withField(createColumnUpdatableField());

        category = new PropertyEditorCategory(getRelationshipCategoryName(), 3);
        categories.add(category);
        category.withField(createRelationShipTypeField());

        return categories;
    }

    // --- Field factories: each delegates to createField with its key, default value,
    // --- field widget class and i18n help texts.

    private DataModelerPropertyEditorFieldInfo createIdentifierField() {
        return createField(Constants.INSTANCE.persistence_domain_fieldEditor_identifier_field_label(),
                JPADataObjectFieldEditorView.IDENTIFIER_FIELD,
                "false",
                BooleanField.class,
                Constants.INSTANCE.persistence_domain_fieldEditor_identifier_field_help_heading(),
                Constants.INSTANCE.persistence_domain_fieldEditor_identifier_field_help(),
                readonly);
    }

    private DataModelerPropertyEditorFieldInfo createGeneratedValueField() {
        return createField(Constants.INSTANCE.persistence_domain_fieldEditor_generation_strategy_field_label(),
                JPADataObjectFieldEditorView.GENERATED_VALUE_FIELD,
                IdGeneratorField.NOT_CONFIGURED_LABEL,
                IdGeneratorField.class,
                Constants.INSTANCE.persistence_domain_fieldEditor_generation_strategy_field_help_heading(),
                Constants.INSTANCE.persistence_domain_fieldEditor_generation_strategy_field_help(),
                readonly);
    }

    private DataModelerPropertyEditorFieldInfo createSequenceGeneratorField() {
        return createField(Constants.INSTANCE.persistence_domain_fieldEditor_sequence_generator_field_label(),
                JPADataObjectFieldEditorView.SEQUENCE_GENERATOR_FIELD,
                SequenceGeneratorField.NOT_CONFIGURED_LABEL,
                SequenceGeneratorField.class,
                Constants.INSTANCE.persistence_domain_fieldEditor_sequence_generator_field_help_heading(),
                Constants.INSTANCE.persistence_domain_fieldEditor_sequence_generator_field_help(),
                readonly);
    }

    private DataModelerPropertyEditorFieldInfo createColumnNameField() {
        return createField(Constants.INSTANCE.persistence_domain_fieldEditor_column_field_label(),
                JPADataObjectFieldEditorView.COLUMN_NAME_FIELD,
                "",
                TextField.class,
                Constants.INSTANCE.persistence_domain_fieldEditor_column_field_help_heading(),
                Constants.INSTANCE.persistence_domain_fieldEditor_column_field_help(),
                readonly);
    }

    private DataModelerPropertyEditorFieldInfo createColumnUniqueField() {
        return createField(Constants.INSTANCE.persistence_domain_fieldEditor_unique_field_label(),
                JPADataObjectFieldEditorView.COLUMN_UNIQUE_FIELD,
                "false",
                BooleanField.class,
                Constants.INSTANCE.persistence_domain_fieldEditor_unique_field_help_heading(),
                Constants.INSTANCE.persistence_domain_fieldEditor_unique_field_help(),
                readonly);
    }

    private DataModelerPropertyEditorFieldInfo createColumnNullableField() {
        return createField(Constants.INSTANCE.persistence_domain_fieldEditor_nullable_field_label(),
                JPADataObjectFieldEditorView.COLUMN_NULLABLE_FIELD,
                "true",
                BooleanField.class,
                Constants.INSTANCE.persistence_domain_fieldEditor_nullable_field_help_heading(),
                Constants.INSTANCE.persistence_domain_fieldEditor_nullable_field_help(),
                readonly);
    }

    private DataModelerPropertyEditorFieldInfo createColumnInsertableField() {
        return createField(Constants.INSTANCE.persistence_domain_fieldEditor_insertable_field_label(),
                JPADataObjectFieldEditorView.COLUMN_INSERTABLE_FIELD,
                "true",
                BooleanField.class,
                Constants.INSTANCE.persistence_domain_fieldEditor_insertable_field_help_heading(),
                Constants.INSTANCE.persistence_domain_fieldEditor_insertable_field_help(),
                readonly);
    }

    private DataModelerPropertyEditorFieldInfo createColumnUpdatableField() {
        return createField(Constants.INSTANCE.persistence_domain_fieldEditor_updatable_field_label(),
                JPADataObjectFieldEditorView.COLUMN_UPDATABLE_FIELD,
                "true",
                BooleanField.class,
                Constants.INSTANCE.persistence_domain_fieldEditor_updatable_field_help_heading(),
                Constants.INSTANCE.persistence_domain_fieldEditor_updatable_field_help(),
                readonly);
    }

    private DataModelerPropertyEditorFieldInfo createRelationShipTypeField() {
        return createField(Constants.INSTANCE.persistence_domain_fieldEditor_relationship_field_label(),
                JPADataObjectFieldEditorView.RELATIONSHIP_TYPE_FIELD,
                RelationshipField.NOT_CONFIGURED_LABEL,
                RelationshipField.class,
                Constants.INSTANCE.persistence_domain_fieldEditor_relationship_field_help_heading(),
                Constants.INSTANCE.persistence_domain_fieldEditor_relationship_field_help(),
                readonly);
    }

    // Returns the cached field info for the given key, creating and caching it on first use.
    private DataModelerPropertyEditorFieldInfo createField(String label,
                                                           String key,
                                                           String currentStringValue,
                                                           Class<?> customFieldClass,
                                                           String helpHeading,
                                                           String helpText,
                                                           boolean readonly) {
        DataModelerPropertyEditorFieldInfo fieldInfo = propertyEditorFields.get(key);
        if (fieldInfo == null) {
            fieldInfo = new DataModelerPropertyEditorFieldInfo(label,
currentStringValue, customFieldClass);
            fieldInfo.withKey(key);
            if (helpHeading != null) {
                fieldInfo.withHelpInfo(helpHeading, helpText);
            }
            propertyEditorFields.put(key, fieldInfo);
        }
        fieldInfo.setDisabled(readonly);
        return fieldInfo;
    }

    // Writes the annotation object and its display string into the cached field info.
    private void updatePropertyEditorField(String fieldId, Annotation currentValue, String currentStringValue) {
        DataModelerPropertyEditorFieldInfo fieldInfo = propertyEditorFields.get(fieldId);
        fieldInfo.setCurrentValue(currentValue);
        fieldInfo.setCurrentStringValue(currentStringValue);
    }

    private DataModelerPropertyEditorFieldInfo getField(String fieldId) {
        return propertyEditorFields.get(fieldId);
    }

    // "true" iff the field carries an @Id annotation.
    private void updateIdentifierField(Annotation annotation) {
        clearIdentifierField();
        if (annotation != null) {
            updatePropertyEditorField(JPADataObjectFieldEditorView.IDENTIFIER_FIELD, annotation, "true");
        }
    }

    // Copies the @Column attributes into the five column editor fields,
    // applying the JPA default for each missing attribute.
    private void updateColumnFields(Annotation annotation) {
        clearColumnFields();
        if (annotation != null) {
            String currentStringValue = AnnotationValueHandler.getStringValue(annotation, ColumnField.NAME, "");
            updatePropertyEditorField(JPADataObjectFieldEditorView.COLUMN_NAME_FIELD, annotation, currentStringValue);

            currentStringValue = AnnotationValueHandler.getStringValue(annotation, ColumnField.UNIQUE, "false");
            updatePropertyEditorField(JPADataObjectFieldEditorView.COLUMN_UNIQUE_FIELD, annotation, currentStringValue);

            currentStringValue = AnnotationValueHandler.getStringValue(annotation, ColumnField.NULLABLE, "true");
            updatePropertyEditorField(JPADataObjectFieldEditorView.COLUMN_NULLABLE_FIELD, annotation, currentStringValue);

            currentStringValue = AnnotationValueHandler.getStringValue(annotation, ColumnField.INSERTABLE, "true");
            updatePropertyEditorField(JPADataObjectFieldEditorView.COLUMN_INSERTABLE_FIELD, annotation, currentStringValue);

            currentStringValue = AnnotationValueHandler.getStringValue(annotation, ColumnField.UPDATABLE, "true");
            updatePropertyEditorField(JPADataObjectFieldEditorView.COLUMN_UPDATABLE_FIELD, annotation, currentStringValue);
        }
    }

    // Copies @GeneratedValue strategy/generator into the editor field; the display
    // string falls back to the "not configured" label when no strategy is set.
    private void updateGeneratedValueField(Annotation annotation) {
        clearGeneratedValueField();
        if (annotation != null) {
            DataModelerPropertyEditorFieldInfo fieldInfo = getField(JPADataObjectFieldEditorView.GENERATED_VALUE_FIELD);

            String strategy = AnnotationValueHandler.getStringValue(annotation, IdGeneratorField.STRATEGY, null);
            String generator = AnnotationValueHandler.getStringValue(annotation, IdGeneratorField.GENERATOR, null);

            fieldInfo.setCurrentValue(IdGeneratorField.STRATEGY, strategy);
            fieldInfo.setCurrentValue(IdGeneratorField.GENERATOR, generator);

            updatePropertyEditorField(JPADataObjectFieldEditorView.GENERATED_VALUE_FIELD, annotation,
                    strategy != null ? strategy : IdGeneratorField.NOT_CONFIGURED_LABEL);
        }
    }

    // Copies @SequenceGenerator values into the editor field; the generator name
    // doubles as the display string.
    private void updateSequenceGeneratorField(Annotation annotation) {
        clearSequenceGeneratorField();
        if (annotation != null) {
            SequenceGeneratorValueHandler valueHandler = new SequenceGeneratorValueHandler(annotation);
            DataModelerPropertyEditorFieldInfo fieldInfo = getField(JPADataObjectFieldEditorView.SEQUENCE_GENERATOR_FIELD);

            fieldInfo.setCurrentValue(SequenceGeneratorValueHandler.NAME, valueHandler.getName());
            fieldInfo.setCurrentValue(SequenceGeneratorValueHandler.SEQUENCE_NAME, valueHandler.getSequenceName());
            fieldInfo.setCurrentValue(SequenceGeneratorValueHandler.INITIAL_VALUE, valueHandler.getInitialValue());
            fieldInfo.setCurrentValue(SequenceGeneratorValueHandler.ALLOCATION_SIZE, valueHandler.getAllocationSize());

            updatePropertyEditorField(JPADataObjectFieldEditorView.SEQUENCE_GENERATOR_FIELD, annotation, valueHandler.getName());
        }
    }

    // Copies the relationship annotation values into the editor field, then removes
    // the attributes that do not apply to the detected relation type.
    private void updateRelationshipField(Annotation annotation) {
        clearRelationshipField();
        if (annotation != null) {
            RelationshipAnnotationValueHandler valueHandler = new RelationshipAnnotationValueHandler(annotation);
            DataModelerPropertyEditorFieldInfo fieldInfo = getField(JPADataObjectFieldEditorView.RELATIONSHIP_TYPE_FIELD);

            fieldInfo.setCurrentValue(RelationshipAnnotationValueHandler.RELATION_TYPE, valueHandler.getRelationType());
            fieldInfo.setCurrentValue(RelationshipAnnotationValueHandler.CASCADE, valueHandler.getCascade());
            fieldInfo.setCurrentValue(RelationshipAnnotationValueHandler.FETCH, valueHandler.getFetch());
            fieldInfo.setCurrentValue(RelationshipAnnotationValueHandler.OPTIONAL, valueHandler.getOptional());
            fieldInfo.setCurrentValue(RelationshipAnnotationValueHandler.MAPPED_BY, valueHandler.getMappedBy());
            fieldInfo.setCurrentValue(RelationshipAnnotationValueHandler.ORPHAN_REMOVAL, valueHandler.getOrphanRemoval());

            if (valueHandler.isOneToMany()) {
                fieldInfo.removeCurrentValue(RelationshipAnnotationValueHandler.OPTIONAL);
            } else if (valueHandler.isManyToOne()) {
                fieldInfo.removeCurrentValue(RelationshipAnnotationValueHandler.MAPPED_BY);
                fieldInfo.removeCurrentValue(RelationshipAnnotationValueHandler.ORPHAN_REMOVAL);
            } else if (valueHandler.isManyToMany()) {
                fieldInfo.removeCurrentValue(RelationshipAnnotationValueHandler.OPTIONAL);
                fieldInfo.removeCurrentValue(RelationshipAnnotationValueHandler.ORPHAN_REMOVAL);
            }
            updatePropertyEditorField(JPADataObjectFieldEditorView.RELATIONSHIP_TYPE_FIELD, annotation,
                    valueHandler.getRelationType().name());
        }
    }

    // True when every configured @Column attribute still holds its JPA default
    // (name empty, unique=false, nullable/insertable/updatable=true) — in which
    // case the annotation is redundant and may be removed.
    boolean hasOnlyDefaultValues(Annotation columnAnnotation) {

        String strValue;

        strValue = (String) columnAnnotation.getValue(ColumnField.NAME);
        if (strValue != null && !"".equals(strValue)) {
            return false;
        }

        strValue = AnnotationValueHandler.getStringValue(columnAnnotation, ColumnField.UNIQUE, null);
        if (strValue != null && !"false".equals(strValue)) {
            return false;
        }

        strValue = AnnotationValueHandler.getStringValue(columnAnnotation, ColumnField.NULLABLE, null);
        if (strValue != null && !"true".equals(strValue)) {
            return false;
        }

        strValue = AnnotationValueHandler.getStringValue(columnAnnotation, ColumnField.INSERTABLE, null);
        if (strValue != null && !"true".equals(strValue)) {
            return false;
        }

        strValue =
AnnotationValueHandler.getStringValue(columnAnnotation, ColumnField.UPDATABLE, null); if (strValue != null && !"true".equals(strValue)) { return false; } return true; } public void clear() { clearIdentifierField(); clearGeneratedValueField(); clearSequenceGeneratorField(); clearColumnFields(); clearRelationshipField(); } protected void clearIdentifierField() { updatePropertyEditorField(JPADataObjectFieldEditorView.IDENTIFIER_FIELD, null, "false"); } protected void clearGeneratedValueField() { updatePropertyEditorField(JPADataObjectFieldEditorView.GENERATED_VALUE_FIELD, null, IdGeneratorField.NOT_CONFIGURED_LABEL); getField(JPADataObjectFieldEditorView.GENERATED_VALUE_FIELD).clearCurrentValues(); } protected void clearSequenceGeneratorField() { updatePropertyEditorField(JPADataObjectFieldEditorView.SEQUENCE_GENERATOR_FIELD, null, SequenceGeneratorField.NOT_CONFIGURED_LABEL); getField(JPADataObjectFieldEditorView.SEQUENCE_GENERATOR_FIELD).clearCurrentValues(); } protected void clearColumnFields() { updatePropertyEditorField(JPADataObjectFieldEditorView.COLUMN_NAME_FIELD, null, null); updatePropertyEditorField(JPADataObjectFieldEditorView.COLUMN_UNIQUE_FIELD, null, "false"); updatePropertyEditorField(JPADataObjectFieldEditorView.COLUMN_INSERTABLE_FIELD, null, "true"); updatePropertyEditorField(JPADataObjectFieldEditorView.COLUMN_NULLABLE_FIELD, null, "true"); updatePropertyEditorField(JPADataObjectFieldEditorView.COLUMN_UPDATABLE_FIELD, null, "true"); } protected void clearRelationshipField() { updatePropertyEditorField(JPADataObjectFieldEditorView.RELATIONSHIP_TYPE_FIELD, null, RelationshipField.NOT_CONFIGURED_LABEL); getField(JPADataObjectFieldEditorView.RELATIONSHIP_TYPE_FIELD).clearCurrentValues(); } private Annotation getCurrentRelationshipAnnotation(ObjectProperty objectProperty) { Annotation annotation; if ((annotation = objectField.getAnnotation(JPADomainAnnotations.JAVAX_PERSISTENCE_ONE_TO_ONE)) != null) { return annotation; } else if ((annotation = 
objectField.getAnnotation(JPADomainAnnotations.JAVAX_PERSISTENCE_ONE_TO_MANY)) != null) { return annotation; } else if ((annotation = objectField.getAnnotation(JPADomainAnnotations.JAVAX_PERSISTENCE_MANY_TO_ONE)) != null) { return annotation; } else if ((annotation = objectField.getAnnotation(JPADomainAnnotations.JAVAX_PERSISTENCE_MANY_TO_MANY)) != null) { return annotation; } return null; } private String getIdentifierCategoryName() { return Constants.INSTANCE.persistence_domain_fieldEditor_identifier_category(); } private String getColumnCategoryName() { return Constants.INSTANCE.persistence_domain_fieldEditor_column_category(); } private String getRelationshipCategoryName() { return Constants.INSTANCE.persistence_domain_fieldEditor_relationship_category(); } }
// Generated from G:/mymise/in-memory-transfer-language/core/src/main/java/com/dnt/itl/grammar\ITL.g4 by ANTLR 4.5 package com.dnt.itl.grammar; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.misc.NotNull; import org.antlr.v4.runtime.tree.ErrorNode; import org.antlr.v4.runtime.tree.TerminalNode; /** * This class provides an empty implementation of {@link ITLListener}, * which can be extended to create a listener which only needs to handle a subset * of the available methods. */ public class ITLBaseListener implements ITLListener { /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterQl(@NotNull ITLParser.QlContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitQl(@NotNull ITLParser.QlContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterSelect(@NotNull ITLParser.SelectContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitSelect(@NotNull ITLParser.SelectContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterFrom(@NotNull ITLParser.FromContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitFrom(@NotNull ITLParser.FromContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterWhere(@NotNull ITLParser.WhereContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitWhere(@NotNull ITLParser.WhereContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterPropsSel(@NotNull ITLParser.PropsSelContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void 
exitPropsSel(@NotNull ITLParser.PropsSelContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterFloatVar(@NotNull ITLParser.FloatVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitFloatVar(@NotNull ITLParser.FloatVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterMulDiv(@NotNull ITLParser.MulDivContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitMulDiv(@NotNull ITLParser.MulDivContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterAddSub(@NotNull ITLParser.AddSubContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitAddSub(@NotNull ITLParser.AddSubContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterParens(@NotNull ITLParser.ParensContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitParens(@NotNull ITLParser.ParensContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterMapFuncVar(@NotNull ITLParser.MapFuncVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitMapFuncVar(@NotNull ITLParser.MapFuncVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterReduceFuncVar(@NotNull ITLParser.ReduceFuncVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitReduceFuncVar(@NotNull ITLParser.ReduceFuncVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override 
public void enterIntVar(@NotNull ITLParser.IntVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitIntVar(@NotNull ITLParser.IntVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterNullVar(@NotNull ITLParser.NullVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitNullVar(@NotNull ITLParser.NullVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterCharVar(@NotNull ITLParser.CharVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitCharVar(@NotNull ITLParser.CharVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterFuncVar(@NotNull ITLParser.FuncVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitFuncVar(@NotNull ITLParser.FuncVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterDirectPropVar(@NotNull ITLParser.DirectPropVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitDirectPropVar(@NotNull ITLParser.DirectPropVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterBooleanVar(@NotNull ITLParser.BooleanVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitBooleanVar(@NotNull ITLParser.BooleanVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterStringVar(@NotNull ITLParser.StringVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does 
nothing.</p> */ @Override public void exitStringVar(@NotNull ITLParser.StringVarContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterCollectionAgg(@NotNull ITLParser.CollectionAggContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitCollectionAgg(@NotNull ITLParser.CollectionAggContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterPropFullName(@NotNull ITLParser.PropFullNameContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitPropFullName(@NotNull ITLParser.PropFullNameContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterPropName(@NotNull ITLParser.PropNameContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitPropName(@NotNull ITLParser.PropNameContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterCompareBool(@NotNull ITLParser.CompareBoolContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitCompareBool(@NotNull ITLParser.CompareBoolContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterParenBool(@NotNull ITLParser.ParenBoolContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitParenBool(@NotNull ITLParser.ParenBoolContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterExprBool(@NotNull ITLParser.ExprBoolContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitExprBool(@NotNull ITLParser.ExprBoolContext ctx) 
{ } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterNotBool(@NotNull ITLParser.NotBoolContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitNotBool(@NotNull ITLParser.NotBoolContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterIntegerLiteral(@NotNull ITLParser.IntegerLiteralContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitIntegerLiteral(@NotNull ITLParser.IntegerLiteralContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterTrueBool(@NotNull ITLParser.TrueBoolContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitTrueBool(@NotNull ITLParser.TrueBoolContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterFalseBool(@NotNull ITLParser.FalseBoolContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitFalseBool(@NotNull ITLParser.FalseBoolContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterEveryRule(@NotNull ParserRuleContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitEveryRule(@NotNull ParserRuleContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void visitTerminal(@NotNull TerminalNode node) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void visitErrorNode(@NotNull ErrorNode node) { } }
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.hsmf;

import static org.apache.poi.POITestCase.assertContains;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;

import org.apache.poi.POIDataSamples;
import org.apache.poi.hsmf.datatypes.ChunkBasedPropertyValue;
import org.apache.poi.hsmf.datatypes.Chunks;
import org.apache.poi.hsmf.datatypes.MAPIProperty;
import org.apache.poi.hsmf.datatypes.PropertyValue;
import org.apache.poi.hsmf.datatypes.PropertyValue.LongPropertyValue;
import org.apache.poi.hsmf.datatypes.PropertyValue.TimePropertyValue;
import org.apache.poi.hsmf.dev.HSMFDump;
import org.apache.poi.hsmf.extractor.OutlookTextExtactor;
import org.apache.poi.poifs.filesystem.NPOIFSFileSystem;
import org.apache.poi.util.LocaleUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Tests that we can read fixed sized properties, as well as variable
 * ones, for example Submission Dates.
 *
 * Both sample messages are opened once in {@link #initMapi()} and shared
 * (read-only) across all tests; the filesystems are closed in
 * {@link #closeFS()}.
 */
public final class TestFixedSizedProperties {
   private static final String messageSucceeds = "53784_succeeds.msg";
   private static final String messageFails = "53784_fails.msg";

   // Shared, read-only test fixtures; initialized once per class.
   private static MAPIMessage mapiMessageSucceeds;
   private static MAPIMessage mapiMessageFails;
   private static NPOIFSFileSystem fsMessageSucceeds;
   private static NPOIFSFileSystem fsMessageFails;
   private static SimpleDateFormat messageDateFormat;
   // Saved so closeFS() can restore whatever time zone the suite started with.
   private static TimeZone userTimeZone;

   /**
    * Initialize this test, load up the messages.
    * Forces UTC so date formatting/parsing is deterministic regardless of
    * the machine running the tests.
    */
   @BeforeClass
   public static void initMapi() throws Exception {
       POIDataSamples samples = POIDataSamples.getHSMFInstance();
       fsMessageSucceeds = new NPOIFSFileSystem(samples.getFile(messageSucceeds));
       fsMessageFails = new NPOIFSFileSystem(samples.getFile(messageFails));

       mapiMessageSucceeds = new MAPIMessage(fsMessageSucceeds);
       mapiMessageFails = new MAPIMessage(fsMessageFails);

       messageDateFormat = new SimpleDateFormat("E, d MMM yyyy HH:mm:ss", Locale.ROOT);
       messageDateFormat.setTimeZone(LocaleUtil.TIMEZONE_UTC);

       userTimeZone = LocaleUtil.getUserTimeZone();
       LocaleUtil.setUserTimeZone(LocaleUtil.TIMEZONE_UTC);
   }

   /** Restore the original user time zone and release the filesystems. */
   @AfterClass
   public static void closeFS() throws Exception {
       LocaleUtil.setUserTimeZone(userTimeZone);
       fsMessageSucceeds.close();
       fsMessageFails.close();
   }

   /**
    * Check we can find a sensible number of properties on a few
    * of our test files
    */
   @Test
   public void testPropertiesFound() throws Exception {
       Map<MAPIProperty,List<PropertyValue>> props;

       props = mapiMessageSucceeds.getMainChunks().getProperties();
       assertTrue(props.toString(), props.size() > 10);

       props = mapiMessageFails.getMainChunks().getProperties();
       assertTrue(props.toString(), props.size() > 10);
   }

   /**
    * Check we find properties of a variety of different types
    */
   @Test
   public void testPropertyValueTypes() throws Exception {
       Chunks mainChunks = mapiMessageSucceeds.getMainChunks();

       // Ask to have the values looked up
       Map<MAPIProperty,List<PropertyValue>> props = mainChunks.getProperties();
       HashSet<Class<? extends PropertyValue>> seenTypes =
               new HashSet<Class<? extends PropertyValue>>();
       for (List<PropertyValue> pvs : props.values()) {
           for (PropertyValue pv : pvs) {
               seenTypes.add(pv.getClass());
           }
       }
       // Looked-up values must be resolved to concrete types, so the
       // chunk-based placeholder type should NOT appear here.
       assertTrue(seenTypes.toString(), seenTypes.size() > 3);
       assertTrue(seenTypes.toString(), seenTypes.contains(LongPropertyValue.class));
       assertTrue(seenTypes.toString(), seenTypes.contains(TimePropertyValue.class));
       assertFalse(seenTypes.toString(), seenTypes.contains(ChunkBasedPropertyValue.class));

       // Ask for the raw values; here the chunk-based placeholder IS expected.
       seenTypes.clear();
       for (PropertyValue pv : mainChunks.getRawProperties().values()) {
           seenTypes.add(pv.getClass());
       }
       assertTrue(seenTypes.toString(), seenTypes.size() > 3);
       assertTrue(seenTypes.toString(), seenTypes.contains(LongPropertyValue.class));
       assertTrue(seenTypes.toString(), seenTypes.contains(TimePropertyValue.class));
       assertTrue(seenTypes.toString(), seenTypes.contains(ChunkBasedPropertyValue.class));
   }

   /**
    * Test to see if we can read the Date Chunk with OutlookTextExtractor.
    */
   @Test
// @Ignore("TODO Work out why the Fri 22nd vs Monday 25th problem is occurring and fix")
   public void testReadMessageDateSucceedsWithOutlookTextExtractor() throws Exception {
       OutlookTextExtactor ext = new OutlookTextExtactor(mapiMessageSucceeds);
       ext.setFilesystem(null); // Don't close re-used test resources here
       // finally-block ensures the extractor is closed even if the assertion fails.
       try {
           String text = ext.getText();
           assertContains(text, "Date: Fri, 22 Jun 2012 18:32:54 +0000\n");
       } finally {
           ext.close();
       }
   }

   /**
    * Test to see if we can read the Date Chunk with OutlookTextExtractor.
    */
   @Test
// @Ignore("TODO Work out why the Thu 21st vs Monday 25th problem is occurring and fix")
   public void testReadMessageDateFailsWithOutlookTextExtractor() throws Exception {
       OutlookTextExtactor ext = new OutlookTextExtactor(mapiMessageFails);
       ext.setFilesystem(null); // Don't close re-used test resources here
       // finally-block ensures the extractor is closed even if the assertion fails.
       try {
           String text = ext.getText();
           assertContains(text, "Date: Thu, 21 Jun 2012 14:14:04 +0000\n");
       } finally {
           ext.close();
       }
   }

   /**
    * Test to see if we can read the Date Chunk with HSMFDump.
    */
   @Test
   public void testReadMessageDateSucceedsWithHSMFDump() throws IOException {
       // Output is discarded; the test only checks that dumping doesn't throw.
       PrintStream stream = new PrintStream(new ByteArrayOutputStream());
       try {
           HSMFDump dump = new HSMFDump(fsMessageSucceeds);
           dump.dump(stream);
       } finally {
           stream.close();
       }
   }

   /**
    * Test to see if we can read the Date Chunk with HSMFDump.
    */
   @Test
   public void testReadMessageDateFailsWithHSMFDump() throws Exception {
       // Output is discarded; the test only checks that dumping doesn't throw.
       PrintStream stream = new PrintStream(new ByteArrayOutputStream());
       try {
           HSMFDump dump = new HSMFDump(fsMessageFails);
           dump.dump(stream);
       } finally {
           stream.close();
       }
   }

   /**
    * Will be based on the ClientSubmit time
    */
   @Test
   public void testClientSubmitTime() throws Exception {
       // Check via the message date
       Calendar clientSubmitTime = mapiMessageSucceeds.getMessageDate();
       assertEquals(
               "Fri, 22 Jun 2012 18:32:54",
               messageDateFormat.format(clientSubmitTime.getTime()));

       // Fetch the property value directly
       Map<MAPIProperty,List<PropertyValue>> props =
               mapiMessageSucceeds.getMainChunks().getProperties();
       List<PropertyValue> pv = props.get(MAPIProperty.CLIENT_SUBMIT_TIME);
       assertNotNull(pv);
       assertEquals(1, pv.size());

       clientSubmitTime = (Calendar)pv.get(0).getValue();
       assertEquals(
               "Fri, 22 Jun 2012 18:32:54",
               messageDateFormat.format(clientSubmitTime.getTime()));
   }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.daemon.quickFix;

import com.intellij.codeInsight.daemon.LightDaemonAnalyzerTestCase;
import com.intellij.codeInsight.daemon.impl.HighlightInfo;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInsight.intention.impl.ShowIntentionActionsHandler;
import com.intellij.lang.Commenter;
import com.intellij.lang.LanguageCommenters;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.psi.PsiFile;
import com.intellij.rt.execution.junit.FileComparisonFailure;
import com.intellij.testFramework.LightPlatformCodeInsightTestCase;
import com.intellij.testFramework.LightPlatformTestCase;
import com.intellij.testFramework.fixtures.impl.CodeInsightTestFixtureImpl;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ObjectUtils;
import com.intellij.util.ui.UIUtil;
import org.intellij.lang.annotations.RegExp;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Base class for quick-fix/intention tests driven by pairs of data files:
 * a "before&lt;Name&gt;" file containing the source plus an action-hint comment
 * (action text + expected availability), and an "after&lt;Name&gt;" file with
 * the expected result of invoking the action.
 */
public abstract class LightQuickFixTestCase extends LightDaemonAnalyzerTestCase {
  // File-name prefixes that pair up input and expected-output test data files.
  @NonNls protected static final String BEFORE_PREFIX = "before";
  @NonNls protected static final String AFTER_PREFIX = "after";

  // Wrapper for the test currently being executed; set only for the duration
  // of doTestFor()'s action invocation (see the try/finally there), so the
  // non-static doAction() overload can delegate to it.
  private static QuickFixTestCase myWrapper;

  // Whether the invoked action is expected to still be offered after it ran;
  // subclasses override when the fix is repeatable.
  protected boolean shouldBeAvailableAfterExecution() {
    return false;
  }

  // Hook allowing subclasses to customize how the action hint is parsed out
  // of the test file contents; defaults to the standard pattern.
  @NotNull
  protected Pair<String, Boolean> parseActionHintImpl(@NotNull PsiFile file, @NotNull String contents) {
    return parseActionHint(file, contents);
  }

  // Loads the "before" file, configures the editor from it, parses the action
  // hint and runs the action inside a write command. Any unexpected throwable
  // fails the test with the test name; FileComparisonFailure is rethrown so
  // diff-style failures keep their expected/actual payload.
  private static void doTestFor(final String testName, final QuickFixTestCase quickFixTestCase) {
    final String relativePath = ObjectUtils.notNull(quickFixTestCase.getBasePath(), "") + "/" + BEFORE_PREFIX + testName;
    final String testFullPath = quickFixTestCase.getTestDataPath().replace(File.separatorChar, '/') + relativePath;
    final File testFile = new File(testFullPath);
    CommandProcessor.getInstance().executeCommand(quickFixTestCase.getProject(), () -> {
      try {
        String contents = StringUtil.convertLineSeparators(FileUtil.loadFile(testFile, CharsetToolkit.UTF8_CHARSET));
        quickFixTestCase.configureFromFileText(testFile.getName(), contents);
        quickFixTestCase.bringRealEditorBack();
        final Pair<String, Boolean> pair = quickFixTestCase.parseActionHintImpl(quickFixTestCase.getFile(), contents);
        final String text = pair.getFirst();
        final boolean actionShouldBeAvailable = pair.getSecond().booleanValue();
        quickFixTestCase.beforeActionStarted(testName, contents);
        try {
          // Publish the wrapper only while the action runs, then always clear it.
          myWrapper = quickFixTestCase;
          quickFixTestCase.doAction(text, actionShouldBeAvailable, testFullPath, testName);
        }
        finally {
          myWrapper = null;
          quickFixTestCase.afterActionCompleted(testName, contents);
        }
      }
      catch (FileComparisonFailure e){
        throw e;
      }
      catch (Throwable e) {
        e.printStackTrace();
        fail(testName);
      }
    }, "", "");
  }

  // Per-test hooks; intentionally empty here, for subclasses to override.
  protected void afterActionCompleted(final String testName, final String contents) {
  }

  protected void beforeActionStarted(final String testName, final String contents) {
  }

  // Parses the default action hint: a leading comment of the form
  // <comment-prefix> "action text" "true|false"
  @NotNull
  public static Pair<String, Boolean> parseActionHint(@NotNull PsiFile file, @NotNull String contents) {
    return parseActionHint(file, contents, " \"(.*)\" \"(true|false)\".*");
  }

  // Parses the action hint using a custom regex; group 1 is the action text,
  // group 2 the expected availability. The comment prefix comes from the
  // commenter of the top-level (host) language, falling back from line to
  // block comment prefix.
  @NotNull
  public static Pair<String, Boolean> parseActionHint(@NotNull PsiFile file,
                                                      @NotNull String contents,
                                                      @NotNull @NonNls @RegExp String actionPattern) {
    PsiFile hostFile = InjectedLanguageManager.getInstance(file.getProject()).getTopLevelFile(file);
    final Commenter commenter = LanguageCommenters.INSTANCE.forLanguage(hostFile.getLanguage());
    String comment = commenter.getLineCommentPrefix();
    if (comment == null) {
      comment = commenter.getBlockCommentPrefix();
    }

    // "quick fix action text to perform" "should be available"
    assert comment != null : commenter;
    // '*' in a block-comment prefix must be escaped before use in the regex.
    Pattern pattern = Pattern.compile("^" + comment.replace("*", "\\*") + actionPattern, Pattern.DOTALL);
    Matcher matcher = pattern.matcher(contents);
    assertTrue("No comment found in "+file.getVirtualFile(), matcher.matches());
    final String text = matcher.group(1);
    final Boolean actionShouldBeAvailable = Boolean.valueOf(matcher.group(2));
    return Pair.create(text, actionShouldBeAvailable);
  }

  // Core assertion logic: checks the action's availability matches the hint,
  // invokes it, optionally checks it is gone afterwards, and compares the
  // editor contents with the "after" file.
  public static void doAction(@NotNull String text,
                              boolean actionShouldBeAvailable,
                              String testFullPath,
                              String testName,
                              QuickFixTestCase quickFix) throws Exception {
    IntentionAction action = quickFix.findActionWithText(text);
    if (action == null) {
      if (actionShouldBeAvailable) {
        // Build a detailed failure message listing what WAS available.
        List<IntentionAction> actions = quickFix.getAvailableActions();
        List<String> texts = new ArrayList<>();
        for (IntentionAction intentionAction : actions) {
          texts.add(intentionAction.getText());
        }
        Collection<HighlightInfo> infos = quickFix.doHighlighting();
        fail("Action with text '" + text + "' is not available in test " + testFullPath + "\n" +
             "Available actions (" + texts.size() + "): " + texts + "\n" +
             actions + "\nInfos:" + infos);
      }
    }
    else {
      if (!actionShouldBeAvailable) {
        fail("Action '" + text + "' is available (but must not) in test " + testFullPath);
      }
      quickFix.invoke(action);
      // Pump the EDT twice so queued invokeLater side effects of the fix run.
      UIUtil.dispatchAllInvocationEvents();
      UIUtil.dispatchAllInvocationEvents();
      if (!quickFix.shouldBeAvailableAfterExecution()) {
        final IntentionAction afterAction = quickFix.findActionWithText(text);
        if (afterAction != null) {
          fail("Action '" + text + "' is still available after its invocation in test " + testFullPath);
        }
      }
      String expectedFilePath = ObjectUtils.notNull(quickFix.getBasePath(), "") + "/" + AFTER_PREFIX + testName;
      quickFix.checkResultByFile("In file :" + expectedFilePath, expectedFilePath, false);
    }
  }

  // Delegates to the static overload using the wrapper installed by doTestFor().
  protected void doAction(@NotNull String text, final boolean actionShouldBeAvailable, final String testFullPath, final String testName)
    throws Exception {
    doAction(text, actionShouldBeAvailable, testFullPath, testName, myWrapper);
  }

  // Finds an available action by its presentation text and invokes it,
  // failing with the full candidate list if it is absent.
  protected void doAction(@NotNull String actionName) {
    final List<IntentionAction> available = getAvailableActions();
    final IntentionAction action = findActionWithText(available, actionName);
    assertNotNull("Action '" + actionName + "' not found among " + available, action);
    invoke(action);
  }

  // Invokes the action through the standard intention handler, then pumps the EDT.
  protected static void invoke(@NotNull IntentionAction action) throws IncorrectOperationException {
    ShowIntentionActionsHandler.chooseActionAndInvoke(getFile(), getEditor(), action, action.getText());
    UIUtil.dispatchAllInvocationEvents();
  }

  protected IntentionAction findActionWithText(@NotNull String text) {
    return findActionWithText(getAvailableActions(), text);
  }

  // Exact-text lookup; returns null when no action matches.
  public static IntentionAction findActionWithText(@NotNull List<IntentionAction> actions, @NotNull String text) {
    for (IntentionAction action : actions) {
      if (text.equals(action.getText())) {
        return action;
      }
    }
    return null;
  }

  /**
   * @deprecated use {@link LightQuickFixParameterizedTestCase}
   * to get separate tests for all data files in testData directory.
   */
  protected void doAllTests() {
    doAllTests(createWrapper());
  }

  // Runs doTestFor() for every "before*" file in the test data directory.
  public static void doAllTests(QuickFixTestCase testCase) {
    final File[] files = getBeforeTestFiles(testCase);

    for (File file : files) {
      final String testName = file.getName().substring(BEFORE_PREFIX.length());
      doTestFor(testName, testCase);
    }
  }

  // Lists all "before*" data files; fails (rather than silently passing)
  // when the directory has none.
  @NotNull
  public static File[] getBeforeTestFiles(@NotNull QuickFixTestCase testCase) {
    assertNotNull("getBasePath() should not return null!", testCase.getBasePath());
    final String testDirPath = testCase.getTestDataPath().replace(File.separatorChar, '/') + testCase.getBasePath();
    File testDir = new File(testDirPath);
    final File[] files = testDir.listFiles((dir, name) -> name.startsWith(BEFORE_PREFIX));
    if (files == null || files.length == 0) {
      fail("Test files not found in " + testDirPath);
    }
    return files;
  }

  protected void doSingleTest(String fileSuffix) {
    doTestFor(fileSuffix, createWrapper());
  }

  protected void doSingleTest(String fileSuffix, String testDataPath) {
    doTestFor(fileSuffix, createWrapper(testDataPath));
  }

  protected QuickFixTestCase createWrapper() {
    return createWrapper(null);
  }

  // Adapts this test-case instance to the QuickFixTestCase interface so the
  // static driver methods can call back into it; an explicit testDataPath
  // overrides getTestDataPath() when non-null (resolved lazily).
  protected QuickFixTestCase createWrapper(final String testDataPath) {
    return new QuickFixTestCase() {
      public String myTestDataPath = testDataPath;

      @Override
      public String getBasePath() {
        return LightQuickFixTestCase.this.getBasePath();
      }

      @Override
      public String getTestDataPath() {
        if (myTestDataPath == null) {
          myTestDataPath = LightQuickFixTestCase.this.getTestDataPath();
        }
        return myTestDataPath;
      }

      @NotNull
      @Override
      public Pair<String, Boolean> parseActionHintImpl(@NotNull PsiFile file, @NotNull String contents) {
        return LightQuickFixTestCase.this.parseActionHintImpl(file, contents);
      }

      @Override
      public void beforeActionStarted(String testName, String contents) {
        LightQuickFixTestCase.this.beforeActionStarted(testName, contents);
      }

      @Override
      public void afterActionCompleted(String testName, String contents) {
        LightQuickFixTestCase.this.afterActionCompleted(testName, contents);
      }

      @Override
      public void doAction(String text, boolean actionShouldBeAvailable, String testFullPath, String testName) throws Exception {
        LightQuickFixTestCase.this.doAction(text, actionShouldBeAvailable, testFullPath, testName);
      }

      @Override
      public void checkResultByFile(String s, @NotNull String expectedFilePath, boolean b) throws Exception {
        LightQuickFixTestCase.this.checkResultByFile(s, expectedFilePath, b);
      }

      @Override
      public IntentionAction findActionWithText(String text) {
        return LightQuickFixTestCase.this.findActionWithText(text);
      }

      @Override
      public boolean shouldBeAvailableAfterExecution() {
        return LightQuickFixTestCase.this.shouldBeAvailableAfterExecution();
      }

      @Override
      public void invoke(IntentionAction action) {
        LightQuickFixTestCase.invoke(action);
      }

      @NotNull
      @Override
      public List<HighlightInfo> doHighlighting() {
        return LightQuickFixTestCase.this.doHighlighting();
      }

      @NotNull
      @Override
      public List<IntentionAction> getAvailableActions() {
        return LightQuickFixTestCase.this.getAvailableActions();
      }

      @Override
      public void configureFromFileText(String name, String contents) throws IOException {
        LightPlatformCodeInsightTestCase.configureFromFileText(name, contents);
      }

      @Override
      public PsiFile getFile() {
        return LightPlatformCodeInsightTestCase.getFile();
      }

      @Override
      public Project getProject() {
        return LightPlatformTestCase.getProject();
      }

      @Override
      public void bringRealEditorBack() {
        LightPlatformCodeInsightTestCase.bringRealEditorBack();
      }
    };
  }

  // Runs highlighting first so the intention list reflects current analysis.
  protected List<IntentionAction> getAvailableActions() {
    doHighlighting();
    return getAvailableActions(getEditor(), getFile());
  }

  public static List<IntentionAction> getAvailableActions(@NotNull Editor editor, @NotNull PsiFile file) {
    return CodeInsightTestFixtureImpl.getAvailableIntentions(editor, file);
  }

  // Subclasses return the path (relative to test data root) of their data files.
  @NonNls protected String getBasePath() {return null;}
}
/*
 * $Id: XMLWriter.java,v 1.4 2007-07-19 22:33:15 ofung Exp $
 */

/*
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
 *
 * Copyright 1997-2007 Sun Microsystems, Inc. All rights reserved.
 *
 * The contents of this file are subject to the terms of either the GNU
 * General Public License Version 2 only ("GPL") or the Common Development
 * and Distribution License("CDDL") (collectively, the "License"). You
 * may not use this file except in compliance with the License. You can obtain
 * a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
 * or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
 * language governing permissions and limitations under the License.
 *
 * When distributing the software, include this License Header Notice in each
 * file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
 * Sun designates this particular file as subject to the "Classpath" exception
 * as provided by Sun in the GPL Version 2 section of the License file that
 * accompanied this code. If applicable, add the following below the License
 * Header, with the fields enclosed by brackets [] replaced by your own
 * identifying information: "Portions Copyrighted [year]
 * [name of copyright owner]"
 *
 * Contributor(s):
 *
 * If you wish your version of this file to be governed by only the CDDL or
 * only the GPL Version 2, indicate your decision by adding "[Contributor]
 * elects to include this software in this distribution under the [CDDL or GPL
 * Version 2] license." If you don't indicate a single choice of license, a
 * recipient has the option to distribute your version of this file under
 * either the CDDL, the GPL Version 2 or to extend the choice of license to
 * its licensees as provided above. However, if you add GPL Version 2 code
 * and therefore, elected the GPL Version 2 license, then the option applies
 * only if the new code is made subject to such option by the copyright
 * holder.
 */

package com.sun.xml.stream.writers;

import java.io.IOException;
import java.io.Writer;

import com.sun.xml.stream.xerces.util.XMLStringBuffer;

/**
 * XMLWriter.
 *
 * <code>XMLWriter</code> is not thread safe.
 *
 * For efficiency this writer buffers the input. Use <code>flush()</code>
 * to explicitly write the data to the underlying stream.
 *
 * This writer is designed in such a way that it at least buffers the input to
 * the <code>size</code> specified. Unless <code>flush</code> is called, it
 * guarantees that data in chunks of size equal to or more than <code>size</code>
 * specified will be written.
 *
 * <code>XMLWriter</code> instances can be reused. <code>setWriter()</code>
 * internally clears the buffer and stores the reference to the newly supplied
 * <code>Writer</code> instance.
 *
 * @author Neeraj Bajaj Sun Microsystems, inc.
 * @author Sunitha Reddy Sun Microsystems, inc.
 */
public class XMLWriter extends Writer {

    /** Underlying stream; {@code null} once this writer is closed or reset. */
    private Writer writer;

    /** Buffering threshold: buffered data is written out once it exceeds this size. */
    private int size;

    // Keep the internal buffer larger than 'size' to avoid resizing.
    private XMLStringBuffer buffer = new XMLStringBuffer(6 * (1 << 11)); // 6 KB

    private static final int THRESHHOLD_LENGTH = 1 << 12; // 4 KB

    private static final boolean DEBUG = false;

    /** Creates the instance of <code>XMLWriter</code> with the default 4 KB threshold. */
    public XMLWriter(Writer writer) {
        this(writer, THRESHHOLD_LENGTH);
    }

    /**
     * Creates the instance of <code>XMLWriter</code> that at least buffers the
     * input to the <code>size</code> specified.
     *
     * @param writer underlying destination stream
     * @param size   buffering threshold in characters
     */
    public XMLWriter(Writer writer, int size) {
        this.writer = writer;
        this.size = size;
    }

    /**
     * Write a single character. The character to be written is contained in
     * the 16 low-order bits of the given integer value; the 16 high-order bits
     * are ignored.
     *
     * @param c int specifying a character to be written.
     * @exception IOException If an I/O error occurs, or the stream is closed
     */
    public void write(int c) throws IOException {
        ensureOpen();
        buffer.append((char) c);
        conditionalWrite();
    }

    /**
     * Write an array of characters.
     *
     * @param cbuf Array of characters to be written
     * @exception IOException If an I/O error occurs
     */
    public void write(char cbuf[]) throws IOException {
        write(cbuf, 0, cbuf.length);
    }

    /**
     * Write a portion of an array of characters.
     *
     * @param cbuf Array of characters
     * @param off  Offset from which to start writing characters
     * @param len  Number of characters to write
     * @exception IOException If an I/O error occurs
     */
    public void write(char cbuf[], int off, int len) throws IOException {
        ensureOpen();
        // Optimization: if the data is larger than the threshold, bypass the
        // buffer and write straight to the underlying stream -- but drain any
        // previously buffered data first so output order is preserved.
        if (len > size) {
            writeBufferedData();
            writer.write(cbuf, off, len);
        } else {
            buffer.append(cbuf, off, len);
            conditionalWrite();
        }
    }

    /**
     * Write a portion of a string.
     *
     * @param str A String
     * @param off Offset from which to start writing characters
     * @param len Number of characters to write
     * @exception IOException If an I/O error occurs
     */
    public void write(String str, int off, int len) throws IOException {
        write(str.toCharArray(), off, len);
    }

    /**
     * Write a string.
     *
     * @param str String to be written
     * @exception IOException If an I/O error occurs
     */
    public void write(String str) throws IOException {
        // Optimization: large strings bypass the buffer (see write(char[],int,int)).
        if (str.length() > size) {
            writeBufferedData();
            writer.write(str);
        } else {
            buffer.append(str);
            conditionalWrite();
        }
    }

    /**
     * Close the stream, flushing it first. Once a stream has been closed,
     * further write() or flush() invocations will cause an IOException to be
     * thrown. Closing a previously-closed stream, however, has no effect.
     *
     * @exception IOException If an I/O error occurs
     */
    public void close() throws IOException {
        if (writer == null) return;
        // Flush buffered data before closing the underlying stream.
        flush();
        writer.close();
        writer = null;
    }

    /**
     * Flush the stream. If the stream has saved any characters from the
     * various write() methods in a buffer, write them immediately to their
     * intended destination. Then, if that destination is another character or
     * byte stream, flush it.
     *
     * @exception IOException If an I/O error occurs, or the stream is closed
     */
    public void flush() throws IOException {
        ensureOpen();
        writeBufferedData();
        writer.flush();
    }

    /**
     * Reset this Writer: drop the writer reference, clear the buffer, and
     * restore the default threshold.
     *
     * see @setWriter()
     */
    public void reset() {
        this.writer = null;
        buffer.clear();
        this.size = THRESHHOLD_LENGTH;
    }

    /**
     * Set the given <code>Writer</code>; clears the buffer and restores the
     * default threshold.
     *
     * @param writer Writer.
     */
    public void setWriter(Writer writer) {
        this.writer = writer;
        buffer.clear();
        this.size = THRESHHOLD_LENGTH;
    }

    /**
     * Set the given <code>Writer</code> and buffering threshold.
     *
     * @param writer Writer.
     * @param size   Writer will buffer the character data size, after that
     *               data is written to stream.
     */
    public void setWriter(Writer writer, int size) {
        this.writer = writer;
        // FIX: clear any data buffered for the previous writer. Without this,
        // stale output from the previous document would be emitted into the new
        // writer on the next flush. The one-arg overload (and the class
        // contract documented above) already clears the buffer.
        buffer.clear();
        this.size = size;
    }

    /**
     * Returns underlying <code>Writer</code> (may be {@code null} after
     * close()/reset()).
     */
    protected Writer getWriter() {
        return writer;
    }

    /** Write the buffered data out if the buffer has grown past the threshold. */
    private void conditionalWrite() throws IOException {
        if (buffer.length > size) {
            if (DEBUG) {
                System.out.println("internal buffer length " + buffer.length + " increased size limit : " + size);
                System.out.println("Data: ('" + new String(buffer.ch, buffer.offset, buffer.length) + "')");
            }
            writeBufferedData();
        }
    }

    /**
     * Write the data present in the buffer to the writer.
     * The buffer is cleared after the write operation.
     */
    private void writeBufferedData() throws IOException {
        writer.write(buffer.ch, buffer.offset, buffer.length);
        buffer.clear();
    }

    /** Check to make sure that the stream has not been closed. */
    private void ensureOpen() throws IOException {
        if (writer == null) throw new IOException("Stream closed");
    }
}
/*
 * Copyright 2000-2011 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.uiDesigner.propertyInspector.editors.string;

import com.intellij.ide.DataManager;
import com.intellij.lang.properties.IProperty;
import com.intellij.lang.properties.psi.PropertiesFile;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.DimensionService;
import com.intellij.openapi.util.Pair;
import com.intellij.ui.DoubleClickListener;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.SpeedSearchBase;
import com.intellij.ui.table.JBTable;
import com.intellij.uiDesigner.UIDesignerBundle;
import com.intellij.uiDesigner.designSurface.GuiEditor;
import com.intellij.uiDesigner.lw.StringDescriptor;
import gnu.trove.TObjectIntHashMap;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableCellRenderer;
import javax.swing.table.TableColumn;
import javax.swing.table.TableColumnModel;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * Modal dialog that shows the key/value pairs of a properties resource bundle
 * in a two-column table and lets the user pick (or create) a key. The chosen
 * key is exposed as a {@link StringDescriptor} via {@link #getDescriptor()}.
 *
 * @author Anton Katilin
 * @author Vladimir Kondratyev
 */
public final class KeyChooserDialog extends DialogWrapper{
  private static final Logger LOG = Logger.getInstance("#com.intellij.uiDesigner.propertyInspector.editors.string.KeyChooserDialog");

  // Bundle whose key/value pairs are listed in the table.
  private final PropertiesFile myBundle;
  // Bundle name used when building the resulting StringDescriptor.
  private final String myBundleName;
  /** List of bundle's pairs (key, value), sorted case-insensitively by key. */
  private ArrayList<Pair<String, String>> myPairs;
  private final JComponent myCenterPanel;
  /** Table with key/value pairs */
  private final JTable myTable;
  // Placeholder shown in the "Value" column when a property has no value.
  @NonNls private static final String NULL = "null";
  private final MyTableModel myModel;
  private final GuiEditor myEditor;

  // Action-map key binding Enter in the table to the dialog's OK action.
  private static final String OK_ACTION = "OkAction";

  /**
   * @param parent the parent component for the dialog.
   * @param bundle resource bundle to be shown.
   * @param bundleName name of the resource bundle to be shown. We need this
   * name to create StringDescriptor in {@link #getDescriptor()} method.
   * @param keyToPreselect describes the row that should be selected in the
   * table initially; may be absent from the bundle, in which case nothing is
   * preselected.
   * @param editor the GUI editor whose PSI file receives newly created keys.
   */
  public KeyChooserDialog(
    final Component parent,
    @NotNull final PropertiesFile bundle,
    @NotNull final String bundleName,
    final String keyToPreselect,
    final GuiEditor editor
  ) {
    super(parent, true);
    myEditor = editor;
    myBundle = bundle;
    myBundleName = bundleName;

    setTitle(UIDesignerBundle.message("title.chooser.value"));

    // Read key/value pairs from resource bundle.
    // Must happen before MyTableModel is created: the model reads myPairs.
    fillPropertyList();

    // Create UI
    myModel = new MyTableModel();
    myTable = new JBTable(myModel);
    myTable.getSelectionModel().setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    new MySpeedSearch(myTable);
    myCenterPanel = ScrollPaneFactory.createScrollPane(myTable);

    // Pressing Enter in the table accepts the currently selected row.
    myTable.getInputMap().put(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER,0), OK_ACTION);
    myTable.getActionMap().put(OK_ACTION, new AbstractAction() {
      public void actionPerformed(ActionEvent e) {
        getOKAction().actionPerformed(e);
      }
    });

    // Calculate width for "Key" column: wide enough for the longest key (plus
    // padding) and the column header, clamped to [minWidth, maxWidth] where
    // maxWidth is half of the dialog's last remembered width (if any).
    final Project projectGuess = CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext(parent));
    final Dimension size = DimensionService.getInstance().getSize(getDimensionServiceKey(), projectGuess);
    final FontMetrics metrics = myTable.getFontMetrics(myTable.getFont());
    int minWidth = 200;
    int maxWidth = size != null ? size.width / 2 : Integer.MAX_VALUE;
    if (minWidth > maxWidth) {
      minWidth = maxWidth;
    }
    int width = minWidth;
    for(int i = myPairs.size() - 1; i >= 0; i--){
      final Pair<String, String> pair = myPairs.get(i);
      width = Math.max(width, metrics.stringWidth(pair.getFirst()));
    }
    width += 20; // padding
    width = Math.max(width, metrics.stringWidth(myModel.getColumnName(0)));
    width = Math.max(width, minWidth);
    width = Math.min(width, maxWidth);
    final TableColumnModel columnModel = myTable.getColumnModel();
    final TableColumn keyColumn = columnModel.getColumn(0);
    // Fix the key column at the computed width (min == max).
    keyColumn.setMaxWidth(width);
    keyColumn.setMinWidth(width);
    final TableCellRenderer defaultRenderer = myTable.getDefaultRenderer(String.class);
    if (defaultRenderer instanceof JComponent) {
      // Render keys/values literally instead of interpreting them as HTML.
      final JComponent component = (JComponent)defaultRenderer;
      component.putClientProperty("html.disable", Boolean.TRUE);
    }

    selectKey(keyToPreselect);

    init();

    // Double-clicking a row accepts it (installed after init() so the table
    // is fully set up).
    new DoubleClickListener() {
      @Override
      protected boolean onDoubleClick(MouseEvent e) {
        doOKAction();
        return true;
      }
    }.installOn(myTable);
  }

  /**
   * Rebuilds {@link #myPairs} from the bundle: one (key, value) pair per
   * property with a non-null key, sorted case-insensitively by key. Properties
   * without a value get the literal placeholder {@link #NULL}.
   */
  private void fillPropertyList() {
    myPairs = new ArrayList<Pair<String, String>>();

    final List<IProperty> properties = myBundle.getProperties();
    for (IProperty property : properties) {
      final String key = property.getUnescapedKey();
      final String value = property.getValue();
      if (key != null) {
        myPairs.add(new Pair<String, String>(key, value != null? value : NULL));
      }
    }
    Collections.sort(myPairs, new MyPairComparator());
  }

  /** Selects and scrolls to the row whose key equals {@code keyToPreselect}, if present. */
  private void selectKey(final String keyToPreselect) {
    // Preselect proper row
    int indexToPreselect = -1;
    for(int i = myPairs.size() - 1; i >= 0; i--){
      final Pair<String, String> pair = myPairs.get(i);
      if(pair.getFirst().equals(keyToPreselect)){
        indexToPreselect = i;
        break;
      }
    }
    if(indexToPreselect != -1){
      selectElementAt(indexToPreselect);
    }
  }

  // Adds the "New Property" button on the left side of the button row.
  @NotNull
  @Override
  protected Action[] createLeftSideActions() {
    return new Action[] { new NewKeyValueAction() };
  }

  /** Selects row {@code index} and scrolls it into view. */
  private void selectElementAt(final int index) {
    myTable.getSelectionModel().setSelectionInterval(index, index);
    myTable.scrollRectToVisible(myTable.getCellRect(index, 0, true));
  }

  // Per-class key so DimensionService remembers this dialog's size separately.
  @NotNull
  protected String getDimensionServiceKey() {
    return getClass().getName();
  }

  public JComponent getPreferredFocusedComponent() {
    return myTable;
  }

  /**
   * @return resolved string descriptor. If user chose nothing then the
   * method returns <code>null</code>.
   */
  @Nullable StringDescriptor getDescriptor() {
    final int selectedRow = myTable.getSelectedRow();
    if(selectedRow < 0 || selectedRow >= myTable.getRowCount()){
      return null;
    }
    else{
      final Pair<String, String> pair = myPairs.get(selectedRow);
      final StringDescriptor descriptor = new StringDescriptor(myBundleName, pair.getFirst());
      descriptor.setResolvedValue(pair.getSecond());
      return descriptor;
    }
  }

  protected JComponent createCenterPanel() {
    return myCenterPanel;
  }

  /** Orders pairs case-insensitively by key. */
  private static final class MyPairComparator implements Comparator<Pair<String, String>>{
    public int compare(final Pair<String, String> p1, final Pair<String, String> p2) {
      return p1.getFirst().compareToIgnoreCase(p2.getFirst());
    }
  }

  /** Two-column (Key, Value) read-only table model backed by {@link #myPairs}. */
  private final class MyTableModel extends AbstractTableModel{
    public int getColumnCount() {
      return 2;
    }

    public String getColumnName(final int column) {
      if(column == 0){
        return UIDesignerBundle.message("column.key");
      }
      else if(column == 1){
        return UIDesignerBundle.message("column.value");
      }
      else{
        throw new IllegalArgumentException("unknown column: " + column);
      }
    }

    public Class getColumnClass(final int column) {
      if(column == 0){
        return String.class;
      }
      else if(column == 1){
        return String.class;
      }
      else{
        throw new IllegalArgumentException("unknown column: " + column);
      }
    }

    public Object getValueAt(final int row, final int column) {
      if(column == 0){
        return myPairs.get(row).getFirst();
      }
      else if(column == 1){
        return myPairs.get(row).getSecond();
      }
      else{
        throw new IllegalArgumentException("unknown column: " + column);
      }
    }

    public int getRowCount() {
      return myPairs.size();
    }

    /** Notifies listeners that the underlying pair list changed. */
    public void update() {
      fireTableDataChanged();
    }
  }

  /** Speed search over the key column: typing filters/jumps to matching keys. */
  private class MySpeedSearch extends SpeedSearchBase<JTable> {
    // Lazily built element -> model-row-index map; see getAllElements().
    private TObjectIntHashMap<Object> myElements;
    private Object[] myElementsArray;

    public MySpeedSearch(final JTable component) {
      super(component);
    }

    @Override
    protected int convertIndexToModel(int viewIndex) {
      return getComponent().convertRowIndexToModel(viewIndex);
    }

    public int getSelectedIndex() {
      return myComponent.getSelectedRow();
    }

    public Object[] getAllElements() {
      // NOTE(review): this cache is built once and never invalidated; after
      // NewKeyValueAction refreshes myPairs via fillPropertyList(), the cache
      // still reflects the old list -- verify whether speed search should be
      // reset when a property is added.
      if (myElements == null) {
        myElements = new TObjectIntHashMap<Object>();
        myElementsArray = myPairs.toArray();
        for (int idx = 0; idx < myElementsArray.length; idx++) {
          Object element = myElementsArray[idx];
          myElements.put(element, idx);
        }
      }
      return myElementsArray;
    }

    public String getElementText(final Object element) {
      //noinspection unchecked
      return ((Pair<String, String>)element).getFirst();
    }

    public void selectElement(final Object element, final String selectedText) {
      final int index = myElements.get(element);
      selectElementAt(getComponent().convertRowIndexToView(index));
    }
  }

  /** Left-side dialog action: prompts for a new key/value, saves it into the bundle, and selects it. */
  private class NewKeyValueAction extends AbstractAction {
    public NewKeyValueAction() {
      putValue(Action.NAME, UIDesignerBundle.message("key.chooser.new.property"));
    }

    public void actionPerformed(ActionEvent e) {
      NewKeyDialog dlg = new NewKeyDialog(getWindow());
      dlg.show();
      if (dlg.isOK()) {
        // Abort silently if the property could not be saved.
        if (!StringEditorDialog.saveCreatedProperty(myBundle, dlg.getName(), dlg.getValue(), myEditor.getPsiFile())) return;
        fillPropertyList();
        myModel.update();
        selectKey(dlg.getName());
      }
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.client.impl; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_CACHED_CONN_RETRY_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_CACHED_CONN_RETRY_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_DATANODE_RESTART_TIMEOUT_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_DATANODE_RESTART_TIMEOUT_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_CACHE_CAPACITY_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_CACHE_CAPACITY_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_DEFAULT; import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_KEY; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.BlockReaderFactory; import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.util.ByteArrayManager; import org.apache.hadoop.ipc.Client; import org.apache.hadoop.util.DataChecksum; import com.google.common.annotations.VisibleForTesting; /** * DFSClient configuration */ public class DfsClientConf { private final int hdfsTimeout; // timeout value for a DFS operation. 
private final int maxFailoverAttempts; private final int maxRetryAttempts; private final int failoverSleepBaseMillis; private final int failoverSleepMaxMillis; private final int maxBlockAcquireFailures; private final int datanodeSocketWriteTimeout; private final int ioBufferSize; private final ChecksumOpt defaultChecksumOpt; private final int writePacketSize; private final int writeMaxPackets; private final ByteArrayManager.Conf writeByteArrayManagerConf; private final int socketTimeout; private final long excludedNodesCacheExpiry; /** Wait time window (in msec) if BlockMissingException is caught */ private final int timeWindow; private final int numCachedConnRetry; private final int numBlockWriteRetry; private final int numBlockWriteLocateFollowingRetry; private final int blockWriteLocateFollowingInitialDelayMs; private final long defaultBlockSize; private final long prefetchSize; private final short defaultReplication; private final String taskId; private final FsPermission uMask; private final boolean connectToDnViaHostname; private final boolean hdfsBlocksMetadataEnabled; private final int fileBlockStorageLocationsNumThreads; private final int fileBlockStorageLocationsTimeoutMs; private final int retryTimesForGetLastBlockLength; private final int retryIntervalForGetLastBlockLength; private final long datanodeRestartTimeout; private final long slowIoWarningThresholdMs; private final ShortCircuitConf shortCircuitConf; private final long hedgedReadThresholdMillis; private final int hedgedReadThreadpoolSize; public DfsClientConf(Configuration conf) { // The hdfsTimeout is currently the same as the ipc timeout hdfsTimeout = Client.getTimeout(conf); maxRetryAttempts = conf.getInt( HdfsClientConfigKeys.Retry.MAX_ATTEMPTS_KEY, HdfsClientConfigKeys.Retry.MAX_ATTEMPTS_DEFAULT); timeWindow = conf.getInt( HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, HdfsClientConfigKeys.Retry.WINDOW_BASE_DEFAULT); retryTimesForGetLastBlockLength = conf.getInt( 
HdfsClientConfigKeys.Retry.TIMES_GET_LAST_BLOCK_LENGTH_KEY, HdfsClientConfigKeys.Retry.TIMES_GET_LAST_BLOCK_LENGTH_DEFAULT); retryIntervalForGetLastBlockLength = conf.getInt( HdfsClientConfigKeys.Retry.INTERVAL_GET_LAST_BLOCK_LENGTH_KEY, HdfsClientConfigKeys.Retry.INTERVAL_GET_LAST_BLOCK_LENGTH_DEFAULT); maxFailoverAttempts = conf.getInt( HdfsClientConfigKeys.Failover.MAX_ATTEMPTS_KEY, HdfsClientConfigKeys.Failover.MAX_ATTEMPTS_DEFAULT); failoverSleepBaseMillis = conf.getInt( HdfsClientConfigKeys.Failover.SLEEPTIME_BASE_KEY, HdfsClientConfigKeys.Failover.SLEEPTIME_BASE_DEFAULT); failoverSleepMaxMillis = conf.getInt( HdfsClientConfigKeys.Failover.SLEEPTIME_MAX_KEY, HdfsClientConfigKeys.Failover.SLEEPTIME_MAX_DEFAULT); maxBlockAcquireFailures = conf.getInt( DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_KEY, DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_DEFAULT); datanodeSocketWriteTimeout = conf.getInt(DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY, HdfsServerConstants.WRITE_TIMEOUT); ioBufferSize = conf.getInt( CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY, CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT); defaultChecksumOpt = getChecksumOptFromConf(conf); socketTimeout = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, HdfsServerConstants.READ_TIMEOUT); /** dfs.write.packet.size is an internal config variable */ writePacketSize = conf.getInt( DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_KEY, DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT); writeMaxPackets = conf.getInt( HdfsClientConfigKeys.Write.MAX_PACKETS_IN_FLIGHT_KEY, HdfsClientConfigKeys.Write.MAX_PACKETS_IN_FLIGHT_DEFAULT); final boolean byteArrayManagerEnabled = conf.getBoolean( HdfsClientConfigKeys.Write.ByteArrayManager.ENABLED_KEY, HdfsClientConfigKeys.Write.ByteArrayManager.ENABLED_DEFAULT); if (!byteArrayManagerEnabled) { writeByteArrayManagerConf = null; } else { final int countThreshold = conf.getInt( HdfsClientConfigKeys.Write.ByteArrayManager.COUNT_THRESHOLD_KEY, 
HdfsClientConfigKeys.Write.ByteArrayManager.COUNT_THRESHOLD_DEFAULT); final int countLimit = conf.getInt( HdfsClientConfigKeys.Write.ByteArrayManager.COUNT_LIMIT_KEY, HdfsClientConfigKeys.Write.ByteArrayManager.COUNT_LIMIT_DEFAULT); final long countResetTimePeriodMs = conf.getLong( HdfsClientConfigKeys.Write.ByteArrayManager.COUNT_RESET_TIME_PERIOD_MS_KEY, HdfsClientConfigKeys.Write.ByteArrayManager.COUNT_RESET_TIME_PERIOD_MS_DEFAULT); writeByteArrayManagerConf = new ByteArrayManager.Conf( countThreshold, countLimit, countResetTimePeriodMs); } defaultBlockSize = conf.getLongBytes(DFS_BLOCK_SIZE_KEY, DFS_BLOCK_SIZE_DEFAULT); defaultReplication = (short) conf.getInt( DFS_REPLICATION_KEY, DFS_REPLICATION_DEFAULT); taskId = conf.get("mapreduce.task.attempt.id", "NONMAPREDUCE"); excludedNodesCacheExpiry = conf.getLong( HdfsClientConfigKeys.Write.EXCLUDE_NODES_CACHE_EXPIRY_INTERVAL_KEY, HdfsClientConfigKeys.Write.EXCLUDE_NODES_CACHE_EXPIRY_INTERVAL_DEFAULT); prefetchSize = conf.getLong(HdfsClientConfigKeys.Read.PREFETCH_SIZE_KEY, 10 * defaultBlockSize); numCachedConnRetry = conf.getInt(DFS_CLIENT_CACHED_CONN_RETRY_KEY, DFS_CLIENT_CACHED_CONN_RETRY_DEFAULT); numBlockWriteRetry = conf.getInt( HdfsClientConfigKeys.BlockWrite.RETRIES_KEY, HdfsClientConfigKeys.BlockWrite.RETRIES_DEFAULT); numBlockWriteLocateFollowingRetry = conf.getInt( HdfsClientConfigKeys.BlockWrite.LOCATEFOLLOWINGBLOCK_RETRIES_KEY, HdfsClientConfigKeys.BlockWrite.LOCATEFOLLOWINGBLOCK_RETRIES_DEFAULT); blockWriteLocateFollowingInitialDelayMs = conf.getInt( HdfsClientConfigKeys.BlockWrite.LOCATEFOLLOWINGBLOCK_INITIAL_DELAY_MS_KEY, HdfsClientConfigKeys.BlockWrite.LOCATEFOLLOWINGBLOCK_INITIAL_DELAY_MS_DEFAULT); uMask = FsPermission.getUMask(conf); connectToDnViaHostname = conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT); hdfsBlocksMetadataEnabled = conf.getBoolean( DFSConfigKeys.DFS_HDFS_BLOCKS_METADATA_ENABLED, DFSConfigKeys.DFS_HDFS_BLOCKS_METADATA_ENABLED_DEFAULT); 
fileBlockStorageLocationsNumThreads = conf.getInt( DFSConfigKeys.DFS_CLIENT_FILE_BLOCK_STORAGE_LOCATIONS_NUM_THREADS, DFSConfigKeys.DFS_CLIENT_FILE_BLOCK_STORAGE_LOCATIONS_NUM_THREADS_DEFAULT); fileBlockStorageLocationsTimeoutMs = conf.getInt( DFSConfigKeys.DFS_CLIENT_FILE_BLOCK_STORAGE_LOCATIONS_TIMEOUT_MS, DFSConfigKeys.DFS_CLIENT_FILE_BLOCK_STORAGE_LOCATIONS_TIMEOUT_MS_DEFAULT); datanodeRestartTimeout = conf.getLong( DFS_CLIENT_DATANODE_RESTART_TIMEOUT_KEY, DFS_CLIENT_DATANODE_RESTART_TIMEOUT_DEFAULT) * 1000; slowIoWarningThresholdMs = conf.getLong( DFSConfigKeys.DFS_CLIENT_SLOW_IO_WARNING_THRESHOLD_KEY, DFSConfigKeys.DFS_CLIENT_SLOW_IO_WARNING_THRESHOLD_DEFAULT); shortCircuitConf = new ShortCircuitConf(conf); hedgedReadThresholdMillis = conf.getLong( HdfsClientConfigKeys.HedgedRead.THRESHOLD_MILLIS_KEY, HdfsClientConfigKeys.HedgedRead.THRESHOLD_MILLIS_DEFAULT); hedgedReadThreadpoolSize = conf.getInt( HdfsClientConfigKeys.HedgedRead.THREADPOOL_SIZE_KEY, HdfsClientConfigKeys.HedgedRead.THREADPOOL_SIZE_DEFAULT); } private DataChecksum.Type getChecksumType(Configuration conf) { final String checksum = conf.get( DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY, DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT); try { return DataChecksum.Type.valueOf(checksum); } catch(IllegalArgumentException iae) { DFSClient.LOG.warn("Bad checksum type: " + checksum + ". Using default " + DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT); return DataChecksum.Type.valueOf( DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT); } } // Construct a checksum option from conf private ChecksumOpt getChecksumOptFromConf(Configuration conf) { DataChecksum.Type type = getChecksumType(conf); int bytesPerChecksum = conf.getInt(DFS_BYTES_PER_CHECKSUM_KEY, DFS_BYTES_PER_CHECKSUM_DEFAULT); return new ChecksumOpt(type, bytesPerChecksum); } /** create a DataChecksum with the given option. */ public DataChecksum createChecksum(ChecksumOpt userOpt) { // Fill in any missing field with the default. 
ChecksumOpt opt = ChecksumOpt.processChecksumOpt( defaultChecksumOpt, userOpt); DataChecksum dataChecksum = DataChecksum.newDataChecksum( opt.getChecksumType(), opt.getBytesPerChecksum()); if (dataChecksum == null) { throw new HadoopIllegalArgumentException("Invalid checksum type: userOpt=" + userOpt + ", default=" + defaultChecksumOpt + ", effective=null"); } return dataChecksum; } @VisibleForTesting public int getBlockWriteLocateFollowingInitialDelayMs() { return blockWriteLocateFollowingInitialDelayMs; } /** * @return the hdfsTimeout */ public int getHdfsTimeout() { return hdfsTimeout; } /** * @return the maxFailoverAttempts */ public int getMaxFailoverAttempts() { return maxFailoverAttempts; } /** * @return the maxRetryAttempts */ public int getMaxRetryAttempts() { return maxRetryAttempts; } /** * @return the failoverSleepBaseMillis */ public int getFailoverSleepBaseMillis() { return failoverSleepBaseMillis; } /** * @return the failoverSleepMaxMillis */ public int getFailoverSleepMaxMillis() { return failoverSleepMaxMillis; } /** * @return the maxBlockAcquireFailures */ public int getMaxBlockAcquireFailures() { return maxBlockAcquireFailures; } /** * @return the datanodeSocketWriteTimeout */ public int getDatanodeSocketWriteTimeout() { return datanodeSocketWriteTimeout; } /** * @return the ioBufferSize */ public int getIoBufferSize() { return ioBufferSize; } /** * @return the defaultChecksumOpt */ public ChecksumOpt getDefaultChecksumOpt() { return defaultChecksumOpt; } /** * @return the writePacketSize */ public int getWritePacketSize() { return writePacketSize; } /** * @return the writeMaxPackets */ public int getWriteMaxPackets() { return writeMaxPackets; } /** * @return the writeByteArrayManagerConf */ public ByteArrayManager.Conf getWriteByteArrayManagerConf() { return writeByteArrayManagerConf; } /** * @return the socketTimeout */ public int getSocketTimeout() { return socketTimeout; } /** * @return the excludedNodesCacheExpiry */ public long 
getExcludedNodesCacheExpiry() { return excludedNodesCacheExpiry; } /** * @return the timeWindow */ public int getTimeWindow() { return timeWindow; } /** * @return the numCachedConnRetry */ public int getNumCachedConnRetry() { return numCachedConnRetry; } /** * @return the numBlockWriteRetry */ public int getNumBlockWriteRetry() { return numBlockWriteRetry; } /** * @return the numBlockWriteLocateFollowingRetry */ public int getNumBlockWriteLocateFollowingRetry() { return numBlockWriteLocateFollowingRetry; } /** * @return the defaultBlockSize */ public long getDefaultBlockSize() { return defaultBlockSize; } /** * @return the prefetchSize */ public long getPrefetchSize() { return prefetchSize; } /** * @return the defaultReplication */ public short getDefaultReplication() { return defaultReplication; } /** * @return the taskId */ public String getTaskId() { return taskId; } /** * @return the uMask */ public FsPermission getUMask() { return uMask; } /** * @return the connectToDnViaHostname */ public boolean isConnectToDnViaHostname() { return connectToDnViaHostname; } /** * @return the hdfsBlocksMetadataEnabled */ public boolean isHdfsBlocksMetadataEnabled() { return hdfsBlocksMetadataEnabled; } /** * @return the fileBlockStorageLocationsNumThreads */ public int getFileBlockStorageLocationsNumThreads() { return fileBlockStorageLocationsNumThreads; } /** * @return the getFileBlockStorageLocationsTimeoutMs */ public int getFileBlockStorageLocationsTimeoutMs() { return fileBlockStorageLocationsTimeoutMs; } /** * @return the retryTimesForGetLastBlockLength */ public int getRetryTimesForGetLastBlockLength() { return retryTimesForGetLastBlockLength; } /** * @return the retryIntervalForGetLastBlockLength */ public int getRetryIntervalForGetLastBlockLength() { return retryIntervalForGetLastBlockLength; } /** * @return the datanodeRestartTimeout */ public long getDatanodeRestartTimeout() { return datanodeRestartTimeout; } /** * @return the slowIoWarningThresholdMs */ public long 
getSlowIoWarningThresholdMs() { return slowIoWarningThresholdMs; }

  /**
   * @return the hedgedReadThresholdMillis
   */
  public long getHedgedReadThresholdMillis() {
    return hedgedReadThresholdMillis;
  }

  /**
   * @return the hedgedReadThreadpoolSize
   */
  public int getHedgedReadThreadpoolSize() {
    return hedgedReadThreadpoolSize;
  }

  /**
   * @return the shortCircuitConf
   */
  public ShortCircuitConf getShortCircuitConf() {
    return shortCircuitConf;
  }

  /**
   * Immutable snapshot of all client-side short-circuit read settings.
   * Every field is read once from the {@link Configuration} in the
   * constructor and never changes afterwards, so instances are safe to
   * share between threads.
   */
  public static class ShortCircuitConf {
    private static final Log LOG = LogFactory.getLog(ShortCircuitConf.class);

    // Socket cache sizing/expiry for remote (non-short-circuit) reads.
    private final int socketCacheCapacity;
    private final long socketCacheExpiry;

    // Legacy block-reader toggles (pre-domain-socket implementations).
    private final boolean useLegacyBlockReader;
    private final boolean useLegacyBlockReaderLocal;

    // UNIX domain socket used to pass file descriptors from the DataNode.
    private final String domainSocketPath;
    private final boolean skipShortCircuitChecksums;

    private final int shortCircuitBufferSize;
    private final boolean shortCircuitLocalReads;
    private final boolean domainSocketDataTraffic;

    // Caches for open replica streams and mmap'ed regions.
    private final int shortCircuitStreamsCacheSize;
    private final long shortCircuitStreamsCacheExpiryMs;
    private final int shortCircuitSharedMemoryWatcherInterruptCheckMs;
    private final boolean shortCircuitMmapEnabled;
    private final int shortCircuitMmapCacheSize;
    private final long shortCircuitMmapCacheExpiryMs;
    private final long shortCircuitMmapCacheRetryTimeout;
    private final long shortCircuitCacheStaleThresholdMs;

    private final long keyProviderCacheExpiryMs;

    // Mutable on purpose: tests swap in a failure injector.
    @VisibleForTesting
    public BlockReaderFactory.FailureInjector brfFailureInjector =
        new BlockReaderFactory.FailureInjector();

    /**
     * Reads every short-circuit-related key from {@code conf}, falling back
     * to the compiled-in defaults when a key is absent.
     */
    public ShortCircuitConf(Configuration conf) {
      socketCacheCapacity = conf.getInt(
          DFS_CLIENT_SOCKET_CACHE_CAPACITY_KEY,
          DFS_CLIENT_SOCKET_CACHE_CAPACITY_DEFAULT);
      socketCacheExpiry = conf.getLong(
          DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_KEY,
          DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_DEFAULT);
      useLegacyBlockReader = conf.getBoolean(
          DFSConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADER,
          DFSConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADER_DEFAULT);
      useLegacyBlockReaderLocal = conf.getBoolean(
          DFSConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL,
          DFSConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL_DEFAULT);
      shortCircuitLocalReads = conf.getBoolean(
          HdfsClientConfigKeys.Read.ShortCircuit.KEY,
          HdfsClientConfigKeys.Read.ShortCircuit.DEFAULT);
      domainSocketDataTraffic = conf.getBoolean(
          DFSConfigKeys.DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC,
          DFSConfigKeys.DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC_DEFAULT);
      domainSocketPath = conf.getTrimmed(
          DFSConfigKeys.DFS_DOMAIN_SOCKET_PATH_KEY,
          DFSConfigKeys.DFS_DOMAIN_SOCKET_PATH_DEFAULT);

      // Dump the effective read-path settings once at debug level so that
      // misconfigured short-circuit setups are diagnosable from logs.
      if (LOG.isDebugEnabled()) {
        LOG.debug(DFSConfigKeys.DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL
            + " = " + useLegacyBlockReaderLocal);
        LOG.debug(HdfsClientConfigKeys.Read.ShortCircuit.KEY
            + " = " + shortCircuitLocalReads);
        LOG.debug(DFSConfigKeys.DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC
            + " = " + domainSocketDataTraffic);
        LOG.debug(DFSConfigKeys.DFS_DOMAIN_SOCKET_PATH_KEY
            + " = " + domainSocketPath);
      }

      skipShortCircuitChecksums = conf.getBoolean(
          HdfsClientConfigKeys.Read.ShortCircuit.SKIP_CHECKSUM_KEY,
          HdfsClientConfigKeys.Read.ShortCircuit.SKIP_CHECKSUM_DEFAULT);
      shortCircuitBufferSize = conf.getInt(
          HdfsClientConfigKeys.Read.ShortCircuit.BUFFER_SIZE_KEY,
          HdfsClientConfigKeys.Read.ShortCircuit.BUFFER_SIZE_DEFAULT);
      shortCircuitStreamsCacheSize = conf.getInt(
          HdfsClientConfigKeys.Read.ShortCircuit.STREAMS_CACHE_SIZE_KEY,
          HdfsClientConfigKeys.Read.ShortCircuit.STREAMS_CACHE_SIZE_DEFAULT);
      shortCircuitStreamsCacheExpiryMs = conf.getLong(
          HdfsClientConfigKeys.Read.ShortCircuit.STREAMS_CACHE_EXPIRY_MS_KEY,
          HdfsClientConfigKeys.Read.ShortCircuit.STREAMS_CACHE_EXPIRY_MS_DEFAULT);
      shortCircuitMmapEnabled = conf.getBoolean(
          HdfsClientConfigKeys.Mmap.ENABLED_KEY,
          HdfsClientConfigKeys.Mmap.ENABLED_DEFAULT);
      shortCircuitMmapCacheSize = conf.getInt(
          HdfsClientConfigKeys.Mmap.CACHE_SIZE_KEY,
          HdfsClientConfigKeys.Mmap.CACHE_SIZE_DEFAULT);
      shortCircuitMmapCacheExpiryMs = conf.getLong(
          HdfsClientConfigKeys.Mmap.CACHE_TIMEOUT_MS_KEY,
          HdfsClientConfigKeys.Mmap.CACHE_TIMEOUT_MS_DEFAULT);
      shortCircuitMmapCacheRetryTimeout = conf.getLong(
          HdfsClientConfigKeys.Mmap.RETRY_TIMEOUT_MS_KEY,
          HdfsClientConfigKeys.Mmap.RETRY_TIMEOUT_MS_DEFAULT);
      shortCircuitCacheStaleThresholdMs = conf.getLong(
          HdfsClientConfigKeys.ShortCircuit.REPLICA_STALE_THRESHOLD_MS_KEY,
          HdfsClientConfigKeys.ShortCircuit.REPLICA_STALE_THRESHOLD_MS_DEFAULT);
      shortCircuitSharedMemoryWatcherInterruptCheckMs = conf.getInt(
          DFSConfigKeys.DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS,
          DFSConfigKeys.DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS_DEFAULT);

      keyProviderCacheExpiryMs = conf.getLong(
          DFSConfigKeys.DFS_CLIENT_KEY_PROVIDER_CACHE_EXPIRY_MS,
          DFSConfigKeys.DFS_CLIENT_KEY_PROVIDER_CACHE_EXPIRY_DEFAULT);
    }

    /**
     * @return the socketCacheCapacity
     */
    public int getSocketCacheCapacity() {
      return socketCacheCapacity;
    }

    /**
     * @return the socketCacheExpiry
     */
    public long getSocketCacheExpiry() {
      return socketCacheExpiry;
    }

    public boolean isUseLegacyBlockReaderLocal() {
      return useLegacyBlockReaderLocal;
    }

    public String getDomainSocketPath() {
      return domainSocketPath;
    }

    public boolean isShortCircuitLocalReads() {
      return shortCircuitLocalReads;
    }

    public boolean isDomainSocketDataTraffic() {
      return domainSocketDataTraffic;
    }

    /**
     * @return the useLegacyBlockReader
     */
    public boolean isUseLegacyBlockReader() {
      return useLegacyBlockReader;
    }

    /**
     * @return the skipShortCircuitChecksums
     */
    public boolean isSkipShortCircuitChecksums() {
      return skipShortCircuitChecksums;
    }

    /**
     * @return the shortCircuitBufferSize
     */
    public int getShortCircuitBufferSize() {
      return shortCircuitBufferSize;
    }

    /**
     * @return the shortCircuitStreamsCacheSize
     */
    public int getShortCircuitStreamsCacheSize() {
      return shortCircuitStreamsCacheSize;
    }

    /**
     * @return the shortCircuitStreamsCacheExpiryMs
     */
    public long getShortCircuitStreamsCacheExpiryMs() {
      return shortCircuitStreamsCacheExpiryMs;
    }

    /**
     * @return the shortCircuitSharedMemoryWatcherInterruptCheckMs
     */
    public int getShortCircuitSharedMemoryWatcherInterruptCheckMs() {
      return shortCircuitSharedMemoryWatcherInterruptCheckMs;
    }

    /**
     * @return the shortCircuitMmapEnabled
     */
    public boolean isShortCircuitMmapEnabled() {
      return shortCircuitMmapEnabled;
    }

    /**
     * @return the shortCircuitMmapCacheSize
     */
    public int getShortCircuitMmapCacheSize() {
      return shortCircuitMmapCacheSize;
    }

    /**
     * @return the shortCircuitMmapCacheExpiryMs
     */
    public long getShortCircuitMmapCacheExpiryMs() {
      return shortCircuitMmapCacheExpiryMs;
    }

    /**
     * @return the shortCircuitMmapCacheRetryTimeout
     */
    public long getShortCircuitMmapCacheRetryTimeout() {
      return shortCircuitMmapCacheRetryTimeout;
    }

    /**
     * @return the shortCircuitCacheStaleThresholdMs
     */
    public long getShortCircuitCacheStaleThresholdMs() {
      return shortCircuitCacheStaleThresholdMs;
    }

    /**
     * @return the keyProviderCacheExpiryMs
     */
    public long getKeyProviderCacheExpiryMs() {
      return keyProviderCacheExpiryMs;
    }

    /**
     * Renders the effective configuration as a single key = value string,
     * mainly for log output. Note: not every field is included here
     * (e.g. domainSocketPath and the mmap enable flag are omitted).
     */
    public String confAsString() {
      StringBuilder builder = new StringBuilder();
      builder.append("shortCircuitStreamsCacheSize = ").
          append(shortCircuitStreamsCacheSize).
          append(", shortCircuitStreamsCacheExpiryMs = ").
          append(shortCircuitStreamsCacheExpiryMs).
          append(", shortCircuitMmapCacheSize = ").
          append(shortCircuitMmapCacheSize).
          append(", shortCircuitMmapCacheExpiryMs = ").
          append(shortCircuitMmapCacheExpiryMs).
          append(", shortCircuitMmapCacheRetryTimeout = ").
          append(shortCircuitMmapCacheRetryTimeout).
          append(", shortCircuitCacheStaleThresholdMs = ").
          append(shortCircuitCacheStaleThresholdMs).
          append(", socketCacheCapacity = ").
          append(socketCacheCapacity).
          append(", socketCacheExpiry = ").
          append(socketCacheExpiry).
          append(", shortCircuitLocalReads = ").
          append(shortCircuitLocalReads).
          append(", useLegacyBlockReaderLocal = ").
          append(useLegacyBlockReaderLocal).
          append(", domainSocketDataTraffic = ").
          append(domainSocketDataTraffic).
          append(", shortCircuitSharedMemoryWatcherInterruptCheckMs = ").
          append(shortCircuitSharedMemoryWatcherInterruptCheckMs).
          append(", keyProviderCacheExpiryMs = ").
          append(keyProviderCacheExpiryMs);
      return builder.toString();
    }
  }
}
package algorithms.bipartite;

import algorithms.util.PairInt;
import gnu.trove.iterator.TObjectIntIterator;
import gnu.trove.map.TIntIntMap;
import gnu.trove.map.TObjectIntMap;
import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.logging.Logger;
import junit.framework.TestCase;
import thirdparty.HungarianAlgorithm;

/**
 * Tests for the min-cost unbalanced bipartite assignment solver,
 * cross-checked against Hopcroft-Karp (cardinality) and the Hungarian
 * algorithm (cost) on randomly generated graphs.
 *
 * @author nichole
 */
public class MinCostUnbalancedAssignment3Test extends TestCase {

    private Logger log = Logger.getLogger(this.getClass().getName());

    public MinCostUnbalancedAssignment3Test() {
    }

    /**
     * Smoke test on a tiny 3x3 graph: hopcroftKarp should produce a
     * perfect matching of size 3.
     */
    public void test00() {
        log.info("test00");

        Graph g = getTestGraph00();

        GraphWithoutWeights g2 = new GraphWithoutWeights(g);
        HopcroftKarp hk = new HopcroftKarp();
        int[] matched = hk.hopcroftKarpV0(g2);
        log.info("hk matched=" + Arrays.toString(matched));

        MinCostUnbalancedAssignment bipartite =
            new MinCostUnbalancedAssignment();

        TIntIntMap m = new TIntIntHashMap();
        // NOTE(review): rM is never used after construction — presumably
        // this only exercises the ResidualDigraph constructor; confirm.
        ResidualDigraph rM = new ResidualDigraph(g, m);

        m = bipartite.hopcroftKarp(g, 3);

        assertEquals(3, m.size());
    }

    /**
     * Exercises flowAssign on the textbook graph from pg 49 and pins
     * three specific min-cost pairings.
     */
    public void test0() {
        log.info("test0");

        // test graphs on pg 49
        Graph g = getTestGraph0();

        /*
        for the hopcroft karp portion
        5 3 or 2 3 is equivalent
        4 4
        3 0
        1 1
        0 2
        */
        MinCostUnbalancedAssignment bipartite =
            new MinCostUnbalancedAssignment();

        TIntIntMap m = bipartite.flowAssign(g);

        assertTrue(3 <= m.size());
        assertEquals(m.get(1), 2);
        assertEquals(m.get(3), 0);
        assertEquals(m.get(4), 3);
    }

    /**
     * Randomized sweep over graph sizes and cost magnitudes; each graph
     * is built so that the unique optimal assignment is i -> (size-1-i),
     * which is asserted for both flowAssign and the Hungarian algorithm.
     * Timings for both solvers are printed for manual comparison.
     */
    public void test1() throws Exception {
        log.info("test1");

        SecureRandom sr = SecureRandom.getInstance("SHA1PRNG");
        long seed = System.currentTimeMillis();
        //seed = 1466321947621L;
        sr.setSeed(seed);
        log.info("SEED=" + seed);

        for (int size = 10; size <= 100; size *= 10) {
            for (int maxCost = 10; maxCost <= 10000; maxCost *= 10) {
                log.info("size=" + size + " maxCost=" + maxCost);
                Graph g = getTestGraph1(size, maxCost, sr);
                long t0 = System.currentTimeMillis();
                MinCostUnbalancedAssignment bipartite =
                    new MinCostUnbalancedAssignment();
                TIntIntMap m = bipartite.flowAssign(g);
                long t1 = System.currentTimeMillis();
                long tSec = (t1 - t0);
                System.out.println(tSec + " msec for flowAssign");
                log.info("size=" + size + " scale=" + maxCost
                    + " m.size=" + m.size());
                assertEquals(size, m.size());
                for (int i = 0; i < size; ++i) {
                    // the planted optimum is the anti-diagonal pairing
                    assertEquals((size - 1 - i), m.get(i));
                }
                assertNotNull(bipartite.getFinalFlowNetwork());

                // cross-check with the Hungarian algorithm on the same costs
                float[][] matrix = convert(g);
                long t2 = System.currentTimeMillis();
                HungarianAlgorithm ha = new HungarianAlgorithm();
                int[][] m2 = ha.computeAssignments(matrix);
                long t3 = System.currentTimeMillis();
                tSec = (t3 - t2);
                System.out.println(tSec + " msec for hungarian");
                log.info("size=" + size + " scale=" + maxCost
                    + " m.size=" + m.size());
                for (int i = 0; i < size; ++i) {
                    int idx1 = m2[i][0];
                    int idx2 = m2[i][1];
                    assertEquals((size - 1 - idx1), idx2);
                }
            }
        }
    }

    /**
     * Ten random trials on sparse graphs (one edge per left vertex, all
     * on the anti-diagonal); flowAssign must recover the full matching.
     */
    public void test2() {
        try {
            log.info("test2");
            int size = 10;
            int scale = 100;//1000000;
            log.info("size=" + size + " scale=" + scale);
            SecureRandom sr = SecureRandom.getInstance("SHA1PRNG");
            long seed = System.currentTimeMillis();
            //seed = 10070693215754L;
            sr.setSeed(seed);
            log.info("seed=" + seed);
            for (int nTest = 0; nTest < 10; ++nTest) {
                log.info("nTest=" + nTest);
                Graph g = getTestGraph2(size, scale, sr);
                MinCostUnbalancedAssignment bipartite =
                    new MinCostUnbalancedAssignment();
                TIntIntMap m = bipartite.flowAssign(g);
                log.info("size=" + size + " scale=" + scale
                    + " m.size=" + m.size());
                assertEquals(size, m.size());
                for (int i = 0; i < size; ++i) {
                    assertEquals((size - 1 - i), m.get(i));
                }
                assertNotNull(bipartite.getFinalFlowNetwork());
            }
        } catch(Throwable t) {
            t.printStackTrace();
            fail(t.getMessage());
        }
    }

    /**
     * Uniform-cost random graph: the min-cost matching size should equal
     * the Hopcroft-Karp maximal matching size.
     */
    public void test3() {
        log.info("test3");

        int size = 10;
        int maxCost = 10;

        /*
        - graph of size n for both sets
        - random number of edges for each
        - all edges have same cost
        --> expecting same results as hopcroft-karp, that is
            a maximal matching, but the random links may create
            less than maximal possibilities.
            the method is mostly to exercise the code to explore
            it further.
        */
        try {
            SecureRandom sr = SecureRandom.getInstance("SHA1PRNG");
            long seed = System.currentTimeMillis();
            //seed = 1464995162443L;
            sr.setSeed(seed);
            log.info("SEED=" + seed);

            Graph g = getTestGraph3(sr, size, maxCost);

            MinCostUnbalancedAssignment bipartite =
                new MinCostUnbalancedAssignment();

            HopcroftKarp hk = new HopcroftKarp();
            int[] matched = hk.hopcroftKarpV0(new GraphWithoutWeights(g));
            int nExpected = matched.length;
            log.info("size=" + size + " hk size=" + nExpected);

            TIntIntMap m = bipartite.flowAssign(g);

            log.info("size=" + size + " hk size=" + nExpected
                + " m.size=" + m.size());

            assertEquals(nExpected, m.size());

            assertNotNull(bipartite.getFinalFlowNetwork());
        } catch(Throwable t) {
            // NOTE(review): the Throwable is silently swallowed, so this
            // test can never fail — consider fail(t.getMessage()) as in
            // test2; confirm whether the swallow is intentional.
            //t.printStackTrace();
        }
    }

    /** 3x3 graph with a unique min-cost perfect matching on the diagonal. */
    private Graph getTestGraph00() {

        TObjectIntMap<PairInt> weights
            = new TObjectIntHashMap<PairInt>();

        /*
        0  1  2
        L  0  1  2
        R
        */
        weights.put(new PairInt(0, 0), 1);
        weights.put(new PairInt(0, 1), 2);
        weights.put(new PairInt(1, 0), 2);
        weights.put(new PairInt(1, 1), 1);
        weights.put(new PairInt(1, 2), 1);
        weights.put(new PairInt(2, 1), 2);
        weights.put(new PairInt(2, 2), 1);

        Graph g = new Graph(3, 3, weights, true);

        return g;
    }

    /** The 6x5 example graph from pg 49 used by test0. */
    private Graph getTestGraph0() {

        TObjectIntMap<PairInt> weights
            = new TObjectIntHashMap<PairInt>();

        /*
        for best cost, w/o regard to maximizing nMatches:
            1 2
            3 0
            4 3   unmatched: 0,2,5   1,4
        ------------------
        for maximizing number of matches, then min cost:
            results are same as Hopcroft-Karp for this example
            which is a matching size of 5:
            5 3 or 2 3 is equivalent
            4 4
            3 0
            1 1
            0 2
        -----
        so far, the full algorithm progresses to this, but can
        find no further augmenting paths:
            0 1
            1 1
            3 4
            4 3  <--- this one is min-cost, optimal for the pair
                      but it prevents 5 3
        */
        weights.put(new PairInt(0, 0), 2);
        weights.put(new PairInt(0, 2), 3);
        weights.put(new PairInt(1, 1), 2);
        weights.put(new PairInt(1, 2), 1);
        weights.put(new PairInt(2, 3), 2);
        weights.put(new PairInt(3, 0), 1);
        weights.put(new PairInt(3, 1), 2);
        weights.put(new PairInt(3, 4), 3);
        weights.put(new PairInt(4, 3), 1);
        weights.put(new PairInt(4, 4), 2);
        weights.put(new PairInt(5, 3), 2);

        /*
        5 3 or 2 3 is equivalent
        4 4
        3 0
        1 1
        0 2
        */
        Graph g = new Graph(6, 5, weights, true);

        return g;
    }

    /**
     * Fully connected size x size graph where edge (i, size-1-i) is
     * strictly cheaper than all other edges out of i, so the optimal
     * assignment is the anti-diagonal. A few extra equal-cost edges are
     * added to exercise tie handling.
     */
    private Graph getTestGraph1(int size, int maxCost,
        SecureRandom sr) throws NoSuchAlgorithmException {

        /*
        - graph of size n for both sets
        - highly connected, that is each vertex connected to
          all right vertexes
        - one min-cost best cost for a vertex and all other
          edges have larger costs than those to make the graph
          easier to write.
        - want the cost to use a multiple so can test that
          correct solution is obtained no matter the magnitude
          of costs (the weight scaling needs such exploration
          for the internal values of q and eps)
        - make graphs of size of power of 10 from 10 to 1 million.
        */
        TObjectIntMap<PairInt> weights
            = new TObjectIntHashMap<PairInt>();

        int minCostUpper = maxCost/10;
        if (minCostUpper < 2) {
            minCostUpper = 2;
        }

        for (int i = 0; i < size; ++i) {
            for (int j = 0; j < size; ++j) {
                int cost;
                if (j == (size - 1 - i)) {
                    // the planted cheap edge for vertex i
                    cost = sr.nextInt(minCostUpper - 1) + 1;
                } else {
                    cost = minCostUpper + sr.nextInt(maxCost - minCostUpper);
                }
                weights.put(new PairInt(i, j), cost);
            }
        }

        // add an extra edge with same answer as best answer for the node
        // 0 --> 3 --> 9
        // 0 --> 3 --> 8
        // 1 --> 3 --> 8
        PairInt p = new PairInt(0, size-1);
        int c = weights.get(p);
        PairInt p1 = new PairInt(1, size-2);
        int c1 = weights.get(p1);
        PairInt p2 = new PairInt(2, size-3);
        int c2 = weights.get(p2);
        // equalize the three cheapest planted edges to the minimum of
        // the three, creating ties without changing the optimum
        if ((c <= c1) && (c <= c2)) {
            weights.put(p1, c);
            weights.put(p2, c);
        } else if ((c1 <= c) && (c1 <= c2)) {
            weights.put(p, c1);
            weights.put(p2, c1);
        } else if ((c2 <= c) && (c2 <= c1)) {
            weights.put(p1, c2);
            weights.put(p2, c2);
        }

        Graph g = new Graph(size, size, weights, true);

        return g;
    }

    /**
     * Converts a Graph's sparse edge weights into the dense cost matrix
     * expected by HungarianAlgorithm; absent edges become Float.MAX_VALUE.
     */
    private float[][] convert(Graph g) {

        int n1 = g.getNLeft();
        int n2 = g.getNRight();

        float[][] cost = new float[n1][n2];
        for (int i = 0; i < n1; ++i) {
            cost[i] = new float[n2];
            Arrays.fill(cost[i], Float.MAX_VALUE);
        }

        TObjectIntIterator<PairInt> iter = g.getEdgeWeights().iterator();
        for (int i = g.getEdgeWeights().size(); i-- > 0;) {
            iter.advance();
            PairInt p = iter.key();
            cost[p.getX()][p.getY()] = iter.value();
        }

        return cost;
    }

    /**
     * Sparse graph: exactly one edge per left vertex, i -> (size-1-i),
     * with a random positive cost.
     */
    private Graph getTestGraph2(int size, int maxCost,
        SecureRandom sr) throws NoSuchAlgorithmException {

        /*
        - graph of size n for both sets
        - one edge only for each vertex
        - a random cost for the edges
        - graphs of size powers of 10, from 10 to 100,000?
        */
        TObjectIntMap<PairInt> weights
            = new TObjectIntHashMap<PairInt>();

        for (int i = 0; i < size; ++i) {
            int cost = sr.nextInt(maxCost - 1) + 1;
            weights.put(new PairInt(i, (size - 1 - i)), cost);
        }

        Graph g = new Graph(size, size, weights, true);

        return g;
    }

    /**
     * Random-topology graph where every present edge carries the same
     * cost, so min-cost matching degenerates to maximum matching.
     */
    private Graph getTestGraph3(SecureRandom sr,
        int size, int maxCost) throws NoSuchAlgorithmException {

        /*
        - graph of size n for both sets
        - random number of edges for each
        - all edges have same cost
        --> expecting same results as hopcroft-karp
        */
        TObjectIntMap<PairInt> weights
            = new TObjectIntHashMap<PairInt>();

        int cost = sr.nextInt(maxCost - 1) + 1;

        for (int i = 0; i < size; ++i) {
            for (int j = 0; j < size; ++j) {
                if (sr.nextBoolean()) {
                    continue;
                }
                weights.put(new PairInt(i, j), cost);
            }
        }

        Graph g = new Graph(size, size, weights, true);

        return g;
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.search;

import static org.elasticsearch.common.lucene.search.Queries.newLenientFieldQuery;
import static org.elasticsearch.common.lucene.search.Queries.newUnmappedFieldQuery;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.logging.log4j.LogManager;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.QueryBuilder;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.support.QueryParsers;

/**
 * Builds Lucene queries for "match"-style full text search: the input
 * text is analyzed and turned into a boolean, phrase, or phrase-prefix
 * query depending on {@link Type}. Subclasses can override the
 * {@code blend*} hooks to spread a term over multiple fields.
 */
public class MatchQuery {

    // NOTE(review): the logger is keyed to MappedFieldType.class rather
    // than MatchQuery.class — possibly intentional for deprecation
    // grouping; confirm before changing.
    private static final DeprecationLogger DEPRECATION_LOGGER =
        new DeprecationLogger(LogManager.getLogger(MappedFieldType.class));

    static final float DEFAULT_BOOST = 1.0f;

    public enum Type {
        /**
         * The text is analyzed and terms are added to a boolean query.
         */
        BOOLEAN(0),
        /**
         * The text is analyzed and used as a phrase query.
         */
        PHRASE(1),
        /**
         * The text is analyzed and used in a phrase query, with the last term acting as a prefix.
         */
        PHRASE_PREFIX(2);

        // stored wire/serialization ordinal — kept explicit so enum
        // reordering cannot change it
        private final int ordinal;

        Type(int ordinal) {
            this.ordinal = ordinal;
        }
    }

    public enum ZeroTermsQuery {
        NONE(0),
        ALL(1),
        // this is used internally to make sure that query_string and simple_query_string
        // ignores query part that removes all tokens.
        NULL(2);

        // stored wire/serialization ordinal — kept explicit so enum
        // reordering cannot change it
        private final int ordinal;

        ZeroTermsQuery(int ordinal) {
            this.ordinal = ordinal;
        }
    }

    /**
     * the default phrase slop
     */
    public static final int DEFAULT_PHRASE_SLOP = 0;

    /**
     * the default leniency setting
     */
    public static final boolean DEFAULT_LENIENCY = false;

    /**
     * the default zero terms query
     */
    public static final ZeroTermsQuery DEFAULT_ZERO_TERMS_QUERY = ZeroTermsQuery.NONE;

    protected final QueryShardContext context;

    // explicit analyzer override; when null the field's own search
    // (or search-quote) analyzer is used — see getAnalyzer(...)
    protected Analyzer analyzer;

    protected BooleanClause.Occur occur = BooleanClause.Occur.SHOULD;

    protected boolean enablePositionIncrements = true;

    protected int phraseSlop = DEFAULT_PHRASE_SLOP;

    // null means fuzzy matching is disabled
    protected Fuzziness fuzziness = null;

    protected int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength;

    protected int maxExpansions = FuzzyQuery.defaultMaxExpansions;

    protected boolean transpositions = FuzzyQuery.defaultTranspositions;

    protected MultiTermQuery.RewriteMethod fuzzyRewriteMethod;

    protected boolean lenient = DEFAULT_LENIENCY;

    protected ZeroTermsQuery zeroTermsQuery = DEFAULT_ZERO_TERMS_QUERY;

    // null disables the common-terms query path in parse(...)
    protected Float commonTermsCutoff = null;

    protected boolean autoGenerateSynonymsPhraseQuery = true;

    public MatchQuery(QueryShardContext context) {
        this.context = context;
    }

    /**
     * Sets the analyzer by name, resolved against the index's analyzers.
     *
     * @throws IllegalArgumentException if no analyzer with that name exists
     */
    public void setAnalyzer(String analyzerName) {
        this.analyzer = context.getMapperService().getIndexAnalyzers().get(analyzerName);
        if (analyzer == null) {
            throw new IllegalArgumentException("No analyzer found for [" + analyzerName + "]");
        }
    }

    public void setAnalyzer(Analyzer analyzer) {
        this.analyzer = analyzer;
    }

    public void setOccur(BooleanClause.Occur occur) {
        this.occur = occur;
    }

    public void setCommonTermsCutoff(Float cutoff) {
        this.commonTermsCutoff = cutoff;
    }

    public void setEnablePositionIncrements(boolean enablePositionIncrements) {
        this.enablePositionIncrements = enablePositionIncrements;
    }

    public void setPhraseSlop(int phraseSlop) {
        this.phraseSlop = phraseSlop;
    }

    public void setFuzziness(Fuzziness fuzziness) {
        this.fuzziness = fuzziness;
    }

    public void setFuzzyPrefixLength(int fuzzyPrefixLength) {
        this.fuzzyPrefixLength = fuzzyPrefixLength;
    }

    public void setMaxExpansions(int maxExpansions) {
        this.maxExpansions = maxExpansions;
    }

    public void setTranspositions(boolean transpositions) {
        this.transpositions = transpositions;
    }

    public void setFuzzyRewriteMethod(MultiTermQuery.RewriteMethod fuzzyRewriteMethod) {
        this.fuzzyRewriteMethod = fuzzyRewriteMethod;
    }

    public void setLenient(boolean lenient) {
        this.lenient = lenient;
    }

    public void setZeroTermsQuery(ZeroTermsQuery zeroTermsQuery) {
        this.zeroTermsQuery = zeroTermsQuery;
    }

    public void setAutoGenerateSynonymsPhraseQuery(boolean enabled) {
        this.autoGenerateSynonymsPhraseQuery = enabled;
    }

    /**
     * Returns the explicit analyzer if one was set, otherwise the field's
     * search analyzer (or search-quote analyzer for phrase queries).
     */
    protected Analyzer getAnalyzer(MappedFieldType fieldType, boolean quoted) {
        if (analyzer == null) {
            return quoted ? context.getSearchQuoteAnalyzer(fieldType) : context.getSearchAnalyzer(fieldType);
        } else {
            return analyzer;
        }
    }

    /**
     * Analyzes {@code value} against {@code fieldName} and builds the
     * query for the requested {@code type}. Returns an unmapped-field
     * query for unknown fields, and the configured zero-terms query when
     * analysis removes every token.
     */
    public Query parse(Type type, String fieldName, Object value) {
        MappedFieldType fieldType = context.fieldMapper(fieldName);
        if (fieldType == null) {
            return newUnmappedFieldQuery(fieldName);
        }
        final String field = fieldType.name();

        Analyzer analyzer = getAnalyzer(fieldType, type == Type.PHRASE);
        assert analyzer != null;

        /*
         * If a keyword analyzer is used, we know that further analysis isn't
         * needed and can immediately return a term query.
         */
        if (analyzer == Lucene.KEYWORD_ANALYZER) {
            return blendTermQuery(new Term(fieldName, value.toString()), fieldType);
        }

        MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType);
        builder.setEnablePositionIncrements(this.enablePositionIncrements);
        if (fieldType.hasPositions()) {
            builder.setAutoGenerateMultiTermSynonymsPhraseQuery(this.autoGenerateSynonymsPhraseQuery);
        } else {
            // without positions a synonym phrase query cannot be built
            builder.setAutoGenerateMultiTermSynonymsPhraseQuery(false);
        }

        Query query = null;
        switch (type) {
            case BOOLEAN:
                if (commonTermsCutoff == null) {
                    query = builder.createBooleanQuery(field, value.toString(), occur);
                } else {
                    query = builder.createCommonTermsQuery(field, value.toString(), occur, occur, commonTermsCutoff);
                }
                break;
            case PHRASE:
                query = builder.createPhraseQuery(field, value.toString(), phraseSlop);
                break;
            case PHRASE_PREFIX:
                query = builder.createPhrasePrefixQuery(field, value.toString(), phraseSlop, maxExpansions);
                break;
            default:
                throw new IllegalStateException("No type found for [" + type + "]");
        }

        if (query == null) {
            // the analyzer produced no tokens
            return zeroTermsQuery();
        } else {
            return query;
        }
    }

    /**
     * Builds a plain term query; when {@code lenient}, runtime failures
     * are converted into a lenient (no-op) field query instead of thrown.
     */
    protected final Query termQuery(MappedFieldType fieldType, BytesRef value, boolean lenient) {
        try {
            return new TermQuery(new Term(fieldType.name(), value));
        } catch (RuntimeException e) {
            if (lenient) {
                return newLenientFieldQuery(fieldType.name(), e);
            }
            throw e;
        }
    }

    /**
     * Maps the configured {@link ZeroTermsQuery} to the query returned
     * when analysis strips every token: null, match-none, or match-all.
     */
    protected Query zeroTermsQuery() {
        switch (zeroTermsQuery) {
            case NULL:
                return null;
            case NONE:
                return Queries.newMatchNoDocsQuery("Matching no documents because no terms present");
            case ALL:
                return Queries.newMatchAllQuery();
            default:
                throw new IllegalStateException("unknown zeroTermsQuery " + zeroTermsQuery);
        }
    }

    private class MatchQueryBuilder extends QueryBuilder {

        private final MappedFieldType mapper;

        /**
         * Creates a new QueryBuilder using the given analyzer.
         */
        MatchQueryBuilder(Analyzer analyzer, MappedFieldType mapper) {
            super(analyzer);
            this.mapper = mapper;
        }

        @Override
        protected Query newTermQuery(Term term, float boost) {
            // delegate to the outer class hook so subclasses can blend
            // the term across fields
            return blendTermQuery(term, mapper);
        }

        @Override
        protected Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException {
            try {
                checkForPositions(field);
                Query query = phraseQuery(field, stream, slop, enablePositionIncrements);
                if (query instanceof PhraseQuery) {
                    // synonyms that expand to multiple terms can return a phrase query.
                    return blendPhraseQuery((PhraseQuery) query, mapper);
                }
                return query;
            } catch (IllegalStateException e) {
                if (lenient) {
                    return newLenientFieldQuery(field, e);
                }
                throw e;
            } catch (IllegalArgumentException e) {
                // IAE is always downgraded to a lenient query; when the
                // user did not opt into leniency we emit a deprecation
                // warning so the behavior can become strict later
                if (lenient == false) {
                    DEPRECATION_LOGGER.deprecated(e.getMessage());
                }
                return newLenientFieldQuery(field, e);
            }
        }

        @Override
        protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException {
            try {
                checkForPositions(field);
                return multiPhraseQuery(field, stream, slop, enablePositionIncrements);
            } catch (IllegalStateException e) {
                if (lenient) {
                    return newLenientFieldQuery(field, e);
                }
                throw e;
            } catch (IllegalArgumentException e) {
                if (lenient == false) {
                    DEPRECATION_LOGGER.deprecated(e.getMessage());
                }
                return newLenientFieldQuery(field, e);
            }
        }

        // phrase queries require indexed position data
        private void checkForPositions(String field) {
            if (mapper.hasPositions() == false) {
                throw new IllegalStateException("field:[" + field + "] was indexed without position data; cannot run PhraseQuery");
            }
        }

        /**
         * Checks if graph analysis should be enabled for the field depending
         * on the provided {@link Analyzer}
         */
        protected Query createFieldQuery(Analyzer analyzer, BooleanClause.Occur operator, String field,
                                         String queryText, boolean quoted, int phraseSlop) {
            assert operator == BooleanClause.Occur.SHOULD || operator == BooleanClause.Occur.MUST;

            // Use the analyzer to get all the tokens, and then build an appropriate
            // query based on the analysis chain.
            try (TokenStream source = analyzer.tokenStream(field, queryText)) {
                if (source.hasAttribute(DisableGraphAttribute.class)) {
                    /*
                      A {@link TokenFilter} in this {@link TokenStream} disabled the graph analysis to avoid
                      paths explosion. See {@link org.elasticsearch.index.analysis.ShingleTokenFilterFactory} for details.
                     */
                    setEnableGraphQueries(false);
                }
                // NOTE(review): if super.createFieldQuery throws an
                // unchecked exception, graph queries stay disabled on this
                // builder — confirm whether a finally-restore is needed.
                Query query = super.createFieldQuery(source, operator, field, quoted, phraseSlop);
                setEnableGraphQueries(true);
                return query;
            } catch (IOException e) {
                throw new RuntimeException("Error analyzing query text", e);
            }
        }

        /**
         * Builds a phrase query from the analyzed text, then converts its
         * last position into a prefix (expanded at rewrite time).
         */
        public Query createPhrasePrefixQuery(String field, String queryText, int phraseSlop, int maxExpansions) {
            final Query query = createFieldQuery(getAnalyzer(), Occur.MUST, field, queryText, true, phraseSlop);
            return toMultiPhrasePrefix(query, phraseSlop, maxExpansions);
        }

        /**
         * Rewrites a phrase/multi-phrase/term query into a
         * {@link MultiPhrasePrefixQuery}, preserving any wrapping boost;
         * unrecognized query shapes are returned unchanged.
         */
        private Query toMultiPhrasePrefix(final Query query, int phraseSlop, int maxExpansions) {
            // unwrap nested boosts, accumulating the product
            float boost = 1;
            Query innerQuery = query;
            while (innerQuery instanceof BoostQuery) {
                BoostQuery bq = (BoostQuery) innerQuery;
                boost *= bq.getBoost();
                innerQuery = bq.getQuery();
            }
            // NOTE(review): this tests the OUTER query for SpanQuery while
            // the other branches test innerQuery — presumably equivalent
            // because spans are not boost-wrapped here; confirm.
            if (query instanceof SpanQuery) {
                return toSpanQueryPrefix((SpanQuery) query, boost);
            }
            final MultiPhrasePrefixQuery prefixQuery = new MultiPhrasePrefixQuery();
            prefixQuery.setMaxExpansions(maxExpansions);
            prefixQuery.setSlop(phraseSlop);
            if (innerQuery instanceof PhraseQuery) {
                PhraseQuery pq = (PhraseQuery) innerQuery;
                Term[] terms = pq.getTerms();
                int[] positions = pq.getPositions();
                for (int i = 0; i < terms.length; i++) {
                    prefixQuery.add(new Term[]{terms[i]}, positions[i]);
                }
                return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost);
            } else if (innerQuery instanceof MultiPhraseQuery) {
                MultiPhraseQuery pq = (MultiPhraseQuery) innerQuery;
                Term[][] terms = pq.getTermArrays();
                int[] positions = pq.getPositions();
                for (int i = 0; i < terms.length; i++) {
                    prefixQuery.add(terms[i], positions[i]);
                }
                return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost);
            } else if (innerQuery instanceof TermQuery) {
                prefixQuery.add(((TermQuery) innerQuery).getTerm());
                return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost);
            }
            return query;
        }

        /**
         * Converts the trailing term of a span query into a prefix match,
         * recursing through or-queries; other span shapes pass through.
         */
        private Query toSpanQueryPrefix(SpanQuery query, float boost) {
            if (query instanceof SpanTermQuery) {
                SpanMultiTermQueryWrapper<PrefixQuery> ret =
                    new SpanMultiTermQueryWrapper<>(new PrefixQuery(((SpanTermQuery) query).getTerm()));
                return boost == 1 ? ret : new BoostQuery(ret, boost);
            } else if (query instanceof SpanNearQuery) {
                SpanNearQuery spanNearQuery = (SpanNearQuery) query;
                SpanQuery[] clauses = spanNearQuery.getClauses();
                // only the last clause becomes a prefix
                if (clauses[clauses.length - 1] instanceof SpanTermQuery) {
                    clauses[clauses.length - 1] = new SpanMultiTermQueryWrapper<>(
                        new PrefixQuery(((SpanTermQuery) clauses[clauses.length - 1]).getTerm())
                    );
                }
                SpanNearQuery newQuery = new SpanNearQuery(clauses, spanNearQuery.getSlop(), spanNearQuery.isInOrder());
                return boost == 1 ? newQuery : new BoostQuery(newQuery, boost);
            } else if (query instanceof SpanOrQuery) {
                SpanOrQuery orQuery = (SpanOrQuery) query;
                SpanQuery[] clauses = new SpanQuery[orQuery.getClauses().length];
                for (int i = 0; i < clauses.length; i++) {
                    clauses[i] = (SpanQuery) toSpanQueryPrefix(orQuery.getClauses()[i], 1);
                }
                return boost == 1 ? new SpanOrQuery(clauses) : new BoostQuery(new SpanOrQuery(clauses), boost);
            } else {
                return query;
            }
        }

        /**
         * Builds a boolean query and, when it really is a BooleanQuery of
         * plain terms, converts it to an {@link ExtendedCommonTermsQuery}.
         */
        public Query createCommonTermsQuery(String field, String queryText,
                                            Occur highFreqOccur,
                                            Occur lowFreqOccur,
                                            float maxTermFrequency) {
            Query booleanQuery = createBooleanQuery(field, queryText, lowFreqOccur);
            if (booleanQuery instanceof BooleanQuery) {
                BooleanQuery bq = (BooleanQuery) booleanQuery;
                return boolToExtendedCommonTermsQuery(bq, highFreqOccur, lowFreqOccur, maxTermFrequency);
            }
            return booleanQuery;
        }

        private Query boolToExtendedCommonTermsQuery(BooleanQuery bq,
                                                     Occur highFreqOccur,
                                                     Occur lowFreqOccur,
                                                     float maxTermFrequency) {
            ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency);
            for (BooleanClause clause : bq.clauses()) {
                if ((clause.getQuery() instanceof TermQuery) == false) {
                    // mixed clause types (e.g. synonyms) — fall back to
                    // the plain boolean query
                    return bq;
                }
                query.add(((TermQuery) clause.getQuery()).getTerm());
            }
            return query;
        }
    }

    /**
     * Called when a phrase query is built with {@link QueryBuilder#analyzePhrase(String, TokenStream, int)}
     * Subclass can override this function to blend this query to multiple fields.
     */
    protected Query blendPhraseQuery(PhraseQuery query, MappedFieldType fieldType) {
        return query;
    }

    protected Query blendTermsQuery(Term[] terms, MappedFieldType fieldType) {
        return new SynonymQuery(terms);
    }

    /**
     * Converts a single term into a query, honoring the fuzziness
     * settings when configured; lenient mode swallows runtime failures.
     */
    protected Query blendTermQuery(Term term, MappedFieldType fieldType) {
        if (fuzziness != null) {
            try {
                fieldType.failIfNotIndexed();
                Query query = new FuzzyQuery(term, fuzziness.asDistance(term.text()),
                    fuzzyPrefixLength, maxExpansions, transpositions);
                // NOTE(review): query was just constructed as a FuzzyQuery,
                // so this instanceof is always true — likely a leftover
                // guard; confirm.
                if (query instanceof FuzzyQuery) {
                    QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod);
                }
                return query;
            } catch (RuntimeException e) {
                if (lenient) {
                    return newLenientFieldQuery(fieldType.name(), e);
                } else {
                    throw e;
                }
            }
        }
        return termQuery(fieldType, term.bytes(), lenient);
    }

    /**
     * Consumes the token stream into a {@link MultiPhraseQuery}, grouping
     * tokens that share a position (increment 0) into one array entry.
     */
    private static Query multiPhraseQuery(String field, TokenStream stream,
                                          int slop,
                                          boolean enablePositionIncrements) throws IOException {
        MultiPhraseQuery.Builder mpqb = new MultiPhraseQuery.Builder();
        mpqb.setSlop(slop);

        TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);

        PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class);
        int position = -1;

        List<Term> multiTerms = new ArrayList<>();
        stream.reset();
        while (stream.incrementToken()) {
            int positionIncrement = posIncrAtt.getPositionIncrement();
            if (positionIncrement > 0 && multiTerms.size() > 0) {
                // flush the accumulated same-position terms
                if (enablePositionIncrements) {
                    mpqb.add(multiTerms.toArray(new Term[0]), position);
                } else {
                    mpqb.add(multiTerms.toArray(new Term[0]));
                }
                multiTerms.clear();
            }
            position += positionIncrement;
            multiTerms.add(new Term(field, termAtt.getBytesRef()));
        }

        // flush the final group
        if (enablePositionIncrements) {
            mpqb.add(multiTerms.toArray(new Term[0]), position);
        } else {
            mpqb.add(multiTerms.toArray(new Term[0]));
        }
        return mpqb.build();
    }

    /**
     * Consumes the token stream into a {@link PhraseQuery}; when position
     * increments are disabled every token is treated as consecutive.
     */
    private static Query phraseQuery(String field, TokenStream stream, int slop,
                                     boolean enablePosIncrements) throws IOException {
        PhraseQuery.Builder builder = new PhraseQuery.Builder();
        builder.setSlop(slop);

        TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
        PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class);
        int position = -1;

        stream.reset();
        while (stream.incrementToken()) {
            if (enablePosIncrements) {
                position += posIncrAtt.getPositionIncrement();
            } else {
                position += 1;
            }
            builder.add(new Term(field, termAtt.getBytesRef()), position);
        }

        return builder.build();
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.generation;

import com.intellij.application.options.CodeStyle;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.CommentUtil;
import com.intellij.codeInsight.actions.MultiCaretCodeInsightActionHandler;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.hint.HintManagerImpl;
import com.intellij.codeInsight.hint.HintUtil;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.ide.highlighter.custom.CustomFileTypeLexer;
import com.intellij.lang.*;
import com.intellij.lexer.Lexer;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.editor.highlighter.HighlighterIterator;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.impl.AbstractFileType;
import com.intellij.openapi.fileTypes.impl.CustomSyntaxTableFileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Couple;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.psi.codeStyle.Indent;
import com.intellij.psi.templateLanguages.MultipleLangCommentProvider;
import com.intellij.psi.templateLanguages.OuterLanguageElement;
import com.intellij.psi.templateLanguages.TemplateLanguageFileViewProvider;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtilBase;
import com.intellij.ui.LightweightHint;
import com.intellij.util.containers.IntArrayList;
import com.intellij.util.text.CharArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.awt.*;
import java.util.ArrayList;
import java.util.List;

/**
 * Handler for the "Comment with Block Comment" editor action.
 * <p>
 * Toggles a block comment at each caret: if a block comment is found at/around the caret
 * or selection, it is removed ({@link #uncommentRange}); otherwise the selection (or an
 * empty prefix+suffix pair at the caret) is commented ({@link #commentRange}).
 * <p>
 * NOTE(review): this class is stateful per-invocation — {@link #invoke} stores project,
 * editor, caret, file and document in fields which all other instance methods read.
 * It is invoked once per caret by the multi-caret framework; presumably not thread-safe
 * and intended for EDT use only (standard for editor write actions) — confirm against
 * {@code MultiCaretCodeInsightActionHandler}'s contract.
 */
public class CommentByBlockCommentHandler extends MultiCaretCodeInsightActionHandler {
  // Per-invocation context, populated at the top of invoke() and read by every helper.
  private Project myProject;
  private Editor myEditor;
  private Caret myCaret;
  private @NotNull PsiFile myFile;
  private Document myDocument;
  private Commenter myCommenter;
  // Opaque state created by a SelfManagingCommenter; EMPTY_STATE when the commenter returns null.
  private CommenterDataHolder mySelfManagedCommenterData;

  // Warning text to show as a lightweight hint after the operation, or null for no warning.
  private String myWarning;
  // Optional location the warning refers to; hint is shown there if it is visible on screen.
  private RangeMarker myWarningLocation;

  /**
   * Entry point: toggles a block comment for one caret.
   * <p>
   * Flow: resolve the {@link Commenter} for the language at the caret, obtain the block
   * comment prefix/suffix (delegating to the commenter itself when it is a
   * {@link SelfManagingCommenter}), then either uncomment an existing commented range,
   * comment the selection, or insert an empty {@code prefix+suffix} pair at the caret.
   */
  @Override
  public void invoke(@NotNull Project project, @NotNull Editor editor, @NotNull Caret caret, @NotNull PsiFile file) {
    myProject = project;
    myEditor = editor;
    myCaret = caret;
    myFile = file;
    myWarning = null;
    myWarningLocation = null;
    myDocument = editor.getDocument();

    FeatureUsageTracker.getInstance().triggerFeatureUsed("codeassists.comment.block");
    final Commenter commenter = findCommenter(myFile, myEditor, caret);
    if (commenter == null) return;
    myCommenter = commenter;

    final String prefix;
    final String suffix;

    if (commenter instanceof SelfManagingCommenter) {
      final SelfManagingCommenter selfManagingCommenter = (SelfManagingCommenter)commenter;
      mySelfManagedCommenterData = selfManagingCommenter.createBlockCommentingState(
        caret.getSelectionStart(),
        caret.getSelectionEnd(),
        myDocument,
        myFile
      );
      if (mySelfManagedCommenterData == null) {
        mySelfManagedCommenterData = SelfManagingCommenter.EMPTY_STATE;
      }

      prefix = selfManagingCommenter.getBlockCommentPrefix(
        caret.getSelectionStart(),
        myDocument,
        mySelfManagedCommenterData
      );
      suffix = selfManagingCommenter.getBlockCommentSuffix(
        caret.getSelectionEnd(),
        myDocument,
        mySelfManagedCommenterData
      );
    }
    else {
      prefix = commenter.getBlockCommentPrefix();
      suffix = commenter.getBlockCommentSuffix();
    }
    // Language has no block comment syntax — nothing to do.
    if (prefix == null || suffix == null) return;

    TextRange commentedRange = findCommentedRange(commenter);
    if (commentedRange != null) {
      final int commentStart = commentedRange.getStartOffset();
      final int commentEnd = commentedRange.getEndOffset();
      int selectionStart = commentStart;
      int selectionEnd = commentEnd;
      if (myCaret.hasSelection()) {
        selectionStart = myCaret.getSelectionStart();
        selectionEnd = myCaret.getSelectionEnd();
      }
      // If both ends of the found comment lie outside the selection, the selection itself
      // is not "the comment" — comment the selection; otherwise remove the existing comment.
      if ((commentStart < selectionStart || commentStart >= selectionEnd) && (commentEnd <= selectionStart || commentEnd > selectionEnd)) {
        commentRange(selectionStart, selectionEnd, prefix, suffix, commenter);
      }
      else {
        uncommentRange(commentedRange, trim(prefix), trim(suffix), commenter);
      }
    }
    else {
      if (myCaret.hasSelection()) {
        int selectionStart = myCaret.getSelectionStart();
        int selectionEnd = myCaret.getSelectionEnd();
        if (commenter instanceof IndentedCommenter) {
          final Boolean value = ((IndentedCommenter)commenter).forceIndentedLineComment();
          if (value != null && value == Boolean.TRUE) {
            // Expand the selection to whole lines when the commenter demands indented comments.
            selectionStart = myDocument.getLineStartOffset(myDocument.getLineNumber(selectionStart));
            selectionEnd = myDocument.getLineEndOffset(myDocument.getLineNumber(selectionEnd));
          }
        }
        commentRange(selectionStart, selectionEnd, prefix, suffix, commenter);
      }
      else {
        EditorUtil.fillVirtualSpaceUntilCaret(editor);
        int caretOffset = myCaret.getOffset();
        if (commenter instanceof IndentedCommenter) {
          final Boolean value = ((IndentedCommenter)commenter).forceIndentedLineComment();
          if (value != null && value == Boolean.TRUE) {
            // No selection: comment the whole caret line instead of inserting an empty pair.
            final int lineNumber = myDocument.getLineNumber(caretOffset);
            final int start = myDocument.getLineStartOffset(lineNumber);
            final int end = myDocument.getLineEndOffset(lineNumber);
            commentRange(start, end, prefix, suffix, commenter);
            return;
          }
        }
        // No selection: insert an empty "prefixsuffix" pair and park the caret between them.
        myDocument.insertString(caretOffset, prefix + suffix);
        myCaret.moveToOffset(caretOffset + prefix.length());
      }
    }

    showMessageIfNeeded();
  }

  /**
   * Shows {@link #myWarning} (if set) as a lightweight hint, preferring
   * {@link #myWarningLocation} when that position is currently visible.
   */
  private void showMessageIfNeeded() {
    if (myWarning != null) {
      myEditor.getScrollingModel().disableAnimation();
      myEditor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
      myEditor.getScrollingModel().enableAnimation();

      LogicalPosition hintPosition = myCaret.getLogicalPosition();
      if (myWarningLocation != null) {
        LogicalPosition targetPosition = myEditor.offsetToLogicalPosition(myWarningLocation.getStartOffset());
        Point targetPoint = myEditor.logicalPositionToXY(targetPosition);
        if (myEditor.getScrollingModel().getVisibleArea().contains(targetPoint)) {
          hintPosition = targetPosition;
        }
      }
      LightweightHint hint = new LightweightHint(HintUtil.createInformationLabel(myWarning));
      Point p = HintManagerImpl.getHintPosition(hint, myEditor, hintPosition, HintManager.ABOVE);
      HintManagerImpl.getInstanceImpl().showEditorHint(hint, myEditor, p, 0, 0, false);
    }
  }

  /** Null-safe {@link String#trim()}. */
  @Nullable
  private static String trim(String s) {
    return s == null ? null : s.trim();
  }

  /**
   * Returns {@code true} if the selection consists solely of whitespace and comments
   * (including injected/outer-language fragments), i.e. it is safe to treat the selection
   * as a candidate commented block. No selection counts as {@code true}.
   */
  private boolean testSelectionForNonComments() {
    if (!myCaret.hasSelection()) {
      return true;
    }
    TextRange range = new TextRange(myCaret.getSelectionStart(), myCaret.getSelectionEnd() - 1);
    for (PsiElement element = myFile.findElementAt(range.getStartOffset());
         element != null && range.intersects(element.getTextRange());
         element = element.getNextSibling()) {
      if (element instanceof OuterLanguageElement) {
        if (!isInjectedWhiteSpace(range, (OuterLanguageElement)element)) {
          return false;
        }
      }
      else {
        if (!isWhiteSpaceOrComment(element, range)) {
          return false;
        }
      }
    }
    return true;
  }

  /**
   * Checks that the PSI of the outer-language fragment, within {@code range}, is only
   * whitespace/comments in the fragment's own language.
   */
  private boolean isInjectedWhiteSpace(@NotNull TextRange range, @NotNull OuterLanguageElement element) {
    PsiElement psi = element.getContainingFile().getViewProvider().getPsi(element.getLanguage());
    if (psi == null) {
      return false;
    }
    List<PsiElement> injectedElements = PsiTreeUtil.getInjectedElements(element);
    for (PsiElement el : injectedElements) {
      if (!isWhiteSpaceOrComment(el, range)) {
        return false;
      }
    }
    return true;
  }

  /**
   * Whitespace/comment check restricted to the part of {@code element} that overlaps
   * {@code range}: either the element is whitespace/comment, or the overlapping text
   * itself is blank.
   */
  private boolean isWhiteSpaceOrComment(@NotNull PsiElement element, @NotNull TextRange range) {
    final TextRange textRange = element.getTextRange();
    TextRange intersection = range.intersection(textRange);
    if (intersection == null) {
      return false;
    }
    // Re-base the intersection onto element-local coordinates, clamped to the element's length.
    intersection = TextRange.create(Math.max(intersection.getStartOffset() - textRange.getStartOffset(), 0),
                                    Math.min(intersection.getEndOffset() - textRange.getStartOffset(), textRange.getLength()));
    return isWhiteSpaceOrComment(element) || intersection.substring(element.getText()).trim().length() == 0;
  }

  /** True if the element is whitespace or is (inside) a PSI comment. */
  private static boolean isWhiteSpaceOrComment(PsiElement element) {
    return element instanceof PsiWhiteSpace || PsiTreeUtil.getParentOfType(element, PsiComment.class, false) != null;
  }

  /**
   * Locates an existing block comment to remove, or {@code null} if there is none.
   * <p>
   * Strategies, in order: custom-file-type lexing; {@link CustomUncommenter} search inside
   * the selection; {@link SelfManagingCommenter} range lookup; plain prefix/suffix match on
   * the trimmed selection; finally, the PSI comment at the caret.
   */
  @Nullable
  private TextRange findCommentedRange(final Commenter commenter) {
    final CharSequence text = myDocument.getCharsSequence();
    final FileType fileType = myFile.getFileType();
    if (fileType instanceof CustomSyntaxTableFileType) {
      // Custom file types have no PSI comments; lex from the nearest prefix before the caret.
      Lexer lexer = new CustomFileTypeLexer(((CustomSyntaxTableFileType)fileType).getSyntaxTable());
      final int caretOffset = myCaret.getOffset();
      int commentStart = CharArrayUtil.lastIndexOf(text, commenter.getBlockCommentPrefix(), caretOffset);
      if (commentStart == -1) return null;

      lexer.start(text, commentStart, text.length());
      if (lexer.getTokenType() == CustomHighlighterTokenType.MULTI_LINE_COMMENT
          && lexer.getTokenEnd() >= caretOffset) {
        return new TextRange(commentStart, lexer.getTokenEnd());
      }
      return null;
    }

    final String prefix;
    final String suffix;
    // Custom uncommenter is able to find commented block inside of selected text
    final String selectedText = myCaret.getSelectedText();
    if ((commenter instanceof CustomUncommenter) && selectedText != null) {
      final TextRange commentedRange = ((CustomUncommenter)commenter).findMaximumCommentedRange(selectedText);
      if (commentedRange == null) {
        return null;
      }
      // Uncommenter returns a range relative to the selection start, so shift it to make it absolute.
      return commentedRange.shiftRight(myCaret.getSelectionStart());
    }

    if (commenter instanceof SelfManagingCommenter) {
      SelfManagingCommenter selfManagingCommenter = (SelfManagingCommenter)commenter;

      prefix = selfManagingCommenter.getBlockCommentPrefix(
        myCaret.getSelectionStart(),
        myDocument,
        mySelfManagedCommenterData
      );
      suffix = selfManagingCommenter.getBlockCommentSuffix(
        myCaret.getSelectionEnd(),
        myDocument,
        mySelfManagedCommenterData
      );
    }
    else {
      prefix = trim(commenter.getBlockCommentPrefix());
      suffix = trim(commenter.getBlockCommentSuffix());
    }
    if (prefix == null || suffix == null) return null;

    TextRange commentedRange;

    if (commenter instanceof SelfManagingCommenter) {
      commentedRange = ((SelfManagingCommenter)commenter).getBlockCommentRange(
        myCaret.getSelectionStart(),
        myCaret.getSelectionEnd(),
        myDocument,
        mySelfManagedCommenterData
      );
    }
    else {
      // Only treat the selection as a comment if it contains nothing but whitespace/comments.
      if (!testSelectionForNonComments()) {
        return null;
      }
      commentedRange = getSelectedComments(text, prefix, suffix);
    }
    if (commentedRange == null) {
      // Fall back to the PSI comment the caret sits in, if its text looks like prefix…suffix.
      PsiElement comment = findCommentAtCaret();
      if (comment != null) {
        String commentText = comment.getText();
        if (commentText.startsWith(prefix) && commentText.endsWith(suffix)) {
          commentedRange = comment.getTextRange();
        }
      }
    }
    return commentedRange;
  }

  /**
   * Returns the trimmed selection as a commented range if it starts with {@code prefix}
   * and ends with {@code suffix}; otherwise {@code null}.
   */
  @Nullable
  private TextRange getSelectedComments(CharSequence text, String prefix, String suffix) {
    TextRange commentedRange = null;
    if (myCaret.hasSelection()) {
      int selectionStart = myCaret.getSelectionStart();
      selectionStart = CharArrayUtil.shiftForward(text, selectionStart, " \t\n");
      int selectionEnd = myCaret.getSelectionEnd() - 1;
      selectionEnd = CharArrayUtil.shiftBackward(text, selectionEnd, " \t\n") + 1;
      if (selectionEnd - selectionStart >= prefix.length() + suffix.length() &&
          CharArrayUtil.regionMatches(text, selectionStart, prefix) &&
          CharArrayUtil.regionMatches(text, selectionEnd - suffix.length(), suffix)) {
        commentedRange = new TextRange(selectionStart, selectionEnd);
      }
    }
    return commentedRange;
  }

  /**
   * Resolves the commenter for the language at the caret. Custom (table-defined)
   * file types carry their own commenter.
   */
  @Nullable
  private static Commenter findCommenter(PsiFile file, Editor editor, Caret caret) {
    final FileType fileType = file.getFileType();
    if (fileType instanceof AbstractFileType) {
      return ((AbstractFileType)fileType).getCommenter();
    }

    Language lang = PsiUtilBase.getLanguageInEditor(caret, file.getProject());

    return getCommenter(file, editor, lang, lang);
  }

  /**
   * Picks the commenter for a file/language pair, honoring
   * {@link MultipleLangCommentProvider} extensions and template-language files
   * (where the template data language defers to the view provider's base language).
   */
  @Nullable
  public static Commenter getCommenter(final PsiFile file, final Editor editor,
                                       final Language lineStartLanguage, final Language lineEndLanguage) {
    final FileViewProvider viewProvider = file.getViewProvider();

    for (MultipleLangCommentProvider provider : MultipleLangCommentProvider.EP_NAME.getExtensions()) {
      if (provider.canProcess(file, viewProvider)) {
        return provider.getLineCommenter(file, editor, lineStartLanguage, lineEndLanguage);
      }
    }

    final Language fileLanguage = file.getLanguage();
    Language lang = lineStartLanguage == null || LanguageCommenters.INSTANCE.forLanguage(lineStartLanguage) == null ||
                    fileLanguage.getBaseLanguage() == lineStartLanguage // file language is a more specific dialect of the line language
                    ? fileLanguage
                    : lineStartLanguage;

    if (viewProvider instanceof TemplateLanguageFileViewProvider &&
        lang == ((TemplateLanguageFileViewProvider)viewProvider).getTemplateDataLanguage()) {
      lang = viewProvider.getBaseLanguage();
    }

    return LanguageCommenters.INSTANCE.forLanguage(lang);
  }

  /**
   * Finds the PSI comment at/near the caret. The offset is nudged inward so a caret
   * sitting exactly on a comment boundary still resolves to it; with a selection,
   * the comment must be fully contained in the selection.
   */
  @Nullable
  private PsiElement findCommentAtCaret() {
    int offset = myCaret.getOffset();
    TextRange range = new TextRange(myCaret.getSelectionStart(), myCaret.getSelectionEnd());
    if (offset == range.getEndOffset()) {
      offset--;
    }
    if (offset <= range.getStartOffset()) {
      offset++;
    }
    PsiElement comment = getCommentAtOffset(offset);
    if (comment == null || myCaret.hasSelection() && !range.contains(comment.getTextRange())) {
      return null;
    }

    return comment;
  }

  /** Returns the {@link PsiComment} containing {@code offset}, or {@code null}. */
  @Nullable
  private PsiComment getCommentAtOffset(int offset) {
    PsiElement elt = myFile.getViewProvider().findElementAt(offset);
    if (elt == null) return null;
    return PsiTreeUtil.getParentOfType(elt, PsiComment.class, false);
  }

  /**
   * Wraps {@code [startOffset, endOffset)} in a block comment and selects the result.
   * <p>
   * Refuses (with a hint) if either boundary would split an existing comment token.
   * When the range covers whole lines, the prefix/suffix are placed on their own lines,
   * indented to the minimal indent of the covered lines unless
   * {@code BLOCK_COMMENT_AT_FIRST_COLUMN} is set.
   */
  public void commentRange(int startOffset, int endOffset, String commentPrefix, String commentSuffix, Commenter commenter) {
    if (breaksExistingComment(startOffset, true) || breaksExistingComment(endOffset, false)) {
      myWarning = CodeInsightBundle.message("block.comment.intersects.existing.comment");
      return;
    }
    final CharSequence chars = myDocument.getCharsSequence();
    LogicalPosition caretPosition = myCaret.getLogicalPosition();

    if (startOffset == 0 || chars.charAt(startOffset - 1) == '\n') {
      if (endOffset == myDocument.getTextLength() || endOffset > 0 && chars.charAt(endOffset - 1) == '\n') {
        // Whole-lines case: put prefix/suffix on their own lines, matching the block's indent.
        CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(myProject);
        CommonCodeStyleSettings settings = CodeStyle.getLanguageSettings(myFile);
        String space;
        if (!settings.BLOCK_COMMENT_AT_FIRST_COLUMN) {
          final FileType fileType = myFile.getFileType();
          int line1 = myEditor.offsetToLogicalPosition(startOffset).line;
          int line2 = myEditor.offsetToLogicalPosition(endOffset - 1).line;
          Indent minIndent = CommentUtil.getMinLineIndent(myProject, myDocument, line1, line2, fileType);
          if (minIndent == null) {
            minIndent = codeStyleManager.zeroIndent();
          }
          space = codeStyleManager.fillIndent(minIndent, fileType);
        }
        else {
          space = "";
        }
        final StringBuilder nestingPrefix = new StringBuilder(space).append(commentPrefix);
        if (!commentPrefix.endsWith("\n")) {
          nestingPrefix.append("\n");
        }
        final StringBuilder nestingSuffix = new StringBuilder(space);
        nestingSuffix.append(commentSuffix.startsWith("\n") ? commentSuffix.substring(1) : commentSuffix);
        nestingSuffix.append("\n");
        TextRange range = insertNestedComments(startOffset, endOffset, nestingPrefix.toString(), nestingSuffix.toString(), commenter);
        if (range != null) {
          myCaret.setSelection(range.getStartOffset(), range.getEndOffset());
          LogicalPosition pos = new LogicalPosition(caretPosition.line + 1, caretPosition.column);
          myCaret.moveToLogicalPosition(pos);
        }
        return;
      }
    }

    TextRange range = insertNestedComments(startOffset, endOffset, commentPrefix, commentSuffix, commenter);
    if (range != null) {
      myCaret.setSelection(range.getStartOffset(), range.getEndOffset());
      LogicalPosition pos = new LogicalPosition(caretPosition.line, caretPosition.column + commentPrefix.length());
      myCaret.moveToLogicalPosition(pos);
    }
  }

  /**
   * Returns {@code true} if inserting at {@code offset} would land inside an existing
   * line/block/doc comment token (per the editor highlighter). With
   * {@code includingAfterLineComment}, an offset immediately after a line comment also
   * counts, unless the commenter defines a line-comment suffix.
   */
  private boolean breaksExistingComment(int offset, boolean includingAfterLineComment) {
    if (!(myCommenter instanceof CodeDocumentationAwareCommenter) || !(myEditor instanceof EditorEx) || offset == 0) return false;
    CodeDocumentationAwareCommenter commenter = (CodeDocumentationAwareCommenter)myCommenter;
    HighlighterIterator it = ((EditorEx)myEditor).getHighlighter().createIterator(offset - 1);
    IElementType tokenType = it.getTokenType();
    return (tokenType != null &&
            (it.getEnd() > offset && (tokenType == commenter.getLineCommentTokenType() ||
                                      tokenType == commenter.getBlockCommentTokenType() ||
                                      tokenType == commenter.getDocumentationCommentTokenType()) ||
             includingAfterLineComment && it.getEnd() == offset && tokenType == commenter.getLineCommentTokenType() &&
             !(commenter instanceof CommenterWithLineSuffix)));
  }

  /** True when highlighter-based detection of block comment tokens is available. */
  private boolean canDetectBlockComments() {
    return myEditor instanceof EditorEx && myCommenter instanceof CodeDocumentationAwareCommenter &&
           ((CodeDocumentationAwareCommenter)myCommenter).getBlockCommentTokenType() != null;
  }

  // should be called only if 'canDetectBlockComments' returns 'true'
  private TextRange getBlockCommentAt(int offset) {
    CodeDocumentationAwareCommenter commenter = (CodeDocumentationAwareCommenter)myCommenter;
    HighlighterIterator it = ((EditorEx)myEditor).getHighlighter().createIterator(offset);
    if (it.getTokenType() == commenter.getBlockCommentTokenType()) {
      return new TextRange(it.getStart(), it.getEnd());
    }
    // Doc comments count as block comments when their delimiters extend the block delimiters.
    if (docCommentIsBlockComment(commenter)) {
      PsiComment comment = getCommentAtOffset(offset);
      if (comment != null && commenter.isDocumentationComment(comment)) {
        return comment.getTextRange();
      }
    }
    return null;
  }

  /**
   * True if the documentation comment delimiters are an extension of the block comment
   * delimiters (e.g. doc prefix starts with the block prefix).
   */
  private static boolean docCommentIsBlockComment(CodeDocumentationAwareCommenter commenter) {
    return commenter.getBlockCommentPrefix() != null && commenter.getDocumentationCommentPrefix() != null &&
           commenter.getDocumentationCommentPrefix().startsWith(commenter.getBlockCommentPrefix()) &&
           commenter.getBlockCommentSuffix() != null && commenter.getDocumentationCommentSuffix() != null &&
           commenter.getDocumentationCommentSuffix().endsWith(commenter.getBlockCommentSuffix());
  }

  /**
   * Handles one nested delimiter while commenting: either replaces it with its
   * "commented" spelling ({@code commented != null}) or closes/reopens the comment
   * around it by inserting {@code toInsert}. Returns the length delta applied to the
   * document so callers can keep their running {@code shift} accurate.
   * Boundary delimiters (at the selection edges) are left untouched.
   */
  private int doBoundCommentingAndGetShift(int offset,
                                           String commented,
                                           int skipLength,
                                           String toInsert,
                                           boolean skipBrace,
                                           TextRange selection) {
    if (commented == null &&
        (offset == selection.getStartOffset() ||
         offset + (skipBrace ? skipLength : 0) == selection.getEndOffset())) {
      return 0;
    }
    if (commented == null) {
      myDocument.insertString(offset + (skipBrace ? skipLength : 0), toInsert);
      return toInsert.length();
    }
    else {
      myDocument.replaceString(offset, offset + skipLength, commented);
      return commented.length() - skipLength;
    }
  }

  /**
   * Inserts a block comment around {@code [startOffset, endOffset)}, neutralizing any
   * nested prefix/suffix occurrences so the result stays well-formed, and returns the
   * resulting commented range (tracked via a {@link RangeMarker} through subsequent
   * escaping). Mutations are applied back-to-front so earlier offsets stay valid.
   * Returns {@code null} when a wrapping suffix makes the operation unsafe.
   */
  private TextRange insertNestedComments(int startOffset,
                                         int endOffset,
                                         String commentPrefix,
                                         String commentSuffix,
                                         Commenter commenter) {
    if (commenter instanceof SelfManagingCommenter) {
      final SelfManagingCommenter selfManagingCommenter = (SelfManagingCommenter)commenter;
      return selfManagingCommenter.insertBlockComment(
        startOffset,
        endOffset,
        myDocument,
        mySelfManagedCommenterData
      );
    }

    String normalizedPrefix = commentPrefix.trim();
    String normalizedSuffix = commentSuffix.trim();
    IntArrayList nestedCommentPrefixes = new IntArrayList();
    IntArrayList nestedCommentSuffixes = new IntArrayList();
    String commentedPrefix = commenter.getCommentedBlockCommentPrefix();
    String commentedSuffix = commenter.getCommentedBlockCommentSuffix();
    CharSequence chars = myDocument.getCharsSequence();
    boolean canDetectBlockComments = canDetectBlockComments();
    boolean warnAboutNestedComments = false;
    // Scan the range for nested comment delimiters that must be escaped or worked around.
    for (int i = startOffset; i < endOffset; ++i) {
      if (CharArrayUtil.regionMatches(chars, i, normalizedPrefix)) {
        if (commentedPrefix == null && canDetectBlockComments) {
          TextRange commentRange = getBlockCommentAt(i);
          // skipping prefixes outside of comments (e.g. in string literals) and inside comments
          if (commentRange == null || commentRange.getStartOffset() != i) continue;
          else warnAboutNestedComments = true;
        }
        nestedCommentPrefixes.add(i);
      }
      else if (CharArrayUtil.regionMatches(chars, i, normalizedSuffix)) {
        if (commentedSuffix == null && canDetectBlockComments) {
          TextRange commentRange = getBlockCommentAt(i);
          if (commentRange == null) {
            // A dangling suffix inside the range would terminate the new comment early — abort.
            myWarning = CodeInsightBundle.message("block.comment.wrapping.suffix");
            myWarningLocation = myDocument.createRangeMarker(i, i);
            return null;
          }
        }
        nestedCommentSuffixes.add(i);
      }
    }
    if (warnAboutNestedComments) {
      myWarning = CodeInsightBundle.message("block.comment.nested.comment", nestedCommentPrefixes.size());
      myWarningLocation = myDocument.createRangeMarker(nestedCommentPrefixes.get(0),
                                                       nestedCommentPrefixes.get(0) + normalizedPrefix.length());
    }
    int shift = 0;
    // Skip inserting the closing suffix if a nested suffix already sits flush at the end.
    if (!(commentedSuffix == null &&
          !nestedCommentSuffixes.isEmpty() &&
          nestedCommentSuffixes.get(nestedCommentSuffixes.size() - 1) + commentSuffix.length() == endOffset)) {
      myDocument.insertString(endOffset, commentSuffix);
      shift += commentSuffix.length();
    }

    // process nested comments in back order
    int i = nestedCommentPrefixes.size() - 1;
    int j = nestedCommentSuffixes.size() - 1;
    final TextRange selection = new TextRange(startOffset, endOffset);
    while (i >= 0 && j >= 0) {
      final int prefixIndex = nestedCommentPrefixes.get(i);
      final int suffixIndex = nestedCommentSuffixes.get(j);
      if (prefixIndex > suffixIndex) {
        shift += doBoundCommentingAndGetShift(prefixIndex, commentedPrefix, normalizedPrefix.length(), commentSuffix, false, selection);
        --i;
      }
      else {
        //if (insertPos < myDocument.getTextLength() && Character.isWhitespace(myDocument.getCharsSequence().charAt(insertPos))) {
        //  insertPos = suffixIndex + commentSuffix.length();
        //}
        shift += doBoundCommentingAndGetShift(suffixIndex, commentedSuffix, normalizedSuffix.length(), commentPrefix, true, selection);
        --j;
      }
    }
    while (i >= 0) {
      final int prefixIndex = nestedCommentPrefixes.get(i);
      shift += doBoundCommentingAndGetShift(prefixIndex, commentedPrefix, normalizedPrefix.length(), commentSuffix, false, selection);
      --i;
    }
    while (j >= 0) {
      final int suffixIndex = nestedCommentSuffixes.get(j);
      shift += doBoundCommentingAndGetShift(suffixIndex, commentedSuffix, normalizedSuffix.length(), commentPrefix, true, selection);
      --j;
    }
    // Skip inserting the opening prefix if a nested prefix already sits flush at the start.
    if (!(commentedPrefix == null && !nestedCommentPrefixes.isEmpty() && nestedCommentPrefixes.get(0) == startOffset)) {
      myDocument.insertString(startOffset, commentPrefix);
      shift += commentPrefix.length();
    }

    RangeMarker marker = myDocument.createRangeMarker(startOffset, endOffset + shift);
    try {
      return processDocument(myDocument, marker, commenter, true);
    }
    finally {
      marker.dispose();
    }
  }

  /**
   * Runs the commenter's escape/unescape pass (if it is an {@link EscapingCommenter})
   * over the marked range and returns the range's final extent.
   */
  static TextRange processDocument(Document document, RangeMarker marker, Commenter commenter, boolean escape) {
    if (commenter instanceof EscapingCommenter) {
      if (escape) {
        ((EscapingCommenter)commenter).escape(document, marker);
      }
      else {
        ((EscapingCommenter)commenter).unescape(document, marker);
      }
    }
    return TextRange.create(marker.getStartOffset(), marker.getEndOffset());
  }

  /**
   * Index of the next occurrence of {@code pattern} at/after {@code position},
   * or {@code text.length()} when absent — a sentinel that simplifies min() scans.
   */
  private static int getNearest(String text, String pattern, int position) {
    int result = text.indexOf(pattern, position);
    return result == -1 ? text.length() : result;
  }

  /**
   * While uncommenting, restores escaped ("commented") nested delimiters inside
   * {@code range} back to real block-comment delimiters. Tracks nesting depth so only
   * the outermost commented prefix and the matching last suffix of each group are
   * replaced; replacements run back-to-front to keep recorded positions valid.
   */
  static void commentNestedComments(@NotNull Document document, TextRange range, Commenter commenter) {
    final int offset = range.getStartOffset();
    final IntArrayList toReplaceWithComments = new IntArrayList();
    final IntArrayList prefixes = new IntArrayList();

    final String text = document.getCharsSequence().subSequence(range.getStartOffset(), range.getEndOffset()).toString();
    final String commentedPrefix = commenter.getCommentedBlockCommentPrefix();
    final String commentedSuffix = commenter.getCommentedBlockCommentSuffix();
    final String commentPrefix = commenter.getBlockCommentPrefix();
    final String commentSuffix = commenter.getBlockCommentSuffix();
    int nearestSuffix = getNearest(text, commentedSuffix, 0);
    int nearestPrefix = getNearest(text, commentedPrefix, 0);
    int level = 0;
    int lastSuffix = -1;
    for (int i = Math.min(nearestPrefix, nearestSuffix); i < text.length(); i = Math.min(nearestPrefix, nearestSuffix)) {
      if (i > nearestPrefix) {
        nearestPrefix = getNearest(text, commentedPrefix, i);
        continue;
      }
      if (i > nearestSuffix) {
        nearestSuffix = getNearest(text, commentedSuffix, i);
        continue;
      }
      if (i == nearestPrefix) {
        if (level <= 0) {
          if (lastSuffix != -1) {
            toReplaceWithComments.add(lastSuffix);
          }
          level = 1;
          lastSuffix = -1;
          toReplaceWithComments.add(i);
          prefixes.add(i);
        }
        else {
          level++;
        }
        nearestPrefix = getNearest(text, commentedPrefix, nearestPrefix + 1);
      }
      else {
        lastSuffix = i;
        level--;
        nearestSuffix = getNearest(text, commentedSuffix, nearestSuffix + 1);
      }
    }
    if (lastSuffix != -1) {
      toReplaceWithComments.add(lastSuffix);
    }

    int prefixIndex = prefixes.size() - 1;
    for (int i = toReplaceWithComments.size() - 1; i >= 0; i--) {
      int position = toReplaceWithComments.get(i);
      if (prefixIndex >= 0 && position == prefixes.get(prefixIndex)) {
        prefixIndex--;
        document.replaceString(offset + position, offset + position + commentedPrefix.length(), commentPrefix);
      }
      else {
        document.replaceString(offset + position, offset + position + commentedSuffix.length(), commentSuffix);
      }
    }
  }

  /**
   * Expands a deletion range {@code [delOffset1, delOffset2)} to swallow the whole line
   * when only whitespace surrounds it — so removing a delimiter that sits on its own line
   * removes the line (including its trailing CR/LF or CRLF) rather than leaving a blank one.
   */
  private TextRange expandRange(int delOffset1, int delOffset2) {
    CharSequence chars = myDocument.getCharsSequence();
    int offset1 = CharArrayUtil.shiftBackward(chars, delOffset1 - 1, " \t");
    if (offset1 < 0 || chars.charAt(offset1) == '\n' || chars.charAt(offset1) == '\r') {
      int offset2 = CharArrayUtil.shiftForward(chars, delOffset2, " \t");
      if (offset2 == myDocument.getTextLength() || chars.charAt(offset2) == '\r' || chars.charAt(offset2) == '\n') {
        delOffset1 = offset1 + 1;
        if (offset2 < myDocument.getTextLength()) {
          delOffset2 = offset2 + 1;
          if (chars.charAt(offset2) == '\r' && offset2 + 1 < myDocument.getTextLength() && chars.charAt(offset2 + 1) == '\n') {
            delOffset2++;
          }
        }
      }
    }
    return new TextRange(delOffset1, delOffset2);
  }

  /**
   * Computes the (expanded) deletion ranges for the opening and closing delimiters of
   * the comment block occupying {@code range}. When the block does not end with the
   * suffix, the "close" range is empty at the block's end.
   */
  private Couple<TextRange> findCommentBlock(TextRange range, String commentPrefix, String commentSuffix) {
    CharSequence chars = myDocument.getCharsSequence();
    int startOffset = range.getStartOffset();
    boolean endsProperly = CharArrayUtil.regionMatches(chars, range.getEndOffset() - commentSuffix.length(), commentSuffix);

    TextRange start = expandRange(startOffset, startOffset + commentPrefix.length());

    TextRange end;
    if (endsProperly) {
      end = expandRange(range.getEndOffset() - commentSuffix.length(), range.getEndOffset());
    }
    else {
      end = new TextRange(range.getEndOffset(), range.getEndOffset());
    }

    return Couple.of(start, end);
  }

  /**
   * Removes the block comment(s) occupying {@code range}: deletes each open/close
   * delimiter pair (back-to-front so earlier offsets stay valid), restores escaped
   * nested delimiters in between, then runs the commenter's unescape pass.
   */
  public void uncommentRange(TextRange range, String commentPrefix, String commentSuffix, Commenter commenter) {
    if (commenter instanceof SelfManagingCommenter) {
      final SelfManagingCommenter selfManagingCommenter = (SelfManagingCommenter)commenter;
      selfManagingCommenter.uncommentBlockComment(
        range.getStartOffset(),
        range.getEndOffset(),
        myDocument,
        mySelfManagedCommenterData
      );
      return;
    }

    String text = myDocument.getCharsSequence().subSequence(range.getStartOffset(), range.getEndOffset()).toString();
    int startOffset = range.getStartOffset();
    //boolean endsProperly = CharArrayUtil.regionMatches(chars, range.getEndOffset() - commentSuffix.length(), commentSuffix);
    List<Couple<TextRange>> ranges = new ArrayList<>();

    if (commenter instanceof CustomUncommenter) {
      /*
        In case of a custom uncommenter we ask it for the list of
        [commentOpen-start, commentOpen-end], [commentClose-start, commentClose-end]
        pairs and shift them to absolute offsets.
       */
      CustomUncommenter customUncommenter = (CustomUncommenter)commenter;
      for (Couple<TextRange> coupleFromCommenter : customUncommenter.getCommentRangesToDelete(text)) {
        TextRange openComment = coupleFromCommenter.first.shiftRight(startOffset);
        TextRange closeComment = coupleFromCommenter.second.shiftRight(startOffset);
        ranges.add(Couple.of(openComment, closeComment));
      }
    }
    else {
      // If the commenter is not custom, collect the delimiter pairs ourselves.
      int position = 0;
      while (true) {
        int start = getNearest(text, commentPrefix, position);
        if (start == text.length()) {
          break;
        }
        position = start;
        int end = getNearest(text, commentSuffix, position + commentPrefix.length()) + commentSuffix.length();
        position = end;
        Couple<TextRange> pair =
          findCommentBlock(new TextRange(start + startOffset, end + startOffset), commentPrefix, commentSuffix);
        ranges.add(pair);
      }
    }

    RangeMarker marker = myDocument.createRangeMarker(range);
    try {
      for (int i = ranges.size() - 1; i >= 0; i--) {
        Couple<TextRange> toDelete = ranges.get(i);
        myDocument.deleteString(toDelete.first.getStartOffset(), toDelete.first.getEndOffset());
        int shift = toDelete.first.getEndOffset() - toDelete.first.getStartOffset();
        myDocument.deleteString(toDelete.second.getStartOffset() - shift, toDelete.second.getEndOffset() - shift);
        if (commenter.getCommentedBlockCommentPrefix() != null) {
          commentNestedComments(myDocument, new TextRange(toDelete.first.getEndOffset() - shift, toDelete.second.getStartOffset() - shift), commenter);
        }
      }

      processDocument(myDocument, marker, commenter, false);
    }
    finally {
      marker.dispose();
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.txn.compactor; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StringableMap; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.CompactionRequest; import org.apache.hadoop.hive.metastore.api.CompactionType; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.ShowCompactRequest; import org.apache.hadoop.hive.metastore.api.ShowCompactResponse; import org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.txn.TxnStore; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.junit.After; import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.DataOutputStream; 
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

/**
 * Tests for the worker thread and its MR jobs.
 * todo: most delta files in this test suite use txn id range, i.e. [N,N+M]
 * That means that they all look like they were created by compaction or by streaming api.
 * Delta files created by SQL should have [N,N] range (and a suffix in v1.3 and later)
 * Need to change some of these to have better test coverage.
 */
public class TestWorker extends CompactorTest {
  static final private String CLASS_NAME = TestWorker.class.getName();
  static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME);

  public TestWorker() throws Exception {
    super();
  }

  @Test
  public void nothing() throws Exception {
    // Test that the whole things works when there's nothing in the queue.  This is just a
    // survival test.
    startWorker();
  }

  /** Round-trips a {@link StringableMap} through its string form, including null key/value. */
  @Test
  public void stringableMap() throws Exception {
    // Empty map case
    StringableMap m = new StringableMap(new HashMap<String, String>());
    String s = m.toString();
    Assert.assertEquals("0:", s);
    m = new StringableMap(s);
    Assert.assertEquals(0, m.size());

    Map<String, String> base = new HashMap<String, String>();
    base.put("mary", "poppins");
    base.put("bert", null);
    base.put(null, "banks");
    m = new StringableMap(base);
    s = m.toString();
    m = new StringableMap(s);
    Assert.assertEquals(3, m.size());
    // Track which of the three expected entries we have seen during iteration.
    Map<String, Boolean> saw = new HashMap<String, Boolean>(3);
    saw.put("mary", false);
    saw.put("bert", false);
    saw.put(null, false);
    for (Map.Entry<String, String> e : m.entrySet()) {
      saw.put(e.getKey(), true);
      if ("mary".equals(e.getKey())) Assert.assertEquals("poppins", e.getValue());
      else if ("bert".equals(e.getKey())) Assert.assertNull(e.getValue());
      else if (null == e.getKey()) Assert.assertEquals("banks", e.getValue());
      else Assert.fail("Unexpected value " + e.getKey());
    }
    Assert.assertEquals(3, saw.size());
    Assert.assertTrue(saw.get("mary"));
    Assert.assertTrue(saw.get("bert"));
    Assert.assertTrue(saw.get(null));
  }

  /** Round-trips a {@link CompactorMR.StringableList} of Paths through its string form. */
  @Test
  public void stringableList() throws Exception {
    // Empty list case
    CompactorMR.StringableList ls = new CompactorMR.StringableList();
    String s = ls.toString();
    Assert.assertEquals("0:", s);
    ls = new CompactorMR.StringableList(s);
    Assert.assertEquals(0, ls.size());

    ls = new CompactorMR.StringableList();
    ls.add(new Path("/tmp"));
    ls.add(new Path("/usr"));
    s = ls.toString();
    // Serialization order of the two entries is not guaranteed, so accept either.
    Assert.assertTrue("Expected 2:4:/tmp4:/usr or 2:4:/usr4:/tmp, got " + s,
        "2:4:/tmp4:/usr".equals(s) || "2:4:/usr4:/tmp".equals(s));
    ls = new CompactorMR.StringableList(s);
    Assert.assertEquals(2, ls.size());
    boolean sawTmp = false, sawUsr = false;
    for (Path p : ls) {
      if ("/tmp".equals(p.toString())) sawTmp = true;
      else if ("/usr".equals(p.toString())) sawUsr = true;
      else Assert.fail("Unexpected path " + p.toString());
    }
    Assert.assertTrue(sawTmp);
    Assert.assertTrue(sawUsr);
  }

  /** Write/readFields round trip of a CompactorInputSplit with a non-null base directory. */
  @Test
  public void inputSplit() throws Exception {
    String basename = "/warehouse/foo/base_1";
    String delta1 = "/warehouse/foo/delta_2_3";
    String delta2 = "/warehouse/foo/delta_4_7";

    HiveConf conf = new HiveConf();
    Path file = new Path(System.getProperty("java.io.tmpdir") +
        System.getProperty("file.separator") + "newWriteInputSplitTest");
    FileSystem fs = FileSystem.get(conf);
    FSDataOutputStream os = fs.create(file);
    // 10 lines of 52 bytes each = 520 bytes, matched by the getLength() assertion below.
    for (int i = 0; i < 10; i++) {
      os.writeBytes("mary had a little lamb its fleece was white as snow\n");
    }
    os.close();
    List<Path> files = new ArrayList<Path>(1);
    files.add(file);

    Path[] deltas = new Path[2];
    deltas[0] = new Path(delta1);
    deltas[1] = new Path(delta2);

    CompactorMR.CompactorInputSplit split =
        new CompactorMR.CompactorInputSplit(conf, 3, files, new Path(basename), deltas);

    Assert.assertEquals(520L, split.getLength());
    String[] locations = split.getLocations();
    Assert.assertEquals(1, locations.length);
    Assert.assertEquals("localhost", locations[0]);

    // Serialize the split, then deserialize into a fresh instance and verify fields survive.
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutput out = new DataOutputStream(buf);
    split.write(out);

    split = new CompactorMR.CompactorInputSplit();
    DataInput in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
    split.readFields(in);

    Assert.assertEquals(3, split.getBucket());
    Assert.assertEquals(basename, split.getBaseDir().toString());
    deltas = split.getDeltaDirs();
    Assert.assertEquals(2, deltas.length);
    Assert.assertEquals(delta1, deltas[0].toString());
    Assert.assertEquals(delta2, deltas[1].toString());
  }

  /** Same round trip as {@link #inputSplit()}, but with a null base directory. */
  @Test
  public void inputSplitNullBase() throws Exception {
    String delta1 = "/warehouse/foo/delta_2_3";
    String delta2 = "/warehouse/foo/delta_4_7";

    HiveConf conf = new HiveConf();
    Path file = new Path(System.getProperty("java.io.tmpdir") +
        System.getProperty("file.separator") + "newWriteInputSplitTest");
    FileSystem fs = FileSystem.get(conf);
    FSDataOutputStream os = fs.create(file);
    for (int i = 0; i < 10; i++) {
      os.writeBytes("mary had a little lamb its fleece was white as snow\n");
    }
    os.close();
    List<Path> files = new ArrayList<Path>(1);
    files.add(file);

    Path[] deltas = new Path[2];
    deltas[0] = new Path(delta1);
    deltas[1] = new Path(delta2);

    CompactorMR.CompactorInputSplit split =
        new CompactorMR.CompactorInputSplit(conf, 3, files, null, deltas);

    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutput out = new DataOutputStream(buf);
    split.write(out);

    split = new CompactorMR.CompactorInputSplit();
    DataInput in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
    split.readFields(in);

    Assert.assertEquals(3, split.getBucket());
    // Null base must survive the round trip as null, not become an empty path.
    Assert.assertNull(split.getBaseDir());
    deltas = split.getDeltaDirs();
    Assert.assertEquals(2, deltas.length);
    Assert.assertEquals(delta1, deltas[0].toString());
    Assert.assertEquals(delta2, deltas[1].toString());
  }

  /**
   * Requests a minor compaction on a sorted table; the directory count is unchanged
   * afterwards — presumably because sorted tables are not compacted (TODO confirm).
   */
  @Test
  public void sortedTable() throws Exception {
    List<Order> sortCols = new ArrayList<Order>(1);
    sortCols.add(new Order("b", 1));

    Table t = newTable("default", "st", false, new HashMap<String, String>(), sortCols, false);
    addBaseFile(t, null, 20L, 20);
    addDeltaFile(t, null, 21L, 22L, 2);
    addDeltaFile(t, null, 23L, 24L, 2);
    addDeltaFile(t, null, 21L, 24L, 4);

    burnThroughTransactions(25);

    CompactionRequest rqst = new CompactionRequest("default", "st", CompactionType.MINOR);
    txnHandler.compact(rqst);

    startWorker();

    // There should still be four directories in the location.
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
    Assert.assertEquals(4, stat.length);
  }

  /** Partition-level variant of {@link #sortedTable()}. */
  @Test
  public void sortedPartition() throws Exception {
    List<Order> sortCols = new ArrayList<Order>(1);
    sortCols.add(new Order("b", 1));

    Table t = newTable("default", "sp", true, new HashMap<String, String>(), sortCols, false);
    Partition p = newPartition(t, "today", sortCols);
    addBaseFile(t, p, 20L, 20);
    addDeltaFile(t, p, 21L, 22L, 2);
    addDeltaFile(t, p, 23L, 24L, 2);
    addDeltaFile(t, p, 21L, 24L, 4);

    burnThroughTransactions(25);

    CompactionRequest rqst = new CompactionRequest("default", "sp", CompactionType.MINOR);
    rqst.setPartitionname("ds=today");
    txnHandler.compact(rqst);

    startWorker();

    // There should still be four directories in the location.
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(p.getSd().getLocation()));
    Assert.assertEquals(4, stat.length);
  }

  /** Minor compaction of a table with a base: expects a new compacted delta_21_24. */
  @Test
  public void minorTableWithBase() throws Exception {
    LOG.debug("Starting minorTableWithBase");
    Table t = newTable("default", "mtwb", false);

    addBaseFile(t, null, 20L, 20);
    addDeltaFile(t, null, 21L, 22L, 2);
    addDeltaFile(t, null, 23L, 24L, 2);

    burnThroughTransactions(25);

    CompactionRequest rqst = new CompactionRequest("default", "mtwb", CompactionType.MINOR);
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    // There should now be 4 directories in the location: the base, the two original
    // deltas, and the new compacted delta (the originals are removed later, by the cleaner).
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
    Assert.assertEquals(4, stat.length);

    // Find the new delta file and make sure it has the right contents
    boolean sawNewDelta = false;
    for (int i = 0; i < stat.length; i++) {
      if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21, 24))) {
        sawNewDelta = true;
        FileStatus[] buckets = fs.listStatus(stat[i].getPath());
        Assert.assertEquals(2, buckets.length);
        Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertEquals(208L, buckets[0].getLen());
        Assert.assertEquals(208L, buckets[1].getLen());
      } else {
        LOG.debug("This is not the delta file you are looking for " +
            stat[i].getPath().getName());
      }
    }
    Assert.assertTrue(sawNewDelta);
  }

  /**
   * Minor compaction with an open transaction (23) in the middle of the delta range:
   * compaction must not include deltas at or beyond the open txn.
   * todo: fix https://issues.apache.org/jira/browse/HIVE-9995
   * @throws Exception
   */
  @Test
  public void minorWithOpenInMiddle() throws Exception {
    LOG.debug("Starting minorWithOpenInMiddle");
    Table t = newTable("default", "mtwb", false);

    addBaseFile(t, null, 20L, 20);
    addDeltaFile(t, null, 21L, 22L, 2);
    addDeltaFile(t, null, 23L, 25L, 3);
    addLengthFile(t, null, 23L, 25L, 3);
    addDeltaFile(t, null, 26L, 27L, 2);
    // Txn 23 is left open.
    burnThroughTransactions(27, new HashSet<Long>(Arrays.asList(23L)), null);

    CompactionRequest rqst = new CompactionRequest("default", "mtwb", CompactionType.MINOR);
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    // There should now be 4 directories in the location (see the sorted-name
    // assertions below for the exact expected layout).
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
    Assert.assertEquals(4, stat.length);

    // Find the new delta file and make sure it has the right contents
    Arrays.sort(stat);
    Assert.assertEquals("base_20", stat[0].getPath().getName());
    // Compaction stopped before the open txn 23, so only 21-22 was compacted.
    Assert.assertEquals(makeDeltaDirNameCompacted(21, 22), stat[1].getPath().getName());
    Assert.assertEquals(makeDeltaDirName(23, 25), stat[2].getPath().getName());
    Assert.assertEquals(makeDeltaDirName(26, 27), stat[3].getPath().getName());
  }

  /** Minor compaction with aborted txns (24, 25): aborted data is compacted away. */
  @Test
  public void minorWithAborted() throws Exception {
    LOG.debug("Starting minorWithAborted");
    Table t = newTable("default", "mtwb", false);

    addBaseFile(t, null, 20L, 20);
    addDeltaFile(t, null, 21L, 22L, 2);
    addDeltaFile(t, null, 23L, 25L, 3);
    addLengthFile(t, null, 23L, 25L, 3);
    addDeltaFile(t, null, 26L, 27L, 2);
    // Txns 24 and 25 are aborted.
    burnThroughTransactions(27, null, new HashSet<Long>(Arrays.asList(24L, 25L)));

    CompactionRequest rqst = new CompactionRequest("default", "mtwb", CompactionType.MINOR);
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    // There should still now be 5 directories in the location
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
    Assert.assertEquals(5, stat.length);

    // Find the new delta file and make sure it has the right contents
    Arrays.sort(stat);
    Assert.assertEquals("base_20", stat[0].getPath().getName());
    Assert.assertEquals(makeDeltaDirName(21, 22), stat[1].getPath().getName());
    // Aborted txns do not block compaction, so the full 21-27 range was compacted.
    Assert.assertEquals(makeDeltaDirNameCompacted(21, 27), stat[2].getPath().getName());
    Assert.assertEquals(makeDeltaDirName(23, 25), stat[3].getPath().getName());
    Assert.assertEquals(makeDeltaDirName(26, 27), stat[4].getPath().getName());
  }

  /** Partition-level variant of {@link #minorTableWithBase()}. */
  @Test
  public void minorPartitionWithBase() throws Exception {
    Table t = newTable("default", "mpwb", true);
    Partition p = newPartition(t, "today");

    addBaseFile(t, p, 20L, 20);
    addDeltaFile(t, p, 21L, 22L, 2);
    addDeltaFile(t, p, 23L, 24L, 2);

    burnThroughTransactions(25);

    CompactionRequest rqst = new CompactionRequest("default", "mpwb", CompactionType.MINOR);
    rqst.setPartitionname("ds=today");
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    // There should still be four directories in the location.
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(p.getSd().getLocation()));
    Assert.assertEquals(4, stat.length);

    // Find the new delta file and make sure it has the right contents
    boolean sawNewDelta = false;
    for (int i = 0; i < stat.length; i++) {
      if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21, 24))) {
        sawNewDelta = true;
        FileStatus[] buckets = fs.listStatus(stat[i].getPath());
        Assert.assertEquals(2, buckets.length);
        Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertEquals(208L, buckets[0].getLen());
        Assert.assertEquals(208L, buckets[1].getLen());
      } else {
        LOG.debug("This is not the delta file you are looking for " +
            stat[i].getPath().getName());
      }
    }
    Assert.assertTrue(sawNewDelta);
  }

  /** Minor compaction of a table that has deltas but no base file. */
  @Test
  public void minorTableNoBase() throws Exception {
    // NOTE(review): log message says "minorTableWithBase" — looks like a copy/paste slip,
    // left untouched here since it is a runtime string.
    LOG.debug("Starting minorTableWithBase");
    Table t = newTable("default", "mtnb", false);

    addDeltaFile(t, null, 1L, 2L, 2);
    addDeltaFile(t, null, 3L, 4L, 2);

    burnThroughTransactions(5);

    CompactionRequest rqst = new CompactionRequest("default", "mtnb", CompactionType.MINOR);
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    // There should now be 3 directories in the location: the two original deltas
    // plus the new compacted delta.
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
    Assert.assertEquals(3, stat.length);

    // Find the new delta file and make sure it has the right contents
    boolean sawNewDelta = false;
    for (int i = 0; i < stat.length; i++) {
      if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(1, 4))) {
        sawNewDelta = true;
        FileStatus[] buckets = fs.listStatus(stat[i].getPath());
        Assert.assertEquals(2, buckets.length);
        Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertEquals(208L, buckets[0].getLen());
        Assert.assertEquals(208L, buckets[1].getLen());
      } else {
        LOG.debug("This is not the delta file you are looking for " +
            stat[i].getPath().getName());
      }
    }
    Assert.assertTrue(sawNewDelta);
  }

  /** Major compaction of a table with a base: expects a new base_0000024. */
  @Test
  public void majorTableWithBase() throws Exception {
    LOG.debug("Starting majorTableWithBase");
    Table t = newTable("default", "matwb", false);

    addBaseFile(t, null, 20L, 20);
    addDeltaFile(t, null, 21L, 22L, 2);
    addDeltaFile(t, null, 23L, 24L, 2);

    burnThroughTransactions(25);

    CompactionRequest rqst = new CompactionRequest("default", "matwb", CompactionType.MAJOR);
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    // There should now be 4 directories in the location: the old base, the two
    // deltas, and the new base (old files are removed later, by the cleaner).
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
    Assert.assertEquals(4, stat.length);

    // Find the new delta file and make sure it has the right contents
    boolean sawNewBase = false;
    for (int i = 0; i < stat.length; i++) {
      if (stat[i].getPath().getName().equals("base_0000024")) {
        sawNewBase = true;
        FileStatus[] buckets = fs.listStatus(stat[i].getPath());
        Assert.assertEquals(2, buckets.length);
        Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertEquals(624L, buckets[0].getLen());
        Assert.assertEquals(624L, buckets[1].getLen());
      } else {
        LOG.debug("This is not the file you are looking for " +
            stat[i].getPath().getName());
      }
    }
    Assert.assertTrue(sawNewBase);
  }

  @Test
  public void minorNoBaseLotsOfDeltas() throws Exception {
    compactNoBaseLotsOfDeltas(CompactionType.MINOR);
  }

  @Test
  public void majorNoBaseLotsOfDeltas() throws Exception {
    compactNoBaseLotsOfDeltas(CompactionType.MAJOR);
  }

  /**
   * Shared driver for the two tests above: with COMPACTOR_MAX_NUM_DELTA capped at 2,
   * the worker must chain intermediate minor compactions before the requested one.
   */
  private void compactNoBaseLotsOfDeltas(CompactionType type) throws Exception {
    conf.setIntVar(HiveConf.ConfVars.COMPACTOR_MAX_NUM_DELTA, 2);
    Table t = newTable("default", "mapwb", true);
    Partition p = newPartition(t, "today");

//    addBaseFile(t, p, 20L, 20);
    addDeltaFile(t, p, 21L, 21L, 2);
    addDeltaFile(t, p, 23L, 23L, 2);
    //make it look like streaming API use case
    addDeltaFile(t, p, 25L, 29L, 2);
    addDeltaFile(t, p, 31L, 32L, 3);
    //make it looks like 31-32 has been compacted, but not cleaned
    addDeltaFile(t, p, 31L, 33L, 5);
    addDeltaFile(t, p, 35L, 35L, 1);

    /* since COMPACTOR_MAX_NUM_DELTA=2, we expect files 1,2 to be minor compacted by
     * 1 job to produce delta_21_23
     * 3,5 to be minor compacted by 2nd job (file 4 is obsolete) to make delta_25_33 (4th is skipped)
     *
     * and then the 'requested'
     * minor compaction to combine delta_21_23, delta_25_33 and delta_35_35 to make delta_21_35
     * or major compaction to create base_35 */
    burnThroughTransactions(35);
    CompactionRequest rqst = new CompactionRequest("default", "mapwb", type);
    rqst.setPartitionname("ds=today");
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(p.getSd().getLocation()));
    Assert.assertEquals(9, stat.length);

    // Find the new delta file and make sure it has the right contents
    BitSet matchesFound = new BitSet(9);
    for (int i = 0; i < stat.length; i++) {
      if(stat[i].getPath().getName().equals(makeDeltaDirName(21,21))) {
        matchesFound.set(0);
      }
      else if(stat[i].getPath().getName().equals(makeDeltaDirName(23, 23))) {
        matchesFound.set(1);
      }
      else if(stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(25, 29))) {
        matchesFound.set(2);
      }
      else if(stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(31, 32))) {
        matchesFound.set(3);
      }
      else if(stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(31, 33))) {
        matchesFound.set(4);
      }
      else if(stat[i].getPath().getName().equals(makeDeltaDirName(35, 35))) {
        matchesFound.set(5);
      }
      else if(stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21,23))) {
        matchesFound.set(6);
      }
      else if(stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(25,33))) {
        matchesFound.set(7);
      }
      switch (type) {
        //yes, both do set(8)
        case MINOR:
          if(stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21,35))) {
            matchesFound.set(8);
          }
          break;
        case MAJOR:
          if(stat[i].getPath().getName().equals(AcidUtils.baseDir(35))) {
            matchesFound.set(8);
          }
          break;
        default:
          throw new IllegalStateException();
      }
    }
    // Report every expected directory that was not found, in one assertion message.
    StringBuilder sb = null;
    for(int i = 0; i < stat.length; i++) {
      if(!matchesFound.get(i)) {
        if(sb == null) {
          sb = new StringBuilder("Some files are missing at index: ");
        }
        sb.append(i).append(",");
      }
    }
    if (sb != null) {
      Assert.assertTrue(sb.toString(), false);
    }
  }

  /** Major compaction of a partition with a base: expects a new base_0000024. */
  @Test
  public void majorPartitionWithBase() throws Exception {
    LOG.debug("Starting majorPartitionWithBase");
    Table t = newTable("default", "mapwb", true);
    Partition p = newPartition(t, "today");

    addBaseFile(t, p, 20L, 20);
    addDeltaFile(t, p, 21L, 22L, 2);
    addDeltaFile(t, p, 23L, 24L, 2);

    burnThroughTransactions(25);

    CompactionRequest rqst = new CompactionRequest("default", "mapwb", CompactionType.MAJOR);
    rqst.setPartitionname("ds=today");
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    // There should still be four directories in the location.
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(p.getSd().getLocation()));
    Assert.assertEquals(4, stat.length);

    // Find the new delta file and make sure it has the right contents
    boolean sawNewBase = false;
    for (int i = 0; i < stat.length; i++) {
      if (stat[i].getPath().getName().equals("base_0000024")) {
        sawNewBase = true;
        FileStatus[] buckets = fs.listStatus(stat[i].getPath());
        Assert.assertEquals(2, buckets.length);
        Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertEquals(624L, buckets[0].getLen());
        Assert.assertEquals(624L, buckets[1].getLen());
      } else {
        LOG.debug("This is not the file you are looking for " +
            stat[i].getPath().getName());
      }
    }
    Assert.assertTrue(sawNewBase);
  }

  /** Major compaction of a table with deltas only (no base): expects base_0000004. */
  @Test
  public void majorTableNoBase() throws Exception {
    LOG.debug("Starting majorTableNoBase");
    Table t = newTable("default", "matnb", false);

    addDeltaFile(t, null, 1L, 2L, 2);
    addDeltaFile(t, null, 3L, 4L, 2);

    burnThroughTransactions(4);

    CompactionRequest rqst = new CompactionRequest("default", "matnb", CompactionType.MAJOR);
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    // There should now be 3 directories in the location
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
    Assert.assertEquals(3, stat.length);

    // Find the new delta file and make sure it has the right contents
    boolean sawNewBase = false;
    for (int i = 0; i < stat.length; i++) {
      if (stat[i].getPath().getName().equals("base_0000004")) {
        sawNewBase = true;
        FileStatus[] buckets = fs.listStatus(stat[i].getPath());
        Assert.assertEquals(2, buckets.length);
        Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertEquals(104L, buckets[0].getLen());
        Assert.assertEquals(104L, buckets[1].getLen());
      } else {
        LOG.debug("This is not the file you are looking for " +
            stat[i].getPath().getName());
      }
    }
    Assert.assertTrue(sawNewBase);
  }

  /** Major compaction of a table whose base is a pre-ACID "legacy" file layout. */
  @Test
  public void majorTableLegacy() throws Exception {
    LOG.debug("Starting majorTableLegacy");
    Table t = newTable("default", "matl", false);

    addLegacyFile(t, null, 20);
    addDeltaFile(t, null, 21L, 22L, 2);
    addDeltaFile(t, null, 23L, 24L, 2);

    burnThroughTransactions(25);

    CompactionRequest rqst = new CompactionRequest("default", "matl", CompactionType.MAJOR);
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    // NOTE(review): the directory count is intentionally not asserted here
    // (the check below is commented out) — only the new base's contents are verified.
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
    //Assert.assertEquals(4, stat.length);

    // Find the new delta file and make sure it has the right contents
    boolean sawNewBase = false;
    for (int i = 0; i < stat.length; i++) {
      if (stat[i].getPath().getName().equals("base_0000024")) {
        sawNewBase = true;
        FileStatus[] buckets = fs.listStatus(stat[i].getPath());
        Assert.assertEquals(2, buckets.length);
        Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertEquals(624L, buckets[0].getLen());
        Assert.assertEquals(624L, buckets[1].getLen());
      } else {
        LOG.debug("This is not the file you are looking for " +
            stat[i].getPath().getName());
      }
    }
    Assert.assertTrue(sawNewBase);
  }

  /** Minor compaction of a table whose base is a pre-ACID "legacy" file layout. */
  @Test
  public void minorTableLegacy() throws Exception {
    LOG.debug("Starting minorTableLegacy");
    Table t = newTable("default", "mtl", false);

    addLegacyFile(t, null, 20);
    addDeltaFile(t, null, 21L, 22L, 2);
    addDeltaFile(t, null, 23L, 24L, 2);

    burnThroughTransactions(25);

    CompactionRequest rqst = new CompactionRequest("default", "mtl", CompactionType.MINOR);
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    // The directory count is not asserted here; only the new compacted delta's
    // contents are verified.
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));

    // Find the new delta file and make sure it has the right contents
    boolean sawNewDelta = false;
    for (int i = 0; i < stat.length; i++) {
      if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21, 24))) {
        sawNewDelta = true;
        FileStatus[] buckets = fs.listStatus(stat[i].getPath());
        Assert.assertEquals(2, buckets.length);
        Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
      } else {
        LOG.debug("This is not the file you are looking for " +
            stat[i].getPath().getName());
      }
    }
    Assert.assertTrue(sawNewDelta);
  }

  /**
   * Major compaction where the base and first delta are missing one bucket file:
   * the compactor must still produce both buckets (one small, one large).
   */
  @Test
  public void majorPartitionWithBaseMissingBuckets() throws Exception {
    LOG.debug("Starting majorPartitionWithBaseMissingBuckets");
    Table t = newTable("default", "mapwbmb", true);
    Partition p = newPartition(t, "today");

    addBaseFile(t, p, 20L, 20, 2, false);
    addDeltaFile(t, p, 21L, 22L, 2, 2, false);
    addDeltaFile(t, p, 23L, 26L, 4);

    burnThroughTransactions(27);

    CompactionRequest rqst = new CompactionRequest("default", "mapwbmb", CompactionType.MAJOR);
    rqst.setPartitionname("ds=today");
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    // There should still be four directories in the location.
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(p.getSd().getLocation()));
    Assert.assertEquals(4, stat.length);

    // Find the new delta file and make sure it has the right contents
    boolean sawNewBase = false;
    for (int i = 0; i < stat.length; i++) {
      if (stat[i].getPath().getName().equals("base_0000026")) {
        sawNewBase = true;
        FileStatus[] buckets = fs.listStatus(stat[i].getPath());
        Assert.assertEquals(2, buckets.length);
        Assert.assertTrue(buckets[0].getPath().getName().matches("bucket_0000[01]"));
        Assert.assertTrue(buckets[1].getPath().getName().matches("bucket_0000[01]"));
        // Bucket 0 should be small and bucket 1 should be large, make sure that's the case
        // (listing order is not guaranteed, so accept either ordering).
        Assert.assertTrue(
            ("bucket_00000".equals(buckets[0].getPath().getName()) && 104L == buckets[0].getLen()
                && "bucket_00001".equals(buckets[1].getPath().getName()) && 676L == buckets[1]
                .getLen())
                ||
                ("bucket_00000".equals(buckets[1].getPath().getName()) && 104L == buckets[1].getLen()
                    && "bucket_00001".equals(buckets[0].getPath().getName()) && 676L == buckets[0]
                    .getLen())
        );
      } else {
        LOG.debug("This is not the file you are looking for " +
            stat[i].getPath().getName());
      }
    }
    Assert.assertTrue(sawNewBase);
  }

  /** Major compaction with an open transaction (23) in the middle of the delta range. */
  @Test
  public void majorWithOpenInMiddle() throws Exception {
    LOG.debug("Starting majorWithOpenInMiddle");
    Table t = newTable("default", "mtwb", false);

    addBaseFile(t, null, 20L, 20);
    addDeltaFile(t, null, 21L, 22L, 2);
    addDeltaFile(t, null, 23L, 25L, 3);
    addLengthFile(t, null, 23L, 25L, 3);
    addDeltaFile(t, null, 26L, 27L, 2);
    // Txn 23 is left open.
    burnThroughTransactions(27, new HashSet<Long>(Arrays.asList(23L)), null);

    CompactionRequest rqst = new CompactionRequest("default", "mtwb", CompactionType.MAJOR);
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    // There should still now be 5 directories in the location
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
    Assert.assertEquals(5, stat.length);

    // Find the new delta file and make sure it has the right contents
    Arrays.sort(stat);
    // Compaction stopped before the open txn 23, so the new base only covers up to 22.
    Assert.assertEquals("base_0000022", stat[0].getPath().getName());
    Assert.assertEquals("base_20", stat[1].getPath().getName());
    Assert.assertEquals(makeDeltaDirName(21, 22), stat[2].getPath().getName());
    Assert.assertEquals(makeDeltaDirName(23, 25), stat[3].getPath().getName());
    Assert.assertEquals(makeDeltaDirName(26, 27), stat[4].getPath().getName());
  }

  /** Major compaction with aborted txns (24, 25): new base covers the full range. */
  @Test
  public void majorWithAborted() throws Exception {
    LOG.debug("Starting majorWithAborted");
    Table t = newTable("default", "mtwb", false);

    addBaseFile(t, null, 20L, 20);
    addDeltaFile(t, null, 21L, 22L, 2);
    addDeltaFile(t, null, 23L, 25L, 3);
    addLengthFile(t, null, 23L, 25L, 3);
    addDeltaFile(t, null, 26L, 27L, 2);
    // Txns 24 and 25 are aborted.
    burnThroughTransactions(27, null, new HashSet<Long>(Arrays.asList(24L, 25L)));

    CompactionRequest rqst = new CompactionRequest("default", "mtwb", CompactionType.MAJOR);
    txnHandler.compact(rqst);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertEquals("ready for cleaning", compacts.get(0).getState());

    // There should still now be 5 directories in the location
    FileSystem fs = FileSystem.get(conf);
    FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
    Assert.assertEquals(5, stat.length);

    // Find the new delta file and make sure it has the right contents
    Arrays.sort(stat);
    Assert.assertEquals("base_0000027", stat[0].getPath().getName());
    Assert.assertEquals("base_20", stat[1].getPath().getName());
    Assert.assertEquals(makeDeltaDirName(21, 22), stat[2].getPath().getName());
    Assert.assertEquals(makeDeltaDirName(23, 25), stat[3].getPath().getName());
    Assert.assertEquals(makeDeltaDirName(26, 27), stat[4].getPath().getName());
  }

  // This suite uses the pre-1.3 delta directory naming scheme (see class javadoc).
  @Override
  boolean useHive130DeltaDirName() {
    return false;
  }

  /** Compaction of a table that is dropped before the worker runs must still succeed. */
  @Test
  public void droppedTable() throws Exception {
    Table t = newTable("default", "dt", false);

    addDeltaFile(t, null, 1L, 2L, 2);
    addDeltaFile(t, null, 3L, 4L, 2);
    burnThroughTransactions(4);

    CompactionRequest rqst = new CompactionRequest("default", "dt", CompactionType.MAJOR);
    txnHandler.compact(rqst);
    // Drop the table after queueing the compaction but before the worker picks it up.
    ms.dropTable("default", "dt");

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertTrue(TxnStore.SUCCEEDED_RESPONSE.equals(compacts.get(0).getState()));
  }

  /** Compaction of a partition dropped before the worker runs must still succeed. */
  @Test
  public void droppedPartition() throws Exception {
    Table t = newTable("default", "dp", true);
    Partition p = newPartition(t, "today");

    addBaseFile(t, p, 20L, 20);
    addDeltaFile(t, p, 21L, 22L, 2);
    addDeltaFile(t, p, 23L, 24L, 2);

    burnThroughTransactions(25);

    CompactionRequest rqst = new CompactionRequest("default", "dp", CompactionType.MINOR);
    rqst.setPartitionname("ds=today");
    txnHandler.compact(rqst);
    // Drop the partition after queueing the compaction but before the worker picks it up.
    ms.dropPartition("default", "dp", Collections.singletonList("today"), true);

    startWorker();

    ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
    List<ShowCompactResponseElement> compacts = rsp.getCompacts();
    Assert.assertEquals(1, compacts.size());
    Assert.assertTrue(TxnStore.SUCCEEDED_RESPONSE.equals(rsp.getCompacts().get(0).getState()));
  }

  @After
  public void tearDown() throws Exception {
    compactorTestCleanup();
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.servicemix.jbi.runtime;

import java.util.HashMap;
import javax.jbi.messaging.DeliveryChannel;
import javax.jbi.messaging.ExchangeStatus;
import javax.jbi.messaging.Fault;
import javax.jbi.messaging.InOnly;
import javax.jbi.messaging.InOptionalOut;
import javax.jbi.messaging.InOut;
import javax.jbi.messaging.MessageExchange.Role;
import javax.jbi.messaging.MessageExchangeFactory;
import javax.jbi.messaging.MessagingException;
import javax.jbi.messaging.NormalizedMessage;
import javax.jbi.messaging.RobustInOnly;
import javax.xml.namespace.QName;
import junit.framework.TestCase;
import org.apache.servicemix.JbiConstants;
import org.apache.servicemix.components.util.ComponentSupport;
import org.apache.servicemix.jbi.ExchangeTimeoutException;
import org.apache.servicemix.jbi.jaxp.StringSource;
import org.apache.servicemix.jbi.runtime.impl.ComponentRegistryImpl;
import org.apache.servicemix.nmr.core.ServiceMix;
import org.apache.servicemix.nmr.api.service.ServiceHelper;
import org.apache.servicemix.nmr.api.Endpoint;

/**
 * Exercises every JBI 1.0 message exchange pattern (InOnly, RobustInOnly,
 * InOut, InOptionalOut) over the ServiceMix NMR, covering the normal flow
 * plus fault, error, and sync/timeout variations for each pattern.
 * <p>
 * Two {@link TestComponent}s are registered against one in-VM NMR: a
 * "consumer" that initiates exchanges and a "provider" that answers them.
 * The {@code tearDown} listener verifies that every exchange reached a
 * terminal state.
 */
public class MEPExchangeTest extends TestCase {

    /** Input payload used by every test. */
    public static final String PAYLOAD = "<payload/>";
    /** Response payload used by the provider side. */
    public static final String RESPONSE = "<response/>";

    // Answering endpoint ("provider" service) and initiating endpoint
    // ("consumer" service); both wired to the same NMR in setUp().
    private TestComponent provider;
    private TestComponent consumer;
    // Asserts in tearDown() that all exchanges were completed.
    private ExchangeCompletedListener listener;

    /**
     * Minimal JBI component exposing its {@link DeliveryChannel} so the tests
     * can drive exchanges directly.
     */
    public static class TestComponent extends ComponentSupport {
        public TestComponent(QName service, String endpoint) {
            super(service, endpoint);
        }
        public DeliveryChannel getChannel() throws MessagingException {
            return getContext().getDeliveryChannel();
        }
    }

    /** Boots an in-VM NMR and registers the provider and consumer components. */
    public void setUp() throws Exception {
        ServiceMix smx = new ServiceMix();
        smx.init();
        ComponentRegistryImpl reg = new ComponentRegistryImpl();
        reg.setNmr(smx);
        listener = new ExchangeCompletedListener();
        smx.getListenerRegistry().register(listener, new HashMap<String, Object>());
        // Create components
        provider = new TestComponent(new QName("provider"), "endpoint");
        consumer = new TestComponent(new QName("consumer"), "endpoint");
        // Register components
        reg.register(new SimpleComponentWrapper(provider),
                     ServiceHelper.createMap(ComponentRegistry.NAME, "provider"));
        reg.register(new SimpleComponentWrapper(consumer),
                     ServiceHelper.createMap(ComponentRegistry.NAME, "consumer"));
    }

    /** Fails the test if any exchange did not reach a terminal status. */
    public void tearDown() throws Exception {
        if (listener != null) {
            listener.assertExchangeCompleted();
        }
    }

    /**
     * InOnly happy path: consumer sends, provider accepts and answers DONE.
     * Also checks that setMessage() rejects null, duplicate "in", and the
     * "out"/"fault" roles that InOnly does not support.
     */
    public void testInOnly() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOnly mec = mef.createInOnlyExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        assertEquals(Role.CONSUMER, mec.getRole());
        try {
            mec.setMessage(null, "in");
            fail("Message is null");
        } catch (Exception e) {
            // ok
        }
        try {
            mec.setMessage(mec.createMessage(), "in");
            fail("Message already set");
        } catch (Exception e) {
            // ok
        }
        try {
            mec.setMessage(mec.createMessage(), "out");
            fail("Out not supported");
        } catch (Exception e) {
            // ok
        }
        try {
            mec.setMessage(mec.createFault(), "fault");
            fail("Fault not supported");
        } catch (Exception e) {
            // ok
        }
        consumer.getChannel().send(mec);
        // Provider side
        InOnly mep = (InOnly)
provider.getChannel().accept(1000L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        assertEquals(Role.PROVIDER, mep.getRole());
        mep.setStatus(ExchangeStatus.DONE);
        provider.getChannel().send(mep);
        // Consumer side
        assertSame(mec, consumer.getChannel().accept(1000L));
        assertEquals(ExchangeStatus.DONE, mec.getStatus());
        assertEquals(Role.CONSUMER, mec.getRole());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOnly where the provider answers with an ERROR status: the consumer
     * sees ERROR and may not send the exchange again.
     */
    public void testInOnlyWithError() throws Exception {
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOnly mec = mef.createInOnlyExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        assertEquals(Role.CONSUMER, mec.getRole());
        consumer.getChannel().send(mec);
        // Provider side
        InOnly mep = (InOnly) provider.getChannel().accept(1000L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        assertEquals(Role.PROVIDER, mep.getRole());
        mep.setError(new Exception());
        provider.getChannel().send(mep);
        // Consumer side
        assertSame(mec, consumer.getChannel().accept(1000L));
        assertEquals(ExchangeStatus.ERROR, mec.getStatus());
        assertEquals(Role.CONSUMER, mec.getRole());
        // Check we can not send the exchange anymore
        try {
            mec.setStatus(ExchangeStatus.DONE);
            consumer.getChannel().send(mec);
            fail("Exchange status is ERROR");
        } catch (Exception e) {
            // ok
        }
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOnly via sendSync: a background thread plays provider and completes
     * the exchange; sendSync must return true and the exchange end up DONE.
     * The SEND_SYNC property must be visible on the provider side.
     */
    public void testInOnlySync() throws Exception {
        // Create thread to answer
        new Thread(new Runnable() {
            public void run() {
                try {
                    // Provider side
                    InOnly mep = (InOnly) provider.getChannel().accept(10000L);
                    assertNotNull(mep);
                    assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
                    assertEquals(Boolean.TRUE, mep.getProperty(JbiConstants.SEND_SYNC));
                    mep.setStatus(ExchangeStatus.DONE);
                    provider.getChannel().send(mep);
                } catch (Exception e) {
                    // NOTE(review): fail() inside a spawned thread does not fail
                    // the JUnit test directly; the tearDown listener is what
                    // ultimately catches an incomplete exchange.
                    e.printStackTrace();
                    fail();
                }
            }
        }).start();
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOnly mec = mef.createInOnlyExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        boolean result = consumer.getChannel().sendSync(mec, 10000L);
        assertTrue(result);
        assertEquals(ExchangeStatus.DONE, mec.getStatus());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * sendSync with no provider accepting at all: the 100ms timeout elapses,
     * sendSync returns false and the exchange is flagged ERROR.
     */
    public void testInOnlySyncWithTimeoutBeforeAccept() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOnly mec = mef.createInOnlyExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        boolean result = consumer.getChannel().sendSync(mec, 100L);
        assertFalse(result);
        assertEquals(ExchangeStatus.ERROR, mec.getStatus());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * sendSync that times out after the provider accepted but before it
     * answered (provider sleeps 100ms, consumer waits only 50ms).  The
     * provider's late send is expected to hit ExchangeTimeoutException.
     */
    public void testInOnlySyncWithTimeoutAfterAccept() throws Exception {
        // Create thread to answer
        Thread t = new Thread(new Runnable() {
            public void run() {
                try {
                    // Provider side
                    InOnly mep = (InOnly) provider.getChannel().accept(10000L);
                    assertNotNull(mep);
                    assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
                    assertEquals(Boolean.TRUE, mep.getProperty(JbiConstants.SEND_SYNC));
                    Thread.sleep(100L);
                    mep.setStatus(ExchangeStatus.DONE);
                    provider.getChannel().send(mep);
                } catch (ExchangeTimeoutException e) {
                    // ok
                } catch (Exception e) {
                    e.printStackTrace();
                    fail();
                }
            }
        });
        t.start();
        // Send message exchange
        MessageExchangeFactory mef
            = consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOnly mec = mef.createInOnlyExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        boolean result = consumer.getChannel().sendSync(mec, 50L);
        assertFalse(result);
        assertEquals(ExchangeStatus.ERROR, mec.getStatus());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        t.join();
    }

    /**
     * InOut happy path: provider returns an out message, consumer closes the
     * exchange with DONE, provider observes the same exchange instance again.
     */
    public void testInOut() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOut mec = mef.createInOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        InOut mep = (InOut) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        m = mep.createMessage();
        m.setContent(new StringSource(RESPONSE));
        mep.setOutMessage(m);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOut) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ACTIVE, mec.getStatus());
        mec.setStatus(ExchangeStatus.DONE);
        consumer.getChannel().send(mec);
        // Provider site
        assertSame(mep, provider.getChannel().accept(100L));
        // NOTE(review): asserts the consumer-side handle (mec) just set to
        // DONE above, not the provider-side mep — presumably intentional but
        // worth confirming.
        assertEquals(ExchangeStatus.DONE, mec.getStatus());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOut where the consumer uses sendSync for the request leg and then
     * closes the exchange asynchronously with DONE.
     */
    public void testInOutSync() throws Exception {
        // Create thread to answer
        new Thread(new Runnable() {
            public void run() {
                try {
                    // Provider side
                    InOut mep = (InOut) provider.getChannel().accept(10000L);
                    assertNotNull(mep);
                    assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
                    NormalizedMessage m = mep.createMessage();
                    m.setContent(new StringSource(RESPONSE));
                    mep.setOutMessage(m);
                    provider.getChannel().send(mep);
                } catch (Exception e) {
                    e.printStackTrace();
                    fail();
                }
            }
}).start();
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOut mec = mef.createInOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().sendSync(mec, 10000L);
        assertEquals(ExchangeStatus.ACTIVE, mec.getStatus());
        mec.setStatus(ExchangeStatus.DONE);
        consumer.getChannel().send(mec);
        // Provider site
        assertNotNull(provider.getChannel().accept(100L));
        assertEquals(ExchangeStatus.DONE, mec.getStatus());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOut where BOTH legs are synchronous: the provider uses sendSync for
     * its out message and therefore blocks until the consumer answers DONE.
     */
    public void testInOutSyncSync() throws Exception {
        // Create thread to answer
        Thread t = new Thread(new Runnable() {
            public void run() {
                try {
                    // Provider side
                    InOut mep = (InOut) provider.getChannel().accept(10000L);
                    assertNotNull(mep);
                    assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
                    NormalizedMessage m = mep.createMessage();
                    m.setContent(new StringSource(RESPONSE));
                    mep.setOutMessage(m);
                    // Blocks until the consumer closes the exchange.
                    provider.getChannel().sendSync(mep);
                    assertEquals(ExchangeStatus.DONE, mep.getStatus());
                } catch (Exception e) {
                    e.printStackTrace();
                    fail();
                }
            }
        });
        t.start();
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOut mec = mef.createInOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().sendSync(mec, 10000L);
        assertEquals(ExchangeStatus.ACTIVE, mec.getStatus());
        mec.setStatus(ExchangeStatus.DONE);
        consumer.getChannel().send(mec);
        // Wait until other thread end
        t.join(100L);
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOut where the provider answers with a fault instead of an out
     * message; the consumer acknowledges the fault with DONE.
     */
    public void testInOutWithFault() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOut mec = mef.createInOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        InOut mep = (InOut) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        Fault f = mep.createFault();
        f.setContent(new StringSource(RESPONSE));
        mep.setFault(f);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOut) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ACTIVE, mec.getStatus());
        assertNotNull(mec.getFault());
        mec.setStatus(ExchangeStatus.DONE);
        consumer.getChannel().send(mec);
        // Provider site
        assertSame(mep, provider.getChannel().accept(100L));
        assertEquals(ExchangeStatus.DONE, mec.getStatus());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOut with a provider fault that the consumer rejects with ERROR; the
     * exchange is then terminal and may not be sent again.
     */
    public void testInOutWithFaultAndError() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOut mec = mef.createInOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        InOut mep = (InOut) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        Fault f = mep.createFault();
        f.setContent(new StringSource(RESPONSE));
        mep.setFault(f);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOut) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ACTIVE, mec.getStatus());
        assertNotNull(mec.getFault());
        mec.setStatus(ExchangeStatus.ERROR);
        consumer.getChannel().send(mec);
        // Provider site
        assertSame(mep, provider.getChannel().accept(100L));
        assertEquals(ExchangeStatus.ERROR, mec.getStatus());
        try {
            consumer.getChannel().send(mec);
            fail("Exchange status is ERROR");
        } catch (Exception e) {
            // ok
        }
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOut where the provider sets ERROR instead of sending an out message;
     * the consumer sees ERROR and may not resend.
     */
    public void testInOutWithError1() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOut mec = mef.createInOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        InOut mep = (InOut) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        // A response message is created but the status is switched to ERROR
        // before sending, so the out message is never delivered.
        m = mep.createMessage();
        m.setContent(new StringSource(RESPONSE));
        mep.setStatus(ExchangeStatus.ERROR);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOut) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ERROR, mec.getStatus());
        try {
            mec.setStatus(ExchangeStatus.DONE);
            consumer.getChannel().send(mec);
            fail("Exchange status is ERROR");
        } catch (Exception e) {
            // ok
        }
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOut answered normally by the provider but rejected with ERROR by the
     * consumer; the provider observes the ERROR status.
     * NOTE(review): the provider never calls setOutMessage() here — the
     * response message is created and dropped; confirm that is intended.
     */
    public void testInOutWithError2() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOut mec = mef.createInOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        InOut mep = (InOut) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        m = mep.createMessage();
        m.setContent(new StringSource(RESPONSE));
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOut)
consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ACTIVE, mec.getStatus());
        mec.setStatus(ExchangeStatus.ERROR);
        consumer.getChannel().send(mec);
        // Provider site
        assertSame(mep, provider.getChannel().accept(100L));
        assertEquals(ExchangeStatus.ERROR, mec.getStatus());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOptionalOut where the provider does send an optional out message;
     * flows like a regular InOut closed with DONE by the consumer.
     */
    public void testInOptOutWithRep() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOptionalOut mec = mef.createInOptionalOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        InOptionalOut mep = (InOptionalOut) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        m = mep.createMessage();
        m.setContent(new StringSource(RESPONSE));
        mep.setOutMessage(m);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOptionalOut) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ACTIVE, mec.getStatus());
        mec.setStatus(ExchangeStatus.DONE);
        consumer.getChannel().send(mec);
        // Provider site
        mep = (InOptionalOut) provider.getChannel().accept(100L);
        assertEquals(ExchangeStatus.DONE, mec.getStatus());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOptionalOut where the provider skips the optional out message and
     * closes directly with DONE (degenerates to InOnly).
     */
    public void testInOptOutWithoutRep() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOptionalOut mec = mef.createInOptionalOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        InOptionalOut mep = (InOptionalOut) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        mep.setStatus(ExchangeStatus.DONE);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOptionalOut) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.DONE, mec.getStatus());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOptionalOut where the provider answers with a fault in place of the
     * optional out; consumer acknowledges with DONE.
     */
    public void testInOptOutWithProviderFault() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOptionalOut mec = mef.createInOptionalOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        InOptionalOut mep = (InOptionalOut) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        mep.setFault(mep.createFault());
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOptionalOut) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ACTIVE, mec.getStatus());
        assertNotNull(mec.getFault());
        mec.setStatus(ExchangeStatus.DONE);
        consumer.getChannel().send(mec);
        // Provider site
        mep = (InOptionalOut) provider.getChannel().accept(100L);
        assertEquals(ExchangeStatus.DONE, mec.getStatus());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOptionalOut where the provider answers ERROR immediately; the
     * consumer may not send the exchange again.
     */
    public void testInOptOutWithProviderError() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOptionalOut mec = mef.createInOptionalOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        InOptionalOut mep =
            (InOptionalOut) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        mep.setStatus(ExchangeStatus.ERROR);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOptionalOut) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ERROR, mec.getStatus());
        try {
            mec.setStatus(ExchangeStatus.DONE);
            consumer.getChannel().send(mec);
            fail("Exchange status is ERROR");
        } catch (Exception e) {
            // ok
        }
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOptionalOut full round trip: provider sends out, consumer answers
     * with a fault, provider acknowledges the fault with DONE.
     */
    public void testInOptOutWithRepAndConsumerFault() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOptionalOut mec = mef.createInOptionalOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        InOptionalOut mep = (InOptionalOut) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        m = mep.createMessage();
        m.setContent(new StringSource(RESPONSE));
        mep.setOutMessage(m);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOptionalOut) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ACTIVE, mec.getStatus());
        mec.setFault(mec.createFault());
        consumer.getChannel().send(mec);
        // Provider site
        mep = (InOptionalOut) provider.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        assertNotNull(mep.getFault());
        mep.setStatus(ExchangeStatus.DONE);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOptionalOut) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.DONE, mec.getStatus());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    public void
testInOptOutWithRepAndConsumerError() throws Exception {
        // InOptionalOut where the consumer rejects the provider's out message
        // with ERROR; the provider then may not send the exchange again.
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOptionalOut mec = mef.createInOptionalOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        InOptionalOut mep = (InOptionalOut) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        m = mep.createMessage();
        m.setContent(new StringSource(RESPONSE));
        mep.setOutMessage(m);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOptionalOut) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ACTIVE, mec.getStatus());
        mec.setStatus(ExchangeStatus.ERROR);
        consumer.getChannel().send(mec);
        // Provider site
        mep = (InOptionalOut) provider.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ERROR, mep.getStatus());
        try {
            mep.setStatus(ExchangeStatus.DONE);
            provider.getChannel().send(mep);
            fail("Exchange status is ERROR");
        } catch (Exception e) {
            // ok
        }
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * InOptionalOut longest path: provider out → consumer fault → provider
     * rejects the fault with ERROR → consumer sees ERROR and cannot resend.
     */
    public void testInOptOutWithRepFaultAndError() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        InOptionalOut mec = mef.createInOptionalOutExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        InOptionalOut mep = (InOptionalOut) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        m = mep.createMessage();
        m.setContent(new StringSource(RESPONSE));
        mep.setOutMessage(m);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOptionalOut) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ACTIVE, mec.getStatus());
        mec.setFault(mec.createFault());
        consumer.getChannel().send(mec);
        // Provider site
        assertSame(mep, provider.getChannel().accept(100L));
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        assertNotNull(mep.getFault());
        mep.setStatus(ExchangeStatus.ERROR);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (InOptionalOut) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ERROR, mec.getStatus());
        try {
            mec.setStatus(ExchangeStatus.DONE);
            consumer.getChannel().send(mec);
            fail("Exchange status is ERROR");
        } catch (Exception e) {
            // ok
        }
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * RobustInOnly happy path: provider accepts and closes with DONE.
     */
    public void testRobustInOnly() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        RobustInOnly mec = mef.createRobustInOnlyExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        RobustInOnly mep = (RobustInOnly) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        mep.setStatus(ExchangeStatus.DONE);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (RobustInOnly) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.DONE, mec.getStatus());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * RobustInOnly where the provider answers with a fault, which the
     * consumer acknowledges with DONE.
     */
    public void testRobustInOnlyWithFault() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        RobustInOnly mec = mef.createRobustInOnlyExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        RobustInOnly mep = (RobustInOnly) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        mep.setFault(mep.createFault());
        provider.getChannel().send(mep);
        // Consumer side
        mec = (RobustInOnly) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ACTIVE, mec.getStatus());
        assertNotNull(mec.getFault());
        mec.setStatus(ExchangeStatus.DONE);
        // NOTE(review): the consumer-side exchange is sent via the PROVIDER's
        // channel here — looks like it should be consumer.getChannel();
        // verify against the NMR channel semantics before changing.
        provider.getChannel().send(mec);
        // Provider site
        mep = (RobustInOnly) provider.getChannel().accept(100L);
        assertEquals(ExchangeStatus.DONE, mep.getStatus());
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * RobustInOnly where the provider answers ERROR; the consumer may not
     * send the exchange again.
     */
    public void testRobustInOnlyWithError() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        RobustInOnly mec = mef.createRobustInOnlyExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        RobustInOnly mep = (RobustInOnly) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        mep.setStatus(ExchangeStatus.ERROR);
        provider.getChannel().send(mep);
        // Consumer side
        mec = (RobustInOnly) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ERROR, mec.getStatus());
        try {
            mec.setStatus(ExchangeStatus.DONE);
            // NOTE(review): provider channel used for a consumer-side send —
            // inside the expected-failure block, but see the note in
            // testRobustInOnlyWithFault().
            provider.getChannel().send(mec);
            fail("Exchange status is ERROR");
        } catch (Exception e) {
            // ok
        }
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }

    /**
     * RobustInOnly where the provider faults and the consumer rejects the
     * fault with an error; the provider sees ERROR and cannot resend.
     */
    public void testRobustInOnlyWithFaultAndError() throws Exception {
        // Send message exchange
        MessageExchangeFactory mef =
            consumer.getChannel().createExchangeFactoryForService(new QName("provider"));
        RobustInOnly mec = mef.createRobustInOnlyExchange();
        NormalizedMessage m = mec.createMessage();
        m.setContent(new StringSource(PAYLOAD));
        mec.setInMessage(m);
        consumer.getChannel().send(mec);
        // Provider side
        RobustInOnly mep = (RobustInOnly) provider.getChannel().accept(100L);
        assertNotNull(mep);
        assertEquals(ExchangeStatus.ACTIVE, mep.getStatus());
        mep.setFault(mep.createFault());
        provider.getChannel().send(mep);
        // Consumer side
        mec = (RobustInOnly) consumer.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ACTIVE, mec.getStatus());
        assertNotNull(mec.getFault());
        mec.setError(new Exception());
        // NOTE(review): provider channel used for a consumer-side send — see
        // the note in testRobustInOnlyWithFault().
        provider.getChannel().send(mec);
        // Provider site
        mep = (RobustInOnly) provider.getChannel().accept(100L);
        assertEquals(ExchangeStatus.ERROR, mep.getStatus());
        try {
            mep.setStatus(ExchangeStatus.DONE);
            provider.getChannel().send(mep);
            fail("Exchange status is ERROR");
        } catch (Exception e) {
            // ok
        }
        // Nothing left
        assertNull(consumer.getChannel().accept(100L)); // receive in
        assertNull(provider.getChannel().accept(100L)); // receive in
    }
}
/*
 * Copyright 2001-2008 Artima, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.scalatest.finders;

import java.util.*;
import static org.scalatest.finders.LocationUtils.getParentOfType;
import static org.scalatest.finders.LocationUtils.isValidName;
import static org.scalatest.finders.utils.StringUtils.is;

/**
 * Locates runnable test selections in a ScalaTest FlatSpec AST, where tests
 * are written as {@code "subject" should "do x" in { ... }} (or with
 * {@code must}/{@code behavior of}).  Given an arbitrary AST node, walks up
 * to find the enclosing scope and produces a {@code Selection} of the test
 * name(s) it covers.
 */
public class FlatSpecFinder implements Finder {

    /**
     * Walks from {@code node} upward until a selection can be made: a whole
     * constructor block selects all tests in the class; an of/in/should/must
     * invocation selects the test(s) under its scope.  Returns null when no
     * selection applies anywhere up the parent chain.
     */
    public Selection find(AstNode node) {
        Selection result = null;
        while (result == null) {
            if (node instanceof ConstructorBlock)
                result = getAllTestSelection(node.className(), node.children());
            else if (node instanceof MethodInvocation) {
                MethodInvocation invocation = (MethodInvocation) node;
                if (is(invocation.name(), "of", "in", "should", "must")) {
                    ConstructorBlock constructor = getParentOfType(node, ConstructorBlock.class);
                    if (constructor != null) {
                        AstNode scopeNode = getScopeNode(node, constructor.children());
                        if (scopeNode != null) {
                            String prefix = getPrefix((MethodInvocation) scopeNode);
                            result = prefix == null ?
                                null : getNodeTestSelection(node, prefix, constructor.children());
                        }
                    }
                }
            }
            // No selection at this node: retry with the parent, or give up at
            // the root.
            if (result == null) {
                if (node.parent() != null)
                    node = node.parent();
                else
                    break;
            }
        }
        return result;
    }

    /**
     * Selects every test in the class: scans the constructor children in
     * order, tracking the current scope prefix, and collects the name of each
     * "in" invocation under a known prefix.
     */
    private Selection getAllTestSelection(String className, AstNode[] constructorChildren) {
        String prefix = null;
        List<String> testNames = new ArrayList<>();
        for (AstNode child : constructorChildren) {
            if (isScope(child))
                prefix = getPrefix((MethodInvocation) child);
            if (prefix != null && child instanceof MethodInvocation && child.name().equals("in")) {
                String testName = getTestName(prefix, (MethodInvocation) child);
                if (testName != null) {
                    testNames.add(testName);
                }
            }
        }
        return new Selection(className, className, testNames.toArray(new String[0]));
    }

    /**
     * Extracts the scope prefix (the "subject" part of a test name) from a
     * scope invocation, unwrapping chained should/must/taggedAs targets.
     * Returns null when no printable prefix can be derived.
     */
    private String getPrefix(MethodInvocation invocation) {
        String result = null;
        while (result == null) {
            // NOTE(review): the braceless `if` directly followed by a
            // commented-out statement and a nested if/else relies on Java's
            // dangling-else rules; the outer `else if` chain still binds to
            // this `if`, but the shape is fragile — worth adding braces.
            if (invocation.name().equals("of"))
                //result = invocation.target().toString();
                if (invocation.args()[0].canBePartOfTestName()) {
                    result = invocation.args()[0].toString();
                } else {
                    break;
                }
            else if (invocation.target() instanceof MethodInvocation) {
                MethodInvocation invocationTarget = (MethodInvocation) invocation.target();
                if (is(invocationTarget.name(), "should", "must", "taggedAs")) {
                    // Keep unwrapping the chained call to reach the subject.
                    invocation = invocationTarget;
                } else if (invocationTarget.canBePartOfTestName()) {
                    result = invocationTarget.toString();
                } else {
                    break;
                }
            } else if (invocation.target().canBePartOfTestName()) {
                result = invocation.target().toString();
            } else {
                break;
            }
        }
        return result;
    }

    /**
     * Finds the scope invocation governing {@code node}: climbs to the
     * top-level child of the constructor containing {@code node}, then (if
     * that child is not itself a scope) searches backwards among the
     * preceding constructor children for the nearest scope.
     */
    private AstNode getScopeNode(AstNode node, AstNode[] constructorChildren) {
        AstNode topLevelNode = null;
        while (node != null && topLevelNode == null) {
            if (node.parent() instanceof ConstructorBlock)
                topLevelNode = node;
            else
                node = node.parent();
        }
        if (topLevelNode != null) {
            if (isScope(topLevelNode))
                return topLevelNode;
            else {
                // Collect the constructor children that appear before the
                // top-level node containing the queried node.
                List<AstNode> beforeTopLevelNodeList = new ArrayList<>();
                for (AstNode child : constructorChildren) {
                    if (!child.equals(topLevelNode))
beforeTopLevelNodeList.add(child); else break; } AstNode scopeNode = null; for (int i = beforeTopLevelNodeList.size() - 1; i >= 0; i--) { AstNode tnode = beforeTopLevelNodeList.get(i); if (isScope(tnode)) { scopeNode = tnode; break; } } if (scopeNode == null) { AstNode tNode = beforeTopLevelNodeList.isEmpty() ? topLevelNode : beforeTopLevelNodeList.get(0); if (isScope(node, true)) { scopeNode = tNode; } } return scopeNode; } } else return null; } private boolean isScope(AstNode node) { return isScope(node, false); } private boolean isScope(AstNode node, boolean allowIt) { if (node instanceof MethodInvocation) { MethodInvocation invocation = (MethodInvocation) node; return invocation.name().equals("of") || isScopeTagged(invocation, allowIt) || (invocation.name().equals("in") && invocation.target() != null && invocation.target() instanceof MethodInvocation && isScopeTagged((MethodInvocation) invocation.target(), allowIt)); } else return false; } private boolean isScopeTagged(MethodInvocation invocation, boolean allowIt) { return (invocation.name().equals("taggedAs") && invocation.target() instanceof MethodInvocation && isScopeShould((MethodInvocation) invocation.target(), allowIt)) || isScopeShould(invocation, allowIt); } private boolean isScopeShould(MethodInvocation invocation, boolean allowIt) { return is(invocation.name(), "should", "must") && invocation.args().length > 0 && invocation.target() != null && (allowIt || !isHeadKeyword(invocation)); } private boolean isHeadKeyword(MethodInvocation invocation) { String name = invocation.target().name(); return is(name, "it", "they", "ignore"); } private Selection getNodeTestSelection(AstNode node, String prefix, AstNode[] constructorChildren) { if (node instanceof ConstructorBlock) { List<String> testNames = getTestNamesFromChildren(prefix, Arrays.asList(node.children())); return new Selection(node.className(), prefix.length() > 0 ? 
prefix : node.className(), testNames.toArray(new String[0])); } else if (node instanceof MethodInvocation) { MethodInvocation invocation = (MethodInvocation) node; String name = invocation.name(); switch (name) { case "of": List<AstNode> constructorChildrenList = Arrays.asList(constructorChildren); int nodeIdx = constructorChildrenList.indexOf(node); if (nodeIdx >= 0) { List<AstNode> startList = constructorChildrenList.subList(nodeIdx + 1, constructorChildrenList.size()); List<AstNode> subList = new ArrayList<>(); for (AstNode snode : startList) { if (!isScope(snode)) subList.add(snode); else break; } List<String> testNames = getTestNamesFromChildren(prefix, subList); return new Selection(node.className(), prefix, testNames.toArray(new String[0])); } else return null; case "should": case "must": AstNode parent = invocation.parent(); if (parent instanceof MethodInvocation && parent.name().equals("in")) { String testName = getTestName(prefix, (MethodInvocation) parent); return testName != null ? new Selection(invocation.className(), testName, new String[]{testName}) : null; } else return null; case "in": String testName = getTestName(prefix, invocation); return testName != null ? 
new Selection(invocation.className(), testName, new String[]{testName}) : null; default: return null; } } else return null; } private List<String> getTestNamesFromChildren(String prefix, List<AstNode> children) { Set<String> validSet = new HashSet<>(); validSet.add("in"); List<String> testNameList = new ArrayList<>(); for (AstNode node : children) { if (node instanceof MethodInvocation && isValidName(node.name(), validSet)) { MethodInvocation invocation = (MethodInvocation) node; String testName = getTestName(prefix, invocation); if (testName != null) { testNameList.add(testName); } } } return testNameList; } private String getTargetString(AstNode target, String postfix) { if (target == null) return postfix; else { if (target instanceof MethodInvocation && target.name().equals("should") && ((MethodInvocation) target).args()[0].canBePartOfTestName()) return "should " + ((MethodInvocation) target).args()[0]; else if (target instanceof MethodInvocation && target.name().equals("must") && ((MethodInvocation) target).args()[0].canBePartOfTestName()) return "must " + ((MethodInvocation) target).args()[0]; else if (target instanceof MethodInvocation && target.name().equals("taggedAs")) { return getTargetString(((MethodInvocation) target).target(), postfix); } else if (target.canBePartOfTestName()) return target.toString(); else return null; } } private String getTestName(String prefix, MethodInvocation invocation) { String name = getTargetString(invocation.target(), ""); return prefix == null || name == null ? null : prefix + " " + name; } }
/*
 * (c) Copyright Christian P. Fries, Germany. All rights reserved. Contact: email@christian-fries.de.
 *
 * Created on 20.05.2005
 */
package net.finmath.marketdata.model.curves;

import java.io.Serializable;
import java.lang.ref.SoftReference;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.threeten.bp.LocalDate;

import net.finmath.interpolation.RationalFunctionInterpolation;
import net.finmath.marketdata.model.AnalyticModelInterface;
import net.finmath.time.FloatingpointDate;

/**
 * This class represents a curve build from a set of points in 2D.
 *
 * It provides different interpolation and extrapolation methods applied to a transformation of the input point,
 * examples are
 * <ul>
 * 	<li>linear interpolation of the input points</li>
 *  <li>linear interpolation of the log of the input points</li>
 *  <li>linear interpolation of the log of the input points divided by their respective time</li>
 * 	<li>cubic spline interpolation of the input points (or a function of the input points) (the curve will be C<sup>1</sup>).</li>
 * 	<li>Akima interpolation of the input points (or a function of the input points).</li>
 * 	<li>etc.</li>
 * </ul>
 *
 * <br>
 *
 * For the interpolation methods provided see {@link net.finmath.marketdata.model.curves.Curve.InterpolationMethod}.
 * For the extrapolation methods provided see {@link net.finmath.marketdata.model.curves.Curve.ExtrapolationMethod}.
 * For the possible interpolation entities see {@link net.finmath.marketdata.model.curves.Curve.InterpolationEntity}.
 *
 * To construct the curve, please use the inner class CurveBuilder (a builder pattern).
 *
 * For a demo on how to construct and/or calibrate a curve see, e.g.
 * net.finmath.tests.marketdata.curves.CurveTest.
 *
 * @author Christian Fries
 */
public class Curve extends AbstractCurve implements Serializable, Cloneable {

	/**
	 * Possible interpolation methods.
	 *
	 * @author Christian Fries
	 */
	public enum InterpolationMethod {
		/** Constant interpolation. Synonym of PIECEWISE_CONSTANT_LEFTPOINT. **/
		PIECEWISE_CONSTANT,
		/** Constant interpolation. Right continuous, i.e. using the value of the left end point of the interval. **/
		PIECEWISE_CONSTANT_LEFTPOINT,
		/** Constant interpolation using the value of the right end point of the interval. **/
		PIECEWISE_CONSTANT_RIGHTPOINT,
		/** Linear interpolation. **/
		LINEAR,
		/** Cubic spline interpolation. **/
		CUBIC_SPLINE,
		/** Akima interpolation (C1 sub-spline interpolation). **/
		AKIMA,
		/** Akima interpolation (C1 sub-spline interpolation) with a smoothing in the weights. **/
		AKIMA_CONTINUOUS,
		/** Harmonic spline interpolation (C1 sub-spline interpolation). **/
		HARMONIC_SPLINE,
		/** Harmonic spline interpolation (C1 sub-spline interpolation) with a monotonic filtering at the boundary points. **/
		HARMONIC_SPLINE_WITH_MONOTONIC_FILTERING
	}

	/**
	 * Possible extrapolation methods.
	 *
	 * @author Christian Fries
	 */
	public enum ExtrapolationMethod {
		/** Extrapolation using the interpolation function of the adjacent interval **/
		DEFAULT,
		/** Constant extrapolation. **/
		CONSTANT,
		/** Linear extrapolation. **/
		LINEAR
	}

	/**
	 * Possible interpolation entities.
	 *
	 * @author Christian Fries
	 */
	public enum InterpolationEntity {
		/** Interpolation is performed on the native point values, i.e. value(t) **/
		VALUE,
		/** Interpolation is performed on the log of the point values, i.e. log(value(t)) **/
		LOG_OF_VALUE,
		/** Interpolation is performed on the log of the point values divided by their respective time, i.e. log(value(t))/t **/
		LOG_OF_VALUE_PER_TIME
	}

	/**
	 * A single curve point (time, value) with a flag marking whether the point is
	 * a calibration parameter. Points are ordered by time.
	 */
	private static class Point implements Comparable<Point>, Serializable {
		private static final long serialVersionUID = 8857387999991917430L;

		public double time;
		public double value;
		public boolean isParameter;

		/**
		 * @param time The time (or x-value) of the point.
		 * @param value The value (or y-value) of the point.
		 * @param isParameter A boolean specifying if this point is considered a "degree of freedom", e.g., in a calibration.
		 */
		public Point(double time, double value, boolean isParameter) {
			super();
			this.time = time;
			this.value = value;
			this.isParameter = isParameter;
		}

		@Override
		public int compareTo(Point point) {
			// Ordering of the curve points with respect to time.
			if(this.time < point.time) return -1;
			if(this.time > point.time) return +1;
			return 0;
		}

		@Override
		public Object clone() {
			return new Point(time,value,isParameter);
		}
	}

	/**
	 * A builder (following the builder pattern) for Curve objects.
	 * Allows to successively construct a curve object by adding points.
	 *
	 * @author Christian Fries
	 */
	public static class CurveBuilder implements CurveBuilderInterface {
		private Curve curve = null;

		/**
		 * Build a curve.
		 */
		public CurveBuilder() {
			curve = new Curve(null, null);
		}

		/**
		 * Build a curve with a given name and given reference date.
		 *
		 * @param name The name of this curve.
		 * @param referenceDate The reference date for this curve, i.e., the date which defined t=0.
		 */
		public CurveBuilder(String name, LocalDate referenceDate) {
			curve = new Curve(name, referenceDate);
		}

		/**
		 * Build a curve by cloning a given curve.
		 *
		 * @param curve A curve to be used as starting point for the new curve.
		 * @throws CloneNotSupportedException Thrown, when the curve could not be cloned.
		 */
		public CurveBuilder(Curve curve) throws CloneNotSupportedException {
			this.curve = (Curve)curve.clone();
		}

		/* (non-Javadoc)
		 * @see net.finmath.marketdata.model.curves.CurveBuilderInterface#build()
		 */
		@Override
		public CurveInterface build() throws CloneNotSupportedException {
			// Hand out the curve and disable this builder (one-shot build).
			Curve buildCurve = curve;
			curve = null;
			return buildCurve;
		}

		/**
		 * Set the interpolation method of the curve.
		 *
		 * @param interpolationMethod The interpolation method of the curve.
		 * @return A self reference to this curve build object.
		 */
		public CurveBuilderInterface setInterpolationMethod(InterpolationMethod interpolationMethod) {
			curve.interpolationMethod = interpolationMethod;
			return this;
		}

		/**
		 * Set the extrapolation method of the curve.
		 *
		 * @param extrapolationMethod The extrapolation method of the curve.
		 * @return A self reference to this curve build object.
		 */
		public CurveBuilderInterface setExtrapolationMethod(ExtrapolationMethod extrapolationMethod) {
			curve.extrapolationMethod = extrapolationMethod;
			return this;
		}

		/**
		 * Set the interpolationEntity of the curve.
		 *
		 * @param interpolationEntity The interpolation entity of the curve.
		 * @return A self reference to this curve build object.
		 */
		public CurveBuilderInterface setInterpolationEntity(InterpolationEntity interpolationEntity) {
			curve.interpolationEntity = interpolationEntity;
			return this;
		}

		/* (non-Javadoc)
		 * @see net.finmath.marketdata.model.curves.CurveBuilderInterface#addPoint(double, double, boolean)
		 */
		@Override
		public CurveBuilderInterface addPoint(double time, double value, boolean isParameter) {
			curve.addPoint(time, value, isParameter);
			return this;
		}
	}

	// All points of the curve, ordered by time.
	private ArrayList<Point> points = new ArrayList<Point>();
	// Subset of points flagged as calibration parameters, ordered by time.
	private ArrayList<Point> pointsBeingParameters = new ArrayList<Point>();
	private InterpolationMethod interpolationMethod = InterpolationMethod.CUBIC_SPLINE;
	private ExtrapolationMethod extrapolationMethod = ExtrapolationMethod.CONSTANT;
	private InterpolationEntity interpolationEntity = InterpolationEntity.LOG_OF_VALUE;

	// Lazily initialized interpolation; invalidated whenever points change.
	private RationalFunctionInterpolation rationalFunctionInterpolation = null;
	private final Object rationalFunctionInterpolationLazyInitLock = new Object();

	// Soft-referenced value cache, so it can be reclaimed under memory pressure.
	private SoftReference<Map<Double, Double>> curveCacheReference = null;

	private static final long serialVersionUID = -4126228588123963885L;
	static NumberFormat	formatterReal = NumberFormat.getInstance(Locale.US);

	/**
	 * Create a curve with a given name, reference date and an interpolation method from given points
	 *
	 * @param name The name of this curve.
	 * @param referenceDate The reference date for this curve, i.e., the date which defined t=0.
	 * @param interpolationMethod The interpolation method used for the curve.
	 * @param extrapolationMethod The extrapolation method used for the curve.
	 * @param interpolationEntity The entity interpolated/extrapolated.
	 * @param times A vector of times.
	 * @param values A vector of corresponding values.
	 */
	public Curve(String name, LocalDate referenceDate, InterpolationMethod interpolationMethod, ExtrapolationMethod extrapolationMethod, InterpolationEntity interpolationEntity, double[] times, double[] values) {
		super(name, referenceDate);

		this.interpolationMethod	= interpolationMethod;
		this.extrapolationMethod	= extrapolationMethod;
		this.interpolationEntity	= interpolationEntity;

		if(times.length != values.length) throw new IllegalArgumentException("Length of times not equal to length of values.");

		for(int i=0; i<times.length; i++) this.addPoint(times[i], values[i], false);
	}

	/**
	 * Create a curve with a given name, reference date and an interpolation method.
	 *
	 * @param name The name of this curve.
	 * @param referenceDate The reference date for this curve, i.e., the date which defined t=0.
	 * @param interpolationMethod The interpolation method used for the curve.
	 * @param extrapolationMethod The extrapolation method used for the curve.
	 * @param interpolationEntity The entity interpolated/extrapolated.
	 */
	protected Curve(String name, LocalDate referenceDate, InterpolationMethod interpolationMethod, ExtrapolationMethod extrapolationMethod, InterpolationEntity interpolationEntity) {
		super(name, referenceDate);

		this.interpolationMethod	= interpolationMethod;
		this.extrapolationMethod	= extrapolationMethod;
		this.interpolationEntity	= interpolationEntity;
	}

	/**
	 * Create a curve with a given name, reference date.
	 *
	 * @param name The name of this curve.
	 * @param referenceDate The reference date for this curve, i.e., the date which defined t=0.
	 */
	private Curve(String name, LocalDate referenceDate) {
		super(name, referenceDate);
	}

	@Override
	public double getValue(double time) {
		return getValue(null, time);
	}

	@Override
	public double getValue(AnalyticModelInterface model, double time) {
		// Re-create the cache if the soft reference has been collected.
		Map<Double, Double> curveCache = curveCacheReference != null ? curveCacheReference.get() : null;
		if(curveCache == null) {
			curveCache = new ConcurrentHashMap<Double, Double>();
			curveCacheReference = new SoftReference<Map<Double,Double>>(curveCache);
		}

		Double valueFromCache = curveCache.get(time);
		if(valueFromCache != null) return valueFromCache.doubleValue();

		double value = valueFromInterpolationEntity(getInterpolationEntityValue(time), time);
		curveCache.put(time, value);
		return value;
	}

	/**
	 * Evaluates the (lazily constructed) interpolation of the interpolation-entity
	 * values at the given time.
	 */
	private double getInterpolationEntityValue(double time) {
		synchronized(rationalFunctionInterpolationLazyInitLock) {
			// Lazy initialization of interpolation function
			if(rationalFunctionInterpolation == null) {
				double[] pointsArray = new double[points.size()];
				double[] valuesArray = new double[points.size()];
				for(int i=0; i<points.size(); i++) {
					pointsArray[i] = points.get(i).time;
					valuesArray[i] = points.get(i).value;
				}
				rationalFunctionInterpolation = new RationalFunctionInterpolation(
						pointsArray,
						valuesArray,
						RationalFunctionInterpolation.InterpolationMethod.valueOf(this.interpolationMethod.toString()),
						RationalFunctionInterpolation.ExtrapolationMethod.valueOf(this.extrapolationMethod.toString())
						);
			}
		}
		return rationalFunctionInterpolation.getValue(time);
	}

	/**
	 * Add a point to this curve. The method will throw an exception if the point
	 * is already part of the curve.
	 *
	 * @param time The x<sub>i</sub> in y<sub>i</sub> = f(x<sub>i</sub>).
	 * @param value The y<sub>i</sub> in y<sub>i</sub> = f(x<sub>i</sub>).
	 * @param isParameter If true, then this point is served via {@link #getParameter()} and changed via {@link #getCloneForParameter(double[])}, i.e., it can be calibrated.
	 */
	protected void addPoint(double time, double value, boolean isParameter) {
		synchronized (rationalFunctionInterpolationLazyInitLock) {
			if(interpolationEntity == InterpolationEntity.LOG_OF_VALUE_PER_TIME && time == 0) {
				// log(value)/t is undefined at t=0; only the trivial value 1.0 is tolerated (and ignored).
				if(value == 1.0 && isParameter == false) return;
				else throw new IllegalArgumentException("The interpolation method LOG_OF_VALUE_PER_TIME does not allow to add a value at time = 0 other than 1.0 (received " + value + ").");
			}

			double interpolationEntityValue = interpolationEntityFromValue(value, time);

			int index = getTimeIndex(time);
			if(index >= 0) {
				if(points.get(index).value == interpolationEntityValue) return;			// Already in list
				else if(isParameter) return;
				else throw new RuntimeException("Trying to add a value for a time for which another value already exists.");
			}
			else {
				// Insert the new point, retain ordering. binarySearch returned -(insertionPoint)-1.
				Point point = new Point(time, interpolationEntityValue, isParameter);
				points.add(-index-1, point);

				if(isParameter) {
					// Add this point also to the list of parameters
					int parameterIndex = getParameterIndex(time);
					// BUGFIX: the exception was previously constructed but never thrown,
					// silently ignoring an inconsistent parameter list.
					if(parameterIndex >= 0) throw new RuntimeException("Curve inconsistent.");
					pointsBeingParameters.add(-parameterIndex-1, point);
				}
			}

			// Invalidate the interpolation and the value cache.
			this.rationalFunctionInterpolation = null;
			this.curveCacheReference = null;
		}
	}

	/**
	 * Returns the interpolation method used by this curve.
	 *
	 * @return The interpolation method used by this curve.
	 */
	public InterpolationMethod getInterpolationMethod() {
		return interpolationMethod;
	}

	/**
	 * Returns the extrapolation method used by this curve.
	 *
	 * @return The extrapolation method used by this curve.
	 */
	public ExtrapolationMethod getExtrapolationMethod() {
		return extrapolationMethod;
	}

	/**
	 * Returns the interpolation entity used by this curve.
	 *
	 * @return The interpolation entity used by this curve.
	 */
	public InterpolationEntity getInterpolationEntity() {
		return interpolationEntity;
	}

	/**
	 * Returns the index of the point with the given time in {@link #points}, or
	 * the binary-search insertion code (-(insertionPoint)-1) if absent.
	 */
	protected int getTimeIndex(double time) {
		Point point = new Point(time, Double.NaN, false);
		return java.util.Collections.binarySearch(points, point);
	}

	/**
	 * Returns the index of the point with the given time in {@link #pointsBeingParameters},
	 * or the binary-search insertion code (-(insertionPoint)-1) if absent.
	 */
	protected int getParameterIndex(double time) {
		Point point = new Point(time, Double.NaN, false);
		return java.util.Collections.binarySearch(pointsBeingParameters, point);
	}

	@Override
	public double[] getParameter() {
		double[] parameters = new double[pointsBeingParameters.size()];
		for(int i=0; i<pointsBeingParameters.size(); i++) {
			parameters[i] = valueFromInterpolationEntity(pointsBeingParameters.get(i).value, pointsBeingParameters.get(i).time);
		}
		return parameters;
	}

	@Override
	public void setParameter(double[] parameter) {
		throw new UnsupportedOperationException("This class is immutable. Use getCloneForParameter(double[]) instead.");
	}

	// Mutates the parameter points in place; only used on fresh clones, see getCloneForParameter.
	private void setParameterPrivate(double[] parameter) {
		for(int i=0; i<pointsBeingParameters.size(); i++) {
			pointsBeingParameters.get(i).value = interpolationEntityFromValue(parameter[i], pointsBeingParameters.get(i).time);
		}
		// Invalidate the interpolation and the value cache.
		this.rationalFunctionInterpolation = null;
		this.curveCacheReference = null;
	}

	/**
	 * Transforms a native curve value into the interpolation-entity representation
	 * (identity, log, or log per time).
	 */
	private double interpolationEntityFromValue(double value, double time) {
		switch(interpolationEntity) {
		case VALUE:
		default:
			return value;
		case LOG_OF_VALUE:
			return Math.log(Math.max(value,0));
		case LOG_OF_VALUE_PER_TIME:
			if(time == 0)	throw new IllegalArgumentException("The interpolation method LOG_OF_VALUE_PER_TIME does not allow to add a value at time = 0.");
			else			return Math.log(Math.max(value,0)) / time;
		}
	}

	/**
	 * Transforms an interpolation-entity value back into the native curve value
	 * (inverse of {@link #interpolationEntityFromValue(double, double)}).
	 */
	private double valueFromInterpolationEntity(double interpolationEntityValue, double time) {
		switch(interpolationEntity) {
		case VALUE:
		default:
			return interpolationEntityValue;
		case LOG_OF_VALUE:
			return Math.exp(interpolationEntityValue);
		case LOG_OF_VALUE_PER_TIME:
			return Math.exp(interpolationEntityValue * time);
		}
	}

	@Override
	public Curve clone() throws CloneNotSupportedException {
		Curve newCurve = (Curve) super.clone();

		// Deep-copy the points so that the clone can be mutated independently.
		newCurve.points					= new ArrayList<Point>();
		newCurve.pointsBeingParameters	= new ArrayList<Point>();
		newCurve.rationalFunctionInterpolation = null;
		newCurve.curveCacheReference = null;
		for(Point point : points) {
			Point newPoint = (Point) point.clone();
			newCurve.points.add(newPoint);
			if(point.isParameter) newCurve.pointsBeingParameters.add(newPoint);
		}

		return newCurve;
	}

	@Override
	public CurveInterface getCloneForParameter(double[] parameter) throws CloneNotSupportedException {
		// Shortcut: no change in parameters, no clone needed.
		if(Arrays.equals(parameter, getParameter())) return this;

		Curve newCurve = (Curve) this.clone();
		newCurve.setParameterPrivate(parameter);

		return newCurve;
	}

	@Override
	public CurveBuilderInterface getCloneBuilder() throws CloneNotSupportedException {
		CurveBuilder curveBuilder = new CurveBuilder(this);
		return curveBuilder;
	}

	@Override
	public String toString() {
		/*
		 * Pretty print curve (appended to standard toString)
		 */
		StringBuilder curveTableString = new StringBuilder();
		NumberFormat formatTime = new DecimalFormat("0.00000000E0");		// Floating point time is accurate to 3+5 digits.
		for (Point point : points) {
			curveTableString.append(formatTime.format(point.time) + "\t");
			curveTableString.append(FloatingpointDate.getDateFromFloatingPointDate(getReferenceDate(), point.time) + "\t");
			curveTableString.append(valueFromInterpolationEntity(point.value, point.time) + "\n");
		}

		return "Curve [points=" + points
				+ ", pointsBeingParameters=" + pointsBeingParameters
				+ ", interpolationMethod=" + interpolationMethod
				+ ", extrapolationMethod=" + extrapolationMethod
				+ ", interpolationEntity=" + interpolationEntity
				+ ", rationalFunctionInterpolation=" + rationalFunctionInterpolation
				+ ", toString()=" + super.toString()
				+ ",\n" + curveTableString + "]";
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.tools; import java.awt.BorderLayout; import java.awt.Dimension; import java.awt.FileDialog; import java.awt.Toolkit; import java.awt.datatransfer.DataFlavor; import java.awt.datatransfer.Transferable; import java.awt.datatransfer.UnsupportedFlavorException; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.awt.event.WindowEvent; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Method; import java.net.URL; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.swing.AbstractAction; import javax.swing.Action; import javax.swing.JComponent; import javax.swing.JFrame; import javax.swing.JMenu; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.KeyStroke; import javax.swing.TransferHandler; import javax.swing.UIManager; import javax.swing.border.BevelBorder; import javax.swing.event.TreeSelectionEvent; import 
javax.swing.event.TreeSelectionListener; import javax.swing.filechooser.FileFilter; import javax.swing.tree.TreePath; import org.apache.pdfbox.cos.COSArray; import org.apache.pdfbox.cos.COSBase; import org.apache.pdfbox.cos.COSBoolean; import org.apache.pdfbox.cos.COSDictionary; import org.apache.pdfbox.cos.COSFloat; import org.apache.pdfbox.cos.COSInteger; import org.apache.pdfbox.cos.COSName; import org.apache.pdfbox.cos.COSNull; import org.apache.pdfbox.cos.COSObject; import org.apache.pdfbox.cos.COSStream; import org.apache.pdfbox.cos.COSString; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.tools.gui.ArrayEntry; import org.apache.pdfbox.tools.gui.DocumentEntry; import org.apache.pdfbox.tools.gui.MapEntry; import org.apache.pdfbox.tools.gui.OSXAdapter; import org.apache.pdfbox.tools.gui.PDFTreeCellRenderer; import org.apache.pdfbox.tools.gui.PDFTreeModel; import org.apache.pdfbox.tools.gui.PageEntry; import org.apache.pdfbox.tools.pdfdebugger.colorpane.CSArrayBased; import org.apache.pdfbox.tools.pdfdebugger.colorpane.CSDeviceN; import org.apache.pdfbox.tools.pdfdebugger.colorpane.CSIndexed; import org.apache.pdfbox.tools.pdfdebugger.colorpane.CSSeparation; import org.apache.pdfbox.tools.pdfdebugger.flagbitspane.FlagBitsPane; import org.apache.pdfbox.tools.pdfdebugger.fontencodingpane.FontEncodingPaneController; import org.apache.pdfbox.tools.pdfdebugger.pagepane.PagePane; import org.apache.pdfbox.tools.pdfdebugger.streampane.StreamPane; import org.apache.pdfbox.tools.pdfdebugger.treestatus.TreeStatus; import org.apache.pdfbox.tools.pdfdebugger.treestatus.TreeStatusPane; import org.apache.pdfbox.tools.pdfdebugger.ui.RotationMenu; import org.apache.pdfbox.tools.pdfdebugger.ui.Tree; import org.apache.pdfbox.tools.pdfdebugger.ui.ZoomMenu; import org.apache.pdfbox.tools.util.FileOpenSaveDialog; import org.apache.pdfbox.tools.util.RecentFiles; /** * PDF Debugger. 
* * @author wurtz * @author Ben Litchfield */ public class PDFDebugger extends JFrame { private static final Set<COSName> SPECIALCOLORSPACES = new HashSet<COSName>(Arrays.asList(COSName.INDEXED, COSName.SEPARATION, COSName.DEVICEN)); private static final Set<COSName> OTHERCOLORSPACES = new HashSet<COSName>(Arrays.asList(COSName.ICCBASED, COSName.PATTERN, COSName.CALGRAY, COSName.CALRGB, COSName.LAB)); private static final String PASSWORD = "-password"; private static final int SHORCUT_KEY_MASK = Toolkit.getDefaultToolkit().getMenuShortcutKeyMask(); private TreeStatusPane statusPane; private RecentFiles recentFiles; private boolean isPageMode; private PDDocument document; private String currentFilePath; private static final String OS_NAME = System.getProperty("os.name").toLowerCase(); private static final boolean IS_MAC_OS = OS_NAME.startsWith("mac os x"); private JScrollPane jScrollPane1; private JScrollPane jScrollPane2; private javax.swing.JSplitPane jSplitPane1; private javax.swing.JTextPane jTextPane1; private Tree tree; private final JPanel documentPanel = new JPanel(); private javax.swing.JMenuBar menuBar; // file menu private JMenu fileMenu; private JMenuItem openMenuItem; private JMenuItem openUrlMenuItem; private JMenuItem saveAsMenuItem; private JMenuItem saveMenuItem; private JMenu recentFilesMenu; private JMenuItem exitMenuItem; // edit menu private JMenu editMenu; private JMenuItem copyMenuItem; private JMenuItem pasteMenuItem; private JMenuItem cutMenuItem; private JMenuItem deleteMenuItem; // edit > find meu private JMenu findMenu; private JMenuItem findMenuItem; private JMenuItem findNextMenuItem; private JMenuItem findPreviousMenuItem; // view menu private JMenu viewMenu; private JMenuItem viewModeItem; /** * Constructor. */ public PDFDebugger() { initComponents(); } /** * This method is called from within the constructor to * initialize the form. * WARNING: Do NOT modify this code. 
The content of this method is * always regenerated by the Form Editor. */ private void initComponents() { jSplitPane1 = new javax.swing.JSplitPane(); jScrollPane1 = new JScrollPane(); tree = new Tree(this); jScrollPane2 = new JScrollPane(); jTextPane1 = new javax.swing.JTextPane(); menuBar = new javax.swing.JMenuBar(); tree.setCellRenderer(new PDFTreeCellRenderer()); tree.setModel(null); setTitle("PDFBox Debugger"); addWindowListener(new java.awt.event.WindowAdapter() { @Override public void windowOpened(WindowEvent windowEvent) { tree.requestFocusInWindow(); super.windowOpened(windowEvent); } @Override public void windowClosing(WindowEvent evt) { exitForm(evt); } }); jScrollPane1.setBorder(new BevelBorder(BevelBorder.RAISED)); jScrollPane1.setPreferredSize(new Dimension(300, 500)); tree.addTreeSelectionListener(new TreeSelectionListener() { @Override public void valueChanged(TreeSelectionEvent evt) { jTree1ValueChanged(evt); } }); jScrollPane1.setViewportView(tree); jSplitPane1.setRightComponent(jScrollPane2); jSplitPane1.setDividerSize(3); jScrollPane2.setPreferredSize(new Dimension(300, 500)); jScrollPane2.setViewportView(jTextPane1); jSplitPane1.setLeftComponent(jScrollPane1); JScrollPane documentScroller = new JScrollPane(); documentScroller.setViewportView(documentPanel); statusPane = new TreeStatusPane(tree); statusPane.getPanel().setBorder(new BevelBorder(BevelBorder.RAISED)); statusPane.getPanel().setPreferredSize(new Dimension(300, 25)); getContentPane().add(statusPane.getPanel(), BorderLayout.PAGE_START); getContentPane().add(jSplitPane1, BorderLayout.CENTER); // create menus menuBar.add(createFileMenu()); menuBar.add(createEditMenu()); menuBar.add(createViewMenu()); setJMenuBar(menuBar); Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize(); setBounds((screenSize.width-700)/2, (screenSize.height-600)/2, 700, 600); // drag and drop to open files setTransferHandler(new TransferHandler() { @Override public boolean canImport(TransferSupport 
transferSupport) { return transferSupport.isDataFlavorSupported(DataFlavor.javaFileListFlavor); } @Override @SuppressWarnings("unchecked") public boolean importData(TransferSupport transferSupport) { try { Transferable transferable = transferSupport.getTransferable(); List<File> files = (List<File>) transferable.getTransferData( DataFlavor.javaFileListFlavor); readPDFFile(files.get(0), ""); return true; } catch (IOException e) { throw new RuntimeException(e); } catch (UnsupportedFlavorException e) { throw new RuntimeException(e); } } }); // Mac OS X file open/quit handler if (IS_MAC_OS) { try { Method osxOpenFiles = getClass().getDeclaredMethod("osxOpenFiles", String.class); osxOpenFiles.setAccessible(true); OSXAdapter.setFileHandler(this, osxOpenFiles); Method osxQuit = getClass().getDeclaredMethod("osxQuit"); osxQuit.setAccessible(true); OSXAdapter.setQuitHandler(this, osxQuit); } catch (NoSuchMethodException e) { throw new RuntimeException(e); } } } private JMenu createFileMenu() { fileMenu = new JMenu(); fileMenu.setText("File"); openMenuItem = new JMenuItem(); openMenuItem.setText("Open..."); openMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_O, SHORCUT_KEY_MASK)); openMenuItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent evt) { openMenuItemActionPerformed(evt); } }); fileMenu.add(openMenuItem); openUrlMenuItem = new JMenuItem(); openUrlMenuItem.setText("Open URL..."); openUrlMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_U, SHORCUT_KEY_MASK)); openUrlMenuItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent evt) { String urlString = JOptionPane.showInputDialog("Enter an URL"); try { readPDFurl(urlString, ""); } catch (IOException e) { throw new RuntimeException(e); } } }); fileMenu.add(openUrlMenuItem); try { recentFiles = new RecentFiles(this.getClass(), 5); } catch (Exception e) { throw new RuntimeException(e); } recentFilesMenu = new JMenu(); 
recentFilesMenu.setText("Open Recent"); recentFilesMenu.setEnabled(false); addRecentFileItems(); fileMenu.add(recentFilesMenu); exitMenuItem = new JMenuItem(); exitMenuItem.setText("Exit"); exitMenuItem.setAccelerator(KeyStroke.getKeyStroke("alt F4")); exitMenuItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent evt) { exitMenuItemActionPerformed(evt); } }); if (!IS_MAC_OS) { fileMenu.addSeparator(); fileMenu.add(exitMenuItem); } return fileMenu; } private JMenu createEditMenu() { editMenu = new JMenu(); editMenu.setText("Edit"); cutMenuItem = new JMenuItem(); cutMenuItem.setText("Cut"); cutMenuItem.setEnabled(false); editMenu.add(cutMenuItem); copyMenuItem = new JMenuItem(); copyMenuItem.setText("Copy"); copyMenuItem.setEnabled(false); editMenu.add(copyMenuItem); pasteMenuItem = new JMenuItem(); pasteMenuItem.setText("Paste"); pasteMenuItem.setEnabled(false); editMenu.add(pasteMenuItem); deleteMenuItem = new JMenuItem(); deleteMenuItem.setText("Delete"); deleteMenuItem.setEnabled(false); editMenu.add(deleteMenuItem); editMenu.addSeparator(); editMenu.add(createFindMenu()); return editMenu; } private JMenu createViewMenu() { viewMenu = new JMenu(); viewMenu.setText("View"); viewModeItem = new JMenuItem(); viewModeItem.setText("Show Pages"); viewModeItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent actionEvent) { if (isPageMode) { viewModeItem.setText("Show Pages"); isPageMode = false; } else { viewModeItem.setText("Show Internal Structure"); isPageMode = true; } if (document != null) { initTree(); } } }); viewMenu.add(viewModeItem); ZoomMenu zoomMenu = ZoomMenu.getInstance(); zoomMenu.setEnableMenu(false); viewMenu.add(zoomMenu.getMenu()); RotationMenu rotationMenu = RotationMenu.getInstance(); rotationMenu.setEnableMenu(false); viewMenu.add(rotationMenu.getMenu()); return viewMenu; } private JMenu createFindMenu() { findMenu = new JMenu("Find"); findMenu.setEnabled(false); 
findMenuItem = new JMenuItem(); findMenuItem.setActionCommand("find"); findMenuItem.setText("Find..."); findMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_F, SHORCUT_KEY_MASK)); findNextMenuItem = new JMenuItem(); findNextMenuItem.setText("Find Next"); if (IS_MAC_OS) { findNextMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_G, SHORCUT_KEY_MASK)); } else { findNextMenuItem.setAccelerator(KeyStroke.getKeyStroke("F3")); } findPreviousMenuItem = new JMenuItem(); findPreviousMenuItem.setText("Find Previous"); if (IS_MAC_OS) { findPreviousMenuItem.setAccelerator(KeyStroke.getKeyStroke( KeyEvent.VK_G, SHORCUT_KEY_MASK | InputEvent.SHIFT_DOWN_MASK)); } else { findPreviousMenuItem.setAccelerator(KeyStroke.getKeyStroke( KeyEvent.VK_F3, InputEvent.SHIFT_DOWN_MASK)); } findMenu.add(findMenuItem); findMenu.add(findNextMenuItem); findMenu.add(findPreviousMenuItem); return findMenu; } /** * Returns the File menu. */ public JMenu getFindMenu() { return findMenu; } /** * Returns the Edit > Find > Find menu item. */ public JMenuItem getFindMenuItem() { return findMenuItem; } /** * Returns the Edit > Find > Find Next menu item. */ public JMenuItem getFindNextMenuItem() { return findNextMenuItem; } /** * Returns the Edit > Find > Find Previous menu item. */ public JMenuItem getFindPreviousMenuItem() { return findPreviousMenuItem; } /** * This method is called via reflection on Mac OS X. */ private void osxOpenFiles(String filename) { try { readPDFFile(filename, ""); } catch (IOException e) { throw new RuntimeException(e); } } /** * This method is called via reflection on Mac OS X. 
*/ private void osxQuit() { exitMenuItemActionPerformed(null); } private void openMenuItemActionPerformed(ActionEvent evt) { try { if (IS_MAC_OS) { FileDialog openDialog = new FileDialog(this, "Open"); openDialog.setFilenameFilter(new FilenameFilter() { @Override public boolean accept(File file, String s) { return file.getName().toLowerCase().endsWith(".pdf"); } }); openDialog.setVisible(true); if (openDialog.getFile() != null) { readPDFFile(openDialog.getFile(), ""); } } else { String[] extensions = new String[] {"pdf", "PDF"}; FileFilter pdfFilter = new ExtensionFileFilter(extensions, "PDF Files (*.pdf)"); FileOpenSaveDialog openDialog = new FileOpenSaveDialog(this, pdfFilter); File file = openDialog.openFile(); if (file != null) { readPDFFile(file, ""); } } } catch (IOException e) { throw new RuntimeException(e); } } private void jTree1ValueChanged(TreeSelectionEvent evt) { TreePath path = tree.getSelectionPath(); if (path != null) { try { Object selectedNode = path.getLastPathComponent(); if (isPage(selectedNode)) { showPage(selectedNode); return; } if (isSpecialColorSpace(selectedNode) || isOtherColorSpace(selectedNode)) { showColorPane(selectedNode); return; } if (path.getParentPath() != null && isFlagNode(selectedNode, path.getParentPath().getLastPathComponent())) { Object parentNode = path.getParentPath().getLastPathComponent(); showFlagPane(parentNode, selectedNode); return; } if (isStream(selectedNode)) { showStream((COSStream)getUnderneathObject(selectedNode), path); return; } if (isFont(selectedNode)) { showFont(selectedNode, path); return; } if (!jSplitPane1.getRightComponent().equals(jScrollPane2)) { jSplitPane1.setRightComponent(jScrollPane2); } jTextPane1.setText(convertToString(selectedNode)); } catch (Exception e) { e.printStackTrace(); throw new RuntimeException(e); } } } private boolean isSpecialColorSpace(Object selectedNode) { selectedNode = getUnderneathObject(selectedNode); if (selectedNode instanceof COSArray && ((COSArray) 
selectedNode).size() > 0) { COSBase arrayEntry = ((COSArray)selectedNode).get(0); if (arrayEntry instanceof COSName) { COSName name = (COSName) arrayEntry; return SPECIALCOLORSPACES.contains(name); } } return false; } private boolean isOtherColorSpace(Object selectedNode) { selectedNode = getUnderneathObject(selectedNode); if (selectedNode instanceof COSArray && ((COSArray) selectedNode).size() > 0) { COSBase arrayEntry = ((COSArray)selectedNode).get(0); if (arrayEntry instanceof COSName) { COSName name = (COSName) arrayEntry; return OTHERCOLORSPACES.contains(name); } } return false; } private boolean isPage(Object selectedNode) { selectedNode = getUnderneathObject(selectedNode); if (selectedNode instanceof COSDictionary) { COSDictionary dict = (COSDictionary) selectedNode; COSBase typeItem = dict.getItem(COSName.TYPE); if (COSName.PAGE.equals(typeItem)) { return true; } } else if (selectedNode instanceof PageEntry) { return true; } return false; } private boolean isFlagNode(Object selectedNode, Object parentNode) { if (selectedNode instanceof MapEntry) { Object key = ((MapEntry) selectedNode).getKey(); return (COSName.FLAGS.equals(key) && isFontDescriptor(parentNode)) || (COSName.F.equals(key) && isAnnot(parentNode)) || COSName.FF.equals(key) || COSName.PANOSE.equals(key); } return false; } private boolean isFontDescriptor(Object obj) { Object underneathObject = getUnderneathObject(obj); return underneathObject instanceof COSDictionary && ((COSDictionary) underneathObject).containsKey(COSName.TYPE) && ((COSDictionary) underneathObject).getCOSName(COSName.TYPE).equals(COSName.FONT_DESC); } private boolean isAnnot(Object obj) { Object underneathObject = getUnderneathObject(obj); return underneathObject instanceof COSDictionary && ((COSDictionary) underneathObject).containsKey(COSName.TYPE) && ((COSDictionary) underneathObject).getCOSName(COSName.TYPE).equals(COSName.ANNOT); } private boolean isStream(Object selectedNode) { return getUnderneathObject(selectedNode) 
instanceof COSStream; } private boolean isFont(Object selectedNode) { selectedNode = getUnderneathObject(selectedNode); if (selectedNode instanceof COSDictionary) { COSDictionary dic = (COSDictionary)selectedNode; return dic.containsKey(COSName.TYPE) && dic.getCOSName(COSName.TYPE).equals(COSName.FONT) && !isCIDFont(dic); } return false; } private boolean isCIDFont(COSDictionary dic) { return dic.containsKey(COSName.SUBTYPE) && (dic.getCOSName(COSName.SUBTYPE).equals(COSName.CID_FONT_TYPE0) || dic.getCOSName(COSName.SUBTYPE).equals(COSName.CID_FONT_TYPE2)); } /** * Show a Panel describing color spaces in more detail and interactive way. * @param csNode the special color space containing node. */ private void showColorPane(Object csNode) { csNode = getUnderneathObject(csNode); if (csNode instanceof COSArray && ((COSArray) csNode).size() > 0) { COSArray array = (COSArray)csNode; COSBase arrayEntry = array.get(0); if (arrayEntry instanceof COSName) { COSName csName = (COSName) arrayEntry; if (csName.equals(COSName.SEPARATION)) { jSplitPane1.setRightComponent(new CSSeparation(array).getPanel()); } else if (csName.equals(COSName.DEVICEN)) { jSplitPane1.setRightComponent(new CSDeviceN(array).getPanel()); } else if (csName.equals(COSName.INDEXED)) { jSplitPane1.setRightComponent(new CSIndexed(array).getPanel()); } else if (OTHERCOLORSPACES.contains(csName)) { jSplitPane1.setRightComponent(new CSArrayBased(array).getPanel()); } } } } private void showPage(Object selectedNode) { selectedNode = getUnderneathObject(selectedNode); COSDictionary page; if (selectedNode instanceof COSDictionary) { page = (COSDictionary) selectedNode; } else { page = ((PageEntry) selectedNode).getDict(); } COSBase typeItem = page.getItem(COSName.TYPE); if (COSName.PAGE.equals(typeItem)) { PagePane pagePane = new PagePane(document, page); jSplitPane1.setRightComponent(new JScrollPane(pagePane.getPanel())); } } private void showFlagPane(Object parentNode, Object selectedNode) { parentNode = 
getUnderneathObject(parentNode); if (parentNode instanceof COSDictionary) { selectedNode = ((MapEntry)selectedNode).getKey(); selectedNode = getUnderneathObject(selectedNode); FlagBitsPane flagBitsPane = new FlagBitsPane((COSDictionary) parentNode, (COSName) selectedNode); jSplitPane1.setRightComponent(flagBitsPane.getPane()); } } private void showStream(COSStream stream, TreePath path) { boolean isContentStream = false; boolean isThumb = false; COSName key = getNodeKey(path.getLastPathComponent()); COSName parentKey = getNodeKey(path.getParentPath().getLastPathComponent()); COSDictionary resourcesDic = null; if (COSName.CONTENTS.equals(key)) { Object pageObj = path.getParentPath().getLastPathComponent(); COSDictionary page = (COSDictionary) getUnderneathObject(pageObj); resourcesDic = (COSDictionary) page.getDictionaryObject(COSName.RESOURCES); isContentStream = true; } else if (COSName.CONTENTS.equals(parentKey) || COSName.CHAR_PROCS.equals(parentKey)) { Object pageObj = path.getParentPath().getParentPath().getLastPathComponent(); COSDictionary page = (COSDictionary) getUnderneathObject(pageObj); resourcesDic = (COSDictionary) page.getDictionaryObject(COSName.RESOURCES); isContentStream = true; } else if (COSName.FORM.equals(stream.getCOSName(COSName.SUBTYPE)) || COSName.PATTERN.equals(stream.getCOSName(COSName.TYPE))) { if (stream.containsKey(COSName.RESOURCES)) { resourcesDic = (COSDictionary) stream.getDictionaryObject(COSName.RESOURCES); } isContentStream = true; } else if (COSName.IMAGE.equals((stream).getCOSName(COSName.SUBTYPE))) { Object resourcesObj = path.getParentPath().getParentPath().getLastPathComponent(); resourcesDic = (COSDictionary) getUnderneathObject(resourcesObj); } else if (COSName.THUMB.equals(key)) { resourcesDic = null; isThumb = true; } StreamPane streamPane = new StreamPane(stream, isContentStream, isThumb, resourcesDic); jSplitPane1.setRightComponent(streamPane.getPanel()); } private void showFont(Object selectedNode, TreePath path) { 
COSName fontName = getNodeKey(selectedNode); COSDictionary resourceDic = (COSDictionary) getUnderneathObject(path.getParentPath().getParentPath().getLastPathComponent()); FontEncodingPaneController fontEncodingPaneController = new FontEncodingPaneController(fontName, resourceDic); jSplitPane1.setRightComponent(fontEncodingPaneController.getPane()); } private COSName getNodeKey(Object selectedNode) { if (selectedNode instanceof MapEntry) { return ((MapEntry) selectedNode).getKey(); } return null; } private Object getUnderneathObject(Object selectedNode) { if (selectedNode instanceof MapEntry) { selectedNode = ((MapEntry) selectedNode).getValue(); } else if (selectedNode instanceof ArrayEntry) { selectedNode = ((ArrayEntry) selectedNode).getValue(); } else if (selectedNode instanceof PageEntry) { selectedNode = ((PageEntry) selectedNode).getDict(); } if (selectedNode instanceof COSObject) { selectedNode = ((COSObject) selectedNode).getObject(); } return selectedNode; } private String convertToString( Object selectedNode ) { String data = null; if(selectedNode instanceof COSBoolean) { data = "" + ((COSBoolean)selectedNode).getValue(); } else if( selectedNode instanceof COSFloat ) { data = "" + ((COSFloat)selectedNode).floatValue(); } else if( selectedNode instanceof COSNull ) { data = "null"; } else if( selectedNode instanceof COSInteger ) { data = "" + ((COSInteger)selectedNode).intValue(); } else if( selectedNode instanceof COSName ) { data = "" + ((COSName)selectedNode).getName(); } else if( selectedNode instanceof COSString ) { String text = ((COSString) selectedNode).getString(); // display unprintable strings as hex for (char c : text.toCharArray()) { if (Character.isISOControl(c)) { text = "<" + ((COSString) selectedNode).toHexString() + ">"; break; } } data = "" + text; } else if( selectedNode instanceof COSStream ) { try { COSStream stream = (COSStream)selectedNode; InputStream ioStream = stream.getUnfilteredStream(); ByteArrayOutputStream byteArray = new 
ByteArrayOutputStream(); byte[] buffer = new byte[1024]; int amountRead; while( (amountRead = ioStream.read( buffer, 0, buffer.length ) ) != -1 ) { byteArray.write( buffer, 0, amountRead ); } data = byteArray.toString(); } catch( IOException e ) { throw new RuntimeException(e); } } else if( selectedNode instanceof MapEntry ) { data = convertToString( ((MapEntry)selectedNode).getValue() ); } else if( selectedNode instanceof ArrayEntry ) { data = convertToString( ((ArrayEntry)selectedNode).getValue() ); } return data; } private void exitMenuItemActionPerformed(ActionEvent evt) { if( document != null ) { try { document.close(); if (!currentFilePath.startsWith("http")) { recentFiles.addFile(currentFilePath); } recentFiles.close(); } catch( IOException e ) { throw new RuntimeException(e); } } System.exit(0); } /** * Exit the Application. */ private void exitForm(WindowEvent evt) { if( document != null ) { try { document.close(); if (!currentFilePath.startsWith("http")) { recentFiles.addFile(currentFilePath); } recentFiles.close(); } catch( IOException e ) { throw new RuntimeException(e); } } System.exit(0); } /** * Entry point. * * @param args the command line arguments * @throws Exception If anything goes wrong. 
*/ public static void main(String[] args) throws Exception { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); System.setProperty("apple.laf.useScreenMenuBar", "true"); // handle uncaught exceptions Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { @Override public void uncaughtException(Thread thread, Throwable throwable) { StringBuilder sb = new StringBuilder(); sb.append(throwable.toString()); for (StackTraceElement element : throwable.getStackTrace()) { sb.append('\n'); sb.append(element); } JOptionPane.showMessageDialog(null, "Error: " + sb.toString(), "Error", JOptionPane.ERROR_MESSAGE); } }); final PDFDebugger viewer = new PDFDebugger(); // open file, if any String filename = null; String password = ""; for( int i = 0; i < args.length; i++ ) { if( args[i].equals( PASSWORD ) ) { i++; if( i >= args.length ) { usage(); } password = args[i]; } else { filename = args[i]; } } if (filename != null) { File file = new File(filename); if (file.exists()) { viewer.readPDFFile( filename, password ); } } viewer.setVisible(true); } private void readPDFFile(String filePath, String password) throws IOException { File file = new File(filePath); readPDFFile(file, password); } private void readPDFFile(File file, String password) throws IOException { if( document != null ) { document.close(); if (!currentFilePath.startsWith("http")) { recentFiles.addFile(currentFilePath); } } currentFilePath = file.getPath(); recentFiles.removeFile(file.getPath()); parseDocument( file, password ); initTree(); if (IS_MAC_OS) { setTitle(file.getName()); getRootPane().putClientProperty("Window.documentFile", file); } else { setTitle("PDF Debugger - " + file.getAbsolutePath()); } addRecentFileItems(); } private void readPDFurl(String urlString, String password) throws IOException { if (document != null) { document.close(); if (!currentFilePath.startsWith("http")) { recentFiles.addFile(currentFilePath); } } currentFilePath = urlString; URL url = new 
URL(urlString); document = PDDocument.load(url.openStream(), password); initTree(); if (IS_MAC_OS) { setTitle(urlString); } else { setTitle("PDF Debugger - " + urlString); } addRecentFileItems(); } private void initTree() { TreeStatus treeStatus = new TreeStatus(document.getDocument().getTrailer()); statusPane.updateTreeStatus(treeStatus); if (isPageMode) { File file = new File(currentFilePath); DocumentEntry documentEntry = new DocumentEntry(document, file.getName()); tree.setModel(new PDFTreeModel(documentEntry)); // Root/Pages/Kids/[0] is not always the first page, so use the first row instead: tree.setSelectionPath(tree.getPathForRow(1)); } else { tree.setModel(new PDFTreeModel(document)); tree.setSelectionPath(treeStatus.getPathForString("Root")); } } /** * This will parse a document. * * @param file The file addressing the document. * * @throws IOException If there is an error parsing the document. */ private void parseDocument( File file, String password )throws IOException { document = PDDocument.load(file, password); } private void addRecentFileItems() { Action recentMenuAction = new AbstractAction() { @Override public void actionPerformed(ActionEvent actionEvent) { String filePath = (String) ((JComponent) actionEvent.getSource()).getClientProperty("path"); try { readPDFFile(filePath, ""); } catch (Exception e) { throw new RuntimeException(e); } } }; if (!recentFiles.isEmpty()) { recentFilesMenu.removeAll(); List<String> files = recentFiles.getFiles(); for (int i = files.size() - 1; i >= 0; i--) { String path = files.get(i); String name = new File(path).getName(); JMenuItem recentFileMenuItem = new JMenuItem(name); recentFileMenuItem.putClientProperty("path", path); recentFileMenuItem.addActionListener(recentMenuAction); recentFilesMenu.add(recentFileMenuItem); } recentFilesMenu.setEnabled(true); } } /** * This will print out a message telling how to use this utility. 
*/ private static void usage() { System.err.println( "usage: java -jar pdfbox-app-x.y.z.jar PDFDebugger [OPTIONS] <input-file>\n" + " -password <password> Password to decrypt the document\n" + " <input-file> The PDF document to be loaded\n" ); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.resourcemanager.scheduler;

import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;

import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager;
import org.apache.hadoop.yarn.util.resource.Resources;

/**
 * Resource Usage by Labels for following fields by label - AM resource (to
 * enforce max-am-resource-by-label after YARN-2637) - Used resource (includes
 * AM resource usage) - Reserved resource - Pending resource - Headroom
 *
 * This class can be used to track resource usage in queue/user/app.
 *
 * And it is thread-safe
 */
public class ResourceUsage {
  private ReadLock readLock;
  private WriteLock writeLock;
  // Per-label usage; guarded by readLock/writeLock.
  private Map<String, UsageByLabel> usages;
  // short for no-label :)
  private static final String NL = CommonNodeLabelsManager.NO_LABEL;
  // Always-present entry for the no-label partition; final so the unlocked
  // fast path in _get can safely dereference it.
  private final UsageByLabel usageNoLabel;

  public ResourceUsage() {
    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    readLock = lock.readLock();
    writeLock = lock.writeLock();
    usages = new HashMap<String, UsageByLabel>();
    usageNoLabel = new UsageByLabel(NL);
    usages.put(NL, usageNoLabel);
  }

  // Usage enum here to make implement cleaner
  private enum ResourceType {
    //CACHED_USED and CACHED_PENDING may be read by anyone, but must only
    //be written by ordering policies
    USED(0), PENDING(1), AMUSED(2), RESERVED(3), CACHED_USED(4),
      CACHED_PENDING(5), AMLIMIT(6);

    private int idx;

    private ResourceType(int value) {
      this.idx = value;
    }
  }

  private static class UsageByLabel {
    // usage by label, contains all UsageType
    private Resource[] resArr;

    public UsageByLabel(String label) {
      resArr = new Resource[ResourceType.values().length];
      for (int i = 0; i < resArr.length; i++) {
        resArr[i] = Resource.newInstance(0, 0);
      }
    }

    public Resource getUsed() {
      return resArr[ResourceType.USED.idx];
    }

    @Override
    public String toString() {
      // FIX: am_limit was previously appended AFTER the closing "%}" of the
      // reserved entry, producing a malformed string like
      // "{...reserved=x%}am_limit=y%, ". Indices are now named via
      // ResourceType instead of magic numbers.
      StringBuilder sb = new StringBuilder();
      sb.append("{used=" + resArr[ResourceType.USED.idx] + "%, ");
      sb.append("pending=" + resArr[ResourceType.PENDING.idx] + "%, ");
      sb.append("am_used=" + resArr[ResourceType.AMUSED.idx] + "%, ");
      sb.append("am_limit=" + resArr[ResourceType.AMLIMIT.idx] + "%, ");
      sb.append("reserved=" + resArr[ResourceType.RESERVED.idx] + "%}");
      return sb.toString();
    }
  }

  /*
   * Used
   */
  public Resource getUsed() {
    return getUsed(NL);
  }

  public Resource getUsed(String label) {
    return _get(label, ResourceType.USED);
  }

  public Resource getCachedUsed() {
    return _get(NL, ResourceType.CACHED_USED);
  }

  public Resource getCachedUsed(String label) {
    return _get(label, ResourceType.CACHED_USED);
  }

  public Resource getCachedPending() {
    return _get(NL, ResourceType.CACHED_PENDING);
  }

  public Resource getCachedPending(String label) {
    return _get(label, ResourceType.CACHED_PENDING);
  }

  public void incUsed(String label, Resource res) {
    _inc(label, ResourceType.USED, res);
  }

  public void incUsed(Resource res) {
    incUsed(NL, res);
  }

  public void decUsed(Resource res) {
    decUsed(NL, res);
  }

  public void decUsed(String label, Resource res) {
    _dec(label, ResourceType.USED, res);
  }

  public void setUsed(Resource res) {
    setUsed(NL, res);
  }

  /** Copies the USED resource of every label from {@code other} into this. */
  public void copyAllUsed(ResourceUsage other) {
    try {
      writeLock.lock();
      for (Entry<String, UsageByLabel> entry : other.usages.entrySet()) {
        setUsed(entry.getKey(), Resources.clone(entry.getValue().getUsed()));
      }
    } finally {
      writeLock.unlock();
    }
  }

  public void setUsed(String label, Resource res) {
    _set(label, ResourceType.USED, res);
  }

  public void setCachedUsed(String label, Resource res) {
    _set(label, ResourceType.CACHED_USED, res);
  }

  public void setCachedUsed(Resource res) {
    _set(NL, ResourceType.CACHED_USED, res);
  }

  public void setCachedPending(String label, Resource res) {
    _set(label, ResourceType.CACHED_PENDING, res);
  }

  public void setCachedPending(Resource res) {
    _set(NL, ResourceType.CACHED_PENDING, res);
  }

  /*
   * Pending
   */
  public Resource getPending() {
    return getPending(NL);
  }

  public Resource getPending(String label) {
    return _get(label, ResourceType.PENDING);
  }

  public void incPending(String label, Resource res) {
    _inc(label, ResourceType.PENDING, res);
  }

  public void incPending(Resource res) {
    incPending(NL, res);
  }

  public void decPending(Resource res) {
    decPending(NL, res);
  }

  public void decPending(String label, Resource res) {
    _dec(label, ResourceType.PENDING, res);
  }

  public void setPending(Resource res) {
    setPending(NL, res);
  }

  public void setPending(String label, Resource res) {
    _set(label, ResourceType.PENDING, res);
  }

  /*
   * Reserved
   */
  public Resource getReserved() {
    return getReserved(NL);
  }

  public Resource getReserved(String label) {
    return _get(label, ResourceType.RESERVED);
  }

  public void incReserved(String label, Resource res) {
    _inc(label, ResourceType.RESERVED, res);
  }

  public void incReserved(Resource res) {
    incReserved(NL, res);
  }

  public void decReserved(Resource res) {
    decReserved(NL, res);
  }

  public void decReserved(String label, Resource res) {
    _dec(label, ResourceType.RESERVED, res);
  }

  public void setReserved(Resource res) {
    setReserved(NL, res);
  }

  public void setReserved(String label, Resource res) {
    _set(label, ResourceType.RESERVED, res);
  }

  /*
   * AM-Used
   */
  public Resource getAMUsed() {
    return getAMUsed(NL);
  }

  public Resource getAMUsed(String label) {
    return _get(label, ResourceType.AMUSED);
  }

  public void incAMUsed(String label, Resource res) {
    _inc(label, ResourceType.AMUSED, res);
  }

  public void incAMUsed(Resource res) {
    incAMUsed(NL, res);
  }

  public void decAMUsed(Resource res) {
    decAMUsed(NL, res);
  }

  public void decAMUsed(String label, Resource res) {
    _dec(label, ResourceType.AMUSED, res);
  }

  public void setAMUsed(Resource res) {
    setAMUsed(NL, res);
  }

  public void setAMUsed(String label, Resource res) {
    _set(label, ResourceType.AMUSED, res);
  }

  /*
   * AM-Resource Limit
   */
  public Resource getAMLimit() {
    return getAMLimit(NL);
  }

  public Resource getAMLimit(String label) {
    return _get(label, ResourceType.AMLIMIT);
  }

  public void incAMLimit(String label, Resource res) {
    _inc(label, ResourceType.AMLIMIT, res);
  }

  public void incAMLimit(Resource res) {
    incAMLimit(NL, res);
  }

  public void decAMLimit(Resource res) {
    decAMLimit(NL, res);
  }

  public void decAMLimit(String label, Resource res) {
    _dec(label, ResourceType.AMLIMIT, res);
  }

  public void setAMLimit(Resource res) {
    setAMLimit(NL, res);
  }

  public void setAMLimit(String label, Resource res) {
    _set(label, ResourceType.AMLIMIT, res);
  }

  /** Maps null to the canonical "none" resource. */
  private static Resource normalize(Resource res) {
    if (res == null) {
      return Resources.none();
    }
    return res;
  }

  private Resource _get(String label, ResourceType type) {
    // Lock-free fast path for the no-label partition: usageNoLabel is final
    // and created in the constructor, so the reference is always valid.
    // NOTE(review): the returned Resource itself may still be concurrently
    // mutated by _inc/_dec under the write lock — callers should not assume
    // a consistent snapshot from this fast path; confirm against callers.
    if (label == null || label.equals(NL)) {
      return normalize(usageNoLabel.resArr[type.idx]);
    }
    try {
      readLock.lock();
      UsageByLabel usage = usages.get(label);
      if (null == usage) {
        return Resources.none();
      }
      return normalize(usage.resArr[type.idx]);
    } finally {
      readLock.unlock();
    }
  }

  private Resource _getAll(ResourceType type) {
    try {
      readLock.lock();
      Resource allOfType = Resources.createResource(0);
      for (Map.Entry<String, UsageByLabel> usageEntry : usages.entrySet()) {
        //all usages types are initialized
        Resources.addTo(allOfType, usageEntry.getValue().resArr[type.idx]);
      }
      return allOfType;
    } finally {
      readLock.unlock();
    }
  }

  public Resource getAllPending() {
    return _getAll(ResourceType.PENDING);
  }

  public Resource getAllUsed() {
    return _getAll(ResourceType.USED);
  }

  // Must be called while holding writeLock (all callers are _set/_inc/_dec).
  private UsageByLabel getAndAddIfMissing(String label) {
    if (label == null || label.equals(NL)) {
      return usageNoLabel;
    }
    if (!usages.containsKey(label)) {
      UsageByLabel u = new UsageByLabel(label);
      usages.put(label, u);
      return u;
    }
    return usages.get(label);
  }

  private void _set(String label, ResourceType type, Resource res) {
    try {
      writeLock.lock();
      UsageByLabel usage = getAndAddIfMissing(label);
      // NOTE(review): stores the caller's Resource by reference (no defensive
      // copy) — matches existing callers such as copyAllUsed, which clone
      // before calling; confirm before adding new callers.
      usage.resArr[type.idx] = res;
    } finally {
      writeLock.unlock();
    }
  }

  private void _inc(String label, ResourceType type, Resource res) {
    try {
      writeLock.lock();
      UsageByLabel usage = getAndAddIfMissing(label);
      Resources.addTo(usage.resArr[type.idx], res);
    } finally {
      writeLock.unlock();
    }
  }

  private void _dec(String label, ResourceType type, Resource res) {
    try {
      writeLock.lock();
      UsageByLabel usage = getAndAddIfMissing(label);
      Resources.subtractFrom(usage.resArr[type.idx], res);
    } finally {
      writeLock.unlock();
    }
  }

  /** Returns CACHED_USED + CACHED_PENDING for the given label. */
  public Resource getCachedDemand(String label) {
    try {
      readLock.lock();
      Resource demand = Resources.createResource(0);
      Resources.addTo(demand, getCachedUsed(label));
      Resources.addTo(demand, getCachedPending(label));
      return demand;
    } finally {
      readLock.unlock();
    }
  }

  @Override
  public String toString() {
    try {
      readLock.lock();
      return usages.toString();
    } finally {
      readLock.unlock();
    }
  }

  public Set<String> getNodePartitionsSet() {
    try {
      readLock.lock();
      // NOTE(review): this returns the live keySet view, which escapes the
      // read lock and can throw ConcurrentModificationException if iterated
      // while another thread adds a label. Kept as-is because callers may
      // rely on the live view; consider returning a copy.
      return usages.keySet();
    } finally {
      readLock.unlock();
    }
  }
}
/*
 * Copyright 2010 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.replication.regionserver;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.catalog.CatalogTracker;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.regionserver.wal.WALObserver;
import org.apache.hadoop.hbase.replication.ReplicationSourceDummy;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;

import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;

import static org.junit.Assert.assertEquals;

/**
 * Tests {@link ReplicationSourceManager}: verifies that rolled HLogs are
 * tracked and cleaned up once their positions have been reported.
 */
public class TestReplicationSourceManager {

  private static final Log LOG =
      LogFactory.getLog(TestReplicationSourceManager.class);

  private static Configuration conf;

  private static HBaseTestingUtility utility;

  private static Replication replication;

  private static ReplicationSourceManager manager;

  private static ZooKeeperWatcher zkw;

  private static HTableDescriptor htd;

  private static HRegionInfo hri;

  private static final byte[] r1 = Bytes.toBytes("r1");

  private static final byte[] r2 = Bytes.toBytes("r2");

  private static final byte[] f1 = Bytes.toBytes("f1");

  private static final byte[] f2 = Bytes.toBytes("f2");

  private static final byte[] test = Bytes.toBytes("test");

  private static FileSystem fs;

  private static Path oldLogDir;

  private static Path logDir;

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    conf = HBaseConfiguration.create();
    conf.set("replication.replicationsource.implementation",
        ReplicationSourceDummy.class.getCanonicalName());
    conf.setBoolean(HConstants.REPLICATION_ENABLE_KEY, true);
    utility = new HBaseTestingUtility(conf);
    utility.startMiniZKCluster();

    zkw = new ZooKeeperWatcher(conf, "test", null);
    ZKUtil.createWithParents(zkw, "/hbase/replication");
    ZKUtil.createWithParents(zkw, "/hbase/replication/peers/1");
    ZKUtil.setData(zkw, "/hbase/replication/peers/1", Bytes.toBytes(
        conf.get(HConstants.ZOOKEEPER_QUORUM) + ":"
        + conf.get("hbase.zookeeper.property.clientPort") + ":/1"));
    ZKUtil.createWithParents(zkw, "/hbase/replication/state");
    ZKUtil.setData(zkw, "/hbase/replication/state", Bytes.toBytes("true"));

    // FIX: fs, oldLogDir and logDir must be initialized BEFORE they are
    // passed to the Replication constructor; previously Replication was
    // created first and received null for all three.
    fs = FileSystem.get(conf);
    oldLogDir = new Path(utility.getTestDir(),
        HConstants.HREGION_OLDLOGDIR_NAME);
    logDir = new Path(utility.getTestDir(),
        HConstants.HREGION_LOGDIR_NAME);

    replication = new Replication(new DummyServer(), fs, logDir, oldLogDir);
    manager = replication.getReplicationManager();
    manager.addSource("1");

    htd = new HTableDescriptor(test);
    HColumnDescriptor col = new HColumnDescriptor("f1");
    col.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
    htd.addFamily(col);
    col = new HColumnDescriptor("f2");
    col.setScope(HConstants.REPLICATION_SCOPE_LOCAL);
    htd.addFamily(col);

    hri = new HRegionInfo(htd, r1, r2);
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    manager.join();
    utility.shutdownMiniCluster();
  }

  @Before
  public void setUp() throws Exception {
    fs.delete(logDir, true);
    fs.delete(oldLogDir, true);
  }

  @After
  public void tearDown() throws Exception {
    // Reuses setUp to wipe the log directories after each test.
    setUp();
  }

  @Test
  public void testLogRoll() throws Exception {
    long seq = 0;
    long baseline = 1000;
    long time = baseline;
    KeyValue kv = new KeyValue(r1, f1, r1);
    WALEdit edit = new WALEdit();
    edit.add(kv);

    List<WALObserver> listeners = new ArrayList<WALObserver>();
    listeners.add(replication);
    HLog hlog = new HLog(fs, logDir, oldLogDir, conf, listeners,
        URLEncoder.encode("regionserver:60020", "UTF8"));

    manager.init();

    // Testing normal log rolling every 20
    for(long i = 1; i < 101; i++) {
      if(i > 1 && i % 20 == 0) {
        hlog.rollWriter();
      }
      LOG.info(i);
      HLogKey key = new HLogKey(hri.getRegionName(),
          test, seq++, System.currentTimeMillis());
      hlog.append(hri, key, edit);
    }

    // Simulate a rapid insert that's followed
    // by a report that's still not totally complete (missing last one)
    LOG.info(baseline + " and " + time);
    baseline += 101;
    time = baseline;
    LOG.info(baseline + " and " + time);

    for (int i = 0; i < 3; i++) {
      HLogKey key = new HLogKey(hri.getRegionName(),
          test, seq++, System.currentTimeMillis());
      hlog.append(hri, key, edit);
    }

    assertEquals(6, manager.getHLogs().size());

    hlog.rollWriter();

    manager.logPositionAndCleanOldLogs(
        manager.getSources().get(0).getCurrentPath(), "1", 0, false);

    HLogKey key = new HLogKey(hri.getRegionName(),
        test, seq++, System.currentTimeMillis());
    hlog.append(hri, key, edit);

    assertEquals(1, manager.getHLogs().size());

    // TODO Need a case with only 2 HLogs and we only want to delete the first one
  }

  /**
   * Minimal Server stub: supplies only the configuration and ZooKeeper
   * watcher the replication code needs; everything else is a no-op.
   */
  static class DummyServer implements Server {

    @Override
    public Configuration getConfiguration() {
      return conf;
    }

    @Override
    public ZooKeeperWatcher getZooKeeper() {
      return zkw;
    }

    @Override
    public CatalogTracker getCatalogTracker() {
      return null; // not needed by replication
    }

    @Override
    public String getServerName() {
      return null; // not needed by replication
    }

    @Override
    public void abort(String why, Throwable e) {
      // no-op for tests
    }

    @Override
    public void stop(String why) {
      // no-op for tests
    }

    @Override
    public boolean isStopped() {
      return false;
    }
  }
}