repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
clarkyzl/flink
flink-python/src/main/java/org/apache/flink/table/runtime/arrow/readers/TimeFieldReader.java
2680
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.runtime.arrow.readers;

import org.apache.flink.annotation.Internal;
import org.apache.flink.util.Preconditions;

import org.apache.arrow.vector.TimeMicroVector;
import org.apache.arrow.vector.TimeMilliVector;
import org.apache.arrow.vector.TimeNanoVector;
import org.apache.arrow.vector.TimeSecVector;
import org.apache.arrow.vector.ValueVector;

import java.sql.Time;
import java.util.TimeZone;

/** {@link ArrowFieldReader} for Time. */
@Internal
public final class TimeFieldReader extends ArrowFieldReader<Time> {

    // The local time zone. The millisecond-of-day value is shifted by this zone's
    // offset so the resulting java.sql.Time renders the stored wall-clock time.
    private static final TimeZone LOCAL_TZ = TimeZone.getDefault();

    /**
     * Creates a reader over the given Arrow time vector.
     *
     * @param valueVector the underlying vector; must be one of the four supported
     *     Arrow time vector types (second / milli / micro / nano precision)
     */
    public TimeFieldReader(ValueVector valueVector) {
        super(valueVector);
        Preconditions.checkState(
                valueVector instanceof TimeSecVector
                        || valueVector instanceof TimeMilliVector
                        || valueVector instanceof TimeMicroVector
                        || valueVector instanceof TimeNanoVector);
    }

    /**
     * Reads the time value at the given row, normalizing all four supported Arrow
     * precisions to milliseconds.
     *
     * @param index the row index to read
     * @return the time value, or {@code null} if the slot is null
     */
    @Override
    public Time read(int index) {
        // Fetch the vector once and reuse the local; the original re-called
        // getValueVector() (and re-cast it) in every branch below.
        final ValueVector valueVector = getValueVector();
        if (valueVector.isNull(index)) {
            return null;
        }
        final long timeMilli;
        if (valueVector instanceof TimeSecVector) {
            // Long multiplier: widen before multiplying rather than after.
            timeMilli = ((TimeSecVector) valueVector).get(index) * 1000L;
        } else if (valueVector instanceof TimeMilliVector) {
            timeMilli = ((TimeMilliVector) valueVector).get(index);
        } else if (valueVector instanceof TimeMicroVector) {
            timeMilli = ((TimeMicroVector) valueVector).get(index) / 1000;
        } else {
            timeMilli = ((TimeNanoVector) valueVector).get(index) / 1000000;
        }
        // Undo the local zone offset so Time#toString shows the stored wall-clock time.
        return new Time(timeMilli - LOCAL_TZ.getOffset(timeMilli));
    }
}
apache-2.0
scnakandala/derby
java/testing/org/apache/derbyTesting/system/nstest/init/DbSetup.java
7434
// NOTE(review): this dataset row stores the Derby NsTest setup class collapsed onto a few very
// long physical lines. In this collapsed form the inline "//" comments swallow the code that
// follows them on the same line, and one string literal ("creating table 'NSTRIGTAB' and ...
// corresponding indices") is split across a line break. The content is preserved verbatim below;
// only review comments are added at safe token boundaries (never inside the split literal).
// doIt() is idempotent: it first probes sys.systables for NSTESTTAB and, if present, records
// NsTest.schemaCreated and skips creation; otherwise it creates the table, its indexes, a
// sequence, a shadow table NSTRIGTAB, and an AFTER DELETE trigger copying deleted rows into it.
/* Derby - Class org.apache.derbyTesting.system.nstest.init.DbSetup Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derbyTesting.system.nstest.init; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import org.apache.derbyTesting.system.nstest.NsTest; /** * DbSetup: Creates database and builds single user table with indexes */ public class DbSetup { /** * The main database setup method */ public static boolean doIt(Connection conn) throws Throwable { Statement s = null; ResultSet rs = null; boolean finished = false; NsTest.logger.println("dbSetup.doIt() starting..."); try { conn.setAutoCommit(false); } catch (Exception e) { NsTest.logger.println("FAIL - setAutoCommit() failed:"); printException("setting autocommit in dbSetup", e); return (false); } try { s = conn.createStatement(); rs = s.executeQuery("select tablename from sys.systables " + " where tablename = 'NSTESTTAB'"); if (rs.next()) { rs.close(); NsTest.logger.println("table 'NSTESTTAB' already exists"); finished = true; NsTest.schemaCreated = true; // indicates to other classes // that the schema already exists } } catch (Exception e) { NsTest.logger .println("dbSetup.doIt() check existance of table: FAIL -- unexpected exception:");
// NOTE(review): continuation of the catch block above — reports the probe failure and aborts
// setup with false. The schema-creation branch that follows runs only when finished == false.
printException( "executing query or processing resultSet to check for table existence", e); return (false); } // if we reach here then the table does not exist, so we create it if (finished == false) { try { NsTest.logger .println("creating table 'NSTESTTAB' and corresponding indices"); s.execute("create table nstesttab (" + "id int," + "t_char char(100)," + "t_date date," + "t_decimal decimal," + "t_decimal_nn decimal(10,10)," + "t_double double precision," + "t_float float," + "t_int int," + "t_longint bigint," + "t_numeric_large numeric(30,10)," + "t_real real," + "t_smallint smallint," + "t_time time," + "t_timestamp timestamp," + "t_varchar varchar(100)," + "t_clob clob(1K)," + "t_blob blob(10K)," + "serialkey bigint generated always as identity, " + "sequenceColumn bigint, " + "unique (serialkey)) "); s.execute("create index t_char_ind on nstesttab ( t_char)"); s.execute("create index t_date_ind on nstesttab ( t_date)"); s .execute("create index t_decimal_ind on nstesttab ( t_decimal)"); s .execute("create index t_decimal_nn_ind on nstesttab ( t_decimal_nn)"); s.execute("create index t_double_ind on nstesttab ( t_double)"); s.execute("create index t_float_ind on nstesttab ( t_float)"); s.execute("create index t_int_ind on nstesttab ( t_int)"); s .execute("create index t_longint_ind on nstesttab ( t_longint)"); s .execute("create index t_num_lrg_ind on nstesttab ( t_numeric_large)"); s.execute("create index t_real_ind on nstesttab ( t_real)"); s .execute("create index t_smallint_ind on nstesttab ( t_smallint)"); s.execute("create index t_time_ind on nstesttab ( t_time)"); s .execute("create index t_timestamp_ind on nstesttab ( t_timestamp)"); s .execute("create index t_varchar_ind on nstesttab ( t_varchar)"); s .execute("create index t_serialkey_ind on nstesttab (serialkey)"); NsTest.logger.println( "Creating nstesttab_seq sequence" ); s.execute( "create sequence nstesttab_seq as bigint start with 0" ); NsTest.logger .println("creating table 'NSTRIGTAB' and
corresponding indices"); s.execute("create table NSTRIGTAB (" + "id int," + "t_char char(100)," + "t_date date," + "t_decimal decimal," + "t_decimal_nn decimal(10,10)," + "t_double double precision," + "t_float float," + "t_int int," + "t_longint bigint," + "t_numeric_large numeric(30,10)," + "t_real real," + "t_smallint smallint," + "t_time time," + "t_timestamp timestamp," + "t_varchar varchar(100)," + "t_clob clob(1K)," + "t_blob blob(10K)," + "serialkey bigint, " + "sequenceColumn bigint )"); // create trigger s.execute("CREATE TRIGGER NSTEST_TRIG AFTER DELETE ON nstesttab " + "REFERENCING OLD AS OLDROW FOR EACH ROW MODE DB2SQL " + "INSERT INTO NSTRIGTAB values(" + "OLDROW.ID, OLDROW.T_CHAR,OLDROW.T_DATE," + "OLDROW.T_DECIMAL,OLDROW.T_DECIMAL_NN,OLDROW.T_DOUBLE," + "OLDROW.T_FLOAT, OLDROW.T_INT,OLDROW.T_LONGINT, OLDROW.T_numeric_large," + "OLDROW.T_real,OLDROW.T_smallint,OLDROW.T_time,OLDROW.T_timestamp,OLDROW.T_varchar," + "OLDROW.T_clob,OLDROW.T_blob, " + "OLDROW.serialkey, " + "OLDROW.sequenceColumn )"); } catch (Exception e) { if ( NsTest.justCountErrors() ) { NsTest.printException( DbSetup.class.getName(), e ); } else { e.printStackTrace( NsTest.logger ); } NsTest.logger .println("FAIL - unexpected exception in dbSetup.doIt() while creating schema:"); printException("executing statements to create schema", e); return (false); } }// end of if(finished==false) conn.commit(); return (true); }// end of method doIt() // ** This method abstracts exception message printing for all exception // messages. You may want to change // ****it if more detailed exception messages are desired.
// NOTE(review): printException() below is synchronized so that concurrent test workers do not
// interleave their stack traces in the shared log. It special-cases SQLState 40001 (deadlock),
// 40XL1 (lock timeout) and 23500 (duplicate key), prints any chained SQLException, and handles
// exceptions with a null message separately.
// ***Method is synchronized so that the output file will contain sensible // stack traces that are not // ****mixed but rather one exception printed at a time public static synchronized void printException(String where, Exception e) { if ( NsTest.justCountErrors() ) { NsTest.addError( e ); return; } if (e instanceof SQLException) { SQLException se = (SQLException) e; if (se.getSQLState().equals("40001")) NsTest.logger.println("deadlocked detected"); if (se.getSQLState().equals("40XL1")) NsTest.logger.println(" lock timeout exception"); if (se.getSQLState().equals("23500")) NsTest.logger.println(" duplicate key violation"); if (se.getNextException() != null) { String m = se.getNextException().getSQLState(); NsTest.logger.println(se.getNextException().getMessage() + " SQLSTATE: " + m); } } if (e.getMessage() == null) { NsTest.logger.println("NULL error message detected"); NsTest.logger.println("Here is the NULL exection - " + e.toString()); NsTest.logger.println("Stack trace of the NULL exception - "); e.printStackTrace( NsTest.logger ); } NsTest.logger.println("During " + where + ", exception thrown was : " + e.getMessage()); } }//end of class definition
apache-2.0
linqingyicen/jdonframework
src/main/java/com/jdon/annotation/Model.java
768
package com.jdon.annotation;

import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;

import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;

/**
 * Marks a type as a Domain Model. A domain model should normally live in memory,
 * not in the database, so in-memory caching is very important for the domain
 * model's life cycle.
 *
 * @see com.jdon.controller.model.ModelIF
 * @author banQ
 */
@Target(TYPE)
@Retention(RUNTIME)
@Documented
public @interface Model {

	/**
	 * Disabled from version 6.5 onward; retained for backward compatibility.
	 *
	 * @return whether the annotated model may be cached in memory (default {@code true})
	 */
	boolean isCacheable() default true;

	/**
	 * Disabled from version 6.5 onward; retained for backward compatibility.
	 *
	 * @return whether the annotated model is considered modified (default {@code false})
	 */
	boolean isModified() default false;
}
apache-2.0
prasi-in/geode
geode-core/src/main/java/org/apache/geode/internal/admin/statalerts/SingleAttrDefinitionImpl.java
5728
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.admin.statalerts;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.geode.DataSerializer;
import org.apache.geode.StatisticDescriptor;
import org.apache.geode.Statistics;
import org.apache.geode.StatisticsFactory;
import org.apache.geode.internal.admin.StatAlert;
import org.apache.geode.internal.admin.StatAlertDefinition;

/**
 * Implementation of {@link StatAlertDefinition} that provides the definition for a single
 * statistic.
 */
public class SingleAttrDefinitionImpl implements StatAlertDefinition {

  private static final long serialVersionUID = 3292417185742697896L;

  /** Display name of this definition; also the source of the derived id. */
  protected String name;

  /** Id derived from the hash code of the upper-cased name. */
  protected int _id;

  /** The single statistic this definition watches. */
  protected StatisticInfo statisticInfo;

  public SingleAttrDefinitionImpl() {}

  /**
   * Creates a definition for a single statistic.
   *
   * @param name display name; its upper-cased hash code becomes the id
   * @param statisticInfo the statistic to watch
   */
  public SingleAttrDefinitionImpl(String name, StatisticInfo statisticInfo) {
    super();
    this.statisticInfo = statisticInfo;
    this.name = name;
    this._id = getName().toUpperCase().hashCode();
  }

  public int getId() {
    return _id;
  }

  @Override // GemStoneAddition
  public int hashCode() {
    // id is derived from the name, so equal names hash equally.
    return getId();
  }

  /**
   * Verifies that the statistics instance referenced by {@link #statisticInfo} exists in the
   * factory and exposes the named statistic.
   *
   * @param factory factory used to resolve the statistics text id
   * @return true if the named statistic can be resolved
   */
  public boolean verify(StatisticsFactory factory) {
    if (name == null || name.length() == 0) {
      return false;
    }
    if (statisticInfo == null) {
      return false;
    }
    Statistics[] matches = factory.findStatisticsByTextId(statisticInfo.getStatisticsTextId());
    if (matches == null || matches.length == 0) {
      return false;
    }
    // Only the first match's type is inspected, mirroring the original behavior.
    for (StatisticDescriptor descriptor : matches[0].getType().getStatistics()) {
      if (statisticInfo.getStatisticName().equals(descriptor.getName())) {
        return true;
      }
    }
    return false;
  }

  public String getStringRepresentation() {
    // StringBuilder: no synchronization is needed on a method-local buffer.
    StringBuilder buffer = new StringBuilder();
    buffer.append("StatAlertDefinition [\n");
    buffer.append(toString());
    buffer.append("]");
    return buffer.toString();
  }

  @Override // GemStoneAddition
  public String toString() {
    StringBuilder buffer = new StringBuilder();
    buffer.append("Name:" + getName() + "\n");
    buffer.append("Attribute:\n");
    if (statisticInfo != null) {
      buffer.append(statisticInfo.toString() + "\n");
    }
    return buffer.toString();
  }

  /**
   * This method returns the name of this stat alert definition.
   *
   * @return Name of the StatAlertDefinition
   */
  public String getName() {
    return name;
  }

  /**
   * This method sets the name of this stat alert definition.
   *
   * @param name name to be set for this StatAlertDefinition.
   */
  public void setName(String name) {
    this.name = name;
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.geode.internal.admin.StatAlertDefinition#getStatisticInfo()
   */
  public StatisticInfo[] getStatisticInfo() {
    // Wrapped in a one-element array to satisfy the multi-statistic interface.
    return new StatisticInfo[] {statisticInfo};
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.geode.internal.admin.StatAlertDefinition#setStatisticInfo(org.apache.geode.internal.
   * admin.StatisticInfo[])
   */
  public void setStatisticInfo(StatisticInfo[] info) {
    if (info == null || info.length != 1) {
      throw new IllegalArgumentException(
          "setStatisticInfo method requires 1 length array of StatisticInfo objects.");
    }
    statisticInfo = info[0];
  }

  /** @return a one-element array holding the current value of the watched statistic */
  public Number[] getValue() {
    Number[] vals = new Number[1];
    vals[0] = statisticInfo.getStatistics().get(statisticInfo.getStatisticDescriptor());
    return vals;
  }

  /** Pass-through; this definition performs no transformation of supplied values. */
  public Number[] getValue(Number[] vals) {
    return vals;
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.geode.internal.admin.StatAlertDefinition#evaluate(java.lang.Number[])
   */
  public boolean evaluate(Number[] params) {
    // Valid only when configured and handed exactly one value.
    return evaluate() && params != null && params.length == 1;
  }

  public boolean evaluate() {
    return statisticInfo != null;
  }

  public StatAlert evaluateAndAlert(Number[] params) {
    return evaluate(params) ? getAlert(params[0]) : null;
  }

  public StatAlert evaluateAndAlert() {
    return evaluate() ? getAlert(getValue()[0]) : null;
  }

  protected StatAlert getAlert(Number val) {
    Number[] vals = new Number[1];
    vals[0] = val;
    return new StatAlert(this.getId(), vals);
  }

  /** Single-attribute definitions support no decorators. */
  public boolean hasDecorator(String decoratorID) {
    return false;
  }

  public StatAlertDefinition getDecorator(String decoratorID) {
    return null;
  }

  /** Serializes name, id and statistic info — must mirror {@link #fromData}. */
  public void toData(DataOutput out) throws IOException {
    DataSerializer.writeString(this.name, out);
    DataSerializer.writePrimitiveInt(this._id, out);
    DataSerializer.writeObject(this.statisticInfo, out);
  }

  /** Deserializes in the exact order written by {@link #toData}. */
  public void fromData(DataInput in) throws IOException, ClassNotFoundException {
    this.name = DataSerializer.readString(in);
    this._id = DataSerializer.readPrimitiveInt(in);
    this.statisticInfo = (StatisticInfo) DataSerializer.readObject(in);
  }
}
apache-2.0
profjrr/zaproxy
src/org/zaproxy/zap/extension/spider/SpiderDialog.java
15217
// NOTE(review): ZAP's spider-options dialog, stored in this dataset row collapsed onto a few
// very long physical lines. In this collapsed form, inline "//" comments swallow the code that
// follows them on the same line and several statements are split mid-expression across line
// breaks. The content is preserved verbatim below; only review comments are added at token
// boundaries. The dialog has two tabs (scope / advanced) and clones the global SpiderParam on
// first use so user edits here do not touch the global options until save().
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2013 ZAP development team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.spider; import java.awt.Dimension; import java.awt.Frame; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.net.URL; import java.util.ArrayList; import java.util.List; import javax.swing.JButton; import org.apache.commons.httpclient.URI; import org.apache.log4j.Logger; import org.parosproxy.paros.Constant; import org.parosproxy.paros.control.Control; import org.parosproxy.paros.model.Model; import org.parosproxy.paros.model.Session; import org.zaproxy.zap.extension.users.ExtensionUserManagement; import org.zaproxy.zap.model.Context; import org.zaproxy.zap.model.StructuralNode; import org.zaproxy.zap.model.Target; import org.zaproxy.zap.spider.SpiderParam; import org.zaproxy.zap.spider.filters.MaxChildrenFetchFilter; import org.zaproxy.zap.spider.filters.MaxChildrenParseFilter; import org.zaproxy.zap.users.User; import org.zaproxy.zap.view.StandardFieldsDialog; public class SpiderDialog extends StandardFieldsDialog { private static final String FIELD_START = "spider.custom.label.start"; private static final String FIELD_CONTEXT = "spider.custom.label.context"; private static final String FIELD_USER = "spider.custom.label.user"; private static final String FIELD_RECURSE =
"spider.custom.label.recurse"; private static final String FIELD_ADVANCED = "spider.custom.label.adv"; private static final String FIELD_MAX_DEPTH = "spider.custom.label.maxDepth"; private static final String FIELD_MAX_CHILDREN = "spider.custom.label.maxChildren"; private static final String FIELD_SEND_REFERER = "spider.custom.label.sendReferer"; private static final String FIELD_PROCESS_FORMS = "spider.custom.label.processForms"; private static final String FIELD_POST_FORMS = "spider.custom.label.postForms"; private static final String FIELD_PARSE_COMMENTS = "spider.custom.label.parseComments"; private static final String FIELD_PARSE_ROBOTS = "spider.custom.label.parseRobots"; private static final String FIELD_PARSE_SITEMAP = "spider.custom.label.sitemap"; private static final String FIELD_PARSE_SVN = "spider.custom.label.parseSvn"; private static final String FIELD_PARSE_GIT = "spider.custom.label.parseGit"; private static final String FIELD_HANDLE_ODATA = "spider.custom.label.handleOdata"; private static Logger logger = Logger.getLogger(SpiderDialog.class); private static final long serialVersionUID = 1L; private JButton[] extraButtons = null; private ExtensionSpider extension = null; private SpiderParam spiderParam = null; private ExtensionUserManagement extUserMgmt = (ExtensionUserManagement) Control.getSingleton().getExtensionLoader() .getExtension(ExtensionUserManagement.NAME); private Target target = null; private int maxChildrenToCrawl = 0; // This is not persisted anywhere public SpiderDialog(ExtensionSpider ext, Frame owner, Dimension dim) { super(owner, "spider.custom.title", dim, new String[]{ "spider.custom.tab.scope", "spider.custom.tab.adv" }); this.extension = ext; // The first time init to the default options set, after that keep own copies reset(false); } public void init(Target target) { if (target != null) { // If one isnt specified then leave the previously selected one this.target = target; } logger.debug("init " + this.target);
// NOTE(review): init() below rebuilds every field each time the dialog is (re)opened — tab 0
// holds the scope fields, tab 1 the advanced options seeded from the cloned SpiderParam. The
// POST-forms checkbox is kept disabled whenever process-forms is unchecked.
this.removeAllFields(); this.addTargetSelectField(0, FIELD_START, this.target, true, false); this.addComboField(0, FIELD_CONTEXT, new String[] {}, ""); this.addComboField(0, FIELD_USER, new String[] {}, ""); this.addCheckBoxField(0, FIELD_RECURSE, true); // This option is always read from the 'global' options this.addCheckBoxField(0, FIELD_ADVANCED, getSpiderParam().isShowAdvancedDialog()); this.addPadding(0); // Advanced options this.addNumberField(1, FIELD_MAX_DEPTH, 1, 19, getSpiderParam().getMaxDepth()); this.addNumberField(1, FIELD_MAX_CHILDREN, 0, Integer.MAX_VALUE, maxChildrenToCrawl); this.addCheckBoxField(1, FIELD_SEND_REFERER, getSpiderParam().isSendRefererHeader()); this.addCheckBoxField(1, FIELD_PROCESS_FORMS, getSpiderParam().isProcessForm()); this.addCheckBoxField(1, FIELD_POST_FORMS, getSpiderParam().isPostForm()); this.addCheckBoxField(1, FIELD_PARSE_COMMENTS, getSpiderParam().isParseComments()); this.addCheckBoxField(1, FIELD_PARSE_ROBOTS, getSpiderParam().isParseRobotsTxt()); this.addCheckBoxField(1, FIELD_PARSE_SITEMAP, getSpiderParam().isParseSitemapXml()); this.addCheckBoxField(1, FIELD_PARSE_SVN, getSpiderParam().isParseSVNEntries()); this.addCheckBoxField(1, FIELD_PARSE_GIT, getSpiderParam().isParseGit()); this.addCheckBoxField(1, FIELD_HANDLE_ODATA, getSpiderParam().isHandleODataParametersVisited()); this.addPadding(1); if (!
getBoolValue(FIELD_PROCESS_FORMS)) { setFieldValue(FIELD_POST_FORMS, false); getField(FIELD_POST_FORMS).setEnabled(false); } this.addFieldListener(FIELD_CONTEXT, new ActionListener() { @Override public void actionPerformed(ActionEvent e) { setUsers(); } }); this.addFieldListener(FIELD_PROCESS_FORMS, new ActionListener() { @Override public void actionPerformed(ActionEvent e) { if (getBoolValue(FIELD_PROCESS_FORMS)) { getField(FIELD_POST_FORMS).setEnabled(true); } else { setFieldValue(FIELD_POST_FORMS, false); getField(FIELD_POST_FORMS).setEnabled(false); } } }); this.addFieldListener(FIELD_ADVANCED, new ActionListener() { @Override public void actionPerformed(ActionEvent e) { setAdvancedTabs(getBoolValue(FIELD_ADVANCED)); } }); if (target != null) { // Set up the fields if a node has been specified, otherwise leave as previously set this.targetSelected(FIELD_START, this.target); this.setUsers(); } if ( ! extension.getSpiderParam().isShowAdvancedDialog()) { // Remove all but the first tab this.setAdvancedTabs(false); } this.pack(); } private SpiderParam getSpiderParam() { if (spiderParam == null) { // First time in clone the global options, after that keep the last ones the user set spiderParam = (SpiderParam) extension.getSpiderParam().clone(); } return spiderParam; } private void setAdvancedTabs(boolean visible) { // Show/hide all except from the first tab this.setTabsVisible (new String[] { "spider.custom.tab.adv" }, visible); } @Override public String getHelpIndex() { return "ui.dialogs.spider"; } @Override public void targetSelected(String field, Target node) { List<String> ctxNames = new ArrayList<String>(); if (node != null) { // The user has selected a new node this.target = node; if (node.getStartNode() != null) { Session session = Model.getSingleton().getSession(); List<Context> contexts = session.getContextsForNode(node.getStartNode()); for (Context context : contexts) { ctxNames.add(context.getName()); } } else if (node.getContext() != null) {
// NOTE(review): getSelectedContext() below returns null whenever extUserMgmt is null, which
// indirectly guards the unchecked extUserMgmt dereferences in getSelectedUser() and setUsers();
// worth an explicit null check if that invariant ever changes.
ctxNames.add(node.getContext().getName()); } } this.setComboFields(FIELD_CONTEXT, ctxNames, ""); this.getField(FIELD_CONTEXT).setEnabled(ctxNames.size() > 0); } private Context getSelectedContext() { String ctxName = this.getStringValue(FIELD_CONTEXT); if (this.extUserMgmt != null && ! this.isEmptyField(FIELD_CONTEXT)) { Session session = Model.getSingleton().getSession(); return session.getContext(ctxName); } return null; } private User getSelectedUser() { Context context = this.getSelectedContext(); if (context != null) { String userName = this.getStringValue(FIELD_USER); List<User> users = this.extUserMgmt.getContextUserAuthManager(context.getIndex()).getUsers(); for (User user : users) { if (userName.equals(user.getName())) { return user; } } } return null; } private void setUsers() { Context context = this.getSelectedContext(); List<String> userNames = new ArrayList<String>(); if (context != null) { List<User> users = this.extUserMgmt.getContextUserAuthManager(context.getIndex()).getUsers(); userNames.add(""); // The default should always be 'not specified' for (User user : users) { userNames.add(user.getName()); } } this.setComboFields(FIELD_USER, userNames, ""); this.getField(FIELD_USER).setEnabled(userNames.size() > 1); // Theres always 1..
} private void reset(boolean refreshUi) { // Reset to the global options spiderParam = null; if (refreshUi) { init(target); repaint(); } } @Override public String getSaveButtonText() { return Constant.messages.getString("spider.custom.button.scan"); } @Override public JButton[] getExtraButtons() { if (extraButtons == null) { JButton resetButton = new JButton(Constant.messages.getString("spider.custom.button.reset")); resetButton.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { reset(true); } }); extraButtons = new JButton[]{resetButton}; } return extraButtons; } @Override public void save() { Object[] contextSpecificObjects = null; URI startUri = null; try { // Always include the startUri, this has the side effect // of handling URLs that have not been accessed startUri = new URI(this.getStringValue(FIELD_START), true); } catch (Exception e1) { // Ignore - will have been checked in validateParams } if (this.getBoolValue(FIELD_ADVANCED)) { // Set the advanced options spiderParam.setMaxDepth(this.getIntValue(FIELD_MAX_DEPTH)); spiderParam.setSendRefererHeader(this.getBoolValue(FIELD_SEND_REFERER)); spiderParam.setProcessForm(this.getBoolValue(FIELD_PROCESS_FORMS)); spiderParam.setPostForm(this.getBoolValue(FIELD_POST_FORMS)); spiderParam.setParseComments(this.getBoolValue(FIELD_PARSE_COMMENTS)); spiderParam.setParseRobotsTxt(this.getBoolValue(FIELD_PARSE_ROBOTS)); spiderParam.setParseSitemapXml(this.getBoolValue(FIELD_PARSE_SITEMAP)); spiderParam.setParseSVNEntries(this.getBoolValue(FIELD_PARSE_SVN)); spiderParam.setParseGit(this.getBoolValue(FIELD_PARSE_GIT)); spiderParam.setHandleODataParametersVisited(this.getBoolValue(FIELD_HANDLE_ODATA)); spiderParam.setThreadCount(extension.getSpiderParam().getThreadCount()); maxChildrenToCrawl = this.getIntValue(FIELD_MAX_CHILDREN); if (maxChildrenToCrawl > 0) { // Add the filters to filter on maximum number of children MaxChildrenFetchFilter
maxChildrenFetchFilter = new MaxChildrenFetchFilter(); maxChildrenFetchFilter.setMaxChildren(maxChildrenToCrawl); maxChildrenFetchFilter.setModel(extension.getModel()); MaxChildrenParseFilter maxChildrenParseFilter = new MaxChildrenParseFilter(); maxChildrenParseFilter.setMaxChildren(maxChildrenToCrawl); maxChildrenParseFilter.setModel(extension.getModel()); contextSpecificObjects = new Object[]{ spiderParam, maxChildrenFetchFilter, maxChildrenParseFilter, startUri }; } else { contextSpecificObjects = new Object[]{ spiderParam, startUri }; } } else { contextSpecificObjects = new Object[]{ startUri }; } String displayName; if (target == null || target.getStartNode() == null || ! this.getStringValue(FIELD_START).equals( target.getStartNode().getHierarchicNodeName())) { // Clear the target as it doesnt match the value entered manually target = new Target((StructuralNode)null); displayName = startUri.toString(); if (displayName.length() >= 30) { // Just use the first and last 14 chrs to prevent huge urls messing up the display displayName = displayName.substring(0, 14) + ".."
+ displayName.substring(displayName.length()-15, displayName.length()); } } else { displayName = target.getDisplayName(); } // Save the adv option permanently for next time extension.getSpiderParam().setShowAdvancedDialog(this.getBoolValue(FIELD_ADVANCED)); target.setRecurse(this.getBoolValue(FIELD_RECURSE)); if (target.getContext() == null && getSelectedContext() != null) { target.setContext(getSelectedContext()); } this.extension.startScan( displayName, target, getSelectedUser(), contextSpecificObjects); } @Override public String validateFields() { if (this.isEmptyField(FIELD_START)) { return Constant.messages.getString("spider.custom.nostart.error"); } try { // Need both constructors as they catch slightly different issues ;) String url = this.getStringValue(FIELD_START); new URI(url, true); new URL(url); } catch (Exception e) { return Constant.messages.getString("spider.custom.nostart.error"); } if (this.target != null && !this.target.isValid()) { return Constant.messages.getString("spider.custom.nostart.error"); } return null; } }
apache-2.0
xiao-chen/hadoop
hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/package-info.java
861
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * This package contains Ozone S3 exceptions. */
apache-2.0
rockmkd/datacollector
container/src/test/java/com/streamsets/datacollector/runner/preview/StageConfigurationBuilder.java
3320
/*
 * Copyright 2017 StreamSets Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.streamsets.datacollector.runner.preview;

import com.streamsets.datacollector.config.ServiceConfiguration;
import com.streamsets.datacollector.config.StageConfiguration;
import com.streamsets.pipeline.api.Config;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Test-support builder assembling {@link StageConfiguration} instances. Every setting except the
 * instance and stage names has a sensible default, so tests only specify the pieces they care
 * about and chain the rest fluently.
 */
public class StageConfigurationBuilder {

  // Required identity, supplied via the constructor.
  private String instanceName;
  private String stageName;

  // Optional settings, pre-populated with defaults.
  private String library = "default";
  private int stageVersion = 1;
  private List<Config> configuration = Collections.emptyList();
  private Map<String, Object> uiInfo = null;
  private List<ServiceConfiguration> services = Collections.emptyList();
  private List<String> inputLanes = Collections.emptyList();
  private List<String> outputLanes = Collections.emptyList();
  private List<String> eventLanes = Collections.emptyList();

  public StageConfigurationBuilder(String instanceName, String stageName) {
    this.instanceName = instanceName;
    this.stageName = stageName;
  }

  /** Overrides the stage library name (defaults to "default"). */
  public StageConfigurationBuilder withLibrary(String library) {
    this.library = library;
    return this;
  }

  /** Overrides the stage version (defaults to 1). */
  public StageConfigurationBuilder withStageVersion(int version) {
    stageVersion = version;
    return this;
  }

  /** Replaces the stage configuration entries. */
  public StageConfigurationBuilder withConfig(Config... configs) {
    configuration = Arrays.asList(configs);
    return this;
  }

  /** Replaces the service configurations (varargs form). */
  public StageConfigurationBuilder withServices(ServiceConfiguration... services) {
    this.services = Arrays.asList(services);
    return this;
  }

  /** Replaces the service configurations (list form). */
  public StageConfigurationBuilder withServices(List<ServiceConfiguration> services) {
    this.services = services;
    return this;
  }

  /** Replaces the input lanes (varargs form). */
  public StageConfigurationBuilder withInputLanes(String... lanes) {
    inputLanes = Arrays.asList(lanes);
    return this;
  }

  /** Replaces the input lanes (list form). */
  public StageConfigurationBuilder withInputLanes(List<String> lanes) {
    inputLanes = lanes;
    return this;
  }

  /** Replaces the output lanes (varargs form). */
  public StageConfigurationBuilder withOutputLanes(String... lanes) {
    outputLanes = Arrays.asList(lanes);
    return this;
  }

  /** Replaces the output lanes (list form). */
  public StageConfigurationBuilder withOutputLanes(List<String> lanes) {
    outputLanes = lanes;
    return this;
  }

  /** Replaces the event lanes (varargs form). */
  public StageConfigurationBuilder withEventLanes(String... lanes) {
    eventLanes = Arrays.asList(lanes);
    return this;
  }

  /** Replaces the event lanes (list form). */
  public StageConfigurationBuilder withEventLanes(List<String> lanes) {
    eventLanes = lanes;
    return this;
  }

  /** Materializes the accumulated state into an immutable {@link StageConfiguration}. */
  public StageConfiguration build() {
    return new StageConfiguration(
        instanceName,
        library,
        stageName,
        stageVersion,
        configuration,
        uiInfo,
        services,
        inputLanes,
        outputLanes,
        eventLanes
    );
  }
}
apache-2.0
flofreud/aws-sdk-java
aws-java-sdk-emr/src/main/java/com/amazonaws/services/elasticmapreduce/model/EbsBlockDevice.java
6429
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.elasticmapreduce.model;

import java.io.Serializable;

/**
 * <p>
 * Configuration of requested EBS block device associated with the instance
 * group.
 * </p>
 */
public class EbsBlockDevice implements Serializable, Cloneable {

    /**
     * <p>
     * EBS volume specifications such as volume type, IOPS, and size(GiB) that
     * will be requested for the EBS volume attached to an EC2 instance in the
     * cluster.
     * </p>
     */
    private VolumeSpecification volumeSpecification;

    /**
     * <p>
     * The device name that is exposed to the instance, such as /dev/sdh.
     * </p>
     */
    private String device;

    /**
     * Sets the EBS volume specification (volume type, IOPS, size in GiB)
     * requested for the EBS volume attached to an EC2 instance in the cluster.
     *
     * @param volumeSpecification
     *        the requested volume specification
     */
    public void setVolumeSpecification(VolumeSpecification volumeSpecification) {
        this.volumeSpecification = volumeSpecification;
    }

    /**
     * Returns the EBS volume specification (volume type, IOPS, size in GiB)
     * requested for the EBS volume attached to an EC2 instance in the cluster.
     *
     * @return the requested volume specification
     */
    public VolumeSpecification getVolumeSpecification() {
        return this.volumeSpecification;
    }

    /**
     * Fluent variant of {@link #setVolumeSpecification(VolumeSpecification)}.
     *
     * @param volumeSpecification
     *        the requested volume specification
     * @return this object, for method chaining
     */
    public EbsBlockDevice withVolumeSpecification(
            VolumeSpecification volumeSpecification) {
        setVolumeSpecification(volumeSpecification);
        return this;
    }

    /**
     * Sets the device name that is exposed to the instance, such as /dev/sdh.
     *
     * @param device
     *        the exposed device name
     */
    public void setDevice(String device) {
        this.device = device;
    }

    /**
     * Returns the device name that is exposed to the instance, such as
     * /dev/sdh.
     *
     * @return the exposed device name
     */
    public String getDevice() {
        return this.device;
    }

    /**
     * Fluent variant of {@link #setDevice(String)}.
     *
     * @param device
     *        the exposed device name
     * @return this object, for method chaining
     */
    public EbsBlockDevice withDevice(String device) {
        setDevice(device);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null attributes are included.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getVolumeSpecification() != null) {
            sb.append("VolumeSpecification: ").append(getVolumeSpecification()).append(",");
        }
        if (getDevice() != null) {
            sb.append("Device: ").append(getDevice());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // null fails instanceof, so this also rejects null arguments.
        if (!(obj instanceof EbsBlockDevice)) {
            return false;
        }
        EbsBlockDevice other = (EbsBlockDevice) obj;

        VolumeSpecification otherSpec = other.getVolumeSpecification();
        VolumeSpecification thisSpec = this.getVolumeSpecification();
        if ((otherSpec == null) != (thisSpec == null)) {
            return false;
        }
        if (otherSpec != null && !otherSpec.equals(thisSpec)) {
            return false;
        }

        String otherDevice = other.getDevice();
        String thisDevice = this.getDevice();
        if ((otherDevice == null) != (thisDevice == null)) {
            return false;
        }
        if (otherDevice != null && !otherDevice.equals(thisDevice)) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        // Same 31-based accumulation as the generated SDK code, so hashes
        // stay stable across versions.
        VolumeSpecification spec = getVolumeSpecification();
        hashCode = prime * hashCode + (spec == null ? 0 : spec.hashCode());
        String dev = getDevice();
        hashCode = prime * hashCode + (dev == null ? 0 : dev.hashCode());
        return hashCode;
    }

    @Override
    public EbsBlockDevice clone() {
        try {
            return (EbsBlockDevice) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
apache-2.0
sgmiller/hiveelements
galeforce/src/main/java/com/beecavegames/concurrency/Labeled.java
91
package com.beecavegames.concurrency;

/**
 * Implemented by objects that expose a short human-readable label,
 * e.g. for logging or for naming threads and tasks.
 */
public interface Labeled {
    /**
     * Returns the label identifying this object.
     * Nullability is not specified by this interface.
     */
    String getLabel();
}
apache-2.0
falko/camunda-bpm-platform
engine/src/main/java/org/camunda/bpm/engine/impl/bpmn/parser/ConditionalEventDefinition.java
3979
/*
 * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
 * under one or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information regarding copyright
 * ownership. Camunda licenses this file to you under the Apache License,
 * Version 2.0; you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.impl.bpmn.parser;

import java.io.Serializable;
import java.util.Set;

import org.camunda.bpm.engine.delegate.DelegateExecution;
import org.camunda.bpm.engine.impl.Condition;
import org.camunda.bpm.engine.impl.core.variable.event.VariableEvent;
import org.camunda.bpm.engine.impl.event.EventType;
import org.camunda.bpm.engine.impl.pvm.process.ActivityImpl;

/**
 * Represents the conditional event definition corresponding to the
 * ConditionalEvent defined by the BPMN 2.0 spec.
 *
 * @author Christopher Zell &lt;christopher.zell@camunda.com&gt;
 */
public class ConditionalEventDefinition extends EventSubscriptionDeclaration implements Serializable {

  private static final long serialVersionUID = 1L;

  protected String conditionAsString;
  protected final Condition condition;
  protected boolean interrupting;
  protected String variableName;
  protected Set<String> variableEvents;
  protected ActivityImpl conditionalActivity;

  public ConditionalEventDefinition(Condition condition, ActivityImpl conditionalActivity) {
    // NOTE: EventType.CONDITONAL is the actual (misspelled) engine constant.
    super(null, EventType.CONDITONAL);
    this.activityId = conditionalActivity.getActivityId();
    this.conditionalActivity = conditionalActivity;
    this.condition = condition;
  }

  /**
   * Decides whether this definition is interested in the given variable change.
   * A null/empty variable name or event set acts as a wildcard.
   */
  public boolean shouldEvaluateForVariableEvent(VariableEvent event) {
    // Reject if a specific variable name is configured and does not match.
    if (variableName != null && !event.getVariableInstance().getName().equals(variableName)) {
      return false;
    }
    // Accept any event type unless a non-empty filter set is configured.
    return variableEvents == null
        || variableEvents.isEmpty()
        || variableEvents.contains(event.getEventName());
  }

  /**
   * Evaluates the condition against the given execution.
   *
   * @throws IllegalStateException if no condition was parsed
   */
  public boolean evaluate(DelegateExecution execution) {
    if (condition == null) {
      throw new IllegalStateException("Conditional event must have a condition!");
    }
    return condition.evaluate(execution, execution);
  }

  /**
   * Like {@link #evaluate(DelegateExecution)} but uses the condition's
   * non-throwing evaluation variant.
   *
   * @throws IllegalStateException if no condition was parsed
   */
  public boolean tryEvaluate(DelegateExecution execution) {
    if (condition == null) {
      throw new IllegalStateException("Conditional event must have a condition!");
    }
    return condition.tryEvaluate(execution, execution);
  }

  /**
   * Evaluates only when the variable event is relevant (or absent);
   * otherwise returns false without touching the condition.
   */
  public boolean tryEvaluate(VariableEvent variableEvent, DelegateExecution execution) {
    return (variableEvent == null || shouldEvaluateForVariableEvent(variableEvent))
        && tryEvaluate(execution);
  }

  public ActivityImpl getConditionalActivity() {
    return conditionalActivity;
  }

  public void setConditionalActivity(ActivityImpl conditionalActivity) {
    this.conditionalActivity = conditionalActivity;
  }

  public boolean isInterrupting() {
    return interrupting;
  }

  public void setInterrupting(boolean interrupting) {
    this.interrupting = interrupting;
  }

  public String getVariableName() {
    return variableName;
  }

  public void setVariableName(String variableName) {
    this.variableName = variableName;
  }

  public Set<String> getVariableEvents() {
    return variableEvents;
  }

  public void setVariableEvents(Set<String> variableEvents) {
    this.variableEvents = variableEvents;
  }

  public String getConditionAsString() {
    return conditionAsString;
  }

  public void setConditionAsString(String conditionAsString) {
    this.conditionAsString = conditionAsString;
  }

}
apache-2.0
wmixvideo/nfe
src/main/java/com/fincatto/documentofiscal/nfe310/classes/nota/NFNotaInfoTransporte.java
3099
package com.fincatto.documentofiscal.nfe310.classes.nota;

import com.fincatto.documentofiscal.DFBase;
import com.fincatto.documentofiscal.nfe310.classes.NFModalidadeFrete;
import com.fincatto.documentofiscal.validadores.DFListValidador;
import com.fincatto.documentofiscal.validadores.DFStringValidador;
import org.simpleframework.xml.Element;
import org.simpleframework.xml.ElementList;

import java.util.List;

/**
 * Transport section ("transp") of an NF-e (Brazilian electronic invoice, layout 3.10).
 * Setters validate size limits before storing; the XML mapping is driven by the
 * SimpleXML annotations on the fields.
 */
public class NFNotaInfoTransporte extends DFBase {
    private static final long serialVersionUID = 1172316192774549031L;

    @Element(name = "modFrete")
    private NFModalidadeFrete modalidadeFrete;

    @Element(name = "transporta", required = false)
    private NFNotaInfoTransportador transportador;

    @Element(name = "retTransp", required = false)
    private NFNotaInfoRetencaoICMSTransporte icmsTransporte;

    @Element(name = "veicTransp", required = false)
    private NFNotaInfoVeiculo veiculo;

    @ElementList(entry = "reboque", inline = true, required = false)
    private List<NFNotaInfoReboque> reboques;

    @Element(name = "vagao", required = false)
    private String vagao;

    @Element(name = "balsa", required = false)
    private String balsa;

    @ElementList(entry = "vol", inline = true, required = false)
    private List<NFNotaInfoVolume> volumes;

    /** Freight modality (who pays/contracts the transport). */
    public NFModalidadeFrete getModalidadeFrete() {
        return this.modalidadeFrete;
    }

    public void setModalidadeFrete(final NFModalidadeFrete modalidadeFrete) {
        this.modalidadeFrete = modalidadeFrete;
    }

    /** Carrier identification, when present. */
    public NFNotaInfoTransportador getTransportador() {
        return this.transportador;
    }

    public void setTransportador(final NFNotaInfoTransportador transportador) {
        this.transportador = transportador;
    }

    /** ICMS withheld on the transport service, when present. */
    public NFNotaInfoRetencaoICMSTransporte getIcmsTransporte() {
        return this.icmsTransporte;
    }

    public void setIcmsTransporte(final NFNotaInfoRetencaoICMSTransporte icmsTransporte) {
        this.icmsTransporte = icmsTransporte;
    }

    /** Road vehicle used for the transport, when present. */
    public NFNotaInfoVeiculo getVeiculo() {
        return this.veiculo;
    }

    public void setVeiculo(final NFNotaInfoVeiculo veiculo) {
        this.veiculo = veiculo;
    }

    /** Trailers; the NF-e schema allows at most 5. */
    public List<NFNotaInfoReboque> getReboques() {
        return this.reboques;
    }

    public void setReboques(final List<NFNotaInfoReboque> reboques) {
        // Schema limit: up to 5 trailers.
        DFListValidador.tamanho5(reboques, "Reboques");
        this.reboques = reboques;
    }

    /** Railway wagon identification, limited to 20 characters. */
    public String getVagao() {
        return this.vagao;
    }

    public void setVagao(final String vagao) {
        DFStringValidador.tamanho20(vagao, "Vagao");
        this.vagao = vagao;
    }

    /** Ferry identification, limited to 20 characters. */
    public String getBalsa() {
        return this.balsa;
    }

    public void setBalsa(final String balsa) {
        DFStringValidador.tamanho20(balsa, "Balsa");
        this.balsa = balsa;
    }

    /** Transported volumes; the NF-e schema allows at most 5000. */
    public List<NFNotaInfoVolume> getVolumes() {
        return this.volumes;
    }

    public void setVolumes(final List<NFNotaInfoVolume> volumes) {
        // Schema limit: up to 5000 volume entries.
        DFListValidador.tamanho5000(volumes, "Volumes");
        this.volumes = volumes;
    }
}
apache-2.0
ollie314/kafka
tools/src/main/java/org/apache/kafka/tools/VerifiableLog4jAppender.java
10984
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.tools;

import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import net.sourceforge.argparse4j.inf.ArgumentParserException;
import net.sourceforge.argparse4j.inf.Namespace;
import org.apache.kafka.common.security.auth.SecurityProtocol;
import org.apache.kafka.common.utils.Exit;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Properties;

import static net.sourceforge.argparse4j.impl.Arguments.store;

/**
 * Primarily intended for use with system testing, this appender produces message
 * to Kafka on each "append" request. For example, this helps with end-to-end tests
 * of KafkaLog4jAppender.
 *
 * When used as a command-line tool, it appends increasing integers. It will produce a
 * fixed number of messages unless the default max-messages -1 is used, in which case
 * it appends indefinitely.
 */
public class VerifiableLog4jAppender {
    // Every info-level log on this logger is routed to Kafka by the
    // programmatically-configured KafkaLog4jAppender (see createFromArgs).
    Logger logger = Logger.getLogger(VerifiableLog4jAppender.class);

    // If maxMessages < 0, log until the process is killed externally
    private long maxMessages = -1;

    // Hook to trigger logging thread to stop logging messages
    // (set from the shutdown hook; volatile so the main loop sees it).
    private volatile boolean stopLogging = false;

    /** Get the command-line argument parser. */
    private static ArgumentParser argParser() {
        ArgumentParser parser = ArgumentParsers
            .newArgumentParser("verifiable-log4j-appender")
            .defaultHelp(true)
            .description("This tool produces increasing integers to the specified topic using KafkaLog4jAppender.");

        parser.addArgument("--topic")
            .action(store())
            .required(true)
            .type(String.class)
            .metavar("TOPIC")
            .help("Produce messages to this topic.");

        parser.addArgument("--broker-list")
            .action(store())
            .required(true)
            .type(String.class)
            .metavar("HOST1:PORT1[,HOST2:PORT2[...]]")
            .dest("brokerList")
            .help("Comma-separated list of Kafka brokers in the form HOST1:PORT1,HOST2:PORT2,...");

        parser.addArgument("--max-messages")
            .action(store())
            .required(false)
            .setDefault(-1)
            .type(Integer.class)
            .metavar("MAX-MESSAGES")
            .dest("maxMessages")
            .help("Produce this many messages. If -1, produce messages until the process is killed externally.");

        parser.addArgument("--acks")
            .action(store())
            .required(false)
            .setDefault("-1")
            .type(String.class)
            .choices("0", "1", "-1")
            .metavar("ACKS")
            .help("Acks required on each produced message. See Kafka docs on request.required.acks for details.");

        parser.addArgument("--security-protocol")
            .action(store())
            .required(false)
            .setDefault("PLAINTEXT")
            .type(String.class)
            .choices("PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL")
            .metavar("SECURITY-PROTOCOL")
            .dest("securityProtocol")
            .help("Security protocol to be used while communicating with Kafka brokers.");

        parser.addArgument("--ssl-truststore-location")
            .action(store())
            .required(false)
            .type(String.class)
            .metavar("SSL-TRUSTSTORE-LOCATION")
            .dest("sslTruststoreLocation")
            .help("Location of SSL truststore to use.");

        parser.addArgument("--ssl-truststore-password")
            .action(store())
            .required(false)
            .type(String.class)
            .metavar("SSL-TRUSTSTORE-PASSWORD")
            .dest("sslTruststorePassword")
            .help("Password for SSL truststore to use.");

        parser.addArgument("--appender.config")
            .action(store())
            .required(false)
            .type(String.class)
            .metavar("CONFIG_FILE")
            .help("Log4jAppender config properties file.");

        parser.addArgument("--sasl-kerberos-service-name")
            .action(store())
            .required(false)
            .type(String.class)
            .metavar("SASL-KERBEROS-SERVICE-NAME")
            .dest("saslKerberosServiceName")
            .help("Name of sasl kerberos service.");

        parser.addArgument("--client-jaas-conf-path")
            .action(store())
            .required(false)
            .type(String.class)
            .metavar("CLIENT-JAAS-CONF-PATH")
            .dest("clientJaasConfPath")
            .help("Path of JAAS config file of Kafka client.");

        parser.addArgument("--kerb5-conf-path")
            .action(store())
            .required(false)
            .type(String.class)
            .metavar("KERB5-CONF-PATH")
            .dest("kerb5ConfPath")
            .help("Path of Kerb5 config file.");

        return parser;
    }

    /**
     * Read a properties file from the given path
     * @param filename The path of the file to read
     *
     * Note: this duplication of org.apache.kafka.common.utils.Utils.loadProps is unfortunate
     * but *intentional*. In order to use VerifiableProducer in compatibility and upgrade tests,
     * we use VerifiableProducer from the development tools package, and run it against 0.8.X.X kafka jars.
     * Since this method is not in Utils in the 0.8.X.X jars, we have to cheat a bit and duplicate.
     */
    public static Properties loadProps(String filename) throws IOException {
        Properties props = new Properties();
        try (InputStream propStream = Files.newInputStream(Paths.get(filename))) {
            props.load(propStream);
        }
        return props;
    }

    /** Construct a VerifiableLog4jAppender object from command-line arguments. */
    public static VerifiableLog4jAppender createFromArgs(String[] args) {
        ArgumentParser parser = argParser();
        VerifiableLog4jAppender producer = null;

        try {
            Namespace res = parser.parseArgs(args);

            int maxMessages = res.getInt("maxMessages");
            String topic = res.getString("topic");
            String configFile = res.getString("appender.config");

            // Build a log4j configuration entirely in code: route the root
            // logger through KafkaLog4jAppender to the requested topic.
            Properties props = new Properties();
            props.setProperty("log4j.rootLogger", "INFO, KAFKA");
            props.setProperty("log4j.appender.KAFKA", "org.apache.kafka.log4jappender.KafkaLog4jAppender");
            props.setProperty("log4j.appender.KAFKA.layout", "org.apache.log4j.PatternLayout");
            props.setProperty("log4j.appender.KAFKA.layout.ConversionPattern", "%-5p: %c - %m%n");
            props.setProperty("log4j.appender.KAFKA.BrokerList", res.getString("brokerList"));
            props.setProperty("log4j.appender.KAFKA.Topic", topic);
            props.setProperty("log4j.appender.KAFKA.RequiredNumAcks", res.getString("acks"));
            props.setProperty("log4j.appender.KAFKA.SyncSend", "true");
            final String securityProtocol = res.getString("securityProtocol");
            // Only set security properties when a non-default protocol is requested.
            if (securityProtocol != null && !securityProtocol.equals(SecurityProtocol.PLAINTEXT.toString())) {
                props.setProperty("log4j.appender.KAFKA.SecurityProtocol", securityProtocol);
            }
            if (securityProtocol != null && securityProtocol.contains("SSL")) {
                props.setProperty("log4j.appender.KAFKA.SslTruststoreLocation", res.getString("sslTruststoreLocation"));
                props.setProperty("log4j.appender.KAFKA.SslTruststorePassword", res.getString("sslTruststorePassword"));
            }
            if (securityProtocol != null && securityProtocol.contains("SASL")) {
                props.setProperty("log4j.appender.KAFKA.SaslKerberosServiceName", res.getString("saslKerberosServiceName"));
                props.setProperty("log4j.appender.KAFKA.clientJaasConfPath", res.getString("clientJaasConfPath"));
                props.setProperty("log4j.appender.KAFKA.kerb5ConfPath", res.getString("kerb5ConfPath"));
            }
            props.setProperty("log4j.logger.kafka.log4j", "INFO, KAFKA");
            // Changing log level from INFO to WARN as a temporary workaround for KAFKA-6415. This is to
            // avoid deadlock in system tests when producer network thread appends to log while updating metadata.
            props.setProperty("log4j.logger.org.apache.kafka.clients.Metadata", "WARN, KAFKA");

            // An optional properties file can add to or override the
            // programmatic configuration above.
            if (configFile != null) {
                try {
                    props.putAll(loadProps(configFile));
                } catch (IOException e) {
                    throw new ArgumentParserException(e.getMessage(), parser);
                }
            }

            producer = new VerifiableLog4jAppender(props, maxMessages);
        } catch (ArgumentParserException e) {
            // No args: print usage and exit cleanly; bad args: report and fail.
            if (args.length == 0) {
                parser.printHelp();
                Exit.exit(0);
            } else {
                parser.handleError(e);
                Exit.exit(1);
            }
        }

        return producer;
    }

    public VerifiableLog4jAppender(Properties props, int maxMessages) {
        this.maxMessages = maxMessages;
        // Installs the Kafka appender built in createFromArgs.
        PropertyConfigurator.configure(props);
    }

    public static void main(String[] args) throws IOException {
        final VerifiableLog4jAppender appender = createFromArgs(args);
        boolean infinite = appender.maxMessages < 0;

        // Allow a clean stop via SIGTERM: the hook flips the volatile flag
        // checked by the produce loop below.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                // Trigger main thread to stop producing messages
                appender.stopLogging = true;
            }
        });

        long maxMessages = infinite ? Long.MAX_VALUE : appender.maxMessages;
        for (long i = 0; i < maxMessages; i++) {
            if (appender.stopLogging) {
                break;
            }
            appender.append(String.format("%d", i));
        }
    }

    private void append(String msg) {
        logger.info(msg);
    }
}
apache-2.0
ecatmur/avro
lang/java/avro/src/main/java/org/apache/avro/file/FileReader.java
1857
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.file;

import java.io.IOException;
import java.io.Closeable;
import java.util.Iterator;

import org.apache.avro.Schema;

/** Interface for reading data from a file. */
public interface FileReader<D> extends Iterator<D>, Iterable<D>, Closeable {

  /** Return the schema for data in this file. */
  Schema getSchema();

  /** Read the next datum from the file.
   * @param reuse an instance to reuse, to reduce allocation; implementations
   * may return it, a different instance, or ignore it.
   * @throws NoSuchElementException if no more remain in the file.
   */
  D next(D reuse) throws IOException;

  /** Move to the next synchronization point after a position.  To process a
   * range of file entries, call this with the starting position, then check
   * {@link #pastSync(long)} with the end point before each call to {@link
   * #next()}. */
  void sync(long position) throws IOException;

  /** Return true if past the next synchronization point after a position. */
  boolean pastSync(long position) throws IOException;

  /** Return the current position in the input. */
  long tell() throws IOException;

}
apache-2.0
tseen/Federated-HDFS
tseenliu/FedHDFS-hadoop-src/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
7688
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.applicationhistoryservice;

import java.io.IOException;

import org.junit.Assert;

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
import org.junit.Before;
import org.junit.Test;

/**
 * Unit tests for {@link MemoryApplicationHistoryStore}. Each test exercises
 * one record type (application / attempt / container) through three phases:
 * out-of-order writes (finish before start must fail), normal start+finish
 * round-trips with read-back verification, and duplicate writes (must fail).
 * Helper write* methods come from {@code ApplicationHistoryStoreTestUtils}.
 */
public class TestMemoryApplicationHistoryStore extends
    ApplicationHistoryStoreTestUtils {

  @Before
  public void setup() {
    // Fresh in-memory store per test; 'store' is inherited from the base class.
    store = new MemoryApplicationHistoryStore();
  }

  @Test
  public void testReadWriteApplicationHistory() throws Exception {
    // Out of order: finish before start must be rejected.
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    try {
      writeApplicationFinishData(appId);
      Assert.fail();
    } catch (IOException e) {
      Assert.assertTrue(e.getMessage().contains(
        "is stored before the start information"));
    }
    // Normal: start+finish for several apps, then read each one back.
    int numApps = 5;
    for (int i = 1; i <= numApps; ++i) {
      appId = ApplicationId.newInstance(0, i);
      writeApplicationStartData(appId);
      writeApplicationFinishData(appId);
    }
    Assert.assertEquals(numApps, store.getAllApplications().size());
    for (int i = 1; i <= numApps; ++i) {
      appId = ApplicationId.newInstance(0, i);
      ApplicationHistoryData data = store.getApplication(appId);
      Assert.assertNotNull(data);
      // The test utils populate name/diagnostics with the stringified id.
      Assert.assertEquals(appId.toString(), data.getApplicationName());
      Assert.assertEquals(appId.toString(), data.getDiagnosticsInfo());
    }
    // Write again: duplicates of start or finish must be rejected.
    appId = ApplicationId.newInstance(0, 1);
    try {
      writeApplicationStartData(appId);
      Assert.fail();
    } catch (IOException e) {
      Assert.assertTrue(e.getMessage().contains("is already stored"));
    }
    try {
      writeApplicationFinishData(appId);
      Assert.fail();
    } catch (IOException e) {
      Assert.assertTrue(e.getMessage().contains("is already stored"));
    }
  }

  @Test
  public void testReadWriteApplicationAttemptHistory() throws Exception {
    // Out of order: attempt finish before attempt start must be rejected.
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    try {
      writeApplicationAttemptFinishData(appAttemptId);
      Assert.fail();
    } catch (IOException e) {
      Assert.assertTrue(e.getMessage().contains(
        "is stored before the start information"));
    }
    // Normal: several attempts under one started application.
    int numAppAttempts = 5;
    writeApplicationStartData(appId);
    for (int i = 1; i <= numAppAttempts; ++i) {
      appAttemptId = ApplicationAttemptId.newInstance(appId, i);
      writeApplicationAttemptStartData(appAttemptId);
      writeApplicationAttemptFinishData(appAttemptId);
    }
    Assert.assertEquals(numAppAttempts, store.getApplicationAttempts(appId)
      .size());
    for (int i = 1; i <= numAppAttempts; ++i) {
      appAttemptId = ApplicationAttemptId.newInstance(appId, i);
      ApplicationAttemptHistoryData data =
          store.getApplicationAttempt(appAttemptId);
      Assert.assertNotNull(data);
      // The test utils populate host/diagnostics with the stringified id.
      Assert.assertEquals(appAttemptId.toString(), data.getHost());
      Assert.assertEquals(appAttemptId.toString(), data.getDiagnosticsInfo());
    }
    writeApplicationFinishData(appId);
    // Write again: duplicate attempt start/finish must be rejected.
    appAttemptId = ApplicationAttemptId.newInstance(appId, 1);
    try {
      writeApplicationAttemptStartData(appAttemptId);
      Assert.fail();
    } catch (IOException e) {
      Assert.assertTrue(e.getMessage().contains("is already stored"));
    }
    try {
      writeApplicationAttemptFinishData(appAttemptId);
      Assert.fail();
    } catch (IOException e) {
      Assert.assertTrue(e.getMessage().contains("is already stored"));
    }
  }

  @Test
  public void testReadWriteContainerHistory() throws Exception {
    // Out of order: container finish before container start must be rejected.
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
    try {
      writeContainerFinishData(containerId);
      Assert.fail();
    } catch (IOException e) {
      Assert.assertTrue(e.getMessage().contains(
        "is stored before the start information"));
    }
    // Normal: several containers under one started attempt.
    writeApplicationAttemptStartData(appAttemptId);
    int numContainers = 5;
    for (int i = 1; i <= numContainers; ++i) {
      containerId = ContainerId.newInstance(appAttemptId, i);
      writeContainerStartData(containerId);
      writeContainerFinishData(containerId);
    }
    Assert
      .assertEquals(numContainers, store.getContainers(appAttemptId).size());
    for (int i = 1; i <= numContainers; ++i) {
      containerId = ContainerId.newInstance(appAttemptId, i);
      ContainerHistoryData data = store.getContainer(containerId);
      Assert.assertNotNull(data);
      // The test utils set priority from the container id and diagnostics
      // from the stringified id.
      Assert.assertEquals(Priority.newInstance(containerId.getId()),
        data.getPriority());
      Assert.assertEquals(containerId.toString(), data.getDiagnosticsInfo());
    }
    // Container 1 doubles as the AM container in the test utils.
    ContainerHistoryData masterContainer = store.getAMContainer(appAttemptId);
    Assert.assertNotNull(masterContainer);
    Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
      masterContainer.getContainerId());
    writeApplicationAttemptFinishData(appAttemptId);
    // Write again: duplicate container start/finish must be rejected.
    containerId = ContainerId.newInstance(appAttemptId, 1);
    try {
      writeContainerStartData(containerId);
      Assert.fail();
    } catch (IOException e) {
      Assert.assertTrue(e.getMessage().contains("is already stored"));
    }
    try {
      writeContainerFinishData(containerId);
      Assert.fail();
    } catch (IOException e) {
      Assert.assertTrue(e.getMessage().contains("is already stored"));
    }
  }

  @Test
  public void testMassiveWriteContainerHistory() throws IOException {
    // Smoke test for memory footprint: 100k container records should add
    // less than ~200 MB of heap. NOTE(review): free-memory deltas are an
    // approximation and can be GC-sensitive.
    long mb = 1024 * 1024;
    Runtime runtime = Runtime.getRuntime();
    long usedMemoryBefore = (runtime.totalMemory() - runtime.freeMemory()) / mb;
    int numContainers = 100000;
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    for (int i = 1; i <= numContainers; ++i) {
      ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
      writeContainerStartData(containerId);
      writeContainerFinishData(containerId);
    }
    long usedMemoryAfter = (runtime.totalMemory() - runtime.freeMemory()) / mb;
    Assert.assertTrue((usedMemoryAfter - usedMemoryBefore) < 200);
  }
}
apache-2.0
narendragoyal/hbase
hbase-client/src/main/java/org/apache/hadoop/hbase/client/ZooKeeperRegistry.java
4720
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import java.io.IOException;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.RegionLocations;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.zookeeper.KeeperException;

/**
 * A cluster registry that stores to zookeeper.
 */
class ZooKeeperRegistry implements Registry {
  private static final Log LOG = LogFactory.getLog(ZooKeeperRegistry.class);

  // Needs an instance of hci to function. Set after construct this instance.
  ConnectionImplementation hci;

  @Override
  public void init(Connection connection) {
    if (!(connection instanceof ConnectionImplementation)) {
      throw new RuntimeException("This registry depends on ConnectionImplementation");
    }
    this.hci = (ConnectionImplementation) connection;
  }

  /**
   * Looks up the location(s) of the meta region in ZooKeeper, blocking up to the
   * connection's rpc timeout until a server is known.
   *
   * @return the meta region locations (one slot per replica, possibly containing
   *         nulls for replicas with no known server), or {@code null} if nothing
   *         could be read or the wait was interrupted
   */
  @Override
  public RegionLocations getMetaRegionLocation() throws IOException {
    ZooKeeperKeepAliveConnection zkw = hci.getKeepAliveZooKeeperWatcher();
    try {
      if (LOG.isTraceEnabled()) {
        LOG.trace("Looking up meta region location in ZK," + " connection=" + this);
      }
      List<ServerName> servers = new MetaTableLocator().blockUntilAvailable(zkw, hci.rpcTimeout,
          hci.getConfiguration());
      if (LOG.isTraceEnabled()) {
        if (servers == null) {
          LOG.trace("Looked up meta region location, connection=" + this + "; servers = null");
        } else {
          StringBuilder str = new StringBuilder();
          for (ServerName s : servers) {
            str.append(s.toString());
            str.append(" ");
          }
          LOG.trace("Looked up meta region location, connection=" + this + "; servers = "
              + str.toString());
        }
      }
      if (servers == null) {
        return null;
      }
      // Build one HRegionLocation per replica; the list index is the replica id.
      HRegionLocation[] locs = new HRegionLocation[servers.size()];
      int i = 0;
      for (ServerName server : servers) {
        HRegionInfo h = RegionReplicaUtil.getRegionInfoForReplica(
            HRegionInfo.FIRST_META_REGIONINFO, i);
        if (server == null) {
          locs[i++] = null;
        } else {
          locs[i++] = new HRegionLocation(h, server, 0);
        }
      }
      return new RegionLocations(locs);
    } catch (InterruptedException e) {
      // Restore the interrupt flag for the caller; report "unknown" rather than fail.
      Thread.currentThread().interrupt();
      return null;
    } finally {
      zkw.close();
    }
  }

  private String clusterId = null;

  /**
   * Returns the cluster id stored in ZooKeeper, caching it after the first
   * successful read. Returns {@code null} if it cannot be read.
   */
  @Override
  public String getClusterId() {
    if (this.clusterId != null) {
      return this.clusterId;
    }
    // No synchronized here, worse case we will retrieve it twice, that's
    // not an issue.
    ZooKeeperKeepAliveConnection zkw = null;
    try {
      zkw = hci.getKeepAliveZooKeeperWatcher();
      this.clusterId = ZKClusterId.readClusterIdZNode(zkw);
      if (this.clusterId == null) {
        LOG.info("ClusterId read in ZooKeeper is null");
      }
    } catch (KeeperException | IOException e) {
      // Both failure modes are handled identically: leave clusterId null and log.
      LOG.warn("Can't retrieve clusterId from Zookeeper", e);
    } finally {
      if (zkw != null) {
        zkw.close();
      }
    }
    return this.clusterId;
  }

  /**
   * Counts the currently registered region servers by counting the children of
   * the region-server znode.
   */
  @Override
  public int getCurrentNrHRS() throws IOException {
    ZooKeeperKeepAliveConnection zkw = hci.getKeepAliveZooKeeperWatcher();
    try {
      // We go to zk rather than to master to get count of regions to avoid
      // HTable having a Master dependency. See HBase-2828
      return ZKUtil.getNumberOfChildren(zkw, zkw.rsZNode);
    } catch (KeeperException ke) {
      throw new IOException("Unexpected ZooKeeper exception", ke);
    } finally {
      zkw.close();
    }
  }
}
apache-2.0
varshavaradarajan/gocd
server/src/test-fast/java/com/thoughtworks/go/server/service/plugins/processor/serverinfo/ServerInfoRequestProcessorTest.java
4406
/*
 * Copyright 2017 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.server.service.plugins.processor.serverinfo;

import com.thoughtworks.go.config.ServerConfig;
import com.thoughtworks.go.plugin.access.common.settings.GoPluginExtension;
import com.thoughtworks.go.plugin.api.GoPluginIdentifier;
import com.thoughtworks.go.plugin.api.request.DefaultGoApiRequest;
import com.thoughtworks.go.plugin.api.response.GoApiResponse;
import com.thoughtworks.go.plugin.infra.PluginRequestProcessorRegistry;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import com.thoughtworks.go.server.service.GoConfigService;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;

import java.util.Arrays;
import java.util.Collections;

import static com.thoughtworks.go.server.service.plugins.processor.serverinfo.ServerInfoRequestProcessor.GET_SERVER_INFO;
import static java.lang.String.format;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;

public class ServerInfoRequestProcessorTest {
    @Mock
    private GoConfigService goConfigService;
    @Mock
    private GoPluginExtension pluginExtension;
    @Mock
    private GoPluginDescriptor pluginDescriptor;

    private PluginRequestProcessorRegistry processorRegistry;
    private ServerInfoRequestProcessor processor;
    private ServerConfig serverConfig;
    private String pluginId = "plugin_id";

    @Before
    public void setUp() throws Exception {
        initMocks(this);

        // Real server config with a generated server id and both site URLs set.
        serverConfig = new ServerConfig();
        serverConfig.ensureServerIdExists();
        serverConfig.setSecureSiteUrl("https://example.com:8154/go");
        serverConfig.setSiteUrl("http://example.com:8153/go");

        processorRegistry = new PluginRequestProcessorRegistry();
        processor = new ServerInfoRequestProcessor(processorRegistry, goConfigService);

        when(goConfigService.serverConfig()).thenReturn(serverConfig);
        when(pluginExtension.extensionName()).thenReturn("extension1");
        when(pluginDescriptor.id()).thenReturn(pluginId);
    }

    // Builds a GET_SERVER_INFO request for the given api version and extension.
    private DefaultGoApiRequest serverInfoRequest(String apiVersion, String extension) {
        return new DefaultGoApiRequest(GET_SERVER_INFO, apiVersion,
                new GoPluginIdentifier(extension, Collections.singletonList("1.0")));
    }

    @Test
    public void shouldRegisterAPIRequestWithProcessor() {
        DefaultGoApiRequest apiRequest = serverInfoRequest("1.0", "extension1");

        assertThat(processorRegistry.canProcess(apiRequest), is(true));
    }

    @Test
    public void shouldReturnAServerIdInJSONForm() {
        DefaultGoApiRequest apiRequest = serverInfoRequest("1.0", "extension1");

        GoApiResponse apiResponse = processor.process(pluginDescriptor, apiRequest);

        String expectedBody = format(
                "{\"server_id\":\"%s\",\"site_url\":\"%s\",\"secure_site_url\":\"%s\"}",
                serverConfig.getServerId(),
                serverConfig.getSiteUrl().getUrl(),
                serverConfig.getSecureSiteUrl().getUrl());
        assertThat(apiResponse.responseCode(), is(200));
        assertThat(apiResponse.responseBody(), is(expectedBody));
    }

    @Test
    public void shouldReturnSuccessForServerInfoV2() {
        DefaultGoApiRequest apiRequest = serverInfoRequest("2.0", "extension1");

        GoApiResponse apiResponse = processor.process(pluginDescriptor, apiRequest);

        assertThat(apiResponse.responseCode(), is(200));
    }

    @Test
    public void shouldReturnAErrorResponseIfExtensionDoesNotSupportServerInfo() {
        DefaultGoApiRequest apiRequest = serverInfoRequest("bad-version", "foo");

        GoApiResponse apiResponse = processor.process(pluginDescriptor, apiRequest);

        assertThat(apiResponse.responseCode(), is(400));
    }
}
apache-2.0
chanakaudaya/developer-studio
bps/org.eclipse.bpel.ui/src/org/eclipse/bpel/ui/editparts/FaultHandlerEditPart.java
5167
/*******************************************************************************
 * Copyright (c) 2005 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IBM Corporation - initial API and implementation
 *******************************************************************************/
package org.eclipse.bpel.ui.editparts;

import org.eclipse.bpel.model.CompensationHandler;
import org.eclipse.bpel.model.EventHandler;
import org.eclipse.bpel.model.TerminationHandler;
import org.eclipse.bpel.ui.BPELUIPlugin;
import org.eclipse.bpel.ui.IBPELUIConstants;
import org.eclipse.bpel.ui.editparts.borders.RoundRectangleBorderWithDecoration;
import org.eclipse.bpel.ui.editparts.policies.BPELContainerEditPolicy;
import org.eclipse.bpel.ui.editparts.policies.BPELOrderedLayoutEditPolicy;
import org.eclipse.bpel.ui.editparts.policies.ContainerHighlightEditPolicy;
import org.eclipse.bpel.ui.figures.ILayoutAware;
import org.eclipse.bpel.ui.util.ModelHelper;
import org.eclipse.draw2d.Border;
import org.eclipse.draw2d.Figure;
import org.eclipse.draw2d.FlowLayout;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.MarginBorder;
import org.eclipse.draw2d.geometry.Insets;
import org.eclipse.gef.EditPolicy;
import org.eclipse.swt.graphics.Image;

/**
 * Edit part for the fault/event/compensation/termination handler figures in the
 * BPEL diagram. The same class serves all four handler kinds; the model object
 * behind the part decides which decoration image and flow direction are used.
 */
public class FaultHandlerEditPart extends BPELEditPart implements ILayoutAware{

	// Decoration image for the rounded-rectangle border; obtained from the
	// plugin's image registry in createFigure() (not owned, so never disposed).
	private Image image;
	// Inner figure that actually hosts children; getContentPane() returns it.
	private IFigure contentPane;
	// Margin border placed on the outer container figure.
	private Border containerBorder;

	// Left margin applied when laid out horizontally under a ProcessEditPart.
	public static final int LEFT_MARGIN = 30;

	/**
	 * Installs selection-feedback, container and ordered-layout edit policies
	 * on top of whatever the superclass installs.
	 */
	protected void createEditPolicies() {
		super.createEditPolicies();
		// Show the selection rectangle
		installEditPolicy(EditPolicy.SELECTION_FEEDBACK_ROLE, new ContainerHighlightEditPolicy(false, false));
		installEditPolicy(EditPolicy.CONTAINER_ROLE, new BPELContainerEditPolicy());
		installEditPolicy(EditPolicy.LAYOUT_ROLE, new BPELOrderedLayoutEditPolicy());
	}

	/**
	 * Builds a two-level figure: an outer container carrying a margin border,
	 * wrapping an inner decorated flow figure that becomes the content pane.
	 * The inner flow direction combines the diagram layout direction with the
	 * handler kind (compensation/termination handlers flow the other way).
	 */
	protected IFigure createFigure() {
		IFigure figure = new Figure();
		FlowLayout layout = new FlowLayout();
		layout.setMinorAlignment(FlowLayout.ALIGN_CENTER);
		// Compensation and termination handlers invert the flow direction.
		boolean vertical = (getModel() instanceof CompensationHandler) || (getModel() instanceof TerminationHandler);
		boolean horizontalLayout = ModelHelper.isHorizontalLayout(getModel());
		layout.setHorizontal(horizontalLayout ? vertical : !vertical);
		figure.setLayoutManager(layout);
		if (image == null) {
			// Get Image from registry
			if (getModel() instanceof EventHandler) {
				image = BPELUIPlugin.INSTANCE.getImage(IBPELUIConstants.ICON_EVENT_INDICATOR);
			} else if (getModel() instanceof CompensationHandler) {
				image = BPELUIPlugin.INSTANCE.getImage(IBPELUIConstants.ICON_COMPENSATION_INDICATOR);
			} else if (getModel() instanceof TerminationHandler) {
				image = BPELUIPlugin.INSTANCE.getImage(IBPELUIConstants.ICON_TERMINATION_INDICATOR);
			} else {
				// Default: fault handler indicator.
				image = BPELUIPlugin.INSTANCE.getImage(IBPELUIConstants.ICON_FAULT_INDICATOR);
			}
		}
		figure.setBorder(new RoundRectangleBorderWithDecoration(figure, image, new Insets(20, 10, 20, 10)));
		figure.setOpaque(true);
		this.contentPane = figure;
		int topMargin = calcTopMargin(horizontalLayout);
		int leftMargin = calcLeftMargin(horizontalLayout);
		IFigure container = new Figure();
		this.containerBorder = new MarginBorder(topMargin,leftMargin,0,0);
		container.setBorder(containerBorder);
		container.add(figure);
		// The outer container always stacks vertically.
		layout = new FlowLayout();
		layout.setHorizontal(false);
		container.setLayoutManager(layout);
		return container;
	}

	public void deactivate() {
		if (!isActive()) return;
		super.deactivate();
		if (this.image != null) {
			// Registry-owned image: cleared but deliberately not disposed here.
			//this.image.dispose();
			this.image = null;
		}
	}

	// Returns the inner decorated figure, not the outer margin container.
	public IFigure getContentPane() {
		return contentPane;
	}

	/**
	 * Re-applies flow direction and margins after a layout-direction change.
	 * NOTE(review): the flow direction is recomputed from ModelHelper while the
	 * margins use the {@code horizontal} argument, and the new MarginBorder is
	 * set on getFigure() rather than reusing containerBorder — looks intentional
	 * but worth confirming against callers.
	 */
	public void switchLayout(boolean horizontal) {
		boolean vertical = (getModel() instanceof CompensationHandler) || (getModel() instanceof TerminationHandler);
		boolean horizontalLayout = ModelHelper.isHorizontalLayout(getModel());
		((FlowLayout)getContentPane().getLayoutManager()).setHorizontal(horizontalLayout ? vertical : !vertical);
		int leftMargin = calcLeftMargin(horizontal);
		int topMargin = calcTopMargin(horizontal);
		getFigure().setBorder(new MarginBorder(topMargin,leftMargin,0,0));
	}

	/**
	 * Calculates the top margin regarding the layout orientation
	 * @return the top margin in pixels, depending on the parent part kind
	 */
	private int calcTopMargin(boolean horizontal){
		int topMargin = 0;
		if(horizontal){
			topMargin = 2;
		}else{
			if(getParent() instanceof ScopeEditPart){
				// Four possible handlers
				topMargin = 42;
			}else if(getParent() instanceof InvokeEditPart){
				// Standard offset
				topMargin = 17;
			}else
				topMargin = 16;
		}
		return topMargin;
	}

	/**
	 * Calculates the left margin regarding the layout orientation
	 * @return LEFT_MARGIN when laid out horizontally directly under the
	 *         process, otherwise 0
	 */
	private int calcLeftMargin(boolean horizontal){
		int leftMargin = 0;
		if(horizontal && getParent() instanceof ProcessEditPart){
			leftMargin = LEFT_MARGIN;
		}
		return leftMargin;
	}
}
apache-2.0
jludvice/fabric8
fabric/fabric-commands/src/main/java/io/fabric8/commands/ContainerCreateChildAction.java
5818
/**
 * Copyright 2005-2016 Red Hat, Inc.
 *
 * Red Hat licenses this file to you under the Apache License, version
 * 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package io.fabric8.commands;

import java.io.IOException;

import io.fabric8.api.Container;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.gogo.commands.Option;

import io.fabric8.api.CreateChildContainerOptions;
import io.fabric8.api.CreateContainerMetadata;
import io.fabric8.api.FabricAuthenticationException;
import io.fabric8.api.FabricService;
import io.fabric8.api.ZooKeeperClusterService;
import io.fabric8.boot.commands.support.AbstractContainerCreateAction;
import io.fabric8.utils.shell.ShellUtils;

import static io.fabric8.utils.FabricValidations.validateProfileNames;

/**
 * Shell action that creates one or more child containers under an existing
 * root (parent) container, prompting for JMX credentials when needed.
 */
@Command(name = ContainerCreateChild.FUNCTION_VALUE, scope = ContainerCreateChild.SCOPE_VALUE, description = ContainerCreateChild.DESCRIPTION, detailedDescription = "classpath:containerCreateChild.txt")
public class ContainerCreateChildAction extends AbstractChildContainerCreateAction {

    @Option(name = "--jmx-user", multiValued = false, required = false, description = "The jmx user name of the parent container.")
    protected String username;
    @Option(name = "--jmx-password", multiValued = false, required = false, description = "The jmx password of the parent container.")
    protected String password;

    @Argument(index = 0, required = true, description = "Parent (root) container ID")
    protected String parent;
    @Argument(index = 1, required = true, description = "The name of the containers to be created. When creating multiple containers it serves as a prefix")
    protected String name;
    @Argument(index = 2, required = false, description = "The number of containers that should be created")
    protected int number = 0;

    ContainerCreateChildAction(FabricService fabricService, ZooKeeperClusterService clusterService) {
        super(fabricService, clusterService);
    }

    /**
     * Validates inputs, builds the child-container options and creates the
     * containers, retrying once with freshly prompted credentials if the first
     * attempt fails authentication.
     */
    @Override
    protected Object doExecute() throws Exception {
        CreateContainerMetadata[] metadata = null;

        validateProfileNames(profiles);

        // validate input before creating containers
        preCreateContainer(name);
        validateParentContainer(parent);

        // Fall back to previously stored fabric credentials when no CLI options were given.
        String jmxUser = username != null ? username : ShellUtils.retrieveFabricUser(session);
        String jmxPassword = password != null ? password : ShellUtils.retrieveFabricUserPassword(session);

        // okay create child container
        CreateChildContainerOptions.Builder builder = CreateChildContainerOptions.builder()
                .name(name)
                .parent(parent)
                .bindAddress(bindAddress)
                .resolver(resolver)
                .manualIp(manualIp)
                .ensembleServer(false)
                .number(number)
                .zookeeperUrl(fabricService.getZookeeperUrl())
                .zookeeperPassword(fabricService.getZookeeperPassword())
                .jvmOpts(jvmOpts != null ? jvmOpts : fabricService.getDefaultJvmOptions())
                .jmxUser(jmxUser)
                .jmxPassword(jmxPassword)
                .version(version)
                .profiles(getProfileNames())
                .dataStoreProperties(getDataStoreProperties());

        try {
            metadata = fabricService.createContainers(builder.build());
            rethrowAuthenticationErrors(metadata);
            ShellUtils.storeFabricCredentials(session, jmxUser, jmxPassword);
        } catch (FabricAuthenticationException ex) {
            //If authentication fails, prompts for credentials and try again.
            username = null;
            password = null;
            promptForJmxCredentialsIfNeeded();
            metadata = fabricService.createContainers(builder.jmxUser(username).jmxPassword(password).build());
            ShellUtils.storeFabricCredentials(session, username, password);
        }

        // display containers
        displayContainers(metadata);
        return null;
    }

    /**
     * Ensures the given parent exists and is a root container.
     *
     * @throws IllegalArgumentException if the parent is missing or not a root
     */
    protected void validateParentContainer(String parent) {
        Container container = fabricService.getContainer(parent);
        if (container == null) {
            // Fixed message grammar: "does not exists" -> "does not exist".
            throw new IllegalArgumentException("Parent container " + parent + " does not exist!");
        }
        if (!container.isRoot()) {
            throw new IllegalArgumentException("Parent container " + parent + " must be a root container!");
        }
    }

    @Override
    protected void preCreateContainer(String name) {
        super.preCreateContainer(name);

        // validate number is not out of bounds (0 means "not specified" and is allowed;
        // the message now matches the actual check, which permits 0..99)
        if (number < 0 || number > 99) {
            throw new IllegalArgumentException("The number of containers must be between 0 and 99.");
        }
    }

    /**
     * Prompts on the console for any JMX credential not supplied via CLI options.
     */
    private void promptForJmxCredentialsIfNeeded() throws IOException {
        // If the username was not configured via cli, then prompt the user for the values
        if (username == null) {
            log.debug("Prompting user for jmx login");
            username = ShellUtils.readLine(session, "Jmx Login for " + parent + ": ", false);
        }
        if (password == null) {
            password = ShellUtils.readLine(session, "Jmx Password for " + parent + ": ", true);
        }
    }
}
apache-2.0
pjain1/druid
server/src/main/java/org/apache/druid/segment/realtime/appenderator/PeonAppenderatorsManager.java
6009
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.segment.realtime.appenderator;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.client.cache.Cache;
import org.apache.druid.client.cache.CacheConfig;
import org.apache.druid.client.cache.CachePopulatorStats;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.query.SegmentDescriptor;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.IndexMerger;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.join.JoinableFactory;
import org.apache.druid.segment.loading.DataSegmentPusher;
import org.apache.druid.segment.realtime.FireDepartmentMetrics;
import org.apache.druid.server.coordination.DataSegmentAnnouncer;
import org.joda.time.Interval;

import java.util.concurrent.ExecutorService;

/**
 * Manages Appenderators for tasks running within a CliPeon process.
 *
 * A task may create either a single realtime appenderator (which also serves
 * queries) or one or more batch appenderators — never both. The guard clauses
 * below enforce those assumptions and can be relaxed if they stop holding.
 *
 * Since the peon process terminates when its task finishes, resource cleanup
 * is left to process shutdown.
 */
public class PeonAppenderatorsManager implements AppenderatorsManager
{
  private Appenderator realtimeAppenderator;
  private Appenderator batchAppenderator;

  @Override
  public Appenderator createRealtimeAppenderatorForTask(
      String taskId,
      DataSchema schema,
      AppenderatorConfig config,
      FireDepartmentMetrics metrics,
      DataSegmentPusher dataSegmentPusher,
      ObjectMapper jsonMapper,
      IndexIO indexIO,
      IndexMerger indexMerger,
      QueryRunnerFactoryConglomerate conglomerate,
      DataSegmentAnnouncer segmentAnnouncer,
      ServiceEmitter emitter,
      ExecutorService queryExecutorService,
      JoinableFactory joinableFactory,
      Cache cache,
      CacheConfig cacheConfig,
      CachePopulatorStats cachePopulatorStats
  )
  {
    // At most one realtime appenderator per task, and never alongside a batch one.
    if (realtimeAppenderator != null) {
      throw new ISE("A realtime appenderator was already created for this peon's task.");
    }
    if (batchAppenderator != null) {
      throw new ISE("A batch appenderator was already created for this peon's task.");
    }

    realtimeAppenderator = Appenderators.createRealtime(
        taskId,
        schema,
        config,
        metrics,
        dataSegmentPusher,
        jsonMapper,
        indexIO,
        indexMerger,
        conglomerate,
        segmentAnnouncer,
        emitter,
        queryExecutorService,
        joinableFactory,
        cache,
        cacheConfig,
        cachePopulatorStats
    );
    return realtimeAppenderator;
  }

  @Override
  public Appenderator createOfflineAppenderatorForTask(
      String taskId,
      DataSchema schema,
      AppenderatorConfig config,
      boolean storeCompactionState,
      FireDepartmentMetrics metrics,
      DataSegmentPusher dataSegmentPusher,
      ObjectMapper objectMapper,
      IndexIO indexIO,
      IndexMerger indexMerger
  )
  {
    // CompactionTask does run multiple sub-IndexTasks, so we allow multiple batch appenderators
    if (realtimeAppenderator != null) {
      throw new ISE("A realtime appenderator was already created for this peon's task.");
    }

    batchAppenderator = Appenderators.createOffline(
        taskId,
        schema,
        config,
        storeCompactionState,
        metrics,
        dataSegmentPusher,
        objectMapper,
        indexIO,
        indexMerger
    );
    return batchAppenderator;
  }

  @Override
  public void removeAppenderatorsForTask(String taskId, String dataSource)
  {
    // the peon only runs one task, and the process will shutdown later, don't need to do anything
  }

  @Override
  public <T> QueryRunner<T> getQueryRunnerForIntervals(
      Query<T> query,
      Iterable<Interval> intervals
  )
  {
    // Queries are only served by the realtime appenderator.
    if (realtimeAppenderator == null) {
      throw new ISE("Was asked for a query runner but realtimeAppenderator was null!");
    }
    return realtimeAppenderator.getQueryRunnerForIntervals(query, intervals);
  }

  @Override
  public <T> QueryRunner<T> getQueryRunnerForSegments(
      Query<T> query,
      Iterable<SegmentDescriptor> specs
  )
  {
    if (realtimeAppenderator == null) {
      throw new ISE("Was asked for a query runner but realtimeAppenderator was null!");
    }
    return realtimeAppenderator.getQueryRunnerForSegments(query, specs);
  }

  @Override
  public boolean shouldTaskMakeNodeAnnouncements()
  {
    return true;
  }

  @Override
  public void shutdown()
  {
    // nothing to shut down
  }
}
apache-2.0
flofreud/aws-sdk-java
aws-java-sdk-logs/src/main/java/com/amazonaws/services/logs/model/PutDestinationPolicyRequest.java
5680
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.logs.model;

import java.io.Serializable;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request carrying the destination name and access-policy document for a
 * PutDestinationPolicy call.
 */
public class PutDestinationPolicyRequest extends AmazonWebServiceRequest
        implements Serializable, Cloneable {

    /** A name for an existing destination. */
    private String destinationName;

    /**
     * An IAM policy document that authorizes cross-account users to deliver
     * their log events to associated destination.
     */
    private String accessPolicy;

    /**
     * Sets the name of the existing destination.
     *
     * @param destinationName
     *        A name for an existing destination.
     */
    public void setDestinationName(String destinationName) {
        this.destinationName = destinationName;
    }

    /**
     * @return A name for an existing destination.
     */
    public String getDestinationName() {
        return this.destinationName;
    }

    /**
     * Fluent variant of {@link #setDestinationName(String)}.
     *
     * @param destinationName
     *        A name for an existing destination.
     * @return this request, for method chaining.
     */
    public PutDestinationPolicyRequest withDestinationName(
            String destinationName) {
        setDestinationName(destinationName);
        return this;
    }

    /**
     * Sets the IAM policy document that authorizes cross-account users to
     * deliver their log events to associated destination.
     *
     * @param accessPolicy
     *        The IAM policy document.
     */
    public void setAccessPolicy(String accessPolicy) {
        this.accessPolicy = accessPolicy;
    }

    /**
     * @return The IAM policy document authorizing cross-account delivery.
     */
    public String getAccessPolicy() {
        return this.accessPolicy;
    }

    /**
     * Fluent variant of {@link #setAccessPolicy(String)}.
     *
     * @param accessPolicy
     *        The IAM policy document.
     * @return this request, for method chaining.
     */
    public PutDestinationPolicyRequest withAccessPolicy(String accessPolicy) {
        setAccessPolicy(accessPolicy);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format matches the SDK convention: {Field: value,...}
        StringBuilder buf = new StringBuilder("{");
        if (getDestinationName() != null) {
            buf.append("DestinationName: ").append(getDestinationName()).append(",");
        }
        if (getAccessPolicy() != null) {
            buf.append("AccessPolicy: ").append(getAccessPolicy());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof also rejects null.
        if (!(obj instanceof PutDestinationPolicyRequest)) {
            return false;
        }
        PutDestinationPolicyRequest that = (PutDestinationPolicyRequest) obj;
        return java.util.Objects.equals(getDestinationName(), that.getDestinationName())
                && java.util.Objects.equals(getAccessPolicy(), that.getAccessPolicy());
    }

    @Override
    public int hashCode() {
        // Objects.hash reproduces the 31-based accumulator used elsewhere in the SDK.
        return java.util.Objects.hash(getDestinationName(), getAccessPolicy());
    }

    @Override
    public PutDestinationPolicyRequest clone() {
        return (PutDestinationPolicyRequest) super.clone();
    }
}
apache-2.0
MichaelTong/cassandra-rapid
src/java/org/apache/cassandra/db/marshal/DurationType.java
3127
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.db.marshal;

import java.nio.ByteBuffer;

import org.apache.cassandra.cql3.CQL3Type;
import org.apache.cassandra.cql3.Constants;
import org.apache.cassandra.cql3.Duration;
import org.apache.cassandra.cql3.Term;
import org.apache.cassandra.serializers.DurationSerializer;
import org.apache.cassandra.serializers.MarshalException;
import org.apache.cassandra.serializers.TypeSerializer;
import org.apache.cassandra.transport.ProtocolVersion;
import org.apache.cassandra.utils.ByteBufferUtil;

/**
 * Represents a duration. The duration is stored as months, days, and nanoseconds. This is done
 * <p>Internally he duration is stored as months (unsigned integer), days (unsigned integer), and nanoseconds.</p>
 */
public class DurationType extends AbstractType<Duration>
{
    public static final DurationType instance = new DurationType();

    DurationType()
    {
        super(ComparisonType.NOT_COMPARABLE);
    } // singleton

    public ByteBuffer fromString(String source) throws MarshalException
    {
        // An empty string maps to an empty buffer, not a zero-length duration.
        return source.isEmpty()
             ? ByteBufferUtil.EMPTY_BYTE_BUFFER
             : decompose(Duration.from(source));
    }

    @Override
    public boolean isValueCompatibleWithInternal(AbstractType<?> otherType)
    {
        // Durations are only value-compatible with themselves.
        return otherType == this;
    }

    public Term fromJSONObject(Object parsed) throws MarshalException
    {
        try
        {
            return new Constants.Value(fromString((String) parsed));
        }
        catch (ClassCastException exc)
        {
            // Anything other than a JSON string is rejected.
            throw new MarshalException(String.format("Expected a string representation of a duration, but got a %s: %s",
                                                     parsed.getClass().getSimpleName(), parsed));
        }
    }

    @Override
    public String toJSONString(ByteBuffer buffer, ProtocolVersion protocolVersion)
    {
        return getSerializer().deserialize(buffer).toString();
    }

    @Override
    public TypeSerializer<Duration> getSerializer()
    {
        return DurationSerializer.instance;
    }

    @Override
    public CQL3Type asCQL3Type()
    {
        return CQL3Type.Native.DURATION;
    }

    @Override
    public boolean referencesDuration()
    {
        return true;
    }
}
apache-2.0
ludovicc/testng-debian
test/v4/src/test/tmp/Tmp.java
513
package test.tmp;

import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;

import org.testng.annotations.Test;

/**
 * Scratch test exercising TestNG's parallel invocation: 10 invocations of
 * {@link #f()} spread over a pool of 5 threads, each sleeping a random
 * amount so the START/END log lines interleave.
 */
public class Tmp {

  @Test(invocationCount = 10, threadPoolSize = 5)
  public void f() {
    ppp("START " + Thread.currentThread().getId());
    try {
      // nextInt(300) yields a uniform value in [0, 300) — simpler and unbiased
      // compared to the previous Math.abs(new Random().nextInt() % 300), and
      // ThreadLocalRandom avoids allocating a Random per invocation.
      Thread.sleep(ThreadLocalRandom.current().nextInt(300));
    } catch (InterruptedException e) {
      // Restore the interrupt flag so the runner can observe the interruption.
      Thread.currentThread().interrupt();
      e.printStackTrace();
    }
    ppp("END " + Thread.currentThread().getId());
  }

  // Prefixed console logging for this scratch class.
  private void ppp(String string) {
    System.out.println("[Tmp] " + string);
  }
}
apache-2.0
goodwinnk/intellij-community
java/java-impl/src/com/intellij/refactoring/extractMethod/JavaDuplicatesExtractMethodProcessor.java
10894
// Copyright 2000-2017 JetBrains s.r.o.
// Use of this source code is governed by the Apache 2.0 license that can be
// found in the LICENSE file.
package com.intellij.refactoring.extractMethod;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.util.Pass;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.HelpID;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.refactoring.util.VariableData;
import com.intellij.refactoring.util.duplicates.*;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * Extract-method processor specialized for duplicate-code detection/replacement:
 * it can copy its configuration from another processor or from a snapshot, and
 * handles matches located in other classes/files (visibility, call qualifier).
 *
 * @author Pavel.Dolgov
 */
public class JavaDuplicatesExtractMethodProcessor extends ExtractMethodProcessor {
  private static final Logger LOG = Logger.getInstance(JavaDuplicatesExtractMethodProcessor.class);

  private static final Pass<ExtractMethodProcessor> USE_SNAPSHOT_TARGET_CLASS = new Pass<ExtractMethodProcessor>() {
    @Override
    public void pass(ExtractMethodProcessor processor) {} // it's a dummy but it's required to select the target class
  };

  public JavaDuplicatesExtractMethodProcessor(@NotNull PsiElement[] elements, @NotNull String refactoringName) {
    this(elements, null, refactoringName);
  }

  public JavaDuplicatesExtractMethodProcessor(@NotNull PsiElement[] elements,
                                              @Nullable Editor editor,
                                              @Nullable String refactoringName) {
    super(elements[0].getProject(), editor, elements, null, refactoringName, "", HelpID.EXTRACT_METHOD);
  }

  /**
   * Copies the extraction configuration (name, modifiers, return type, output/input
   * variables) from another processor, translating variables through
   * {@code variablesMapping} where a mapping exists.
   */
  public void applyFrom(@NotNull ExtractMethodProcessor from, @NotNull Map<PsiVariable, PsiVariable> variablesMapping) {
    myMethodName = from.myMethodName != null ? from.myMethodName : "dummyMethodName";
    myStatic = from.myStatic;
    myIsChainedConstructor = from.myIsChainedConstructor;
    myMethodVisibility = from.myMethodVisibility;
    myNullability = from.myNullability;
    myReturnType = from.myReturnType;

    // Map each output variable; variables with no mapping are kept as-is.
    myOutputVariables = Arrays.stream(from.myOutputVariables)
      .map(variable -> variablesMapping.getOrDefault(variable, variable))
      .toArray(PsiVariable[]::new);
    myOutputVariable = ArrayUtil.getFirstElement(myOutputVariables);
    myArtificialOutputVariable = variablesMapping.getOrDefault(from.myArtificialOutputVariable, from.myArtificialOutputVariable);

    List<VariableData> variableDatum = new ArrayList<>();
    List<VariableData> inputVariables = getInputVariables().getInputVariables();
    // Keep only parameters whose mapped variable is still referenced here and whose
    // declared type did not change relative to this processor's input variables.
    for (int i = 0; i < from.myVariableDatum.length; i++) {
      VariableData fromData = from.myVariableDatum[i];
      PsiVariable mappedVariable = variablesMapping.get(fromData.variable);
      if (isReferenced(mappedVariable, fromData.variable) &&
          isUnchanged(mappedVariable, fromData.type, inputVariables)) {
        VariableData newData = fromData.substitute(mappedVariable);
        variableDatum.add(newData);
      }
    }
    // Append this processor's own input variables that were not already covered.
    Set<PsiVariable> parameterVariables = ContainerUtil.map2Set(variableDatum, data -> data.variable);
    for (VariableData data : inputVariables) {
      if (!parameterVariables.contains(data.variable)) {
        variableDatum.add(data);
      }
    }
    myVariableDatum = variableDatum.toArray(new VariableData[0]);
  }

  /**
   * Returns true when {@code fromVariable} is either absent from {@code inputVariables}
   * or present with the same declared type text as {@code fromType}.
   */
  private static boolean isUnchanged(PsiVariable fromVariable, PsiType fromType,
                                     @NotNull List<VariableData> inputVariables) {
    for (VariableData data : inputVariables) {
      if (data.variable == fromVariable) {
        return data.type != null && data.type.equalsToText(fromType.getCanonicalText());
      }
    }
    return true;
  }

  /**
   * Applies a previously captured snapshot and runs {@link #prepare}; the snapshot is
   * temporarily attached to the file so the target class from the snapshot is selected.
   *
   * @return false when preparation fails (an error hint may be shown)
   */
  public boolean prepareFromSnapshot(@NotNull ExtractMethodSnapshot from, boolean showErrorHint) {
    applyFromSnapshot(from);
    PsiFile psiFile = myElements[0].getContainingFile();
    ExtractMethodSnapshot.SNAPSHOT_KEY.set(psiFile, from);
    try {
      if (!prepare(USE_SNAPSHOT_TARGET_CLASS, showErrorHint)) {
        return false;
      }
    }
    finally {
      // Always detach the snapshot, even if prepare() threw.
      ExtractMethodSnapshot.SNAPSHOT_KEY.set(psiFile, null);
    }
    // prepare() may recompute these; restore the snapshot's choices afterwards.
    myStatic = from.myStatic;
    myInputVariables.setFoldingAvailable(from.myFoldable);
    return true;
  }

  // Restores all configuration fields from the snapshot's smart pointers.
  private void applyFromSnapshot(@NotNull ExtractMethodSnapshot from) {
    myMethodName = from.myMethodName;
    myStatic = from.myStatic;
    myIsChainedConstructor = from.myIsChainedConstructor;
    myMethodVisibility = from.myMethodVisibility;
    myNullability = from.myNullability;
    myReturnType = from.myReturnType != null ? from.myReturnType.getType() : null;
    myOutputVariables = StreamEx.of(from.myOutputVariables).map(SmartPsiElementPointer::getElement).toArray(new PsiVariable[0]);
    LOG.assertTrue(!ArrayUtil.contains(null, myOutputVariables));
    myOutputVariable = ArrayUtil.getFirstElement(myOutputVariables);
    myArtificialOutputVariable = from.myArtificialOutputVariable != null ? from.myArtificialOutputVariable.getElement() : null;
    myVariableDatum = StreamEx.of(from.myVariableDatum).map(VariableDataSnapshot::getData).toArray(new VariableData[0]);
    LOG.assertTrue(!ArrayUtil.contains(null, myVariableDatum));
  }

  private boolean isReferenced(@Nullable PsiVariable variable, PsiVariable fromVariable) {
    return variable == fromVariable || // it's a freshlyDeclaredParameter
           (variable != null && ReferencesSearch.search(variable, new LocalSearchScope(myElements)).findFirst() != null);
  }

  /**
   * Initializes the processor with default parameters: all input variables become
   * method parameters, and an artificial output variable is used when the computed
   * return type is void.
   */
  public void applyDefaults(@NotNull String methodName, @PsiModifier.ModifierConstant @NotNull String visibility) {
    myMethodName = methodName;
    myVariableDatum = getInputVariables().getInputVariables().toArray(new VariableData[0]);
    myMethodVisibility = visibility;

    myArtificialOutputVariable = PsiType.VOID.equals(myReturnType) ? getArtificialOutputVariable() : null;
    final PsiType returnType = myArtificialOutputVariable != null ? myArtificialOutputVariable.getType() : myReturnType;
    if (returnType != null) {
      myReturnType = returnType;
    }
  }

  @Override
  public void doExtract() {
    super.chooseAnchor();
    super.doExtract();
  }

  /**
   * Marks the extracted method static when it can be static and at least one match
   * lives in another file or class (a qualified call needs a static target).
   */
  public void updateStaticModifier(List<Match> matches) {
    if (!isStatic() && isCanBeStatic()) {
      for (Match match : matches) {
        if (!isInSameFile(match) || !isInSameClass(match)) {
          PsiUtil.setModifierProperty(myExtractedMethod, PsiModifier.STATIC, true);
          myStatic = true;
          break;
        }
      }
    }
  }

  // Folds each extracted parameter's pattern usage into the input variables.
  public void putExtractedParameters(Map<PsiLocalVariable, ExtractedParameter> extractedParameters) {
    for (Map.Entry<PsiLocalVariable, ExtractedParameter> entry : extractedParameters.entrySet()) {
      myInputVariables.foldExtractedParameter(entry.getKey(), entry.getValue().myPattern.getUsage());
    }
  }

  public boolean prepare(boolean showErrorHint) {
    return prepare(null, showErrorHint);
  }

  /**
   * Runs the superclass preparation with dialogs suppressed; failures are logged and
   * optionally surfaced as an editor error hint.
   */
  private boolean prepare(@Nullable Pass<ExtractMethodProcessor> pass, boolean showErrorHint) {
    setShowErrorDialogs(false);
    try {
      if (super.prepare(pass)) {
        return true;
      }
      final String message = RefactoringBundle.getCannotRefactorMessage(
        RefactoringBundle.message("is.not.supported.in.the.current.context", myRefactoringName));
      LOG.info(message);
      if (showErrorHint) {
        CommonRefactoringUtil.showErrorHint(myProject, null, message, myRefactoringName, HelpID.EXTRACT_METHOD);
      }
      return false;
    }
    catch (PrepareFailedException e) {
      LOG.info(e);
      if (showErrorHint) {
        CommonRefactoringUtil.showErrorHint(myProject, null, e.getMessage(), myRefactoringName, HelpID.EXTRACT_METHOD);
      }
      return false;
    }
  }

  /**
   * Replaces a duplicate with a call to the extracted method, relaxing visibility and
   * qualifying the call when the match is in a different file or class.
   */
  @Override
  public PsiElement processMatch(Match match) throws IncorrectOperationException {
    boolean inSameFile = isInSameFile(match);
    if (!inSameFile) {
      relaxMethodVisibility(match);
    }
    boolean inSameClass = isInSameClass(match);
    PsiElement element = super.processMatch(match);
    if (!inSameFile || !inSameClass) {
      PsiMethodCallExpression callExpression = getMatchMethodCallExpression(element);
      if (callExpression != null) {
        return updateCallQualifier(callExpression);
      }
    }
    return element;
  }

  @Override
  protected boolean isFoldingApplicable() {
    return false;
  }

  // Qualifies the generated call with the extracted method's containing class and
  // shortens the resulting class references.
  @NotNull
  private PsiElement updateCallQualifier(PsiMethodCallExpression callExpression) {
    PsiElementFactory factory = JavaPsiFacade.getElementFactory(myProject);
    PsiClass psiClass = myExtractedMethod.getContainingClass();
    LOG.assertTrue(psiClass != null, "myExtractedMethod.getContainingClass");

    PsiReferenceExpression newQualifier = factory.createReferenceExpression(psiClass);
    callExpression.getMethodExpression().setQualifierExpression(newQualifier);
    return JavaCodeStyleManager.getInstance(myProject).shortenClassReferences(callExpression);
  }

  /**
   * Builds a parametrized duplicates finder over this processor's elements; a single
   * output variable becomes the finder's return value.
   */
  @NotNull
  public DuplicatesFinder createDuplicatesFinder() {
    ReturnValue returnValue = myOutputVariables.length == 1 ? new VariableReturnValue(myOutputVariables[0]) : null;
    Set<PsiVariable> effectivelyLocal = getEffectivelyLocalVariables();
    return new DuplicatesFinder(myElements, myInputVariables, returnValue, Collections.emptyList(),
                                DuplicatesFinder.MatchType.PARAMETRIZED, effectivelyLocal, null);
  }

  // Widens visibility just enough for the match's location: package-private within
  // the same package, public otherwise.
  private void relaxMethodVisibility(Match match) {
    if (isInSamePackage(match)) {
      PsiUtil.setModifierProperty(myExtractedMethod, PsiModifier.PRIVATE, false);
    }
    else {
      PsiUtil.setModifierProperty(myExtractedMethod, PsiModifier.PUBLIC, true);
    }
  }

  private boolean isInSameFile(Match match) {
    return myExtractedMethod.getContainingFile() == match.getMatchStart().getContainingFile();
  }

  private boolean isInSamePackage(Match match) {
    PsiFile psiFile = myExtractedMethod.getContainingFile();
    PsiFile matchFile = match.getMatchStart().getContainingFile();
    return psiFile instanceof PsiJavaFile &&
           matchFile instanceof PsiJavaFile &&
           Objects.equals(((PsiJavaFile)psiFile).getPackageName(), ((PsiJavaFile)matchFile).getPackageName());
  }

  private boolean isInSameClass(Match match) {
    PsiClass matchClass = PsiTreeUtil.getParentOfType(match.getMatchStart(), PsiClass.class);
    PsiClass psiClass = PsiTreeUtil.getParentOfType(myExtractedMethod, PsiClass.class);
    return matchClass != null && PsiTreeUtil.isAncestor(psiClass, matchClass, false);
  }
}
apache-2.0
thomas-young-2013/wherehowsX
web/test/ApplicationTest.java
450
import org.junit.*;
import play.mvc.*;

import static play.test.Helpers.*;
import static org.fest.assertions.Assertions.*;

/**
 * Unit test for the application's index template: rendering with a test user
 * and a fake CSRF token must produce HTML content.
 */
public class ApplicationTest {

    // Constants are immutable test fixtures, so declare them final.
    private static final String TEST_USER = "test";
    private static final String FAKE_CSRF_TOKEN = "token";

    @Test
    public void renderTemplate() {
        Content html = views.html.index.render(TEST_USER, FAKE_CSRF_TOKEN);
        assertThat(contentType(html)).isEqualTo("text/html");
    }
}
apache-2.0
grgrzybek/wildfly-camel
itests/standalone/src/test/java/org/wildfly/camel/test/core/subA/RecipientListBean.java
901
/*
 * #%L
 * Wildfly Camel :: Example :: Camel REST
 * %%
 * Copyright (C) 2013 - 2014 RedHat
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.wildfly.camel.test.core.subA;

import org.apache.camel.RecipientList;

/**
 * Bean used as a Camel recipient-list source: each call yields the two
 * direct endpoints the message should be routed to.
 */
public class RecipientListBean {

    // Endpoint URIs returned to Camel's recipient-list EIP.
    private static final String[] ENDPOINTS = { "direct:one", "direct:two" };

    @RecipientList
    public String[] route() {
        // Hand out a fresh copy so callers cannot mutate the shared constant.
        return ENDPOINTS.clone();
    }
}
apache-2.0
lgobinath/carbon-analytics-common
components/event-monitor/event-statistics/org.wso2.carbon.event.statistics/src/main/java/org/wso2/carbon/event/statistics/EventStatisticsService.java
7801
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.event.statistics; import org.wso2.carbon.event.statistics.internal.Constants; import org.wso2.carbon.event.statistics.internal.EventStatisticsManager; import org.wso2.carbon.event.statistics.internal.EventStatsHelper; import org.wso2.carbon.event.statistics.internal.GhostEventStatisticsMonitor; import org.wso2.carbon.event.statistics.internal.counter.StatsCounter; import org.wso2.carbon.event.statistics.internal.data.StatsDTO; import org.wso2.carbon.event.statistics.internal.ds.EventStatisticsServiceHolder; public class EventStatisticsService { @Deprecated public StatsDTO getGlobalCount(int tenantId) { EventStatisticsManager eventStatisticsManager = EventStatisticsServiceHolder.getInstance().getEventStatisticsManager(); if (eventStatisticsManager == null) { return null; } StatsCounter tenantData = eventStatisticsManager.getTenantDataMap().get(tenantId); if (tenantData == null) { tenantData = new StatsCounter(tenantId + "", Constants.TENANT); eventStatisticsManager.getTenantDataMap().putIfAbsent(tenantId, tenantData); tenantData = eventStatisticsManager.getTenantDataMap().get(tenantId); } return EventStatsHelper.constructStatsDTO(tenantData); } @Deprecated public StatsDTO getCategoryCount(int tenantId, String categoryName) { EventStatisticsManager eventStatisticsManager = 
EventStatisticsServiceHolder.getInstance().getEventStatisticsManager(); if (eventStatisticsManager == null) { return null; } StatsCounter tenantData = eventStatisticsManager.getTenantDataMap().get(tenantId); if (tenantData == null) { tenantData = new StatsCounter(tenantId + "", Constants.TENANT); eventStatisticsManager.getTenantDataMap().putIfAbsent(tenantId, tenantData); tenantData = eventStatisticsManager.getTenantDataMap().get(tenantId); } StatsCounter categoryData = tenantData.getChildCounter(categoryName); if (categoryData == null) { categoryData = new StatsCounter(categoryName, Constants.CATEGORY); tenantData.getChildCounters().putIfAbsent(categoryName, categoryData); categoryData = tenantData.getChildCounters().get(categoryName); } return EventStatsHelper.constructStatsDTO(categoryData); } @Deprecated public StatsDTO getDeploymentCount(int tenantId, String categoryName, String deploymentName) { EventStatisticsManager eventStatisticsManager = EventStatisticsServiceHolder.getInstance().getEventStatisticsManager(); if (eventStatisticsManager == null) { return null; } StatsCounter tenantData = eventStatisticsManager.getTenantDataMap().get(tenantId); if (tenantData == null) { tenantData = new StatsCounter(tenantId + "", Constants.TENANT); eventStatisticsManager.getTenantDataMap().putIfAbsent(tenantId, tenantData); tenantData = eventStatisticsManager.getTenantDataMap().get(tenantId); } StatsCounter categoryData = tenantData.getChildCounter(categoryName); if (categoryData == null) { return null; } StatsCounter deploymentData = categoryData.getChildCounter(deploymentName); if (deploymentData == null) { return null; } return EventStatsHelper.constructStatsDTO(deploymentData); } @Deprecated public StatsDTO getElementCount(int tenantId, String categoryName, String deploymentName, String elementName) { EventStatisticsManager eventStatisticsManager = EventStatisticsServiceHolder.getInstance().getEventStatisticsManager(); if (eventStatisticsManager == null) { return null; 
} StatsCounter tenantData = eventStatisticsManager.getTenantDataMap().get(tenantId); if (tenantData == null) { tenantData = new StatsCounter(tenantId + "", Constants.TENANT); eventStatisticsManager.getTenantDataMap().putIfAbsent(tenantId, tenantData); tenantData = eventStatisticsManager.getTenantDataMap().get(tenantId); } StatsCounter categoryData = tenantData.getChildCounter(categoryName); if (categoryData == null) { return null; } StatsCounter deploymentData = categoryData.getChildCounter(deploymentName); if (deploymentData == null) { return null; } StatsCounter elementData = deploymentData.getChildCounter(elementName); if (elementData == null) { return null; } return EventStatsHelper.constructStatsDTO(elementData); } @Deprecated public synchronized EventStatisticsMonitor getEventStatisticMonitor(int tenantId, String category, String deployment, String element) { EventStatisticsManager eventStatisticsManager = EventStatisticsServiceHolder.getInstance().getEventStatisticsManager(); if (eventStatisticsManager == null) { return new GhostEventStatisticsMonitor(); } return eventStatisticsManager.getEventStatisticMonitor(tenantId, category, deployment, element); } @Deprecated public synchronized void reset() { EventStatisticsManager eventStatisticsManager = EventStatisticsServiceHolder.getInstance().getEventStatisticsManager(); eventStatisticsManager.reset(); } /** * Register a custom statistics consumer to receive updates from this statistics store. * * @param o The EventStatisticsObserver instance to be notified of data updates */ @Deprecated public void registerObserver(EventStatisticsObserver o) { EventStatisticsManager eventStatisticsManager = EventStatisticsServiceHolder.getInstance().getEventStatisticsManager(); eventStatisticsManager.registerObserver(o); } /** * Unregister the custom statistics consumer from the mediation statistics store. 
* * @param o The EventStatisticsObserver instance to be removed */ @Deprecated public void unregisterObserver(EventStatisticsObserver o) { EventStatisticsManager eventStatisticsManager = EventStatisticsServiceHolder.getInstance().getEventStatisticsManager(); eventStatisticsManager.unregisterObserver(o); } @Deprecated public void unregisterObservers() { EventStatisticsManager eventStatisticsManager = EventStatisticsServiceHolder.getInstance().getEventStatisticsManager(); eventStatisticsManager.unregisterObservers(); } @Deprecated public boolean isGlobalStatisticsEnabled() { EventStatisticsManager eventStatisticsManager = EventStatisticsServiceHolder.getInstance().getEventStatisticsManager(); return eventStatisticsManager != null; } }
apache-2.0
waysact/activemq-artemis
artemis-server/src/main/java/org/apache/activemq/artemis/core/paging/cursor/PageCache.java
1694
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.core.paging.cursor;

import org.apache.activemq.artemis.core.paging.PagedMessage;
import org.apache.activemq.artemis.utils.SoftValueHashMap;

/**
 * In-memory cache of the messages of a single page, usable as a soft-referenced
 * value in a {@link SoftValueHashMap}.
 */
public interface PageCache extends SoftValueHashMap.ValueCache {

   /** @return the id of the page this cache holds. */
   long getPageId();

   /** @return the number of messages currently cached for this page. */
   int getNumberOfMessages();

   /** Sets the full array of messages for this page. */
   void setMessages(PagedMessage[] messages);

   /** @return the full array of messages cached for this page. */
   PagedMessage[] getMessages();

   /**
    * @return whether this cache is still being updated
    */
   boolean isLive();

   /**
    * @param messageNumber The order of the message on the page
    * @return the message at that position on the page
    */
   PagedMessage getMessage(int messageNumber);

   /**
    * When the cache is being created,
    * We need to first read the files before other threads can get messages from this.
    */
   void lock();

   /**
    * You have to call this method within the same thread you called lock
    */
   void unlock();

   /** Releases the cache. */
   void close();
}
apache-2.0
Darsstar/framework
uitest/src/main/java/com/vaadin/tests/components/combobox/ComboBoxVaadinIcons.java
811
package com.vaadin.tests.components.combobox; import java.util.Arrays; import com.vaadin.annotations.Widgetset; import com.vaadin.icons.VaadinIcons; import com.vaadin.server.VaadinRequest; import com.vaadin.tests.components.AbstractTestUI; import com.vaadin.ui.ComboBox; @Widgetset("com.vaadin.DefaultWidgetSet") public class ComboBoxVaadinIcons extends AbstractTestUI { public static final VaadinIcons[] icons = { VaadinIcons.AMBULANCE, VaadinIcons.PAPERPLANE, VaadinIcons.AIRPLANE }; @Override protected void setup(VaadinRequest request) { ComboBox<String> comboBox = new ComboBox<>(); comboBox.setItems(Arrays.stream(icons).map(VaadinIcons::name)); comboBox.setItemIconGenerator(VaadinIcons::valueOf); addComponent(comboBox); } }
apache-2.0
ern/elasticsearch
server/src/main/java/org/elasticsearch/index/engine/SoftDeletesPolicy.java
7355
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.index.engine;

import org.apache.lucene.document.LongPoint;
import org.apache.lucene.search.Query;
import org.elasticsearch.core.Releasable;
import org.elasticsearch.core.Releasables;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.seqno.RetentionLease;
import org.elasticsearch.index.seqno.RetentionLeases;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.translog.Translog;

import java.util.Objects;
import java.util.function.LongSupplier;
import java.util.function.Supplier;

/**
 * A policy that controls how many soft-deleted documents should be retained for peer-recovery and querying history changes purpose.
 */
final class SoftDeletesPolicy {

    private final LongSupplier globalCheckpointSupplier;
    private long localCheckpointOfSafeCommit;
    // This lock count is used to prevent `minRetainedSeqNo` from advancing.
    private int retentionLockCount;
    // The extra number of operations before the global checkpoint are retained
    private long retentionOperations;
    // The min seq_no value that is retained - ops after this seq# should exist in the Lucene index.
    private long minRetainedSeqNo;
    // provides the retention leases used to calculate the minimum sequence number to retain
    private final Supplier<RetentionLeases> retentionLeasesSupplier;

    /**
     * @param globalCheckpointSupplier supplies the current global checkpoint
     * @param minRetainedSeqNo         the initial minimum retained sequence number
     * @param retentionOperations      extra operations below the global checkpoint to retain
     * @param retentionLeasesSupplier  supplies the current retention leases (must be non-null)
     */
    SoftDeletesPolicy(
            final LongSupplier globalCheckpointSupplier,
            final long minRetainedSeqNo,
            final long retentionOperations,
            final Supplier<RetentionLeases> retentionLeasesSupplier) {
        this.globalCheckpointSupplier = globalCheckpointSupplier;
        this.retentionOperations = retentionOperations;
        this.minRetainedSeqNo = minRetainedSeqNo;
        this.retentionLeasesSupplier = Objects.requireNonNull(retentionLeasesSupplier);
        this.localCheckpointOfSafeCommit = SequenceNumbers.NO_OPS_PERFORMED;
        this.retentionLockCount = 0;
    }

    /**
     * Updates the number of soft-deleted documents prior to the global checkpoint to be retained
     * See {@link org.elasticsearch.index.IndexSettings#INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING}
     */
    synchronized void setRetentionOperations(long retentionOperations) {
        this.retentionOperations = retentionOperations;
    }

    /**
     * Sets the local checkpoint of the current safe commit
     *
     * @throws IllegalArgumentException if the new checkpoint is lower than the current one
     */
    synchronized void setLocalCheckpointOfSafeCommit(long newCheckpoint) {
        if (newCheckpoint < this.localCheckpointOfSafeCommit) {
            throw new IllegalArgumentException("Local checkpoint can't go backwards; " +
                "new checkpoint [" + newCheckpoint + "]," + "current checkpoint [" + localCheckpointOfSafeCommit + "]");
        }
        this.localCheckpointOfSafeCommit = newCheckpoint;
    }

    /**
     * Acquires a lock on soft-deleted documents to prevent them from cleaning up in merge processes. This is necessary to
     * make sure that all operations that are being retained will be retained until the lock is released.
     * This is a analogy to the translog's retention lock; see {@link Translog#acquireRetentionLock()}
     */
    synchronized Releasable acquireRetentionLock() {
        assert retentionLockCount >= 0 : "Invalid number of retention locks [" + retentionLockCount + "]";
        retentionLockCount++;
        // releaseOnce guards against double-release by a careless caller.
        return Releasables.releaseOnce(this::releaseRetentionLock);
    }

    private synchronized void releaseRetentionLock() {
        assert retentionLockCount > 0 : "Invalid number of retention locks [" + retentionLockCount + "]";
        retentionLockCount--;
    }

    /**
     * Returns the min seqno that is retained in the Lucene index.
     * Operations whose seq# is least this value should exist in the Lucene index.
     */
    synchronized long getMinRetainedSeqNo() {
        /*
         * When an engine is flushed, we need to provide it the latest collection of retention leases even when the soft deletes policy is
         * locked for peer recovery.
         */
        final RetentionLeases retentionLeases = retentionLeasesSupplier.get();
        // do not advance if the retention lock is held
        if (retentionLockCount == 0) {
            /*
             * This policy retains operations for two purposes: peer-recovery and querying changes history.
             * - Peer-recovery is driven by the local checkpoint of the safe commit. In peer-recovery, the primary transfers a safe commit,
             *   then sends operations after the local checkpoint of that commit. This requires keeping all ops after
             *   localCheckpointOfSafeCommit.
             * - Changes APIs are driven by a combination of the global checkpoint, retention operations, and retention leases. Here we
             *   prefer using the global checkpoint instead of the maximum sequence number because only operations up to the global
             *   checkpoint are exposed in the changes APIs.
             */

            // calculate the minimum sequence number to retain based on retention leases
            final long minimumRetainingSequenceNumber = retentionLeases
                    .leases()
                    .stream()
                    .mapToLong(RetentionLease::retainingSequenceNumber)
                    .min()
                    .orElse(Long.MAX_VALUE);
            /*
             * The minimum sequence number to retain is the minimum of the minimum based on retention leases, and the number of operations
             * below the global checkpoint to retain (index.soft_deletes.retention.operations). The additional increments on the global
             * checkpoint and the local checkpoint of the safe commit are due to the fact that we want to retain all operations above
             * those checkpoints.
             */
            final long minSeqNoForQueryingChanges =
                    Math.min(1 + globalCheckpointSupplier.getAsLong() - retentionOperations, minimumRetainingSequenceNumber);
            final long minSeqNoToRetain = Math.min(minSeqNoForQueryingChanges, 1 + localCheckpointOfSafeCommit);

            /*
             * We take the maximum as minSeqNoToRetain can go backward as the retention operations value can be changed in settings, or from
             * the addition of leases with a retaining sequence number lower than previous retaining sequence numbers.
             */
            minRetainedSeqNo = Math.max(minRetainedSeqNo, minSeqNoToRetain);
        }
        return minRetainedSeqNo;
    }

    /**
     * Returns a soft-deletes retention query that will be used in {@link org.apache.lucene.index.SoftDeletesRetentionMergePolicy}
     * Documents including tombstones are soft-deleted and matched this query will be retained and won't cleaned up by merges.
     */
    Query getRetentionQuery() {
        return LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, getMinRetainedSeqNo(), Long.MAX_VALUE);
    }
}
apache-2.0
ravisund/Kundera
src/kundera-cassandra/cassandra-core/src/test/java/com/impetus/client/twitter/entities/ProfessionalDetailCassandra.java
11577
/**
 * Copyright 2012 Impetus Infotech.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.impetus.client.twitter.entities;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Calendar;
import java.util.Date;

import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;

import com.impetus.kundera.index.Index;
import com.impetus.kundera.index.IndexCollection;

/**
 * Holds professional details of any user.
 *
 * Embeddable test entity covering every supported column type (primitives,
 * temporal types, wrappers, and big-number types); every field is indexed.
 *
 * @author amresh.singh
 */
@Embeddable
@IndexCollection(columns = { @Index(name = "professionId"), @Index(name = "departmentName"),
        @Index(name = "isExceptional"), @Index(name = "age"), @Index(name = "grade"),
        @Index(name = "digitalSignature"), @Index(name = "rating"), @Index(name = "compliance"),
        @Index(name = "height"), @Index(name = "enrolmentDate"), @Index(name = "enrolmentTime"),
        @Index(name = "joiningDateAndTime"), @Index(name = "yearsSpent"), @Index(name = "uniqueId"),
        @Index(name = "monthlySalary"), @Index(name = "birthday"), @Index(name = "birthtime"),
        @Index(name = "anniversary"), @Index(name = "jobAttempts"), @Index(name = "accumulatedWealth"),
        @Index(name = "graduationDay") })
public class ProfessionalDetailCassandra
{
    // Primitive Types
    @Column(name = "PROFESSION_ID")
    private long professionId;

    @Column(name = "DEPARTMENT_NAME")
    private String departmentName;

    @Column(name = "IS_EXCEPTIONAL")
    private boolean isExceptional;

    @Column(name = "AGE")
    private int age;

    @Column(name = "GRADE")
    private char grade; // A,B,C,D,E,F for i to vi

    @Column(name = "DIGITAL_SIGNATURE")
    private byte digitalSignature;

    @Column(name = "RATING")
    private short rating; // 1-10

    @Column(name = "COMPLIANCE")
    private float compliance;

    @Column(name = "HEIGHT")
    private double height;

    // Date-time types
    @Column(name = "ENROLMENT_DATE")
    @Temporal(TemporalType.DATE)
    private java.util.Date enrolmentDate;

    @Column(name = "ENROLMENT_TIME")
    @Temporal(TemporalType.TIME)
    private java.util.Date enrolmentTime;

    @Column(name = "JOINING_DATE_TIME")
    @Temporal(TemporalType.TIMESTAMP)
    private java.util.Date joiningDateAndTime;

    // Wrapper types
    @Column(name = "YEARS_SPENT")
    private Integer yearsSpent;

    @Column(name = "UNIQUE_ID")
    private Long uniqueId;

    @Column(name = "MONTHLY_SALARY")
    private Double monthlySalary;

    @Column(name = "BIRTH_DAY")
    private java.sql.Date birthday;

    @Column(name = "BIRTH_TIME")
    private java.sql.Time birthtime;

    @Column(name = "ANNIVERSARY")
    private java.sql.Timestamp anniversary;

    @Column(name = "JOB_ATTEMPTS")
    private BigInteger jobAttempts;

    @Column(name = "ACCUMULATED_WEALTH")
    private BigDecimal accumulatedWealth;

    @Column(name = "GRADUATION_DAY")
    private Calendar graduationDay;

    /**
     * All-args constructor assigning every column field.
     */
    public ProfessionalDetailCassandra(long professionId, String departmentName, boolean isExceptional, int age,
            char grade, byte digitalSignature, short rating, float compliance, double height, Date enrolmentDate,
            Date enrolmentTime, Date joiningDateAndTime, Integer yearsSpent, Long uniqueId, Double monthlySalary,
            java.sql.Date birthday, Time birthtime, Timestamp anniversary, BigInteger jobAttempts,
            BigDecimal accumulatedWealth, Calendar graduationDay)
    {
        super();
        this.professionId = professionId;
        this.departmentName = departmentName;
        this.isExceptional = isExceptional;
        this.age = age;
        this.grade = grade;
        this.digitalSignature = digitalSignature;
        this.rating = rating;
        this.compliance = compliance;
        this.height = height;
        this.enrolmentDate = enrolmentDate;
        this.enrolmentTime = enrolmentTime;
        this.joiningDateAndTime = joiningDateAndTime;
        this.yearsSpent = yearsSpent;
        this.uniqueId = uniqueId;
        this.monthlySalary = monthlySalary;
        this.birthday = birthday;
        this.birthtime = birthtime;
        this.anniversary = anniversary;
        this.jobAttempts = jobAttempts;
        this.accumulatedWealth = accumulatedWealth;
        this.graduationDay = graduationDay;
    }

    /** No-arg constructor required by JPA/Kundera. */
    public ProfessionalDetailCassandra()
    {
    }

    /**
     * @return the professionId
     */
    public long getProfessionId()
    {
        return professionId;
    }

    /**
     * @param professionId
     *            the professionId to set
     */
    public void setProfessionId(long professionId)
    {
        this.professionId = professionId;
    }

    /**
     * @return the departmentName
     */
    public String getDepartmentName()
    {
        return departmentName;
    }

    /**
     * @param departmentName
     *            the departmentName to set
     */
    public void setDepartmentName(String departmentName)
    {
        this.departmentName = departmentName;
    }

    /**
     * @return the isExceptional
     */
    public boolean isExceptional()
    {
        return isExceptional;
    }

    /**
     * @param isExceptional
     *            the isExceptional to set
     */
    public void setExceptional(boolean isExceptional)
    {
        this.isExceptional = isExceptional;
    }

    /**
     * @return the age
     */
    public int getAge()
    {
        return age;
    }

    /**
     * @param age
     *            the age to set
     */
    public void setAge(int age)
    {
        this.age = age;
    }

    /**
     * @return the grade
     */
    public char getGrade()
    {
        return grade;
    }

    /**
     * @param grade
     *            the grade to set
     */
    public void setGrade(char grade)
    {
        this.grade = grade;
    }

    /**
     * @return the digitalSignature
     */
    public byte getDigitalSignature()
    {
        return digitalSignature;
    }

    /**
     * @param digitalSignature
     *            the digitalSignature to set
     */
    public void setDigitalSignature(byte digitalSignature)
    {
        this.digitalSignature = digitalSignature;
    }

    /**
     * @return the rating
     */
    public short getRating()
    {
        return rating;
    }

    /**
     * @param rating
     *            the rating to set
     */
    public void setRating(short rating)
    {
        this.rating = rating;
    }

    /**
     * @return the compliance
     */
    public float getCompliance()
    {
        return compliance;
    }

    /**
     * @param compliance
     *            the compliance to set
     */
    public void setCompliance(float compliance)
    {
        this.compliance = compliance;
    }

    /**
     * @return the height
     */
    public double getHeight()
    {
        return height;
    }

    /**
     * @param height
     *            the height to set
     */
    public void setHeight(double height)
    {
        this.height = height;
    }

    /**
     * @return the enrolmentDate
     */
    public java.util.Date getEnrolmentDate()
    {
        return enrolmentDate;
    }

    /**
     * @param enrolmentDate
     *            the enrolmentDate to set
     */
    public void setEnrolmentDate(java.util.Date enrolmentDate)
    {
        this.enrolmentDate = enrolmentDate;
    }

    /**
     * @return the enrolmentTime
     */
    public java.util.Date getEnrolmentTime()
    {
        return enrolmentTime;
    }

    /**
     * @param enrolmentTime
     *            the enrolmentTime to set
     */
    public void setEnrolmentTime(java.util.Date enrolmentTime)
    {
        this.enrolmentTime = enrolmentTime;
    }

    /**
     * @return the joiningDateAndTime
     */
    public java.util.Date getJoiningDateAndTime()
    {
        return joiningDateAndTime;
    }

    /**
     * @param joiningDateAndTime
     *            the joiningDateAndTime to set
     */
    public void setJoiningDateAndTime(java.util.Date joiningDateAndTime)
    {
        this.joiningDateAndTime = joiningDateAndTime;
    }

    /**
     * @return the yearsSpent
     */
    public Integer getYearsSpent()
    {
        return yearsSpent;
    }

    /**
     * @param yearsSpent
     *            the yearsSpent to set
     */
    public void setYearsSpent(Integer yearsSpent)
    {
        this.yearsSpent = yearsSpent;
    }

    /**
     * @return the uniqueId
     */
    public Long getUniqueId()
    {
        return uniqueId;
    }

    /**
     * @param uniqueId
     *            the uniqueId to set
     */
    public void setUniqueId(Long uniqueId)
    {
        this.uniqueId = uniqueId;
    }

    /**
     * @return the monthlySalary
     */
    public Double getMonthlySalary()
    {
        return monthlySalary;
    }

    /**
     * @param monthlySalary
     *            the monthlySalary to set
     */
    public void setMonthlySalary(Double monthlySalary)
    {
        this.monthlySalary = monthlySalary;
    }

    /**
     * @return the birthday
     */
    public java.sql.Date getBirthday()
    {
        return birthday;
    }

    /**
     * @param birthday
     *            the birthday to set
     */
    public void setBirthday(java.sql.Date birthday)
    {
        this.birthday = birthday;
    }

    /**
     * @return the birthtime
     */
    public java.sql.Time getBirthtime()
    {
        return birthtime;
    }

    /**
     * @param birthtime
     *            the birthtime to set
     */
    public void setBirthtime(java.sql.Time birthtime)
    {
        this.birthtime = birthtime;
    }

    /**
     * @return the anniversary
     */
    public java.sql.Timestamp getAnniversary()
    {
        return anniversary;
    }

    /**
     * @param anniversary
     *            the anniversary to set
     */
    public void setAnniversary(java.sql.Timestamp anniversary)
    {
        this.anniversary = anniversary;
    }

    /**
     * @return the jobAttempts
     */
    public BigInteger getJobAttempts()
    {
        return jobAttempts;
    }

    /**
     * @param jobAttempts
     *            the jobAttempts to set
     */
    public void setJobAttempts(BigInteger jobAttempts)
    {
        this.jobAttempts = jobAttempts;
    }

    /**
     * @return the accumulatedWealth
     */
    public BigDecimal getAccumulatedWealth()
    {
        return accumulatedWealth;
    }

    /**
     * @param accumulatedWealth
     *            the accumulatedWealth to set
     */
    public void setAccumulatedWealth(BigDecimal accumulatedWealth)
    {
        this.accumulatedWealth = accumulatedWealth;
    }

    /**
     * @return the graduationDay
     */
    public Calendar getGraduationDay()
    {
        return graduationDay;
    }

    /**
     * @param graduationDay
     *            the graduationDay to set
     */
    public void setGraduationDay(Calendar graduationDay)
    {
        this.graduationDay = graduationDay;
    }
}
apache-2.0
leafclick/intellij-community
java/java-analysis-impl/src/com/intellij/codeInspection/dataFlow/inliner/LambdaInliner.java
2358
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInspection.dataFlow.inliner; import com.intellij.codeInspection.dataFlow.CFGBuilder; import com.intellij.codeInspection.dataFlow.DfaPsiUtil; import com.intellij.psi.*; import com.intellij.psi.util.PsiUtil; import one.util.streamex.EntryStream; import org.jetbrains.annotations.NotNull; /** * An inliner which is capable to inline a call like ((IntSupplier)(() -> 5)).getAsInt() to the lambda body. * Works even if lambda body is complex, has several returns, etc. 
*/ public class LambdaInliner implements CallInliner { @Override public boolean tryInlineCall(@NotNull CFGBuilder builder, @NotNull PsiMethodCallExpression call) { PsiExpression qualifier = PsiUtil.skipParenthesizedExprDown(call.getMethodExpression().getQualifierExpression()); if (qualifier == null) return false; JavaResolveResult result = call.getMethodExpression().advancedResolve(false); PsiMethod method = (PsiMethod)result.getElement(); if (method == null || method != LambdaUtil.getFunctionalInterfaceMethod(method.getContainingClass())) return false; if (method.isVarArgs()) return false; // TODO: support varargs PsiExpression[] args = call.getArgumentList().getExpressions(); PsiParameter[] parameters = method.getParameterList().getParameters(); if (args.length != parameters.length) return false; PsiSubstitutor substitutor = result.getSubstitutor(); return builder.tryInlineLambda(args.length, qualifier, DfaPsiUtil.getTypeNullability(substitutor.substitute(method.getReturnType())), () -> EntryStream.zip(args, parameters) .forKeyValue((arg, parameter) -> builder.pushExpression(arg).boxUnbox(arg, parameter.getType()))); } }
apache-2.0
paplorinc/intellij-community
platform/util/src/com/intellij/util/ui/GeometryUtil.java
10570
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import javax.swing.*;
import java.awt.*;
import java.awt.geom.AffineTransform;
import java.awt.geom.Line2D;
import java.awt.geom.Point2D;
import java.util.TreeMap;

/**
 * Static 2D-geometry helpers for segments, rectangles and arrow shapes.
 * Implements {@link SwingConstants} so its positional return values (TOP, NORTH_WEST, ...)
 * are the standard Swing compass constants.
 */
public class GeometryUtil implements SwingConstants {
  private static final int myArrowSize = 9;
  private static final Shape myArrowPolygon =
    new Polygon(new int[]{0, myArrowSize, 0, 0}, new int[]{0, myArrowSize / 2, myArrowSize, 0}, 4);

  /**
   * Returns the point where {@code aSegment} crosses a bound of {@code aRectangle}
   * (bounds checked in top, right, bottom, left order), or {@code null} if the segment
   * lies entirely inside the rectangle.
   *
   * @throws IllegalArgumentException if both endpoints lie outside the rectangle
   */
  public static Point getIntersectionPoint(Line2D aSegment, Rectangle aRectangle) {
    if (segmentOutsideRectangle(aRectangle, aSegment)) {
      throw new IllegalArgumentException(
        "Segment " + toString(aSegment) + " lies out of rectangle " + aRectangle +
        " or intersects more than one bound");
    }
    if (segmentInsideRectangle(aRectangle, aSegment)) {
      return null;
    }
    Line2D[] bounds = {getTopOf(aRectangle), getRightOf(aRectangle), getBottomOf(aRectangle), getLeftOf(aRectangle)};
    for (Line2D bound : bounds) {
      if (bound.intersectsLine(aSegment)) {
        return getIntersectionPoint(aSegment, bound);
      }
    }
    return null;
  }

  /** Left edge of the rectangle, oriented top-to-bottom. */
  public static Line2D.Double getLeftOf(Rectangle aRectangle) {
    return new Line2D.Double(aRectangle.getX(), aRectangle.getY(),
                             aRectangle.getX(), aRectangle.getY() + aRectangle.getHeight());
  }

  /** Bottom edge of the rectangle, oriented left-to-right. */
  public static Line2D.Double getBottomOf(Rectangle aRectangle) {
    return new Line2D.Double(aRectangle.getX(), aRectangle.getY() + aRectangle.getHeight(),
                             aRectangle.getX() + aRectangle.getWidth(), aRectangle.getY() + aRectangle.getHeight());
  }

  /** Right edge of the rectangle, oriented top-to-bottom. */
  public static Line2D.Double getRightOf(Rectangle aRectangle) {
    return new Line2D.Double(aRectangle.getX() + aRectangle.getWidth(), aRectangle.getY(),
                             aRectangle.getX() + aRectangle.getWidth(), aRectangle.getY() + aRectangle.getHeight());
  }

  /** Top edge of the rectangle, oriented left-to-right. */
  public static Line2D.Double getTopOf(Rectangle aRectangle) {
    return new Line2D.Double(aRectangle.getX(), aRectangle.getY(),
                             aRectangle.getX() + aRectangle.getWidth(), aRectangle.getY());
  }

  /** True when both endpoints of the segment are strictly inside the rectangle. */
  private static boolean segmentInsideRectangle(Rectangle aRectangle, Line2D aSegment) {
    return isWithin(aRectangle, aSegment.getP1()) && isWithin(aRectangle, aSegment.getP2());
  }

  /** True when neither endpoint of the segment is strictly inside the rectangle. */
  private static boolean segmentOutsideRectangle(Rectangle aRectangle, Line2D aSegment) {
    return !isWithin(aRectangle, aSegment.getP1()) && !isWithin(aRectangle, aSegment.getP2());
  }

  /**
   * True when the point is strictly inside the rectangle; points on the border are
   * NOT considered within (all comparisons are strict).
   */
  public static boolean isWithin(Rectangle aRectangle, Point2D aPoint) {
    return aPoint.getX() > aRectangle.getX()
           && aPoint.getX() < aRectangle.getX() + aRectangle.getWidth()
           && aPoint.getY() > aRectangle.getY()
           && aPoint.getY() < aRectangle.getY() + aRectangle.getHeight();
  }

  /**
   * Intersection of the two (infinite) lines through the given segments in slope/intercept
   * form, with special handling for vertical/horizontal lines (whose slope is infinite or
   * whose intercept is NaN). Returns {@code null} for parallel or coincident lines.
   * Coordinates are truncated to int by the {@link Point} constructor cast.
   */
  public static Point getIntersectionPoint(Line2D aFirst, Line2D aSecond) {
    double firstDeltaX = aFirst.getX2() - aFirst.getX1();
    double firstDeltaY = aFirst.getY2() - aFirst.getY1();
    double kFirst = firstDeltaY / firstDeltaX;       // +/-Infinity for vertical lines
    double bFirst = aFirst.getY1() - kFirst * aFirst.getX1();
    double secondDeltaX = aSecond.getX2() - aSecond.getX1();
    double secondDeltaY = aSecond.getY2() - aSecond.getY1();
    double kSecond = secondDeltaY / secondDeltaX;
    double bSecond = aSecond.getY1() - kSecond * aSecond.getX1();
    double xIntersection;
    double yIntersection;
    double deltaK = kFirst - kSecond;
    if (linesAreAngledAndParallel(kFirst, kSecond)) {
      return null;
    }
    if (Double.isInfinite(deltaK) || 0 == deltaK) {
      // At least one line is axis-aligned, or the slopes coincide.
      if (firstDeltaX == secondDeltaX && 0 == firstDeltaX) {
        return null; // both vertical
      }
      if (firstDeltaY == secondDeltaY && 0 == firstDeltaY) {
        return null; // both horizontal
      }
      if (0 == firstDeltaX && 0 == secondDeltaY) {
        // first vertical, second horizontal
        xIntersection = aFirst.getX1();
        yIntersection = aSecond.getY1();
      }
      else if (0 == secondDeltaX && 0 == firstDeltaY) {
        // first horizontal, second vertical
        xIntersection = aSecond.getX1();
        yIntersection = aFirst.getY1();
      }
      else if (0 == firstDeltaX) {
        // first vertical, second angled
        xIntersection = aFirst.getX1();
        yIntersection = kSecond * xIntersection + bSecond;
      }
      else {
        // second vertical, first angled
        xIntersection = aSecond.getX1();
        yIntersection = kFirst * xIntersection + bFirst;
      }
    }
    else {
      xIntersection = (bSecond - bFirst) / deltaK;
      yIntersection = kFirst * xIntersection + bFirst;
    }
    return new Point((int)xIntersection, (int)yIntersection);
  }

  /** Parallel with equal non-zero slope (horizontal pairs are handled separately). */
  private static boolean linesAreAngledAndParallel(double aKFirst, double aKSecond) {
    return aKFirst == aKSecond && 0 != aKFirst;
  }

  public static String toString(Line2D aLine) {
    return aLine.getP1() + ":" + aLine.getP2();
  }

  /**
   * True when the segment crosses the rectangle boundary, i.e. exactly one endpoint
   * is strictly inside. Null arguments yield {@code false}.
   */
  public static boolean intersects(Rectangle aRectangle, Line2D aLine) {
    if (aLine == null || aRectangle == null) {
      return false;
    }
    return !segmentOutsideRectangle(aRectangle, aLine) && !segmentInsideRectangle(aRectangle, aLine);
  }

  /**
   * Returns which edge of the rectangle the point lies on (TOP, BOTTOM, LEFT or RIGHT,
   * within {@code aEpsilon} tolerance), or {@link Integer#MIN_VALUE} if it is on none.
   */
  public static int getPointPositionOnRectangle(Rectangle aRectangle, Point aPoint, int aEpsilon) {
    final int ERROR_CODE = Integer.MIN_VALUE;
    if (pointOnBound(getTopOf(aRectangle), aPoint, aEpsilon)) {
      return TOP;
    }
    else if (pointOnBound(getBottomOf(aRectangle), aPoint, aEpsilon)) {
      return BOTTOM;
    }
    else if (pointOnBound(getLeftOf(aRectangle), aPoint, aEpsilon)) {
      return LEFT;
    }
    else if (pointOnBound(getRightOf(aRectangle), aPoint, aEpsilon)) {
      return RIGHT;
    }
    else {
      return ERROR_CODE;
    }
  }

  /** True when the point lies within the bound's axis-aligned extent, expanded by epsilon. */
  private static boolean pointOnBound(Line2D aBound, Point aPoint, int aEpsilon) {
    return withinRange(aBound.getX1(), aBound.getX2(), aPoint.getX(), aEpsilon)
           && withinRange(aBound.getY1(), aBound.getY2(), aPoint.getY(), aEpsilon);
  }

  private static boolean withinRange(double aLeft, double aRight, double aValue, int aEpsilon) {
    return (aLeft - aEpsilon) <= aValue && (aRight + aEpsilon) >= aValue;
  }

  /** Horizontal shift corresponding to a vertical shift of {@code aPointDeltaY} along the line. */
  public static double getShiftByY(Line2D aLine, double aPointDeltaY) {
    return aPointDeltaY * ((aLine.getX2() - aLine.getX1()) / (aLine.getY2() - aLine.getY1()));
  }

  /** Vertical shift corresponding to a horizontal shift of {@code aPointDeltaX} along the line. */
  public static double getShiftByX(Line2D aLine, double aPointDeltaX) {
    double width = aLine.getX2() - aLine.getX1();
    double height = aLine.getY2() - aLine.getY1();
    return aPointDeltaX * (height / width);
  }

  /**
   * Builds an arrow-head polygon rotated to the direction of {@code line} and translated
   * so its tip sits at {@code intersectionPoint}.
   */
  public static Shape getArrowShape(Line2D line, Point2D intersectionPoint) {
    final double deltaY = line.getP2().getY() - line.getP1().getY();
    final double length = Math.sqrt(Math.pow(deltaY, 2) + Math.pow(line.getP2().getX() - line.getP1().getX(), 2));
    double theta = Math.asin(deltaY / length);
    if (line.getP1().getX() > line.getP2().getX()) {
      theta = Math.PI - theta;
    }
    AffineTransform rotate = AffineTransform.getRotateInstance(theta, myArrowSize, myArrowSize / 2);
    Shape polygon = rotate.createTransformedShape(myArrowPolygon);
    AffineTransform move = AffineTransform.getTranslateInstance(intersectionPoint.getX() - myArrowSize,
                                                                intersectionPoint.getY() - myArrowSize / 2);
    polygon = move.createTransformedShape(polygon);
    return polygon;
  }

  /** A rectangle corner remembering which compass direction it represents. */
  private static class OrientedPoint extends Point {
    private final int myOrientation;

    OrientedPoint(double x, double y, int aOrientation) {
      super((int)x, (int)y);
      myOrientation = aOrientation;
    }

    public int getOrientation() {
      return myOrientation;
    }
  }

  /**
   * Returns the compass constant (NORTH_WEST, NORTH_EAST, SOUTH_EAST or SOUTH_WEST) of the
   * rectangle corner with the smallest summed distance to both segment endpoints.
   * If two corners are exactly equidistant, the later-inserted one wins (same as the
   * original raw-map implementation).
   */
  public static int getClosestToLineRectangleCorner(Rectangle aRectangle, Line2D aSegment) {
    OrientedPoint northWest = new OrientedPoint(aRectangle.getX(), aRectangle.getY(), NORTH_WEST);
    OrientedPoint northEast = new OrientedPoint(aRectangle.getMaxX(), aRectangle.getY(), NORTH_EAST);
    OrientedPoint southEast = new OrientedPoint(aRectangle.getMaxX(), aRectangle.getMaxY(), SOUTH_EAST);
    OrientedPoint southWest = new OrientedPoint(aRectangle.getX(), aRectangle.getMaxY(), SOUTH_WEST);

    TreeMap<Double, OrientedPoint> sorter = new TreeMap<Double, OrientedPoint>();
    sorter.put(getDistance(aSegment, northWest), northWest);
    sorter.put(getDistance(aSegment, southWest), southWest);
    sorter.put(getDistance(aSegment, southEast), southEast);
    sorter.put(getDistance(aSegment, northEast), northEast);

    return sorter.get(sorter.firstKey()).getOrientation();
  }

  /** Sum of the distances from the point to the two segment endpoints. */
  private static double getDistance(Line2D aSegment, Point aPoint) {
    return getLineLength(aSegment.getX1(), aSegment.getY1(), aPoint.getX(), aPoint.getY())
           + getLineLength(aSegment.getX2(), aSegment.getY2(), aPoint.getX(), aPoint.getY());
  }

  /** Euclidean distance between (aX1, aY1) and (aX2, aY2). */
  public static double getLineLength(double aX1, double aY1, double aX2, double aY2) {
    double deltaX = aX2 - aX1;
    double deltaY = aY2 - aY1;
    return Math.hypot(deltaX, deltaY);
  }

  /**
   * Cosine of the line's angle with the x-axis.
   *
   * @throws IllegalArgumentException if the line has zero length
   */
  public static double cos(Line2D aLine) {
    final double length = getLineLength(aLine.getX1(), aLine.getY1(), aLine.getX2(), aLine.getY2());
    if (length == 0) {
      throw new IllegalArgumentException(toString(aLine) + " has a zero length");
    }
    double deltaX = aLine.getX2() - aLine.getX1();
    return deltaX / length;
  }

  /**
   * Sine of the line's angle with the x-axis.
   *
   * @throws IllegalArgumentException if the line has zero length
   */
  public static double sin(Line2D aLine) {
    final double length = getLineLength(aLine.getX1(), aLine.getY1(), aLine.getX2(), aLine.getY2());
    if (length == 0) {
      throw new IllegalArgumentException(toString(aLine) + " has a zero length");
    }
    double deltaY = aLine.getY2() - aLine.getY1();
    return deltaY / length;
  }
}
apache-2.0
drmacro/basex
basex-core/src/main/java/org/basex/query/func/prof/ProfCurrentNs.java
442
package org.basex.query.func.prof;

import org.basex.query.*;
import org.basex.query.func.*;
import org.basex.query.value.item.*;
import org.basex.util.*;

/**
 * Function implementation.
 *
 * @author BaseX Team 2005-16, BSD License
 * @author Christian Gruen
 */
public final class ProfCurrentNs extends StandardFunc {
  @Override
  public Item item(final QueryContext qc, final InputInfo ii) {
    // Expose the JVM's monotonic nanosecond clock as an XQuery integer item.
    final long nanos = System.nanoTime();
    return Int.get(nanos);
  }
}
bsd-3-clause
lostdj/webbit
src/main/java/org/webbitserver/helpers/QueryParameters.java
1858
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;

/**
 * Parses a URL query string ("a=1&b=2&a=3") into a multimap of decoded
 * keys to decoded values. A parameter without '=' (or with an empty raw
 * value, e.g. "a=") is recorded with a {@code null} value.
 */
public class QueryParameters {
    private static final List<String> EMPTY = Collections.emptyList();
    private final Map<String, List<String>> params = new HashMap<String, List<String>>();

    /**
     * @param query the raw query string (without the leading '?'); may be null,
     *              in which case no parameters are recorded.
     */
    public QueryParameters(String query) {
        if (query != null) {
            parse(query);
        }
    }

    private void parse(String query) {
        try {
            // StringTokenizer is faster than split. http://www.javamex.com/tutorials/regular_expressions/splitting_tokenisation_performance.shtml
            StringTokenizer st = new StringTokenizer(query, "&");
            while (st.hasMoreTokens()) {
                // Limit the split to 2 so a value containing '=' (e.g. "t=a=b") keeps
                // its tail; an unlimited split used to silently drop everything after
                // the second '='.
                String[] pair = st.nextToken().split("=", 2);
                String key = URLDecoder.decode(pair[0], "UTF-8");
                // Empty raw values ("a=") map to null, same as a bare key ("a"),
                // preserving the original behavior.
                String value = pair.length == 1 || pair[1].length() == 0
                        ? null
                        : URLDecoder.decode(pair[1], "UTF-8");
                List<String> values = params.get(key);
                if (values == null) {
                    values = new ArrayList<String>();
                    params.put(key, values);
                }
                values.add(value);
            }
        } catch (UnsupportedEncodingException e) {
            // Unreachable in practice: UTF-8 is mandated by the JVM spec.
            throw new RuntimeException("Couldn't parse query string: " + query, e);
        }
    }

    /** Returns the first value for {@code key}, or null if the key is absent. */
    public String first(String key) {
        List<String> all = all(key);
        return all.isEmpty() ? null : all.get(0);
    }

    /** Returns all values for {@code key} in order, or an empty list if absent. */
    public List<String> all(String key) {
        return params.containsKey(key) ? params.get(key) : EMPTY;
    }

    /** Returns the set of parameter names seen in the query string. */
    public Set<String> keys() {
        return params.keySet();
    }
}
bsd-3-clause
magirtopcu/mopub-android-sdk
mopub-sdk/src/main/java/com/mopub/mobileads/FacebookKeywordProvider.java
3315
/* * Copyright (c) 2010-2013, MoPub Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of 'MoPub Inc.' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.mopub.mobileads; import android.content.Context; import android.database.Cursor; import android.net.Uri; import android.util.Log; /* * This class enables the MoPub SDK to deliver targeted ads from Facebook via MoPub Marketplace * (MoPub's real-time bidding ad exchange) as part of a test program. This class sends an identifier * to Facebook so Facebook can select the ad MoPub will serve in your app through MoPub Marketplace. 
* If this class is removed from the SDK, your application will not receive targeted ads from * Facebook. */ public class FacebookKeywordProvider { private static final Uri ID_URL = Uri.parse("content://com.facebook.katana.provider.AttributionIdProvider"); private static final String ID_COLUMN_NAME = "aid"; private static final String ID_PREFIX = "FBATTRID:"; public static String getKeyword(Context context) { Cursor cursor = null; try { String projection[] = {ID_COLUMN_NAME}; cursor = context.getContentResolver().query(ID_URL, projection, null, null, null); if (cursor == null || !cursor.moveToFirst()) { return null; } String attributionId = cursor.getString(cursor.getColumnIndex(ID_COLUMN_NAME)); if (attributionId == null || attributionId.length() == 0) { return null; } return ID_PREFIX + attributionId; } catch (Exception exception) { Log.d("MoPub", "Unable to retrieve FBATTRID: " + exception.toString()); return null; } finally { if (cursor != null) { cursor.close(); } } } }
bsd-3-clause
wrwg/grpc-java
core/src/main/java/io/grpc/internal/AbstractServerStream.java
9371
/*
 * Copyright 2014, Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *    * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *    * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following disclaimer
 * in the documentation and/or other materials provided with the
 * distribution.
 *
 *    * Neither the name of Google Inc. nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package io.grpc.internal;

import static com.google.common.base.Preconditions.checkNotNull;

import com.google.common.base.Preconditions;

import io.grpc.Metadata;
import io.grpc.Status;

import java.io.InputStream;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Abstract base class for {@link ServerStream} implementations.
 *
 * @param <IdT> the type of the stream identifier
 */
public abstract class AbstractServerStream<IdT> extends AbstractStream<IdT>
    implements ServerStream {
  private static final Logger log = Logger.getLogger(AbstractServerStream.class.getName());

  /** Whether listener.closed() has been called. */
  private boolean listenerClosed;
  private ServerStreamListener listener;

  // Set once response headers have been handed to the transport; passed to sendTrailers()
  // so subclasses can distinguish a trailers-only response from a normal one.
  private boolean headersSent = false;
  /**
   * Whether the stream was closed gracefully by the application (vs. a transport-level failure).
   */
  private boolean gracefulClose;
  /** Saved trailers from close() that need to be sent once the framer has sent all messages. */
  private Metadata stashedTrailers;

  protected AbstractServerStream(WritableBufferAllocator bufferAllocator, int maxMessageSize) {
    super(bufferAllocator, maxMessageSize);
  }

  /**
   * Sets the listener to receive notifications. Must be called in the context of the transport
   * thread.
   */
  public final void setListener(ServerStreamListener listener) {
    this.listener = checkNotNull(listener);

    // Now that the stream has actually been initialized, call the listener's onReady callback if
    // appropriate.
    onStreamAllocated();
  }

  @Override
  protected ServerStreamListener listener() {
    return listener;
  }

  /** Forwards an inbound, deframed message to the listener and advances the inbound phase. */
  @Override
  protected void receiveMessage(InputStream is) {
    inboundPhase(Phase.MESSAGE);
    listener().messageRead(is);
  }

  /** Sends response headers exactly once; afterwards the outbound phase is MESSAGE. */
  @Override
  public final void writeHeaders(Metadata headers) {
    Preconditions.checkNotNull(headers, "headers");
    outboundPhase(Phase.HEADERS);
    headersSent = true;
    internalSendHeaders(headers);
    outboundPhase(Phase.MESSAGE);
  }

  @Override
  public final void writeMessage(InputStream message) {
    if (outboundPhase() != Phase.MESSAGE) {
      throw new IllegalStateException("Messages are only permitted after headers and before close");
    }
    super.writeMessage(message);
  }

  /**
   * Gracefully closes the outbound side: stashes the trailers (with the status folded in)
   * and closes the framer, which eventually triggers internalSendFrame with endOfStream.
   */
  @Override
  public final void close(Status status, Metadata trailers) {
    Preconditions.checkNotNull(status, "status");
    Preconditions.checkNotNull(trailers, "trailers");
    // NOTE(review): outboundPhase(Phase.STATUS) appears to return the previous phase
    // (setter defined in AbstractStream, not visible here) — so this body runs only on
    // the first transition to STATUS, making close() idempotent. Confirm in AbstractStream.
    if (outboundPhase(Phase.STATUS) != Phase.STATUS) {
      gracefulClose = true;
      stashedTrailers = trailers;
      writeStatusToTrailers(status);
      closeFramer();
    }
  }

  /** Replaces any status code/message entries in the stashed trailers with {@code status}. */
  private void writeStatusToTrailers(Status status) {
    stashedTrailers.removeAll(Status.CODE_KEY);
    stashedTrailers.removeAll(Status.MESSAGE_KEY);
    stashedTrailers.put(Status.CODE_KEY, status);
    if (status.getDescription() != null) {
      stashedTrailers.put(Status.MESSAGE_KEY, status.getDescription());
    }
  }

  /**
   * Called in the network thread to process the content of an inbound DATA frame from the client.
   *
   * @param frame the inbound HTTP/2 DATA frame. If this buffer is not used immediately, it must
   *              be retained.
   * @param endOfStream {@code true} if no more data will be received on the stream.
   */
  public void inboundDataReceived(ReadableBuffer frame, boolean endOfStream) {
    if (inboundPhase() == Phase.STATUS) {
      // Inbound side already finished: release the buffer and drop the data.
      frame.close();
      return;
    }
    // TODO(zhangkun83): It sounds sub-optimal to deframe in the network thread. That means
    // decompression is serialized.
    deframe(frame, endOfStream);
  }

  @Override
  protected final void deframeFailed(Throwable cause) {
    log.log(Level.WARNING, "Exception processing message", cause);
    abortStream(Status.fromThrowable(cause), true);
  }

  @Override
  protected final void internalSendFrame(WritableBuffer frame, boolean endOfStream, boolean flush) {
    if (frame != null) {
      // The data frame itself never carries end-of-stream: the trailers below do. The flush
      // of the final data frame is suppressed — presumably sendTrailers() flushes; the
      // subclass contract is not visible here.
      sendFrame(frame, false, endOfStream ? false : flush);
    }
    if (endOfStream) {
      sendTrailers(stashedTrailers, headersSent);
      headersSent = true;
      stashedTrailers = null;
    }
  }

  /**
   * Sends response headers to the remote end points.
   *
   * @param headers the headers to be sent to client.
   */
  protected abstract void internalSendHeaders(Metadata headers);

  /**
   * Sends an outbound frame to the remote end point.
   *
   * @param frame a buffer containing the chunk of data to be sent.
   * @param endOfStream if {@code true} indicates that no more data will be sent on the stream by
   *        this endpoint.
   * @param flush {@code true} if more data may not be arriving soon
   */
  protected abstract void sendFrame(WritableBuffer frame, boolean endOfStream, boolean flush);

  /**
   * Sends trailers to the remote end point. This call implies end of stream.
   *
   * @param trailers metadata to be sent to end point
   * @param headersSent {@code true} if response headers have already been sent.
   */
  protected abstract void sendTrailers(Metadata trailers, boolean headersSent);

  /**
   * Indicates the stream is considered completely closed and there is no further opportunity for
   * error. It calls the listener's {@code closed()} if it was not already done by {@link
   * #abortStream}. Note that it is expected that either {@code closed()} or {@code abortStream()}
   * was previously called, since {@code closed()} is required for a normal stream closure and
   * {@code abortStream()} for abnormal.
   */
  public void complete() {
    if (!gracefulClose) {
      // complete() without a prior close() is a programming error: notify the listener
      // with INTERNAL, then fail loudly in the transport.
      closeListener(Status.INTERNAL.withDescription("successful complete() without close()"));
      throw new IllegalStateException("successful complete() without close()");
    }
    closeListener(Status.OK);
  }

  /**
   * Called when the remote end half-closes the stream.
   */
  @Override
  protected final void remoteEndClosed() {
    halfCloseListener();
  }

  /**
   * Aborts the stream with an error status, cleans up resources and notifies the listener if
   * necessary.
   *
   * <p>Unlike {@link #close(Status, Metadata)}, this method is only called from the
   * transport. The transport should use this method instead of {@code close(Status)} for internal
   * errors to prevent exposing unexpected states and exceptions to the application.
   *
   * @param status the error status. Must not be {@link Status#OK}.
   * @param notifyClient {@code true} if the stream is still writable and you want to notify the
   *        client about stream closure and send the status
   */
  public final void abortStream(Status status, boolean notifyClient) {
    // TODO(louiscryan): Investigate whether we can remove the notification to the client
    // and rely on a transport layer stream reset instead.
    Preconditions.checkArgument(!status.isOk(), "status must not be OK");
    closeListener(status);
    if (notifyClient) {
      // TODO(louiscryan): Remove
      if (stashedTrailers == null) {
        stashedTrailers = new Metadata();
      }
      writeStatusToTrailers(status);
      closeFramer();
    } else {
      dispose();
    }
  }

  @Override
  public boolean isClosed() {
    return super.isClosed() || listenerClosed;
  }

  /**
   * Fires a half-closed event to the listener and frees inbound resources.
   */
  private void halfCloseListener() {
    // NOTE(review): inboundPhase(Phase.STATUS) appears to return the previous phase, so the
    // half-close is delivered at most once and never after the listener was closed.
    if (inboundPhase(Phase.STATUS) != Phase.STATUS && !listenerClosed) {
      closeDeframer();
      listener().halfClosed();
    }
  }

  /**
   * Closes the listener if not previously closed and frees resources.
   */
  private void closeListener(Status newStatus) {
    if (!listenerClosed) {
      listenerClosed = true;
      closeDeframer();
      listener().closed(newStatus);
    }
  }
}
bsd-3-clause
spotify/robolectric
resources/src/main/java/org/robolectric/res/android/SortedVector.java
1044
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// roughly transliterated from system/core/libutils/include/utils/SortedVector.h and
// system/core/libutils/VectorImpl.cpp
/**
 * A list that keeps its elements in ascending natural order. Elements are placed at their
 * sorted position on insertion, so lookups by index observe sorted order at all times.
 */
public class SortedVector<T extends Comparable<T>> {

  // Internal storage for the data; kept sorted by add().
  private final List<T> mStorage;

  SortedVector(int itemSize) {
    mStorage = new ArrayList<>(itemSize);
  }

  SortedVector() {
    mStorage = new ArrayList<>();
  }

  /**
   * Inserts {@code info} at its sorted position. Equal elements are placed after the
   * existing ones — the same ordering a stable full re-sort of the appended element
   * produced — but with an O(log n) search plus O(n) shift instead of re-sorting the
   * entire list (O(n log n)) on every insertion.
   */
  public void add(T info) {
    int index = Collections.binarySearch(mStorage, info);
    if (index >= 0) {
      // Found an equal element: step past the run of equals to keep stable order.
      do {
        index++;
      } while (index < mStorage.size() && mStorage.get(index).compareTo(info) == 0);
    } else {
      // binarySearch encodes the insertion point as -(insertionPoint) - 1.
      index = -(index + 1);
    }
    mStorage.add(index, info);
  }

  /** Number of stored elements. */
  public int size() {
    return mStorage.size();
  }

  /** Element at {@code contIdx} in sorted order. */
  public T itemAt(int contIdx) {
    return mStorage.get(contIdx);
  }

  /** Index of the first element equal to {@code tmpInfo} (by equals), or -1. */
  public int indexOf(T tmpInfo) {
    return mStorage.indexOf(tmpInfo);
  }

  /** Removes the element at {@code matchIdx}. */
  public void removeAt(int matchIdx) {
    mStorage.remove(matchIdx);
  }
}
mit
adolphenom/MARS_Assembler
mars/mips/instructions/syscalls/SyscallPrintIntUnsigned.java
2199
package mars.mips.instructions.syscalls; import mars.util.*; import mars.mips.hardware.*; import mars.*; /* Copyright (c) 2003-2010, Pete Sanderson and Kenneth Vollmar Developed by Pete Sanderson (psanderson@otterbein.edu) and Kenneth Vollmar (kenvollmar@missouristate.edu) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. (MIT license, http://www.opensource.org/licenses/mit-license.html) */ /** * Service to display integer stored in $a0 on the console as unsigned decimal. * */ public class SyscallPrintIntUnsigned extends AbstractSyscall { /** * Build an instance of the Print Integer Unsigned syscall. Default service number * is 36 and name is "PrintIntUnsigned". */ public SyscallPrintIntUnsigned() { super(36, "PrintIntUnsigned"); } /** * Performs syscall function to print on the console the integer stored in $a0. * The value is treated as unsigned. */ public void simulate(ProgramStatement statement) throws ProcessingException { SystemIO.printString( Binary.unsignedIntToIntString(RegisterFile.getValue(4))); } }
mit
iseki-masaya/spongycastle
core/src/main/java/org/spongycastle/asn1/pkcs/PKCSObjectIdentifiers.java
21960
package org.spongycastle.asn1.pkcs;

import org.spongycastle.asn1.ASN1ObjectIdentifier;

/**
 * Object identifiers (OIDs) defined by the PKCS standards.
 * <p>
 * pkcs-1 OBJECT IDENTIFIER ::=<p>
 * { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 1 }
 */
public interface PKCSObjectIdentifiers
{
    /** PKCS#1: 1.2.840.113549.1.1 */
    static final ASN1ObjectIdentifier pkcs_1 = new ASN1ObjectIdentifier("1.2.840.113549.1.1");
    /** PKCS#1: 1.2.840.113549.1.1.1 */
    static final ASN1ObjectIdentifier rsaEncryption = pkcs_1.branch("1");
    /** PKCS#1: 1.2.840.113549.1.1.2 */
    static final ASN1ObjectIdentifier md2WithRSAEncryption = pkcs_1.branch("2");
    /** PKCS#1: 1.2.840.113549.1.1.3 */
    static final ASN1ObjectIdentifier md4WithRSAEncryption = pkcs_1.branch("3");
    /** PKCS#1: 1.2.840.113549.1.1.4 */
    static final ASN1ObjectIdentifier md5WithRSAEncryption = pkcs_1.branch("4");
    /** PKCS#1: 1.2.840.113549.1.1.5 */
    static final ASN1ObjectIdentifier sha1WithRSAEncryption = pkcs_1.branch("5");
    /** PKCS#1: 1.2.840.113549.1.1.6 */
    static final ASN1ObjectIdentifier srsaOAEPEncryptionSET = pkcs_1.branch("6");
    /** PKCS#1: 1.2.840.113549.1.1.7 */
    static final ASN1ObjectIdentifier id_RSAES_OAEP = pkcs_1.branch("7");
    /** PKCS#1: 1.2.840.113549.1.1.8 */
    static final ASN1ObjectIdentifier id_mgf1 = pkcs_1.branch("8");
    /** PKCS#1: 1.2.840.113549.1.1.9 */
    static final ASN1ObjectIdentifier id_pSpecified = pkcs_1.branch("9");
    /** PKCS#1: 1.2.840.113549.1.1.10 */
    static final ASN1ObjectIdentifier id_RSASSA_PSS = pkcs_1.branch("10");
    /** PKCS#1: 1.2.840.113549.1.1.11 */
    static final ASN1ObjectIdentifier sha256WithRSAEncryption = pkcs_1.branch("11");
    /** PKCS#1: 1.2.840.113549.1.1.12 */
    static final ASN1ObjectIdentifier sha384WithRSAEncryption = pkcs_1.branch("12");
    /** PKCS#1: 1.2.840.113549.1.1.13 */
    static final ASN1ObjectIdentifier sha512WithRSAEncryption = pkcs_1.branch("13");
    /** PKCS#1: 1.2.840.113549.1.1.14 */
    static final ASN1ObjectIdentifier sha224WithRSAEncryption = pkcs_1.branch("14");

    //
    // pkcs-3 OBJECT IDENTIFIER ::= {
    //       iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 3 }
    //
    /** PKCS#3: 1.2.840.113549.1.3 */
    static final ASN1ObjectIdentifier pkcs_3 = new ASN1ObjectIdentifier("1.2.840.113549.1.3");
    /** PKCS#3: 1.2.840.113549.1.3.1 */
    static final ASN1ObjectIdentifier dhKeyAgreement = pkcs_3.branch("1");

    //
    // pkcs-5 OBJECT IDENTIFIER ::= {
    //       iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 5 }
    //
    /** PKCS#5: 1.2.840.113549.1.5 */
    static final ASN1ObjectIdentifier pkcs_5 = new ASN1ObjectIdentifier("1.2.840.113549.1.5");
    /** PKCS#5: 1.2.840.113549.1.5.1 */
    static final ASN1ObjectIdentifier pbeWithMD2AndDES_CBC = pkcs_5.branch("1");
    /** PKCS#5: 1.2.840.113549.1.5.4 */
    static final ASN1ObjectIdentifier pbeWithMD2AndRC2_CBC = pkcs_5.branch("4");
    /** PKCS#5: 1.2.840.113549.1.5.3 */
    static final ASN1ObjectIdentifier pbeWithMD5AndDES_CBC = pkcs_5.branch("3");
    /** PKCS#5: 1.2.840.113549.1.5.6 */
    static final ASN1ObjectIdentifier pbeWithMD5AndRC2_CBC = pkcs_5.branch("6");
    /** PKCS#5: 1.2.840.113549.1.5.10 */
    static final ASN1ObjectIdentifier pbeWithSHA1AndDES_CBC = pkcs_5.branch("10");
    /** PKCS#5: 1.2.840.113549.1.5.11 */
    static final ASN1ObjectIdentifier pbeWithSHA1AndRC2_CBC = pkcs_5.branch("11");
    /** PKCS#5: 1.2.840.113549.1.5.13 */
    static final ASN1ObjectIdentifier id_PBES2 = pkcs_5.branch("13");
    /** PKCS#5: 1.2.840.113549.1.5.12 */
    static final ASN1ObjectIdentifier id_PBKDF2 = pkcs_5.branch("12");

    //
    // encryptionAlgorithm OBJECT IDENTIFIER ::= {
    //       iso(1) member-body(2) us(840) rsadsi(113549) 3 }
    //
    /** 1.2.840.113549.3 */
    static final ASN1ObjectIdentifier encryptionAlgorithm = new ASN1ObjectIdentifier("1.2.840.113549.3");
    /** 1.2.840.113549.3.7 */
    static final ASN1ObjectIdentifier des_EDE3_CBC = encryptionAlgorithm.branch("7");
    /** 1.2.840.113549.3.2 */
    static final ASN1ObjectIdentifier RC2_CBC = encryptionAlgorithm.branch("2");
    /** 1.2.840.113549.3.4 */
    static final ASN1ObjectIdentifier rc4 = encryptionAlgorithm.branch("4");

    //
    // object identifiers for digests
    //
    /** 1.2.840.113549.2 */
    static final ASN1ObjectIdentifier digestAlgorithm = new ASN1ObjectIdentifier("1.2.840.113549.2");

    //
    // md2 OBJECT IDENTIFIER ::=
    //      {iso(1) member-body(2) US(840) rsadsi(113549) digestAlgorithm(2) 2}
    //
    /** 1.2.840.113549.2.2 */
    static final ASN1ObjectIdentifier md2 = digestAlgorithm.branch("2");

    //
    // md4 OBJECT IDENTIFIER ::=
    //      {iso(1) member-body(2) US(840) rsadsi(113549) digestAlgorithm(2) 4}
    //
    /** 1.2.840.113549.2.4 */
    static final ASN1ObjectIdentifier md4 = digestAlgorithm.branch("4");

    //
    // md5 OBJECT IDENTIFIER ::=
    //      {iso(1) member-body(2) US(840) rsadsi(113549) digestAlgorithm(2) 5}
    //
    /** 1.2.840.113549.2.5 */
    static final ASN1ObjectIdentifier md5 = digestAlgorithm.branch("5");

    /** 1.2.840.113549.2.7 */
    static final ASN1ObjectIdentifier id_hmacWithSHA1 = digestAlgorithm.branch("7");
    /** 1.2.840.113549.2.8 */
    static final ASN1ObjectIdentifier id_hmacWithSHA224 = digestAlgorithm.branch("8");
    /** 1.2.840.113549.2.9 */
    static final ASN1ObjectIdentifier id_hmacWithSHA256 = digestAlgorithm.branch("9");
    /** 1.2.840.113549.2.10 */
    static final ASN1ObjectIdentifier id_hmacWithSHA384 = digestAlgorithm.branch("10");
    /** 1.2.840.113549.2.11 */
    static final ASN1ObjectIdentifier id_hmacWithSHA512 = digestAlgorithm.branch("11");

    //
    // pkcs-7 OBJECT IDENTIFIER ::= {
    //       iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 7 }
    //
    /** PKCS#7: 1.2.840.113549.1.7 */
    static final ASN1ObjectIdentifier pkcs_7 = new ASN1ObjectIdentifier("1.2.840.113549.1.7");
    /** PKCS#7: 1.2.840.113549.1.7.1 */
    static final ASN1ObjectIdentifier data = new ASN1ObjectIdentifier("1.2.840.113549.1.7.1");
    /** PKCS#7: 1.2.840.113549.1.7.2 */
    static final ASN1ObjectIdentifier signedData = new ASN1ObjectIdentifier("1.2.840.113549.1.7.2");
    /** PKCS#7: 1.2.840.113549.1.7.3 */
    static final ASN1ObjectIdentifier envelopedData = new ASN1ObjectIdentifier("1.2.840.113549.1.7.3");
    /** PKCS#7: 1.2.840.113549.1.7.4 */
    static final ASN1ObjectIdentifier signedAndEnvelopedData = new ASN1ObjectIdentifier("1.2.840.113549.1.7.4");
    /** PKCS#7: 1.2.840.113549.1.7.5 */
    static final ASN1ObjectIdentifier digestedData = new ASN1ObjectIdentifier("1.2.840.113549.1.7.5");
    /** PKCS#7: 1.2.840.113549.1.7.6 (previous comment said ".7.76" — the OID below is ".7.6") */
    static final ASN1ObjectIdentifier encryptedData = new ASN1ObjectIdentifier("1.2.840.113549.1.7.6");

    //
    // pkcs-9 OBJECT IDENTIFIER ::= {
    //       iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 9 }
    //
    /** PKCS#9: 1.2.840.113549.1.9 */
    static final ASN1ObjectIdentifier pkcs_9 = new ASN1ObjectIdentifier("1.2.840.113549.1.9");
    /** PKCS#9: 1.2.840.113549.1.9.1 */
    static final ASN1ObjectIdentifier pkcs_9_at_emailAddress = pkcs_9.branch("1");
    /** PKCS#9: 1.2.840.113549.1.9.2 */
    static final ASN1ObjectIdentifier pkcs_9_at_unstructuredName = pkcs_9.branch("2");
    /** PKCS#9: 1.2.840.113549.1.9.3 */
    static final ASN1ObjectIdentifier pkcs_9_at_contentType = pkcs_9.branch("3");
    /** PKCS#9: 1.2.840.113549.1.9.4 */
    static final ASN1ObjectIdentifier pkcs_9_at_messageDigest = pkcs_9.branch("4");
    /** PKCS#9: 1.2.840.113549.1.9.5 */
    static final ASN1ObjectIdentifier pkcs_9_at_signingTime = pkcs_9.branch("5");
    /** PKCS#9: 1.2.840.113549.1.9.6 */
    static final ASN1ObjectIdentifier pkcs_9_at_counterSignature = pkcs_9.branch("6");
    /** PKCS#9: 1.2.840.113549.1.9.7 */
    static final ASN1ObjectIdentifier pkcs_9_at_challengePassword = pkcs_9.branch("7");
    /** PKCS#9: 1.2.840.113549.1.9.8 */
    static final ASN1ObjectIdentifier pkcs_9_at_unstructuredAddress = pkcs_9.branch("8");
    /** PKCS#9: 1.2.840.113549.1.9.9 */
    static final ASN1ObjectIdentifier pkcs_9_at_extendedCertificateAttributes = pkcs_9.branch("9");
    /** PKCS#9: 1.2.840.113549.1.9.13 */
    static final ASN1ObjectIdentifier pkcs_9_at_signingDescription = pkcs_9.branch("13");
    /** PKCS#9: 1.2.840.113549.1.9.14 */
    static final ASN1ObjectIdentifier pkcs_9_at_extensionRequest = pkcs_9.branch("14");
    /** PKCS#9: 1.2.840.113549.1.9.15 */
    static final ASN1ObjectIdentifier pkcs_9_at_smimeCapabilities = pkcs_9.branch("15");
    /** PKCS#9: 1.2.840.113549.1.9.16 */
    static final ASN1ObjectIdentifier id_smime = pkcs_9.branch("16");
    /** PKCS#9: 1.2.840.113549.1.9.20 */
    static final ASN1ObjectIdentifier pkcs_9_at_friendlyName = pkcs_9.branch("20");
    /** PKCS#9: 1.2.840.113549.1.9.21 */
    static final ASN1ObjectIdentifier pkcs_9_at_localKeyId = pkcs_9.branch("21");
    /** PKCS#9: 1.2.840.113549.1.9.22.1
     * @deprecated use x509Certificate instead */
    static final ASN1ObjectIdentifier x509certType = pkcs_9.branch("22.1");
    /** PKCS#9: 1.2.840.113549.1.9.22 */
    static final ASN1ObjectIdentifier certTypes = pkcs_9.branch("22");
    /** PKCS#9: 1.2.840.113549.1.9.22.1 */
    static final ASN1ObjectIdentifier x509Certificate = certTypes.branch("1");
    /** PKCS#9: 1.2.840.113549.1.9.22.2 */
    static final ASN1ObjectIdentifier sdsiCertificate = certTypes.branch("2");
    /** PKCS#9: 1.2.840.113549.1.9.23 */
    static final ASN1ObjectIdentifier crlTypes = pkcs_9.branch("23");
    /** PKCS#9: 1.2.840.113549.1.9.23.1 */
    static final ASN1ObjectIdentifier x509Crl = crlTypes.branch("1");

    //
    // SMIME capability sub oids.
    //
    /** PKCS#9: 1.2.840.113549.1.9.15.1 -- smime capability */
    static final ASN1ObjectIdentifier preferSignedData = pkcs_9.branch("15.1");
    /** PKCS#9: 1.2.840.113549.1.9.15.2 -- smime capability */
    static final ASN1ObjectIdentifier canNotDecryptAny = pkcs_9.branch("15.2");
    /** PKCS#9: 1.2.840.113549.1.9.15.3 -- smime capability */
    static final ASN1ObjectIdentifier sMIMECapabilitiesVersions = pkcs_9.branch("15.3");

    //
    // id-ct OBJECT IDENTIFIER ::= {iso(1) member-body(2) usa(840)
    //       rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) ct(1)}
    //
    /** PKCS#9: 1.2.840.113549.1.9.16.1 -- smime ct */
    static final ASN1ObjectIdentifier id_ct = new ASN1ObjectIdentifier("1.2.840.113549.1.9.16.1");
    /** PKCS#9: 1.2.840.113549.1.9.16.1.2 -- smime ct authData */
    static final ASN1ObjectIdentifier id_ct_authData = id_ct.branch("2");
    /** PKCS#9: 1.2.840.113549.1.9.16.1.4 -- smime ct TSTInfo */
    static final ASN1ObjectIdentifier id_ct_TSTInfo = id_ct.branch("4");
    /** PKCS#9: 1.2.840.113549.1.9.16.1.9 -- smime ct compressedData */
    static final ASN1ObjectIdentifier id_ct_compressedData = id_ct.branch("9");
    /** PKCS#9: 1.2.840.113549.1.9.16.1.23 -- smime ct authEnvelopedData */
    static final ASN1ObjectIdentifier id_ct_authEnvelopedData = id_ct.branch("23");
    /** PKCS#9: 1.2.840.113549.1.9.16.1.31 -- smime ct timestampedData */
    static final ASN1ObjectIdentifier id_ct_timestampedData = id_ct.branch("31");

    /** S/MIME: Algorithm Identifiers ; 1.2.840.113549.1.9.16.3 */
    static final ASN1ObjectIdentifier id_alg = id_smime.branch("3");
    /** PKCS#9: 1.2.840.113549.1.9.16.3.9 */
    static final ASN1ObjectIdentifier id_alg_PWRI_KEK = id_alg.branch("9");

    //
    // id-cti OBJECT IDENTIFIER ::= {iso(1) member-body(2) usa(840)
    //       rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) cti(6)}
    //
    /** PKCS#9: 1.2.840.113549.1.9.16.6 -- smime cti */
    static final ASN1ObjectIdentifier id_cti = new ASN1ObjectIdentifier("1.2.840.113549.1.9.16.6");
    /** PKCS#9: 1.2.840.113549.1.9.16.6.1 -- smime cti proofOfOrigin */
    static final ASN1ObjectIdentifier id_cti_ets_proofOfOrigin = id_cti.branch("1");
    /** PKCS#9: 1.2.840.113549.1.9.16.6.2 -- smime cti proofOfReceipt */
    static final ASN1ObjectIdentifier id_cti_ets_proofOfReceipt = id_cti.branch("2");
    /** PKCS#9: 1.2.840.113549.1.9.16.6.3 -- smime cti proofOfDelivery */
    static final ASN1ObjectIdentifier id_cti_ets_proofOfDelivery = id_cti.branch("3");
    /** PKCS#9: 1.2.840.113549.1.9.16.6.4 -- smime cti proofOfSender */
    static final ASN1ObjectIdentifier id_cti_ets_proofOfSender = id_cti.branch("4");
    /** PKCS#9: 1.2.840.113549.1.9.16.6.5 -- smime cti proofOfApproval */
    static final ASN1ObjectIdentifier id_cti_ets_proofOfApproval = id_cti.branch("5");
    /** PKCS#9: 1.2.840.113549.1.9.16.6.6 -- smime cti proofOfCreation */
    static final ASN1ObjectIdentifier id_cti_ets_proofOfCreation = id_cti.branch("6");

    //
    // id-aa OBJECT IDENTIFIER ::= {iso(1) member-body(2) usa(840)
    //       rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) attributes(2)}
    //
    // NOTE: earlier comments here claimed the 1.2.840.113549.1.9.16.6.2 arc;
    // the OID actually constructed below is 1.2.840.113549.1.9.16.2 and the
    // branch comments have been corrected accordingly.
    /** PKCS#9: 1.2.840.113549.1.9.16.2 - smime attributes */
    static final ASN1ObjectIdentifier id_aa = new ASN1ObjectIdentifier("1.2.840.113549.1.9.16.2");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.1 -- smime attribute receiptRequest */
    static final ASN1ObjectIdentifier id_aa_receiptRequest = id_aa.branch("1");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.4 - See <a href="http://tools.ietf.org/html/rfc2634">RFC 2634</a> */
    static final ASN1ObjectIdentifier id_aa_contentHint = id_aa.branch("4"); // See RFC 2634
    /** PKCS#9: 1.2.840.113549.1.9.16.2.5 */
    static final ASN1ObjectIdentifier id_aa_msgSigDigest = id_aa.branch("5");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.10 */
    static final ASN1ObjectIdentifier id_aa_contentReference = id_aa.branch("10");
    /*
     * id-aa-encrypKeyPref OBJECT IDENTIFIER ::= {id-aa 11}
     */
    /** PKCS#9: 1.2.840.113549.1.9.16.2.11 */
    static final ASN1ObjectIdentifier id_aa_encrypKeyPref = id_aa.branch("11");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.12 */
    static final ASN1ObjectIdentifier id_aa_signingCertificate = id_aa.branch("12");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.47 */
    static final ASN1ObjectIdentifier id_aa_signingCertificateV2 = id_aa.branch("47");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.7 - See <a href="http://tools.ietf.org/html/rfc2634">RFC 2634</a> */
    static final ASN1ObjectIdentifier id_aa_contentIdentifier = id_aa.branch("7"); // See RFC 2634

    /*
     * RFC 3126
     */
    /** PKCS#9: 1.2.840.113549.1.9.16.2.14 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_signatureTimeStampToken = id_aa.branch("14");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.15 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_ets_sigPolicyId = id_aa.branch("15");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.16 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_ets_commitmentType = id_aa.branch("16");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.17 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_ets_signerLocation = id_aa.branch("17");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.18 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_ets_signerAttr = id_aa.branch("18");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.19 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_ets_otherSigCert = id_aa.branch("19");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.20 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_ets_contentTimestamp = id_aa.branch("20");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.21 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_ets_certificateRefs = id_aa.branch("21");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.22 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_ets_revocationRefs = id_aa.branch("22");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.23 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_ets_certValues = id_aa.branch("23");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.24 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_ets_revocationValues = id_aa.branch("24");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.25 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_ets_escTimeStamp = id_aa.branch("25");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.26 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_ets_certCRLTimestamp = id_aa.branch("26");
    /** PKCS#9: 1.2.840.113549.1.9.16.2.27 - <a href="http://tools.ietf.org/html/rfc3126">RFC 3126</a> */
    static final ASN1ObjectIdentifier id_aa_ets_archiveTimestamp = id_aa.branch("27");

    /** @deprecated use id_aa_ets_sigPolicyId instead */
    static final ASN1ObjectIdentifier id_aa_sigPolicyId = id_aa_ets_sigPolicyId;
    /** @deprecated use id_aa_ets_commitmentType instead */
    static final ASN1ObjectIdentifier id_aa_commitmentType = id_aa_ets_commitmentType;
    /** @deprecated use id_aa_ets_signerLocation instead */
    static final ASN1ObjectIdentifier id_aa_signerLocation = id_aa_ets_signerLocation;
    /** @deprecated use id_aa_ets_otherSigCert instead */
    static final ASN1ObjectIdentifier id_aa_otherSigCert = id_aa_ets_otherSigCert;

    /**
     * id-spq OBJECT IDENTIFIER ::= {iso(1) member-body(2) usa(840)
     * rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) id-spq(5)}; <p>
     * 1.2.840.113549.1.9.16.5
     */
    final String id_spq = "1.2.840.113549.1.9.16.5";
    /** SMIME SPQ URI: 1.2.840.113549.1.9.16.5.1 */
    static final ASN1ObjectIdentifier id_spq_ets_uri = new ASN1ObjectIdentifier(id_spq + ".1");
    /** SMIME SPQ UNOTICE: 1.2.840.113549.1.9.16.5.2 */
    static final ASN1ObjectIdentifier id_spq_ets_unotice = new ASN1ObjectIdentifier(id_spq + ".2");

    //
    // pkcs-12 OBJECT IDENTIFIER ::= {
    //       iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 12 }
    //
    /** PKCS#12: 1.2.840.113549.1.12 */
    static final ASN1ObjectIdentifier pkcs_12 = new ASN1ObjectIdentifier("1.2.840.113549.1.12");
    /** PKCS#12: 1.2.840.113549.1.12.10.1 */
    static final ASN1ObjectIdentifier bagtypes = pkcs_12.branch("10.1");
    /** PKCS#12: 1.2.840.113549.1.12.10.1.1 */
    static final ASN1ObjectIdentifier keyBag = bagtypes.branch("1");
    /** PKCS#12: 1.2.840.113549.1.12.10.1.2 */
    static final ASN1ObjectIdentifier pkcs8ShroudedKeyBag = bagtypes.branch("2");
    /** PKCS#12: 1.2.840.113549.1.12.10.1.3 */
    static final ASN1ObjectIdentifier certBag = bagtypes.branch("3");
    /** PKCS#12: 1.2.840.113549.1.12.10.1.4 */
    static final ASN1ObjectIdentifier crlBag = bagtypes.branch("4");
    /** PKCS#12: 1.2.840.113549.1.12.10.1.5 */
    static final ASN1ObjectIdentifier secretBag = bagtypes.branch("5");
    /** PKCS#12: 1.2.840.113549.1.12.10.1.6 */
    static final ASN1ObjectIdentifier safeContentsBag = bagtypes.branch("6");

    /** PKCS#12: 1.2.840.113549.1.12.1 */
    static final ASN1ObjectIdentifier pkcs_12PbeIds = pkcs_12.branch("1");
    /** PKCS#12: 1.2.840.113549.1.12.1.1 */
    static final ASN1ObjectIdentifier pbeWithSHAAnd128BitRC4 = pkcs_12PbeIds.branch("1");
    /** PKCS#12: 1.2.840.113549.1.12.1.2 */
    static final ASN1ObjectIdentifier pbeWithSHAAnd40BitRC4 = pkcs_12PbeIds.branch("2");
    /** PKCS#12: 1.2.840.113549.1.12.1.3 */
    static final ASN1ObjectIdentifier pbeWithSHAAnd3_KeyTripleDES_CBC = pkcs_12PbeIds.branch("3");
    /** PKCS#12: 1.2.840.113549.1.12.1.4 */
    static final ASN1ObjectIdentifier pbeWithSHAAnd2_KeyTripleDES_CBC = pkcs_12PbeIds.branch("4");
    /** PKCS#12: 1.2.840.113549.1.12.1.5 */
    static final ASN1ObjectIdentifier pbeWithSHAAnd128BitRC2_CBC = pkcs_12PbeIds.branch("5");
    /** PKCS#12: 1.2.840.113549.1.12.1.6 */
    static final ASN1ObjectIdentifier pbeWithSHAAnd40BitRC2_CBC = pkcs_12PbeIds.branch("6");
    /**
     * PKCS#12: 1.2.840.113549.1.12.1.6
     * @deprecated use pbeWithSHAAnd40BitRC2_CBC
     */
    static final ASN1ObjectIdentifier pbewithSHAAnd40BitRC2_CBC = pkcs_12PbeIds.branch("6");

    /** PKCS#9: 1.2.840.113549.1.9.16.3.6 */
    static final ASN1ObjectIdentifier id_alg_CMS3DESwrap = new ASN1ObjectIdentifier("1.2.840.113549.1.9.16.3.6");
    /** PKCS#9: 1.2.840.113549.1.9.16.3.7 */
    static final ASN1ObjectIdentifier id_alg_CMSRC2wrap = new ASN1ObjectIdentifier("1.2.840.113549.1.9.16.3.7");
}
mit
v1v/jenkins
test/src/test/java/org/kohsuke/stapler/beanutils/BeanUtilsTagLibrary.java
855
package org.kohsuke.stapler.beanutils;

import org.apache.commons.jelly.JellyException;
import org.apache.commons.jelly.Tag;
import org.apache.commons.jelly.TagLibrary;
import org.apache.commons.jelly.impl.TagScript;
import org.xml.sax.Attributes;

/**
 * Jelly tag library for the test suite. Registers one tag whose class-valued
 * property is typed as {@code String} and one whose property is typed as
 * {@code Object}, so tests can exercise both attribute-binding paths.
 */
public class BeanUtilsTagLibrary extends TagLibrary {

    public BeanUtilsTagLibrary() {
        // Register the two test tags under the names the test scripts reference.
        registerTag("tagWithStringTypedClassProperty", DynaTagWithStringProperty.class);
        registerTag("tagWithObjectTypedClassProperty", BasicTagWithObjectProperty.class);
    }

    // NOTE(review): the previous createTag(String, Attributes) and
    // createTagScript(String, Attributes) overrides only delegated to super
    // and were removed as redundant; inherited behavior is identical.
}
mit
chrisrico/XChange
xchange-kraken/src/main/java/org/knowm/xchange/kraken/dto/marketdata/KrakenAssetPair.java
4457
package org.knowm.xchange.kraken.dto.marketdata;

import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;
import java.util.List;

/**
 * Immutable DTO describing one tradable pair as returned by Kraken's public
 * AssetPairs endpoint: names and asset classes of both legs, lot/price scale,
 * fee schedules, leverage options and margin limits.
 */
public class KrakenAssetPair {

  // JSON "altname": alternate name for the pair.
  private final String altName;
  // JSON "aclass_base": asset class of the base currency.
  private final String classBase;
  // JSON "base": base currency code.
  private final String base;
  // JSON "aclass_quote": asset class of the quote currency.
  private final String classQuote;
  // JSON "quote": quote currency code.
  private final String quote;
  // JSON "lot": volume lot size.
  private final String volumeLotSize;
  // JSON "pair_decimals": number of decimal places for the pair price.
  private final int pairScale;
  // JSON "lot_decimals": number of decimal places for volume.
  private final int volumeLotScale;
  // JSON "lot_multiplier": volume multiplier.
  private final BigDecimal volumeMultiplier;
  // JSON "leverage_buy": leverage amounts available when buying.
  private final List<String> leverage_buy;
  // JSON "leverage_sell": leverage amounts available when selling.
  private final List<String> leverage_sell;
  // JSON "fees": taker fee schedule.
  private final List<KrakenFee> fees;
  // JSON "fees_maker": maker fee schedule.
  private final List<KrakenFee> fees_maker;
  // JSON "fee_volume_currency": currency the fee volume is expressed in.
  private final String feeVolumeCurrency;
  // JSON "margin_call": margin call level.
  private final BigDecimal marginCall;
  // JSON "margin_stop": margin stop-out level.
  private final BigDecimal marginStop;

  /**
   * Constructor (invoked by Jackson; each parameter maps to the JSON field
   * named in its {@code @JsonProperty} annotation).
   *
   * @param altName alternate pair name ("altname")
   * @param classBase base currency asset class ("aclass_base")
   * @param base base currency ("base")
   * @param classQuote quote currency asset class ("aclass_quote")
   * @param quote quote currency ("quote")
   * @param volumeLotSize volume lot size ("lot")
   * @param pairScale pair price decimal places ("pair_decimals")
   * @param volumeLotScale volume decimal places ("lot_decimals")
   * @param volumeMultiplier volume multiplier ("lot_multiplier")
   * @param fees taker fee schedule ("fees")
   * @param fees_maker maker fee schedule ("fees_maker")
   * @param feeVolumeCurrency fee volume currency ("fee_volume_currency")
   * @param marginCall margin call level ("margin_call")
   * @param marginStop margin stop-out level ("margin_stop")
   * @param leverage_buy buy-side leverage amounts ("leverage_buy")
   * @param leverage_sell sell-side leverage amounts ("leverage_sell")
   */
  public KrakenAssetPair(
      @JsonProperty("altname") String altName,
      @JsonProperty("aclass_base") String classBase,
      @JsonProperty("base") String base,
      @JsonProperty("aclass_quote") String classQuote,
      @JsonProperty("quote") String quote,
      @JsonProperty("lot") String volumeLotSize,
      @JsonProperty("pair_decimals") int pairScale,
      @JsonProperty("lot_decimals") int volumeLotScale,
      @JsonProperty("lot_multiplier") BigDecimal volumeMultiplier,
      @JsonProperty("fees") List<KrakenFee> fees,
      @JsonProperty("fees_maker") List<KrakenFee> fees_maker,
      @JsonProperty("fee_volume_currency") String feeVolumeCurrency,
      @JsonProperty("margin_call") BigDecimal marginCall,
      @JsonProperty("margin_stop") BigDecimal marginStop,
      @JsonProperty("leverage_buy") List<String> leverage_buy,
      @JsonProperty("leverage_sell") List<String> leverage_sell) {

    this.altName = altName;
    this.classBase = classBase;
    this.base = base;
    this.classQuote = classQuote;
    this.quote = quote;
    this.volumeLotSize = volumeLotSize;
    this.pairScale = pairScale;
    this.volumeLotScale = volumeLotScale;
    this.volumeMultiplier = volumeMultiplier;
    this.fees = fees;
    this.fees_maker = fees_maker;
    this.leverage_buy = leverage_buy;
    this.leverage_sell = leverage_sell;
    this.feeVolumeCurrency = feeVolumeCurrency;
    this.marginCall = marginCall;
    this.marginStop = marginStop;
  }

  public String getAltName() {
    return altName;
  }

  public String getClassBase() {
    return classBase;
  }

  public String getBase() {
    return base;
  }

  public String getClassQuote() {
    return classQuote;
  }

  public String getQuote() {
    return quote;
  }

  public String getVolumeLotSize() {
    return volumeLotSize;
  }

  public int getPairScale() {
    return pairScale;
  }

  public int getVolumeLotScale() {
    return volumeLotScale;
  }

  public BigDecimal getVolumeMultiplier() {
    return volumeMultiplier;
  }

  public List<String> getLeverage_buy() {
    return leverage_buy;
  }

  public List<String> getLeverage_sell() {
    return leverage_sell;
  }

  public List<KrakenFee> getFees() {
    return fees;
  }

  public List<KrakenFee> getFees_maker() {
    return fees_maker;
  }

  public String getFeeVolumeCurrency() {
    return feeVolumeCurrency;
  }

  public BigDecimal getMarginCall() {
    return marginCall;
  }

  public BigDecimal getMarginStop() {
    return marginStop;
  }

  // NOTE(review): fees_maker is not included in the string below — confirm
  // whether that omission is intentional before relying on toString output.
  @Override
  public String toString() {
    return "KrakenAssetPairInfo [altName=" + altName + ", classBase=" + classBase + ", base=" + base + ", classQuote=" + classQuote + ", quote=" + quote + ", volumeLotSize=" + volumeLotSize + ", pairScale=" + pairScale + ", volumeLotScale=" + volumeLotScale + ", volumeMultiplier=" + volumeMultiplier + ", leverage_buy=" + leverage_buy + ", leverage_sell=" + leverage_sell + ", fees=" + fees + ", feeVolumeCurrency=" + feeVolumeCurrency + ", marginCall=" + marginCall + ", marginStop=" + marginStop + "]";
  }
}
mit
sk89q/CommandHelper
src/main/java/com/laytonsmith/core/functions/Trades.java
14831
package com.laytonsmith.core.functions;

import com.laytonsmith.PureUtilities.Version;
import com.laytonsmith.abstraction.MCEntity;
import com.laytonsmith.abstraction.MCItemStack;
import com.laytonsmith.abstraction.MCMerchant;
import com.laytonsmith.abstraction.MCMerchantRecipe;
import com.laytonsmith.abstraction.MCPlayer;
import com.laytonsmith.abstraction.StaticLayer;
import com.laytonsmith.abstraction.entities.MCTrader;
import com.laytonsmith.abstraction.enums.MCRecipeType;
import com.laytonsmith.annotations.api;
import com.laytonsmith.core.ArgumentValidation;
import com.laytonsmith.core.MSVersion;
import com.laytonsmith.core.ObjectGenerator;
import com.laytonsmith.core.Static;
import com.laytonsmith.core.constructs.CArray;
import com.laytonsmith.core.constructs.CBoolean;
import com.laytonsmith.core.constructs.CInt;
import com.laytonsmith.core.constructs.CNull;
import com.laytonsmith.core.constructs.CString;
import com.laytonsmith.core.constructs.CVoid;
import com.laytonsmith.core.constructs.Target;
import com.laytonsmith.core.environments.CommandHelperEnvironment;
import com.laytonsmith.core.environments.Environment;
import com.laytonsmith.core.exceptions.CRE.CREBadEntityException;
import com.laytonsmith.core.exceptions.CRE.CRECastException;
import com.laytonsmith.core.exceptions.CRE.CREFormatException;
import com.laytonsmith.core.exceptions.CRE.CREIllegalArgumentException;
import com.laytonsmith.core.exceptions.CRE.CRELengthException;
import com.laytonsmith.core.exceptions.CRE.CRENotFoundException;
import com.laytonsmith.core.exceptions.CRE.CREPlayerOfflineException;
import com.laytonsmith.core.exceptions.CRE.CRERangeException;
import com.laytonsmith.core.exceptions.CRE.CREThrowable;
import com.laytonsmith.core.exceptions.ConfigRuntimeException;
import com.laytonsmith.core.natives.interfaces.Mixed;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * MethodScript functions for managing merchants and their trades. A "specifier" argument accepted
 * by most of these functions is either the UUID of a trader entity or the string id of a virtual
 * merchant registered via create_virtual_merchant().
 */
public class Trades {

	public static String docs() {
		return "Functions related to the management of trades and merchants. A trade is a special kind of recipe"
				+ " accessed through the merchant interface. A merchant is a provider of trades,"
				+ " which may or may not be attached to a Villager or Wandering Trader.";
	}

	@api(environments = {CommandHelperEnvironment.class})
	public static class get_merchant_trades extends Recipes.recipeFunction {

		@Override
		public Class<? extends CREThrowable>[] thrown() {
			return new Class[]{CREIllegalArgumentException.class, CREFormatException.class,
					CREBadEntityException.class};
		}

		@Override
		public Mixed exec(Target t, Environment env, Mixed... args) throws ConfigRuntimeException {
			// Convert every recipe the merchant offers into a TradeArray.
			CArray ret = new CArray(t);
			for(MCMerchantRecipe mr : GetMerchant(args[0], t).getRecipes()) {
				ret.push(trade(mr, t), t);
			}
			return ret;
		}

		@Override
		public Version since() {
			return MSVersion.V3_3_3;
		}

		@Override
		public String getName() {
			return "get_merchant_trades";
		}

		@Override
		public Integer[] numArgs() {
			return new Integer[]{1};
		}

		@Override
		public String docs() {
			return "array {specifier} Returns a list of trades used by the specified merchant."
					+ " Specifier can be the UUID of an entity or a virtual merchant ID.";
		}
	}

	@api(environments = {CommandHelperEnvironment.class})
	public static class set_merchant_trades extends Recipes.recipeFunction {

		@Override
		public Class<? extends CREThrowable>[] thrown() {
			return new Class[]{CREIllegalArgumentException.class, CREFormatException.class,
					CRECastException.class, CREBadEntityException.class, CRENotFoundException.class};
		}

		@Override
		public Mixed exec(Target t, Environment env, Mixed... args) throws ConfigRuntimeException {
			MCMerchant merchant = GetMerchant(args[0], t);
			CArray trades = ArgumentValidation.getArray(args[1], t);
			List<MCMerchantRecipe> recipes = new ArrayList<>();
			if(trades.isAssociative()) {
				throw new CRECastException("Expected non-associative array for list of trade arrays.", t);
			}
			// Validate and convert each TradeArray before replacing the merchant's recipe list.
			for(Mixed trade : trades.asList()) {
				recipes.add(trade(trade, t));
			}
			merchant.setRecipes(recipes);
			return CVoid.VOID;
		}

		@Override
		public Version since() {
			return MSVersion.V3_3_3;
		}

		@Override
		public String getName() {
			return "set_merchant_trades";
		}

		@Override
		public Integer[] numArgs() {
			return new Integer[]{2};
		}

		@Override
		public String docs() {
			return "void {specifier, array} Sets the list of trades the specified merchant can use to the provided"
					+ " array of TradeArrays. The specifier can be the UUID of a physical entity or the ID"
					+ " of a user-created virtual merchant. ----"
					+ " TradeArrays are similar to RecipeArray format and contain the following keys:"
					+ " <pre>"
					+ " result: The result item array of the trade.\n"
					+ " ingredients: Items the player must provide. Must be 1 or 2 itemstacks.\n"
					+ " uses: (Optional) The number of times the recipe has been used. Defaults to 0."
					+ " Note: this number is not kept in sync between merchants and the master list.\n"
					+ " maxuses: (Optional) The maximum number of times this trade can be made before it is disabled."
					+ " Defaults to " + Integer.MAX_VALUE + ".\n"
					+ " hasxpreward: (Optional) Whether xp is given to the player for making this trade."
					+ " Defaults to true."
					+ " </pre>"
					+ " Example 1: Turns 9 stone into obsidian."
					+ " <pre>"
					+ "{\n"
					+ " result: {name: OBSIDIAN},\n"
					+ " ingredients: {{name: STONE, qty: 9}}\n"
					+ "}"
					+ "</pre>"
					+ " Example 2: Combines a diamond and dirt to make grass, but only once."
					+ "<pre>"
					+ "{\n"
					+ " result: {name: 'GRASS'},\n"
					+ " ingredients: {{name: 'DIRT'}, {name: 'DIAMOND'}}\n"
					+ " maxuses: 1\n"
					+ "}"
					+ "</pre>";
		}
	}

	@api(environments = {CommandHelperEnvironment.class})
	public static class get_virtual_merchants extends Recipes.recipeFunction {

		@Override
		public Class<? extends CREThrowable>[] thrown() {
			return new Class[0];
		}

		@Override
		public Mixed exec(Target t, Environment env, Mixed... args) throws ConfigRuntimeException {
			CArray ret = CArray.GetAssociativeArray(t);
			// Use an explicit iterator so stale (null) entries can be pruned while iterating.
			// Removing through the map itself inside a for-each loop would throw a
			// ConcurrentModificationException on HashMap.
			Iterator<Map.Entry<String, MCMerchant>> it = VIRTUAL_MERCHANTS.entrySet().iterator();
			while(it.hasNext()) {
				Map.Entry<String, MCMerchant> entry = it.next();
				if(entry.getValue() == null) {
					it.remove();
					continue;
				}
				ret.set(entry.getKey(), entry.getValue().getTitle(), t);
			}
			return ret;
		}

		@Override
		public Version since() {
			return MSVersion.V3_3_3;
		}

		@Override
		public String getName() {
			return "get_virtual_merchants";
		}

		@Override
		public Integer[] numArgs() {
			return new Integer[]{0};
		}

		@Override
		public String docs() {
			return "array {} Returns an array where the keys are currently registered merchant IDs and the values are"
					+ " the corresponding window titles of those merchants.";
		}
	}

	@api(environments = {CommandHelperEnvironment.class})
	public static class create_virtual_merchant extends Recipes.recipeFunction {

		@Override
		public Class<? extends CREThrowable>[] thrown() {
			return new Class[]{CREIllegalArgumentException.class};
		}

		@Override
		public Mixed exec(Target t, Environment env, Mixed... args) throws ConfigRuntimeException {
			if(VIRTUAL_MERCHANTS.containsKey(args[0].val())) {
				throw new CREIllegalArgumentException("There is already a merchant with id " + args[0].val(), t);
			} else {
				VIRTUAL_MERCHANTS.put(args[0].val(), Static.getServer().createMerchant(args[1].val()));
				return CVoid.VOID;
			}
		}

		@Override
		public Version since() {
			return MSVersion.V3_3_3;
		}

		@Override
		public String getName() {
			return "create_virtual_merchant";
		}

		@Override
		public Integer[] numArgs() {
			return new Integer[]{2};
		}

		@Override
		public String docs() {
			return "void {ID, title} Creates a merchant that can be traded with by players but is not attached to"
					+ " a physical entity. The ID given should not be a UUID. The title is the text that will display"
					+ " at the top of the window while a player is trading with it. Created merchants will persist"
					+ " across recompiles, but not across server restarts. An exception will be thrown if a merchant"
					+ " already exists using the given ID.";
		}
	}

	@api(environments = {CommandHelperEnvironment.class})
	public static class delete_virtual_merchant extends Recipes.recipeFunction {

		@Override
		public Class<? extends CREThrowable>[] thrown() {
			return new Class[0];
		}

		@Override
		public Mixed exec(Target t, Environment env, Mixed... args) throws ConfigRuntimeException {
			// Map#remove returns the previous value, so a non-null result means a merchant existed.
			return CBoolean.get(VIRTUAL_MERCHANTS.remove(args[0].val()) != null);
		}

		@Override
		public Version since() {
			return MSVersion.V3_3_3;
		}

		@Override
		public String getName() {
			return "delete_virtual_merchant";
		}

		@Override
		public Integer[] numArgs() {
			return new Integer[]{1};
		}

		@Override
		public String docs() {
			return "boolean {string} Deletes a virtual merchant if one by the given ID exists. Returns true if"
					+ " one was removed, or false if there was no match for the ID.";
		}
	}

	@api(environments = {CommandHelperEnvironment.class})
	public static class popen_trading extends Recipes.recipeFunction {

		@Override
		public Class<? extends CREThrowable>[] thrown() {
			return new Class[]{CREPlayerOfflineException.class, CRELengthException.class,
					CREIllegalArgumentException.class, CREBadEntityException.class, CREFormatException.class};
		}

		@Override
		public Mixed exec(Target t, Environment env, Mixed... args) throws ConfigRuntimeException {
			MCPlayer player;
			boolean force = false;
			if(args.length > 1) {
				player = Static.GetPlayer(args[1], t);
			} else {
				// No player argument given; default to the player running the script.
				player = Static.getPlayer(env, t);
			}
			if(args.length == 3) {
				force = ArgumentValidation.getBooleanish(args[2], t);
			}
			MCMerchant merchant = GetMerchant(args[0], t);
			// Only one player may trade with a merchant at a time; without force, do nothing
			// if it is busy.
			if(!force && merchant.isTrading()) {
				return CBoolean.FALSE;
			}
			return CBoolean.get(player.openMerchant(merchant, force) != null);
		}

		@Override
		public Version since() {
			return MSVersion.V3_3_3;
		}

		@Override
		public String getName() {
			return "popen_trading";
		}

		@Override
		public Integer[] numArgs() {
			return new Integer[]{1, 2, 3};
		}

		@Override
		public String docs() {
			return "boolean {specifier, [player], [force]} Opens a trading interface for the current player,"
					+ " or the one specified. Only one player can trade with a merchant at a time."
					+ " If the merchant is already being traded with, the function will do nothing."
					+ " When true, force will make the merchant trade with the player, closing the trade with"
					+ " the previous player if there was one. Function returns true if trading was successfully"
					+ " opened, and false if not.";
		}
	}

	@api(environments = {CommandHelperEnvironment.class})
	public static class merchant_trader extends Recipes.recipeFunction {

		@Override
		public Class<? extends CREThrowable>[] thrown() {
			// CREFormatException was previously listed twice in this array; once is sufficient.
			return new Class[]{CREBadEntityException.class, CREFormatException.class,
					CREIllegalArgumentException.class, CRELengthException.class};
		}

		@Override
		public Mixed exec(Target t, Environment environment, Mixed... args) throws ConfigRuntimeException {
			MCMerchant merchant = GetMerchant(args[0], t);
			return merchant.isTrading()
					? new CString(merchant.getTrader().getUniqueId().toString(), t)
					: CNull.NULL;
		}

		@Override
		public Version since() {
			return MSVersion.V3_3_3;
		}

		@Override
		public String getName() {
			return "merchant_trader";
		}

		@Override
		public Integer[] numArgs() {
			return new Integer[]{1};
		}

		@Override
		public String docs() {
			return "UUID {specifier} Returns the UUID of the user trading with the merchant, or null if no one is.";
		}
	}

	// Registry of user-created merchants, keyed by their string id. Survives recompiles but
	// not server restarts (see create_virtual_merchant docs).
	private static final HashMap<String, MCMerchant> VIRTUAL_MERCHANTS = new HashMap<>();

	/**
	 * Returns the merchant specified.
	 *
	 * @param specifier The string representing the merchant, whether entity UUID or virtual id.
	 * @param t the code target, for error reporting
	 * @return abstracted merchant
	 * @throws CREIllegalArgumentException if no merchant matches the specifier, or the entity
	 *         found is not capable of being a merchant
	 */
	private static MCMerchant GetMerchant(Mixed specifier, Target t) {
		MCMerchant merchant;
		// UUIDs are 36 chars with dashes or 32 without; anything else is treated as a virtual id.
		if(specifier.val().length() == 36 || specifier.val().length() == 32) {
			try {
				MCEntity entity = Static.getEntity(specifier, t);
				if(!(entity instanceof MCTrader)) {
					throw new CREIllegalArgumentException("The entity specified is not capable of being a merchant.", t);
				}
				return ((MCTrader) entity).asMerchant();
			} catch (CREFormatException iae) {
				// not a UUID; fall through to the virtual merchant lookup
			}
		}
		merchant = VIRTUAL_MERCHANTS.get(specifier.val());
		if(merchant == null) {
			throw new CREIllegalArgumentException("A merchant named \"" + specifier.val()
					+ "\" does not exist.", t);
		}
		return merchant;
	}

	/**
	 * Converts a TradeArray into an abstracted merchant recipe.
	 *
	 * @param c the TradeArray (associative array with at least "result" and "ingredients")
	 * @param t the code target, for error reporting
	 * @return the constructed merchant recipe
	 */
	private static MCMerchantRecipe trade(Mixed c, Target t) {
		CArray recipe = ArgumentValidation.getArray(c, t);
		MCItemStack result = ObjectGenerator.GetGenerator().item(recipe.get("result", t), t);
		MCMerchantRecipe mer = (MCMerchantRecipe) StaticLayer.GetNewRecipe(null, MCRecipeType.MERCHANT, result);
		if(recipe.containsKey("maxuses")) {
			mer.setMaxUses(ArgumentValidation.getInt32(recipe.get("maxuses", t), t));
		}
		if(recipe.containsKey("uses")) {
			mer.setUses(ArgumentValidation.getInt32(recipe.get("uses", t), t));
		}
		if(recipe.containsKey("hasxpreward")) {
			mer.setHasExperienceReward(ArgumentValidation.getBoolean(recipe.get("hasxpreward", t), t));
		}
		CArray ingredients = ArgumentValidation.getArray(recipe.get("ingredients", t), t);
		if(ingredients.inAssociativeMode()) {
			throw new CREFormatException("Ingredients array is invalid.", t);
		}
		// Merchant UIs only support one or two ingredient slots.
		if(ingredients.size() < 1 || ingredients.size() > 2) {
			throw new CRERangeException("Ingredients for merchants must contain 1 or 2 items, found "
					+ ingredients.size(), t);
		}
		List<MCItemStack> mcIngredients = new ArrayList<>();
		for(Mixed ingredient : ingredients.asList()) {
			mcIngredients.add(ObjectGenerator.GetGenerator().item(ingredient, t));
		}
		mer.setIngredients(mcIngredients);
		return mer;
	}

	/**
	 * Converts an abstracted merchant recipe into a TradeArray.
	 *
	 * @param r the merchant recipe, possibly null
	 * @param t the code target, for error reporting
	 * @return an associative TradeArray, or CNull if the recipe was null
	 */
	private static Mixed trade(MCMerchantRecipe r, Target t) {
		if(r == null) {
			return CNull.NULL;
		}
		CArray ret = CArray.GetAssociativeArray(t);
		ret.set("result", ObjectGenerator.GetGenerator().item(r.getResult(), t), t);
		CArray il = new CArray(t);
		for(MCItemStack i : r.getIngredients()) {
			il.push(ObjectGenerator.GetGenerator().item(i, t), t);
		}
		ret.set("ingredients", il, t);
		ret.set("maxuses", new CInt(r.getMaxUses(), t), t);
		ret.set("uses", new CInt(r.getUses(), t), t);
		ret.set("hasxpreward", CBoolean.get(r.hasExperienceReward()), t);
		return ret;
	}
}
mit
Johnson-Chou/test
opendaylight/config/config-persister-impl/src/test/java/org/opendaylight/controller/config/persist/impl/CapabilityStrippingConfigSnapshotHolderTest.java
1887
/*
 * Copyright (c) 2013 Cisco Systems, Inc. and others.  All rights reserved.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 which accompanies this distribution,
 * and is available at http://www.eclipse.org/legal/epl-v10.html
 */

package org.opendaylight.controller.config.persist.impl;

import static org.junit.Assert.assertEquals;

import com.google.common.base.Charsets;
import com.google.common.collect.Sets;
import com.google.common.io.Resources;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import org.junit.Test;
import org.opendaylight.controller.config.util.xml.XmlUtil;
import org.w3c.dom.Element;

public class CapabilityStrippingConfigSnapshotHolderTest {

    /**
     * Feeds a known snapshot plus the full capability set into the holder and
     * verifies that it keeps exactly the expected (stripped) capabilities and
     * reports the remainder as obsolete.
     */
    @Test
    public void testCapabilityStripping() throws Exception {
        final Set<String> allCapabilities = readLines("/capabilities-all.txt");
        final Set<String> expectedCapabilities = readLines("/capabilities-stripped.txt");

        final Element snapshotElement = XmlUtil.readXmlToElement(readToString("/snapshot.xml"));
        final CapabilityStrippingConfigSnapshotHolder tested =
                new CapabilityStrippingConfigSnapshotHolder(snapshotElement, allCapabilities);
        assertEquals(expectedCapabilities, tested.getCapabilities());

        // Everything not retained must be classified as obsolete.
        assertEquals(Sets.difference(allCapabilities, expectedCapabilities),
                tested.getObsoleteCapabilities());
    }

    // Loads a classpath resource as a set of its lines.
    private Set<String> readLines(final String resourceName) throws IOException {
        return new HashSet<>(Resources.readLines(getClass().getResource(resourceName), Charsets.UTF_8));
    }

    // Loads a classpath resource as a single string.
    private String readToString(final String resourceName) throws IOException {
        return Resources.toString(getClass().getResource(resourceName), Charsets.UTF_8);
    }
}
epl-1.0
kaloyan-raev/che
plugins/plugin-java/che-plugin-java-ext-lang-client/src/main/java/org/eclipse/che/ide/ext/java/client/editor/JavaFormatter.java
3459
/*******************************************************************************
 * Copyright (c) 2012-2016 Codenvy, S.A.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *   Codenvy, S.A. - initial API and implementation
 *******************************************************************************/
package org.eclipse.che.ide.ext.java.client.editor;

import com.google.inject.Inject;

import org.eclipse.che.api.promises.client.Operation;
import org.eclipse.che.api.promises.client.OperationException;
import org.eclipse.che.api.promises.client.Promise;
import org.eclipse.che.api.promises.client.PromiseError;
import org.eclipse.che.ide.api.editor.EditorAgent;
import org.eclipse.che.ide.api.editor.EditorPartPresenter;
import org.eclipse.che.ide.api.editor.texteditor.HandlesUndoRedo;
import org.eclipse.che.ide.api.editor.texteditor.UndoableEditor;
import org.eclipse.che.ide.ext.java.shared.dto.Change;
import org.eclipse.che.ide.api.editor.document.Document;
import org.eclipse.che.ide.api.editor.formatter.ContentFormatter;
import org.eclipse.che.ide.util.loging.Log;

import java.util.List;

/**
 * ContentFormatter implementation
 *
 * @author Roman Nikitenko
 */
public class JavaFormatter implements ContentFormatter {

    private JavaCodeAssistClient service;
    private EditorAgent          editorAgent;

    @Inject
    public JavaFormatter(JavaCodeAssistClient service, EditorAgent editorAgent) {
        this.service = service;
        this.editorAgent = editorAgent;
    }

    @Override
    public void format(final Document document) {
        // Format the current selection; with no usable selection, format the whole document.
        int start = document.getSelectedLinearRange().getStartOffset();
        int rangeLength = document.getSelectedLinearRange().getLength();
        if (rangeLength <= 0 || start < 0) {
            start = 0;
            rangeLength = document.getContentsCharCount();
        }
        service.format(start, rangeLength, document.getContents())
               .then(new Operation<List<Change>>() {
                   @Override
                   public void apply(List<Change> changes) throws OperationException {
                       applyChanges(changes, document);
                   }
               })
               .catchError(new Operation<PromiseError>() {
                   @Override
                   public void apply(PromiseError error) throws OperationException {
                       Log.error(getClass(), error.getCause());
                   }
               });
    }

    /**
     * Applies the server-computed edits to the document, grouping them into a
     * single compound change so one undo reverts the whole formatting pass.
     */
    private void applyChanges(List<Change> changes, Document document) {
        final EditorPartPresenter activeEditor = editorAgent.getActiveEditor();
        HandlesUndoRedo undoRedo = null;
        if (activeEditor instanceof UndoableEditor) {
            undoRedo = ((UndoableEditor)activeEditor).getUndoRedo();
        }
        try {
            if (undoRedo != null) {
                undoRedo.beginCompoundChange();
            }
            for (final Change change : changes) {
                document.replace(change.getOffset(), change.getLength(), change.getText());
            }
        } catch (final Exception e) {
            Log.error(getClass(), e);
        } finally {
            if (undoRedo != null) {
                undoRedo.endCompoundChange();
            }
        }
    }
}
epl-1.0
rex-xxx/mt6572_x201
external/apache-harmony/sql/src/test/java/org/apache/harmony/sql/tests/java/sql/TestHelper_Driver2.java
1715
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.harmony.sql.tests.java.sql;

import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.SQLException;

/**
 * Basic JDBC driver implementation to help with tests
 *
 */
public class TestHelper_Driver2 extends TestHelper_Driver1 {

    static {
        // Self-register with the DriverManager on class load, the same way
        // real JDBC drivers announce themselves.
        Driver instance = new TestHelper_Driver2();
        try {
            DriverManager.registerDriver(instance);
        } catch (SQLException e) {
            // Best-effort test helper; just report the failure.
            System.out.println("Failed to register driver!");
        }
    }

    protected TestHelper_Driver2() {
        super();
        // Distinguish this driver from Driver1 by its own URL prefix.
        baseURL = "jdbc:mikes2";
    }
}
gpl-2.0
SpoonLabs/astor
examples/math_50v2/src/test/java/org/apache/commons/math/ode/sampling/StepNormalizerOutputTest.java
1717
package org.apache.commons.math.ode.sampling;

/** Step normalizer output tests, for problems where the first and last points
 * are not fixed points. */
public class StepNormalizerOutputTest extends StepNormalizerOutputTestBase {
    // Integration interval start; deliberately NOT a multiple of the step size.
    @Override
    protected double getStart() {
        return 0.3;
    }

    // Integration interval end; also not a multiple of the step size.
    @Override
    protected double getEnd() {
        return 10.1;
    }

    // Expected output points in INCREMENT mode: steps of 0.5 measured from the
    // start point (0.3), finishing with the interval end (10.1).
    @Override
    protected double[] getExpInc() {
        return new double[] { 0.3, 0.8, 1.3, 1.8, 2.3, 2.8,
                              3.3, 3.8, 4.3, 4.8, 5.3, 5.8,
                              6.3, 6.8, 7.3, 7.8, 8.3, 8.8,
                              9.3, 9.8, 10.1 };
    }

    // Expected output points in INCREMENT mode with reversed (decreasing)
    // integration: steps of 0.5 measured from the end point (10.1).
    @Override
    protected double[] getExpIncRev() {
        return new double[] { 10.1, 9.6, 9.1, 8.6, 8.1, 7.6,
                              7.1, 6.6, 6.1, 5.6, 5.1, 4.6,
                              4.1, 3.6, 3.1, 2.6, 2.1, 1.6,
                              1.1, 0.6, 0.3 };
    }

    // Expected output points in MULTIPLES mode: whole multiples of 0.5,
    // bracketed by the non-multiple start (0.3) and end (10.1) points.
    @Override
    protected double[] getExpMul() {
        return new double[] { 0.3, 0.5, 1.0, 1.5, 2.0, 2.5,
                              3.0, 3.5, 4.0, 4.5, 5.0, 5.5,
                              6.0, 6.5, 7.0, 7.5, 8.0, 8.5,
                              9.0, 9.5, 10.0, 10.1 };
    }

    // Expected output points in MULTIPLES mode with reversed integration.
    @Override
    protected double[] getExpMulRev() {
        return new double[] { 10.1, 10.0, 9.5, 9.0, 8.5, 8.0,
                              7.5, 7.0, 6.5, 6.0, 5.5, 5.0,
                              4.5, 4.0, 3.5, 3.0, 2.5, 2.0,
                              1.5, 1.0, 0.5, 0.3 };
    }

    // Offsets into the expected arrays used by the base class for the various
    // bounds-mode combinations; see StepNormalizerOutputTestBase#getO for the
    // meaning of each pair. (Semantics defined in the base class, which is
    // outside this file — confirm there.)
    @Override
    protected int[][] getO() {
        return new int[][] {{1, 1}, {1, 1}, {0, 1}, {0, 1},
                            {1, 0}, {1, 0}, {0, 0}, {0, 0},
                            {1, 1}, {1, 1}, {0, 1}, {0, 1},
                            {1, 0}, {1, 0}, {0, 0}, {0, 0}};
    }
}
gpl-2.0
greghaskins/openjdk-jdk7u-jdk
test/java/lang/invoke/RicochetTest.java
33273
/* * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ /* @test * @summary unit tests for recursive method handles * @run junit/othervm/timeout=3600 -XX:+IgnoreUnrecognizedVMOptions -XX:-VerifyDependencies -DRicochetTest.MAX_ARITY=10 test.java.lang.invoke.RicochetTest */ /* * @ignore The following test creates an unreasonable number of adapters in -Xcomp mode (7049122) * @run junit/othervm -DRicochetTest.MAX_ARITY=255 test.java.lang.invoke.RicochetTest */ package test.java.lang.invoke; import java.lang.invoke.*; import java.util.*; import org.junit.*; import static java.lang.invoke.MethodType.*; import static java.lang.invoke.MethodHandles.*; import static org.junit.Assert.*; /** * * @author jrose */ public class RicochetTest { private static final Class<?> CLASS = RicochetTest.class; private static final int MAX_ARITY = Integer.getInteger(CLASS.getSimpleName()+".MAX_ARITY", 40); public static void main(String... av) throws Throwable { RicochetTest test = new RicochetTest(); if (av.length > 0) test.testOnly = Arrays.asList(av).toString(); if (REPEAT == 1 || test.testOnly != null) { test.testAll(); if (test.testOnlyTests == null) throw new RuntimeException("no matching test: "+test.testOnly); } else if (REPEAT == 0) { org.junit.runner.JUnitCore.runClasses(RicochetTest.class); } else { verbose(1, "REPEAT="+REPEAT); for (int i = 0; i < REPEAT; i++) { test.testRepetition = (i+1); verbose(0, "[#"+test.testRepetition+"]"); test.testAll(); } } } int testRepetition; public void testAll() throws Throwable { testNull(); testBoxInteger(); testFilterReturnValue(); testFilterObject(); testBoxLong(); testFilterInteger(); testIntSpreads(); testByteSpreads(); testLongSpreads(); testIntCollects(); testReturns(); testRecursion(); } @Test public void testNull() throws Throwable { if (testRepetition > (1+REPEAT/100)) return; // trivial test if (!startTest("testNull")) return; assertEquals(opI(37), opI.invokeWithArguments(37)); assertEqualFunction(opI, opI); } @Test public void testBoxInteger() throws Throwable { if 
(!startTest("testBoxInteger")) return; assertEqualFunction(opI, opI.asType(opL_I.type()).asType(opI.type())); } @Test public void testFilterReturnValue() throws Throwable { if (!startTest("testFilterReturnValue")) return; int[] ints = { 12, 23, 34, 45, 56, 67, 78, 89 }; Object res = list8ints.invokeExact(ints[0], ints[1], ints[2], ints[3], ints[4], ints[5], ints[6], ints[7]); assertEquals(Arrays.toString(ints), res.toString()); MethodHandle idreturn = filterReturnValue(list8ints, identity(Object.class)); res = idreturn.invokeExact(ints[0], ints[1], ints[2], ints[3], ints[4], ints[5], ints[6], ints[7]); assertEquals(Arrays.toString(ints), res.toString()); MethodHandle add0 = addL.bindTo(0); assertEqualFunction(filterReturnValue(opL2, add0), opL2); } @Test public void testFilterObject() throws Throwable { if (!startTest("testFilterObject")) return; MethodHandle add0 = addL.bindTo(0); assertEqualFunction(sequence(opL2, add0), opL2); int bump13 = -13; // value near 20 works as long as test values are near [-80..80] MethodHandle add13 = addL.bindTo(bump13); MethodHandle add13_0 = addL.bindTo(opI2(bump13, 0)); MethodHandle add13_1 = addL.bindTo(opI2(0, bump13)); assertEqualFunction(sequence(opL2, add13_0), filterArguments(opL2, 0, add13)); assertEqualFunction(sequence(opL2, add13_1), filterArguments(opL2, 1, add13)); System.out.println("[testFilterObject done]"); } @Test public void testBoxLong() throws Throwable { if (!startTest("testBoxLong")) return; assertEqualFunction(opJ, opJ.asType(opL_J.type()).asType(opJ.type())); } @Test public void testFilterInteger() throws Throwable { if (!startTest("testFilterInteger")) return; assertEqualFunction(opI, sequence(convI_L, opL_I)); } @Test public void testIntSpreads() throws Throwable { if (!startTest("testIntSpreads")) return; MethodHandle id = identity(int[].class); final int MAX = MAX_ARITY-2; // 253+1 would cause parameter overflow with 'this' added for (int nargs = 0; nargs <= MAX; nargs++) { if (nargs > 30 && nargs < 
MAX-20) nargs += 10; int[] args = new int[nargs]; for (int j = 0; j < args.length; j++) args[j] = j + 11; //System.out.println("testIntSpreads "+Arrays.toString(args)); int[] args1 = (int[]) id.invokeExact(args); assertArrayEquals(args, args1); MethodHandle coll = id.asCollector(int[].class, nargs); int[] args2 = args; switch (nargs) { case 0: args2 = (int[]) coll.invokeExact(); break; case 1: args2 = (int[]) coll.invokeExact(args[0]); break; case 2: args2 = (int[]) coll.invokeExact(args[0], args[1]); break; case 3: args2 = (int[]) coll.invokeExact(args[0], args[1], args[2]); break; case 4: args2 = (int[]) coll.invokeExact(args[0], args[1], args[2], args[3]); break; case 5: args2 = (int[]) coll.invokeExact(args[0], args[1], args[2], args[3], args[4]); break; } assertArrayEquals(args, args2); MethodHandle mh = coll.asSpreader(int[].class, nargs); int[] args3 = (int[]) mh.invokeExact(args); assertArrayEquals(args, args3); } } @Test public void testByteSpreads() throws Throwable { if (!startTest("testByteSpreads")) return; MethodHandle id = identity(byte[].class); final int MAX = MAX_ARITY-2; // 253+1 would cause parameter overflow with 'this' added for (int nargs = 0; nargs <= MAX; nargs++) { if (nargs > 30 && nargs < MAX-20) nargs += 10; byte[] args = new byte[nargs]; for (int j = 0; j < args.length; j++) args[j] = (byte)(j + 11); //System.out.println("testByteSpreads "+Arrays.toString(args)); byte[] args1 = (byte[]) id.invokeExact(args); assertArrayEquals(args, args1); MethodHandle coll = id.asCollector(byte[].class, nargs); byte[] args2 = args; switch (nargs) { case 0: args2 = (byte[]) coll.invokeExact(); break; case 1: args2 = (byte[]) coll.invokeExact(args[0]); break; case 2: args2 = (byte[]) coll.invokeExact(args[0], args[1]); break; case 3: args2 = (byte[]) coll.invokeExact(args[0], args[1], args[2]); break; case 4: args2 = (byte[]) coll.invokeExact(args[0], args[1], args[2], args[3]); break; case 5: args2 = (byte[]) coll.invokeExact(args[0], args[1], args[2], 
args[3], args[4]); break; } assertArrayEquals(args, args2); MethodHandle mh = coll.asSpreader(byte[].class, nargs); byte[] args3 = (byte[]) mh.invokeExact(args); assertArrayEquals(args, args3); } } @Test public void testLongSpreads() throws Throwable { if (!startTest("testLongSpreads")) return; MethodHandle id = identity(long[].class); final int MAX = (MAX_ARITY - 2) / 2; // 253/2+1 would cause parameter overflow with 'this' added for (int nargs = 0; nargs <= MAX; nargs++) { if (nargs > 30 && nargs < MAX-20) nargs += 10; long[] args = new long[nargs]; for (int j = 0; j < args.length; j++) args[j] = (long)(j + 11); //System.out.println("testLongSpreads "+Arrays.toString(args)); long[] args1 = (long[]) id.invokeExact(args); assertArrayEquals(args, args1); MethodHandle coll = id.asCollector(long[].class, nargs); long[] args2 = args; switch (nargs) { case 0: args2 = (long[]) coll.invokeExact(); break; case 1: args2 = (long[]) coll.invokeExact(args[0]); break; case 2: args2 = (long[]) coll.invokeExact(args[0], args[1]); break; case 3: args2 = (long[]) coll.invokeExact(args[0], args[1], args[2]); break; case 4: args2 = (long[]) coll.invokeExact(args[0], args[1], args[2], args[3]); break; case 5: args2 = (long[]) coll.invokeExact(args[0], args[1], args[2], args[3], args[4]); break; } assertArrayEquals(args, args2); MethodHandle mh = coll.asSpreader(long[].class, nargs); long[] args3 = (long[]) mh.invokeExact(args); assertArrayEquals(args, args3); } } @Test public void testIntCollects() throws Throwable { if (!startTest("testIntCollects")) return; for (MethodHandle lister : INT_LISTERS) { int outputs = lister.type().parameterCount(); for (int collects = 0; collects <= Math.min(outputs, INT_COLLECTORS.length-1); collects++) { int inputs = outputs - 1 + collects; if (inputs < 0) continue; for (int pos = 0; pos + collects <= inputs; pos++) { MethodHandle collector = INT_COLLECTORS[collects]; int[] args = new int[inputs]; int ap = 0, arg = 31; for (int i = 0; i < pos; i++) 
args[ap++] = arg++ + 0; for (int i = 0; i < collects; i++) args[ap++] = arg++ + 10; while (ap < args.length) args[ap++] = arg++ + 20; // calculate piecemeal: //System.out.println("testIntCollects "+Arrays.asList(lister, pos, collector)+" on "+Arrays.toString(args)); int[] collargs = Arrays.copyOfRange(args, pos, pos+collects); int coll = (int) collector.asSpreader(int[].class, collargs.length).invokeExact(collargs); int[] listargs = Arrays.copyOfRange(args, 0, outputs); System.arraycopy(args, pos+collects, listargs, pos+1, outputs - (pos+1)); listargs[pos] = coll; //System.out.println(" coll="+coll+" listargs="+Arrays.toString(listargs)); Object expect = lister.asSpreader(int[].class, listargs.length).invokeExact(listargs); //System.out.println(" expect="+expect); // now use the combined MH, and test the output: MethodHandle mh = collectArguments(lister, pos, int[].class, INT_COLLECTORS[collects]); if (mh == null) continue; // no infix collection, yet assert(mh.type().parameterCount() == inputs); Object observe = mh.asSpreader(int[].class, args.length).invokeExact(args); assertEquals(expect, observe); } } } } @Test public void testByteCollects() throws Throwable { if (!startTest("testByteCollects")) return; for (MethodHandle lister : BYTE_LISTERS) { int outputs = lister.type().parameterCount(); for (int collects = 0; collects <= Math.min(outputs, BYTE_COLLECTORS.length-1); collects++) { int inputs = outputs - 1 + collects; if (inputs < 0) continue; for (int pos = 0; pos + collects <= inputs; pos++) { MethodHandle collector = BYTE_COLLECTORS[collects]; byte[] args = new byte[inputs]; int ap = 0, arg = 31; for (int i = 0; i < pos; i++) args[ap++] = (byte)(arg++ + 0); for (int i = 0; i < collects; i++) args[ap++] = (byte)(arg++ + 10); while (ap < args.length) args[ap++] = (byte)(arg++ + 20); // calculate piecemeal: //System.out.println("testIntCollects "+Arrays.asList(lister, pos, collector)+" on "+Arrays.toString(args)); byte[] collargs = Arrays.copyOfRange(args, 
pos, pos+collects); byte coll = (byte) collector.asSpreader(byte[].class, collargs.length).invokeExact(collargs); byte[] listargs = Arrays.copyOfRange(args, 0, outputs); System.arraycopy(args, pos+collects, listargs, pos+1, outputs - (pos+1)); listargs[pos] = coll; //System.out.println(" coll="+coll+" listargs="+Arrays.toString(listargs)); Object expect = lister.asSpreader(byte[].class, listargs.length).invokeExact(listargs); //System.out.println(" expect="+expect); // now use the combined MH, and test the output: MethodHandle mh = collectArguments(lister, pos, byte[].class, BYTE_COLLECTORS[collects]); if (mh == null) continue; // no infix collection, yet assert(mh.type().parameterCount() == inputs); Object observe = mh.asSpreader(byte[].class, args.length).invokeExact(args); assertEquals(expect, observe); } } } } private static MethodHandle collectArguments(MethodHandle lister, int pos, Class<?> array, MethodHandle collector) { int collects = collector.type().parameterCount(); int outputs = lister.type().parameterCount(); if (pos == outputs - 1) return MethodHandles.filterArguments(lister, pos, collector.asSpreader(array, collects)) .asCollector(array, collects); //return MethodHandles.collectArguments(lister, pos, collector); //no such animal return null; } private static final Class<?>[] RETURN_TYPES = { Object.class, String.class, Integer.class, int.class, long.class, boolean.class, byte.class, char.class, short.class, float.class, double.class, void.class, }; @Test public void testReturns() throws Throwable { if (!startTest("testReturns")) return; // fault injection: int faultCount = 0; // total of 1296 tests faultCount = Integer.getInteger("testReturns.faultCount", 0); for (Class<?> ret : RETURN_TYPES) { // make a complicated identity function and pass something through it System.out.println(ret.getSimpleName()); Class<?> vret = (ret == void.class) ? 
Void.class : ret; MethodHandle id = // (vret)->ret identity(vret).asType(methodType(ret, vret)); final int LENGTH = 4; int[] index = {0}; Object vals = java.lang.reflect.Array.newInstance(vret, LENGTH); MethodHandle indexGetter = //()->int insertArguments(arrayElementGetter(index.getClass()), 0, index, 0); MethodHandle valSelector = // (int)->vret arrayElementGetter(vals.getClass()).bindTo(vals); MethodHandle valGetter = // ()->vret foldArguments(valSelector, indexGetter); if (ret != void.class) { for (int i = 0; i < LENGTH; i++) { Object val = (i + 50); if (ret == boolean.class) val = (i % 3 == 0); if (ret == String.class) val = "#"+i; if (ret == char.class) val = (char)('a'+i); if (ret == byte.class) val = (byte)~i; if (ret == short.class) val = (short)(1<<i); java.lang.reflect.Array.set(vals, i, val); } } for (int i = 0; i < LENGTH; i++) { Object val = java.lang.reflect.Array.get(vals, i); System.out.println(i+" => "+val); index[0] = i; if (--faultCount == 0) index[0] ^= 1; Object x = valGetter.invokeWithArguments(); assertEquals(val, x); // make a return-filter call: x = id(valGetter()) if (--faultCount == 0) index[0] ^= 1; x = filterReturnValue(valGetter, id).invokeWithArguments(); assertEquals(val, x); // make a filter call: x = id(*,valGetter(),*) for (int len = 1; len <= 4; len++) { for (int pos = 0; pos < len; pos++) { MethodHandle proj = id; // lambda(..., vret x,...){x} for (int j = 0; j < len; j++) { if (j == pos) continue; proj = dropArguments(proj, j, Object.class); } assert(proj.type().parameterCount() == len); // proj: (Object*, pos: vret, Object*)->ret assertEquals(vret, proj.type().parameterType(pos)); MethodHandle vgFilter = dropArguments(valGetter, 0, Object.class); if (--faultCount == 0) index[0] ^= 1; x = filterArguments(proj, pos, vgFilter).invokeWithArguments(new Object[len]); assertEquals(val, x); } } // make a fold call: for (int len = 0; len <= 4; len++) { for (int fold = 0; fold <= len; fold++) { MethodHandle proj = id; // lambda(ret x, 
...){x} if (ret == void.class) proj = constant(Object.class, null); int arg0 = (ret == void.class ? 0 : 1); for (int j = 0; j < len; j++) { proj = dropArguments(proj, arg0, Object.class); } assert(proj.type().parameterCount() == arg0 + len); // proj: (Object*, pos: vret, Object*)->ret if (arg0 != 0) assertEquals(vret, proj.type().parameterType(0)); MethodHandle vgFilter = valGetter.asType(methodType(ret)); for (int j = 0; j < fold; j++) { vgFilter = dropArguments(vgFilter, j, Object.class); } x = foldArguments(proj, vgFilter).invokeWithArguments(new Object[len]); if (--faultCount == 0) index[0] ^= 1; assertEquals(val, x); } } } } //System.out.println("faultCount="+faultCount); } @Test public void testRecursion() throws Throwable { if (!startTest("testRecursion")) return; final int LIMIT = 10; for (int i = 0; i < LIMIT; i++) { RFCB rfcb = new RFCB(i); Object x = "x", y = "y"; Object result = rfcb.recursiveFunction(x, y); verbose(1, result); } } /** Recursive Function Control Block */ private static class RFCB { java.util.Random random; final MethodHandle[] fns; int depth; @SuppressWarnings("LeakingThisInConstructor") RFCB(int seed) throws Throwable { this.random = new java.util.Random(seed); this.fns = new MethodHandle[Math.max(29, (1 << MAX_DEPTH-2)/3)]; java.util.Arrays.fill(fns, lookup().bind(this, "recursiveFunction", genericMethodType(2))); for (int i = 5; i < fns.length; i++) { switch (i % 4) { case 0: fns[i] = filterArguments(fns[i - 5], 0, insertArguments(fns[i - 4], 1, ".")); break; case 1: fns[i] = filterArguments(fns[i - 5], 1, insertArguments(fns[i - 3], 1, ".")); break; case 2: fns[i] = filterReturnValue(fns[i - 5], insertArguments(fns[i - 2], 1, ".")); break; } } } Object recursiveFunction(Object x, Object y) throws Throwable { depth++; try { final int ACTION_COUNT = 11; switch (random.nextInt(ACTION_COUNT)) { case 1: Throwable ex = new RuntimeException(); ex.fillInStackTrace(); if (VERBOSITY >= 2) ex.printStackTrace(System.out); x = "ST; " + x; break; 
case 2: System.gc(); x = "GC; " + x; break; } boolean isLeaf = (depth >= MAX_DEPTH); if (isLeaf) { return Arrays.asList(x, y).toString(); } return fns[random.nextInt(fns.length)].invokeExact(x, y); } finally { depth--; } } } private static MethodHandle sequence(MethodHandle mh1, MethodHandle... mhs) { MethodHandle res = mh1; for (MethodHandle mh2 : mhs) res = filterReturnValue(res, mh2); return res; } private static void assertEqualFunction(MethodHandle x, MethodHandle y) throws Throwable { assertEquals(x.type(), y.type()); //?? MethodType t = x.type(); if (t.parameterCount() == 0) { assertEqualFunctionAt(null, x, y); return; } Class<?> ptype = t.parameterType(0); if (ptype == long.class || ptype == Long.class) { for (long i = -10; i <= 10; i++) { assertEqualFunctionAt(i, x, y); } } else { for (int i = -10; i <= 10; i++) { assertEqualFunctionAt(i, x, y); } } } private static void assertEqualFunctionAt(Object v, MethodHandle x, MethodHandle y) throws Throwable { Object[] args = new Object[x.type().parameterCount()]; Arrays.fill(args, v); Object xval = invokeWithCatch(x, args); Object yval = invokeWithCatch(y, args); String msg = "ok"; if (!Objects.equals(xval, yval)) { msg = ("applying "+x+" & "+y+" to "+v); } assertEquals(msg, xval, yval); } private static Object invokeWithCatch(MethodHandle mh, Object... args) throws Throwable { try { return mh.invokeWithArguments(args); } catch (Throwable ex) { System.out.println("threw: "+mh+Arrays.asList(args)); ex.printStackTrace(System.out); return ex; } } private static final Lookup LOOKUP = lookup(); private static MethodHandle findStatic(String name, Class<?> rtype, Class<?>... 
ptypes) { try { return LOOKUP.findStatic(LOOKUP.lookupClass(), name, methodType(rtype, ptypes)); } catch (ReflectiveOperationException ex) { throw new RuntimeException(ex); } } private static MethodHandle findStatic(String name, Class<?> rtype, List<?> ptypes) { return findStatic(name, rtype, ptypes.toArray(new Class<?>[ptypes.size()])); } static int getProperty(String name, int dflt) { String qual = LOOKUP.lookupClass().getName(); String prop = System.getProperty(qual+"."+name); if (prop == null) prop = System.getProperty(name); if (prop == null) return dflt; return Integer.parseInt(prop); } private static int opI(int... xs) { stress(); int base = 100; int z = 0; for (int x : xs) { z = (z * base) + (x % base); } verbose("opI", xs.length, xs, z); return z; } private static int opI2(int x, int y) { return opI(x, y); } // x*100 + y%100 private static int opI3(int x, int y, int z) { return opI(x, y, z); } private static int opI4(int w, int x, int y, int z) { return opI(w, x, y, z); } private static int opI(int x) { return opI2(x, 37); } private static Object opI_L(int x) { return (Object) opI(x); } private static long opJ3(long x, long y, long z) { return (long) opI3((int)x, (int)y, (int)z); } private static long opJ2(long x, long y) { return (long) opI2((int)x, (int)y); } private static long opJ(long x) { return (long) opI((int)x); } private static Object opL2(Object x, Object y) { return (Object) opI2((int)x, (int)y); } private static Object opL(Object x) { return (Object) opI((int)x); } private static int opL2_I(Object x, Object y) { return opI2((int)x, (int)y); } private static int opL_I(Object x) { return opI((int)x); } private static long opL_J(Object x) { return (long) opI((int)x); } private static final MethodHandle opI, opI2, opI3, opI4, opI_L, opJ, opJ2, opJ3, opL2, opL, opL2_I, opL_I, opL_J; static { opI4 = findStatic("opI4", int.class, int.class, int.class, int.class, int.class); opI3 = findStatic("opI3", int.class, int.class, int.class, int.class); opI2 = 
findStatic("opI2", int.class, int.class, int.class); opI = findStatic("opI", int.class, int.class); opI_L = findStatic("opI_L", Object.class, int.class); opJ = findStatic("opJ", long.class, long.class); opJ2 = findStatic("opJ2", long.class, long.class, long.class); opJ3 = findStatic("opJ3", long.class, long.class, long.class, long.class); opL2 = findStatic("opL2", Object.class, Object.class, Object.class); opL = findStatic("opL", Object.class, Object.class); opL2_I = findStatic("opL2_I", int.class, Object.class, Object.class); opL_I = findStatic("opL_I", int.class, Object.class); opL_J = findStatic("opL_J", long.class, Object.class); } private static final MethodHandle[] INT_COLLECTORS = { constant(int.class, 42), opI, opI2, opI3, opI4 }; private static final MethodHandle[] BYTE_COLLECTORS = { constant(byte.class, (byte)42), i2b(opI), i2b(opI2), i2b(opI3), i2b(opI4) }; private static final MethodHandle[] LONG_COLLECTORS = { constant(long.class, 42), opJ, opJ2, opJ3 }; private static int addI(int x, int y) { stress(); return x+y; } private static Object addL(Object x, Object y) { return addI((int)x, (int)y); } private static final MethodHandle addI, addL; static { addI = findStatic("addI", int.class, int.class, int.class); addL = findStatic("addL", Object.class, Object.class, Object.class); } private static Object list8ints(int a, int b, int c, int d, int e, int f, int g, int h) { return Arrays.asList(a, b, c, d, e, f, g, h); } private static Object list8longs(long a, long b, long c, long d, long e, long f, long g, long h) { return Arrays.asList(a, b, c, d, e, f, g, h); } private static final MethodHandle list8ints = findStatic("list8ints", Object.class, Collections.nCopies(8, int.class)); private static final MethodHandle list8longs = findStatic("list8longs", Object.class, Collections.nCopies(8, long.class)); private static final MethodHandle[] INT_LISTERS, LONG_LISTERS, BYTE_LISTERS; static { int listerCount = list8ints.type().parameterCount() + 1; INT_LISTERS = 
new MethodHandle[listerCount]; LONG_LISTERS = new MethodHandle[listerCount]; BYTE_LISTERS = new MethodHandle[listerCount]; MethodHandle lister = list8ints; MethodHandle llister = list8longs; for (int i = listerCount - 1; ; i--) { INT_LISTERS[i] = lister; LONG_LISTERS[i] = llister; BYTE_LISTERS[i] = i2b(lister); if (i == 0) break; lister = insertArguments(lister, i-1, 0); llister = insertArguments(llister, i-1, 0L); } } private static MethodHandle i2b(MethodHandle mh) { return MethodHandles.explicitCastArguments(mh, subst(mh.type(), int.class, byte.class)); } private static MethodType subst(MethodType mt, Class<?> from, Class<?> to) { for (int i = 0; i < mt.parameterCount(); i++) { if (mt.parameterType(i) == from) mt = mt.changeParameterType(i, to); } if (mt.returnType() == from) mt = mt.changeReturnType(to); return mt; } private static Object convI_L(int x) { stress(); return (Object) x; } private static int convL_I(Object x) { stress(); return (int) x; } private static Object convJ_L(long x) { stress(); return (Object) x; } private static long convL_J(Object x) { stress(); return (long) x; } private static int convJ_I(long x) { stress(); return (int) x; } private static long convI_J(int x) { stress(); return (long) x; } private static final MethodHandle convI_L, convL_I, convJ_L, convL_J, convJ_I, convI_J; static { convI_L = findStatic("convI_L", Object.class, int.class); convL_I = findStatic("convL_I", int.class, Object.class); convJ_L = findStatic("convJ_L", Object.class, long.class); convL_J = findStatic("convL_J", long.class, Object.class); convJ_I = findStatic("convJ_I", int.class, long.class); convI_J = findStatic("convI_J", long.class, int.class); } // stress modes: private static final int MAX_DEPTH = getProperty("MAX_DEPTH", 5); private static final int REPEAT = getProperty("REPEAT", 0); private static final int STRESS = getProperty("STRESS", 0); private static /*v*/ int STRESS_COUNT; private static final Object[] SINK = new Object[4]; private static void 
stress() { if (STRESS <= 0) return; int count = STRESS + (STRESS_COUNT++ & 0x1); // non-constant value for (int i = 0; i < count; i++) { SINK[i % SINK.length] = new Object[STRESS + i % (SINK.length + 1)]; } } // verbosity: private static final int VERBOSITY = getProperty("VERBOSITY", 0) + (REPEAT == 0 ? 0 : -1); private static void verbose(Object a, Object b, Object c, Object d) { if (VERBOSITY <= 0) return; verbose(1, a, b, c, d); } private static void verbose(Object a, Object b, Object c) { if (VERBOSITY <= 0) return; verbose(1, a, b, c); } private static void verbose(int level, Object a, Object... bcd) { if (level > VERBOSITY) return; String m = a.toString(); if (bcd != null && bcd.length > 0) { List<Object> l = new ArrayList<>(bcd.length); for (Object x : bcd) { if (x instanceof Object[]) x = Arrays.asList((Object[])x); if (x instanceof int[]) x = Arrays.toString((int[])x); if (x instanceof long[]) x = Arrays.toString((long[])x); l.add(x); } m = m+Arrays.asList(bcd); } System.out.println(m); } String testOnly; String testOnlyTests; private boolean startTest(String name) { if (testOnly != null && !testOnly.contains(name)) return false; verbose(0, "["+name+"]"); testOnlyTests = (testOnlyTests == null) ? name : testOnlyTests+" "+name; return true; } }
gpl-2.0
Acidburn0zzz/org.numixproject.hermes
hermes/src/main/java/org/numixproject/hermes/slides/FirstSlide.java
524
package org.numixproject.hermes.slides; import android.os.Bundle; import android.support.v4.app.Fragment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.TextView; import org.numixproject.hermes.R; public class FirstSlide extends Fragment { @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View v = inflater.inflate(R.layout.intro, container, false); return v; } }
gpl-2.0
marcoaandrade/DC-UFSCar-ES2-201601--Grupo-da-Rapeize-v1.2
src/main/java/net/sf/jabref/logic/cleanup/FormatterCleanup.java
1827
/* Copyright (C) 2003-2015 JabRef contributors.
   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software Foundation,
   Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package net.sf.jabref.logic.cleanup;

import java.util.ArrayList;
import java.util.List;

import net.sf.jabref.logic.FieldChange;
import net.sf.jabref.logic.formatter.Formatter;
import net.sf.jabref.model.entry.BibEntry;

/**
 * Cleanup job that applies a single {@link Formatter} to every field of an entry,
 * recording a {@link FieldChange} for each field whose value actually changed.
 */
public class FormatterCleanup implements CleanupJob {

    private final Formatter formatter;

    public FormatterCleanup(Formatter formatter) {
        this.formatter = formatter;
    }

    /**
     * Runs the formatter over all fields of {@code entry}.
     *
     * @param entry the entry to clean up (mutated in place when a field changes)
     * @return one {@link FieldChange} per field the formatter modified; empty if nothing changed
     */
    @Override
    public List<FieldChange> cleanup(BibEntry entry) {
        List<FieldChange> changes = new ArrayList<>();

        for (String fieldName : entry.getFieldNames()) {
            String before = entry.getField(fieldName);
            String after = formatter.format(before);

            // Skip untouched fields so no spurious FieldChange is recorded.
            if (before.equals(after)) {
                continue;
            }

            entry.setField(fieldName, after);
            changes.add(new FieldChange(entry, fieldName, before, after));
        }

        return changes;
    }
}
gpl-2.0
jthaine/checker-framework
eclipse/checker-framework-eclipse-plugin/src/org/checkerframework/eclipse/actions/CheckerHandler.java
3466
package org.checkerframework.eclipse.actions;

import org.checkerframework.checker.nullness.qual.Nullable;
import org.eclipse.core.commands.AbstractHandler;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.ITreeSelection;
import org.eclipse.ui.handlers.HandlerUtil;

import java.util.ArrayList;
import java.util.List;

/**
 * Base class for Checker Framework Eclipse command handlers. Provides helpers
 * to turn the current workbench selection into the set of Java elements
 * (restricted to a single project) that a checker run should operate on.
 */
public abstract class CheckerHandler extends AbstractHandler {

    /** Unused placeholder; always throws. */
    protected IJavaElement element(final ISelection selection) {
        throw new UnsupportedOperationException("This is only used by unused code at the moment!");
    }

    /**
     * Takes the current selection. Determines the target project using the first element in the selection.
     * Return each element in the selection (top-level or nested) that is part of the target project
     * @param selection Current user selection
     * @return A list of JavaElements that are in the same project and in the given selection
     */
    protected /*@Nullable*/ List<IJavaElement> selectionToJavaElements(final ISelection selection) {
        //ITreeSelection
        final List<IJavaElement> elements;
        if (selection instanceof IStructuredSelection) {
            IStructuredSelection structuredSelection = (IStructuredSelection) selection;
            elements = toSingleProjectElements(structuredSelection.toArray());
        } else {
            // Non-structured selections (e.g. text selections) carry no Java elements.
            elements = new ArrayList<IJavaElement>();
        }
        return elements;
    }

    /**
     * Filters {@code elements} down to those belonging to a single target project.
     * The first element seen fixes the target project; if the project itself is
     * part of the selection, only the project is returned.
     *
     * @param elements raw selection contents
     * @return Java elements of the target project found in the selection
     */
    protected List<IJavaElement> toSingleProjectElements(final Object[] elements) {
        final List<IJavaElement> javaElements = new ArrayList<IJavaElement>();
        IJavaProject project = null;
        for (final Object element : elements) {
            if (element instanceof IJavaProject) {
                // If the project is in the selection return only it
                if (project == null || element.equals(project)) {
                    javaElements.clear();
                    javaElements.add((IJavaProject) element);
                    break;
                }
            } else if (element instanceof IJavaElement) {
                final IJavaElement jEl = (IJavaElement) element;
                if (project == null) {
                    // First element fixes the target project.
                    project = jEl.getJavaProject();
                    javaElements.add(jEl);
                // Only add those elements that are in project.
                // FIX: was `!projectsEqual(...)`, which (combined with the broken
                // comparison in projectsEqual below) added every element regardless
                // of its project, contradicting this method's contract.
                } else if (projectsEqual(jEl.getJavaProject(), project)) {
                    javaElements.add(jEl);
                }
            }
        }
        return javaElements;
    }

    //TODO: There must be a better way to do this
    protected boolean projectsEqual(final IJavaProject project1, final IJavaProject project2) {
        // FIX: previously compared project1's IPath against the IJavaProject itself
        // (`getPath().equals(project2)`), which is always false. Compare paths to paths.
        return project1.getPath().equals(project2.getPath());
    }

    /**
     * Retrieve the selection from the menu or otherwise when called from
     * elsewhere
     *
     * @param event
     * @return the current selection
     */
    protected ISelection getSelection(ExecutionEvent event) {
        ISelection selection = HandlerUtil.getActiveMenuSelection(event);
        /* use the current selection when not called from popup menu */
        if (selection == null)
            selection = HandlerUtil.getCurrentSelection(event);
        return selection;
    }
}
gpl-2.0
Taichi-SHINDO/jdk9-jdk
test/java/awt/TextArea/ScrollbarIntersectionTest/ScrollbarIntersectionTest.java
5748
/*
 * Copyright (c) 2006, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/*
  @test
  @bug 6429174
  @summary Tests that mouse click at the area of intersection of two
           scrollbars for text area doesn't trigger any scrolling
  @author artem.ananiev@sun.com: area=awt.text
  @library ../../../../lib/testlibrary
  @build jdk.testlibrary.OSInfo
  @run main ScrollbarIntersectionTest
*/

import java.awt.*;
import java.awt.event.*;

import jdk.testlibrary.OSInfo;

public class ScrollbarIntersectionTest {

    // Builds a TextArea with both scrollbars, clicks the corner where the
    // two scrollbars intersect, then drag-selects the first line. If the
    // corner click scrolled the area, the selection will not start at "0".
    private static void init() {
        Frame f = new Frame("F");
        f.setBounds(100, 100, 480, 360);
        f.setLayout(new BorderLayout());

        TextArea ta = new TextArea(null, 8, 24, TextArea.SCROLLBARS_BOTH);
        // append several lines to show vertical scrollbar
        for (int i = 0; i < 128; i++) {
            ta.append("" + i + "\n");
        }
        // and some characters into the last line for horizontal scrollbar
        for (int i = 0; i < 128; i++) {
            ta.append("" + i);
        }
        ta.append("\n");
        f.add(ta);
        f.setVisible(true);

        Robot r = null;
        try {
            r = new Robot();
            r.setAutoDelay(20);
        } catch (Exception z) {
            // Robot creation can fail (e.g. headless); report and abort the test.
            z.printStackTrace(System.err);
            fail(z.getMessage());
            return;
        }
        r.waitForIdle();

        // Reset the caret so the view starts at the top-left corner.
        ta.setCaretPosition(0);
        r.waitForIdle();

        Point p = ta.getLocationOnScreen();
        Dimension d = ta.getSize();

        // Measure the font height to aim the mouse at the middle of line 1;
        // 8 is a fallback if metrics are unavailable.
        int fh = 8;
        Graphics g = ta.getGraphics();
        try {
            FontMetrics fm = g.getFontMetrics();
            fh = fm.getHeight();
        } finally {
            if (g != null) {
                g.dispose();
            }
        };

        // Click at the bottom-right corner: the intersection of the two
        // scrollbars. This must not trigger any scrolling (bug 6429174).
        r.mouseMove(p.x + d.width - 2, p.y + d.height - 2);
        r.mousePress(InputEvent.BUTTON1_MASK);
        r.mouseRelease(InputEvent.BUTTON1_MASK);
        r.waitForIdle();

        // select 1st line in the text area
        r.mouseMove(p.x + 2, p.y + 2 + fh / 2);
        r.mousePress(InputEvent.BUTTON1_MASK);
        for (int i = 0; i < d.width - 4; i += 4) {
            r.mouseMove(p.x + 2 + i, p.y + 2 + fh / 2);
        }
        r.mouseRelease(InputEvent.BUTTON1_MASK);
        r.waitForIdle();

        String sel = ta.getSelectedText();
        System.err.println("Selected text: " + sel);
        // First visible line is "0" only if no scrolling happened.
        if ((sel == null) || !sel.startsWith("0")) {
            fail("Test FAILED: TextArea is scrolled");
            return;
        }
        pass();
    }

    // Harness state: pass/fail is communicated back to main() either by
    // throwing TestPassedException (same thread) or by interrupting mainThread.
    private static boolean theTestPassed = false;
    private static boolean testGeneratedInterrupt = false;
    private static String failureMessage = "";

    private static Thread mainThread = null;

    private static int sleepTime = 300000;

    public static void main( String args[] ) throws InterruptedException {
        if (OSInfo.getOSType() == OSInfo.OSType.MACOSX) {
            // On OS X, this area is commandeered by the system,
            // and frame would be wildly resized
            System.out.println("Not for OS X");
            return;
        }

        mainThread = Thread.currentThread();
        try {
            init();
        } catch( TestPassedException e ) {
            // Test passed synchronously on this thread; nothing more to do.
            return;
        }

        // Wait for an asynchronous pass()/fail() from another thread; a plain
        // timeout (no interrupt) means the test hung.
        try {
            Thread.sleep( sleepTime );
            throw new RuntimeException( "Timed out after " + sleepTime/1000 + " seconds" );
        } catch (InterruptedException e) {
            if( ! testGeneratedInterrupt ) throw e;
            testGeneratedInterrupt = false;
            if ( theTestPassed == false ) {
                throw new RuntimeException( failureMessage );
            }
        }
    }

    public static synchronized void setTimeoutTo( int seconds ) {
        sleepTime = seconds * 1000;
    }

    // Marks the test as passed; throws on the main thread, interrupts otherwise.
    public static synchronized void pass() {
        if ( mainThread == Thread.currentThread() ) {
            theTestPassed = true;
            throw new TestPassedException();
        }
        theTestPassed = true;
        testGeneratedInterrupt = true;
        mainThread.interrupt();
    }

    public static synchronized void fail() {
        fail( "it just plain failed! :-)" );
    }

    // Marks the test as failed; throws on the main thread, interrupts otherwise.
    public static synchronized void fail( String whyFailed ) {
        if ( mainThread == Thread.currentThread() ) {
            throw new RuntimeException( whyFailed );
        }
        theTestPassed = false;
        testGeneratedInterrupt = true;
        failureMessage = whyFailed;
        mainThread.interrupt();
    }
}

// Control-flow marker: thrown by pass() when called on the main thread so
// main() can distinguish "passed" from a genuine exception.
class TestPassedException extends RuntimeException {
}
gpl-2.0
aigamo/primecloud-controller
auto-project/auto-process/src/main/java/jp/primecloud/auto/component/prjserver/PrjserverConstants.java
1214
/* * Copyright 2014 by SCSK Corporation. * * This file is part of PrimeCloud Controller(TM). * * PrimeCloud Controller(TM) is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 2 of the License, or * (at your option) any later version. * * PrimeCloud Controller(TM) is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with PrimeCloud Controller(TM). If not, see <http://www.gnu.org/licenses/>. */ package jp.primecloud.auto.component.prjserver; /** * <p> * TODO: クラスコメントを記述 * </p> * */ public class PrjserverConstants { public static final String COMPONENT_TYPE_NAME = "prjserver"; public static final String AWS_VOLUME_DEVICE = "/dev/sdf"; public static final Integer VMWARE_DISK_SCSI_ID = 15; //VCloudのUnitNoは動的に変更される為、PCC側では設定しない }
gpl-2.0
ggasoftware/gga-selenium-framework
jdi-uitest-webtests/src/test/java/com/epam/jdi/uitests/testing/unittests/tests/complex/table/base/TableTestBase.java
505
package com.epam.jdi.uitests.testing.unittests.tests.complex.table.base;

import com.epam.jdi.uitests.testing.unittests.InitTests;
import com.epam.web.matcher.verify.Verify;
import org.testng.annotations.BeforeMethod;

import java.io.IOException;
import java.lang.reflect.Method;

/**
 * Common base class for the table test suites.
 *
 * <p>Before every test method it invokes {@link Verify#getFails()} —
 * presumably to flush/reset the soft-assert failure buffer so one test
 * does not inherit failures recorded by a previous one (NOTE(review):
 * confirm against the {@code Verify} implementation).</p>
 *
 * Created by Natalia_Grebenshchikova on 12/17/2015.
 */
public class TableTestBase extends InitTests {

    /**
     * TestNG per-method setup hook.
     *
     * @param method the test method about to run (supplied by TestNG)
     * @throws IOException propagated from the verification framework
     */
    @BeforeMethod
    protected void before(Method method) throws IOException {
        Verify.getFails();
    }
}
gpl-3.0
10xEngineer/My-Wallet-Android
src/com/google/common/collect/Sets.java
58742
/* * Copyright (C) 2007 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.annotations.Beta; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.Collections2.FilteredCollection; import com.google.common.math.IntMath; import java.io.IOException; import java.io.ObjectInputStream; import java.io.Serializable; import java.util.AbstractSet; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.NavigableSet; import java.util.NoSuchElementException; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import java.util.concurrent.CopyOnWriteArraySet; import javax.annotation.Nullable; /** * Static utility methods pertaining to {@link Set} instances. Also see this * class's counterparts {@link Lists} and {@link Maps}. 
* * <p>See the Guava User Guide article on <a href= * "http://code.google.com/p/guava-libraries/wiki/CollectionUtilitiesExplained#Sets"> * {@code Sets}</a>. * * @author Kevin Bourrillion * @author Jared Levy * @author Chris Povirk * @since 2.0 (imported from Google Collections Library) */ @GwtCompatible(emulated = true) public final class Sets { private Sets() {} /** * {@link AbstractSet} substitute without the potentially-quadratic * {@code removeAll} implementation. */ abstract static class ImprovedAbstractSet<E> extends AbstractSet<E> { @Override public boolean removeAll(Collection<?> c) { return removeAllImpl(this, c); } @Override public boolean retainAll(Collection<?> c) { return super.retainAll(checkNotNull(c)); // GWT compatibility } } /** * Returns an immutable set instance containing the given enum elements. * Internally, the returned set will be backed by an {@link EnumSet}. * * <p>The iteration order of the returned set follows the enum's iteration * order, not the order in which the elements are provided to the method. * * @param anElement one of the elements the set should contain * @param otherElements the rest of the elements the set should contain * @return an immutable set containing those elements, minus duplicates */ // http://code.google.com/p/google-web-toolkit/issues/detail?id=3028 @GwtCompatible(serializable = true) public static <E extends Enum<E>> ImmutableSet<E> immutableEnumSet( E anElement, E... otherElements) { return new ImmutableEnumSet<E>(EnumSet.of(anElement, otherElements)); } /** * Returns an immutable set instance containing the given enum elements. * Internally, the returned set will be backed by an {@link EnumSet}. * * <p>The iteration order of the returned set follows the enum's iteration * order, not the order in which the elements appear in the given collection. 
* * @param elements the elements, all of the same {@code enum} type, that the * set should contain * @return an immutable set containing those elements, minus duplicates */ // http://code.google.com/p/google-web-toolkit/issues/detail?id=3028 @GwtCompatible(serializable = true) public static <E extends Enum<E>> ImmutableSet<E> immutableEnumSet( Iterable<E> elements) { Iterator<E> iterator = elements.iterator(); if (!iterator.hasNext()) { return ImmutableSet.of(); } if (elements instanceof EnumSet) { EnumSet<E> enumSetClone = EnumSet.copyOf((EnumSet<E>) elements); return new ImmutableEnumSet<E>(enumSetClone); } E first = iterator.next(); EnumSet<E> set = EnumSet.of(first); while (iterator.hasNext()) { set.add(iterator.next()); } return new ImmutableEnumSet<E>(set); } /** * Returns a new {@code EnumSet} instance containing the given elements. * Unlike {@link EnumSet#copyOf(Collection)}, this method does not produce an * exception on an empty collection, and it may be called on any iterable, not * just a {@code Collection}. */ public static <E extends Enum<E>> EnumSet<E> newEnumSet(Iterable<E> iterable, Class<E> elementType) { /* * TODO(cpovirk): noneOf() and addAll() will both throw * NullPointerExceptions when appropriate. However, NullPointerTester will * fail on this method because it passes in Class.class instead of an enum * type. This means that, when iterable is null but elementType is not, * noneOf() will throw a ClassCastException before addAll() has a chance to * throw a NullPointerException. NullPointerTester considers this a failure. * Ideally the test would be fixed, but it would require a special case for * Class<E> where E extends Enum. Until that happens (if ever), leave * checkNotNull() here. For now, contemplate the irony that checking * elementType, the problem argument, is harmful, while checking iterable, * the innocent bystander, is effective. 
*/ checkNotNull(iterable); EnumSet<E> set = EnumSet.noneOf(elementType); Iterables.addAll(set, iterable); return set; } // HashSet /** * Creates a <i>mutable</i>, empty {@code HashSet} instance. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableSet#of()} instead. * * <p><b>Note:</b> if {@code E} is an {@link Enum} type, use {@link * EnumSet#noneOf} instead. * * @return a new, empty {@code HashSet} */ public static <E> HashSet<E> newHashSet() { return new HashSet<E>(); } /** * Creates a <i>mutable</i> {@code HashSet} instance containing the given * elements in unspecified order. * * <p><b>Note:</b> if mutability is not required and the elements are * non-null, use an overload of {@link ImmutableSet#of()} (for varargs) or * {@link ImmutableSet#copyOf(Object[])} (for an array) instead. * * <p><b>Note:</b> if {@code E} is an {@link Enum} type, use {@link * EnumSet#of(Enum, Enum[])} instead. * * @param elements the elements that the set should contain * @return a new {@code HashSet} containing those elements (minus duplicates) */ public static <E> HashSet<E> newHashSet(E... elements) { HashSet<E> set = newHashSetWithExpectedSize(elements.length); Collections.addAll(set, elements); return set; } /** * Creates a {@code HashSet} instance, with a high enough "initial capacity" * that it <i>should</i> hold {@code expectedSize} elements without growth. * This behavior cannot be broadly guaranteed, but it is observed to be true * for OpenJDK 1.6. It also can't be guaranteed that the method isn't * inadvertently <i>oversizing</i> the returned set. 
* * @param expectedSize the number of elements you expect to add to the * returned set * @return a new, empty {@code HashSet} with enough capacity to hold {@code * expectedSize} elements without resizing * @throws IllegalArgumentException if {@code expectedSize} is negative */ public static <E> HashSet<E> newHashSetWithExpectedSize(int expectedSize) { return new HashSet<E>(Maps.capacity(expectedSize)); } /** * Creates a <i>mutable</i> {@code HashSet} instance containing the given * elements in unspecified order. * * <p><b>Note:</b> if mutability is not required and the elements are * non-null, use {@link ImmutableSet#copyOf(Iterable)} instead. * * <p><b>Note:</b> if {@code E} is an {@link Enum} type, use * {@link #newEnumSet(Iterable, Class)} instead. * * @param elements the elements that the set should contain * @return a new {@code HashSet} containing those elements (minus duplicates) */ public static <E> HashSet<E> newHashSet(Iterable<? extends E> elements) { return (elements instanceof Collection) ? new HashSet<E>(Collections2.cast(elements)) : newHashSet(elements.iterator()); } /** * Creates a <i>mutable</i> {@code HashSet} instance containing the given * elements in unspecified order. * * <p><b>Note:</b> if mutability is not required and the elements are * non-null, use {@link ImmutableSet#copyOf(Iterable)} instead. * * <p><b>Note:</b> if {@code E} is an {@link Enum} type, you should create an * {@link EnumSet} instead. * * @param elements the elements that the set should contain * @return a new {@code HashSet} containing those elements (minus duplicates) */ public static <E> HashSet<E> newHashSet(Iterator<? extends E> elements) { HashSet<E> set = newHashSet(); while (elements.hasNext()) { set.add(elements.next()); } return set; } // LinkedHashSet /** * Creates a <i>mutable</i>, empty {@code LinkedHashSet} instance. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableSet#of()} instead. 
* * @return a new, empty {@code LinkedHashSet} */ public static <E> LinkedHashSet<E> newLinkedHashSet() { return new LinkedHashSet<E>(); } /** * Creates a {@code LinkedHashSet} instance, with a high enough "initial * capacity" that it <i>should</i> hold {@code expectedSize} elements without * growth. This behavior cannot be broadly guaranteed, but it is observed to * be true for OpenJDK 1.6. It also can't be guaranteed that the method isn't * inadvertently <i>oversizing</i> the returned set. * * @param expectedSize the number of elements you expect to add to the * returned set * @return a new, empty {@code LinkedHashSet} with enough capacity to hold * {@code expectedSize} elements without resizing * @throws IllegalArgumentException if {@code expectedSize} is negative * @since 11.0 */ public static <E> LinkedHashSet<E> newLinkedHashSetWithExpectedSize( int expectedSize) { return new LinkedHashSet<E>(Maps.capacity(expectedSize)); } /** * Creates a <i>mutable</i> {@code LinkedHashSet} instance containing the * given elements in order. * * <p><b>Note:</b> if mutability is not required and the elements are * non-null, use {@link ImmutableSet#copyOf(Iterable)} instead. * * @param elements the elements that the set should contain, in order * @return a new {@code LinkedHashSet} containing those elements (minus * duplicates) */ public static <E> LinkedHashSet<E> newLinkedHashSet( Iterable<? extends E> elements) { if (elements instanceof Collection) { return new LinkedHashSet<E>(Collections2.cast(elements)); } LinkedHashSet<E> set = newLinkedHashSet(); for (E element : elements) { set.add(element); } return set; } // TreeSet /** * Creates a <i>mutable</i>, empty {@code TreeSet} instance sorted by the * natural sort ordering of its elements. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableSortedSet#of()} instead. 
* * @return a new, empty {@code TreeSet} */ public static <E extends Comparable> TreeSet<E> newTreeSet() { return new TreeSet<E>(); } /** * Creates a <i>mutable</i> {@code TreeSet} instance containing the given * elements sorted by their natural ordering. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableSortedSet#copyOf(Iterable)} instead. * * <p><b>Note:</b> If {@code elements} is a {@code SortedSet} with an explicit * comparator, this method has different behavior than * {@link TreeSet#TreeSet(SortedSet)}, which returns a {@code TreeSet} with * that comparator. * * @param elements the elements that the set should contain * @return a new {@code TreeSet} containing those elements (minus duplicates) */ public static <E extends Comparable> TreeSet<E> newTreeSet( Iterable<? extends E> elements) { TreeSet<E> set = newTreeSet(); for (E element : elements) { set.add(element); } return set; } /** * Creates a <i>mutable</i>, empty {@code TreeSet} instance with the given * comparator. * * <p><b>Note:</b> if mutability is not required, use {@code * ImmutableSortedSet.orderedBy(comparator).build()} instead. * * @param comparator the comparator to use to sort the set * @return a new, empty {@code TreeSet} * @throws NullPointerException if {@code comparator} is null */ public static <E> TreeSet<E> newTreeSet(Comparator<? super E> comparator) { return new TreeSet<E>(checkNotNull(comparator)); } /** * Creates an empty {@code Set} that uses identity to determine equality. It * compares object references, instead of calling {@code equals}, to * determine whether a provided object matches an element in the set. For * example, {@code contains} returns {@code false} when passed an object that * equals a set member, but isn't the same instance. This behavior is similar * to the way {@code IdentityHashMap} handles key lookups. 
* * @since 8.0 */ public static <E> Set<E> newIdentityHashSet() { return Sets.newSetFromMap(Maps.<E, Boolean>newIdentityHashMap()); } /** * Creates an empty {@code CopyOnWriteArraySet} instance. * * <p><b>Note:</b> if you need an immutable empty {@link Set}, use * {@link Collections#emptySet} instead. * * @return a new, empty {@code CopyOnWriteArraySet} * @since 12.0 */ @GwtIncompatible("CopyOnWriteArraySet") public static <E> CopyOnWriteArraySet<E> newCopyOnWriteArraySet() { return new CopyOnWriteArraySet<E>(); } /** * Creates a {@code CopyOnWriteArraySet} instance containing the given elements. * * @param elements the elements that the set should contain, in order * @return a new {@code CopyOnWriteArraySet} containing those elements * @since 12.0 */ @GwtIncompatible("CopyOnWriteArraySet") public static <E> CopyOnWriteArraySet<E> newCopyOnWriteArraySet( Iterable<? extends E> elements) { // We copy elements to an ArrayList first, rather than incurring the // quadratic cost of adding them to the COWAS directly. Collection<? extends E> elementsCollection = (elements instanceof Collection) ? Collections2.cast(elements) : Lists.newArrayList(elements); return new CopyOnWriteArraySet<E>(elementsCollection); } /** * Creates an {@code EnumSet} consisting of all enum values that are not in * the specified collection. If the collection is an {@link EnumSet}, this * method has the same behavior as {@link EnumSet#complementOf}. Otherwise, * the specified collection must contain at least one element, in order to * determine the element type. If the collection could be empty, use * {@link #complementOf(Collection, Class)} instead of this method. 
* * @param collection the collection whose complement should be stored in the * enum set * @return a new, modifiable {@code EnumSet} containing all values of the enum * that aren't present in the given collection * @throws IllegalArgumentException if {@code collection} is not an * {@code EnumSet} instance and contains no elements */ public static <E extends Enum<E>> EnumSet<E> complementOf( Collection<E> collection) { if (collection instanceof EnumSet) { return EnumSet.complementOf((EnumSet<E>) collection); } checkArgument(!collection.isEmpty(), "collection is empty; use the other version of this method"); Class<E> type = collection.iterator().next().getDeclaringClass(); return makeComplementByHand(collection, type); } /** * Creates an {@code EnumSet} consisting of all enum values that are not in * the specified collection. This is equivalent to * {@link EnumSet#complementOf}, but can act on any input collection, as long * as the elements are of enum type. * * @param collection the collection whose complement should be stored in the * {@code EnumSet} * @param type the type of the elements in the set * @return a new, modifiable {@code EnumSet} initially containing all the * values of the enum not present in the given collection */ public static <E extends Enum<E>> EnumSet<E> complementOf( Collection<E> collection, Class<E> type) { checkNotNull(collection); return (collection instanceof EnumSet) ? EnumSet.complementOf((EnumSet<E>) collection) : makeComplementByHand(collection, type); } private static <E extends Enum<E>> EnumSet<E> makeComplementByHand( Collection<E> collection, Class<E> type) { EnumSet<E> result = EnumSet.allOf(type); result.removeAll(collection); return result; } /* * Regarding newSetForMap() and SetFromMap: * * Written by Doug Lea with assistance from members of JCP JSR-166 * Expert Group and released to the public domain, as explained at * http://creativecommons.org/licenses/publicdomain */ /** * Returns a set backed by the specified map. 
The resulting set displays * the same ordering, concurrency, and performance characteristics as the * backing map. In essence, this factory method provides a {@link Set} * implementation corresponding to any {@link Map} implementation. There is no * need to use this method on a {@link Map} implementation that already has a * corresponding {@link Set} implementation (such as {@link java.util.HashMap} * or {@link java.util.TreeMap}). * * <p>Each method invocation on the set returned by this method results in * exactly one method invocation on the backing map or its {@code keySet} * view, with one exception. The {@code addAll} method is implemented as a * sequence of {@code put} invocations on the backing map. * * <p>The specified map must be empty at the time this method is invoked, * and should not be accessed directly after this method returns. These * conditions are ensured if the map is created empty, passed directly * to this method, and no reference to the map is retained, as illustrated * in the following code fragment: <pre> {@code * * Set<Object> identityHashSet = Sets.newSetFromMap( * new IdentityHashMap<Object, Boolean>());}</pre> * * This method has the same behavior as the JDK 6 method * {@code Collections.newSetFromMap()}. The returned set is serializable if * the backing map is. 
* * @param map the backing map * @return the set backed by the map * @throws IllegalArgumentException if {@code map} is not empty */ public static <E> Set<E> newSetFromMap(Map<E, Boolean> map) { return new SetFromMap<E>(map); } private static class SetFromMap<E> extends AbstractSet<E> implements Set<E>, Serializable { private final Map<E, Boolean> m; // The backing map private transient Set<E> s; // Its keySet SetFromMap(Map<E, Boolean> map) { checkArgument(map.isEmpty(), "Map is non-empty"); m = map; s = map.keySet(); } @Override public void clear() { m.clear(); } @Override public int size() { return m.size(); } @Override public boolean isEmpty() { return m.isEmpty(); } @Override public boolean contains(Object o) { return m.containsKey(o); } @Override public boolean remove(Object o) { return m.remove(o) != null; } @Override public boolean add(E e) { return m.put(e, Boolean.TRUE) == null; } @Override public Iterator<E> iterator() { return s.iterator(); } @Override public Object[] toArray() { return s.toArray(); } @Override public <T> T[] toArray(T[] a) { return s.toArray(a); } @Override public String toString() { return s.toString(); } @Override public int hashCode() { return s.hashCode(); } @Override public boolean equals(@Nullable Object object) { return this == object || this.s.equals(object); } @Override public boolean containsAll(Collection<?> c) { return s.containsAll(c); } @Override public boolean removeAll(Collection<?> c) { return s.removeAll(c); } @Override public boolean retainAll(Collection<?> c) { return s.retainAll(c); } // addAll is the only inherited implementation @GwtIncompatible("not needed in emulated source") private static final long serialVersionUID = 0; @GwtIncompatible("java.io.ObjectInputStream") private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { stream.defaultReadObject(); s = m.keySet(); } } /** * An unmodifiable view of a set which may be backed by other sets; this view * will change as the 
backing sets do. Contains methods to copy the data into * a new set which will then remain stable. There is usually no reason to * retain a reference of type {@code SetView}; typically, you either use it * as a plain {@link Set}, or immediately invoke {@link #immutableCopy} or * {@link #copyInto} and forget the {@code SetView} itself. * * @since 2.0 (imported from Google Collections Library) */ public abstract static class SetView<E> extends AbstractSet<E> { private SetView() {} // no subclasses but our own /** * Returns an immutable copy of the current contents of this set view. * Does not support null elements. * * <p><b>Warning:</b> this may have unexpected results if a backing set of * this view uses a nonstandard notion of equivalence, for example if it is * a {@link TreeSet} using a comparator that is inconsistent with {@link * Object#equals(Object)}. */ public ImmutableSet<E> immutableCopy() { return ImmutableSet.copyOf(this); } /** * Copies the current contents of this set view into an existing set. This * method has equivalent behavior to {@code set.addAll(this)}, assuming that * all the sets involved are based on the same notion of equivalence. * * @return a reference to {@code set}, for convenience */ // Note: S should logically extend Set<? super E> but can't due to either // some javac bug or some weirdness in the spec, not sure which. public <S extends Set<E>> S copyInto(S set) { set.addAll(this); return set; } } /** * Returns an unmodifiable <b>view</b> of the union of two sets. The returned * set contains all elements that are contained in either backing set. * Iterating over the returned set iterates first over all the elements of * {@code set1}, then over each element of {@code set2}, in order, that is not * contained in {@code set1}. 
* * <p>Results are undefined if {@code set1} and {@code set2} are sets based on * different equivalence relations (as {@link HashSet}, {@link TreeSet}, and * the {@link Map#keySet} of an {@code IdentityHashMap} all are). * * <p><b>Note:</b> The returned view performs better when {@code set1} is the * smaller of the two sets. If you have reason to believe one of your sets * will generally be smaller than the other, pass it first. * * <p>Further, note that the current implementation is not suitable for nested * {@code union} views, i.e. the following should be avoided when in a loop: * {@code union = Sets.union(union, anotherSet);}, since iterating over the resulting * set has a cubic complexity to the depth of the nesting. */ public static <E> SetView<E> union( final Set<? extends E> set1, final Set<? extends E> set2) { checkNotNull(set1, "set1"); checkNotNull(set2, "set2"); final Set<? extends E> set2minus1 = difference(set2, set1); return new SetView<E>() { @Override public int size() { return set1.size() + set2minus1.size(); } @Override public boolean isEmpty() { return set1.isEmpty() && set2.isEmpty(); } @Override public Iterator<E> iterator() { return Iterators.unmodifiableIterator( Iterators.concat(set1.iterator(), set2minus1.iterator())); } @Override public boolean contains(Object object) { return set1.contains(object) || set2.contains(object); } @Override public <S extends Set<E>> S copyInto(S set) { set.addAll(set1); set.addAll(set2); return set; } @Override public ImmutableSet<E> immutableCopy() { return new ImmutableSet.Builder<E>() .addAll(set1).addAll(set2).build(); } }; } /** * Returns an unmodifiable <b>view</b> of the intersection of two sets. The * returned set contains all elements that are contained by both backing sets. * The iteration order of the returned set matches that of {@code set1}. 
* * <p>Results are undefined if {@code set1} and {@code set2} are sets based * on different equivalence relations (as {@code HashSet}, {@code TreeSet}, * and the keySet of an {@code IdentityHashMap} all are). * * <p><b>Note:</b> The returned view performs slightly better when {@code * set1} is the smaller of the two sets. If you have reason to believe one of * your sets will generally be smaller than the other, pass it first. * Unfortunately, since this method sets the generic type of the returned set * based on the type of the first set passed, this could in rare cases force * you to make a cast, for example: <pre> {@code * * Set<Object> aFewBadObjects = ... * Set<String> manyBadStrings = ... * * // impossible for a non-String to be in the intersection * SuppressWarnings("unchecked") * Set<String> badStrings = (Set) Sets.intersection( * aFewBadObjects, manyBadStrings);}</pre> * * This is unfortunate, but should come up only very rarely. */ public static <E> SetView<E> intersection( final Set<E> set1, final Set<?> set2) { checkNotNull(set1, "set1"); checkNotNull(set2, "set2"); final Predicate<Object> inSet2 = Predicates.in(set2); return new SetView<E>() { @Override public Iterator<E> iterator() { return Iterators.filter(set1.iterator(), inSet2); } @Override public int size() { return Iterators.size(iterator()); } @Override public boolean isEmpty() { return !iterator().hasNext(); } @Override public boolean contains(Object object) { return set1.contains(object) && set2.contains(object); } @Override public boolean containsAll(Collection<?> collection) { return set1.containsAll(collection) && set2.containsAll(collection); } }; } /** * Returns an unmodifiable <b>view</b> of the difference of two sets. The * returned set contains all elements that are contained by {@code set1} and * not contained by {@code set2}. {@code set2} may also contain elements not * present in {@code set1}; these are simply ignored. 
The iteration order of * the returned set matches that of {@code set1}. * * <p>Results are undefined if {@code set1} and {@code set2} are sets based * on different equivalence relations (as {@code HashSet}, {@code TreeSet}, * and the keySet of an {@code IdentityHashMap} all are). */ public static <E> SetView<E> difference( final Set<E> set1, final Set<?> set2) { checkNotNull(set1, "set1"); checkNotNull(set2, "set2"); final Predicate<Object> notInSet2 = Predicates.not(Predicates.in(set2)); return new SetView<E>() { @Override public Iterator<E> iterator() { return Iterators.filter(set1.iterator(), notInSet2); } @Override public int size() { return Iterators.size(iterator()); } @Override public boolean isEmpty() { return set2.containsAll(set1); } @Override public boolean contains(Object element) { return set1.contains(element) && !set2.contains(element); } }; } /** * Returns an unmodifiable <b>view</b> of the symmetric difference of two * sets. The returned set contains all elements that are contained in either * {@code set1} or {@code set2} but not in both. The iteration order of the * returned set is undefined. * * <p>Results are undefined if {@code set1} and {@code set2} are sets based * on different equivalence relations (as {@code HashSet}, {@code TreeSet}, * and the keySet of an {@code IdentityHashMap} all are). * * @since 3.0 */ public static <E> SetView<E> symmetricDifference( Set<? extends E> set1, Set<? extends E> set2) { checkNotNull(set1, "set1"); checkNotNull(set2, "set2"); // TODO(kevinb): Replace this with a more efficient implementation return difference(union(set1, set2), intersection(set1, set2)); } /** * Returns the elements of {@code unfiltered} that satisfy a predicate. The * returned set is a live view of {@code unfiltered}; changes to one affect * the other. * * <p>The resulting set's iterator does not support {@code remove()}, but all * other set methods are supported. 
When given an element that doesn't satisfy * the predicate, the set's {@code add()} and {@code addAll()} methods throw * an {@link IllegalArgumentException}. When methods such as {@code * removeAll()} and {@code clear()} are called on the filtered set, only * elements that satisfy the filter will be removed from the underlying set. * * <p>The returned set isn't threadsafe or serializable, even if * {@code unfiltered} is. * * <p>Many of the filtered set's methods, such as {@code size()}, iterate * across every element in the underlying set and determine which elements * satisfy the filter. When a live view is <i>not</i> needed, it may be faster * to copy {@code Iterables.filter(unfiltered, predicate)} and use the copy. * * <p><b>Warning:</b> {@code predicate} must be <i>consistent with equals</i>, * as documented at {@link Predicate#apply}. Do not provide a predicate such * as {@code Predicates.instanceOf(ArrayList.class)}, which is inconsistent * with equals. (See {@link Iterables#filter(Iterable, Class)} for related * functionality.) */ // TODO(kevinb): how to omit that last sentence when building GWT javadoc? public static <E> Set<E> filter( Set<E> unfiltered, Predicate<? super E> predicate) { if (unfiltered instanceof SortedSet) { return filter((SortedSet<E>) unfiltered, predicate); } if (unfiltered instanceof FilteredSet) { // Support clear(), removeAll(), and retainAll() when filtering a filtered // collection. FilteredSet<E> filtered = (FilteredSet<E>) unfiltered; Predicate<E> combinedPredicate = Predicates.<E>and(filtered.predicate, predicate); return new FilteredSet<E>( (Set<E>) filtered.unfiltered, combinedPredicate); } return new FilteredSet<E>( checkNotNull(unfiltered), checkNotNull(predicate)); } private static class FilteredSet<E> extends FilteredCollection<E> implements Set<E> { FilteredSet(Set<E> unfiltered, Predicate<? 
super E> predicate) { super(unfiltered, predicate); } @Override public boolean equals(@Nullable Object object) { return equalsImpl(this, object); } @Override public int hashCode() { return hashCodeImpl(this); } } /** * Returns the elements of a {@code SortedSet}, {@code unfiltered}, that * satisfy a predicate. The returned set is a live view of {@code unfiltered}; * changes to one affect the other. * * <p>The resulting set's iterator does not support {@code remove()}, but all * other set methods are supported. When given an element that doesn't satisfy * the predicate, the set's {@code add()} and {@code addAll()} methods throw * an {@link IllegalArgumentException}. When methods such as * {@code removeAll()} and {@code clear()} are called on the filtered set, * only elements that satisfy the filter will be removed from the underlying * set. * * <p>The returned set isn't threadsafe or serializable, even if * {@code unfiltered} is. * * <p>Many of the filtered set's methods, such as {@code size()}, iterate across * every element in the underlying set and determine which elements satisfy * the filter. When a live view is <i>not</i> needed, it may be faster to copy * {@code Iterables.filter(unfiltered, predicate)} and use the copy. * * <p><b>Warning:</b> {@code predicate} must be <i>consistent with equals</i>, * as documented at {@link Predicate#apply}. Do not provide a predicate such as * {@code Predicates.instanceOf(ArrayList.class)}, which is inconsistent with * equals. (See {@link Iterables#filter(Iterable, Class)} for related * functionality.) * * @since 11.0 */ @SuppressWarnings("unchecked") public static <E> SortedSet<E> filter( SortedSet<E> unfiltered, Predicate<? super E> predicate) { if (unfiltered instanceof FilteredSet) { // Support clear(), removeAll(), and retainAll() when filtering a filtered // collection. 
FilteredSet<E> filtered = (FilteredSet<E>) unfiltered; Predicate<E> combinedPredicate = Predicates.<E>and(filtered.predicate, predicate); return new FilteredSortedSet<E>( (SortedSet<E>) filtered.unfiltered, combinedPredicate); } return new FilteredSortedSet<E>( checkNotNull(unfiltered), checkNotNull(predicate)); } private static class FilteredSortedSet<E> extends FilteredCollection<E> implements SortedSet<E> { FilteredSortedSet(SortedSet<E> unfiltered, Predicate<? super E> predicate) { super(unfiltered, predicate); } @Override public boolean equals(@Nullable Object object) { return equalsImpl(this, object); } @Override public int hashCode() { return hashCodeImpl(this); } @Override public Comparator<? super E> comparator() { return ((SortedSet<E>) unfiltered).comparator(); } @Override public SortedSet<E> subSet(E fromElement, E toElement) { return new FilteredSortedSet<E>(((SortedSet<E>) unfiltered).subSet(fromElement, toElement), predicate); } @Override public SortedSet<E> headSet(E toElement) { return new FilteredSortedSet<E>(((SortedSet<E>) unfiltered).headSet(toElement), predicate); } @Override public SortedSet<E> tailSet(E fromElement) { return new FilteredSortedSet<E>(((SortedSet<E>) unfiltered).tailSet(fromElement), predicate); } @Override public E first() { return iterator().next(); } @Override public E last() { SortedSet<E> sortedUnfiltered = (SortedSet<E>) unfiltered; while (true) { E element = sortedUnfiltered.last(); if (predicate.apply(element)) { return element; } sortedUnfiltered = sortedUnfiltered.headSet(element); } } } /** * Returns every possible list that can be formed by choosing one element * from each of the given sets in order; the "n-ary * <a href="http://en.wikipedia.org/wiki/Cartesian_product">Cartesian * product</a>" of the sets. 
For example: <pre> {@code * * Sets.cartesianProduct(ImmutableList.of( * ImmutableSet.of(1, 2), * ImmutableSet.of("A", "B", "C")))}</pre> * * returns a set containing six lists: * * <ul> * <li>{@code ImmutableList.of(1, "A")} * <li>{@code ImmutableList.of(1, "B")} * <li>{@code ImmutableList.of(1, "C")} * <li>{@code ImmutableList.of(2, "A")} * <li>{@code ImmutableList.of(2, "B")} * <li>{@code ImmutableList.of(2, "C")} * </ul> * * The order in which these lists are returned is not guaranteed, however the * position of an element inside a tuple always corresponds to the position of * the set from which it came in the input list. Note that if any input set is * empty, the Cartesian product will also be empty. If no sets at all are * provided (an empty list), the resulting Cartesian product has one element, * an empty list (counter-intuitive, but mathematically consistent). * * <p><i>Performance notes:</i> while the cartesian product of sets of size * {@code m, n, p} is a set of size {@code m x n x p}, its actual memory * consumption is much smaller. When the cartesian set is constructed, the * input sets are merely copied. Only as the resulting set is iterated are the * individual lists created, and these are not retained after iteration. * * @param sets the sets to choose elements from, in the order that * the elements chosen from those sets should appear in the resulting * lists * @param <B> any common base class shared by all axes (often just {@link * Object}) * @return the Cartesian product, as an immutable set containing immutable * lists * @throws NullPointerException if {@code sets}, any one of the {@code sets}, * or any element of a provided set is null * @since 2.0 */ public static <B> Set<List<B>> cartesianProduct( List<? extends Set<? extends B>> sets) { for (Set<? 
extends B> set : sets) { if (set.isEmpty()) { return ImmutableSet.of(); } } CartesianSet<B> cartesianSet = new CartesianSet<B>(sets); return cartesianSet; } /** * Returns every possible list that can be formed by choosing one element * from each of the given sets in order; the "n-ary * <a href="http://en.wikipedia.org/wiki/Cartesian_product">Cartesian * product</a>" of the sets. For example: <pre> {@code * * Sets.cartesianProduct( * ImmutableSet.of(1, 2), * ImmutableSet.of("A", "B", "C"))}</pre> * * returns a set containing six lists: * * <ul> * <li>{@code ImmutableList.of(1, "A")} * <li>{@code ImmutableList.of(1, "B")} * <li>{@code ImmutableList.of(1, "C")} * <li>{@code ImmutableList.of(2, "A")} * <li>{@code ImmutableList.of(2, "B")} * <li>{@code ImmutableList.of(2, "C")} * </ul> * * The order in which these lists are returned is not guaranteed, however the * position of an element inside a tuple always corresponds to the position of * the set from which it came in the input list. Note that if any input set is * empty, the Cartesian product will also be empty. If no sets at all are * provided, the resulting Cartesian product has one element, an empty list * (counter-intuitive, but mathematically consistent). * * <p><i>Performance notes:</i> while the cartesian product of sets of size * {@code m, n, p} is a set of size {@code m x n x p}, its actual memory * consumption is much smaller. When the cartesian set is constructed, the * input sets are merely copied. Only as the resulting set is iterated are the * individual lists created, and these are not retained after iteration. 
* * @param sets the sets to choose elements from, in the order that * the elements chosen from those sets should appear in the resulting * lists * @param <B> any common base class shared by all axes (often just {@link * Object}) * @return the Cartesian product, as an immutable set containing immutable * lists * @throws NullPointerException if {@code sets}, any one of the {@code sets}, * or any element of a provided set is null * @since 2.0 */ public static <B> Set<List<B>> cartesianProduct( Set<? extends B>... sets) { return cartesianProduct(Arrays.asList(sets)); } private static class CartesianSet<B> extends AbstractSet<List<B>> { final ImmutableList<Axis> axes; final int size; CartesianSet(List<? extends Set<? extends B>> sets) { int dividend = 1; ImmutableList.Builder<Axis> builder = ImmutableList.builder(); try { for (Set<? extends B> set : sets) { Axis axis = new Axis(set, dividend); builder.add(axis); dividend = IntMath.checkedMultiply(dividend, axis.size()); } } catch (ArithmeticException overflow) { throw new IllegalArgumentException("cartesian product too big"); } this.axes = builder.build(); size = dividend; } @Override public int size() { return size; } @Override public UnmodifiableIterator<List<B>> iterator() { return new AbstractIndexedListIterator<List<B>>(size) { @Override protected List<B> get(int index) { Object[] tuple = new Object[axes.size()]; for (int i = 0 ; i < tuple.length; i++) { tuple[i] = axes.get(i).getForIndex(index); } @SuppressWarnings("unchecked") // only B's are put in here List<B> result = (ImmutableList<B>) ImmutableList.copyOf(tuple); return result; } }; } @Override public boolean contains(Object element) { if (!(element instanceof List)) { return false; } List<?> tuple = (List<?>) element; int dimensions = axes.size(); if (tuple.size() != dimensions) { return false; } for (int i = 0; i < dimensions; i++) { if (!axes.get(i).contains(tuple.get(i))) { return false; } } return true; } @Override public boolean equals(@Nullable Object 
object) { // Warning: this is broken if size() == 0, so it is critical that we // substitute an empty ImmutableSet to the user in place of this if (object instanceof CartesianSet) { CartesianSet<?> that = (CartesianSet<?>) object; return this.axes.equals(that.axes); } return super.equals(object); } @Override public int hashCode() { // Warning: this is broken if size() == 0, so it is critical that we // substitute an empty ImmutableSet to the user in place of this // It's a weird formula, but tests prove it works. int adjust = size - 1; for (int i = 0; i < axes.size(); i++) { adjust *= 31; } return axes.hashCode() + adjust; } private class Axis { final ImmutableSet<? extends B> choices; final ImmutableList<? extends B> choicesList; final int dividend; Axis(Set<? extends B> set, int dividend) { choices = ImmutableSet.copyOf(set); choicesList = choices.asList(); this.dividend = dividend; } int size() { return choices.size(); } B getForIndex(int index) { return choicesList.get(index / dividend % size()); } boolean contains(Object target) { return choices.contains(target); } @Override public boolean equals(Object obj) { if (obj instanceof CartesianSet.Axis) { CartesianSet.Axis that = (CartesianSet.Axis) obj; return this.choices.equals(that.choices); // dividends must be equal or we wouldn't have gotten this far } return false; } @Override public int hashCode() { // Because Axis instances are not exposed, we can // opportunistically choose whatever bizarre formula happens // to make CartesianSet.hashCode() as simple as possible. return size / choices.size() * choices.hashCode(); } } } /** * Returns the set of all possible subsets of {@code set}. For example, * {@code powerSet(ImmutableSet.of(1, 2))} returns the set {@code {{}, * {1}, {2}, {1, 2}}}. * * <p>Elements appear in these subsets in the same iteration order as they * appeared in the input set. The order in which these subsets appear in the * outer set is undefined. 
Note that the power set of the empty set is not the * empty set, but a one-element set containing the empty set. * * <p>The returned set and its constituent sets use {@code equals} to decide * whether two elements are identical, even if the input set uses a different * concept of equivalence. * * <p><i>Performance notes:</i> while the power set of a set with size {@code * n} is of size {@code 2^n}, its memory usage is only {@code O(n)}. When the * power set is constructed, the input set is merely copied. Only as the * power set is iterated are the individual subsets created, and these subsets * themselves occupy only a few bytes of memory regardless of their size. * * @param set the set of elements to construct a power set from * @return the power set, as an immutable set of immutable sets * @throws IllegalArgumentException if {@code set} has more than 30 unique * elements (causing the power set size to exceed the {@code int} range) * @throws NullPointerException if {@code set} is or contains {@code null} * @see <a href="http://en.wikipedia.org/wiki/Power_set">Power set article at * Wikipedia</a> * @since 4.0 */ @GwtCompatible(serializable = false) public static <E> Set<Set<E>> powerSet(Set<E> set) { ImmutableSet<E> input = ImmutableSet.copyOf(set); checkArgument(input.size() <= 30, "Too many elements to create power set: %s > 30", input.size()); return new PowerSet<E>(input); } private static final class PowerSet<E> extends AbstractSet<Set<E>> { final ImmutableSet<E> inputSet; final ImmutableList<E> inputList; final int powerSetSize; PowerSet(ImmutableSet<E> input) { this.inputSet = input; this.inputList = input.asList(); this.powerSetSize = 1 << input.size(); } @Override public int size() { return powerSetSize; } @Override public boolean isEmpty() { return false; } @Override public Iterator<Set<E>> iterator() { return new AbstractIndexedListIterator<Set<E>>(powerSetSize) { @Override protected Set<E> get(final int setBits) { return new AbstractSet<E>() { @Override 
public int size() { return Integer.bitCount(setBits); } @Override public Iterator<E> iterator() { return new BitFilteredSetIterator<E>(inputList, setBits); } }; } }; } private static final class BitFilteredSetIterator<E> extends UnmodifiableIterator<E> { final ImmutableList<E> input; int remainingSetBits; BitFilteredSetIterator(ImmutableList<E> input, int allSetBits) { this.input = input; this.remainingSetBits = allSetBits; } @Override public boolean hasNext() { return remainingSetBits != 0; } @Override public E next() { int index = Integer.numberOfTrailingZeros(remainingSetBits); if (index == 32) { throw new NoSuchElementException(); } int currentElementMask = 1 << index; remainingSetBits &= ~currentElementMask; return input.get(index); } } @Override public boolean contains(@Nullable Object obj) { if (obj instanceof Set) { Set<?> set = (Set<?>) obj; return inputSet.containsAll(set); } return false; } @Override public boolean equals(@Nullable Object obj) { if (obj instanceof PowerSet) { PowerSet<?> that = (PowerSet<?>) obj; return inputSet.equals(that.inputSet); } return super.equals(obj); } @Override public int hashCode() { /* * The sum of the sums of the hash codes in each subset is just the sum of * each input element's hash code times the number of sets that element * appears in. Each element appears in exactly half of the 2^n sets, so: */ return inputSet.hashCode() << (inputSet.size() - 1); } @Override public String toString() { return "powerSet(" + inputSet + ")"; } } /** * An implementation for {@link Set#hashCode()}. */ static int hashCodeImpl(Set<?> s) { int hashCode = 0; for (Object o : s) { hashCode += o != null ? o.hashCode() : 0; } return hashCode; } /** * An implementation for {@link Set#equals(Object)}. 
*/ static boolean equalsImpl(Set<?> s, @Nullable Object object){ if (s == object) { return true; } if (object instanceof Set) { Set<?> o = (Set<?>) object; try { return s.size() == o.size() && s.containsAll(o); } catch (NullPointerException ignored) { return false; } catch (ClassCastException ignored) { return false; } } return false; } /** * Returns an unmodifiable view of the specified navigable set. This method * allows modules to provide users with "read-only" access to internal * navigable sets. Query operations on the returned set "read through" to the * specified set, and attempts to modify the returned set, whether direct or * via its collection views, result in an * {@code UnsupportedOperationException}. * * <p>The returned navigable set will be serializable if the specified * navigable set is serializable. * * @param set the navigable set for which an unmodifiable view is to be * returned * @return an unmodifiable view of the specified navigable set * @since 12.0 */ @GwtIncompatible("NavigableSet") public static <E> NavigableSet<E> unmodifiableNavigableSet( NavigableSet<E> set) { if (set instanceof ImmutableSortedSet || set instanceof UnmodifiableNavigableSet) { return set; } return new UnmodifiableNavigableSet<E>(set); } @GwtIncompatible("NavigableSet") static final class UnmodifiableNavigableSet<E> extends ForwardingSortedSet<E> implements NavigableSet<E>, Serializable { private final NavigableSet<E> delegate; UnmodifiableNavigableSet(NavigableSet<E> delegate) { this.delegate = checkNotNull(delegate); } @Override protected SortedSet<E> delegate() { return Collections.unmodifiableSortedSet(delegate); } @Override public E lower(E e) { return delegate.lower(e); } @Override public E floor(E e) { return delegate.floor(e); } @Override public E ceiling(E e) { return delegate.ceiling(e); } @Override public E higher(E e) { return delegate.higher(e); } @Override public E pollFirst() { throw new UnsupportedOperationException(); } @Override public E pollLast() { 
throw new UnsupportedOperationException(); } private transient UnmodifiableNavigableSet<E> descendingSet; @Override public NavigableSet<E> descendingSet() { UnmodifiableNavigableSet<E> result = descendingSet; if (result == null) { result = descendingSet = new UnmodifiableNavigableSet<E>( delegate.descendingSet()); result.descendingSet = this; } return result; } @Override public Iterator<E> descendingIterator() { return Iterators.unmodifiableIterator(delegate.descendingIterator()); } @Override public NavigableSet<E> subSet( E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) { return unmodifiableNavigableSet(delegate.subSet( fromElement, fromInclusive, toElement, toInclusive)); } @Override public NavigableSet<E> headSet(E toElement, boolean inclusive) { return unmodifiableNavigableSet(delegate.headSet(toElement, inclusive)); } @Override public NavigableSet<E> tailSet(E fromElement, boolean inclusive) { return unmodifiableNavigableSet( delegate.tailSet(fromElement, inclusive)); } private static final long serialVersionUID = 0; } /** * Returns a synchronized (thread-safe) navigable set backed by the specified * navigable set. In order to guarantee serial access, it is critical that * <b>all</b> access to the backing navigable set is accomplished * through the returned navigable set (or its views). * * <p>It is imperative that the user manually synchronize on the returned * sorted set when iterating over it or any of its {@code descendingSet}, * {@code subSet}, {@code headSet}, or {@code tailSet} views. <pre> {@code * * NavigableSet<E> set = synchronizedNavigableSet(new TreeSet<E>()); * ... * synchronized (set) { * // Must be in the synchronized block * Iterator<E> it = set.iterator(); * while (it.hasNext()){ * foo(it.next()); * } * }}</pre> * * or: <pre> {@code * * NavigableSet<E> set = synchronizedNavigableSet(new TreeSet<E>()); * NavigableSet<E> set2 = set.descendingSet().headSet(foo); * ... * synchronized (set) { // Note: set, not set2!!! 
* // Must be in the synchronized block * Iterator<E> it = set2.descendingIterator(); * while (it.hasNext()) * foo(it.next()); * } * }}</pre> * * Failure to follow this advice may result in non-deterministic behavior. * * <p>The returned navigable set will be serializable if the specified * navigable set is serializable. * * @param navigableSet the navigable set to be "wrapped" in a synchronized * navigable set. * @return a synchronized view of the specified navigable set. * @since 13.0 */ @Beta @GwtIncompatible("NavigableSet") public static <E> NavigableSet<E> synchronizedNavigableSet( NavigableSet<E> navigableSet) { return Synchronized.navigableSet(navigableSet); } /** * Remove each element in an iterable from a set. */ static boolean removeAllImpl(Set<?> set, Iterator<?> iterator) { boolean changed = false; while (iterator.hasNext()) { changed |= set.remove(iterator.next()); } return changed; } static boolean removeAllImpl(Set<?> set, Collection<?> collection) { checkNotNull(collection); // for GWT if (collection instanceof Multiset) { collection = ((Multiset<?>) collection).elementSet(); } /* * AbstractSet.removeAll(List) has quadratic behavior if the list size * is just less than the set's size. We augment the test by * assuming that sets have fast contains() performance, and other * collections don't. 
See * http://code.google.com/p/guava-libraries/issues/detail?id=1013 */ if (collection instanceof Set && collection.size() > set.size()) { Iterator<?> setIterator = set.iterator(); boolean changed = false; while (setIterator.hasNext()) { if (collection.contains(setIterator.next())) { changed = true; setIterator.remove(); } } return changed; } else { return removeAllImpl(set, collection.iterator()); } } @GwtIncompatible("NavigableSet") static class DescendingSet<E> extends ForwardingNavigableSet<E> { private final NavigableSet<E> forward; DescendingSet(NavigableSet<E> forward) { this.forward = forward; } @Override protected NavigableSet<E> delegate() { return forward; } @Override public E lower(E e) { return forward.higher(e); } @Override public E floor(E e) { return forward.ceiling(e); } @Override public E ceiling(E e) { return forward.floor(e); } @Override public E higher(E e) { return forward.lower(e); } @Override public E pollFirst() { return forward.pollLast(); } @Override public E pollLast() { return forward.pollFirst(); } @Override public NavigableSet<E> descendingSet() { return forward; } @Override public Iterator<E> descendingIterator() { return forward.iterator(); } @Override public NavigableSet<E> subSet( E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) { return forward.subSet(toElement, toInclusive, fromElement, fromInclusive).descendingSet(); } @Override public NavigableSet<E> headSet(E toElement, boolean inclusive) { return forward.tailSet(toElement, inclusive).descendingSet(); } @Override public NavigableSet<E> tailSet(E fromElement, boolean inclusive) { return forward.headSet(fromElement, inclusive).descendingSet(); } @SuppressWarnings("unchecked") @Override public Comparator<? super E> comparator() { Comparator<? 
super E> forwardComparator = forward.comparator(); if (forwardComparator == null) { return (Comparator) Ordering.natural().reverse(); } else { return reverse(forwardComparator); } } // If we inline this, we get a javac error. private static <T> Ordering<T> reverse(Comparator<T> forward) { return Ordering.from(forward).reverse(); } @Override public E first() { return forward.last(); } @Override public SortedSet<E> headSet(E toElement) { return standardHeadSet(toElement); } @Override public E last() { return forward.first(); } @Override public SortedSet<E> subSet(E fromElement, E toElement) { return standardSubSet(fromElement, toElement); } @Override public SortedSet<E> tailSet(E fromElement) { return standardTailSet(fromElement); } @Override public Iterator<E> iterator() { return forward.descendingIterator(); } @Override public Object[] toArray() { return standardToArray(); } @Override public <T> T[] toArray(T[] array) { return standardToArray(array); } @Override public String toString() { return standardToString(); } } /** * Used to avoid http://bugs.sun.com/view_bug.do?bug_id=6558557 */ static <T> SortedSet<T> cast(Iterable<T> iterable) { return (SortedSet<T>) iterable; } }
gpl-3.0
Niky4000/UsefulUtils
projects/others/eclipse-platform-parent/eclipse.jdt.core-master/org.eclipse.jdt.core.tests.model/workspace/Converter/src/test0494/A.java
60
package test0494;

// Minimal fixture class: a single field whose type is a three-dimensional
// array of the raw type Class.
// NOTE(review): this lives under a test workspace ("Converter"), so it is
// presumably an AST/DOM-conversion fixture — keep the declaration shape as-is.
public class A {
	// Raw (non-parameterized) Class is deliberate here; do not "fix" to
	// Class<?> without confirming the owning test's expectations.
	Class[][][] cls;
}
gpl-3.0
hairongwang/R_BOD
src/main/java/org/jeecgframework/web/cgreport/service/core/CgReportServiceI.java
1454
package org.jeecgframework.web.cgreport.service.core;

import java.util.List;
import java.util.Map;

import org.jeecgframework.core.common.service.CommonService;

/**
 * Service interface for dynamic (user-configured) reports.
 *
 * <p>Original header (translated from Chinese):
 * Title: CgReportServiceI; description: dynamic report service interface;
 * author: 赵俊夫 (Zhao Junfu); date: Jul 30, 2013 8:43:01 AM; version: V1.0.
 */
public interface CgReportServiceI extends CommonService{

	/**
	 * Returns both the header (main) configuration and the item (detail)
	 * configuration of a report, looked up by the report's id.
	 *
	 * @param reportId the report id
	 * @return the combined report configuration
	 */
	public Map<String,Object> queryCgReportConfig(String reportId);

	/**
	 * Returns the header (main) configuration of a report.
	 *
	 * @param reportId the report id
	 * @return the header configuration
	 */
	public Map<String,Object> queryCgReportMainConfig(String reportId);

	/**
	 * Returns the item (detail/column) configurations of a report.
	 *
	 * @param reportId the report id
	 * @return one map per configured report item
	 */
	public List<Map<String,Object>> queryCgReportItems(String reportId);

	/**
	 * Executes the report SQL and returns one page of the result set.
	 *
	 * @param sql the report SQL
	 * @param params the query conditions (raw Map kept for source compatibility
	 *               with existing implementations)
	 * @param page the page number
	 * @param rows the number of rows to fetch
	 * @return the result rows, one map per row
	 */
	public List<Map<String,Object>> queryByCgReportSql(String sql,Map params,int page,int rows);

	/**
	 * Returns the total size of the report SQL's result set.
	 *
	 * @param sql the report SQL
	 * @param params the query conditions
	 * @return the total row count
	 */
	public long countQueryByCgReportSql(String sql,Map params);

	/**
	 * Executes the SQL and returns the set of field (column) names it produces.
	 *
	 * @param sql the report SQL
	 * @return the field names of the statement's result
	 */
	public List<String> getSqlFields(String sql);
}
gpl-3.0
fluddokt/opsu
src/itdelatrisu/opsu/objects/curves/CatmullCurve.java
2487
/*
 * opsu! - an open-source osu! client
 * Copyright (C) 2014-2017 Jeffrey Han
 *
 * opsu! is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * opsu! is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with opsu!. If not, see <http://www.gnu.org/licenses/>.
 */

package itdelatrisu.opsu.objects.curves;

import itdelatrisu.opsu.beatmap.HitObject;

import java.util.LinkedList;

/**
 * Representation of Catmull Curve with equidistant points.
 *
 * @author fluddokt (https://github.com/fluddokt)
 */
public class CatmullCurve extends EqualDistanceMultiCurve {
	/**
	 * Constructor.
	 * @param hitObject the associated HitObject
	 */
	public CatmullCurve(HitObject hitObject) {
		this(hitObject, true);
	}

	/**
	 * Constructor.
	 * @param hitObject the associated HitObject
	 * @param scaled whether to use scaled coordinates
	 */
	public CatmullCurve(HitObject hitObject, boolean scaled) {
		super(hitObject, scaled);
		LinkedList<CurveType> segments = new LinkedList<CurveType>();
		int pointCount = hitObject.getSliderX().length + 1;

		// Sliding window of control points; each full window of 4 produces one
		// centripetal Catmull-Rom segment.
		LinkedList<Vec2f> window = new LinkedList<Vec2f>();

		// Repeat the first control point as an extra control, but only when the
		// first two points differ (patterns: aabb / aabc-abcc / aabc-abcd-bcdd).
		if (getX(0) != getX(1) || getY(0) != getY(1)) {
			window.addLast(new Vec2f(getX(0), getY(0)));
		}

		for (int i = 0; i < pointCount; i++) {
			window.addLast(new Vec2f(getX(i), getY(i)));
			if (window.size() >= 4) {
				segments.add(new CentripetalCatmullRom(window.toArray(new Vec2f[0])));
				window.removeFirst();
			}
		}

		// Mirror the head handling: repeat the last control point when the final
		// two points differ, then flush the remaining window if it is full.
		if (getX(pointCount - 1) != getX(pointCount - 2)
				|| getY(pointCount - 1) != getY(pointCount - 2)) {
			window.addLast(new Vec2f(getX(pointCount - 1), getY(pointCount - 1)));
		}
		if (window.size() >= 4) {
			segments.add(new CentripetalCatmullRom(window.toArray(new Vec2f[0])));
		}

		init(segments);
	}
}
gpl-3.0
jonathant/rhodes
platform/shared/rubyJVM/src/javolution/context/ImmortalContext.java
5604
/*
 * Javolution - Java(TM) Solution for Real-Time and Embedded Systems
 * Copyright (C) 2007 - Javolution (http://javolution.org/)
 * All rights reserved.
 *
 * Permission to use, copy, modify, and distribute this software is
 * freely granted, provided that this notice is preserved.
 */
package javolution.context;

import j2me.lang.ThreadLocal;
import j2mex.realtime.MemoryArea;
import javolution.util.FastMap;
import javolution.util.FastTable;

/**
 * <p> This class represents an allocator from immortal memory (RTSJ).</p>
 *
 * <p> It is typically used to allocate (and recycle) from immortal memory
 *     allowing dynamically created static instances to be accessible by
 *     <code>NoHeapRealtimeThread</code>:[code]
 *     public synchronized Text intern() {
 *         if (!INTERN_INSTANCES.containsKey(this)) {
 *             ImmortalContext.enter();
 *             try { // Forces interned instance to be in immortal memory.
 *                 Text txt = this.copy(); // In ImmortalMemory.
 *                 INTERN_INSTANCES.put(txt, txt);
 *             } finally {
 *                 ImmortalContext.exit();
 *             }
 *         }
 *         return (Text) INTERN_INSTANCES.get(str);
 *     }[/code]</p>
 * <p> Because class initialization may occur while running in a non-heap
 *     context (e.g. {@link StackContext}), it is recommended to force
 *     factory produced constants to immortal memory:[code]
 *     public class Rational {
 *         public static final Rational ZERO;
 *         public static final Rational ONE;
 *         ...
 *         static { // Forces constants to ImmortalMemory.
 *             ImmortalContext.enter();
 *             try {
 *                 ZERO = Rational.valueOf(0, 1); // Factory produced.
 *                 ONE = Rational.valueOf(1, 1); // Factory produced.
 *             } finally {
 *                 ImmortalContext.exit();
 *             }
 *         }
 *     }[/code]</p>
 *
 * @author <a href="mailto:jean-marie@dautelle.com">Jean-Marie Dautelle</a>
 * @version 5.2, August 19, 2007
 */
public final class ImmortalContext extends AllocatorContext {

    /** Holds the class. */
    private static final Class CLASS = new ImmortalContext().getClass();

    /**
     * Holds the factory to allocator mapping (per thread).
     * Per-thread map, so each allocator is thread-confined and needs no
     * internal synchronization.
     */
    private static final ThreadLocal FACTORY_TO_ALLOCATOR = new ThreadLocal() {
        protected Object initialValue() {
            return new FastMap();
        }
    };

    /**
     * Holds the allocators which have been activated (per thread).
     * Cleared by {@link #deactivate()} when this context stops being current.
     */
    private static final ThreadLocal ACTIVE_ALLOCATORS = new ThreadLocal() {
        protected Object initialValue() {
            return new FastTable();
        }
    };

    /**
     * Enters an immortal memory context.
     *
     * @return the immortal memory context entered.
     */
    public static ImmortalContext enter() {
        return (ImmortalContext) Context.enter(ImmortalContext.CLASS);
    }

    /**
     * Exits the current immortal memory context.
     *
     * @return the immortal context being exited.
     * @throws ClassCastException if the context is not an immortal context.
     */
    public static/*ImmortalContext*/Context exit() {
        return (ImmortalContext) Context.exit();
    }

    /**
     * Default constructor (private, instances are factory produced).
     */
    private ImmortalContext() {
    }

    // Overrides.
    // Releases ownership of every allocator this thread activated, then
    // clears the per-thread activation list.
    protected void deactivate() {
        FastTable allocators = (FastTable) ACTIVE_ALLOCATORS.get();
        // Note: the index is advanced inside the loop body (i++).
        for (int i = 0, n = allocators.size(); i < n;) {
            ((Allocator) allocators.get(i++)).user = null;
        }
        allocators.clear();
    }

    // Overrides.
    // Lazily creates one ImmortalAllocator per factory per thread, and marks
    // it active (owned by the current thread) on first use after activation.
    protected Allocator getAllocator(ObjectFactory factory) {
        final FastMap factoryToAllocator = (FastMap) FACTORY_TO_ALLOCATOR.get();
        ImmortalAllocator allocator = (ImmortalAllocator) factoryToAllocator
                .get(factory);
        if (allocator == null) {
            allocator = new ImmortalAllocator(factory);
            factoryToAllocator.put(factory, allocator);
        }
        if (allocator.user == null) { // Activate.
            allocator.user = Thread.currentThread();
            FastTable activeAllocators = (FastTable) ACTIVE_ALLOCATORS.get();
            activeAllocators.add(allocator);
        }
        return allocator;
    }

    // Overrides.
    // Entering this context suspends the outer context's allocators.
    protected void enterAction() {
        getOuter().getAllocatorContext().deactivate();
    }

    // Overrides.
    protected void exitAction() {
        this.deactivate();
    }

    // Holds immortal allocator implementation.
    // Runs factory creation and queue resizing inside the immortal MemoryArea
    // so produced objects (and the recycle queue) live in immortal memory.
    private static final class ImmortalAllocator extends Allocator {

        private static final MemoryArea IMMORTAL = MemoryArea
                .getMemoryArea("");

        private final ObjectFactory _factory;

        // Hand-off slot written by _allocate while executing in IMMORTAL;
        // safe because the allocator is thread-confined (see FACTORY_TO_ALLOCATOR).
        private Object _allocated;

        public ImmortalAllocator(ObjectFactory factory) {
            _factory = factory;
        }

        private final Runnable _allocate = new Runnable() {
            public void run() {
                _allocated = _factory.create();
            }
        };

        private final Runnable _resize = new Runnable() {
            public void run() {
                resize();
            }
        };

        protected Object allocate() {
            IMMORTAL.executeInArea(_allocate);
            return _allocated;
        }

        protected void recycle(Object object) {
            if (_factory.doCleanup()) {
                _factory.cleanup(object);
            }
            // Grow the recycle queue inside immortal memory when it is full.
            if (queueSize >= queue.length) {
                IMMORTAL.executeInArea(_resize);
            }
            queue[queueSize++] = object;
        }

        public String toString() {
            return "Immortal allocator for " + _factory.getClass();
        }
    }

    // Allows instances to be factory produced (private constructor).
    static {
        ObjectFactory.setInstance(new ObjectFactory() {
            protected Object create() {
                return new ImmortalContext();
            }
        }, ImmortalContext.CLASS);
    }
}
gpl-3.0
pabalexa/calibre2opds
OpdsOutput/src/main/java/com/l2fprod/common/beans/editor/FontPropertyEditor.java
2810
/**
 * L2FProd.com Common Components 7.3 License.
 *
 * Copyright 2005-2007 L2FProd.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.l2fprod.common.beans.editor;

import com.l2fprod.common.swing.ComponentFactory;
import com.l2fprod.common.swing.JFontChooser;
import com.l2fprod.common.swing.PercentLayout;
import com.l2fprod.common.swing.renderer.DefaultCellRenderer;
import com.l2fprod.common.util.ResourceManager;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

/**
 * Property editor for {@link Font} values: a renderer label showing the
 * current font, a mini button opening a font chooser, and an "X" mini button
 * clearing the value.
 */
public class FontPropertyEditor extends AbstractPropertyEditor {

  private DefaultCellRenderer label;
  private JButton button;
  private Font font;

  public FontPropertyEditor() {
    JPanel panel = new JPanel(new PercentLayout(PercentLayout.HORIZONTAL, 0));
    editor = panel;

    // Renderer takes the remaining width (PercentLayout "*" constraint).
    label = new DefaultCellRenderer();
    panel.add("*", label);
    label.setOpaque(false);

    // "..." style mini button: opens the font chooser dialog.
    button = ComponentFactory.Helper.getFactory().createMiniButton();
    button.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        selectFont();
      }
    });
    panel.add(button);

    // "X" mini button: resets the edited value to null.
    button = ComponentFactory.Helper.getFactory().createMiniButton();
    button.setText("X");
    button.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        selectNull();
      }
    });
    panel.add(button);

    panel.setOpaque(false);
  }

  public Object getValue() {
    return font;
  }

  public void setValue(Object value) {
    font = (Font) value;
    label.setValue(value);
  }

  /** Pops up the font chooser; commits and fires only when a font is picked. */
  protected void selectFont() {
    ResourceManager rm = ResourceManager.all(FontPropertyEditor.class);
    String title = rm.getString("FontPropertyEditor.title");
    Font selected = JFontChooser.showDialog(editor, title, font);
    if (selected != null) {
      Font previous = font;
      font = selected;
      label.setValue(selected);
      firePropertyChange(previous, selected);
    }
  }

  /** Clears the edited value and notifies listeners. */
  protected void selectNull() {
    Object previous = font;
    font = null;
    label.setValue(null);
    firePropertyChange(previous, null);
  }
}
gpl-3.0
Scrik/Cauldron-1
eclipse/cauldron/src/main/java/cpw/mods/fml/repackage/com/nothome/delta/ByteBufferSeekableSource.java
2952
/*
 * ByteArraySeekableSource.java
 *
 * Created on May 17, 2006, 12:41 PM
 * Copyright (c) 2006 Heiko Klein
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 */
package cpw.mods.fml.repackage.com.nothome.delta;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Wraps a byte buffer as a source
 */
public class ByteBufferSeekableSource implements SeekableSource {

    /** The full backing buffer; set to null by {@link #close()}. */
    private ByteBuffer bb;

    /** View of {@code bb} positioned at the current read offset. */
    private ByteBuffer cur;

    /**
     * Constructs a new ByteArraySeekableSource.
     *
     * @param source the bytes to expose (wrapped, not copied)
     */
    public ByteBufferSeekableSource(byte[] source) {
        this(ByteBuffer.wrap(source));
    }

    /**
     * Constructs a new ByteArraySeekableSource.
     *
     * @param bb the buffer to expose; it is rewound so the whole buffer is readable
     * @throws NullPointerException if {@code bb} is null
     */
    public ByteBufferSeekableSource(ByteBuffer bb) {
        if (bb == null)
            throw new NullPointerException("bb");
        this.bb = bb;
        bb.rewind();
        try {
            seek(0);
        } catch (IOException e) {
            // Cannot happen: position 0 is always within [0, limit].
            throw new RuntimeException(e);
        }
    }

    /**
     * Moves the read position to {@code pos}, counted from the start of the
     * wrapped buffer.
     *
     * @throws IOException if {@code pos} is negative or beyond the buffer's limit
     */
    @Override
    public void seek(long pos) throws IOException {
        cur = bb.slice();
        // Fix: reject negative positions explicitly. Previously a negative pos
        // fell through to ByteBuffer.position(), which throws an unchecked
        // IllegalArgumentException instead of the IOException declared here.
        if (pos < 0 || pos > cur.limit())
            throw new IOException("pos " + pos + " cannot seek " + cur.limit());
        cur.position((int) pos);
    }

    /**
     * Copies bytes from the current position into {@code dest}.
     *
     * @return the number of bytes copied (0 if {@code dest} is full),
     *         or -1 if this source is exhausted
     */
    @Override
    public int read(ByteBuffer dest) throws IOException {
        if (!cur.hasRemaining())
            return -1;
        int c = 0;
        while (cur.hasRemaining() && dest.hasRemaining()) {
            dest.put(cur.get());
            c++;
        }
        return c;
    }

    /** Drops the buffer references; subsequent reads/seeks will fail. */
    @Override
    public void close() throws IOException {
        bb = null;
        cur = null;
    }

    /**
     * Returns a debug <code>String</code>.
     */
    @Override
    public String toString() {
        // Fix: close() nulls both buffers, so the original body would NPE when
        // a closed source was printed (e.g. in a log statement).
        if (bb == null || cur == null)
            return "BBSeekable (closed)";
        return "BBSeekable" +
            " bb=" + this.bb.position() + "-" + bb.limit() +
            " cur=" + this.cur.position() + "-" + cur.limit() +
            "";
    }
}
gpl-3.0
jotomo/AndroidAPS
app/src/main/java/info/nightscout/androidaps/plugins/pump/insight/exceptions/app_layer_errors/BolusDurationNotInRangeException.java
261
package info.nightscout.androidaps.plugins.pump.insight.exceptions.app_layer_errors;

/**
 * App-layer error reported by the Insight pump when a requested bolus
 * duration is outside the accepted range (error semantics defined by
 * {@code AppLayerErrorException} — confirm against the error-code table).
 */
public class BolusDurationNotInRangeException extends AppLayerErrorException {

    /**
     * @param errorCode the raw app-layer error code received from the pump
     */
    public BolusDurationNotInRangeException(int errorCode) {
        super(errorCode);
    }
}
agpl-3.0
moliva/proactive
src/Extra/org/objectweb/proactive/extra/multiactivecan/JoinResponse.java
2203
/*
 * ################################################################
 *
 * ProActive Parallel Suite(TM): The Java(TM) library for
 *    Parallel, Distributed, Multi-Core Computing for
 *    Enterprise Grids & Clouds
 *
 * Copyright (C) 1997-2012 INRIA/University of
 *                 Nice-Sophia Antipolis/ActiveEon
 * Contact: proactive@ow2.org or contact@activeeon.com
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Affero General Public License
 * as published by the Free Software Foundation; version 3 of
 * the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 *
 * If needed, contact us to obtain a release under GPL Version 2 or 3
 * or a different license than the AGPL.
 *
 * Initial developer(s): The ProActive Team
 *                       http://proactive.inria.fr/team_members.htm
 * Contributor(s):
 *
 * ################################################################
 * $$PROACTIVE_INITIAL_DEV$$
 */
package org.objectweb.proactive.extra.multiactivecan;

import java.io.Serializable;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Serializable value object pairing a {@link Router} with a map of key/value
 * data. Presumably carried back to a peer joining the CAN overlay — confirm
 * against the caller that constructs it.
 */
public class JoinResponse implements Serializable {

    private Router router;
    private ConcurrentHashMap<Key, Serializable> data;

    /**
     * @param router the router handed to the receiver
     * @param data   the key/value entries handed to the receiver
     */
    public JoinResponse(Router router, ConcurrentHashMap<Key, Serializable> data) {
        this.router = router;
        this.data = data;
    }

    /** @return the router carried by this response */
    public Router getRouter() {
        return router;
    }

    /** @param router the router to carry */
    public void setRouter(Router router) {
        this.router = router;
    }

    /** @return the key/value entries carried by this response */
    public ConcurrentHashMap<Key, Serializable> getData() {
        return data;
    }

    /** @param data the key/value entries to carry */
    public void setData(ConcurrentHashMap<Key, Serializable> data) {
        this.data = data;
    }
}
agpl-3.0
sanjupolus/KC6.oLatest
coeus-impl/src/main/java/org/kuali/coeus/common/budget/impl/core/category/BudgetCategoryExistenceRule.java
3022
/*
 * Kuali Coeus, a comprehensive research administration system for higher education.
 *
 * Copyright 2005-2015 Kuali, Inc.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package org.kuali.coeus.common.budget.impl.core.category;

import org.kuali.coeus.common.budget.framework.core.category.BudgetCategory;
import org.kuali.coeus.common.budget.framework.core.category.BudgetCategoryMapping;
import org.kuali.coeus.common.budget.framework.core.CostElement;
import org.kuali.coeus.sys.framework.rule.KcMaintenanceDocumentRuleBase;
import org.kuali.rice.kns.document.MaintenanceDocument;

import java.util.HashMap;
import java.util.Map;

/**
 * Maintenance-document business rule verifying that the budget category code
 * referenced by a {@link BudgetCategoryMapping} or a {@link CostElement}
 * exists as a {@link BudgetCategory} row.
 */
public class BudgetCategoryExistenceRule extends KcMaintenanceDocumentRuleBase {

    public BudgetCategoryExistenceRule() {
        super();
    }

    /** Runs the existence check when the document is routed. */
    @Override
    protected boolean processCustomRouteDocumentBusinessRules(MaintenanceDocument document) {
        return checkExistence(document);
    }

    /** Runs the existence check when the document is approved. */
    @Override
    protected boolean processCustomApproveDocumentBusinessRules(MaintenanceDocument document) {
        return checkExistence(document);
    }

    /**
     * This method is to check the existence of budgetcategorycode in table.
     *
     * @param maintenanceDocument document whose new maintainable holds either a
     *        {@link BudgetCategoryMapping} or a {@link CostElement}
     * @return true if the referenced budget category exists
     */
    private boolean checkExistence(MaintenanceDocument maintenanceDocument) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("new maintainable is: " + maintenanceDocument.getNewMaintainableObject().getClass());
        }

        // budgetCategoryCode is shared by budgetcategorymapping & costelement;
        // fetch the data object once instead of re-reading it per branch.
        Object dataObject = maintenanceDocument.getNewMaintainableObject().getDataObject();
        final String budgetCategoryCode;
        if (dataObject instanceof BudgetCategoryMapping) {
            budgetCategoryCode = ((BudgetCategoryMapping) dataObject).getBudgetCategoryCode();
        } else {
            budgetCategoryCode = ((CostElement) dataObject).getBudgetCategoryCode();
        }

        // Typed primary-key map (was a raw Map/HashMap).
        Map<String, Object> pkMap = new HashMap<String, Object>();
        pkMap.put("code", budgetCategoryCode);
        return checkExistenceFromTable(BudgetCategory.class, pkMap, "code", "Budget Category");
    }
}
agpl-3.0
AsherBond/MondocosmOS
wonderland/modules/foundation/xremwin/src/classes/org/jdesktop/wonderland/modules/xremwin/client/wm/X11WindowManager.java
30493
/**
 * Project Wonderland
 *
 * Copyright (c) 2004-2009, Sun Microsystems, Inc., All Rights Reserved
 *
 * Redistributions in source code form must reproduce the above
 * copyright and this condition.
 *
 * The contents of this file are subject to the GNU General Public
 * License, Version 2 (the "License"); you may not use this file
 * except in compliance with the License. A copy of the License is
 * available at http://www.opensource.org/licenses/gpl-license.php.
 *
 * Sun designates this particular file as subject to the "Classpath"
 * exception as provided by Sun in the License file that accompanied
 * this code.
 */

/*
This file is derived from escher-0.2.2/gnu/app/puppet/Puppet.java of Escher 0.2.2.
Here is the copyright notice of the original file:

Copyright (c) 2000-2004, Stephen Tse
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

 * Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.

 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

 * Neither the name of the organization nor the names of its contributors may
   be used to endorse or promote products derived from this software without
   specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
package org.jdesktop.wonderland.modules.xremwin.client.wm;

import java.util.HashMap;
import java.util.logging.Logger;
import java.util.logging.Level;
import gnu.x11.Application;
import gnu.x11.Atom;
import gnu.x11.Depth;
import gnu.x11.Display;
import gnu.x11.Error;
import gnu.x11.Screen;
import gnu.x11.Visual;
import gnu.x11.Window;
import gnu.x11.event.ClientMessage;
import gnu.x11.event.ConfigureNotify;
import gnu.x11.event.ConfigureRequest;
import gnu.x11.event.CreateNotify;
import gnu.x11.event.DestroyNotify;
import gnu.x11.event.EnterNotify;
import gnu.x11.event.Event;
import gnu.x11.event.MapNotify;
import gnu.x11.event.MapRequest;
import gnu.x11.event.MappingNotify;
import gnu.x11.event.PropertyNotify;
import gnu.x11.event.ReparentNotify;
import gnu.x11.event.UnmapNotify;
//TODO: import java.awt.Event;
import java.util.LinkedList;
import org.jdesktop.wonderland.common.StableAPI;
//TODO: import org.jdesktop.wonderland.modules.appbase.client.Window;
//TODO: import sun.awt.X11.Depth;
//TODO: import sun.awt.X11.Screen;
//TODO: import sun.awt.X11.Visual;

/**
 * An X11 window manager used by the Xremwin client, derived from Escher's
 * Puppet demo (see file header). It connects to the given display via the
 * Escher {@code Application} base class, acquires the WM_Sn selections,
 * composite-redirects top-level windows, and runs its own event-dispatch
 * thread ({@link #run}) handling the ICCCM window life-cycle events
 * (MapRequest, ConfigureRequest, UnmapNotify, DestroyNotify, ...).
 */
@StableAPI
final public class X11WindowManager extends Application
    implements Runnable, Display.DisconnectListener {

    private static final Logger logger = Logger.getLogger("lg.x11");

    /**
     * Provides a way for the window manager to notify other
     * Wonderland software components that a master window's
     * title has changed.
     */
    public interface WindowTitleListener {
        /**
         * Window title of Master window has changed.
         * @param wid The X window ID of the window.
         * @param windowTitle The new window title.
         */
        public void setWindowTitle(int wid, String windowTitle);
    }

    // useful atoms
    private final Atom atomWmState;
    private final Atom atomWmChangeState;
    private final Atom atomWmProtocols;
    private final Atom atomWmDeleteWindow;
    private final Atom atomWmTakeFocus;
    private final Atom atomWmColormapWindows;
    private final Atom atomWmTransientFor;
    private final Atom atomCompoundText;
    private final Atom atomServerShutdown;
    // WM_Sn manager-selection atoms, one per screen
    private final Atom[] atomsWmSelection;

    //X11WindowManagerHints x11WMHints = null;

    private Window rootWin = null;
    private Window rootWindows[];
    // Per-screen depth/visual lookup tables, keyed by depth value / visual id
    private HashMap<Integer,Depth> screenDepth = null;
    private HashMap<Integer,Visual> screenVisual = null;
    private int defaultDepth;
    private WindowTitleListener wtl;

    // True while the window manager thread is running
    private boolean active;

    // Who to notify when the window manager exits
    private LinkedList<ExitListener> exitListeners = new LinkedList<ExitListener>();

    /** Callback interface for window-manager shutdown notification. */
    public interface ExitListener {
        public void windowManagerExitted();
    }

    /** Creates a window manager on the given display with no title listener. */
    public X11WindowManager(String dpy) {
        this(dpy, null);
    }

    /**
     * Creates a window manager on the given display.
     *
     * @param dpy the X display string to connect to (e.g. ":0")
     * @param wtl listener notified when a managed window's WM_NAME changes;
     *            may be null
     * @throws RuntimeException if another window manager already owns the
     *         root window's redirect masks or the composite extension is
     *         unavailable
     */
    public X11WindowManager(String dpy, WindowTitleListener wtl) {
        super(new String[] {"--display", dpy});
        this.wtl = wtl;
        logger.config("Starting X Window Manager against display " + dpy);

        // TODO: windows sometimes appear and disappear so fast that
        // LG ends up sending some requests after a window has been
        // destroyed. As a simple work around the LG Display Server
        // is simply going to tell Escher to ignore troublesome requests.
        // But we should eventually see if there is a better way to work around.
        display.ignore_error(gnu.x11.Error.BAD_WINDOW);
        display.ignore_error(gnu.x11.Error.BAD_DRAWABLE);
        display.ignore_error(gnu.x11.Error.BAD_MATCH);
        display.ignore_error(gnu.x11.Error.BAD_ATOM);

        X11WindowManagerHints.initX11WindowManagerHint(display);

        //display.debug_mode = true;

        rootWin = display.default_root;
        Window.NONE.display = display; // for move pointer
        display.setDisconnectListener(this);

        screenDepth = new HashMap<Integer,Depth>();
        screenVisual = new HashMap<Integer,Visual>();
        defaultDepth = display.default_depth;

        atomsWmSelection = new Atom[display.screens.length];
        Window wmSelectionWin = new Window(rootWin, 0,0, 1, 1, 0, new Window.Attributes());

        for (int i = 0; i< display.screens.length; i++) {
            Screen screen = display.screens[i];
            // Build the depth/visual lookup tables for this screen.
            gnu.x11.Enum scrdepths = screen.depths();
            while (scrdepths.more()) {
                Depth depth = (Depth) scrdepths.next();
                screenDepth.put(depth.depth(), depth);
                gnu.x11.Enum enm = depth.visuals();
                while (enm.more()) {
                    Visual visual = (Visual) enm.next();
                    screenVisual.put(visual.id(), visual);
                }
            }

            // AWT in Java6u1 requires that the WM acquire these selections.
            // Note: even though the WM conventions recommend that CurrentTime
            // not be used we can use it here because the LG WM is the only one
            // acquiring these selections.
            atomsWmSelection[i] = (Atom)Atom.intern(display, "WM_S" + i);
            wmSelectionWin.set_selection_owner(atomsWmSelection[i], Display.CURRENT_TIME);
            display.check_error();
        }

        atomWmState = (Atom)Atom.intern(display, "WM_STATE");
        atomWmChangeState = (Atom)Atom.intern(display, "WM_CHANGE_STATE");
        atomWmProtocols = (Atom)Atom.intern(display, "WM_PROTOCOLS");
        atomWmDeleteWindow = (Atom)Atom.intern(display, "WM_DELETE_WINDOW");
        atomWmTakeFocus = (Atom)Atom.intern(display, "WM_TAKE_FOCUS");
        atomWmColormapWindows=(Atom)Atom.intern(display,"WM_COLORMAP_WINDOWS");
        atomWmTransientFor = (Atom)Atom.intern(display, "WM_TRANSIENT_FOR");
        atomCompoundText = (Atom)Atom.intern(display, "COMPOUND_TEXT");
        atomServerShutdown = (Atom)Atom.intern(display, "SERVER_SHUTDOWN");

        try {
            selectInput(rootWin);
        } catch (Error err) {
            if (err.code == Error.BAD_ACCESS && err.bad == rootWin.id) {
                // Only one client may select SubstructureRedirect on the root;
                // BAD_ACCESS here means another WM owns it already.
                logger.severe("Failed to access root window. Another WM is running?");
                throw new RuntimeException ("Failed to access root window\n"
                                            + "Another WM is running?"); // FIXME
            } else {
                throw err;
            }
        }

        // Remote Windows: Automatically composite redirect all root top-level windows
        compositeTopLevelWindows();

        // Init WmNET support for screen 0
        // TODO: multiscreen
        Window[] rootWins = new Window[1];
        Window[] checkWins = new Window[1];
        rootWins[0] = rootWin;
        checkWins[0] = new Window(rootWin, 0,0, 1, 1, 0, new Window.Attributes());
        X11WindowManagerHints.initWmNETSupport(display, rootWins, checkWins);

        // prepare for the event dispatch thread
        Thread eventThread = new Thread( this, "X11WindowManager" );
        // eventThread.setPriority(Thread.NORM_PRIORITY + 2); // FIXME
        eventThread.start();

        logger.info( "X Window Manager initialization completed against display " + dpy);
    }

    /** Closes the display connection (idempotent). */
    public void disconnect () {
        if (display != null) {
            display.setDisconnectListener(null);
            display.close();
            display = null;
        }
    }

    // Display disconnected
    public void disconnected () {
        active = false;
        disconnect();
        logger.severe("X11WindowManager exitted.");

        // We must clone the list because the exit listener may remove
        // itself as a listener
        LinkedList<ExitListener> listenersToNotify =
            (LinkedList<ExitListener>) exitListeners.clone();
        for (ExitListener exitListener : listenersToNotify) {
            exitListener.windowManagerExitted();
        }
    }

    // Redirects rendering of all top-level children of the root window into
    // offscreen storage via the X Composite extension.
    private void compositeTopLevelWindows () {
        X11CompositeExtension compExt = null;
        try {
            compExt = new X11CompositeExtension(display);
        } catch (Exception ex) {
            throw new RuntimeException("Failed to access composite extension");
        }
        compExt.redirectSubwindows(display.default_root,
                                   X11CompositeExtension.MANUAL);
        display.check_error(); // Xsync
    }

    // Selects the event masks this WM needs on the given (root) window.
    private void selectInput(Window win) {
        win.select_input(
            // unmap, destroy notify
            Event.SUBSTRUCTURE_NOTIFY_MASK
            // map, configure, circulate request
            | Event.SUBSTRUCTURE_REDIRECT_MASK
            // ICCCM properties (wm name, hints, normal hints)
            | Event.PROPERTY_CHANGE_MASK);
        display.check_error();
    }

    // Currently disabled; kept for reference.
    private void selectInputForPRW (Window win) {
        /*
        win.select_input(
            // unmap, destroy notify
            Event.SUBSTRUCTURE_NOTIFY_MASK
            // map, configure, circulate request
            | Event.SUBSTRUCTURE_REDIRECT_MASK
            // ICCCM properties (wm name, hints, normal hints)
            | Event.PROPERTY_CHANGE_MASK
            // Enter
            | Event.ENTER_WINDOW_MASK);
        display.check_error();
        */
    }

    /** Event-dispatch loop; runs until the display disconnects or errors. */
    public void run() {
        active = true;
        while (!exit_now) {
            try {
                readAndDispatchEvent();
            } catch (Throwable t) {
                // FIXME -- how to deal with this situation?
                logger.log(Level.WARNING, "Error in X event dispatching: " + t, t);
                System.err.println("error in x event dispatching. Exit");
                exit_now = true;
            }
        }
        System.err.println("X display disconnected.");
        disconnected();
    }

    public void addExitListener (ExitListener listener) {
        exitListeners.add(listener);
    }

    public void removeExitListener (ExitListener listener) {
        exitListeners.remove(listener);
    }

    /** @return true while the event-dispatch thread is running */
    public boolean isActive () {
        return active;
    }

    // Logs the message and rings the X bell.
    private void alertUser(String message) {
        logger.warning(message);
        display.bell(-50);
    }

    // Blocks for the next X event and dispatches it.
    private void readAndDispatchEvent() {
        Event firstEvent = null;
        try {
            firstEvent = display.next_event();
        } catch (Exception e) {
            // We may get an exception at this point if the XS server goes
            // away. Just ignore it and exit the window manager.
            System.err.println("readAndDispatchEvent: exception. Exit");
            exit_now = true;
        }
        if (firstEvent == null) {
            exit_now = true;
        }
        dispatch(firstEvent);
        if (display != null) {
            display.flush();
        }
    }

    // Routes an event to the handler for its event code.
    private void dispatch(Event event) {
        logger.fine(event.toString());

        switch(event.code()) {
        case ClientMessage.CODE: // un-avoidable
            clientMessage((ClientMessage)event);
            break;
        case ConfigureRequest.CODE: // Event.SUBSTRUCTURE_NOTIFY
            configureRequest((ConfigureRequest)event);
            break;
        case ConfigureNotify.CODE: // Event.SUBSTRUCTURE_NOTIFY
            configureNotify((ConfigureNotify)event);
            break;
        case DestroyNotify.CODE: // Event.SUBSTRUCTURE_NOTIFY
            destroyNotify((DestroyNotify)event);
            break;
        case PropertyNotify.CODE: // Event.PROPERTY_CHANGE
            propertyNotify((PropertyNotify)event);
            break;
        case MapRequest.CODE: // Event.SUBSTRUCTURE_REDIRECT
            mapRequest((MapRequest)event);
            break;
        case MapNotify.CODE: // Event.SUBSTRUCTURE_NOTIFY
            mapNotify((MapNotify)event);
            break;
        case UnmapNotify.CODE: // Event.SUBSTRUCTURE_NOTIFY
            unmapNotify((UnmapNotify)event);
            break;
        case CreateNotify.CODE: // Event.SUBSTRUCTURE_NOTIFY, ignored
            createNotify((CreateNotify)event);
            break;
        case MappingNotify.CODE: // un-avoidable, ignored
            break;
        case ReparentNotify.CODE: // nothing to do, ignored
            break;
        case EnterNotify.CODE:
            enterNotify((EnterNotify) event);
            break;
        default:
            alertUser("Unhandled event: " + event);
        }
    }

    // Caches the per-client state this WM needs to manage the window.
    private void manage(X11Client client) {
        // ready for move and resize
        client.geometry();
        // ready for next focus and preference
        client.classHint = client.wm_class_hint();
        // ready for minimize
        client.sizeHints = client.wm_normal_hints();
        // ready for info
        client.setName(client.wm_name());
        client.change_save_set(false);
    }

    private void unmanage(X11Client client) {
        client.unintern();
    }

    // Focus follows the pointer: give input focus to the entered window.
    private void enterNotify(EnterNotify event) {
        X11Client client = (X11Client)X11Client.intern(this, event.read4(12));
        if (!checkUnmapDestroyEvent(display, client)) {
            return;
        }
        client.set_input_focus();
        display.check_error();
    }

    private void clientMessage(ClientMessage event) {
        // A message on window 0 is used as an out-of-band shutdown signal.
        if (event.window_id() == 0) {
            Atom type = event.type();
            if (type.name.equals("SERVER_SHUTDOWN")) {
                System.err.println("Xremwin server shutdown detected");
                exit_now = true;
                return;
            }
        }

        X11Client client = (X11Client)X11Client.intern(this, event.window_id());
        Atom type = event.type();

        // client asks to change window state from normal to iconic
        if (event.format() == 32 /*atom*/) {
            if (type.name.equals("WM_CHANGE_STATE")
                && event.wm_data () == Window.WMHints.ICONIC) {
                hide(client);
            } else {
                alertUser("1- Unhandled client message: " + type);
            }
        } else {
            alertUser("2- Unhandled client message: " + type);
        }
    }

    private void configureRequest(ConfigureRequest event) {
        // client asks to change window configuration
        // @see icccm/sec-4.html#s-4.1.5
        X11Client client = (X11Client)X11Client.intern(this, event.window_id());
        if (!checkUnmapDestroyEvent(display, client)) {
            return;
        }

        /* Should I send a synthetic ConfigureNotify instead of actually
         * do a configure request on the window? We do not re-parent, and thus,
         * according to icccm, a ConfigureNotify will be fine. But xterm
         * relies on a window manager to honour its ConfigureRequest to
         * configure a window, or it falls back to width = height = 1. A mere
         * ConfigureNotify seems not sufficient. (Other clients does not
         * have this problems?)
         */
        //System.err.println("Enter configureRequest: wid = " + client.getWID());
        client.configure(event.changes());
        client.set_geometry_cache(event.rectangle());
        client.moveAndSizeWindow();
        display.check_error();
    }

    /**
     * Called by the Xserver (via event dispatch loop) to notify us that
     * some aspect of the window has changed. Could be size, location,...
     */
    private void configureNotify(ConfigureNotify event) {
        X11Client client = (X11Client) X11Client.intern(this, event.window_id());
        ConfigureNotify eventFixed = new ConfigureNotifyBugFixed(display, event.data);
        int aboveSiblingId = eventFixed.above_sibling_id();
        client.set_geometry_cache(eventFixed.rectangle());
        if (client.attributes == null) {
            client.attributes = client.attributes ();
        }
        if (client.attributes.override_redirect()) {
            // for override_redirect window (popup window) we don't get ConfigureRequest
            // so that we should handle ConfigureNotify here also. emacs configure its window
            // after mapping them. therefor we displayed its popup menus in wrong place.
            // see Issue 457
            client.moveAndSizeWindow();
        } else {
            if (aboveSiblingId > 0) {
                X11Client aboveSibling = (X11Client) X11Client.intern(this, aboveSiblingId);
                client.restackWindow(aboveSibling, Window.Changes.ABOVE);
            }
        }
    }

    // Reacts to ICCCM property changes on managed windows.
    private void propertyNotify(PropertyNotify event) {
        Atom atom = event.atom(display);
        X11Client client = (X11Client)X11Client.intern(this, event.window_id());

        //if (atom == atomWmColormapWindows || atom == atomWmProtocols) {
        //    logger.warning("Unhandled property notify: " + atom);
        //}

        // TODO: The handling of these atoms needs to be implemented.
        // Refer to lg3d-x11 programs/twm/add_window.c for an example.
        switch (atom.id) {
        case Atom.WM_HINTS_ID:
            // TODO any action?
            client.wm_hints();
            break;
        case Atom.WM_NORMAL_HINTS_ID:
            // TODO any action?
            client.sizeHints = client.wm_normal_hints();
            break;
        case Atom.WM_NAME_ID:
            // Appshare: tell the remote application directly
            // so it can update the title of its GUI windows
            //System.err.println("Window name changed for window " + event.window_id());
            //System.err.println("client.wm_name = " + client.wm_name());
            if (wtl != null) {
                wtl.setWindowTitle(event.window_id(), client.wm_name());
            }
            break;
        case Atom.WM_ICON_NAME_ID:
            // fall through
        case Atom.WM_TRANSIENT_FOR_ID:
            // ignore (normal window manager should handle these)
            //logger.warning("unhandled atom: " + atom);
            // TODO: implement these
            break;
        }
    }

    private void createNotify(CreateNotify event) {
        X11Client client = (X11Client)X11Client.intern(this, event.window_id());
        client.createNotify();
    }

    /**
     * Asks the client that owns the given window to delete it.
     *
     * @param wid the X window ID of the window to delete
     */
    public void deleteWindow (int wid) {
        X11Client client = (X11Client) X11Client.intern(this, wid);
        if (client != null) {
            client.delete();
            // TODO: LG bug 44: for some reason we need to do this twice to destroy
            // the window
            client.delete();
        }
    }

    private void mapRequest(MapRequest event) {
        // client asks to change window state from withdrawn to normal/iconic,
        // or from iconic to normal
        X11Client client = (X11Client)X11Client.intern(this, event.window_id());

        // just ignore MapRequest if we already process one before.
        // Note: this condition will return false if we unmap this window.
        if(client.isMappedBefore()) {
            return;
        }

        /* THIS LG CODE HAS BEEN DELETED FROM WONDERLAND. IT IS NOT NEEDED.
        // Initialize pseudoRootWindow lazily so that FoundationWinSys
        // gets initialized completely.
        if (pseudoRootWindow == null) {
            initializePseudoRootWindow();
        }
        */

        // just ignore MapRequest if we already process one before.
        // Note: this condition will return false if we unmap this window.
        if(client.isMappedBefore()) {
            return;
        }

        Atom type = X11WindowManagerHints.getNetWindowType(display, client);
        if (type != null) {
            X11WindowManagerHints.setNetWindowType(display, client, type);
            // check if window is supported. we don't map it if not supported.
            if (!X11WindowManagerHints.isSupportedWinType(display, client)) {
                return;
            }
            X11WindowManagerHints.setNetWmState(display, client);
            X11WindowManagerHints.setNetAllowedActions(display, client);
            X11WindowManagerHints.setWindowFeatures(display, client);
        }

        client.geometry();

        // Get override_redirect and map_state.
        if (client.attributes == null) {
            client.attributes = client.attributes ();
        }

        Window.WMHints wmHints = client.wm_hints();

        // listen to PropertyNotify and EnterNotify
        // Check the input member of the WMHints property to see if the
        // WM should manage focus for the window.
        //
        // TODO: According to the version of the ICCCM in Schiefler and Gettys
        // "The X11 Window System", 3rd ed. the input member is not valid
        // unless INPUT_HINT_MASK is set in the flags. However, not all
        // applications set this flag. For example, xterm and emacs set the
        // flag but gnome-terminal and mozilla do not! I checked how twm deals
        // with this and twm completely ignores the flags word and just looks
        // at the input word. Following the twm approach appears to work but
        // it is disturbing that this doesn't match the ICCCM spec and manifest
        // app behavior. We should dig deeper into this if time permits.
        if ((wmHints != null) && (wmHints.input())) {
            client.select_input(
                // ICCCM properties (wm name, hints, normal hints)
                Event.PROPERTY_CHANGE_MASK
                | Event.ENTER_WINDOW_MASK);
        } else {
            // no input focus will be tracked.
            client.select_input(
                // ICCCM properties (wm name, hints, normal hints)
                Event.PROPERTY_CHANGE_MASK);
        }

        // assume NORMAL if initial_state not specified
        if (wmHints == null
            || (wmHints.flags () & Window.WMHints.STATE_HINT_MASK) == 0
            || wmHints.initial_state () == Window.WMHints.NORMAL) {

            /* Do not do any visible operations on the window such as focusing
             * and warping pointer, until a window is actually map, ie.
             * MapNotify.
             *
             * @see #when_map_notify(MapNotify)
             */
            client.map();
            // now mark this window as mapped (only used to ignore other MapRequests
            // we receive, if application send more than MapWindow requests, before we
            // reparent it).
            client.setMappedBefore(true);
        } else {
            // must be iconic. client can try to map the window, when Window Manager has
            // already iconified it. therefore ignore its request.
            client.state = X11Client.HIDDEN;
            client.set_wm_state(Window.WMState.ICONIC);
        }

        // assume that client work in passive input focus mode.
        // TODO: need to check WM_TAKE_FOCUS atom to know in which mode
        // client work.
        //
        // DJ: removed to fix a problem mentioned in bugid 371.
        // We don't want x windows to capture the focus as soon as they
        // are mapped. This is incompatible with the focus-follows-mouse
        // policy that the DS normally follows.
        // client.set_input_focus();

        // TODO: this was an attempt at fixing part of 210. The intent was to
        // express interest in ConfigureNotify and CirculateNotify events.
        // But Amir pointed out that the way I was doing this is wrong.
        // This particular code wipes out any interest expressed in EnterNotify.
        // We need to integrate this code into the above code which expresses
        // interest in EnterNotify
        // if (!client.attributes.override_redirect ()) {
        //     client.select_input(Event.STRUCTURE_NOTIFY_MASK);
        // }

        display.check_error();
    }

    private void mapNotify(MapNotify event) {
        final X11Client client = (X11Client)X11Client.intern(this, event.window_id());
        //System.err.println("MapNotify: " + client);

        if (!checkUnmapDestroyEvent(display, client)) {
            return;
        }

        // DS can map the window again - resore window from taskbar - so we didn't get
        // mapRequest and only get mapNotify event, therefore we should mark this window
        // as mapped.
        if (!client.isMappedBefore()) {
            client.setMappedBefore(true);
        }

        // Get override_redirect and map_state.
        if (client.attributes == null) {
            client.attributes = client.attributes ();
        }

        // now manage the window
        manage(client);
        client.raise();

        /* Now and only now sets the window state to NORMAL (except during
         * initialization). Setting this earlier gives false impression that
         * the window is mapped, but it does not happen until MapNotify.
         * Note window.raise () and window.map () do not guarantee the
         * visibility of a window (due to map request and configure request
         * redirection of wm). Hence, any operations that depends on visibility
         * (warp pointer and set input focus) should check window.state.
         */
        client.state = X11Client.NORMAL;
        client.set_wm_state(Window.WMState.NORMAL);
        display.check_error();

        Visual visual = screenVisual.get(client.getVisualID());
        if (client.getWinClass() == X11Client.INPUT_ONLY) {
            if (client.attributes.override_redirect()) {
                client.initWindow3DRepresentation(false, true, defaultDepth, visual);
            } else {
                // use non-decoration for this also. will be change in feature
                // TODO: implement this feature
                logger.warning("non-override redirect InputOnly not implemented yet");
            }
        } else {
            if (visual == null) {
                logger.warning("visual for ID " + client.getVisualID() + " not find" );
                return;
            }
            /* Obsolete for Wonderland
            if (visual.klass() != Visual.TRUE_COLOR) {
                logger.warning("Other than TrueColor Visual is not yet supported" );
                return;
            }
            */
            client.initWindow3DRepresentation(
                !client.attributes.override_redirect(), false, defaultDepth, visual);
        }

        // client.geometry();
        client.mapNotify();
        display.check_error();
    }

    private void unmapNotify(UnmapNotify event) {
        /* Unmapped != unmanaged, since it can be iconify-ing (or hiding
         * in our case). We unmanage a window when it is destroyed.
         *
         * @see #when_destroy_notify(DestroyNotify)
         */
        final X11Client client = (X11Client)X11Client.intern(this, event.window_id());
        //System.err.println("UnmapNotify: " + client);

        // ignore syntatic one here.
        if (event.synthetic) {
            return;
        }

        client.unmapNotify();
        display.check_error();

        if (display.checkEventTypeWindow(DestroyNotify.CODE, client.id)) {
            return;
        }

        // they withdraw it
        if (client.state != X11Client.HIDDEN) {
            /* From icccm 4.1.4: For compatibility with obsolete clients,
             * window managers should trigger the transition to the Withdrawn
             * state on the real UnmapNotify rather than waiting for the
             * synthetic one. They should also trigger the transition if they
             * receive a synthetic UnmapNotify on a window for which they have
             * not yet received a real UnmapNotify.
             *
             * Then, what's the use of synthetic UnmapNotify event?
             */
            client.state = X11Client.UNMANAGED;
            client.set_wm_state(Window.WMState.WITHDRAWN);
            client.change_save_set(true);
        }
        display.check_error();
    }

    private void destroyNotify(DestroyNotify event) {
        final X11Client client = (X11Client)X11Client.intern(this, event.window_id());
        unmanage(client);
        client.destroyNotify();
        client.state = X11Client.DESTROYED;
    }

    // Returns false if an Unmap/Destroy for this window is already queued,
    // i.e. it is no longer worth acting on the current event.
    private boolean checkUnmapDestroyEvent(Display display, X11Client client) {
        display.check_error();
        if (display.checkEventTypeWindow(DestroyNotify.CODE, client.id)
            || display.checkEventTypeWindow(UnmapNotify.CODE, client.id)) {
            return false;
        }
        return true;
    }

    /** Iconifies (hides) the given client window. No-op if already hidden. */
    public void hide(X11Client client) {
        if (client.state == X11Client.HIDDEN) {
            return;
        }

        /* Set this state to give hint to {@link
         * #when_unmap_notify(UnmapNotify)}.
         *
         * <p>Do it before <code>client.unmap ()</code>.
         */
        client.state = X11Client.HIDDEN;
        client.set_wm_state(Window.WMState.ICONIC);
        client.unmap();
    }

    /** De-iconifies the given client window. No-op if not hidden. */
    public void unhide(X11Client client) {
        if (client.state != X11Client.HIDDEN) {
            return;
        }

        /* Do not set client.state here. Do it right in
         * {@link #when_map_notify(MapNotify)}.
         */
        client.map();
    }

    /****
    public void unhide_same_class (Client client) {
        for (Iterator it=clients.iterator (); it.hasNext ();) {
            Client c = (Client) it.next ();
            if (c.state == X11Client.HIDDEN && c.class_hint != null
                && c.class_hint.class_equals (client.class_hint))
                unhide (c);
        }
    }

    public void hide_others (Client client) {
        for (Iterator it=clients.iterator (); it.hasNext ();) {
            Client c = (Client) it.next ();
            if (c.state == NORMAL && c != client) hide (c);
        }
    }

    public void hide_same_class (Client client) {
        for (Iterator it=clients.iterator (); it.hasNext ();) {
            Client c = (Client) it.next ();
            if (c.state == NORMAL && c.class_hint != null
                && c.class_hint.class_equals (client.class_hint))
                hide (c);
        }
    }

    public void grant_all_focus () {
        for (Iterator it=clients.iterator (); it.hasNext ();) {
            grant_focus ((Client) it.next ());
        }
    }

    public void grant_focus (Client client) {
        if (client.state == NO_FOCUS) client.state = NORMAL;
    }

    public void key_dump_info () {
        System.out.println ("input focus: " + focus);
        System.out.println ("mouse at: " + root.pointer ().root_position ());

        if (!argument_present) return; // `dump-basic-info'

        if (argument_negative) { // `dump-hidden-windows'"
            System.out.println ("all hidden clients: ");
            for (Iterator it=clients.iterator (); it.hasNext ();) {
                Client c = (Client) it.next ();
                if (c.state == X11Client.HIDDEN) System.out.println (c);
            }
        } else // `dump-all-windows'
            System.out.println ("all clients: " + clients);
    }
    ****/

    private static final int windowWidthMaxMargin = 0;
    private static final int windowHeightMaxMargin = 16; // FIXME

    /** @return the underlying Escher display connection (null after disconnect). */
    public Display getDisplay() {
        return display;
    }

    /**
     * Reads the WM_TRANSIENT_FOR property of the given window.
     *
     * @param wid the X window ID to query
     * @return the window ID this window is transient for, or 0 if the
     *         property is absent
     */
    public int widTransientFor (int wid) {
        final X11Client client = (X11Client)X11Client.intern(this, wid);
        Window.PropertyReply reply = client.property(false, atomWmTransientFor,
                                                     Atom.ANY_PROPERTY_TYPE, 0, 32);
        if (reply.format() == 0) {
            return 0;
        }
        gnu.x11.Enum e = reply.items();
        while (e.more()) {
            // This loop does only one iteration
            return (int) e.next4();
        }
        return 0;
    }
}
agpl-3.0
JoeCarlson/intermine
intermine/web/main/src/org/intermine/web/tags/disclosure/DisclosureTag.java
4788
package org.intermine.web.tags.disclosure;

/*
 * Copyright (C) 2002-2016 FlyMine
 *
 * This code may be freely distributed and modified under the
 * terms of the GNU Lesser General Public Licence. This should
 * be distributed with the code. See the LICENSE file for more
 * information or http://www.gnu.org/copyleft/lesser.html.
 *
 */

import java.io.IOException;
import java.util.Locale;

import javax.servlet.jsp.JspException;
import javax.servlet.jsp.JspWriter;
import javax.servlet.jsp.PageContext;

import org.intermine.web.logic.Constants;
import org.intermine.web.logic.results.WebState;

/**
 * This class renders disclosure tag. See tag library descriptor for tag description.
 * <p>
 * A disclosure is a collapsible {@code <div>} region. A disclosure of type
 * "consistent" remembers its opened/closed state across requests by consulting
 * the session-scoped {@link WebState}.
 *
 * @author Jakub Kulaviak
 */
public class DisclosureTag extends BaseDisclosureTag
{
    private static final String DEFAULT_STYLE_CLASS = "disclosure";

    /** Marker value of {@link #type} for disclosures that persist their state. */
    private static final String CONSISTENT = "consistent";

    private String id;

    // Disclosures render opened by default; see doTag() for why.
    private boolean opened = true;

    private String onClick;

    private String type = "simple";

    /**
     * Returns type of tag. At this moment is relevant only 'consistent' type.
     *
     * @return type of tag, lower-cased
     */
    public String getType() {
        // Locale.ROOT gives a locale-independent lowering; with the default
        // locale (e.g. Turkish) "CONSISTENT".toLowerCase() would not match
        // the CONSISTENT constant because of the dotless-i mapping.
        return type.toLowerCase(Locale.ROOT);
    }

    /**
     * Sets type of tag.
     *
     * @param type type
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected String getDefaultStyleClass() {
        return DEFAULT_STYLE_CLASS;
    }

    /**
     * @return additional javascript code, that should be executed on element change.
     */
    public String getOnClick() {
        return onClick;
    }

    /**
     * @param onChange additional javascript code, that should be executed on element change.
     */
    public void setOnClick(String onChange) {
        this.onClick = onChange;
    }

    /**
     * Sets element id. Disclosure tag is implemented with div, it sets div id.
     *
     * @param id element id
     */
    public void setId(String id) {
        this.id = id;
    }

    /**
     * Gets element id.
     *
     * @return element id
     */
    public String getId() {
        return id;
    }

    /**
     * Returns true if disclosure is opened else false. For a "consistent"
     * disclosure the per-user toggle state stored in the session-scoped
     * {@link WebState} takes precedence over the locally configured value.
     *
     * @return disclosure state
     */
    public boolean getOpened() {
        if (isConsistentType()) {
            // Alas, we do not have access to the session, so we can't use SessionMethods
            WebState webState = (WebState) getJspContext().getAttribute(Constants.WEB_STATE,
                    PageContext.SESSION_SCOPE);
            if (webState != null) {
                Boolean ret = webState.getToggledElements().get(getId());
                if (ret != null) {
                    return ret;
                }
            }
        }
        return opened;
    }

    /**
     * @return true if disclosure is consistent type, i.e. saves its state (opened or closed)
     * during user session
     */
    public boolean isConsistentType() {
        return getType().equals(CONSISTENT);
    }

    /**
     * Sets new state of disclosure.
     *
     * @param opened true if should be opened
     */
    public void setOpened(boolean opened) {
        this.opened = opened;
    }

    /**
     * Renders tag.
     *
     * @throws IOException if error occurs during writing to stream output
     * @throws JspException if JspException error occurs during rendering nested tags
     */
    @Override
    public void doTag() throws JspException, IOException {
        JspWriter out = getJspContext().getOut();
        out.write("<div");
        printStyleAndClass(out);
        out.write(">");
        getJspBody().invoke(null);
        // It is displayed opened and hidden (if specified) with javascript ->
        // Client browser without javascript doesn't hide the content and user can see it
        // Else he wouldn't have possibility to see the content
        if (!getOpened()) {
            printJavascriptHides(out);
        }
        out.write("</div>");
    }

    // Emits the inline script that collapses this disclosure client-side.
    private void printJavascriptHides(JspWriter out) throws IOException {
        out.write("<script type=\"text/javascript\">toggleHidden(\'");
        out.write(getId());
        out.write("\')</script>");
    }

    /**
     * Returns link switching between displayed and hidden state.
     *
     * @return link
     */
    public String getLink() {
        StringBuilder sb = new StringBuilder();
        sb.append("javascript:toggleHidden(\'");
        sb.append(getId());
        sb.append("\');");
        if (isConsistentType()) {
            // Persist the new state server-side so getOpened() sees it next request.
            sb.append("saveToggleState(\'");
            sb.append(getId());
            sb.append("\');");
        }
        if (getOnClick() != null) {
            sb.append(getOnClick());
            sb.append(";");
        }
        return sb.toString();
    }
}
lgpl-2.1
jstourac/wildfly
testsuite/integration/basic/src/test/java/org/jboss/as/test/integration/ee/concurrent/DefaultManagedExecutorServiceTestCase.java
2863
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2013, Red Hat, Inc., and individual contributors
 * as indicated by the @author tags. See the copyright.txt file in the
 * distribution for a full listing of individual contributors.
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this software; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
 */
package org.jboss.as.test.integration.ee.concurrent;

import java.util.concurrent.Callable;

import javax.naming.InitialContext;

import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.as.test.shared.integration.ejb.security.Util;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.wildfly.security.permission.ElytronPermission;

import static org.jboss.as.test.shared.integration.ejb.security.PermissionUtils.createPermissionsXmlAsset;

/**
 * Test for EE's default ManagedExecutorService
 *
 * @author Eduardo Martins
 */
@RunWith(Arquillian.class)
public class DefaultManagedExecutorServiceTestCase {

    /**
     * Builds the test war: the test case itself, the EJB under test, the
     * runnable it submits, plus the permissions.xml needed when the security
     * manager is enabled.
     */
    @Deployment
    public static WebArchive getDeployment() {
        final String archiveName = DefaultManagedExecutorServiceTestCase.class.getSimpleName() + ".war";
        final WebArchive war = ShrinkWrap.create(WebArchive.class, archiveName);
        war.addClasses(DefaultManagedExecutorServiceTestCase.class,
                DefaultManagedExecutorServiceTestEJB.class,
                TestEJBRunnable.class,
                Util.class);
        war.addAsManifestResource(
                createPermissionsXmlAsset(new ElytronPermission("getSecurityDomain"),
                        new ElytronPermission("authenticate")),
                "permissions.xml");
        return war;
    }

    /**
     * Submits a task through the default managed executor (via the test EJB)
     * while authenticated as "guest", and waits for its completion.
     */
    @Test
    public void testTaskSubmit() throws Exception {
        final Callable<Void> task = () -> {
            final String jndiName =
                    "java:module/" + DefaultManagedExecutorServiceTestEJB.class.getSimpleName();
            final DefaultManagedExecutorServiceTestEJB testEJB =
                    (DefaultManagedExecutorServiceTestEJB) new InitialContext().lookup(jndiName);
            // Block until the submitted runnable has actually executed.
            testEJB.submit(new TestEJBRunnable()).get();
            return null;
        };
        Util.switchIdentitySCF("guest", "guest", task);
    }
}
lgpl-2.1
JoeCarlson/intermine
intermine/api/main/src/org/intermine/api/query/codegen/UnhandledFeatureException.java
1575
package org.intermine.api.query.codegen; /* * Copyright (C) 2002-2016 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ /** * Exception representing the failure to encode a feature in the target * language. * @author Alex Kalderimis * */ public class UnhandledFeatureException extends Exception { private static final long serialVersionUID = 1701853337768806064L; private static final String DEFAULT_MESSAGE = "This feature is not supported in this language"; /** * Create an exception with a default message. */ public UnhandledFeatureException() { super(DEFAULT_MESSAGE); } /** * Create an exception with a given message. * @param message What to tell the user. */ public UnhandledFeatureException(String message) { super(message); } /** * Create an exception when there is something to blame. * @param cause What caused this unfortunate situation. */ public UnhandledFeatureException(Throwable cause) { super(DEFAULT_MESSAGE, cause); } /** * Create an exception when there is a message and a cause * @param message What to tell the user. * @param cause What caused this unfortunate situation. */ public UnhandledFeatureException(String message, Throwable cause) { super(message, cause); } }
lgpl-2.1
JoeCarlson/intermine
bio/webapp/src/org/intermine/bio/webservice/GenomicRegionBedService.java
2673
package org.intermine.bio.webservice;

/*
 * Copyright (C) 2002-2016 FlyMine
 *
 * This code may be freely distributed and modified under the
 * terms of the GNU Lesser General Public Licence. This should
 * be distributed with the code. See the LICENSE file for more
 * information or http://www.gnu.org/copyleft/lesser.html.
 *
 */

import java.util.Arrays;
import java.util.List;

import org.apache.commons.lang.StringUtils;
import org.intermine.api.InterMineAPI;
import org.intermine.bio.web.export.BEDExporter;
import org.intermine.bio.web.logic.SequenceFeatureExportUtil;
import org.intermine.bio.web.logic.SequenceFeatureExportUtil.InvalidQueryException;
import org.intermine.pathquery.PathQuery;
import org.intermine.web.logic.export.Exporter;
import org.intermine.webservice.server.exceptions.BadRequestException;

/**
 * Web service that exports the features of a genomic region query in BED
 * format.
 *
 * @author Alexis Kalderimis.
 *
 */
public class GenomicRegionBedService extends AbstractRegionExportService
{
    // NOTE(review): ".fasta" looks like a copy-paste from the FASTA variant of
    // this service; the suffix actually used is ".bed" (see getSuffix()).
    // Kept unchanged because the constant is protected and may be referenced
    // elsewhere — confirm before removing.
    protected static final String SUFFIX = ".fasta";

    // Request parameter: "no" disables UCSC-compatible output (default "yes").
    private static final String UCSC_COMPATIBLE = "ucscCompatible";

    // Request parameter: overrides the generated BED track description.
    private static final String TRACK_DESCRIPTION = "trackDescription";

    /**
     * Constructor.
     *
     * @param im A reference to an InterMine API settings bundle.
     */
    public GenomicRegionBedService(InterMineAPI im) {
        super(im);
    }

    /**
     * Builds the BED exporter for the given query, honouring the optional
     * "ucscCompatible" and "trackDescription" request parameters.
     *
     * @param pq the query whose results are being exported
     * @return a configured {@link BEDExporter}
     */
    @Override
    protected Exporter getExporter(PathQuery pq) {
        // Any value other than "no" (case-insensitive) means UCSC-compatible.
        boolean isUcsc = !"no".equalsIgnoreCase(getOptionalParameter(UCSC_COMPATIBLE, "yes"));
        // get the project title to be written in BED records
        String sourceName = webProperties.getProperty("project.title");
        String sourceReleaseVersion = webProperties.getProperty("project.releaseVersion");
        String descr = sourceName + " " + sourceReleaseVersion + " Custom Track";
        String trackDescription = getOptionalParameter(TRACK_DESCRIPTION, descr);
        String organisms = StringUtils.join(
                SequenceFeatureExportUtil.getOrganisms(pq, im, getPermission().getProfile()),
                ",");
        // Integer.valueOf uses the boxed-value cache; the Integer(int)
        // constructor is deprecated.
        List<Integer> indexes = Arrays.asList(Integer.valueOf(0));
        return new BEDExporter(getPrintWriter(), indexes, sourceName, organisms, isUcsc,
                trackDescription);
    }

    @Override
    protected String getContentType() {
        return "text/x-ucsc-bed";
    }

    @Override
    protected String getSuffix() {
        return ".bed";
    }

    /**
     * Rejects queries that are not valid sequence-feature queries.
     *
     * @param pq the query to validate
     * @throws Exception a {@link BadRequestException} wrapping the validation
     *         failure
     */
    @Override
    protected void checkPathQuery(PathQuery pq) throws Exception {
        try {
            SequenceFeatureExportUtil.isValidSequenceFeatureQuery(pq);
        } catch (InvalidQueryException e) {
            // Preserve the cause so the client error carries the real reason.
            throw new BadRequestException(e.getMessage(), e);
        }
    }
}
lgpl-2.1
liujed/polyglot-eclipse
examples/coffer/compiler/src/coffer/ast/CofferConstructorDecl_c.java
12420
/*
 * This file is part of the Polyglot extensible compiler framework.
 *
 * Copyright (c) 2000-2006 Polyglot project group, Cornell University
 *
 */

package coffer.ast;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

import polyglot.ast.Block;
import polyglot.ast.ConstructorDecl;
import polyglot.ast.ConstructorDecl_c;
import polyglot.ast.Formal;
import polyglot.ast.Id;
import polyglot.ast.Node;
import polyglot.ast.TypeNode;
import polyglot.types.ClassType;
import polyglot.types.Flags;
import polyglot.types.SemanticException;
import polyglot.util.CachingTransformingList;
import polyglot.util.CodeWriter;
import polyglot.util.CollectionUtil;
import polyglot.util.InternalCompilerError;
import polyglot.util.Position;
import polyglot.util.SerialVersionUID;
import polyglot.util.Transformation;
import polyglot.visit.AmbiguityRemover;
import polyglot.visit.NodeVisitor;
import polyglot.visit.PrettyPrinter;
import polyglot.visit.Translator;
import polyglot.visit.TypeBuilder;
import polyglot.visit.TypeChecker;
import coffer.types.CofferClassType;
import coffer.types.CofferConstructorInstance;
import coffer.types.CofferTypeSystem;
import coffer.types.KeySet;
import coffer.types.ThrowConstraint;

/** An implementation of the <code>CofferConstructorDecl</code> interface.
 * <code>ConstructorDecl</code> is extended with pre- and post-conditions:
 * the key set held on entry, the key set held on return, and per-exception
 * throw constraints. AST nodes here are immutable: every "setter" copies
 * the node and returns the copy.
 */
public class CofferConstructorDecl_c extends ConstructorDecl_c implements
        CofferConstructorDecl {
    private static final long serialVersionUID = SerialVersionUID.generate();

    // Keys that must be held when the constructor is entered; may be null
    // until buildTypes() fills in a canonical default.
    protected KeySetNode entryKeys;
    // Keys guaranteed to be held when the constructor returns; may be null
    // until buildTypes() fills in a canonical default.
    protected KeySetNode returnKeys;
    // One constraint per declared thrown exception (keys held when thrown).
    protected List<ThrowConstraintNode> throwConstraints;

    public CofferConstructorDecl_c(Position pos, Flags flags, Id name,
            List<Formal> formals, KeySetNode entryKeys, KeySetNode returnKeys,
            List<ThrowConstraintNode> throwConstraints, Block body) {
        // The super call gets an empty throw-types list; throw types are
        // derived from throwConstraints instead (see throwTypes()).
        super(pos, flags, name, formals, Collections.<TypeNode> emptyList(),
                body);
        this.entryKeys = entryKeys;
        this.returnKeys = returnKeys;
        // Defensive copy: callers must not be able to mutate our list.
        this.throwConstraints =
                new ArrayList<ThrowConstraintNode>(throwConstraints);
    }

    /** @return the entry key set node (may be null before type building). */
    @Override
    public KeySetNode entryKeys() {
        return this.entryKeys;
    }

    /** Returns a copy of this node with the given entry key set. */
    @Override
    public CofferConstructorDecl entryKeys(KeySetNode entryKeys) {
        CofferConstructorDecl_c n = (CofferConstructorDecl_c) copy();
        n.entryKeys = entryKeys;
        return n;
    }

    /** @return the return key set node (may be null before type building). */
    @Override
    public KeySetNode returnKeys() {
        return this.returnKeys;
    }

    /** Returns a copy of this node with the given return key set. */
    @Override
    public CofferConstructorDecl returnKeys(KeySetNode returnKeys) {
        CofferConstructorDecl_c n = (CofferConstructorDecl_c) copy();
        n.returnKeys = returnKeys;
        return n;
    }

    /** @return the throw constraints, one per declared thrown exception. */
    @Override
    public List<ThrowConstraintNode> throwConstraints() {
        return this.throwConstraints;
    }

    /**
     * Presents the throw constraints as a plain list of thrown types, as
     * required by the base ConstructorDecl interface. The view is lazy and
     * cached.
     */
    @Override
    public List<TypeNode> throwTypes() {
        return new CachingTransformingList<ThrowConstraintNode, TypeNode>(throwConstraints,
                                                                          new GetType());
    }

    /** Projects a ThrowConstraintNode to its exception type node. */
    public class GetType implements
            Transformation<ThrowConstraintNode, TypeNode> {
        @Override
        public TypeNode transform(ThrowConstraintNode tcn) {
            return tcn.type();
        }
    }

    /**
     * Unsupported: throw types cannot be set directly on a Coffer
     * constructor; use throwConstraints(List) instead.
     */
    @Override
    public ConstructorDecl throwTypes(List<TypeNode> l) {
        throw new InternalCompilerError("unimplemented");
    }

    /** Returns a copy of this node with the given throw constraints. */
    @Override
    public CofferConstructorDecl throwConstraints(
            List<ThrowConstraintNode> throwConstraints) {
        CofferConstructorDecl_c n = (CofferConstructorDecl_c) copy();
        n.throwConstraints =
                new ArrayList<ThrowConstraintNode>(throwConstraints);
        return n;
    }

    /*
    public Context enterScope(Context context) {
        CofferContext c = (CofferContext) super.enterScope(context);
        c = (CofferContext) c.pushBlock();
        if (entryKeys != null) {
            for (Iterator i = entryKeys.keys().iterator(); i.hasNext(); ) {
                Key key = (Key) i.next();
                c.addHeldKey(key);
            }
        }
        return c;
    }
    */

    /**
     * Copy-on-write reconstruction: returns this node unchanged when no
     * Coffer-specific child changed, otherwise a copy carrying the new
     * children. Delegates the base-class children to the inherited
     * reconstruct.
     */
    protected CofferConstructorDecl_c reconstruct(Id name,
            List<Formal> formals, KeySetNode entryKeys, KeySetNode returnKeys,
            List<ThrowConstraintNode> throwConstraints, Block body) {
        CofferConstructorDecl_c n = this;
        if (entryKeys != this.entryKeys || returnKeys != this.returnKeys
                || !CollectionUtil.equals(throwConstraints,
                                          this.throwConstraints)) {
            n = (CofferConstructorDecl_c) copy();
            n.entryKeys = entryKeys;
            n.returnKeys = returnKeys;
            n.throwConstraints =
                    new ArrayList<ThrowConstraintNode>(throwConstraints);
        }
        return (CofferConstructorDecl_c) reconstruct(n, name, formals,
                                                     Collections.<TypeNode> emptyList(),
                                                     body);
    }

    /** Visits all children, including the Coffer-specific key-set nodes. */
    @Override
    public Node visitChildren(NodeVisitor v) {
        Id name = visitChild(this.name, v);
        List<Formal> formals = visitList(this.formals, v);
        KeySetNode entryKeys = visitChild(this.entryKeys, v);
        KeySetNode returnKeys = visitChild(this.returnKeys, v);
        List<ThrowConstraintNode> throwConstraints =
                visitList(this.throwConstraints, v);
        Block body = visitChild(this.body, v);
        return reconstruct(name, formals, entryKeys, returnKeys,
                           throwConstraints, body);
    }

    /**
     * Fills in defaults during type building: canonical entry/return key-set
     * nodes when none were written in source, entry keys for constraints that
     * omit them, and pushes the computed key sets into the constructor
     * instance.
     */
    @Override
    public Node buildTypes(TypeBuilder tb) throws SemanticException {
        CofferNodeFactory nf = (CofferNodeFactory) tb.nodeFactory();
        CofferConstructorDecl n = (CofferConstructorDecl) super.buildTypes(tb);
        CofferConstructorInstance ci =
                (CofferConstructorInstance) n.constructorInstance();

        // Default the entry/return key-set nodes from the instance.
        if (n.entryKeys() == null) {
            n = n.entryKeys(nf.CanonicalKeySetNode(position(),
                                                   ci.entryKeys()));
        }
        if (n.returnKeys() == null) {
            n = n.returnKeys(nf.CanonicalKeySetNode(position(),
                                                    ci.returnKeys()));
        }

        // Constraints without explicit keys inherit the entry keys.
        List<ThrowConstraintNode> l = new LinkedList<ThrowConstraintNode>();
        boolean changed = false;
        for (ThrowConstraintNode cn : n.throwConstraints()) {
            if (cn.keys() == null) {
                cn = cn.keys(n.entryKeys());
                changed = true;
            }
            l.add(cn);
        }
        if (changed) {
            n = n.throwConstraints(l);
        }

        CofferTypeSystem vts = (CofferTypeSystem) tb.typeSystem();
        ClassType ct = tb.currentClass();

        KeySet entryKeys;
        KeySet returnKeys;

        if (n.entryKeys() == null) {
            entryKeys = vts.emptyKeySet(position());
        }
        else {
            entryKeys = n.entryKeys().keys();
        }

        if (n.returnKeys() == null) {
            returnKeys = vts.emptyKeySet(position());
            // By default a constructor returns holding the key of the class
            // it constructs (the "this" key), if the class is tracked.
            if (ct instanceof CofferClassType) {
                CofferClassType vct = (CofferClassType) ct;
                if (vct.key() != null)
                    returnKeys = returnKeys.add(vct.key());
            }
        }
        else {
            returnKeys = n.returnKeys().keys();
        }

        ci.setEntryKeys(entryKeys);
        ci.setReturnKeys(returnKeys);

        return n;
    }

    /**
     * Checks the key discipline for the class's own key: a constructor may
     * not already hold the key for the object it is constructing on entry,
     * and must hold it on exit.
     */
    @Override
    public Node typeCheck(TypeChecker tc) throws SemanticException {
        CofferClassType ct = (CofferClassType) tc.context().currentClass();

        CofferConstructorInstance ci =
                (CofferConstructorInstance) this.constructorInstance();

        if (ct.key() != null) {
            if (ci.entryKeys().contains(ct.key())) {
                throw new SemanticException("Constructor cannot hold key \""
                        + ct.key() + "\" (associated with " + "this) on entry.",
                                            position());
            }

            if (!ci.returnKeys().contains(ct.key())) {
                throw new SemanticException("Constructor must hold key \""
                        + ct.key() + "\" (associated with " + "this) on exit.",
                                            position());
            }
        }

        return super.typeCheck(tc);
    }

    /**
     * After disambiguation, recomputes the canonical key sets and throw
     * constraints and stores them on the constructor instance. Bails out
     * (returning this unchanged) while any referenced key set is still
     * non-canonical, so a later pass can retry.
     */
    @Override
    public Node disambiguate(AmbiguityRemover ar) throws SemanticException {
        if (this.ci.isCanonical()) {
            return this;
        }

        if (this.entryKeys != null && !this.entryKeys.keys().isCanonical()) {
            return this;
        }

        if (this.returnKeys != null && !this.returnKeys.keys().isCanonical()) {
            return this;
        }

        CofferConstructorDecl_c n =
                (CofferConstructorDecl_c) super.disambiguate(ar);

        CofferTypeSystem vts = (CofferTypeSystem) ar.typeSystem();
        ClassType ct = ar.context().currentClass();

        KeySet entryKeys;
        KeySet returnKeys;

        if (n.entryKeys == null) {
            entryKeys = vts.emptyKeySet(position());
        }
        else {
            entryKeys = n.entryKeys.keys();
        }

        if (n.returnKeys == null) {
            returnKeys = vts.emptyKeySet(position());
            // Same default as buildTypes(): return holding the class's key.
            if (ct instanceof CofferClassType) {
                CofferClassType vct = (CofferClassType) ct;
                if (vct.key() != null)
                    returnKeys = returnKeys.add(vct.key());
            }
        }
        else {
            returnKeys = n.returnKeys.keys();
        }

        CofferConstructorInstance ci = (CofferConstructorInstance) n.ci;
        ci.setEntryKeys(entryKeys);
        ci.setReturnKeys(returnKeys);

        List<ThrowConstraint> throwConstraints =
                new ArrayList<ThrowConstraint>(n.throwConstraints.size());
        for (ThrowConstraintNode cn : n.throwConstraints) {
            if (cn.constraint().keys() != null) {
                throwConstraints.add(cn.constraint());
            }
            else {
                // Constraint without explicit keys defaults to the entry keys.
                ThrowConstraint c = (ThrowConstraint) cn.constraint().copy();
                c.setKeys(entryKeys);
                throwConstraints.add(c);
            }
        }

        ci.setThrowConstraints(throwConstraints);

        return n;
    }

    /** Write the constructor to an output file. */
    @Override
    public void prettyPrintHeader(Flags flags, CodeWriter w, PrettyPrinter tr) {
        w.begin(0);
        w.write(flags.translate());

        print(name, w, tr);
        w.write("(");

        w.begin(0);

        for (Iterator<Formal> i = formals.iterator(); i.hasNext();) {
            Formal f = i.next();
            print(f, w, tr);

            if (i.hasNext()) {
                w.write(",");
                w.allowBreak(0, " ");
            }
        }

        w.end();
        w.write(")");

        // Key annotations are Coffer syntax; suppress them when translating
        // to plain Java output.
        if (!(tr instanceof Translator)) {
            if (entryKeys != null) {
                w.allowBreak(6, " ");
                print(entryKeys, w, tr);
            }

            if (returnKeys != null) {
                w.write(" -> ");
                print(returnKeys, w, tr);
            }
        }

        if (!throwConstraints.isEmpty()) {
            w.allowBreak(6);
            w.write("throws ");

            for (Iterator<ThrowConstraintNode> i = throwConstraints.iterator(); i.hasNext();) {
                ThrowConstraintNode cn = i.next();
                print(cn, w, tr);

                if (i.hasNext()) {
                    w.write(",");
                    w.allowBreak(4, " ");
                }
            }
        }

        w.end();
    }
}
lgpl-2.1
kba/lanterna
src/main/java/com/googlecode/lanterna/gui/component/Separator.java
1338
/* * This file is part of lanterna (http://code.google.com/p/lanterna/). * * lanterna is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * Copyright (C) 2010-2015 Martin */ package com.googlecode.lanterna.gui.component; import com.googlecode.lanterna.gui.TextGraphics; import com.googlecode.lanterna.gui.Theme.Category; import com.googlecode.lanterna.TerminalSize; /** * * @author Martin */ @Deprecated public class Separator extends AbstractComponent { @Override protected TerminalSize calculatePreferredSize() { return new TerminalSize(1, 1); } @Override public void repaint(TextGraphics graphics) { graphics.applyTheme(Category.DIALOG_AREA); graphics.fillArea('-'); } }
lgpl-3.0
Alfresco/alfresco-repository
src/main/java/org/alfresco/repo/virtual/ref/GetTemplatePathMethod.java
1497
/* * #%L * Alfresco Repository * %% * Copyright (C) 2005 - 2016 Alfresco Software Limited * %% * This file is part of the Alfresco software. * If the software was purchased under a paid Alfresco license, the terms of * the paid license agreement will prevail. Otherwise, the software is * provided under the following open source license terms: * * Alfresco is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Alfresco is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Alfresco. If not, see <http://www.gnu.org/licenses/>. * #L% */ package org.alfresco.repo.virtual.ref; /** * Returns the virtual folder template inner path for a virtualized entity * reference. * * @author Bogdan Horje */ public class GetTemplatePathMethod extends AbstractProtocolMethod<String> { @Override public String execute(VirtualProtocol virtualProtocol, Reference reference) throws ProtocolMethodException { String path = virtualProtocol.getTemplatePath(reference); return path; } }
lgpl-3.0
dgageot/sonarqube
sonar-batch/src/test/java/org/sonar/batch/repository/DefaultServerIssuesLoaderTest.java
3013
/*
 * SonarQube, open source software quality management tool.
 * Copyright (C) 2008-2014 SonarSource
 * mailto:contact AT sonarsource DOT com
 *
 * SonarQube is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 3 of the License, or (at your option) any later version.
 *
 * SonarQube is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */
package org.sonar.batch.repository;

import org.sonar.batch.cache.WSLoaderResult;
import org.sonar.batch.cache.WSLoader;
import com.google.common.io.ByteSource;
import com.google.common.base.Function;
import org.junit.Before;
import org.junit.Test;
import org.sonar.batch.protocol.input.BatchInput;
import org.sonar.batch.protocol.input.BatchInput.ServerIssue;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class DefaultServerIssuesLoaderTest {
  private DefaultServerIssuesLoader loader;
  private WSLoader wsLoader;

  @Before
  public void prepare() {
    wsLoader = mock(WSLoader.class);
    loader = new DefaultServerIssuesLoader(wsLoader);
  }

  /**
   * Two delimited ServerIssue messages served by the (mocked) web service
   * must be deserialized and passed, in order, to the consumer function.
   */
  @Test
  public void loadFromWs() throws Exception {
    // Serialize two issues into the byte stream the mocked source will serve.
    ByteArrayOutputStream serialized = new ByteArrayOutputStream();
    ServerIssue.newBuilder().setKey("ab1").build().writeDelimitedTo(serialized);
    ServerIssue.newBuilder().setKey("ab2").build().writeDelimitedTo(serialized);

    ByteSource issuesSource = mock(ByteSource.class);
    when(issuesSource.openBufferedStream()).thenReturn(new ByteArrayInputStream(serialized.toByteArray()));
    when(wsLoader.loadSource("/batch/issues?key=foo")).thenReturn(new WSLoaderResult<>(issuesSource, true));

    final List<ServerIssue> collected = new ArrayList<>();
    loader.load("foo", new Function<BatchInput.ServerIssue, Void>() {
      @Override
      public Void apply(ServerIssue input) {
        collected.add(input);
        return null;
      }
    });

    assertThat(collected).extracting("key").containsExactly("ab1", "ab2");
  }

  /**
   * An IOException while opening the issue stream must surface as an
   * IllegalStateException.
   */
  @Test(expected = IllegalStateException.class)
  public void testError() throws IOException {
    ByteSource failingSource = mock(ByteSource.class);
    when(failingSource.openBufferedStream()).thenThrow(IOException.class);
    when(wsLoader.loadSource("/batch/issues?key=foo")).thenReturn(new WSLoaderResult<ByteSource>(failingSource, true));

    loader.load("foo", mock(Function.class));
  }
}
lgpl-3.0
Alfresco/alfresco-repository
src/test/java/org/alfresco/repo/preference/PreferenceServiceImplTest.java
11551
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2016 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.repo.preference;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;

import java.io.Serializable;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import org.alfresco.model.ContentModel;
import org.alfresco.repo.jscript.ClasspathScriptLocation;
import org.alfresco.repo.model.Repository;
import org.alfresco.repo.security.authentication.AuthenticationUtil;
import org.alfresco.repo.security.permissions.AccessDeniedException;
import org.alfresco.repo.transaction.RetryingTransactionHelper;
import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.cmr.preference.PreferenceService;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.ContentService;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.ScriptLocation;
import org.alfresco.service.cmr.repository.ScriptService;
import org.alfresco.service.cmr.security.PersonService;
import org.alfresco.test_category.BaseSpringTestsCategory;
import org.alfresco.test_category.OwnJVMTestsCategory;
import org.alfresco.util.test.junitrules.AlfrescoPerson;
import org.alfresco.util.test.junitrules.ApplicationContextInit;
import org.alfresco.util.test.junitrules.RunAsFullyAuthenticatedRule;
import org.alfresco.util.test.junitrules.RunAsFullyAuthenticatedRule.RunAsUser;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONTokener;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.RuleChain;

/**
 * {@link PreferenceService} implementation unit test.
 *
 * @author Roy Wetherall
 * @author Neil Mc Erlean (refactoring to JUnit Rules and enabling disabled tests)
 */
@Category(BaseSpringTestsCategory.class)
public class PreferenceServiceImplTest
{
    private static final Log log = LogFactory.getLog(PreferenceServiceImplTest.class);

    // JUnit rule to initialise the default Alfresco spring configuration
    @ClassRule public static ApplicationContextInit APP_CONTEXT_INIT = new ApplicationContextInit();

    private static final String USERNAME2 = "username2";

    // Rules to create test users. Note that this class is unusual in that we do *NOT* want to reuse users across test methods.
    // These are not annotated with @Rule directly because they are chained into 'ruleChain' below,
    // which guarantees the ordering: users are created before the run-as rule authenticates.
    public AlfrescoPerson testUser1 = new AlfrescoPerson(APP_CONTEXT_INIT);
    public AlfrescoPerson testUser2 = new AlfrescoPerson(APP_CONTEXT_INIT, USERNAME2);

    // A rule to have all test methods be run as "UserOne".
    public RunAsFullyAuthenticatedRule runAsRule = new RunAsFullyAuthenticatedRule(testUser1);

    // Tie them together in a Rule Chain
    @Rule public RuleChain ruleChain = RuleChain.outerRule(testUser1)
                                               .around(testUser2)
                                               .around(runAsRule);

    // Various services, resolved once from the spring context before any test runs.
    private static ContentService CONTENT_SERVICE;
    private static PersonService PERSON_SERVICE;
    private static PreferenceService PREFERENCE_SERVICE;
    private static RetryingTransactionHelper TRANSACTION_HELPER;
    private static ScriptService SCRIPT_SERVICE;

    private static NodeRef COMPANY_HOME;

    /**
     * Looks up the service beans and the Company Home node from the spring context.
     */
    @BeforeClass
    public static void initStaticData() throws Exception
    {
        CONTENT_SERVICE = APP_CONTEXT_INIT.getApplicationContext().getBean("ContentService", ContentService.class);
        PERSON_SERVICE = APP_CONTEXT_INIT.getApplicationContext().getBean("PersonService", PersonService.class);
        PREFERENCE_SERVICE = APP_CONTEXT_INIT.getApplicationContext().getBean("PreferenceService", PreferenceService.class);
        SCRIPT_SERVICE = APP_CONTEXT_INIT.getApplicationContext().getBean("ScriptService", ScriptService.class);
        TRANSACTION_HELPER = APP_CONTEXT_INIT.getApplicationContext().getBean("retryingTransactionHelper", RetryingTransactionHelper.class);

        Repository repositoryHelper = (Repository) APP_CONTEXT_INIT.getApplicationContext().getBean("repositoryHelper");
        COMPANY_HOME = repositoryHelper.getCompanyHome();
    }

    /**
     * Exercises the full set/get/clear lifecycle of preferences for a single user,
     * including filtering by preference-name prefix.
     */
    @Test
    public void testPreferences() throws Exception
    {
        TRANSACTION_HELPER.doInTransaction(new RetryingTransactionCallback<Void>()
        {
            @Override
            public Void execute() throws Throwable
            {
                // Try and get preferences before they have been set
                Map<String, Serializable> prefs = PREFERENCE_SERVICE.getPreferences(testUser1.getUsername());
                assertNotNull(prefs);
                assertEquals(0, prefs.size());

                // Lets set some preferences for the user
                prefs = new HashMap<String, Serializable>(5);
                prefs.put("alfresco.one.alpha", "string");
                prefs.put("alfresco.one.beta", 100);
                prefs.put("alfresco.two.alpha", 3.142);
                prefs.put("alfresco.two.beta", COMPANY_HOME);
                prefs.put("alfresco.two.gamma", new Date());
                prefs.put("atTheRoot", "thisIsAtTheRoot");
                PREFERENCE_SERVICE.setPreferences(testUser1.getUsername(), prefs);

                // Preferences are persisted as JSON content on the person node; dump it for debugging.
                NodeRef personNodeRef = PERSON_SERVICE.getPerson(testUser1.getUsername());
                ContentReader reader = CONTENT_SERVICE.getReader(personNodeRef, ContentModel.PROP_PREFERENCE_VALUES);
                log.debug("JSON: \n" + prettyJson(reader.getContentString()));

                // Try and get all the preferences
                prefs = PREFERENCE_SERVICE.getPreferences(testUser1.getUsername(), null);
                assertNotNull(prefs);
                assertEquals(6, prefs.size());

                // Try and get some of the preferences (filtered by the "alfresco.two" prefix)
                prefs = PREFERENCE_SERVICE.getPreferences(testUser1.getUsername(), "alfresco.two");
                assertNotNull(prefs);
                assertEquals(3, prefs.size());

                // Clear some of the preferences
                PREFERENCE_SERVICE.clearPreferences(testUser1.getUsername(), "alfresco.two");
                prefs = PREFERENCE_SERVICE.getPreferences(testUser1.getUsername(), null);
                assertNotNull(prefs);
                assertEquals(3, prefs.size());

                // Clear all the preferences
                PREFERENCE_SERVICE.clearPreferences(testUser1.getUsername());
                prefs = PREFERENCE_SERVICE.getPreferences(testUser1.getUsername());
                assertNotNull(prefs);
                assertEquals(0, prefs.size());
                return null;
            }
        });
    }

    /**
     * Setting another user's preferences must be rejected. The method runs as USERNAME2
     * (via {@link RunAsUser}) and attempts to write testUser1's preferences.
     */
    @Test(expected = AccessDeniedException.class)
    @RunAsUser(userName = USERNAME2)
    public void testBadUser()
    {
        TRANSACTION_HELPER.doInTransaction(new RetryingTransactionCallback<Void>()
        {
            @Override
            public Void execute() throws Throwable
            {
                Map<String, Serializable> prefs = new HashMap<String, Serializable>(5);
                prefs.put("alfresco.one.alpha", "string");
                PREFERENCE_SERVICE.setPreferences(testUser1.getUsername(), prefs);
                return null;
            }
        });
    }

    /**
     * Reading another user's preferences must be rejected with {@link AccessDeniedException}.
     */
    @Test
    public void testGetOtherUserPreferences()
    {
        TRANSACTION_HELPER.doInTransaction(new RetryingTransactionCallback<Void>()
        {
            @Override
            public Void execute() throws Throwable
            {
                // Lets set some preferences for the user one
                Map<String, Serializable> prefs = new HashMap<String, Serializable>(5);
                prefs.put("alfresco.one.alpha", "string");
                prefs.put("alfresco.one.beta", 100);
                PREFERENCE_SERVICE.setPreferences(testUser1.getUsername(), prefs);

                Map<String, Serializable> userOnePrefs = PREFERENCE_SERVICE.getPreferences(testUser1.getUsername());
                assertNotNull(userOnePrefs);
                // FIX: assert on the map retrieved from the service, not on the input map we
                // built above — the original assertion (prefs.size()) could never fail.
                assertEquals(2, userOnePrefs.size());
                return null;
            }
        });

        TRANSACTION_HELPER.doInTransaction(new RetryingTransactionCallback<Void>()
        {
            @Override
            public Void execute() throws Throwable
            {
                AuthenticationUtil.setFullyAuthenticatedUser(USERNAME2);

                // This should not be possible
                try
                {
                    PREFERENCE_SERVICE.getPreferences(testUser1.getUsername());
                }
                catch (AccessDeniedException expected)
                {
                    return null;
                }
                fail("Expected exception when trying to access another user's prefs");
                return null;
            }
        });
    }

    // == Test the JavaScript API ==

    /**
     * Runs the companion JavaScript test file against the same preference service.
     */
    @Test
    public void testJSAPI() throws Exception
    {
        TRANSACTION_HELPER.doInTransaction(new RetryingTransactionCallback<Void>()
        {
            @Override
            public Void execute() throws Throwable
            {
                // This test is running as user1 and the JavaScript needs to know that.
                Map<String, Object> model = new HashMap<String, Object>();
                model.put("username1", testUser1.getUsername());
                model.put("username2", testUser2.getUsername());

                ScriptLocation location = new ClasspathScriptLocation("org/alfresco/repo/preference/script/test_preferenceService.js");
                SCRIPT_SERVICE.executeScript(location, model);
                return null;
            }
        });
    }

    /**
     * Pretty-prints a JSON string for log output; returns the input unchanged if it
     * cannot be parsed as JSON.
     */
    private String prettyJson(String jsonString)
    {
        String result = jsonString;
        try
        {
            JSONObject json = new JSONObject(new JSONTokener(jsonString));
            result = json.toString(2);
        }
        catch (JSONException ignored)
        {
            // Intentionally empty: fall back to the raw string.
        }
        return result;
    }
}
lgpl-3.0
mmadzin/clusterbench
clusterbench-ee7-ejb/src/main/java/org/jboss/test/clusterbench/ejb/stateless/RemoteStatelessSBImpl.java
913
/*
 * Copyright 2013 Radoslav Husár
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.test.clusterbench.ejb.stateless;

import javax.ejb.Stateless;
import org.jboss.test.clusterbench.common.ejb.CommonStatelessSBImpl;

/**
 * Stateless session bean exposing {@link CommonStatelessSBImpl}'s behavior through the
 * {@link RemoteStatelessSB} remote business interface. The class body is intentionally
 * empty: all behavior is inherited from the common implementation.
 *
 * @author Radoslav Husar
 * @version Dec 2011
 */
@Stateless
public class RemoteStatelessSBImpl extends CommonStatelessSBImpl implements RemoteStatelessSB {
}
apache-2.0
pax95/camel
components/camel-seda/src/main/java/org/apache/camel/component/seda/SedaProducer.java
10782
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.seda;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import org.apache.camel.AsyncCallback;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangeTimedOutException;
import org.apache.camel.ExtendedExchange;
import org.apache.camel.WaitForTaskToComplete;
import org.apache.camel.support.DefaultAsyncProducer;
import org.apache.camel.support.ExchangeHelper;
import org.apache.camel.support.SynchronizationAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Producer for the SEDA component: hands exchanges over to the endpoint's in-memory
 * {@link BlockingQueue}. Depending on the {@link WaitForTaskToComplete} mode it either
 * fire-and-forgets the exchange, or blocks (optionally with a timeout) until a consumer
 * has completed a correlated copy and the results have been copied back.
 */
public class SedaProducer extends DefaultAsyncProducer {

    private static final Logger LOG = LoggerFactory.getLogger(SedaProducer.class);

    private final SedaEndpoint endpoint;
    // When to wait for the consumer side to finish processing (Always / IfReplyExpected / Never).
    private final WaitForTaskToComplete waitForTaskToComplete;
    // Max time (ms) to wait for the reply when waiting; <= 0 means wait forever.
    private final long timeout;
    // Queue-full strategies; see addToQueue() for how they interact.
    private final boolean blockWhenFull;
    private final boolean discardWhenFull;
    private final long offerTimeout;

    public SedaProducer(SedaEndpoint endpoint, WaitForTaskToComplete waitForTaskToComplete,
                        long timeout, boolean blockWhenFull, boolean discardWhenFull, long offerTimeout) {
        super(endpoint);
        this.endpoint = endpoint;
        this.waitForTaskToComplete = waitForTaskToComplete;
        this.timeout = timeout;
        this.blockWhenFull = blockWhenFull;
        this.discardWhenFull = discardWhenFull;
        this.offerTimeout = offerTimeout;
    }

    @Override
    public boolean process(final Exchange exchange, final AsyncCallback callback) {
        // A per-exchange property may override the producer-level wait mode.
        WaitForTaskToComplete wait = waitForTaskToComplete;
        if (exchange.getProperty(Exchange.ASYNC_WAIT) != null) {
            wait = exchange.getProperty(Exchange.ASYNC_WAIT, WaitForTaskToComplete.class);
        }

        if (wait == WaitForTaskToComplete.Always
                || wait == WaitForTaskToComplete.IfReplyExpected && ExchangeHelper.isOutCapable(exchange)) {

            // do not handover the completion as we wait for the copy to complete, and copy its result back when it done
            Exchange copy = prepareCopy(exchange, false);

            // latch that waits until we are complete
            final CountDownLatch latch = new CountDownLatch(1);

            // we should wait for the reply so install a on completion so we know when its complete
            copy.adapt(ExtendedExchange.class).addOnCompletion(new SynchronizationAdapter() {
                @Override
                public void onDone(Exchange response) {
                    // check for timeout, which then already would have invoked the latch
                    if (latch.getCount() == 0) {
                        if (LOG.isTraceEnabled()) {
                            LOG.trace("{}. Timeout occurred so response will be ignored: {}", this, response.getMessage());
                        }
                        return;
                    } else {
                        if (LOG.isTraceEnabled()) {
                            LOG.trace("{} with response: {}", this, response.getMessage());
                        }
                        try {
                            // copy the consumer's result back onto the caller's exchange
                            ExchangeHelper.copyResults(exchange, response);
                        } finally {
                            // always ensure latch is triggered
                            latch.countDown();
                        }
                    }
                }

                @Override
                public boolean allowHandover() {
                    // do not allow handover as we want to seda producer to have its completion triggered
                    // at this point in the routing (at this leg), instead of at the very last (this ensure timeout is honored)
                    return false;
                }

                @Override
                public String toString() {
                    return "onDone at endpoint: " + endpoint;
                }
            });

            try {
                // do not copy as we already did the copy
                addToQueue(copy, false);
            } catch (SedaConsumerNotAvailableException e) {
                exchange.setException(e);
                callback.done(true);
                return true;
            }

            if (timeout > 0) {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Waiting for task to complete using timeout (ms): {} at [{}]", timeout, endpoint.getEndpointUri());
                }
                // lets see if we can get the task done before the timeout
                boolean done = false;
                try {
                    done = latch.await(timeout, TimeUnit.MILLISECONDS);
                } catch (InterruptedException e) {
                    // ignore
                }
                if (!done) {
                    exchange.setException(new ExchangeTimedOutException(exchange, timeout));
                    // remove timed out Exchange from queue
                    endpoint.getQueue().remove(copy);
                    // count down to indicate timeout
                    latch.countDown();
                }
            } else {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Waiting for task to complete (blocking) at [{}]", endpoint.getEndpointUri());
                }
                // no timeout then wait until its done
                try {
                    latch.await();
                } catch (InterruptedException e) {
                    // ignore
                }
            }
        } else {
            // no wait, eg its a InOnly then just add to queue and return
            try {
                addToQueue(exchange, true);
            } catch (SedaConsumerNotAvailableException e) {
                exchange.setException(e);
                callback.done(true);
                return true;
            }
        }

        // we use OnCompletion on the Exchange to callback and wait for the Exchange to be done
        // so we should just signal the callback we are done synchronously
        callback.done(true);
        return true;
    }

    /**
     * Creates a correlated copy of the exchange to route on the consumer side.
     *
     * @param exchange the original exchange
     * @param handover whether completion handlers are handed over to the copy
     */
    protected Exchange prepareCopy(Exchange exchange, boolean handover) {
        // use a new copy of the exchange to route async (and use same message id)
        // if handover we need to do special handover to avoid handing over
        // RestBindingMarshalOnCompletion as it should not be handed over with SEDA
        Exchange copy = ExchangeHelper.createCorrelatedCopy(exchange, handover, true,
                synchronization -> !synchronization.getClass().getName().contains("RestBindingMarshalOnCompletion"));
        return copy;
    }

    @Override
    protected void doStart() throws Exception {
        super.doStart();
        // register this producer with the endpoint while started
        endpoint.onStarted(this);
    }

    @Override
    protected void doStop() throws Exception {
        endpoint.onStopped(this);
        super.doStop();
    }

    /**
     * Strategy method for adding the exchange to the queue.
     * <p>
     * Will perform a blocking "put" if blockWhenFull is true, otherwise it will
     * simply add which will throw exception if the queue is full
     *
     * @param exchange the exchange to add to the queue
     * @param copy     whether to create a copy of the exchange to use for adding to the queue
     */
    protected void addToQueue(Exchange exchange, boolean copy) throws SedaConsumerNotAvailableException {
        BlockingQueue<Exchange> queue = null;
        QueueReference queueReference = endpoint.getQueueReference();
        if (queueReference != null) {
            queue = queueReference.getQueue();
        }
        if (queue == null) {
            throw new SedaConsumerNotAvailableException("No queue available on endpoint: " + endpoint, exchange);
        }

        boolean empty = !queueReference.hasConsumers();
        if (empty) {
            if (endpoint.isFailIfNoConsumers()) {
                throw new SedaConsumerNotAvailableException("No consumers available on endpoint: " + endpoint, exchange);
            } else if (endpoint.isDiscardIfNoConsumers()) {
                LOG.debug("Discard message as no active consumers on endpoint: {}", endpoint);
                return;
            }
        }

        Exchange target = exchange;

        // handover the completion so its the copy which performs that, as we do not wait
        if (copy) {
            target = prepareCopy(exchange, true);
        }

        LOG.trace("Adding Exchange to queue: {}", target);
        if (discardWhenFull) {
            // non-blocking offer; silently drop the exchange when the queue is full
            try {
                boolean added = queue.offer(target, 0, TimeUnit.MILLISECONDS);
                if (!added) {
                    LOG.trace("Discarding Exchange as queue is full: {}", target);
                }
            } catch (InterruptedException e) {
                // ignore
                LOG.debug("Offer interrupted, are we stopping? {}", isStopping() || isStopped());
            }
        } else if (blockWhenFull && offerTimeout == 0) {
            // block indefinitely until space becomes available
            try {
                queue.put(target);
            } catch (InterruptedException e) {
                // ignore
                LOG.debug("Put interrupted, are we stopping? {}", isStopping() || isStopped());
            }
        } else if (blockWhenFull && offerTimeout > 0) {
            // block up to offerTimeout ms, then fail
            try {
                boolean added = queue.offer(target, offerTimeout, TimeUnit.MILLISECONDS);
                if (!added) {
                    throw new IllegalStateException(
                            "Fails to insert element into queue, "
                                                    + "after timeout of " + offerTimeout + " milliseconds");
                }
            } catch (InterruptedException e) {
                // ignore
                LOG.debug("Offer interrupted, are we stopping? {}", isStopping() || isStopped());
            }
        } else {
            // default: add() throws IllegalStateException when the queue is full
            queue.add(target);
        }
    }
}
apache-2.0
gradle/gradle
subprojects/tooling-api/src/main/java/org/gradle/tooling/model/GradleModuleVersion.java
1064
/* * Copyright 2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.tooling.model; /** * Informs about a module version, i.e. group, name, version. * * @since 1.1 */ public interface GradleModuleVersion { /** * The group of the module, for example 'org.gradle'. */ String getGroup(); /** * The name of the module, for example 'gradle-tooling-api'. */ String getName(); /** * The version, for example '1.0'. */ String getVersion(); }
apache-2.0
HebaKhaled/bposs
src/pt_antlr/antlr/debug/GuessingEvent.java
544
package antlr.debug;

/**
 * Base class for debug events that carry the parser's guessing (syntactic
 * predicate) depth alongside the usual event source and type.
 */
public abstract class GuessingEvent extends Event {
    private int guessing;

    public GuessingEvent(Object source) {
        super(source);
    }

    public GuessingEvent(Object source, int type) {
        super(source, type);
    }

    /** This should NOT be called from anyone other than ParserEventSupport! */
    void setValues(int type, int guessing) {
        super.setValues(type);
        setGuessing(guessing);
    }

    /** Returns the guessing depth recorded on this event. */
    public int getGuessing() {
        return guessing;
    }

    /** Package-private mutator used by the event-support plumbing. */
    void setGuessing(int value) {
        this.guessing = value;
    }
}
apache-2.0
lsmall/flowable-engine
modules/flowable-form-rest/src/main/java/org/flowable/form/rest/service/api/form/FormInstanceResource.java
2939
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.form.rest.service.api.form; import javax.servlet.http.HttpServletRequest; import org.flowable.common.engine.api.FlowableObjectNotFoundException; import org.flowable.form.api.FormInstance; import org.flowable.form.api.FormService; import org.flowable.form.rest.FormRestApiInterceptor; import org.flowable.form.rest.FormRestResponseFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RestController; import io.swagger.annotations.Api; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import io.swagger.annotations.ApiResponse; import io.swagger.annotations.ApiResponses; import io.swagger.annotations.Authorization; /** * @author Yvo Swillens */ @RestController @Api(tags = { "Form Instances" }, description = "Manage Form Instances", authorizations = { @Authorization(value = "basicAuth") }) public class FormInstanceResource { @Autowired protected FormService formService; @Autowired protected FormRestResponseFactory formRestResponseFactory; @Autowired(required=false) protected FormRestApiInterceptor restApiInterceptor; @ApiOperation(value = "Get a form instance", tags = { "Form Instances" }, nickname = "getFormInstance") @ApiResponses(value = { @ApiResponse(code = 200, message = "Indicates the form 
instance was found and returned."), @ApiResponse(code = 404, message = "Indicates the requested form instance was not found.") }) @GetMapping(value = "/form/form-instance/{formInstanceId}", produces = "application/json") public FormInstanceResponse getFormInstance(@ApiParam(name = "formInstanceId") @PathVariable String formInstanceId, HttpServletRequest request) { FormInstance formInstance = formService.createFormInstanceQuery().id(formInstanceId).singleResult(); if (formInstance == null) { throw new FlowableObjectNotFoundException("Could not find a form instance"); } if (restApiInterceptor != null) { restApiInterceptor.accessFormInstanceById(formInstance); } return formRestResponseFactory.createFormInstanceResponse(formInstance); } }
apache-2.0
walteryang47/ovirt-engine
backend/manager/modules/dal/src/main/java/org/ovirt/engine/core/dao/DiskImageDaoImpl.java
13907
package org.ovirt.engine.core.dao;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import javax.inject.Named;
import javax.inject.Singleton;

import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang.StringUtils;
import org.ovirt.engine.core.common.businessentities.QuotaEnforcementTypeEnum;
import org.ovirt.engine.core.common.businessentities.storage.CinderDisk;
import org.ovirt.engine.core.common.businessentities.storage.DiskContentType;
import org.ovirt.engine.core.common.businessentities.storage.DiskImage;
import org.ovirt.engine.core.common.businessentities.storage.DiskStorageType;
import org.ovirt.engine.core.common.businessentities.storage.ImageStatus;
import org.ovirt.engine.core.common.businessentities.storage.ImageTransferPhase;
import org.ovirt.engine.core.common.businessentities.storage.StorageType;
import org.ovirt.engine.core.common.businessentities.storage.VolumeClassification;
import org.ovirt.engine.core.common.businessentities.storage.VolumeFormat;
import org.ovirt.engine.core.common.businessentities.storage.VolumeType;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.dal.dbbroker.DbFacadeUtils;
import org.ovirt.engine.core.utils.GuidUtils;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;

/**
 * <code>DiskImageDaoImpl</code> provides an implementation of {@link DiskImageDao} that uses previously
 * developed code from {@link org.ovirt.engine.core.dal.dbbroker.DbFacade}.
 *
 * Each query method delegates to a named stored procedure via the calls handler, binding
 * parameters through a {@link MapSqlParameterSource} and mapping rows with the nested
 * {@link DiskImageRowMapper} / {@link CinderDiskRowMapper}.
 */
@Named
@Singleton
public class DiskImageDaoImpl extends BaseDao implements DiskImageDao {

    /** Fetches a single disk image by its image GUID. */
    @Override
    public DiskImage get(Guid id) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("image_guid", id);
        return getCallsHandler().executeRead("GetImageByImageGuid", DiskImageRowMapper.instance, parameterSource);
    }

    /** Fetches a single snapshot image by its image GUID. */
    @Override
    public DiskImage getSnapshotById(Guid id) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("image_guid", id);
        return getCallsHandler().executeRead("GetSnapshotByGuid", DiskImageRowMapper.instance, parameterSource);
    }

    /** Lists the snapshots whose parent image is the given GUID. */
    @Override
    public List<DiskImage> getAllSnapshotsForParent(Guid id) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("parent_guid", id);
        return getCallsHandler().executeReadList("GetSnapshotByParentGuid", DiskImageRowMapper.instance, parameterSource);
    }

    /** Lists the snapshots in the chain ending at the given leaf image GUID. */
    @Override
    public List<DiskImage> getAllSnapshotsForLeaf(Guid id) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("image_guid", id);
        return getCallsHandler().executeReadList("GetSnapshotByLeafGuid", DiskImageRowMapper.instance, parameterSource);
    }

    /** Lists all snapshots stored on the given storage domain. */
    @Override
    public List<DiskImage> getAllSnapshotsForStorageDomain(Guid id) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("storage_domain_id", id);
        return getCallsHandler().executeReadList("GetSnapshotsByStorageDomainId", DiskImageRowMapper.instance, parameterSource);
    }

    /** Lists all snapshots belonging to the given VM snapshot. */
    @Override
    public List<DiskImage> getAllSnapshotsForVmSnapshot(Guid id) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("vm_snapshot_id", id);
        return getCallsHandler().executeReadList("GetSnapshotsByVmSnapshotId", DiskImageRowMapper.instance, parameterSource);
    }

    /** Fetches the single disk snapshot for the given disk within the given VM snapshot. */
    @Override
    public DiskImage getDiskSnapshotForVmSnapshot(Guid diskId, Guid vmSnapshotId) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("vm_snapshot_id", vmSnapshotId)
                .addValue("image_group_id", diskId);
        return getCallsHandler().executeRead("GetDiskSnapshotForVmSnapshot", DiskImageRowMapper.instance, parameterSource);
    }

    /** Lists all snapshots for the given image group (disk). */
    @Override
    public List<DiskImage> getAllSnapshotsForImageGroup(Guid id) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("image_group_id", id);
        return getCallsHandler().executeReadList("GetSnapshotsByImageGroupId", DiskImageRowMapper.instance, parameterSource);
    }

    /** Lists disk snapshots attached to the given VM, optionally filtered by plugged state. */
    @Override
    public List<DiskImage> getAttachedDiskSnapshotsToVm(Guid vmId, Boolean isPlugged) {
        return getCallsHandler().executeReadList("GetAttachedDiskSnapshotsToVm",
                DiskImageRowMapper.instance,
                getCustomMapSqlParameterSource().addValue("vm_guid", vmId).addValue("is_plugged", isPlugged));
    }

    /** Not supported for this DAO; always throws {@link NotImplementedException}. */
    @Override
    public List<DiskImage> getAll() {
        throw new NotImplementedException();
    }

    /** Fetches the root ancestor of the given image, unfiltered. */
    @Override
    public DiskImage getAncestor(Guid id) {
        return getAncestor(id, null, false);
    }

    /**
     * Fetches the root ancestor of the given image, optionally applying
     * user-level permission filtering.
     */
    @Override
    public DiskImage getAncestor(Guid id, Guid userID, boolean isFiltered) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("image_guid", id).addValue("user_id", userID).addValue("is_filtered", isFiltered);
        return getCallsHandler().executeRead("GetAncestralImageByImageGuid", DiskImageRowMapper.instance, parameterSource);
    }

    /** Lists images of the given VM that have no corresponding disk entity. */
    @Override
    public List<DiskImage> getImagesWithNoDisk(Guid vmId) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("vm_id", vmId);
        return getCallsHandler().executeReadList("GetImagesWhichHaveNoDisk", DiskImageRowMapper.instance, parameterSource);
    }

    /** Lists all disk images on the given storage domain. */
    @Override
    public List<DiskImage> getAllForStorageDomain(Guid storageDomainId) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("storage_domain_id", storageDomainId);
        return getCallsHandler().executeReadList("GetAllForStorageDomain", DiskImageRowMapper.instance, parameterSource);
    }

    /** Lists all disk images associated with any of the given disk profiles. */
    @Override
    public List<DiskImage> getAllForDiskProfiles(Collection<Guid> diskProfileIds) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("disk_profile_ids", createArrayOfUUIDs(diskProfileIds));
        return getCallsHandler().executeReadList("GetAllForDiskProfiles", DiskImageRowMapper.instance, parameterSource);
    }

    /**
     * Row mapper for {@link DiskImage} results. Dispatches on the disk_storage_type
     * column: IMAGE rows are mapped here, CINDER rows are delegated to
     * {@link CinderDiskRowMapper}.
     */
    protected static class DiskImageRowMapper extends AbstractDiskRowMapper<DiskImage> {

        public static final DiskImageRowMapper instance = new DiskImageRowMapper();

        private DiskImageRowMapper() {
        }

        @Override
        public DiskImage mapRow(ResultSet rs, int rowNum) throws SQLException {
            DiskImage entity = null;
            DiskStorageType diskStorageType = DiskStorageType.forValue(rs.getInt("disk_storage_type"));
            switch (diskStorageType) {
                case IMAGE:
                    entity = super.mapRow(rs, rowNum);
                    mapEntity(rs, entity);
                    break;
                case CINDER:
                    entity = CinderDiskRowMapper.instance.mapRow(rs, rowNum);
                    break;
            }
            // NOTE(review): returns null for any other storage type — presumably those
            // never reach this mapper; confirm against the stored procedures.
            return entity;
        }

        /** Populates the image-specific columns onto the entity. */
        protected void mapEntity(ResultSet rs, DiskImage entity) throws SQLException {
            entity.setCreationDate(DbFacadeUtils.fromDate(rs
                    .getTimestamp("creation_date")));
            entity.setActualSizeInBytes(rs.getLong("actual_size"));
            entity.setDescription(rs.getString("description"));
            entity.setImageId(getGuidDefaultEmpty(rs, "image_guid"));
            entity.setImageTemplateId(getGuidDefaultEmpty(rs, "it_guid"));
            entity.setSize(rs.getLong("size"));
            entity.setParentId(getGuidDefaultEmpty(rs, "ParentId"));
            entity.setImageStatus(ImageStatus.forValue(rs
                    .getInt("imageStatus")));
            entity.setLastModified(DbFacadeUtils.fromDate(rs
                    .getTimestamp("lastModified")));
            entity.setAppList(rs.getString("app_list"));
            entity.setStorageIds(GuidUtils.getGuidListFromString(rs.getString("storage_id")));
            entity.setStorageTypes(getStorageTypesList(rs.getString("storage_type")));
            entity.setStoragesNames(split(rs.getString("storage_name")));
            entity.setVmSnapshotId(getGuid(rs, "vm_snapshot_id"));
            entity.setVolumeType(VolumeType.forValue(rs
                    .getInt("volume_type")));
            entity.setvolumeFormat(VolumeFormat.forValue(rs
                    .getInt("volume_format")));
            entity.setId(getGuidDefaultEmpty(rs, "image_group_id"));
            entity.setStoragePath(split(rs.getString("storage_path")));
            entity.setStoragePoolId(getGuid(rs, "storage_pool_id"));
            entity.setBoot(rs.getBoolean("boot"));
            entity.setReadRate(rs.getInt("read_rate"));
            entity.setWriteRate(rs.getInt("write_rate"));
            entity.setContentType(rs.getBoolean("ovf_store") ? DiskContentType.OVF_STORE : DiskContentType.DATA);
            // Nullable columns: use getObject() null-checks so absent values map to null, not 0.
            entity.setImageTransferPhase(rs.getObject("image_transfer_phase") != null ?
                    ImageTransferPhase.forValue(rs.getInt("image_transfer_phase")) : null);
            entity.setImageTransferBytesSent(rs.getLong("image_transfer_bytes_sent"));
            entity.setImageTransferBytesTotal(rs.getLong("image_transfer_bytes_total"));
            entity.setReadLatency(rs.getObject("read_latency_seconds") != null ? rs.getDouble("read_latency_seconds") : null);
            entity.setWriteLatency(rs.getObject("write_latency_seconds") != null ? rs.getDouble("write_latency_seconds") : null);
            entity.setFlushLatency(rs.getObject("flush_latency_seconds") != null ? rs.getDouble("flush_latency_seconds") : null);
            entity.setActive(Boolean.TRUE.equals(rs.getObject("active")));
            entity.setQuotaIds(getGuidListFromStringPreserveAllTokens(rs.getString("quota_id")));
            entity.setQuotaNames(splitPreserveAllTokens(rs.getString("quota_name")));
            entity.setQuotaEnforcementType(QuotaEnforcementTypeEnum.forValue(rs.getInt("quota_enforcement_type")));
            entity.setDiskProfileIds(getGuidListFromStringPreserveAllTokens(rs.getString("disk_profile_id")));
            entity.setDiskProfileNames(splitPreserveAllTokens(rs.getString("disk_profile_name")));
            entity.setVolumeClassification(VolumeClassification.forValue(rs.getInt("volume_classification")));
        }

        @Override
        protected DiskImage createDiskEntity() {
            return new DiskImage();
        }

        /**
         * Parses a separator-joined string of numeric storage-type values into a list
         * of {@link StorageType}; returns null when the input splits to null.
         */
        private ArrayList<StorageType> getStorageTypesList(String storageTypesString) throws SQLException {
            List<String> splitTypes = split(storageTypesString);
            if (splitTypes == null) {
                return null;
            }

            ArrayList<StorageType> types = new ArrayList<>();
            for (String typeStr : splitTypes) {
                try {
                    types.add(StorageType.forValue(Integer.parseInt(typeStr)));
                } catch (NumberFormatException e) {
                    throw new SQLException("Could not parse disk image storage domain type " + typeStr, e);
                }
            }
            return types;
        }

        /**
         * since quota can be null, we need to preserve null in the list
         *
         * @param str separator-joined values, possibly with empty tokens
         * @return the tokens (empty tokens preserved), or null for an empty/null input
         */
        private ArrayList<String> splitPreserveAllTokens(String str) {
            if (StringUtils.isEmpty(str)) {
                return null;
            }

            return new ArrayList<String>(Arrays.asList(StringUtils.splitPreserveAllTokens(str, SEPARATOR)));
        }

        /**
         * since some disk images can contain empty quota, we need to preserve null in the list.
         *
         * @param str separator-joined GUID strings, possibly with empty tokens
         * @return the GUIDs, with null entries where a token was empty; an empty list for empty input
         */
        private ArrayList<Guid> getGuidListFromStringPreserveAllTokens(String str) {
            ArrayList<Guid> guidList = new ArrayList<Guid>();
            if (StringUtils.isEmpty(str)) {
                return new ArrayList<Guid>();
            }

            for (String guidString : splitPreserveAllTokens(str)) {
                Guid guidToAdd = null;
                if (!StringUtils.isEmpty(guidString)) {
                    guidToAdd = Guid.createGuidFromString(guidString);
                }
                guidList.add(guidToAdd);
            }
            return guidList;
        }
    }

    /**
     * Row mapper for {@link CinderDisk} rows: reuses the image mapping and then adds
     * the Cinder-specific volume type column.
     */
    protected static class CinderDiskRowMapper extends AbstractDiskRowMapper<CinderDisk> {

        public static final CinderDiskRowMapper instance = new CinderDiskRowMapper();

        private CinderDiskRowMapper() {
        }

        @Override
        public CinderDisk mapRow(ResultSet rs, int rowNum) throws SQLException {
            CinderDisk cinderDisk = super.mapRow(rs, rowNum);
            DiskImageRowMapper.instance.mapEntity(rs, cinderDisk);
            mapEntity(rs, cinderDisk);
            return cinderDisk;
        }

        /** Populates the Cinder-specific columns onto the entity. */
        private void mapEntity(ResultSet rs, CinderDisk entity) throws SQLException {
            entity.setCinderVolumeType(rs.getString("cinder_volume_type"));
        }

        @Override
        protected CinderDisk createDiskEntity() {
            return new CinderDisk();
        }
    }
}
apache-2.0
damienmg/bazel
src/main/java/com/google/devtools/build/docgen/BuildDocCollector.java
14444
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.docgen;

import static java.nio.charset.StandardCharsets.UTF_8;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Splitter;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.devtools.build.docgen.DocgenConsts.RuleType;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.RuleDefinition;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.RuleClass;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

/**
 * Class that parses the documentation fragments of rule-classes and
 * generates the html format documentation.
 */
@VisibleForTesting
public class BuildDocCollector {
  private static final Splitter SHARP_SPLITTER = Splitter.on('#').limit(2).trimResults();

  private final String productName;
  private final ConfiguredRuleClassProvider ruleClassProvider;
  private final boolean printMessages;

  public BuildDocCollector(
      String productName, ConfiguredRuleClassProvider ruleClassProvider, boolean printMessages) {
    this.productName = productName;
    this.ruleClassProvider = ruleClassProvider;
    this.printMessages = printMessages;
  }

  /**
   * Parse the file containing blacklisted rules for documentation. The list is simply a list of
   * rules separated by new lines. Line comments can be added to the file by starting them with #.
   *
   * @param blackList The name of the file containing the blacklist.
   * @return The set of blacklisted rules.
   * @throws IOException if the blacklist file cannot be read.
   */
  @VisibleForTesting
  public static Set<String> readBlackList(String blackList) throws IOException {
    Set<String> result = new HashSet<>();
    if (blackList != null && !blackList.isEmpty()) {
      File file = new File(blackList);
      try (BufferedReader reader = Files.newBufferedReader(file.toPath(), UTF_8)) {
        for (String line = reader.readLine(); line != null; line = reader.readLine()) {
          // Everything after a '#' is a comment; SHARP_SPLITTER also trims whitespace.
          String rule = SHARP_SPLITTER.split(line).iterator().next();
          if (!rule.isEmpty()) {
            result.add(rule);
          }
        }
      }
    }
    return result;
  }

  /**
   * Creates a map of rule names (keys) to rule documentation (values).
   *
   * <p>This method crawls the specified input directories for rule class definitions (as Java
   * source files) which contain the rules' and attributes' definitions as comments in a
   * specific format. The keys in the returned Map correspond to these rule classes.
   *
   * <p>In the Map's values, all references pointing to other rules, rule attributes, and general
   * documentation (e.g. common definitions, make variables, etc.) are expanded into hyperlinks.
   * The links generated follow either the multi-page or single-page Build Encyclopedia model
   * depending on the mode set for the provided {@link RuleLinkExpander}.
   *
   * @param inputDirs list of directories to scan for documentation
   * @param blackList specify an optional blacklist file that list some rules that should
   *                  not be listed in the output.
   * @param expander The RuleLinkExpander, which is used for expanding links in the rule doc.
   * @throws BuildEncyclopediaDocException
   * @throws IOException
   * @return Map of rule class to rule documentation.
   */
  public Map<String, RuleDocumentation> collect(
      List<String> inputDirs, String blackList, RuleLinkExpander expander)
      throws BuildEncyclopediaDocException, IOException {
    // Read the blackList file
    Set<String> blacklistedRules = readBlackList(blackList);
    // RuleDocumentations are generated in order (based on rule type then alphabetically).
    // The ordering is also used to determine in which rule doc the common attribute docs are
    // generated (they are generated at the first appearance).
    Map<String, RuleDocumentation> ruleDocEntries = new TreeMap<>();
    // RuleDocumentationAttribute objects equal based on attributeName so they have to be
    // collected in a List instead of a Set.
    ListMultimap<String, RuleDocumentationAttribute> attributeDocEntries =
        LinkedListMultimap.create();
    // Map of rule class name to file that defined it.
    Map<String, File> ruleClassFiles = new HashMap<>();
    // Set of files already processed. The same file may be encountered multiple times because
    // directories are processed recursively, and an input directory may be a subdirectory of
    // another one.
    Set<File> processedFiles = new HashSet<>();

    for (String inputDir : inputDirs) {
      if (printMessages) {
        System.out.println(" Processing input directory: " + inputDir);
      }
      int ruleNum = ruleDocEntries.size();
      collectDocs(processedFiles, ruleClassFiles, ruleDocEntries, blacklistedRules,
          attributeDocEntries, new File(inputDir));
      if (printMessages) {
        System.out.println(
            " " + (ruleDocEntries.size() - ruleNum) + " rule documentations found.");
      }
    }

    processAttributeDocs(ruleDocEntries.values(), attributeDocEntries);
    expander.addIndex(buildRuleIndex(ruleDocEntries.values()));
    for (RuleDocumentation rule : ruleDocEntries.values()) {
      rule.setRuleLinkExpander(expander);
    }
    return ruleDocEntries;
  }

  /**
   * Creates a map of rule names (keys) to rule documentation (values).
   *
   * <p>This method crawls the specified input directories for rule class definitions (as Java
   * source files) which contain the rules' and attributes' definitions as comments in a
   * specific format. The keys in the returned Map correspond to these rule classes.
   *
   * <p>In the Map's values, all references pointing to other rules, rule attributes, and general
   * documentation (e.g. common definitions, make variables, etc.) are expanded into hyperlinks.
   * The links generated follow the multi-page Build Encyclopedia model (one page per rule class).
   *
   * @param inputDirs list of directories to scan for documentation
   * @param blackList specify an optional blacklist file that list some rules that should
   *                  not be listed in the output.
   * @throws BuildEncyclopediaDocException
   * @throws IOException
   * @return Map of rule class to rule documentation.
   */
  public Map<String, RuleDocumentation> collect(List<String> inputDirs, String blackList)
      throws BuildEncyclopediaDocException, IOException {
    RuleLinkExpander expander = new RuleLinkExpander(productName, /* singlePage */ false);
    return collect(inputDirs, blackList, expander);
  }

  /**
   * Generates an index mapping rule name to its normalized rule family name.
   */
  private Map<String, String> buildRuleIndex(Iterable<RuleDocumentation> rules) {
    Map<String, String> index = new HashMap<>();
    for (RuleDocumentation rule : rules) {
      index.put(rule.getRuleName(), RuleFamily.normalize(rule.getRuleFamily()));
    }
    return index;
  }

  /**
   * Go through all attributes of all documented rules and search the best attribute documentation
   * if exists. The best documentation is the closest documentation in the ancestor graph. E.g. if
   * java_library.deps documented in $rule and $java_rule then the one in $java_rule is going to
   * apply since it's a closer ancestor of java_library.
   */
  private void processAttributeDocs(Iterable<RuleDocumentation> ruleDocEntries,
      ListMultimap<String, RuleDocumentationAttribute> attributeDocEntries)
      throws BuildEncyclopediaDocException {
    for (RuleDocumentation ruleDoc : ruleDocEntries) {
      RuleClass ruleClass = ruleClassProvider.getRuleClassMap().get(ruleDoc.getRuleName());
      if (ruleClass != null) {
        if (ruleClass.isDocumented()) {
          Class<? extends RuleDefinition> ruleDefinition =
              ruleClassProvider.getRuleClassDefinition(ruleDoc.getRuleName()).getClass();
          for (Attribute attribute : ruleClass.getAttributes()) {
            String attrName = attribute.getName();
            List<RuleDocumentationAttribute> attributeDocList =
                attributeDocEntries.get(attrName);
            // Note: ListMultimap.get() never returns null (it returns an empty list for
            // missing keys), so this branch is always taken; an empty list simply falls
            // through to the predefined-attribute handling below.
            if (attributeDocList != null) {
              // There are attribute docs for this attribute.
              // Search the closest one in the ancestor graph.
              // Note that there can be only one 'closest' attribute since we forbid multiple
              // inheritance of the same attribute in RuleClass.
              int minLevel = Integer.MAX_VALUE;
              RuleDocumentationAttribute bestAttributeDoc = null;
              for (RuleDocumentationAttribute attributeDoc : attributeDocList) {
                int level = attributeDoc.getDefinitionClassAncestryLevel(
                    ruleDefinition, ruleClassProvider);
                if (level >= 0 && level < minLevel) {
                  bestAttributeDoc = attributeDoc;
                  minLevel = level;
                }
              }
              if (bestAttributeDoc != null) {
                // Add reference to the Attribute that the attribute doc is associated with
                // in order to generate documentation for the Attribute.
                bestAttributeDoc.setAttribute(attribute);
                ruleDoc.addAttribute(bestAttributeDoc);
              // If there is no matching attribute doc try to add the common.
              } else if (ruleDoc.getRuleType().equals(RuleType.BINARY)
                  && PredefinedAttributes.BINARY_ATTRIBUTES.containsKey(attrName)) {
                ruleDoc.addAttribute(PredefinedAttributes.BINARY_ATTRIBUTES.get(attrName));
              } else if (ruleDoc.getRuleType().equals(RuleType.TEST)
                  && PredefinedAttributes.TEST_ATTRIBUTES.containsKey(attrName)) {
                ruleDoc.addAttribute(PredefinedAttributes.TEST_ATTRIBUTES.get(attrName));
              } else if (PredefinedAttributes.COMMON_ATTRIBUTES.containsKey(attrName)) {
                ruleDoc.addAttribute(PredefinedAttributes.COMMON_ATTRIBUTES.get(attrName));
              }
            }
          }
        }
      } else {
        throw ruleDoc.createException("Can't find RuleClass for " + ruleDoc.getRuleName());
      }
    }
  }

  /**
   * Crawls the specified inputPath and collects the raw rule and rule attribute documentation.
   *
   * <p>This method crawls the specified input directory (recursively calling itself for all
   * subdirectories) and reads each Java source file using {@link SourceFileReader} to extract the
   * raw rule and attribute documentation embedded in comments in a specific format. The extracted
   * documentation is then further processed, such as by
   * {@link BuildDocCollector#collect(List, String, RuleLinkExpander)}, in order
   * to associate each rule's documentation with its attribute documentation.
   *
   * <p>This method returns the following through its parameters: the set of Java source files
   * processed, a map of rule name to the source file it was extracted from, a map of rule name
   * to the documentation to the rule, and a multimap of attribute name to attribute documentation.
   *
   * @param processedFiles The set of Java source files that have already been processed
   *        in order to avoid reprocessing the same file.
   * @param ruleClassFiles Map of rule name to the source file it was extracted from.
   * @param ruleDocEntries Map of rule name to rule documentation.
   * @param blackList The set of blacklisted rules whose documentation should not be extracted.
   * @param attributeDocEntries Multimap of rule attribute name to attribute documentation.
   * @param inputPath The File representing the file or directory to read.
   * @throws BuildEncyclopediaDocException
   * @throws IOException
   */
  public void collectDocs(
      Set<File> processedFiles,
      Map<String, File> ruleClassFiles,
      Map<String, RuleDocumentation> ruleDocEntries,
      Set<String> blackList,
      ListMultimap<String, RuleDocumentationAttribute> attributeDocEntries,
      File inputPath) throws BuildEncyclopediaDocException, IOException {
    if (processedFiles.contains(inputPath)) {
      return;
    }

    if (inputPath.isFile()) {
      if (DocgenConsts.JAVA_SOURCE_FILE_SUFFIX.apply(inputPath.getName())) {
        SourceFileReader sfr =
            new SourceFileReader(ruleClassProvider, inputPath.getAbsolutePath());
        sfr.readDocsFromComments();
        for (RuleDocumentation d : sfr.getRuleDocEntries()) {
          String ruleName = d.getRuleName();
          if (!blackList.contains(ruleName)) {
            if (ruleDocEntries.containsKey(ruleName)
                && !ruleClassFiles.get(ruleName).equals(inputPath)) {
              System.err.printf(
                  "WARNING: '%s' from '%s' overrides value already in map from '%s'\n",
                  d.getRuleName(), inputPath, ruleClassFiles.get(ruleName));
            }
            ruleClassFiles.put(ruleName, inputPath);
            ruleDocEntries.put(ruleName, d);
          }
        }
        if (attributeDocEntries != null) {
          // Collect all attribute documentations from this file.
          attributeDocEntries.putAll(sfr.getAttributeDocEntries());
        }
      }
    } else if (inputPath.isDirectory()) {
      // listFiles() returns null on an I/O error or if inputPath ceased to be a
      // directory; skip rather than throwing an NPE.
      File[] childPaths = inputPath.listFiles();
      if (childPaths != null) {
        for (File childPath : childPaths) {
          collectDocs(processedFiles, ruleClassFiles, ruleDocEntries, blackList,
              attributeDocEntries, childPath);
        }
      }
    }

    processedFiles.add(inputPath);
  }
}
apache-2.0
esaunders/autopsy
Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchDetectorModuleSettings.java
6986
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2011-2016 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.modules.fileextmismatch;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;

/**
 * Ingest options for the file extension mismatch detection ingest module.
 *
 * NOTE: this class is Java-serialized as part of saved ingest job settings.
 * Field names, the deprecated field, and the readObject() migration logic below
 * exist to keep older serialized forms readable; do not rename or remove fields
 * without considering serialization compatibility.
 */
final class FileExtMismatchDetectorModuleSettings implements IngestModuleIngestJobSettings {

    private static final long serialVersionUID = 1L;
    // Settings-schema version; bumped whenever new fields are added (see readObject()).
    private long versionNumber;
    private boolean skipFilesWithNoExtension;
    @Deprecated
    private boolean skipFilesWithTextPlainMimeType; // No longer used, retained to maintain serialization compatibility.
    private boolean skipKnownFiles;
    private CHECK_TYPE checkType;

    /*
     * Extension mismatches can be checked for all files, for all files except
     * text files, or for media and executable files only.
     */
    enum CHECK_TYPE {
        ALL, NO_TEXT_FILES, ONLY_MEDIA_AND_EXE
    }

    /*
     * The set of the MIME types that will be checked for extension mismatches
     * when checkType is ONLY_MEDIA_AND_EXE.
     */
    static final Set<String> MEDIA_AND_EXE_MIME_TYPES = Stream.of(
            "image/bmp",
            "image/gif",
            "image/jpeg",
            "image/png",
            "image/tiff",
            "image/x-ms-bmp",
            "application/dos-exe",
            "application/exe",
            "application/x-dosexec",
            "application/x-exe",
            "application/x-msdownload",
            "application/msword",
            "application/pdf",
            "application/rtf",
            "application/vnd.ms-excel",
            "application/vnd.ms-powerpoint",
            "application/vnd.oasis.opendocument.presentation",
            "application/vnd.oasis.opendocument.spreadsheet",
            "application/vnd.oasis.opendocument.text",
            "application/x-msoffice",
            "application/x-ooxml",
            "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
            "application/vnd.openxmlformats-officedocument.wordprocessingml.template",
            "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
            "application/vnd.openxmlformats-officedocument.spreadsheetml.template",
            "application/vnd.openxmlformats-officedocument.presentationml.presentation",
            "application/vnd.openxmlformats-officedocument.presentationml.template",
            "application/vnd.openxmlformats-officedocument.presentationml.slideshow"
    ).collect(Collectors.toSet());

    /**
     * Constructs an object with the ingest options for the file extension
     * mismatch detection ingest module.
     */
    FileExtMismatchDetectorModuleSettings() {
        // Version 2 is the current settings schema (added checkType).
        this.versionNumber = 2;
        this.skipFilesWithNoExtension = true;
        this.skipKnownFiles = true;
        this.checkType = CHECK_TYPE.ONLY_MEDIA_AND_EXE;
    }

    /**
     * Gets the serialization version number.
     *
     * NOTE(review): this returns serialVersionUID (1L), not the versionNumber
     * field (2) used by readObject() for settings migration. This appears to
     * follow the convention used by other Autopsy ingest settings classes for
     * the IngestModuleIngestJobSettings interface — TODO confirm it is
     * intentional rather than a copy of the wrong field.
     *
     * @return A serialization version number.
     */
    @Override
    public long getVersionNumber() {
        return serialVersionUID;
    }

    /**
     * Sets the flag indicating whether or not files without extensions should
     * be skipped during file extension mismatch checking.
     *
     * @param skipFilesWithNoExtension The desired value of the flag.
     */
    void setSkipFilesWithNoExtension(boolean skipFilesWithNoExtension) {
        this.skipFilesWithNoExtension = skipFilesWithNoExtension;
    }

    /**
     * Gets the flag indicating whether or not files without extensions should
     * be skipped during file extension mismatch checking.
     *
     * @return The flag value.
     */
    boolean skipFilesWithNoExtension() {
        return skipFilesWithNoExtension;
    }

    /**
     * Sets the flag indicating whether or not known files should be skipped
     * during file extension mismatch checking.
     *
     * @param skipKnownFiles The desired value of the flag.
     */
    void setSkipKnownFiles(boolean skipKnownFiles) {
        this.skipKnownFiles = skipKnownFiles;
    }

    /**
     * Gets the flag indicating whether or not known files should be skipped
     * during file extension mismatch checking.
     *
     * @return The flag value.
     */
    boolean skipKnownFiles() {
        return skipKnownFiles;
    }

    /**
     * Sets whether extension mismatches should be checked for all files, for
     * all files except text files, or for media and executable files only.
     *
     * @param checkType The check type.
     */
    void setCheckType(CHECK_TYPE checkType) {
        this.checkType = checkType;
    }

    /**
     * Gets whether extension mismatches should be checked for all files, for
     * all files except text files, or for media and executable files only.
     *
     * @return checkType The check type.
     */
    CHECK_TYPE getCheckType() {
        return checkType;
    }

    /**
     * Called by convention by the serialization infrastructure when
     * deserializing a FileExtMismatchDetectorModuleSettings object.
     *
     * Performs a stepwise migration of older serialized forms: version 0 -> 1
     * adds skipKnownFiles, version 1 -> 2 adds checkType. The two `if` blocks
     * intentionally chain (not else-if) so a version-0 stream is migrated all
     * the way to version 2.
     *
     * @param in The object input stream provided by the serialization
     *           infrastructure.
     *
     * @throws IOException            If there is a problem reading the
     *                                serialized data.
     * @throws ClassNotFoundException If the class definition for the serialized
     *                                data cannot be found.
     */
    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        if (0L == versionNumber) {
            /*
             * If the version number is set to the Java field default value of
             * zero, then versionNumber and skipKnownFiles are new fields.
             * Change this to the desired default value of true.
             */
            skipKnownFiles = true;
            versionNumber = 1;
        }
        if (1 == versionNumber) {
            /*
             * Set the default value of the new checkType field, it is currently
             * null.
             */
            checkType = CHECK_TYPE.ONLY_MEDIA_AND_EXE;
            versionNumber = 2;
        }
    }
}
apache-2.0
williamchengit/TestRepo
solr/core/src/java/org/apache/solr/core/PluginInfo.java
4369
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.core;

import org.apache.solr.common.util.NamedList;
import org.apache.solr.util.DOMUtil;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import java.util.*;

import static java.util.Collections.unmodifiableList;
import static java.util.Collections.unmodifiableMap;

/**
 * An Object which represents a Plugin of any type
 *
 */
public class PluginInfo {
  public final String name, className, type;
  public final NamedList initArgs;
  public final Map<String, String> attributes;
  public final List<PluginInfo> children;

  /**
   * Constructs a PluginInfo from already-parsed pieces.
   *
   * @param type     the plugin type (usually the XML tag name)
   * @param attrs    attribute map; "name" and "class" are extracted into fields
   * @param initArgs initialization arguments for the plugin
   * @param children nested sub-plugin definitions; may be null
   */
  public PluginInfo(String type, Map<String, String> attrs, NamedList initArgs,
                    List<PluginInfo> children) {
    this.type = type;
    this.name = attrs.get("name");
    this.className = attrs.get("class");
    this.initArgs = initArgs;
    attributes = unmodifiableMap(attrs);
    this.children = children == null ? Collections.<PluginInfo>emptyList() : unmodifiableList(children);
  }

  /**
   * Constructs a PluginInfo from a DOM node.
   *
   * @param node         the XML element describing the plugin
   * @param err          error message to use when a required attribute is missing
   * @param requireName  whether the "name" attribute is mandatory
   * @param requireClass whether the "class" attribute is mandatory
   */
  public PluginInfo(Node node, String err, boolean requireName, boolean requireClass) {
    type = node.getNodeName();
    name = DOMUtil.getAttr(node, "name", requireName ? err : null);
    className = DOMUtil.getAttr(node, "class", requireClass ? err : null);
    initArgs = DOMUtil.childNodesToNamedList(node);
    attributes = unmodifiableMap(DOMUtil.toMap(node.getAttributes()));
    children = loadSubPlugins(node);
  }

  private List<PluginInfo> loadSubPlugins(Node node) {
    // Renamed from 'children' to avoid shadowing the field of the same name.
    List<PluginInfo> result = new ArrayList<>();
    //if there is another sub tag with a non namedlist tag that has to be another plugin
    NodeList nlst = node.getChildNodes();
    for (int i = 0; i < nlst.getLength(); i++) {
      Node nd = nlst.item(i);
      if (nd.getNodeType() != Node.ELEMENT_NODE) continue;
      if (NL_TAGS.contains(nd.getNodeName())) continue;
      PluginInfo pluginInfo = new PluginInfo(nd, null, false, false);
      if (pluginInfo.isEnabled()) result.add(pluginInfo);
    }
    return result.isEmpty() ? Collections.<PluginInfo>emptyList() : unmodifiableList(result);
  }

  @Override
  public String toString() {
    // Chained appends instead of string concatenation inside append();
    // the rendered output is identical.
    StringBuilder sb = new StringBuilder("{");
    if (type != null) sb.append("type = ").append(type).append(',');
    if (name != null) sb.append("name = ").append(name).append(',');
    if (className != null) sb.append("class = ").append(className).append(',');
    if (initArgs != null && initArgs.size() > 0) sb.append("args = ").append(initArgs);
    sb.append("}");
    return sb.toString();
  }

  /** A plugin is enabled unless it carries enable="false". */
  public boolean isEnabled() {
    String enable = attributes.get("enable");
    return enable == null || Boolean.parseBoolean(enable);
  }

  /** Whether this plugin is marked default="true". */
  public boolean isDefault() {
    return Boolean.parseBoolean(attributes.get("default"));
  }

  /** Returns the first child of the given type, or null if there is none. */
  public PluginInfo getChild(String type) {
    List<PluginInfo> l = getChildren(type);
    return l.isEmpty() ? null : l.get(0);
  }

  /**Filter children by type
   * @param type The type name. must not be null
   * @return The matching children
   */
  public List<PluginInfo> getChildren(String type) {
    if (children.isEmpty()) return children;
    List<PluginInfo> result = new ArrayList<>();
    for (PluginInfo child : children) if (type.equals(child.type)) result.add(child);
    return result;
  }

  public static final PluginInfo EMPTY_INFO =
      new PluginInfo("", Collections.<String, String>emptyMap(),
          new NamedList(), Collections.<PluginInfo>emptyList());

  // Tag names that belong to NamedList serialization rather than nested plugins.
  private static final HashSet<String> NL_TAGS = new HashSet<>
      (Arrays.asList("lst", "arr", "bool", "str", "int", "long", "float", "double"));
}
apache-2.0
DariusX/camel
components/camel-salesforce/camel-salesforce-component/src/test/java/org/apache/camel/component/salesforce/internal/processor/JsonRestProcessorTest.java
5265
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.salesforce.internal.processor;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.time.ZoneId;
import java.time.ZonedDateTime;

import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Message;
import org.apache.camel.component.salesforce.SalesforceComponent;
import org.apache.camel.component.salesforce.SalesforceEndpoint;
import org.apache.camel.component.salesforce.SalesforceEndpointConfig;
import org.apache.camel.component.salesforce.api.SalesforceException;
import org.apache.camel.component.salesforce.api.dto.AbstractSObjectBase;
import org.apache.camel.component.salesforce.internal.OperationName;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.support.DefaultExchange;
import org.apache.camel.support.DefaultMessage;
import org.apache.commons.io.IOUtils;
import org.junit.Test;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests JSON serialization behavior of {@link JsonRestProcessor#getRequestStream}:
 * null-field handling by default, explicit null serialization via fieldsToNull,
 * and serialization of a date-bearing SObject from an Exchange body.
 */
public class JsonRestProcessorTest {

    /** Minimal SObject with a single date field for serialization tests. */
    static class TestObject extends AbstractSObjectBase {

        private ZonedDateTime creationDate;

        public ZonedDateTime getCreationDate() {
            return creationDate;
        }

        public void setCreationDate(final ZonedDateTime creationDate) {
            this.creationDate = creationDate;
        }
    }

    /**
     * A field that is null and not listed in fieldsToNull must be omitted from
     * the JSON; only the inherited 'attributes' object (whose members are null
     * here) is serialized.
     */
    @Test
    public void byDefaultItShouldNotSerializeNullValues() throws SalesforceException, IOException {
        final SalesforceComponent salesforce = new SalesforceComponent();
        final SalesforceEndpointConfig configuration = new SalesforceEndpointConfig();
        final SalesforceEndpoint endpoint = new SalesforceEndpoint("", salesforce, configuration,
            OperationName.UPDATE_SOBJECT, "");

        final JsonRestProcessor jsonProcessor = new JsonRestProcessor(endpoint);

        final Message in = new DefaultMessage(new DefaultCamelContext());
        try (InputStream stream = jsonProcessor.getRequestStream(in, new TestObject());
             InputStreamReader reader = new InputStreamReader(stream, StandardCharsets.UTF_8)) {
            final String json = IOUtils.toString(reader);
            assertThat(json).isEqualTo("{\"attributes\":{\"referenceId\":null,\"type\":null,\"url\":null}}");
        }
    }

    /**
     * A null field explicitly named in fieldsToNull must be serialized as an
     * explicit JSON null (used by Salesforce to clear a field on update).
     */
    @Test
    public void shouldSerializeNullValues() throws SalesforceException, IOException {
        final SalesforceComponent salesforce = new SalesforceComponent();
        final SalesforceEndpointConfig configuration = new SalesforceEndpointConfig();
        final SalesforceEndpoint endpoint = new SalesforceEndpoint("", salesforce, configuration,
            OperationName.UPDATE_SOBJECT, "");

        final JsonRestProcessor jsonProcessor = new JsonRestProcessor(endpoint);

        final Message in = new DefaultMessage(new DefaultCamelContext());
        TestObject testObject = new TestObject();
        // Marking the field in fieldsToNull forces an explicit null in the output.
        testObject.getFieldsToNull().add("creationDate");
        try (InputStream stream = jsonProcessor.getRequestStream(in, testObject);
             InputStreamReader reader = new InputStreamReader(stream, StandardCharsets.UTF_8)) {
            final String json = IOUtils.toString(reader);
            assertThat(json).isEqualTo("{\"creationDate\":null,\"attributes\":{\"referenceId\":null,\"type\":null,\"url\":null}}");
        }
    }

    /**
     * Serializing from an Exchange body: the date is rendered in a
     * zone-dependent format, so only an upper bound on the output length is
     * asserted rather than an exact string.
     */
    @Test
    public void getRequestStream() throws Exception {
        final SalesforceComponent comp = new SalesforceComponent();
        final SalesforceEndpointConfig conf = new SalesforceEndpointConfig();
        final OperationName op = OperationName.CREATE_BATCH;

        final SalesforceEndpoint endpoint = new SalesforceEndpoint("", comp, conf, op, "");

        final JsonRestProcessor jsonRestProcessor = new JsonRestProcessor(endpoint);

        final DefaultCamelContext context = new DefaultCamelContext();
        final Exchange exchange = new DefaultExchange(context, ExchangePattern.InOut);

        final TestObject doc = new TestObject();
        doc.setCreationDate(ZonedDateTime.of(1717, 1, 2, 3, 4, 5, 6, ZoneId.systemDefault()));

        exchange.getIn().setBody(doc);

        try (InputStream stream = jsonRestProcessor.getRequestStream(exchange);
             InputStreamReader reader = new InputStreamReader(stream, StandardCharsets.UTF_8)) {
            final String result = IOUtils.toString(reader);
            assertThat(result.length()).isLessThanOrEqualTo(104);
        }
    }
}
apache-2.0
hsanjuan/one
src/oca/java/src/org/opennebula/client/vnet/VirtualNetwork.java
21150
/******************************************************************************* * Copyright 2002-2016, OpenNebula Project, OpenNebula Systems * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package org.opennebula.client.vnet; import org.opennebula.client.Client; import org.opennebula.client.OneResponse; import org.opennebula.client.PoolElement; import org.w3c.dom.Node; /** * This class represents an OpenNebula virtual network. * It also offers static XML-RPC call wrappers. 
*/ public class VirtualNetwork extends PoolElement{ private static final String METHOD_PREFIX = "vn."; private static final String ALLOCATE = METHOD_PREFIX + "allocate"; private static final String INFO = METHOD_PREFIX + "info"; private static final String DELETE = METHOD_PREFIX + "delete"; private static final String ADDAR = METHOD_PREFIX + "add_ar"; private static final String RMAR = METHOD_PREFIX + "rm_ar"; private static final String UPDATEAR = METHOD_PREFIX + "update_ar"; private static final String CHOWN = METHOD_PREFIX + "chown"; private static final String CHMOD = METHOD_PREFIX + "chmod"; private static final String UPDATE = METHOD_PREFIX + "update"; private static final String HOLD = METHOD_PREFIX + "hold"; private static final String RELEASE = METHOD_PREFIX + "release"; private static final String RENAME = METHOD_PREFIX + "rename"; private static final String RESERVE = METHOD_PREFIX + "reserve"; private static final String FREEAR = METHOD_PREFIX + "free_ar"; /** * Creates a new virtual network representation. * * @param id The virtual network id (nid) . * @param client XML-RPC Client. */ public VirtualNetwork(int id, Client client) { super(id, client); } /** * @see PoolElement */ protected VirtualNetwork(Node xmlElement, Client client) { super(xmlElement, client); } // ================================= // Static XML-RPC methods // ================================= /** * Allocates a new virtual network in OpenNebula. * * @param client XML-RPC Client. * @param description A string containing the template * of the virtual network. * @param clusterId The cluster ID. If it is -1, this virtual network * won't be added to any cluster. * * @return If successful the message contains the associated * id generated for this virtual network. */ public static OneResponse allocate( Client client, String description, int clusterId) { return client.call(ALLOCATE, description, clusterId); } /** * Allocates a new virtual network in OpenNebula. 
* * @param client XML-RPC Client. * @param description A string containing the template * of the virtual network. * * @return If successful the message contains the associated * id generated for this virtual network. */ public static OneResponse allocate( Client client, String description) { return allocate(client, description, -1); } /** * Retrieves the information of the given virtual network * * @param client XML-RPC Client. * @param id the virtual network id (nid) for the network to * retrieve the information from. * @return If successful the message contains the string * with the information returned by OpenNebula. */ public static OneResponse info(Client client, int id) { return client.call(INFO, id); } /** * Deletes a network from OpenNebula. * * @param client XML-RPC Client. * @param id The virtual network id (nid) of the target network. * @return A encapsulated response. */ public static OneResponse delete(Client client, int id) { return client.call(DELETE, id); } /** * Publishes or unpublishes a virtual network. * * @param client XML-RPC Client. * @param id The virtual network id (nid) of the target network. * @param publish True for publishing, false for unpublishing. * @return If successful the message contains the image id. */ public static OneResponse publish(Client client, int id, boolean publish) { int group_u = publish ? 1 : 0; return chmod(client, id, -1, -1, -1, group_u, -1, -1, -1, -1, -1); } /** * Adds an Address Range to the VirtualNetwork * * @param client XML-RPC Client. * @param id The virtual network id (nid) of the target network. * @param template AR to add, example: * AR = [ * TYPE = IP4, * IP = 192.168.0.5, * SIZE = 10 ] * * @return A encapsulated response. */ public static OneResponse addAr(Client client, int id, String template) { return client.call(ADDAR, id, template); } /** * Removes an Address Range from the VirtualNetwork * * @param client XML-RPC Client. * @param id The virtual network id (nid) of the target network. 
* @param arId Id of the Address Range to remove * @return A encapsulated response. */ public static OneResponse rmAr(Client client, int id, int arId) { return client.call(RMAR, id, arId); } /** * Upates an Address Range from the VirtualNetwork * * @param client XML-RPC Client. * @param id The virtual network id (nid) of the target network. * @param template AR to update, example: * AR = [ * AR_ID = 3, * TYPE = IP4, * IP = 192.168.0.5, * SIZE = 10 ] * * @return A encapsulated response. */ public static OneResponse updateAr(Client client, int id, String template) { return client.call(UPDATEAR, id, template); } /** * Holds a VirtualNetwork lease, marking it as used * * @param client XML-RPC Client. * @param id The virtual network id (nid) of the target network. * @param template Address to hold, examples: *<pre> * LEASES = [ IP = 192.168.0.5 ] * LEASES = [ MAC = 02:00:0a:00:00:96 ] * LEASES = [ IP = 192.168.0.5, AR_ID = 3 ] *</pre> * @return A encapsulated response. */ public static OneResponse hold(Client client, int id, String template) { return client.call(HOLD, id, template); } /** * Releases a VirtualNetwork lease on hold * * @param client XML-RPC Client. * @param id The virtual network id (nid) of the target network. * @param template Address to release, examples: *<pre> * LEASES = [ IP = 192.168.0.5 ] * LEASES = [ MAC = 02:00:0a:00:00:96 ] * LEASES = [ IP = 192.168.0.5, AR_ID = 3 ] *</pre> * @return A encapsulated response. */ public static OneResponse release(Client client, int id, String template) { return client.call(RELEASE, id, template); } /** * Changes the owner/group * * @param client XML-RPC Client. * @param id The virtual network id (nid) of the target network. * @param uid The new owner user ID. Set it to -1 to leave the current one. * @param gid The new group ID. Set it to -1 to leave the current one. * @return If an error occurs the error message contains the reason. 
*/ public static OneResponse chown(Client client, int id, int uid, int gid) { return client.call(CHOWN, id, uid, gid); } /** * Changes the VirtualNetwork permissions * * @param client XML-RPC Client. * @param id The virtual network id (nid) of the target network. * @param owner_u 1 to allow, 0 deny, -1 do not change * @param owner_m 1 to allow, 0 deny, -1 do not change * @param owner_a 1 to allow, 0 deny, -1 do not change * @param group_u 1 to allow, 0 deny, -1 do not change * @param group_m 1 to allow, 0 deny, -1 do not change * @param group_a 1 to allow, 0 deny, -1 do not change * @param other_u 1 to allow, 0 deny, -1 do not change * @param other_m 1 to allow, 0 deny, -1 do not change * @param other_a 1 to allow, 0 deny, -1 do not change * @return If an error occurs the error message contains the reason. */ public static OneResponse chmod(Client client, int id, int owner_u, int owner_m, int owner_a, int group_u, int group_m, int group_a, int other_u, int other_m, int other_a) { return chmod(client, CHMOD, id, owner_u, owner_m, owner_a, group_u, group_m, group_a, other_u, other_m, other_a); } /** * Changes the permissions * * @param client XML-RPC Client. * @param id The id of the target object. * @param octet Permissions octed , e.g. 640 * @return If an error occurs the error message contains the reason. */ public static OneResponse chmod(Client client, int id, String octet) { return chmod(client, CHMOD, id, octet); } /** * Changes the permissions * * @param client XML-RPC Client. * @param id The id of the target object. * @param octet Permissions octed , e.g. 640 * @return If an error occurs the error message contains the reason. */ public static OneResponse chmod(Client client, int id, int octet) { return chmod(client, CHMOD, id, octet); } /** * Replaces the VirtualNetwork template contents. * * @param client XML-RPC Client. * @param id The vnet id of the target vnet we want to modify. * @param new_template New template contents. 
* @param append True to append new attributes instead of replace the whole template * @return If successful the message contains the vnet id. */ public static OneResponse update(Client client, int id, String new_template, boolean append) { return client.call(UPDATE, id, new_template, append ? 1 : 0); } /** * Renames this VirtualNetwork * * @param client XML-RPC Client. * @param id The VirtualNetwork id of the target VirtualNetwork. * @param name New name for the VirtualNetwork. * @return If an error occurs the error message contains the reason. */ public static OneResponse rename(Client client, int id, String name) { return client.call(RENAME, id, name); } /** * Reserve a set of addresses from this virtual network * * @param client XML-RPC Client. * @param id The virtual network id (nid) of the target network. * @param template of the reservation. Examples: *<pre> * SIZE = 10 * * SIZE = 10 * AR_ID = 3 * NAME = "new_network" * * SIZE = 10 * IP = 192.168.10.50 * NETWORK_ID = 9 *</pre> * @return A encapsulated response. */ public static OneResponse reserve(Client client, int id, String template) { return client.call(RESERVE, id, template); } /** * Removes an Address Range from the VirtualNetwork * * @param client XML-RPC Client. * @param id The virtual network id (nid) of the target network. * @param arId Id of the Address Range to remove * @return A encapsulated response. */ public static OneResponse free(Client client, int id, int arId) { return client.call(FREEAR, id, arId); } // ================================= // Instanced object XML-RPC methods // ================================= /** * Loads the xml representation of the virtual network. * The info is also stored internally. * * @see VirtualNetwork#info(Client, int) */ public OneResponse info() { OneResponse response = info(client, id); super.processInfo(response); return response; } /** * Deletes the network from OpenNebula. * * @return A encapsulated response. 
*/ public OneResponse delete() { return delete(client, id); } /** * Publishes or unpublishes the virtual network. * * @param publish True for publishing, false for unpublishing. * @return If successful the message contains the image id. */ public OneResponse publish(boolean publish) { return publish(client, id, publish); } /** * Publishes the virtual network. * * @return If successful the message contains the image id. */ public OneResponse publish() { return publish(true); } /** * Unpublishes the virtual network. * * @return If successful the message contains the image id. */ public OneResponse unpublish() { return publish(false); } /** * Adds an Address Range to the VirtualNetwork * * @param template AR to add, example: *<pre> * AR = [ * TYPE = IP4, * IP = 192.168.0.5, * SIZE = 10 ] *</pre> * * @return A encapsulated response. */ public OneResponse addAr(String template) { return addAr(client, id, template); } /** * Removes an Address Range from the VirtualNetwork * * @param arId Id of the Address Range to remove * @return A encapsulated response. */ public OneResponse rmAr(int arId) { return rmAr(client, id, arId); } /** * Upates an Address Range from the VirtualNetwork * * @param template AR to update, example: *<pre> * AR = [ * AR_ID = 3, * TYPE = IP4, * IP = 192.168.0.5, * SIZE = 10 ] *</pre> * * @return A encapsulated response. */ public OneResponse updateAr(String template) { return updateAr(client, id, template); } /** * Holds a VirtualNetwork lease, marking it as used * * @param ip IP or MAC to hold, e.g. "192.168.0.5", "02:00:0a:00:00:96" * @return A encapsulated response. */ public OneResponse hold(String ip) { String addr_name = ip.contains(":") ? "MAC" : "IP"; String lease_template = "LEASES = [ "+addr_name+" = "+ip+"]"; return hold(client, id, lease_template); } /** * Holds a VirtualNetwork lease, marking it as used * * @param ip IP or MAC to hold, e.g. 
"192.168.0.5", "02:00:0a:00:00:96" * @param arId Id of the Address Range to hold the lease from * @return A encapsulated response. */ public OneResponse hold(String ip, int arId) { String addr_name = ip.contains(":") ? "MAC" : "IP"; String lease_template = "LEASES = [ "+addr_name+" = "+ip+", AR_ID = "+arId+" ]"; return hold(client, id, lease_template); } /** * Releases a VirtualNetwork lease on hold * * @param ip IP or MAC to hold, e.g. "192.168.0.5", "02:00:0a:00:00:96" * @return A encapsulated response. */ public OneResponse release(String ip) { String addr_name = ip.contains(":") ? "MAC" : "IP"; String lease_template = "LEASES = [ "+addr_name+" = "+ip+"]"; return release(client, id, lease_template); } /** * Releases a VirtualNetwork lease on hold * * @param ip IP or MAC to hold, e.g. "192.168.0.5", "02:00:0a:00:00:96" * @param arId Id of the Address Range to release the lease from * @return A encapsulated response. */ public OneResponse release(String ip, int arId) { String addr_name = ip.contains(":") ? "MAC" : "IP"; String lease_template = "LEASES = [ "+addr_name+" = "+ip+", AR_ID = "+arId+" ]"; return release(client, id, lease_template); } /** * Changes the owner/group * * @param uid The new owner user ID. Set it to -1 to leave the current one. * @param gid The new group ID. Set it to -1 to leave the current one. * @return If an error occurs the error message contains the reason. */ public OneResponse chown(int uid, int gid) { return chown(client, id, uid, gid); } /** * Changes the owner * * @param uid The new owner user ID. * @return If an error occurs the error message contains the reason. */ public OneResponse chown(int uid) { return chown(uid, -1); } /** * Changes the group * * @param gid The new group ID. * @return If an error occurs the error message contains the reason. 
*/ public OneResponse chgrp(int gid) { return chown(-1, gid); } /** * Changes the VirtualNetwork permissions * * @param owner_u 1 to allow, 0 deny, -1 do not change * @param owner_m 1 to allow, 0 deny, -1 do not change * @param owner_a 1 to allow, 0 deny, -1 do not change * @param group_u 1 to allow, 0 deny, -1 do not change * @param group_m 1 to allow, 0 deny, -1 do not change * @param group_a 1 to allow, 0 deny, -1 do not change * @param other_u 1 to allow, 0 deny, -1 do not change * @param other_m 1 to allow, 0 deny, -1 do not change * @param other_a 1 to allow, 0 deny, -1 do not change * @return If an error occurs the error message contains the reason. */ public OneResponse chmod(int owner_u, int owner_m, int owner_a, int group_u, int group_m, int group_a, int other_u, int other_m, int other_a) { return chmod(client, id, owner_u, owner_m, owner_a, group_u, group_m, group_a, other_u, other_m, other_a); } /** * Changes the permissions * * @param octet Permissions octed , e.g. 640 * @return If an error occurs the error message contains the reason. */ public OneResponse chmod(String octet) { return chmod(client, id, octet); } /** * Changes the permissions * * @param octet Permissions octed , e.g. 640 * @return If an error occurs the error message contains the reason. */ public OneResponse chmod(int octet) { return chmod(client, id, octet); } /** * Replaces the VirtualNetwork template contents. * * @param new_template New template contents. * @return If successful the message contains the vnet id. */ public OneResponse update(String new_template) { return update(new_template, false); } /** * Replaces the VirtualNetwork template contents. * * @param new_template New template contents. * @param append True to append new attributes instead of replace the whole template * @return If successful the message contains the vnet id. 
*/ public OneResponse update(String new_template, boolean append) { return update(client, id, new_template, append); } /** * Renames this VirtualNetwork * * @param name New name for the VirtualNetwork. * @return If an error occurs the error message contains the reason. */ public OneResponse rename(String name) { return rename(client, id, name); } /** * Reserve a set of addresses from this virtual network * * @param template of the reservation. Examples: *<pre> * SIZE = 10 * * SIZE = 10 * AR_ID = 3 * NAME = "new_network" * * SIZE = 10 * IP = 192.168.10.50 * NETWORK_ID = 9 *</pre> * @return A encapsulated response. */ public OneResponse reserve(String template) { return reserve(client, id, template); } /** * Removes an Address Range from the VirtualNetwork * * @param arId Id of the Address Range to remove * @return A encapsulated response. */ public OneResponse free(int arId) { return free(client, id, arId); } // ================================= // Helpers // ================================= }
apache-2.0
haint/jgentle
src/org/jgentleframework/services/eventservices/context/EventServicesContextImpl.java
13424
/*
 * Copyright 2007-2009 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Project: JGentleFramework
 */
package org.jgentleframework.services.eventservices.context;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map.Entry;

import org.jgentleframework.context.ComponentServiceContextType;
import org.jgentleframework.context.ServiceProvider;
import org.jgentleframework.context.injecting.Provider;
import org.jgentleframework.context.services.ServiceHandler;
import org.jgentleframework.core.handling.DefinitionManager;
import org.jgentleframework.reflection.metadata.Definition;
import org.jgentleframework.services.eventservices.EventClass;
import org.jgentleframework.services.eventservices.EventClassImpl;
import org.jgentleframework.services.eventservices.EventServicesConfig;
import org.jgentleframework.services.eventservices.EventServicesException;
import org.jgentleframework.services.eventservices.ISubscriptionFilter;
import org.jgentleframework.services.eventservices.SubscriberQueuedThread;
import org.jgentleframework.services.eventservices.annotation.Subscriber;
import org.jgentleframework.services.eventservices.objectmeta.ISubscriber;
import org.jgentleframework.services.eventservices.objectmeta.ObjectEvent;
import org.jgentleframework.services.eventservices.objectmeta.ObjectEventProxy;
import org.jgentleframework.services.eventservices.objectmeta.SubscriberImpl;
import org.jgentleframework.services.eventservices.objectmeta.Subscription;

/**
 * An implementation of the {@link EventServiceContext} interface, responsible
 * for managing and executing beans that fire and receive events.
 *
 * @author LE QUOC CHUNG - mailto: <a
 *         href="mailto:skydunkpro@yahoo.com">skydunkpro@yahoo.com</a>
 * @date Oct 27, 2007
 * @see EventServiceContext
 * @see ComponentServiceContextType
 */
public class EventServicesContextImpl<T extends EventServicesConfig> implements
        ComponentServiceContextType<T>, EventServiceContext<T> {
    /** Handler providing access to the definition manager and services. */
    private ServiceHandler serviceHandler;

    /** Config instances currently held by this EventServicesContext. */
    private ArrayList<T> configInstances;

    /** The {@link DefinitionManager} object. */
    private DefinitionManager defManager;

    /** Registered events, keyed by event name. */
    HashMap<String, EventClass> eventClassList = new HashMap<String, EventClass>();

    /** Registered event proxies. */
    ArrayList<ObjectEventProxy> eventProxyList = new ArrayList<ObjectEventProxy>();

    /** The {@link Provider} object. */
    private Provider provider;

    /** Registered subscribers, keyed by subscriber name. */
    HashMap<String, ISubscriber> subscriberList = new HashMap<String, ISubscriber>();

    /**
     * Creates and validates the context from the configured events, event
     * proxies and subscribers.
     *
     * @param eventList configured events
     * @param eventProxyList configured event proxies
     * @param subscriberList subscriber classes mapped to an optional
     *            definition ID (empty/null ID means "load from the class")
     * @param provider the {@link Provider}
     */
    public EventServicesContextImpl(ArrayList<ObjectEvent> eventList,
            ArrayList<ObjectEventProxy> eventProxyList,
            HashMap<Class<?>, String> subscriberList, Provider provider) {

        this.provider = provider;
        this.serviceHandler = provider.getServiceHandler();
        this.defManager = serviceHandler.getDefinitionManager();
        // Validate and register subscribers first: events may reference
        // subscriber names.
        this.checkSubscriberList(subscriberList);
        // Validate and register events.
        this.checkEventList(eventList);
        // Validate event proxies.
        this.checkEventProxyList(eventProxyList);
    }

    /**
     * Validates the given event list and registers each event.
     * <p>
     * For every event: the name must be unique, a subscription must exist, and
     * when no filter (string or instance) is configured, at least one
     * subscriber name is required. A filter string of the form
     * {@code "Class <fqcn>"} is resolved via {@link Class#forName(String)}; a
     * filter string containing {@code ':'} is resolved as a mapping bean.
     *
     * @param eventList the configured events
     * @throws EventServicesException if any validation step fails
     */
    private void checkEventList(ArrayList<ObjectEvent> eventList) {

        for (ObjectEvent obj : eventList) {
            String name = obj.getName();
            Subscription subscription = obj.getSubscription();
            if (this.eventClassList.containsKey(name)) {
                throw new EventServicesException("This " + name
                        + " name of event is existed.");
            }
            if (subscription == null) {
                throw new EventServicesException("Subscription of " + name
                        + " event must be not null.");
            }
            // A subscription without any filter must name its subscribers
            // explicitly.
            String filterStr = subscription.getFilterString();
            if ((filterStr == null || filterStr.isEmpty())
                    && subscription.getFilter() == null) {
                if (subscription.getSubscriberNames().size() == 0) {
                    throw new EventServicesException(
                            "Subscriber names must be defined in subscription if subscription filter is not defined.");
                }
            }
            // Resolve the filter string, if any.
            if (filterStr != null && !filterStr.isEmpty()) {
                // "Class <fqcn>" form: the filter is instantiated from an
                // explicit class name.
                if (filterStr.indexOf(" ") != -1) {
                    String[] strs = filterStr.split(" ");
                    if (!strs[0].equals("Class")) {
                        throw new EventServicesException("Filter string "
                                + filterStr + " is invalid.");
                    }
                    subscription.setFilterString(strs[1].trim());
                    Class<?> clazz;
                    try {
                        clazz = Class.forName(subscription.getFilterString());
                    }
                    catch (ClassNotFoundException e) {
                        // FIX: the original printed the stack trace and
                        // continued with a null class, causing a confusing
                        // NPE in provider.getBean below. Fail fast instead.
                        throw new EventServicesException("Filter string "
                                + filterStr + " is invalid: class "
                                + subscription.getFilterString()
                                + " could not be loaded (" + e + ")");
                    }
                    subscription.setFilter((ISubscriptionFilter) this.provider
                            .getBean(clazz));
                }
                // "<x>:<y>" form: the filter string designates a mapping bean.
                if (filterStr.indexOf(":") != -1) {
                    ISubscriptionFilter result = (ISubscriptionFilter) provider
                            .getBean(filterStr);
                    subscription.setFilter(result);
                }
            }
            // Register the event.
            EventClass event = new EventClassImpl(obj, this);
            this.eventClassList.put(name, event);
        }
    }

    /**
     * Validates the given event proxy list.
     *
     * @param eventProxyList the configured event proxies
     */
    private void checkEventProxyList(ArrayList<ObjectEventProxy> eventProxyList) {

        // TODO checkEventProxyList
    }

    /**
     * Validates the given subscriber list and registers each subscriber.
     * <p>
     * When an explicit definition ID is given, the subscriber methods are
     * taken from that definition and the subscriber source stays {@code null}
     * (the original bean resolution for this case is intentionally disabled).
     * Otherwise the class definition is loaded on demand and the source bean
     * is resolved lazily from the {@link Provider}.
     *
     * @param subscriberList subscriber classes mapped to an optional ID
     * @throws EventServicesException if an ID has no definition or a
     *             subscriber name is duplicated
     */
    private void checkSubscriberList(HashMap<Class<?>, String> subscriberList) {

        for (Entry<Class<?>, String> etr : subscriberList.entrySet()) {
            Class<?> clazz = etr.getKey();
            String ID = etr.getValue();
            // Explicit definition ID given.
            if (ID != null && !ID.isEmpty()) {
                if (!this.defManager.containsDefinition(ID)) {
                    throw new EventServicesException("Defnition with ID " + ID
                            + " is not existed.");
                }
                // NOTE(review): source bean resolution was commented out in
                // the original for the explicit-ID case, so the subscriber
                // source is registered as null here; confirm this is intended.
                registerSubscribers(this.defManager.getDefinition(ID), null);
            }
            // No explicit ID: use the class itself.
            else {
                // Load the class definition on demand.
                if (!this.defManager.containsDefinition(clazz)) {
                    this.defManager.loadDefinition(clazz);
                }
                registerSubscribers(this.defManager.getDefinition(clazz), clazz);
            }
        }
    }

    /**
     * Registers every {@link Subscriber}-annotated method of the given
     * container definition. Shared by both branches of
     * {@link #checkSubscriberList(HashMap)} (they previously duplicated this
     * logic).
     *
     * @param containerDef the definition whose method definitions are scanned
     * @param beanClass when non-null, the source bean is resolved lazily from
     *            the provider for this class; when null the source stays null
     * @throws EventServicesException if a subscriber name is already taken
     */
    private void registerSubscribers(Definition containerDef, Class<?> beanClass) {

        Object source = null;
        for (Entry<Method, Definition> entry : containerDef.getMethodDefList()
                .entrySet()) {
            Definition def = entry.getValue();
            if (!def.isAnnotationPresent(Subscriber.class)) {
                continue;
            }
            Subscriber anno = def.getAnnotation(Subscriber.class);
            String name = anno.value();
            Method method = entry.getKey();
            // Resolve the source bean at most once per container.
            if (beanClass != null && source == null) {
                source = this.provider.getBean(beanClass);
            }
            ISubscriber subscriber = new SubscriberImpl(name, method, source,
                    def);
            if (this.subscriberList.containsKey(name)) {
                throw new EventServicesException("Subscriber name " + name
                        + " is existed.");
            }
            // Queued subscribers get a dedicated consumer thread.
            if (anno.queued()) {
                subscriber.setQueueThread(new SubscriberQueuedThread(subscriber));
                Thread thread = new Thread(subscriber.getQueueThread());
                thread.start();
            }
            this.subscriberList.put(name, subscriber);
        }
    }

    /*
     * (non-Javadoc)
     * @see
     * org.jgentleframework.services.eventServices.context.EventServiceContext
     * #containsEvent(java.lang.String)
     */
    @Override
    public boolean containsEvent(String name) {

        return this.eventClassList.containsKey(name);
    }

    /*
     * (non-Javadoc)
     * @see
     * org.jgentleframework.services.eventServices.context.EventServiceContext
     * #getAoh()
     */
    public ServiceHandler getAoh() {

        return serviceHandler;
    }

    /*
     * (non-Javadoc)
     * @see
     * org.jgentleframework.services.eventServices.context.EventServiceContext
     * #getConfigInstances()
     */
    @Override
    public ArrayList<T> getConfigInstances() {

        return configInstances;
    }

    /*
     * (non-Javadoc)
     * @see
     * org.jgentleframework.services.eventServices.context.EventServiceContext
     * #getDefManager()
     */
    public DefinitionManager getDefManager() {

        return defManager;
    }

    /*
     * (non-Javadoc)
     * @see
     * org.jgentleframework.services.eventServices.context.EventServiceContext
     * #getEvent(java.lang.String)
     */
    @Override
    public EventClass getEvent(String name) {

        if (!this.eventClassList.containsKey(name)) {
            throw new EventServicesException(
                    "Does not found any event with name '" + name + "'");
        }
        return this.eventClassList.get(name);
    }

    /*
     * (non-Javadoc)
     * @see
     * org.jgentleframework.services.eventServices.context.EventServiceContext
     * #getEventClassList()
     */
    @Override
    public HashMap<String, EventClass> getEventClassList() {

        return eventClassList;
    }

    /*
     * (non-Javadoc)
     * @see
     * org.jgentleframework.services.eventServices.context.EventServiceContext
     * #getEventProxyList()
     */
    public ArrayList<ObjectEventProxy> getEventProxyList() {

        return eventProxyList;
    }

    /*
     * (non-Javadoc)
     * @see
     * org.jgentleframework.services.eventservices.context.EventServiceContext
     * #getProvider()
     */
    @Override
    public Provider getProvider() {

        return provider;
    }

    /*
     * (non-Javadoc)
     * @see
     * org.jgentleframework.services.eventServices.context.EventServiceContext
     * #getObjEventList()
     */
    public HashMap<String, EventClass> getObjEventList() {

        return this.eventClassList;
    }

    /*
     * (non-Javadoc)
     * @see
     * org.jgentleframework.services.eventServices.context.EventServiceContext
     * #getSubscriberList()
     */
    public HashMap<String, ISubscriber> getSubscriberList() {

        return subscriberList;
    }

    /*
     * (non-Javadoc)
     * @see
     * org.jgentleframework.context.ComponentServiceContext#init(org.exxlabs.
     * jgentle.context.ServiceProvider, T[])
     */
    @Override
    public void init(ServiceProvider serviceProvider,
            ArrayList<T> configInstances) {

        this.provider = serviceProvider;
        this.configInstances = configInstances;
    }

    @SuppressWarnings("unchecked")
    @Override
    public Class<T> returnClassType() {

        return (Class<T>) EventServicesConfig.class;
    }
}
apache-2.0
cgruber/dagger
java/dagger/internal/DelegateFactory.java
2076
/* * Copyright (C) 2014 The Dagger Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package dagger.internal; import static dagger.internal.Preconditions.checkNotNull; import javax.inject.Provider; /** * A DelegateFactory that is used to stitch Provider/Lazy indirection based dependency cycles. * * @since 2.0.1 */ public final class DelegateFactory<T> implements Factory<T> { private Provider<T> delegate; @Override public T get() { if (delegate == null) { throw new IllegalStateException(); } return delegate.get(); } // TODO(ronshapiro): remove this once we can reasonably expect generated code is no longer using // this method @Deprecated public void setDelegatedProvider(Provider<T> delegate) { setDelegate(this, delegate); } /** * Sets {@code delegateFactory}'s delegate provider to {@code delegate}. * * <p>{@code delegateFactory} must be an instance of {@link DelegateFactory}, otherwise this * method will throw a {@link ClassCastException}. */ public static <T> void setDelegate(Provider<T> delegateFactory, Provider<T> delegate) { checkNotNull(delegate); DelegateFactory<T> asDelegateFactory = (DelegateFactory<T>) delegateFactory; if (asDelegateFactory.delegate != null) { throw new IllegalStateException(); } asDelegateFactory.delegate = delegate; } /** * Returns the factory's delegate. * * @throws NullPointerException if the delegate has not been set */ Provider<T> getDelegate() { return checkNotNull(delegate); } }
apache-2.0
kiyoka/fuzzy-string-match
original/TestJaroWinklerDistance.java
1890
package org.apache.lucene.search.spell;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import junit.framework.TestCase;

/**
 * Checks {@link JaroWinklerDistance} against known distance values and
 * relative orderings of close/far string pairs.
 */
public class TestJaroWinklerDistance extends TestCase {

  private StringDistance sd = new JaroWinklerDistance();

  /** Asserts that {@code d} lies strictly inside the open interval (lo, hi). */
  private void assertStrictlyBetween(float d, double lo, double hi) {
    assertTrue(d > lo && d < hi);
  }

  /** Asserts that the pair {@code (a1, b1)} scores higher than {@code (a2, b2)}. */
  private void assertCloserThan(String a1, String b1, String a2, String b2) {
    assertTrue(sd.getDistance(a1, b1) > sd.getDistance(a2, b2));
  }

  public void testGetDistance() {
    // Identical strings score exactly 1.
    assertTrue(sd.getDistance("al", "al") == 1.0f);

    // Classic Jaro-Winkler reference pairs, pinned to narrow ranges.
    assertStrictlyBetween(sd.getDistance("martha", "marhta"), 0.961, 0.962);
    assertStrictlyBetween(sd.getDistance("jones", "johnson"), 0.832, 0.833);
    assertStrictlyBetween(sd.getDistance("abcvwxyz", "cabvwxyz"), 0.958, 0.959);
    assertStrictlyBetween(sd.getDistance("dwayne", "duane"), 0.84, 0.841);
    assertStrictlyBetween(sd.getDistance("dixon", "dicksonx"), 0.813, 0.814);

    // Completely dissimilar strings score exactly 0.
    assertTrue(sd.getDistance("fvie", "ten") == 0f);

    // Relative ordering: the near-misspelling beats the different name.
    assertCloserThan("zac ephron", "zac efron", "zac ephron", "kai ephron");
    assertCloserThan("brittney spears", "britney spears",
                     "brittney spears", "brittney startzman");
  }
}
apache-2.0
pascalrobert/aribaweb
src/util/src/main/java/ariba/util/core/LockHandlerConditions.java
1211
/*
    Copyright 1996-2008 Ariba, Inc.

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at
    http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.

    $Id: //ariba/platform/util/core/ariba/util/core/LockHandlerConditions.java#4 $
*/

package ariba.util.core;

/**
    Callback contract used by LockHandler.doWithLock: the handler acquires
    its lock and then invokes these methods while the lock is held.

    @aribaapi private
*/
public interface LockHandlerConditions
{
    /**
        Invoked WITH the handler's lock held.

        The continuation context is allocated by the caller of
        LockHandler.doWithLock and can be used to pass, by reference, any
        number of result data etc.

        NOTE(review): the original comment was garbled; it reads as though
        returning false signals the handler to stop/finish. Confirm the exact
        return-value semantics against LockHandler before relying on them.

        @param lockHandlerContext caller-allocated continuation context
        @return false to indicate completion (see note above)
    */
    public boolean doWithLock (LockHandlerContext lockHandlerContext);

    /**
        Invoked WITH the handler's lock held.

        @return the time, in milliseconds, to wait
    */
    public long timeoutIntervalMillis ();
}
apache-2.0
knadikari/developer-studio
common/org.wso2.developerstudio.eclipse.artifact.security/src/org/wso2/developerstudio/eclipse/security/project/utils/SecurityPolicyUtils.java
5554
/*
 * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.wso2.developerstudio.eclipse.security.project.utils;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Singleton utility that groups the supported security policy scenarios
 * (basic, advanced, registry) and maps server-side policy UUIDs to the
 * corresponding {@code SecurityPolicies} policy-type constants.
 */
public class SecurityPolicyUtils {

    // Eagerly initialized singleton: the original lazy null-check was not
    // thread-safe; eager initialization is, and is backward compatible.
    private static final SecurityPolicyUtils instance = new SecurityPolicyUtils();

    // Lookup table replacing the original 21-branch if/else equals() chain.
    // Built once; get() returns null for unknown UUIDs, matching the
    // original fall-through behavior.
    private static final Map<String, String> POLICY_TYPE_BY_UUID =
            new HashMap<String, String>();

    static {
        POLICY_TYPE_BY_UUID.put("UTOverTransport", SecurityPolicies.POLICY_TYPE_1);
        POLICY_TYPE_BY_UUID.put("SigOnly", SecurityPolicies.POLICY_TYPE_2);
        POLICY_TYPE_BY_UUID.put("SgnOnlyAnonymous", SecurityPolicies.POLICY_TYPE_3);
        POLICY_TYPE_BY_UUID.put("EncrOnlyAnonymous", SecurityPolicies.POLICY_TYPE_4);
        POLICY_TYPE_BY_UUID.put("SigEncr", SecurityPolicies.POLICY_TYPE_5);
        POLICY_TYPE_BY_UUID.put("SgnEncrAnonymous", SecurityPolicies.POLICY_TYPE_6);
        POLICY_TYPE_BY_UUID.put("EncrOnlyUsername", SecurityPolicies.POLICY_TYPE_7);
        POLICY_TYPE_BY_UUID.put("SgnEncrUsername", SecurityPolicies.POLICY_TYPE_8);
        POLICY_TYPE_BY_UUID.put("SecConSignOnly", SecurityPolicies.POLICY_TYPE_9);
        POLICY_TYPE_BY_UUID.put("SecConEncrOnly", SecurityPolicies.POLICY_TYPE_10);
        POLICY_TYPE_BY_UUID.put("SecConSgnEncr", SecurityPolicies.POLICY_TYPE_11);
        POLICY_TYPE_BY_UUID.put("SecConSignOnlyAnonymous", SecurityPolicies.POLICY_TYPE_12);
        POLICY_TYPE_BY_UUID.put("SecConEncrOnlyAnonymous", SecurityPolicies.POLICY_TYPE_13);
        POLICY_TYPE_BY_UUID.put("SecConEncrUsername", SecurityPolicies.POLICY_TYPE_14);
        POLICY_TYPE_BY_UUID.put("SecConSgnEncrUsername", SecurityPolicies.POLICY_TYPE_15);
        POLICY_TYPE_BY_UUID.put("kerberossignandencrypt", SecurityPolicies.POLICY_TYPE_16);
        POLICY_TYPE_BY_UUID.put("SAML2HoKProtection31", SecurityPolicies.POLICY_TYPE_17);
        POLICY_TYPE_BY_UUID.put("SAML11HoKProtection32", SecurityPolicies.POLICY_TYPE_18);
        POLICY_TYPE_BY_UUID.put("SigEncrSAML20Supporting33", SecurityPolicies.POLICY_TYPE_19);
        POLICY_TYPE_BY_UUID.put("SigEncrSAML11Supporting34", SecurityPolicies.POLICY_TYPE_20);
        POLICY_TYPE_BY_UUID.put("policyFromRegistry", SecurityPolicies.POLICY_TYPE_21);
    }

    private SecurityPolicyUtils() {
    }

    /**
     * Returns the shared {@link SecurityPolicyUtils} instance (thread-safe).
     */
    public static SecurityPolicyUtils getInstance() {
        return instance;
    }

    /**
     * Returns the basic security scenarios (policy types 1-4).
     * A fresh array is returned on each call.
     */
    public String[] getBasicSecurityScenarios() {
        String[] basicSecurityScenarios =
                new String[] { SecurityPolicies.POLICY_TYPE_1, SecurityPolicies.POLICY_TYPE_2,
                               SecurityPolicies.POLICY_TYPE_3, SecurityPolicies.POLICY_TYPE_4 };
        return basicSecurityScenarios;
    }

    /**
     * Returns the advanced security scenarios (policy types 5-20).
     * A fresh array is returned on each call.
     */
    public String[] getAdvancedSecurityScenarios() {
        String[] advancedSecurityScenarios =
                new String[] { SecurityPolicies.POLICY_TYPE_5, SecurityPolicies.POLICY_TYPE_6,
                               SecurityPolicies.POLICY_TYPE_7, SecurityPolicies.POLICY_TYPE_8,
                               SecurityPolicies.POLICY_TYPE_9, SecurityPolicies.POLICY_TYPE_10,
                               SecurityPolicies.POLICY_TYPE_11, SecurityPolicies.POLICY_TYPE_12,
                               SecurityPolicies.POLICY_TYPE_13, SecurityPolicies.POLICY_TYPE_14,
                               SecurityPolicies.POLICY_TYPE_15, SecurityPolicies.POLICY_TYPE_16,
                               SecurityPolicies.POLICY_TYPE_17, SecurityPolicies.POLICY_TYPE_18,
                               SecurityPolicies.POLICY_TYPE_19, SecurityPolicies.POLICY_TYPE_20 };
        return advancedSecurityScenarios;
    }

    /**
     * Returns the scenarios that require a user role to be configured.
     * A fresh mutable list is returned on each call.
     */
    public List<String> getUserRoleRequiredSecurityScenarios() {
        return new ArrayList<String>(Arrays.asList(
                SecurityPolicies.POLICY_TYPE_1, SecurityPolicies.POLICY_TYPE_7,
                SecurityPolicies.POLICY_TYPE_8, SecurityPolicies.POLICY_TYPE_14,
                SecurityPolicies.POLICY_TYPE_15));
    }

    /**
     * Returns the scenarios that require security trust (keystore) settings.
     * A fresh mutable list is returned on each call.
     */
    public List<String> getSecurityTrustRequiredSecurityScenarios() {
        return new ArrayList<String>(Arrays.asList(
                SecurityPolicies.POLICY_TYPE_9, SecurityPolicies.POLICY_TYPE_10,
                SecurityPolicies.POLICY_TYPE_11, SecurityPolicies.POLICY_TYPE_12,
                SecurityPolicies.POLICY_TYPE_13, SecurityPolicies.POLICY_TYPE_14,
                SecurityPolicies.POLICY_TYPE_15, SecurityPolicies.POLICY_TYPE_17,
                SecurityPolicies.POLICY_TYPE_18, SecurityPolicies.POLICY_TYPE_19,
                SecurityPolicies.POLICY_TYPE_20));
    }

    /**
     * Returns the registry-based security scenarios (policy type 21).
     * A fresh array is returned on each call.
     */
    public String[] getRegistrySecurityScenarios() {
        String[] registrySecurityScenarios =
                new String[] { SecurityPolicies.POLICY_TYPE_21 };
        return registrySecurityScenarios;
    }

    /**
     * Maps a policy UUID (e.g. "UTOverTransport") to its policy-type constant.
     *
     * @param policyUUID the policy UUID reported by the server
     * @return the matching policy type, or {@code null} if the UUID is
     *         unknown (or {@code null}; the original threw NPE on null input)
     */
    public String getPolicyTypeFromPolicyUUID(String policyUUID) {
        return POLICY_TYPE_BY_UUID.get(policyUUID);
    }
}
apache-2.0
baboune/compass
src/main/test/org/compass/annotations/test/converter/number1/NumberFormatGlobalConverterTests.java
1746
/* * Copyright 2004-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.compass.annotations.test.converter.number1; import org.compass.annotations.test.AbstractAnnotationsTestCase; import org.compass.core.CompassQuery; import org.compass.core.CompassQueryBuilder; import org.compass.core.CompassSession; import org.compass.core.CompassTransaction; import org.compass.core.config.CompassConfiguration; /** * @author kimchy */ public class NumberFormatGlobalConverterTests extends AbstractAnnotationsTestCase { protected void addExtraConf(CompassConfiguration conf) { conf.addClass(A.class); conf.setSetting("compass.converter.long.format", "#00000000"); } public void testGlobablLongFormat() { CompassSession session = openSession(); CompassTransaction tr = session.beginTransaction(); A a = new A(); a.id = 1; a.property = 300l; session.save(a); CompassQueryBuilder queryBuilder = session.queryBuilder(); CompassQuery query = queryBuilder.ge("property", 300L); assertEquals("property:[00000300 TO *]", query.toString()); tr.commit(); session.close(); } }
apache-2.0
linearregression/lightwave
vmafd/vmevent/interop/java/vmevent/src/test/java/com/vmware/vmevent/VmEventLogClientTest.java
557
/**
 *
 * Copyright 2013 VMware, Inc. All rights reserved.
 */
package com.vmware.vmevent;

import org.junit.Assert;
import org.junit.Test;

/**
 * Integration test for {@code VmEventClient}: verifies that a simple event
 * can be added to the event log service.
 *
 * <p>Assumes a vmevent service is reachable on {@code localhost}.
 */
public class VmEventLogClientTest {
    // Target host running the vmevent service.
    private static String _hostname = "localhost";
    private static int _eventType = 1;
    private static int _eventCategory = 1;
    private static String _eventText = "Test Event";

    @Test
    public void testAddEvent() {
        int result = VmEventClient.addEvent(_hostname, _eventType, _eventCategory, _eventText);
        // addEvent is expected to return 0 on success. JUnit's assertEquals
        // takes (expected, actual); the original call had the arguments
        // swapped, which yields a misleading "expected <result> but was <0>"
        // message on failure.
        Assert.assertEquals(0, result);
    }
}
apache-2.0
tiarebalbi/spring-boot
spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/data/couchbase/SpringBootCouchbaseReactiveDataConfiguration.java
2941
/*
 * Copyright 2012-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.data.couchbase;

import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.couchbase.config.AbstractReactiveCouchbaseDataConfiguration;
import org.springframework.data.couchbase.config.BeanNames;
import org.springframework.data.couchbase.config.CouchbaseConfigurer;
import org.springframework.data.couchbase.core.RxJavaCouchbaseTemplate;
import org.springframework.data.couchbase.core.query.Consistency;
import org.springframework.data.couchbase.repository.config.ReactiveRepositoryOperationsMapping;

/**
 * Configure Spring Data's reactive couchbase support.
 *
 * @author Alex Derkach
 */
@Configuration
// Back off entirely when the user has supplied their own reactive data
// configuration.
@ConditionalOnMissingBean(AbstractReactiveCouchbaseDataConfiguration.class)
// Only applies once core Couchbase auto-configuration has produced a
// CouchbaseConfigurer to delegate to.
@ConditionalOnBean(CouchbaseConfigurer.class)
class SpringBootCouchbaseReactiveDataConfiguration extends AbstractReactiveCouchbaseDataConfiguration {

	private final CouchbaseDataProperties properties;

	private final CouchbaseConfigurer couchbaseConfigurer;

	SpringBootCouchbaseReactiveDataConfiguration(CouchbaseDataProperties properties,
			CouchbaseConfigurer couchbaseConfigurer) {
		this.properties = properties;
		this.couchbaseConfigurer = couchbaseConfigurer;
	}

	@Override
	protected CouchbaseConfigurer couchbaseConfigurer() {
		// Delegate connection/cluster setup to the configurer built by the
		// core Couchbase auto-configuration.
		return this.couchbaseConfigurer;
	}

	@Override
	protected Consistency getDefaultConsistency() {
		// Default query consistency comes from the
		// spring.data.couchbase.consistency property.
		return this.properties.getConsistency();
	}

	// Re-declared only to attach the @ConditionalOnMissingBean backoff and the
	// well-known bean name; the implementation is inherited.
	@Override
	@ConditionalOnMissingBean(name = BeanNames.RXJAVA1_COUCHBASE_TEMPLATE)
	@Bean(name = BeanNames.RXJAVA1_COUCHBASE_TEMPLATE)
	public RxJavaCouchbaseTemplate reactiveCouchbaseTemplate() throws Exception {
		return super.reactiveCouchbaseTemplate();
	}

	// Same pattern as above: expose the inherited mapping under the standard
	// bean name unless the user already defined one.
	@Override
	@ConditionalOnMissingBean(name = BeanNames.REACTIVE_COUCHBASE_OPERATIONS_MAPPING)
	@Bean(name = BeanNames.REACTIVE_COUCHBASE_OPERATIONS_MAPPING)
	public ReactiveRepositoryOperationsMapping reactiveRepositoryOperationsMapping(
			RxJavaCouchbaseTemplate reactiveCouchbaseTemplate) throws Exception {
		return super.reactiveRepositoryOperationsMapping(reactiveCouchbaseTemplate);
	}

}
apache-2.0
hurricup/intellij-community
platform/platform-impl/src/com/intellij/ide/FileChangedNotificationProvider.java
4921
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.LogUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.io.FileAttributes;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.VirtualFileSystem;
import com.intellij.openapi.vfs.newvfs.BulkFileListener;
import com.intellij.openapi.vfs.newvfs.events.VFileEvent;
import com.intellij.ui.EditorNotificationPanel;
import com.intellij.ui.EditorNotifications;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBusConnection;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.List;
import java.util.Set;

/**
 * Shows an editor banner when the file on disk differs from the loaded copy
 * (timestamp or length mismatch), offering a "reload" action.
 *
 * <p>Only active when "synchronize files on frame activation" is OFF — when
 * that setting is on, the platform refreshes files automatically and the
 * banner would be redundant.
 */
public class FileChangedNotificationProvider extends EditorNotifications.Provider<EditorNotificationPanel> implements DumbAware {
  private static final Logger LOG = Logger.getInstance(FileChangedNotificationProvider.class);
  private static final Key<EditorNotificationPanel> KEY = Key.create("file.changed.notification.panel");

  private final Project myProject;

  public FileChangedNotificationProvider(@NotNull Project project, @NotNull FrameStateManager frameStateManager) {
    myProject = project;

    // Re-evaluate the banner for visible editors whenever the IDE frame gains
    // focus (the moment external changes are most likely to have happened).
    // Listener is scoped to the project's lifetime via the Disposable arg.
    frameStateManager.addListener(new FrameStateListener.Adapter() {
      @Override
      public void onFrameActivated() {
        if (!myProject.isDisposed() && !GeneralSettings.getInstance().isSyncOnFrameActivation()) {
          EditorNotifications notifications = EditorNotifications.getInstance(myProject);
          for (VirtualFile file : FileEditorManager.getInstance(myProject).getSelectedFiles()) {
            notifications.updateNotifications(file);
          }
        }
      }
    }, project);

    // Also react to VFS change events, but only for files currently open in
    // selected editors — updating every changed file would be wasteful.
    MessageBusConnection connection = ApplicationManager.getApplication().getMessageBus().connect(myProject);
    connection.subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener.Adapter() {
      @Override
      public void after(@NotNull List<? extends VFileEvent> events) {
        if (!myProject.isDisposed() && !GeneralSettings.getInstance().isSyncOnFrameActivation()) {
          Set<VirtualFile> openFiles = ContainerUtil.newHashSet(FileEditorManager.getInstance(myProject).getSelectedFiles());
          EditorNotifications notifications = EditorNotifications.getInstance(myProject);
          for (VFileEvent event : events) {
            VirtualFile file = event.getFile();
            if (file != null && openFiles.contains(file)) {
              notifications.updateNotifications(file);
            }
          }
        }
      }
    });
  }

  @NotNull
  @Override
  public Key<EditorNotificationPanel> getKey() {
    return KEY;
  }

  /**
   * Returns a "file changed externally" panel when the on-disk attributes
   * (timestamp/length) disagree with the cached VirtualFile, or null when the
   * file is up to date, non-local, or auto-sync is enabled.
   */
  @Nullable
  @Override
  public EditorNotificationPanel createNotificationPanel(@NotNull VirtualFile file, @NotNull FileEditor fileEditor) {
    if (!myProject.isDisposed() && !GeneralSettings.getInstance().isSyncOnFrameActivation()) {
      VirtualFileSystem fs = file.getFileSystem();
      // Attribute comparison only makes sense for real local files.
      if (fs instanceof LocalFileSystem) {
        FileAttributes attributes = ((LocalFileSystem)fs).getAttributes(file);
        // attributes == null means the file vanished from disk; otherwise any
        // timestamp or size mismatch indicates an external modification.
        if (attributes == null || file.getTimeStamp() != attributes.lastModified || file.getLength() != attributes.length) {
          LogUtil.debug(LOG, "%s: (%s,%s) -> %s", file, file.getTimeStamp(), file.getLength(), attributes);
          return createPanel(file);
        }
      }
    }
    return null;
  }

  // Builds the banner: message text plus a "reload" link that refreshes the
  // file from disk (synchronously, non-recursively) and re-evaluates banners.
  private EditorNotificationPanel createPanel(@NotNull final VirtualFile file) {
    EditorNotificationPanel panel = new EditorNotificationPanel();
    panel.setText(IdeBundle.message("file.changed.externally.message"));
    panel.createActionLabel(IdeBundle.message("file.changed.externally.reload"), () -> {
      if (!myProject.isDisposed()) {
        file.refresh(false, false);
        EditorNotifications.getInstance(myProject).updateNotifications(file);
      }
    });
    return panel;
  }
}
apache-2.0
galaxynut/aws-sdk-java
aws-java-sdk-core/src/main/java/com/amazonaws/util/Base16Codec.java
2978
/*
 * Copyright 2013-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.util;

/**
 * A Base 16 codec implementation.
 *
 * @author Hanson Char
 */
class Base16Codec implements Codec {
    // Offsets such that ('a' - OFFSET_OF_a) == 10 and ('A' - OFFSET_OF_A) == 10,
    // mapping hex letters to their numeric values.
    private static final int OFFSET_OF_a = 'a' - 10;
    private static final int OFFSET_OF_A = 'A' - 10;
    // Mask for the low nibble (0x0F).
    private static final int MASK_4BITS = (1 << 4) - 1;

    /** Lazily-initialized decode table; thread-safe via class-loading semantics. */
    private static class LazyHolder {
        private static final byte[] DECODED = decodeTable();

        // Builds a lookup table for bytes 0..'f': hex digits map to 0-15,
        // everything else maps to -1 (invalid).
        private static byte[] decodeTable() {
            final byte[] dest = new byte['f' + 1];
            for (int i = 0; i <= 'f'; i++) {
                if (i >= '0' && i <= '9')
                    dest[i] = (byte) (i - '0');
                else if (i >= 'A' && i <= 'F')
                    dest[i] = (byte) (i - OFFSET_OF_A);
                else if (i >= 'a' && i <= 'f')
                    dest[i] = (byte) (i - OFFSET_OF_a);
                else
                    dest[i] = -1;
            }
            return dest;
        }
    }

    // Encoding alphabet, upper- or lower-case hex digits.
    // (Renamed from the misspelled "ALPAHBETS"; private, so no external impact.)
    private final byte[] alphabet;

    Base16Codec() {
        this(true);
    }

    Base16Codec(boolean upperCase) {
        alphabet = upperCase
                 ? CodecUtils.toBytesDirect("0123456789ABCDEF")
                 : CodecUtils.toBytesDirect("0123456789abcdef");
    }

    /**
     * Encodes the given bytes as hex, two output bytes per input byte
     * (high nibble first).
     */
    @Override
    public byte[] encode(byte[] src) {
        byte[] dest = new byte[src.length * 2];
        byte p;
        for (int i = 0, j = 0; i < src.length; i++) {
            // alphabet[] is already byte[]; the original redundant (byte)
            // casts have been dropped. Masking after the shift discards the
            // sign-extension bits of negative input bytes.
            dest[j++] = alphabet[(p = src[i]) >>> 4 & MASK_4BITS];
            dest[j++] = alphabet[p & MASK_4BITS];
        }
        return dest;
    }

    /**
     * Decodes the first {@code length} bytes of hex input into
     * {@code length / 2} raw bytes.
     *
     * @throws IllegalArgumentException if {@code length} is odd or the input
     *         contains a non-hex byte
     */
    @Override
    public byte[] decode(byte[] src, final int length) {
        if (length % 2 != 0) {
            throw new IllegalArgumentException(
                "Input is expected to be encoded in multiple of 2 bytes but found: " + length
            );
        }
        final byte[] dest = new byte[length / 2];

        for (int i = 0, j = 0; j < dest.length; j++) {
            dest[j] = (byte) (
                         pos(src[i++]) << 4 | pos(src[i++])
                      );
        }
        return dest;
    }

    /**
     * Maps a single hex-digit byte to its value 0-15.
     *
     * @throws IllegalArgumentException if {@code in} is not a hex digit
     */
    protected int pos(byte in) {
        // Bounds-check before indexing: the table only covers 0..'f'. The
        // original indexed unconditionally, so bytes above 'f' (e.g. 'z') or
        // negative bytes (>= 0x80) threw ArrayIndexOutOfBoundsException
        // instead of the intended IllegalArgumentException below.
        int pos = (in > -1 && in <= 'f') ? LazyHolder.DECODED[in] : -1;
        if (pos > -1)
            return pos;
        throw new IllegalArgumentException("Invalid base 16 character: \'" + (char) in + "\'");
    }
}
apache-2.0
sneivandt/elasticsearch
client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java
40072
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.client; import org.elasticsearch.client.http.entity.ContentType; import org.elasticsearch.client.http.entity.StringEntity; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkProcessor; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; 
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; import java.util.Collections; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.singletonMap; public class CrudIT extends ESRestHighLevelClientTestCase { public void testDelete() throws IOException { { // Testing deletion String docId = "id"; highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar"))); DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId); if (randomBoolean()) { deleteRequest.version(1L); } DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); assertEquals("index", deleteResponse.getIndex()); assertEquals("type", deleteResponse.getType()); assertEquals(docId, deleteResponse.getId()); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); } { // Testing non existing document String docId = "does_not_exist"; DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId); DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); assertEquals("index", deleteResponse.getIndex()); assertEquals("type", deleteResponse.getType()); assertEquals(docId, deleteResponse.getId()); assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult()); } { // Testing version conflict String docId = "version_conflict"; highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", 
"bar"))); DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId).version(2); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync)); assertEquals(RestStatus.CONFLICT, exception.status()); assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][" + docId + "]: " + "version conflict, current version [1] is different than the one provided [2]]", exception.getMessage()); assertEquals("index", exception.getMetadata("es.index").get(0)); } { // Testing version type String docId = "version_type"; highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar")) .versionType(VersionType.EXTERNAL).version(12)); DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId).versionType(VersionType.EXTERNAL).version(13); DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); assertEquals("index", deleteResponse.getIndex()); assertEquals("type", deleteResponse.getType()); assertEquals(docId, deleteResponse.getId()); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); } { // Testing version type with a wrong version String docId = "wrong_version"; highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar")) .versionType(VersionType.EXTERNAL).version(12)); ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> { DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId).versionType(VersionType.EXTERNAL).version(10); execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); }); assertEquals(RestStatus.CONFLICT, exception.status()); assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][" + docId + "]: version 
conflict, current version [12] is higher or equal to the one provided [10]]", exception.getMessage()); assertEquals("index", exception.getMetadata("es.index").get(0)); } { // Testing routing String docId = "routing"; highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar")).routing("foo")); DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId).routing("foo"); DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); assertEquals("index", deleteResponse.getIndex()); assertEquals("type", deleteResponse.getType()); assertEquals(docId, deleteResponse.getId()); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); } } public void testExists() throws IOException { { GetRequest getRequest = new GetRequest("index", "type", "id"); assertFalse(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync)); } String document = "{\"field1\":\"value1\",\"field2\":\"value2\"}"; StringEntity stringEntity = new StringEntity(document, ContentType.APPLICATION_JSON); Response response = client().performRequest("PUT", "/index/type/id", Collections.singletonMap("refresh", "wait_for"), stringEntity); assertEquals(201, response.getStatusLine().getStatusCode()); { GetRequest getRequest = new GetRequest("index", "type", "id"); assertTrue(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync)); } { GetRequest getRequest = new GetRequest("index", "type", "does_not_exist"); assertFalse(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync)); } { GetRequest getRequest = new GetRequest("index", "type", "does_not_exist").version(1); assertFalse(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync)); } } public void testGet() throws IOException { { GetRequest getRequest = new GetRequest("index", "type", "id"); ElasticsearchException exception = 
expectThrows(ElasticsearchException.class, () -> execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync)); assertEquals(RestStatus.NOT_FOUND, exception.status()); assertEquals("Elasticsearch exception [type=index_not_found_exception, reason=no such index]", exception.getMessage()); assertEquals("index", exception.getMetadata("es.index").get(0)); } String document = "{\"field1\":\"value1\",\"field2\":\"value2\"}"; StringEntity stringEntity = new StringEntity(document, ContentType.APPLICATION_JSON); Response response = client().performRequest("PUT", "/index/type/id", Collections.singletonMap("refresh", "wait_for"), stringEntity); assertEquals(201, response.getStatusLine().getStatusCode()); { GetRequest getRequest = new GetRequest("index", "type", "id").version(2); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync)); assertEquals(RestStatus.CONFLICT, exception.status()); assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, " + "reason=[type][id]: " + "version conflict, current version [1] is different than the one provided [2]]", exception.getMessage()); assertEquals("index", exception.getMetadata("es.index").get(0)); } { GetRequest getRequest = new GetRequest("index", "type", "id"); if (randomBoolean()) { getRequest.version(1L); } GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync); assertEquals("index", getResponse.getIndex()); assertEquals("type", getResponse.getType()); assertEquals("id", getResponse.getId()); assertTrue(getResponse.isExists()); assertFalse(getResponse.isSourceEmpty()); assertEquals(1L, getResponse.getVersion()); assertEquals(document, getResponse.getSourceAsString()); } { GetRequest getRequest = new GetRequest("index", "type", "does_not_exist"); GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync); 
assertEquals("index", getResponse.getIndex()); assertEquals("type", getResponse.getType()); assertEquals("does_not_exist", getResponse.getId()); assertFalse(getResponse.isExists()); assertEquals(-1, getResponse.getVersion()); assertTrue(getResponse.isSourceEmpty()); assertNull(getResponse.getSourceAsString()); } { GetRequest getRequest = new GetRequest("index", "type", "id"); getRequest.fetchSourceContext(new FetchSourceContext(false, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY)); GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync); assertEquals("index", getResponse.getIndex()); assertEquals("type", getResponse.getType()); assertEquals("id", getResponse.getId()); assertTrue(getResponse.isExists()); assertTrue(getResponse.isSourceEmpty()); assertEquals(1L, getResponse.getVersion()); assertNull(getResponse.getSourceAsString()); } { GetRequest getRequest = new GetRequest("index", "type", "id"); if (randomBoolean()) { getRequest.fetchSourceContext(new FetchSourceContext(true, new String[]{"field1"}, Strings.EMPTY_ARRAY)); } else { getRequest.fetchSourceContext(new FetchSourceContext(true, Strings.EMPTY_ARRAY, new String[]{"field2"})); } GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync); assertEquals("index", getResponse.getIndex()); assertEquals("type", getResponse.getType()); assertEquals("id", getResponse.getId()); assertTrue(getResponse.isExists()); assertFalse(getResponse.isSourceEmpty()); assertEquals(1L, getResponse.getVersion()); Map<String, Object> sourceAsMap = getResponse.getSourceAsMap(); assertEquals(1, sourceAsMap.size()); assertEquals("value1", sourceAsMap.get("field1")); } } public void testIndex() throws IOException { final XContentType xContentType = randomFrom(XContentType.values()); { IndexRequest indexRequest = new IndexRequest("index", "type"); indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("test", 
"test").endObject()); IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); assertEquals(RestStatus.CREATED, indexResponse.status()); assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); assertEquals("index", indexResponse.getIndex()); assertEquals("type", indexResponse.getType()); assertTrue(Strings.hasLength(indexResponse.getId())); assertEquals(1L, indexResponse.getVersion()); assertNotNull(indexResponse.getShardId()); assertEquals(-1, indexResponse.getShardId().getId()); assertEquals("index", indexResponse.getShardId().getIndexName()); assertEquals("index", indexResponse.getShardId().getIndex().getName()); assertEquals("_na_", indexResponse.getShardId().getIndex().getUUID()); assertNotNull(indexResponse.getShardInfo()); assertEquals(0, indexResponse.getShardInfo().getFailed()); assertTrue(indexResponse.getShardInfo().getSuccessful() > 0); assertTrue(indexResponse.getShardInfo().getTotal() > 0); } { IndexRequest indexRequest = new IndexRequest("index", "type", "id"); indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("version", 1).endObject()); IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); assertEquals(RestStatus.CREATED, indexResponse.status()); assertEquals("index", indexResponse.getIndex()); assertEquals("type", indexResponse.getType()); assertEquals("id", indexResponse.getId()); assertEquals(1L, indexResponse.getVersion()); indexRequest = new IndexRequest("index", "type", "id"); indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("version", 2).endObject()); indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); assertEquals(RestStatus.OK, indexResponse.status()); assertEquals("index", indexResponse.getIndex()); assertEquals("type", indexResponse.getType()); assertEquals("id", indexResponse.getId()); 
assertEquals(2L, indexResponse.getVersion()); ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> { IndexRequest wrongRequest = new IndexRequest("index", "type", "id"); wrongRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject()); wrongRequest.version(5L); execute(wrongRequest, highLevelClient()::index, highLevelClient()::indexAsync); }); assertEquals(RestStatus.CONFLICT, exception.status()); assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][id]: " + "version conflict, current version [2] is different than the one provided [5]]", exception.getMessage()); assertEquals("index", exception.getMetadata("es.index").get(0)); } { ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> { IndexRequest indexRequest = new IndexRequest("index", "type", "missing_parent"); indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject()); indexRequest.parent("missing"); execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); }); assertEquals(RestStatus.BAD_REQUEST, exception.status()); assertEquals("Elasticsearch exception [type=illegal_argument_exception, " + "reason=can't specify parent if no parent field has been configured]", exception.getMessage()); } { ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> { IndexRequest indexRequest = new IndexRequest("index", "type", "missing_pipeline"); indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject()); indexRequest.setPipeline("missing"); execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); }); assertEquals(RestStatus.BAD_REQUEST, exception.status()); assertEquals("Elasticsearch exception [type=illegal_argument_exception, " + "reason=pipeline 
with id [missing] does not exist]", exception.getMessage()); } { IndexRequest indexRequest = new IndexRequest("index", "type", "external_version_type"); indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject()); indexRequest.version(12L); indexRequest.versionType(VersionType.EXTERNAL); IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); assertEquals(RestStatus.CREATED, indexResponse.status()); assertEquals("index", indexResponse.getIndex()); assertEquals("type", indexResponse.getType()); assertEquals("external_version_type", indexResponse.getId()); assertEquals(12L, indexResponse.getVersion()); } { final IndexRequest indexRequest = new IndexRequest("index", "type", "with_create_op_type"); indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject()); indexRequest.opType(DocWriteRequest.OpType.CREATE); IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); assertEquals(RestStatus.CREATED, indexResponse.status()); assertEquals("index", indexResponse.getIndex()); assertEquals("type", indexResponse.getType()); assertEquals("with_create_op_type", indexResponse.getId()); ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> { execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); }); assertEquals(RestStatus.CONFLICT, exception.status()); assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][with_create_op_type]: " + "version conflict, document already exists (current version [1])]", exception.getMessage()); } } public void testUpdate() throws IOException { { UpdateRequest updateRequest = new UpdateRequest("index", "type", "does_not_exist"); updateRequest.doc(singletonMap("field", "value"), randomFrom(XContentType.values())); 
ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync)); assertEquals(RestStatus.NOT_FOUND, exception.status()); assertEquals("Elasticsearch exception [type=document_missing_exception, reason=[type][does_not_exist]: document missing]", exception.getMessage()); } { IndexRequest indexRequest = new IndexRequest("index", "type", "id"); indexRequest.source(singletonMap("field", "value")); IndexResponse indexResponse = highLevelClient().index(indexRequest); assertEquals(RestStatus.CREATED, indexResponse.status()); UpdateRequest updateRequest = new UpdateRequest("index", "type", "id"); updateRequest.doc(singletonMap("field", "updated"), randomFrom(XContentType.values())); UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); assertEquals(RestStatus.OK, updateResponse.status()); assertEquals(indexResponse.getVersion() + 1, updateResponse.getVersion()); UpdateRequest updateRequestConflict = new UpdateRequest("index", "type", "id"); updateRequestConflict.doc(singletonMap("field", "with_version_conflict"), randomFrom(XContentType.values())); updateRequestConflict.version(indexResponse.getVersion()); ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> execute(updateRequestConflict, highLevelClient()::update, highLevelClient()::updateAsync)); assertEquals(RestStatus.CONFLICT, exception.status()); assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][id]: version conflict, " + "current version [2] is different than the one provided [1]]", exception.getMessage()); } { ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> { UpdateRequest updateRequest = new UpdateRequest("index", "type", "id"); updateRequest.doc(singletonMap("field", "updated"), 
randomFrom(XContentType.values())); if (randomBoolean()) { updateRequest.parent("missing"); } else { updateRequest.routing("missing"); } execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); }); assertEquals(RestStatus.NOT_FOUND, exception.status()); assertEquals("Elasticsearch exception [type=document_missing_exception, reason=[type][id]: document missing]", exception.getMessage()); } { IndexRequest indexRequest = new IndexRequest("index", "type", "with_script"); indexRequest.source(singletonMap("counter", 12)); IndexResponse indexResponse = highLevelClient().index(indexRequest); assertEquals(RestStatus.CREATED, indexResponse.status()); UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_script"); Script script = new Script(ScriptType.INLINE, "painless", "ctx._source.counter += params.count", singletonMap("count", 8)); updateRequest.script(script); updateRequest.fetchSource(true); UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); assertEquals(RestStatus.OK, updateResponse.status()); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertEquals(2L, updateResponse.getVersion()); assertEquals(20, updateResponse.getGetResult().sourceAsMap().get("counter")); } { IndexRequest indexRequest = new IndexRequest("index", "type", "with_doc"); indexRequest.source("field_1", "one", "field_3", "three"); indexRequest.version(12L); indexRequest.versionType(VersionType.EXTERNAL); IndexResponse indexResponse = highLevelClient().index(indexRequest); assertEquals(RestStatus.CREATED, indexResponse.status()); assertEquals(12L, indexResponse.getVersion()); UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_doc"); updateRequest.doc(singletonMap("field_2", "two"), randomFrom(XContentType.values())); updateRequest.fetchSource("field_*", "field_3"); UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, 
highLevelClient()::updateAsync); assertEquals(RestStatus.OK, updateResponse.status()); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertEquals(13L, updateResponse.getVersion()); GetResult getResult = updateResponse.getGetResult(); assertEquals(13L, updateResponse.getVersion()); Map<String, Object> sourceAsMap = getResult.sourceAsMap(); assertEquals("one", sourceAsMap.get("field_1")); assertEquals("two", sourceAsMap.get("field_2")); assertFalse(sourceAsMap.containsKey("field_3")); } { IndexRequest indexRequest = new IndexRequest("index", "type", "noop"); indexRequest.source("field", "value"); IndexResponse indexResponse = highLevelClient().index(indexRequest); assertEquals(RestStatus.CREATED, indexResponse.status()); assertEquals(1L, indexResponse.getVersion()); UpdateRequest updateRequest = new UpdateRequest("index", "type", "noop"); updateRequest.doc(singletonMap("field", "value"), randomFrom(XContentType.values())); UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); assertEquals(RestStatus.OK, updateResponse.status()); assertEquals(DocWriteResponse.Result.NOOP, updateResponse.getResult()); assertEquals(1L, updateResponse.getVersion()); updateRequest.detectNoop(false); updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); assertEquals(RestStatus.OK, updateResponse.status()); assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertEquals(2L, updateResponse.getVersion()); } { UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_upsert"); updateRequest.upsert(singletonMap("doc_status", "created")); updateRequest.doc(singletonMap("doc_status", "updated")); updateRequest.fetchSource(true); UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); assertEquals(RestStatus.CREATED, updateResponse.status()); assertEquals("index", 
updateResponse.getIndex()); assertEquals("type", updateResponse.getType()); assertEquals("with_upsert", updateResponse.getId()); GetResult getResult = updateResponse.getGetResult(); assertEquals(1L, updateResponse.getVersion()); assertEquals("created", getResult.sourceAsMap().get("doc_status")); } { UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_doc_as_upsert"); updateRequest.doc(singletonMap("field", "initialized")); updateRequest.fetchSource(true); updateRequest.docAsUpsert(true); UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); assertEquals(RestStatus.CREATED, updateResponse.status()); assertEquals("index", updateResponse.getIndex()); assertEquals("type", updateResponse.getType()); assertEquals("with_doc_as_upsert", updateResponse.getId()); GetResult getResult = updateResponse.getGetResult(); assertEquals(1L, updateResponse.getVersion()); assertEquals("initialized", getResult.sourceAsMap().get("field")); } { UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_scripted_upsert"); updateRequest.fetchSource(true); updateRequest.script(new Script(ScriptType.INLINE, "painless", "ctx._source.level = params.test", singletonMap("test", "C"))); updateRequest.scriptedUpsert(true); updateRequest.upsert(singletonMap("level", "A")); UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); assertEquals(RestStatus.CREATED, updateResponse.status()); assertEquals("index", updateResponse.getIndex()); assertEquals("type", updateResponse.getType()); assertEquals("with_scripted_upsert", updateResponse.getId()); GetResult getResult = updateResponse.getGetResult(); assertEquals(1L, updateResponse.getVersion()); assertEquals("C", getResult.sourceAsMap().get("level")); } { IllegalStateException exception = expectThrows(IllegalStateException.class, () -> { UpdateRequest updateRequest = new UpdateRequest("index", "type", 
"id"); updateRequest.doc(new IndexRequest().source(Collections.singletonMap("field", "doc"), XContentType.JSON)); updateRequest.upsert(new IndexRequest().source(Collections.singletonMap("field", "upsert"), XContentType.YAML)); execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); }); assertEquals("Update request cannot have different content types for doc [JSON] and upsert [YAML] documents", exception.getMessage()); } } public void testBulk() throws IOException { int nbItems = randomIntBetween(10, 100); boolean[] errors = new boolean[nbItems]; XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE); BulkRequest bulkRequest = new BulkRequest(); for (int i = 0; i < nbItems; i++) { String id = String.valueOf(i); boolean erroneous = randomBoolean(); errors[i] = erroneous; DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values()); if (opType == DocWriteRequest.OpType.DELETE) { if (erroneous == false) { assertEquals(RestStatus.CREATED, highLevelClient().index(new IndexRequest("index", "test", id).source("field", -1)).status()); } DeleteRequest deleteRequest = new DeleteRequest("index", "test", id); bulkRequest.add(deleteRequest); } else { BytesReference source = XContentBuilder.builder(xContentType.xContent()).startObject().field("id", i).endObject().bytes(); if (opType == DocWriteRequest.OpType.INDEX) { IndexRequest indexRequest = new IndexRequest("index", "test", id).source(source, xContentType); if (erroneous) { indexRequest.version(12L); } bulkRequest.add(indexRequest); } else if (opType == DocWriteRequest.OpType.CREATE) { IndexRequest createRequest = new IndexRequest("index", "test", id).source(source, xContentType).create(true); if (erroneous) { assertEquals(RestStatus.CREATED, highLevelClient().index(createRequest).status()); } bulkRequest.add(createRequest); } else if (opType == DocWriteRequest.OpType.UPDATE) { UpdateRequest updateRequest = new UpdateRequest("index", "test", id) .doc(new 
IndexRequest().source(source, xContentType)); if (erroneous == false) { assertEquals(RestStatus.CREATED, highLevelClient().index(new IndexRequest("index", "test", id).source("field", -1)).status()); } bulkRequest.add(updateRequest); } } } BulkResponse bulkResponse = execute(bulkRequest, highLevelClient()::bulk, highLevelClient()::bulkAsync); assertEquals(RestStatus.OK, bulkResponse.status()); assertTrue(bulkResponse.getTook().getMillis() > 0); assertEquals(nbItems, bulkResponse.getItems().length); validateBulkResponses(nbItems, errors, bulkResponse, bulkRequest); } public void testBulkProcessorIntegration() throws IOException, InterruptedException { int nbItems = randomIntBetween(10, 100); boolean[] errors = new boolean[nbItems]; XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE); AtomicReference<BulkResponse> responseRef = new AtomicReference<>(); AtomicReference<BulkRequest> requestRef = new AtomicReference<>(); AtomicReference<Throwable> error = new AtomicReference<>(); BulkProcessor.Listener listener = new BulkProcessor.Listener() { @Override public void beforeBulk(long executionId, BulkRequest request) { } @Override public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { responseRef.set(response); requestRef.set(request); } @Override public void afterBulk(long executionId, BulkRequest request, Throwable failure) { error.set(failure); } }; ThreadPool threadPool = new ThreadPool(Settings.builder().put("node.name", getClass().getName()).build()); // Pull the client to a variable to work around https://bugs.eclipse.org/bugs/show_bug.cgi?id=514884 RestHighLevelClient hlClient = highLevelClient(); try(BulkProcessor processor = new BulkProcessor.Builder(hlClient::bulkAsync, listener, threadPool) .setConcurrentRequests(0) .setBulkSize(new ByteSizeValue(5, ByteSizeUnit.GB)) .setBulkActions(nbItems + 1) .build()) { for (int i = 0; i < nbItems; i++) { String id = String.valueOf(i); boolean erroneous = 
randomBoolean(); errors[i] = erroneous; DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values()); if (opType == DocWriteRequest.OpType.DELETE) { if (erroneous == false) { assertEquals(RestStatus.CREATED, highLevelClient().index(new IndexRequest("index", "test", id).source("field", -1)).status()); } DeleteRequest deleteRequest = new DeleteRequest("index", "test", id); processor.add(deleteRequest); } else { if (opType == DocWriteRequest.OpType.INDEX) { IndexRequest indexRequest = new IndexRequest("index", "test", id).source(xContentType, "id", i); if (erroneous) { indexRequest.version(12L); } processor.add(indexRequest); } else if (opType == DocWriteRequest.OpType.CREATE) { IndexRequest createRequest = new IndexRequest("index", "test", id).source(xContentType, "id", i).create(true); if (erroneous) { assertEquals(RestStatus.CREATED, highLevelClient().index(createRequest).status()); } processor.add(createRequest); } else if (opType == DocWriteRequest.OpType.UPDATE) { UpdateRequest updateRequest = new UpdateRequest("index", "test", id) .doc(new IndexRequest().source(xContentType, "id", i)); if (erroneous == false) { assertEquals(RestStatus.CREATED, highLevelClient().index(new IndexRequest("index", "test", id).source("field", -1)).status()); } processor.add(updateRequest); } } } assertNull(responseRef.get()); assertNull(requestRef.get()); } BulkResponse bulkResponse = responseRef.get(); BulkRequest bulkRequest = requestRef.get(); assertEquals(RestStatus.OK, bulkResponse.status()); assertTrue(bulkResponse.getTook().getMillis() > 0); assertEquals(nbItems, bulkResponse.getItems().length); assertNull(error.get()); validateBulkResponses(nbItems, errors, bulkResponse, bulkRequest); terminate(threadPool); } private void validateBulkResponses(int nbItems, boolean[] errors, BulkResponse bulkResponse, BulkRequest bulkRequest) { for (int i = 0; i < nbItems; i++) { BulkItemResponse bulkItemResponse = bulkResponse.getItems()[i]; assertEquals(i, 
bulkItemResponse.getItemId()); assertEquals("index", bulkItemResponse.getIndex()); assertEquals("test", bulkItemResponse.getType()); assertEquals(String.valueOf(i), bulkItemResponse.getId()); DocWriteRequest.OpType requestOpType = bulkRequest.requests().get(i).opType(); if (requestOpType == DocWriteRequest.OpType.INDEX || requestOpType == DocWriteRequest.OpType.CREATE) { assertEquals(errors[i], bulkItemResponse.isFailed()); assertEquals(errors[i] ? RestStatus.CONFLICT : RestStatus.CREATED, bulkItemResponse.status()); } else if (requestOpType == DocWriteRequest.OpType.UPDATE) { assertEquals(errors[i], bulkItemResponse.isFailed()); assertEquals(errors[i] ? RestStatus.NOT_FOUND : RestStatus.OK, bulkItemResponse.status()); } else if (requestOpType == DocWriteRequest.OpType.DELETE) { assertFalse(bulkItemResponse.isFailed()); assertEquals(errors[i] ? RestStatus.NOT_FOUND : RestStatus.OK, bulkItemResponse.status()); } } } }
apache-2.0
mahak/hbase
hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPermissionScope.java
1004
/**
 * Autogenerated by Thrift Compiler (0.14.1)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.14.1)", date = "2021-07-19")
public enum TPermissionScope implements org.apache.thrift.TEnum {
  TABLE(0),
  NAMESPACE(1);

  // Wire value of this constant, as declared in the Thrift IDL.
  private final int thriftValue;

  private TPermissionScope(int thriftValue) {
    this.thriftValue = thriftValue;
  }

  /**
   * Get the integer value of this enum value, as defined in the Thrift IDL.
   */
  public int getValue() {
    return thriftValue;
  }

  /**
   * Find the enum constant carrying the given integer value, as defined in
   * the Thrift IDL.
   *
   * @return the matching constant, or null if the value is not found.
   */
  @org.apache.thrift.annotation.Nullable
  public static TPermissionScope findByValue(int value) {
    for (TPermissionScope scope : values()) {
      if (scope.thriftValue == value) {
        return scope;
      }
    }
    return null;
  }
}
apache-2.0
thomasbecker/jetty-7
jetty-osgi/jetty-osgi-boot/src/main/java/org/eclipse/jetty/osgi/boot/internal/webapp/IWebBundleDeployerHelper.java
3614
// ========================================================================
// Copyright (c) 2010 Intalio, Inc.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// Contributors:
//    Hugues Malphettes - initial API and implementation
// ========================================================================
package org.eclipse.jetty.osgi.boot.internal.webapp;

import org.eclipse.jetty.deploy.ContextDeployer;
import org.eclipse.jetty.server.handler.ContextHandler;
import org.eclipse.jetty.webapp.WebAppContext;
import org.osgi.framework.Bundle;

/**
 * Internal interface for the class that deploys a webapp on a server.
 * Used as we migrate from the single instance of the jetty server to multiple jetty servers.
 */
public interface IWebBundleDeployerHelper {

    /**
     * When this property is present, the type of context handler registered is
     * not known in advance.
     */
    public static final String INTERNAL_SERVICE_PROP_UNKNOWN_CONTEXT_HANDLER_TYPE = "unknownContextHandlerType";

    /**
     * Deploy a new web application on the jetty server.
     *
     * @param bundle
     *            The bundle that contributes the webapp.
     * @param webappFolderPath
     *            The path to the root of the webapp. Either a path relative to
     *            the bundle, or an absolute path.
     * @param contextPath
     *            The context path. Must start with "/"
     * @param extraClasspath
     *            Extra classpath entries for the webapp (NOTE(review):
     *            inferred from the parameter name - confirm exact format
     *            against the implementation).
     * @param overrideBundleInstallLocation
     *            Optional location that overrides the bundle's own install
     *            location (NOTE(review): inferred from the name - confirm).
     * @param requireTldBundle The list of bundles' symbolic names that contain
     * tld files that are required by this WAB.
     * @param webXmlPath
     *            Path to the web.xml descriptor to use for this webapp
     *            (presumably overriding the one inside the webapp - confirm).
     * @param defaultWebXmlPath
     *            Path to the default web.xml (webdefault) to apply
     *            (presumably - confirm against the implementation).
     * @param webAppContext
     *            The WebAppContext that will be configured, deployed and
     *            started.
     * @return The contexthandler created and started
     * @throws Exception
     */
    public abstract WebAppContext registerWebapplication(Bundle bundle, String webappFolderPath, String contextPath, String extraClasspath,
            String overrideBundleInstallLocation, String requireTldBundle, String webXmlPath, String defaultWebXmlPath,
            WebAppContext webAppContext) throws Exception;

    /**
     * Stop a ContextHandler and remove it from the collection.
     *
     * @see ContextDeployer#undeploy
     * @param contextHandler the handler to stop and unregister
     * @throws Exception
     */
    public abstract void unregister(ContextHandler contextHandler) throws Exception;

    /**
     * This type of registration relies on jetty's complete context xml file.
     * Context encompasses jndi and all other things. This makes the definition
     * of the webapp a lot more self-contained.
     *
     * @param contributor the bundle that contributes the context file
     * @param contextFileRelativePath path to the context xml file, relative to
     *            the contributor bundle
     * @param extraClasspath extra classpath entries for the context
     *            (NOTE(review): inferred from the name - confirm)
     * @param overrideBundleInstallLocation optional override for the bundle's
     *            install location (NOTE(review): inferred - confirm)
     * @param requireTldBundle The list of bundles' symbolic names that contain tld files for this webapp.
     * @param handler the context handler passed in the server
     * reference that will be configured, deployed and started.
     * @return The contexthandler created and started
     * @throws Exception
     */
    public abstract ContextHandler registerContext(Bundle contributor, String contextFileRelativePath, String extraClasspath,
            String overrideBundleInstallLocation, String requireTldBundle, ContextHandler handler) throws Exception;

}
apache-2.0
zstackorg/zstack
sdk/src/main/java/org/zstack/sdk/AddVmNicToSecurityGroupResult.java
75
package org.zstack.sdk;

/**
 * Result of the AddVmNicToSecurityGroup SDK call.
 *
 * <p>The class body is intentionally empty: the operation returns no payload
 * fields, so this type serves only as a typed marker for the API result
 * (presumably generated alongside the other SDK result classes - confirm).
 */
public class AddVmNicToSecurityGroupResult {
}
apache-2.0
emre-aydin/hazelcast
hazelcast/src/main/java/com/hazelcast/map/impl/querycache/accumulator/AccumulatorInfoSupplier.java
1796
/*
 * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.map.impl.querycache.accumulator;

import java.util.concurrent.ConcurrentMap;

/**
 * Supplies {@link AccumulatorInfo} according to the name of an {@code IMap}
 * and the name of a {@code QueryCache}.
 */
public interface AccumulatorInfoSupplier {

    /**
     * Returns the {@link AccumulatorInfo} for the given query cache of an
     * {@code IMap}.
     *
     * @param mapName map name.
     * @param cacheId cache name.
     * @return the {@link AccumulatorInfo} for the cache of the map, or
     *         {@code null} if none is registered.
     */
    AccumulatorInfo getAccumulatorInfoOrNull(String mapName, String cacheId);

    /**
     * Adds a new {@link AccumulatorInfo} for the query-cache of {@code IMap},
     * keeping any existing registration (put-if-absent semantics).
     *
     * @param mapName map name.
     * @param cacheId cache name.
     * @param info    the info to register for this map/cache pair.
     */
    void putIfAbsent(String mapName, String cacheId, AccumulatorInfo info);

    /**
     * Removes the {@link AccumulatorInfo} for the given map/cache pair from
     * this supplier.
     *
     * @param mapName map name.
     * @param cacheId cache name.
     */
    void remove(String mapName, String cacheId);

    /**
     * @return all {@link AccumulatorInfo} of all {@code QueryCache} by map name
     */
    ConcurrentMap<String, ConcurrentMap<String, AccumulatorInfo>> getAll();
}
apache-2.0
b-long/ezbake-data-access
ezmongo/driver/src/main/com/mongodb/gridfs/GridFS.java
14033
/*
 * Copyright (c) 2008-2014 MongoDB, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// GridFS.java

package com.mongodb.gridfs;

import com.mongodb.*;
import org.bson.types.ObjectId;

import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;

/**
 * Implementation of GridFS v1.0
 *
 * <a href="http://www.mongodb.org/display/DOCS/GridFS+Specification">GridFS 1.0 spec</a>
 *
 * @dochub gridfs
 */
public class GridFS implements Serializable {

    private static final long serialVersionUID = 1L;

    private static final Logger LOGGER = Logger.getLogger( "com.mongodb.gridfs" );

    /**
     * file's chunk size
     */
    public static final int DEFAULT_CHUNKSIZE = 255 * 1024;

    /**
     * file's max chunk size
     *
     * @deprecated You can calculate max chunkSize with
     *             a similar formula {@link com.mongodb.MongoClient#getMaxBsonObjectSize()} - 500*1000.
     *             Please ensure that you left enough space for metadata (500kb is enough).
     */
    @Deprecated
    public static final long MAX_CHUNKSIZE = (long) (3.5 * 1000 * 1000);

    /**
     * bucket to use for the collection namespaces
     */
    public static final String DEFAULT_BUCKET = "fs";

    // --------------------------
    // ------ constructors -------
    // --------------------------

    /**
     * Creates a GridFS instance for the default bucket "fs"
     * in the given database. Set the preferred WriteConcern on the give DB with DB.setWriteConcern
     *
     * @see com.mongodb.WriteConcern
     * @param db database to work with
     * @throws MongoException
     */
    public GridFS(DB db) {
        this(db, DEFAULT_BUCKET);
    }

    /**
     * Creates a GridFS instance for the specified bucket
     * in the given database.  Set the preferred WriteConcern on the give DB with DB.setWriteConcern
     *
     * @see com.mongodb.WriteConcern
     * @param db database to work with
     * @param bucket bucket to use in the given database
     * @throws MongoException
     */
    public GridFS(DB db, String bucket) {
        _db = db;
        _bucketName = bucket;

        _filesCollection = _db.getCollection( _bucketName + ".files" );
        _chunkCollection = _db.getCollection( _bucketName + ".chunks" );

        // ensure standard indexes as long as collections are small;
        // failures are logged but not fatal - the bucket stays usable.
        try {
            if (_filesCollection.count() < 1000) {
                _filesCollection.ensureIndex( BasicDBObjectBuilder.start().add( "filename" , 1 ).add( "uploadDate" , 1 ).get() );
            }
            if (_chunkCollection.count() < 1000) {
                _chunkCollection.ensureIndex( BasicDBObjectBuilder.start().add( "files_id" , 1 ).add( "n" , 1 ).get() ,
                                              BasicDBObjectBuilder.start().add( "unique" , true ).get() );
            }
        } catch (MongoException e) {
            LOGGER.info(String.format("Unable to ensure indices on GridFS collections in database %s", db.getName()));
        }
        _filesCollection.setObjectClass( GridFSDBFile.class );
    }

    // --------------------------
    // ------ utils       -------
    // --------------------------

    /**
     * gets the list of files stored in this gridfs, sorted by filename
     *
     * @return cursor of file objects
     */
    public DBCursor getFileList(){
        return getFileList(new BasicDBObject());
    }

    /**
     * gets a filtered list of files stored in this gridfs, sorted by filename
     *
     * @param query filter to apply
     * @return cursor of file objects
     */
    public DBCursor getFileList( DBObject query ){
        return getFileList(query, new BasicDBObject("filename",1));
    }

    /**
     * gets a filtered list of files stored in this gridfs, sorted by param sort
     *
     * @param query filter to apply
     * @param sort sorting to apply
     * @return cursor of file objects
     */
    public DBCursor getFileList( DBObject query, DBObject sort){
        return _filesCollection.find( query ).sort(sort);
    }

    // --------------------------
    // ------ reading     -------
    // --------------------------

    /**
     * finds one file matching the given id. Equivalent to findOne(id)
     *
     * @param id the id of the file
     * @return the first file matching the id, or null if none matches
     * @throws MongoException
     */
    public GridFSDBFile find( ObjectId id ){
        return findOne( id );
    }

    /**
     * finds one file matching the given id.
     *
     * @param id the id of the file
     * @return the first file matching the id, or null if none matches
     * @throws MongoException
     */
    public GridFSDBFile findOne( ObjectId id ){
        return findOne( new BasicDBObject( "_id" , id ) );
    }

    /**
     * finds one file matching the given filename
     *
     * @param filename the name of the file
     * @return the first file matching the filename, or null if none matches
     * @throws MongoException
     */
    public GridFSDBFile findOne( String filename ){
        return findOne( new BasicDBObject( "filename" , filename ) );
    }

    /**
     * finds one file matching the given query
     *
     * @param query filter to apply
     * @return the first file matching the query, or null if none matches
     * @throws MongoException
     */
    public GridFSDBFile findOne( DBObject query ){
        return _fix( _filesCollection.findOne( query ) );
    }

    /**
     * finds a list of files matching the given filename
     *
     * @param filename the name of the file
     * @return list of files matching the filename
     * @throws MongoException
     */
    public List<GridFSDBFile> find( String filename ){
        return find( filename, null );
    }

    /**
     * finds a list of files matching the given filename
     *
     * @param filename the name of the file
     * @param sort sorting to apply, or null for unsorted
     * @return list of files matching the filename
     * @throws MongoException
     */
    public List<GridFSDBFile> find( String filename , DBObject sort){
        return find( new BasicDBObject( "filename" , filename ), sort );
    }

    /**
     * finds a list of files matching the given query
     *
     * @param query filter to apply
     * @return list of files matching the query
     * @throws MongoException
     */
    public List<GridFSDBFile> find( DBObject query ){
        return find(query, null);
    }

    /**
     * finds a list of files matching the given query
     *
     * @param query filter to apply
     * @param sort sorting to apply, or null for unsorted
     * @return list of files matching the query
     * @throws MongoException
     */
    public List<GridFSDBFile> find( DBObject query , DBObject sort){
        List<GridFSDBFile> files = new ArrayList<GridFSDBFile>();

        DBCursor c = null;
        try {
            c = _filesCollection.find( query );
            if (sort != null) {
                c.sort(sort);
            }
            // materialize the cursor into a list, wiring each file back to this GridFS
            while ( c.hasNext() ){
                files.add( _fix( c.next() ) );
            }
        } finally {
            // always release the cursor, even if iteration throws
            if (c != null){
                c.close();
            }
        }
        return files;
    }

    /**
     * Attaches this GridFS instance to a raw query result so the file can
     * load its chunks later.
     *
     * @deprecated This method is NOT a part of public API and will be dropped in 3.x versions.
     */
    @Deprecated
    protected GridFSDBFile _fix( Object o ){
        if ( o == null )
            return null;

        // in client mode the driver may hand back a plain BasicDBObject;
        // copy its fields into a fresh GridFSDBFile so the cast below succeeds
        if (Mongo.isClientModeEnabled()) {
            GridFSDBFile tmp = new GridFSDBFile();
            if (o instanceof BasicDBObject) {
                BasicDBObject b = (BasicDBObject)o;
                for (String key: b.keySet()) {
                    tmp.put(key,b.get(key));
                }
            }
            o = tmp;
        }

        if ( ! ( o instanceof GridFSDBFile ) )
            throw new RuntimeException( "somehow didn't get a GridFSDBFile" );

        GridFSDBFile f = (GridFSDBFile)o;
        f._fs = this;
        return f;
    }

    // --------------------------
    // ------ remove      -------
    // --------------------------

    /**
     * removes the file matching the given id
     *
     * @param id the id of the file to remove
     * @throws MongoException
     */
    public void remove( ObjectId id ){
        if(id == null) {
            throw new IllegalArgumentException("file id can not be null");
        }
        // delete the metadata document and all of its chunk documents
        _filesCollection.remove( new BasicDBObject( "_id" , id ) );
        _chunkCollection.remove( new BasicDBObject( "files_id" , id ) );
    }

    /**
     * removes all files matching the given filename
     *
     * @param filename the name of the file(s) to remove
     * @throws MongoException
     */
    public void remove( String filename ){
        if(filename == null) {
            throw new IllegalArgumentException("filename can not be null");
        }
        remove( new BasicDBObject( "filename" , filename ) );
    }

    /**
     * removes all files matching the given query
     *
     * @param query filter selecting the files to remove
     * @throws MongoException
     */
    public void remove( DBObject query ){
        if(query == null) {
            throw new IllegalArgumentException("query can not be null");
        }
        for ( GridFSDBFile f : find( query ) ){
            f.remove();
        }
    }

    // --------------------------
    // ------ writing     -------
    // --------------------------

    /**
     * creates a file entry.
     * After calling this method, you have to call save() on the GridFSInputFile file
     *
     * @param data the file's data
     * @return a file handle that still needs save() called on it
     */
    public GridFSInputFile createFile( byte[] data ){
        return createFile( new ByteArrayInputStream( data ), true );
    }

    /**
     * creates a file entry.
     * After calling this method, you have to call save() on the GridFSInputFile file
     *
     * @param f the file object
     * @return a file handle that still needs save() called on it
     * @throws IOException
     */
    public GridFSInputFile createFile( File f ) throws IOException {
        return createFile( new FileInputStream( f ) , f.getName(), true );
    }

    /**
     * creates a file entry.
     * after calling this method, you have to call save() on the GridFSInputFile file
     *
     * @param in an inputstream containing the file's data
     * @return a file handle that still needs save() called on it
     */
    public GridFSInputFile createFile( InputStream in ){
        return createFile( in , null );
    }

    /**
     * creates a file entry.
     * after calling this method, you have to call save() on the GridFSInputFile file
     *
     * @param in an inputstream containing the file's data
     * @param closeStreamOnPersist indicate the passed in input stream should be closed
     *                             once the data chunk persisted
     * @return a file handle that still needs save() called on it
     */
    public GridFSInputFile createFile( InputStream in, boolean closeStreamOnPersist ){
        return createFile( in , null, closeStreamOnPersist );
    }

    /**
     * creates a file entry.
     * After calling this method, you have to call save() on the GridFSInputFile file
     *
     * @param in an inputstream containing the file's data
     * @param filename the file name as stored in the db
     * @return a file handle that still needs save() called on it
     */
    public GridFSInputFile createFile( InputStream in , String filename ){
        return new GridFSInputFile( this , in , filename );
    }

    /**
     * creates a file entry.
     * After calling this method, you have to call save() on the GridFSInputFile file
     *
     * @param in an inputstream containing the file's data
     * @param filename the file name as stored in the db
     * @param closeStreamOnPersist indicate the passed in input stream should be closed
     *                             once the data chunk persisted
     * @return a file handle that still needs save() called on it
     */
    public GridFSInputFile createFile( InputStream in , String filename, boolean closeStreamOnPersist ){
        return new GridFSInputFile( this , in , filename, closeStreamOnPersist );
    }

    /**
     * @see {@link GridFS#createFile()} on how to use this method
     * @param filename the file name as stored in the db
     * @return a file handle that still needs its data written and save() called
     */
    public GridFSInputFile createFile(String filename) {
        return new GridFSInputFile( this , filename );
    }

    /**
     * This method creates an empty {@link GridFSInputFile} instance. On this
     * instance an {@link java.io.OutputStream} can be obtained using the
     * {@link GridFSInputFile#getOutputStream()} method. You can still call
     * {@link GridFSInputFile#setContentType(String)} and
     * {@link GridFSInputFile#setFilename(String)}. The file will be completely
     * written and closed after calling the {@link java.io.OutputStream#close()}
     * method on the output stream.
     *
     * @return GridFS file handle instance.
     */
    public GridFSInputFile createFile() {
        return new GridFSInputFile( this );
    }

    // --------------------------
    // ------ members     -------
    // --------------------------

    /**
     * gets the bucket name used in the collection's namespace
     *
     * @return the bucket name
     */
    public String getBucketName(){
        return _bucketName;
    }

    /**
     * gets the db used
     *
     * @return the database this GridFS operates on
     */
    public DB getDB(){
        return _db;
    }

    /**
     * Gets the {@link DBCollection} in which the file's metadata is stored.
     *
     * @return the collection
     */
    protected DBCollection getFilesCollection() {
        return _filesCollection;
    }

    /**
     * Gets the {@link DBCollection} in which the binary chunks are stored.
     *
     * @return the collection
     */
    protected DBCollection getChunksCollection() {
        return _chunkCollection;
    }

    /**
     * @deprecated Please use {@link #getDB()} for access.
     */
    @Deprecated
    protected transient final DB _db;

    /**
     * @deprecated Please use {@link #getBucketName()} for access.
     */
    @Deprecated
    protected final String _bucketName;

    /**
     * @deprecated Please use {@link #getFilesCollection()} for access.
     */
    @Deprecated
    protected transient final DBCollection _filesCollection;

    /**
     * @deprecated Please use {@link #getChunksCollection()} for access.
     */
    @Deprecated
    protected transient final DBCollection _chunkCollection;
}
apache-2.0